GraphQL: Optimize queries, fix some null returns (on object), fix stitched GraphQLUpload (#6709)

* Optimize queries, fix some null returns, fix stitched GraphQLUpload

* Fix authData key selection

* Prefer ISO 8601 string since other GraphQL solutions use this format

* fix tests

Co-authored-by: Antonio Davi Macedo Coelho de Castro <adavimacedo@gmail.com>
This commit is contained in:
Antoine Cormouls
2020-10-02 00:19:26 +02:00
committed by GitHub
parent 929c4e1b0d
commit 62048260c9
32 changed files with 1533 additions and 1161 deletions

195
package-lock.json generated
View File

@@ -4,6 +4,63 @@
"lockfileVersion": 1,
"requires": true,
"dependencies": {
"@apollo/client": {
"version": "3.2.1",
"resolved": "https://registry.npmjs.org/@apollo/client/-/client-3.2.1.tgz",
"integrity": "sha512-w1EdCf3lvSwsxG2zbn8Rm31nPh9gQrB7u61BnU1QCM5BNIfOxiuuldzGNMHi5kI9KleisFvZl/9OA7pEkVg/yw==",
"requires": {
"@graphql-typed-document-node/core": "^3.0.0",
"@types/zen-observable": "^0.8.0",
"@wry/context": "^0.5.2",
"@wry/equality": "^0.2.0",
"fast-json-stable-stringify": "^2.0.0",
"graphql-tag": "^2.11.0",
"hoist-non-react-statics": "^3.3.2",
"optimism": "^0.12.1",
"prop-types": "^15.7.2",
"symbol-observable": "^2.0.0",
"terser": "^5.2.0",
"ts-invariant": "^0.4.4",
"tslib": "^1.10.0",
"zen-observable": "^0.8.14"
},
"dependencies": {
"@wry/context": {
"version": "0.5.2",
"resolved": "https://registry.npmjs.org/@wry/context/-/context-0.5.2.tgz",
"integrity": "sha512-B/JLuRZ/vbEKHRUiGj6xiMojST1kHhu4WcreLfNN7q9DqQFrb97cWgf/kiYsPSUCAMVN0HzfFc8XjJdzgZzfjw==",
"requires": {
"tslib": "^1.9.3"
}
},
"@wry/equality": {
"version": "0.2.0",
"resolved": "https://registry.npmjs.org/@wry/equality/-/equality-0.2.0.tgz",
"integrity": "sha512-Y4d+WH6hs+KZJUC8YKLYGarjGekBrhslDbf/R20oV+AakHPINSitHfDRQz3EGcEWc1luXYNUvMhawWtZVWNGvQ==",
"requires": {
"tslib": "^1.9.3"
}
},
"graphql-tag": {
"version": "2.11.0",
"resolved": "https://registry.npmjs.org/graphql-tag/-/graphql-tag-2.11.0.tgz",
"integrity": "sha512-VmsD5pJqWJnQZMUeRwrDhfgoyqcfwEkvtpANqcoUG8/tOLkwNgU9mzub/Mc78OJMhHjx7gfAMTxzdG43VGg3bA=="
},
"optimism": {
"version": "0.12.2",
"resolved": "https://registry.npmjs.org/optimism/-/optimism-0.12.2.tgz",
"integrity": "sha512-k7hFhlmfLl6HNThIuuvYMQodC1c+q6Uc6V9cLVsMWyW514QuaxVJH/khPu2vLRIoDTpFdJ5sojlARhg1rzyGbg==",
"requires": {
"@wry/context": "^0.5.2"
}
},
"symbol-observable": {
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/symbol-observable/-/symbol-observable-2.0.3.tgz",
"integrity": "sha512-sQV7phh2WCYAn81oAkakC5qjq2Ml0g8ozqz03wOGnx9dDlG1de6yrF+0RAzSJD8fPUow3PTSMf2SAbOGxb93BA=="
}
}
},
"@apollo/protobufjs": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/@apollo/protobufjs/-/protobufjs-1.0.5.tgz",
@@ -2794,6 +2851,65 @@
}
}
},
"@graphql-tools/links": {
"version": "6.2.4",
"resolved": "https://registry.npmjs.org/@graphql-tools/links/-/links-6.2.4.tgz",
"integrity": "sha512-dQH3oWVTkCwzGmfIi1OjyKAjPw1jOexP1f3hv8UajgU7Um/DCjVkvXQHeMGlihXg4bH/wogFheCJ0SwF4oFFUA==",
"requires": {
"@graphql-tools/utils": "^6.2.4",
"apollo-link": "1.2.14",
"apollo-upload-client": "14.1.2",
"cross-fetch": "3.0.6",
"form-data": "3.0.0",
"is-promise": "4.0.0",
"tslib": "~2.0.1"
},
"dependencies": {
"@ardatan/aggregate-error": {
"version": "0.0.6",
"resolved": "https://registry.npmjs.org/@ardatan/aggregate-error/-/aggregate-error-0.0.6.tgz",
"integrity": "sha512-vyrkEHG1jrukmzTPtyWB4NLPauUw5bQeg4uhn8f+1SSynmrOcyvlb1GKQjjgoBzElLdfXCRYX8UnBlhklOHYRQ==",
"requires": {
"tslib": "~2.0.1"
}
},
"@graphql-tools/utils": {
"version": "6.2.4",
"resolved": "https://registry.npmjs.org/@graphql-tools/utils/-/utils-6.2.4.tgz",
"integrity": "sha512-ybgZ9EIJE3JMOtTrTd2VcIpTXtDrn2q6eiYkeYMKRVh3K41+LZa6YnR2zKERTXqTWqhobROwLt4BZbw2O3Aeeg==",
"requires": {
"@ardatan/aggregate-error": "0.0.6",
"camel-case": "4.1.1",
"tslib": "~2.0.1"
}
},
"apollo-upload-client": {
"version": "14.1.2",
"resolved": "https://registry.npmjs.org/apollo-upload-client/-/apollo-upload-client-14.1.2.tgz",
"integrity": "sha512-ozaW+4tnVz1rpfwiQwG3RCdCcZ93RV/37ZQbRnObcQ9mjb+zur58sGDPVg9Ef3fiujLmiE/Fe9kdgvIMA3VOjA==",
"requires": {
"@apollo/client": "^3.1.5",
"@babel/runtime": "^7.11.2",
"extract-files": "^9.0.0"
}
},
"extract-files": {
"version": "9.0.0",
"resolved": "https://registry.npmjs.org/extract-files/-/extract-files-9.0.0.tgz",
"integrity": "sha512-CvdFfHkC95B4bBBk36hcEmvdR2awOdhhVUYH6S/zrVj3477zven/fJMYg7121h4T1xHZC+tetUpubpAhxwI7hQ=="
},
"is-promise": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/is-promise/-/is-promise-4.0.0.tgz",
"integrity": "sha512-hvpoI6korhJMnej285dSg6nu1+e6uxs7zG3BYAm5byqDsgJNWwxzM6z6iZiAgQR4TJ30JmBTOwqZUw3WlyH3AQ=="
},
"tslib": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.0.1.tgz",
"integrity": "sha512-SgIkNheinmEBgx1IUNirK0TUD4X9yjjBRTqqjggWCU3pUEqIk3/Uwl3yRixYKT6WjQuGiwDv4NomL3wqRCj+CQ=="
}
}
},
"@graphql-tools/merge": {
"version": "6.2.0",
"resolved": "https://registry.npmjs.org/@graphql-tools/merge/-/merge-6.2.0.tgz",
@@ -2949,6 +3065,11 @@
}
}
},
"@graphql-typed-document-node/core": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/@graphql-typed-document-node/core/-/core-3.1.0.tgz",
"integrity": "sha512-wYn6r8zVZyQJ6rQaALBEln5B1pzxb9shV5Ef97kTvn6yVGrqyXVnDqnU24MXnFubR+rZjBY9NWuxX3FB2sTsjg=="
},
"@istanbuljs/load-nyc-config": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz",
@@ -3434,8 +3555,7 @@
"@types/zen-observable": {
"version": "0.8.0",
"resolved": "https://registry.npmjs.org/@types/zen-observable/-/zen-observable-0.8.0.tgz",
"integrity": "sha512-te5lMAWii1uEJ4FwLjzdlbw3+n0FZNOvFXHxQDKeT0dilh7HOzdMzV2TrJVUzq8ep7J4Na8OUYPRLSQkJHAlrg==",
"dev": true
"integrity": "sha512-te5lMAWii1uEJ4FwLjzdlbw3+n0FZNOvFXHxQDKeT0dilh7HOzdMzV2TrJVUzq8ep7J4Na8OUYPRLSQkJHAlrg=="
},
"@wry/context": {
"version": "0.4.4",
@@ -4492,6 +4612,11 @@
"integrity": "sha1-+PeLdniYiO858gXNY39o5wISKyw=",
"dev": true
},
"buffer-from": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.1.tgz",
"integrity": "sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A=="
},
"buffer-writer": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/buffer-writer/-/buffer-writer-2.0.0.tgz",
@@ -5124,6 +5249,14 @@
"cross-spawn": "^7.0.1"
}
},
"cross-fetch": {
"version": "3.0.6",
"resolved": "https://registry.npmjs.org/cross-fetch/-/cross-fetch-3.0.6.tgz",
"integrity": "sha512-KBPUbqgFjzWlVcURG+Svp9TlhA5uliYtiNx/0r8nv0pdypeQCRJ9IaSIc3q/x3q8t3F75cHuwxVql1HFGHCNJQ==",
"requires": {
"node-fetch": "2.6.1"
}
},
"cross-spawn": {
"version": "7.0.1",
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.1.tgz",
@@ -7253,6 +7386,14 @@
}
}
},
"hoist-non-react-statics": {
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/hoist-non-react-statics/-/hoist-non-react-statics-3.3.2.tgz",
"integrity": "sha512-/gGivxi8JPKWNm/W0jSmzcMPpfpPLc3dY/6GxhX2hQ9iGj3aDfklV4ET7NjKpSinLpJ5vafa9iiGIEZg10SfBw==",
"requires": {
"react-is": "^16.7.0"
}
},
"html-escaper": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz",
@@ -8089,8 +8230,7 @@
"js-tokens": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz",
"integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==",
"dev": true
"integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ=="
},
"js-yaml": {
"version": "3.13.1",
@@ -8924,7 +9064,6 @@
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz",
"integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==",
"dev": true,
"requires": {
"js-tokens": "^3.0.0 || ^4.0.0"
}
@@ -10522,6 +10661,16 @@
"with-callback": "^1.0.2"
}
},
"prop-types": {
"version": "15.7.2",
"resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.7.2.tgz",
"integrity": "sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==",
"requires": {
"loose-envify": "^1.4.0",
"object-assign": "^4.1.1",
"react-is": "^16.8.1"
}
},
"proto-list": {
"version": "1.2.4",
"resolved": "https://registry.npmjs.org/proto-list/-/proto-list-1.2.4.tgz",
@@ -10628,6 +10777,11 @@
"unpipe": "1.0.0"
}
},
"react-is": {
"version": "16.13.1",
"resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz",
"integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ=="
},
"react-native-crypto-js": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/react-native-crypto-js/-/react-native-crypto-js-1.0.0.tgz",
@@ -11413,6 +11567,15 @@
"urix": "^0.1.0"
}
},
"source-map-support": {
"version": "0.5.19",
"resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.19.tgz",
"integrity": "sha512-Wonm7zOCIJzBGQdB+thsPar0kYuCIzYvxZwlBa87yi/Mdjv7Tip2cyVbLj5o0cFPN4EVkuTwb3GDDyUx2DGnGw==",
"requires": {
"buffer-from": "^1.0.0",
"source-map": "^0.6.0"
}
},
"source-map-url": {
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/source-map-url/-/source-map-url-0.4.0.tgz",
@@ -11774,6 +11937,28 @@
"xtend": "^4.0.0"
}
},
"terser": {
"version": "5.3.3",
"resolved": "https://registry.npmjs.org/terser/-/terser-5.3.3.tgz",
"integrity": "sha512-vRQDIlD+2Pg8YMwVK9kMM3yGylG95EIwzBai1Bw7Ot4OBfn3VP1TZn3EWx4ep2jERN/AmnVaTiGuelZSN7ds/A==",
"requires": {
"commander": "^2.20.0",
"source-map": "~0.7.2",
"source-map-support": "~0.5.19"
},
"dependencies": {
"commander": {
"version": "2.20.3",
"resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz",
"integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ=="
},
"source-map": {
"version": "0.7.3",
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.3.tgz",
"integrity": "sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ=="
}
}
},
"test-exclude": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz",

View File

@@ -20,6 +20,7 @@
"license": "BSD-3-Clause",
"dependencies": {
"@apollographql/graphql-playground-html": "1.6.26",
"@graphql-tools/links": "^6.2.4",
"@graphql-tools/stitch": "6.2.0",
"@graphql-tools/utils": "6.2.1",
"@parse/fs-files-adapter": "1.0.1",

File diff suppressed because it is too large Load Diff

View File

@@ -25,7 +25,8 @@ describe_only_db('mongo')('Idempotency', () => {
config,
auth.master(config),
'_Idempotency',
res.results[0].objectId);
res.results[0].objectId
);
}
async function setup(options) {
await reconfigureServer({
@@ -37,14 +38,16 @@ describe_only_db('mongo')('Idempotency', () => {
}
// Setups
beforeEach(async () => {
if (SIMULATE_TTL) { jasmine.DEFAULT_TIMEOUT_INTERVAL = 200000; }
if (SIMULATE_TTL) {
jasmine.DEFAULT_TIMEOUT_INTERVAL = 200000;
}
await setup({
paths: [
"functions/.*",
"jobs/.*",
"classes/.*",
"users",
"installations"
'functions/.*',
'jobs/.*',
'classes/.*',
'users',
'installations',
],
ttl: 30,
});
@@ -61,14 +64,14 @@ describe_only_db('mongo')('Idempotency', () => {
headers: {
'X-Parse-Application-Id': Parse.applicationId,
'X-Parse-Master-Key': Parse.masterKey,
'X-Parse-Request-Id': 'abc-123'
}
'X-Parse-Request-Id': 'abc-123',
},
};
expect(Config.get(Parse.applicationId).idempotencyOptions.ttl).toBe(30);
await request(params);
await request(params).then(fail, e => {
expect(e.status).toEqual(400);
expect(e.data.error).toEqual("Duplicate request");
expect(e.data.error).toEqual('Duplicate request');
});
expect(counter).toBe(1);
});
@@ -84,8 +87,8 @@ describe_only_db('mongo')('Idempotency', () => {
headers: {
'X-Parse-Application-Id': Parse.applicationId,
'X-Parse-Master-Key': Parse.masterKey,
'X-Parse-Request-Id': 'abc-123'
}
'X-Parse-Request-Id': 'abc-123',
},
};
await expectAsync(request(params)).toBeResolved();
if (SIMULATE_TTL) {
@@ -108,13 +111,13 @@ describe_only_db('mongo')('Idempotency', () => {
headers: {
'X-Parse-Application-Id': Parse.applicationId,
'X-Parse-Master-Key': Parse.masterKey,
'X-Parse-Request-Id': 'abc-123'
}
'X-Parse-Request-Id': 'abc-123',
},
};
await expectAsync(request(params)).toBeResolved();
await request(params).then(fail, e => {
expect(e.status).toEqual(400);
expect(e.data.error).toEqual("Duplicate request");
expect(e.data.error).toEqual('Duplicate request');
});
expect(counter).toBe(1);
});
@@ -130,13 +133,13 @@ describe_only_db('mongo')('Idempotency', () => {
headers: {
'X-Parse-Application-Id': Parse.applicationId,
'X-Parse-Master-Key': Parse.masterKey,
'X-Parse-Request-Id': 'abc-123'
}
'X-Parse-Request-Id': 'abc-123',
},
};
await expectAsync(request(params)).toBeResolved();
await request(params).then(fail, e => {
expect(e.status).toEqual(400);
expect(e.data.error).toEqual("Duplicate request");
expect(e.data.error).toEqual('Duplicate request');
});
expect(counter).toBe(1);
});
@@ -150,19 +153,19 @@ describe_only_db('mongo')('Idempotency', () => {
method: 'POST',
url: 'http://localhost:8378/1/users',
body: {
username: "user",
password: "pass"
username: 'user',
password: 'pass',
},
headers: {
'X-Parse-Application-Id': Parse.applicationId,
'X-Parse-Master-Key': Parse.masterKey,
'X-Parse-Request-Id': 'abc-123'
}
'X-Parse-Request-Id': 'abc-123',
},
};
await expectAsync(request(params)).toBeResolved();
await request(params).then(fail, e => {
expect(e.status).toEqual(400);
expect(e.data.error).toEqual("Duplicate request");
expect(e.data.error).toEqual('Duplicate request');
});
expect(counter).toBe(1);
});
@@ -176,19 +179,19 @@ describe_only_db('mongo')('Idempotency', () => {
method: 'POST',
url: 'http://localhost:8378/1/installations',
body: {
installationId: "1",
deviceType: "ios"
installationId: '1',
deviceType: 'ios',
},
headers: {
'X-Parse-Application-Id': Parse.applicationId,
'X-Parse-Master-Key': Parse.masterKey,
'X-Parse-Request-Id': 'abc-123'
}
'X-Parse-Request-Id': 'abc-123',
},
};
await expectAsync(request(params)).toBeResolved();
await request(params).then(fail, e => {
expect(e.status).toEqual(400);
expect(e.data.error).toEqual("Duplicate request");
expect(e.data.error).toEqual('Duplicate request');
});
expect(counter).toBe(1);
});
@@ -205,8 +208,8 @@ describe_only_db('mongo')('Idempotency', () => {
headers: {
'X-Parse-Application-Id': Parse.applicationId,
'X-Parse-Master-Key': Parse.masterKey,
'X-Parse-Request-Id': uuid.v4()
}
'X-Parse-Request-Id': uuid.v4(),
},
};
return request(params);
});
@@ -215,7 +218,9 @@ describe_only_db('mongo')('Idempotency', () => {
});
it('should re-throw any other error unchanged when writing request entry fails for any other reason', async () => {
spyOn(rest, 'create').and.rejectWith(new Parse.Error(0, "some other error"));
spyOn(rest, 'create').and.rejectWith(
new Parse.Error(0, 'some other error')
);
Parse.Cloud.define('myFunction', () => {});
const params = {
method: 'POST',
@@ -223,19 +228,23 @@ describe_only_db('mongo')('Idempotency', () => {
headers: {
'X-Parse-Application-Id': Parse.applicationId,
'X-Parse-Master-Key': Parse.masterKey,
'X-Parse-Request-Id': 'abc-123'
}
'X-Parse-Request-Id': 'abc-123',
},
};
await request(params).then(fail, e => {
expect(e.status).toEqual(400);
expect(e.data.error).toEqual("some other error");
expect(e.data.error).toEqual('some other error');
});
});
it('should use default configuration when none is set', async () => {
await setup({});
expect(Config.get(Parse.applicationId).idempotencyOptions.ttl).toBe(Definitions.IdempotencyOptions.ttl.default);
expect(Config.get(Parse.applicationId).idempotencyOptions.paths).toBe(Definitions.IdempotencyOptions.paths.default);
expect(Config.get(Parse.applicationId).idempotencyOptions.ttl).toBe(
Definitions.IdempotencyOptions.ttl.default
);
expect(Config.get(Parse.applicationId).idempotencyOptions.paths).toBe(
Definitions.IdempotencyOptions.paths.default
);
});
it('should throw on invalid configuration', async () => {

View File

@@ -12,7 +12,7 @@ describe('middlewares', () => {
_ApplicationId: 'FakeAppId',
},
headers: {},
get: (key) => {
get: key => {
return fakeReq.headers[key.toLowerCase()];
},
};
@@ -24,7 +24,7 @@ describe('middlewares', () => {
AppCache.del(fakeReq.body._ApplicationId);
});
it('should use _ContentType if provided', (done) => {
it('should use _ContentType if provided', done => {
expect(fakeReq.headers['content-type']).toEqual(undefined);
const contentType = 'image/jpeg';
fakeReq.body._ContentType = contentType;
@@ -64,7 +64,7 @@ describe('middlewares', () => {
expect(fakeRes.status).toHaveBeenCalledWith(403);
});
it('should succeed when any one of the configured keys supplied', (done) => {
it('should succeed when any one of the configured keys supplied', done => {
AppCache.put(fakeReq.body._ApplicationId, {
clientKey: 'clientKey',
masterKey: 'masterKey',
@@ -77,7 +77,7 @@ describe('middlewares', () => {
});
});
it('should succeed when client key supplied but empty', (done) => {
it('should succeed when client key supplied but empty', done => {
AppCache.put(fakeReq.body._ApplicationId, {
clientKey: '',
masterKey: 'masterKey',
@@ -90,7 +90,7 @@ describe('middlewares', () => {
});
});
it('should succeed when no keys are configured and none supplied', (done) => {
it('should succeed when no keys are configured and none supplied', done => {
AppCache.put(fakeReq.body._ApplicationId, {
masterKey: 'masterKey',
});
@@ -110,22 +110,22 @@ describe('middlewares', () => {
const BodyKeys = Object.keys(BodyParams);
BodyKeys.forEach((infoKey) => {
BodyKeys.forEach(infoKey => {
const bodyKey = BodyParams[infoKey];
const keyValue = 'Fake' + bodyKey;
// javascriptKey is the only one that gets defaulted,
const otherKeys = BodyKeys.filter(
(otherKey) => otherKey !== infoKey && otherKey !== 'javascriptKey'
otherKey => otherKey !== infoKey && otherKey !== 'javascriptKey'
);
it(`it should pull ${bodyKey} into req.info`, (done) => {
it(`it should pull ${bodyKey} into req.info`, done => {
fakeReq.body[bodyKey] = keyValue;
middlewares.handleParseHeaders(fakeReq, fakeRes, () => {
expect(fakeReq.body[bodyKey]).toEqual(undefined);
expect(fakeReq.info[infoKey]).toEqual(keyValue);
otherKeys.forEach((otherKey) => {
otherKeys.forEach(otherKey => {
expect(fakeReq.info[otherKey]).toEqual(undefined);
});
@@ -145,7 +145,7 @@ describe('middlewares', () => {
expect(fakeRes.status).toHaveBeenCalledWith(403);
});
it('should succeed if the ip does belong to masterKeyIps list', (done) => {
it('should succeed if the ip does belong to masterKeyIps list', done => {
AppCache.put(fakeReq.body._ApplicationId, {
masterKey: 'masterKey',
masterKeyIps: ['ip1', 'ip2'],
@@ -169,7 +169,7 @@ describe('middlewares', () => {
expect(fakeRes.status).toHaveBeenCalledWith(403);
});
it('should succeed if the connection.remoteAddress does belong to masterKeyIps list', (done) => {
it('should succeed if the connection.remoteAddress does belong to masterKeyIps list', done => {
AppCache.put(fakeReq.body._ApplicationId, {
masterKey: 'masterKey',
masterKeyIps: ['ip1', 'ip2'],
@@ -193,7 +193,7 @@ describe('middlewares', () => {
expect(fakeRes.status).toHaveBeenCalledWith(403);
});
it('should succeed if the socket.remoteAddress does belong to masterKeyIps list', (done) => {
it('should succeed if the socket.remoteAddress does belong to masterKeyIps list', done => {
AppCache.put(fakeReq.body._ApplicationId, {
masterKey: 'masterKey',
masterKeyIps: ['ip1', 'ip2'],
@@ -217,7 +217,7 @@ describe('middlewares', () => {
expect(fakeRes.status).toHaveBeenCalledWith(403);
});
it('should succeed if the connection.socket.remoteAddress does belong to masterKeyIps list', (done) => {
it('should succeed if the connection.socket.remoteAddress does belong to masterKeyIps list', done => {
AppCache.put(fakeReq.body._ApplicationId, {
masterKey: 'masterKey',
masterKeyIps: ['ip1', 'ip2'],
@@ -230,7 +230,7 @@ describe('middlewares', () => {
});
});
it('should allow any ip to use masterKey if masterKeyIps is empty', (done) => {
it('should allow any ip to use masterKey if masterKeyIps is empty', done => {
AppCache.put(fakeReq.body._ApplicationId, {
masterKey: 'masterKey',
masterKeyIps: [],
@@ -243,7 +243,7 @@ describe('middlewares', () => {
});
});
it('should succeed if xff header does belong to masterKeyIps', (done) => {
it('should succeed if xff header does belong to masterKeyIps', done => {
AppCache.put(fakeReq.body._ApplicationId, {
masterKey: 'masterKey',
masterKeyIps: ['ip1'],
@@ -256,7 +256,7 @@ describe('middlewares', () => {
});
});
it('should succeed if xff header with one ip does belong to masterKeyIps', (done) => {
it('should succeed if xff header with one ip does belong to masterKeyIps', done => {
AppCache.put(fakeReq.body._ApplicationId, {
masterKey: 'masterKey',
masterKeyIps: ['ip1'],
@@ -393,7 +393,7 @@ describe('middlewares', () => {
);
});
it('should use user provided on field userFromJWT', (done) => {
it('should use user provided on field userFromJWT', done => {
AppCache.put(fakeReq.body._ApplicationId, {
masterKey: 'masterKey',
});

View File

@@ -354,14 +354,12 @@ describe_only_db('mongo')('MongoStorageAdapter', () => {
it('should delete field without index', async () => {
const database = Config.get(Parse.applicationId).database;
const obj = new Parse.Object('MyObject');
obj.set("test", 1);
obj.set('test', 1);
await obj.save();
const schemaBeforeDeletion = await new Parse.Schema('MyObject').get();
await database.adapter.deleteFields(
"MyObject",
schemaBeforeDeletion,
["test"]
);
await database.adapter.deleteFields('MyObject', schemaBeforeDeletion, [
'test',
]);
const schemaAfterDeletion = await new Parse.Schema('MyObject').get();
expect(schemaBeforeDeletion.fields.test).toBeDefined();
expect(schemaAfterDeletion.fields.test).toBeUndefined();
@@ -370,19 +368,15 @@ describe_only_db('mongo')('MongoStorageAdapter', () => {
it('should delete field with index', async () => {
const database = Config.get(Parse.applicationId).database;
const obj = new Parse.Object('MyObject');
obj.set("test", 1);
obj.set('test', 1);
await obj.save();
const schemaBeforeDeletion = await new Parse.Schema('MyObject').get();
await database.adapter.ensureIndex(
'MyObject',
schemaBeforeDeletion,
['test']
);
await database.adapter.deleteFields(
"MyObject",
schemaBeforeDeletion,
["test"]
);
await database.adapter.ensureIndex('MyObject', schemaBeforeDeletion, [
'test',
]);
await database.adapter.deleteFields('MyObject', schemaBeforeDeletion, [
'test',
]);
const schemaAfterDeletion = await new Parse.Schema('MyObject').get();
expect(schemaBeforeDeletion.fields.test).toBeDefined();
expect(schemaAfterDeletion.fields.test).toBeUndefined();

View File

@@ -55,7 +55,7 @@ const loadTestData = () => {
return Parse.Object.saveAll([obj1, obj2, obj3, obj4]);
};
const get = function(url, options) {
const get = function (url, options) {
options.qs = options.body;
delete options.body;
Object.keys(options.qs).forEach(key => {
@@ -1345,10 +1345,10 @@ describe('Parse.Query Aggregate testing', () => {
user.set('score', score);
user
.signUp()
.then(function() {
.then(function () {
return get(Parse.serverURL + '/aggregate/_User', options);
})
.then(function(resp) {
.then(function (resp) {
expect(resp.results.length).toBe(1);
const result = resp.results[0];
@@ -1369,7 +1369,7 @@ describe('Parse.Query Aggregate testing', () => {
done();
})
.catch(function(err) {
.catch(function (err) {
fail(err);
});
});
@@ -1440,13 +1440,25 @@ describe('Parse.Query Aggregate testing', () => {
['location'],
undefined,
false,
{ indexType: '2dsphere' },
{ indexType: '2dsphere' }
);
// Create objects
const GeoObject = Parse.Object.extend('GeoObject');
const obj1 = new GeoObject({ value: 1, location: new Parse.GeoPoint(1, 1), date: new Date(1) });
const obj2 = new GeoObject({ value: 2, location: new Parse.GeoPoint(2, 1), date: new Date(2) });
const obj3 = new GeoObject({ value: 3, location: new Parse.GeoPoint(3, 1), date: new Date(3) });
const obj1 = new GeoObject({
value: 1,
location: new Parse.GeoPoint(1, 1),
date: new Date(1),
});
const obj2 = new GeoObject({
value: 2,
location: new Parse.GeoPoint(2, 1),
date: new Date(2),
});
const obj3 = new GeoObject({
value: 3,
location: new Parse.GeoPoint(3, 1),
date: new Date(3),
});
await Parse.Object.saveAll([obj1, obj2, obj3]);
// Create query
const pipeline = [
@@ -1454,18 +1466,18 @@ describe('Parse.Query Aggregate testing', () => {
geoNear: {
near: {
type: 'Point',
coordinates: [1, 1]
coordinates: [1, 1],
},
key: 'location',
spherical: true,
distanceField: 'dist',
query: {
date: {
$gte: new Date(2)
}
}
}
}
$gte: new Date(2),
},
},
},
},
];
const query = new Parse.Query(GeoObject);
const results = await query.aggregate(pipeline);
@@ -1489,9 +1501,21 @@ describe('Parse.Query Aggregate testing', () => {
);
// Create objects
const GeoObject = Parse.Object.extend('GeoObject');
const obj1 = new GeoObject({ value: 1, location: new Parse.GeoPoint(1, 1), date: new Date(1) });
const obj2 = new GeoObject({ value: 2, location: new Parse.GeoPoint(2, 1), date: new Date(2) });
const obj3 = new GeoObject({ value: 3, location: new Parse.GeoPoint(3, 1), date: new Date(3) });
const obj1 = new GeoObject({
value: 1,
location: new Parse.GeoPoint(1, 1),
date: new Date(1),
});
const obj2 = new GeoObject({
value: 2,
location: new Parse.GeoPoint(2, 1),
date: new Date(2),
});
const obj3 = new GeoObject({
value: 3,
location: new Parse.GeoPoint(3, 1),
date: new Date(3),
});
await Parse.Object.saveAll([obj1, obj2, obj3]);
// Create query
const pipeline = [
@@ -1499,13 +1523,13 @@ describe('Parse.Query Aggregate testing', () => {
geoNear: {
near: {
type: 'Point',
coordinates: [1, 1]
coordinates: [1, 1],
},
key: 'location',
spherical: true,
distanceField: 'dist'
}
}
distanceField: 'dist',
},
},
];
const query = new Parse.Query(GeoObject);
const results = await query.aggregate(pipeline);
@@ -1513,38 +1537,53 @@ describe('Parse.Query Aggregate testing', () => {
expect(results.length).toEqual(3);
});
it_only_db('mongo')('aggregate geoNear with near legacy coordinate pair', async () => {
// Create geo index which is required for `geoNear` query
const database = Config.get(Parse.applicationId).database;
const schema = await new Parse.Schema('GeoObject').save();
await database.adapter.ensureIndex(
'GeoObject',
schema,
['location'],
undefined,
false,
'2dsphere'
);
// Create objects
const GeoObject = Parse.Object.extend('GeoObject');
const obj1 = new GeoObject({ value: 1, location: new Parse.GeoPoint(1, 1), date: new Date(1) });
const obj2 = new GeoObject({ value: 2, location: new Parse.GeoPoint(2, 1), date: new Date(2) });
const obj3 = new GeoObject({ value: 3, location: new Parse.GeoPoint(3, 1), date: new Date(3) });
await Parse.Object.saveAll([obj1, obj2, obj3]);
// Create query
const pipeline = [
{
geoNear: {
near: [1, 1],
key: 'location',
spherical: true,
distanceField: 'dist'
}
}
];
const query = new Parse.Query(GeoObject);
const results = await query.aggregate(pipeline);
// Check results
expect(results.length).toEqual(3);
});
it_only_db('mongo')(
'aggregate geoNear with near legacy coordinate pair',
async () => {
// Create geo index which is required for `geoNear` query
const database = Config.get(Parse.applicationId).database;
const schema = await new Parse.Schema('GeoObject').save();
await database.adapter.ensureIndex(
'GeoObject',
schema,
['location'],
undefined,
false,
'2dsphere'
);
// Create objects
const GeoObject = Parse.Object.extend('GeoObject');
const obj1 = new GeoObject({
value: 1,
location: new Parse.GeoPoint(1, 1),
date: new Date(1),
});
const obj2 = new GeoObject({
value: 2,
location: new Parse.GeoPoint(2, 1),
date: new Date(2),
});
const obj3 = new GeoObject({
value: 3,
location: new Parse.GeoPoint(3, 1),
date: new Date(3),
});
await Parse.Object.saveAll([obj1, obj2, obj3]);
// Create query
const pipeline = [
{
geoNear: {
near: [1, 1],
key: 'location',
spherical: true,
distanceField: 'dist',
},
},
];
const query = new Parse.Query(GeoObject);
const results = await query.aggregate(pipeline);
// Check results
expect(results.length).toEqual(3);
}
);
});

View File

@@ -384,10 +384,10 @@ describe('Parse.User testing', () => {
let sessionToken = null;
Promise.resolve()
.then(function() {
.then(function () {
return Parse.User.signUp('Jason', 'Parse', { code: 'red' });
})
.then(function(newUser) {
.then(function (newUser) {
equal(Parse.User.current(), newUser);
user = newUser;
@@ -401,7 +401,7 @@ describe('Parse.User testing', () => {
return Parse.User.become(sessionToken);
})
.then(function(newUser) {
.then(function (newUser) {
equal(Parse.User.current(), newUser);
ok(newUser);
@@ -417,24 +417,24 @@ describe('Parse.User testing', () => {
return Parse.User.become('somegarbage');
})
.then(
function() {
function () {
// This should have failed actually.
ok(
false,
"Shouldn't have been able to log in with garbage session token."
);
},
function(error) {
function (error) {
ok(error);
// Handle the error.
return Promise.resolve();
}
)
.then(
function() {
function () {
done();
},
function(error) {
function (error) {
ok(false, error);
done();
}
@@ -733,11 +733,11 @@ describe('Parse.User testing', () => {
function signUpAll(list, optionsOrCallback) {
let promise = Promise.resolve();
list.forEach(user => {
promise = promise.then(function() {
promise = promise.then(function () {
return user.signUp();
});
});
promise = promise.then(function() {
promise = promise.then(function () {
return list;
});
return promise.then(optionsOrCallback);
@@ -748,7 +748,7 @@ describe('Parse.User testing', () => {
const MESSAGES = 5;
// Make a list of users.
const userList = range(USERS).map(function(i) {
const userList = range(USERS).map(function (i) {
const user = new Parse.User();
user.set('password', 'user_num_' + i);
user.set('email', 'user_num_' + i + '@example.com');
@@ -756,14 +756,14 @@ describe('Parse.User testing', () => {
return user;
});
signUpAll(userList, async function(users) {
signUpAll(userList, async function (users) {
// Make a list of messages.
if (!users || users.length != USERS) {
fail('signupAll failed');
done();
return;
}
const messageList = range(MESSAGES).map(function(i) {
const messageList = range(MESSAGES).map(function (i) {
const message = new TestObject();
message.set('to', users[(i + 1) % USERS]);
message.set('from', users[i % USERS]);
@@ -844,7 +844,7 @@ describe('Parse.User testing', () => {
const user = new Parse.User();
user.set('username', 'alice');
user.set('password', 'password');
user.signUp().then(function(userAgain) {
user.signUp().then(function (userAgain) {
equal(userAgain.get('username'), 'bob');
ok(userAgain.dirty('username'));
const query = new Parse.Query(Parse.User);
@@ -938,14 +938,14 @@ describe('Parse.User testing', () => {
let id;
Parse.User.signUp('alice', 'password', null)
.then(function(alice) {
.then(function (alice) {
id = alice.id;
return Parse.User.logOut();
})
.then(() => {
return Parse.User.logIn('alice', 'password');
})
.then(function() {
.then(function () {
// Simulate browser refresh by force-reloading user from localStorage
Parse.User._clearCache();
@@ -953,7 +953,7 @@ describe('Parse.User testing', () => {
return Parse.User.current().save({ some_field: 1 });
})
.then(
function() {
function () {
// Check the user in memory just after save operation
const userInMemory = Parse.User.current();
@@ -1046,7 +1046,7 @@ describe('Parse.User testing', () => {
done();
},
function(error) {
function (error) {
ok(false, error);
done();
}
@@ -1089,7 +1089,7 @@ describe('Parse.User testing', () => {
it('user signup class method uses subclassing', async done => {
const SuperUser = Parse.User.extend({
secret: function() {
secret: function () {
return 1337;
},
});
@@ -1102,7 +1102,7 @@ describe('Parse.User testing', () => {
it('user on disk gets updated after save', async done => {
Parse.User.extend({
isSuper: function() {
isSuper: function () {
return true;
},
});
@@ -1130,7 +1130,7 @@ describe('Parse.User testing', () => {
done();
});
const getMockFacebookProviderWithIdToken = function(id, token) {
const getMockFacebookProviderWithIdToken = function (id, token) {
return {
authData: {
id: id,
@@ -1143,7 +1143,7 @@ describe('Parse.User testing', () => {
synchronizedAuthToken: null,
synchronizedExpiration: null,
authenticate: function(options) {
authenticate: function (options) {
if (this.shouldError) {
options.error(this, 'An error occurred');
} else if (this.shouldCancel) {
@@ -1152,7 +1152,7 @@ describe('Parse.User testing', () => {
options.success(this, this.authData);
}
},
restoreAuthentication: function(authData) {
restoreAuthentication: function (authData) {
if (!authData) {
this.synchronizedUserId = null;
this.synchronizedAuthToken = null;
@@ -1164,10 +1164,10 @@ describe('Parse.User testing', () => {
this.synchronizedExpiration = authData.expiration_date;
return true;
},
getAuthType: function() {
getAuthType: function () {
return 'facebook';
},
deauthenticate: function() {
deauthenticate: function () {
this.loggedOut = true;
this.restoreAuthentication(null);
},
@@ -1176,11 +1176,11 @@ describe('Parse.User testing', () => {
// Note that this mocks out client-side Facebook action rather than
// server-side.
const getMockFacebookProvider = function() {
const getMockFacebookProvider = function () {
return getMockFacebookProviderWithIdToken('8675309', 'jenny');
};
const getMockMyOauthProvider = function() {
const getMockMyOauthProvider = function () {
return {
authData: {
id: '12345',
@@ -1193,7 +1193,7 @@ describe('Parse.User testing', () => {
synchronizedAuthToken: null,
synchronizedExpiration: null,
authenticate: function(options) {
authenticate: function (options) {
if (this.shouldError) {
options.error(this, 'An error occurred');
} else if (this.shouldCancel) {
@@ -1202,7 +1202,7 @@ describe('Parse.User testing', () => {
options.success(this, this.authData);
}
},
restoreAuthentication: function(authData) {
restoreAuthentication: function (authData) {
if (!authData) {
this.synchronizedUserId = null;
this.synchronizedAuthToken = null;
@@ -1214,10 +1214,10 @@ describe('Parse.User testing', () => {
this.synchronizedExpiration = authData.expiration_date;
return true;
},
getAuthType: function() {
getAuthType: function () {
return 'myoauth';
},
deauthenticate: function() {
deauthenticate: function () {
this.loggedOut = true;
this.restoreAuthentication(null);
},
@@ -1225,7 +1225,7 @@ describe('Parse.User testing', () => {
};
Parse.User.extend({
extended: function() {
extended: function () {
return true;
},
});
@@ -1438,7 +1438,7 @@ describe('Parse.User testing', () => {
Parse.User._registerAuthenticationProvider(provider);
await Parse.User._logInWith('facebook');
Parse.User.logOut().then(async () => {
Parse.Cloud.beforeSave(Parse.User, function(req, res) {
Parse.Cloud.beforeSave(Parse.User, function (req, res) {
res.error("Before save shouldn't be called on login");
});
await Parse.User._logInWith('facebook');
@@ -1871,16 +1871,16 @@ describe('Parse.User testing', () => {
id: '12345',
access_token: 'token',
},
restoreAuthentication: function() {
restoreAuthentication: function () {
return true;
},
deauthenticate: function() {
deauthenticate: function () {
provider.authData = {};
},
authenticate: function(options) {
authenticate: function (options) {
options.success(this, provider.authData);
},
getAuthType: function() {
getAuthType: function () {
return 'shortLivedAuth';
},
};
@@ -1912,16 +1912,16 @@ describe('Parse.User testing', () => {
id: '12345',
access_token: 'token',
},
restoreAuthentication: function() {
restoreAuthentication: function () {
return true;
},
deauthenticate: function() {
deauthenticate: function () {
provider.authData = {};
},
authenticate: function(options) {
authenticate: function (options) {
options.success(this, provider.authData);
},
getAuthType: function() {
getAuthType: function () {
return 'shortLivedAuth';
},
};
@@ -2068,10 +2068,10 @@ describe('Parse.User testing', () => {
access_token: 'jenny',
expiration_date: new Date().toJSON(),
}).then(
function() {
function () {
done();
},
function(error) {
function (error) {
ok(false, error);
done();
}
@@ -2097,10 +2097,10 @@ describe('Parse.User testing', () => {
access_token: 'jenny',
expiration_date: new Date().toJSON(),
}).then(
function() {
function () {
done();
},
function(error) {
function (error) {
ok(false, error);
done();
}
@@ -2111,27 +2111,27 @@ describe('Parse.User testing', () => {
const data = { foo: 'bar' };
Parse.User.signUp('finn', 'human', data)
.then(function(user) {
.then(function (user) {
equal(Parse.User.current(), user);
equal(user.get('foo'), 'bar');
return Parse.User.logOut();
})
.then(function() {
.then(function () {
return Parse.User.logIn('finn', 'human');
})
.then(function(user) {
.then(function (user) {
equal(user, Parse.User.current());
equal(user.get('foo'), 'bar');
return Parse.User.logOut();
})
.then(function() {
.then(function () {
const user = new Parse.User();
user.set('username', 'jake');
user.set('password', 'dog');
user.set('foo', 'baz');
return user.signUp();
})
.then(function(user) {
.then(function (user) {
equal(user, Parse.User.current());
equal(user.get('foo'), 'baz');
user = new Parse.User();
@@ -2139,14 +2139,14 @@ describe('Parse.User testing', () => {
user.set('password', 'dog');
return user.logIn();
})
.then(function(user) {
.then(function (user) {
equal(user, Parse.User.current());
equal(user.get('foo'), 'baz');
const userAgain = new Parse.User();
userAgain.id = user.id;
return userAgain.fetch();
})
.then(function(userAgain) {
.then(function (userAgain) {
equal(userAgain.get('foo'), 'baz');
done();
});
@@ -2154,7 +2154,7 @@ describe('Parse.User testing', () => {
it("querying for users doesn't get session tokens", done => {
Parse.User.signUp('finn', 'human', { foo: 'bar' })
.then(function() {
.then(function () {
return Parse.User.logOut();
})
.then(() => {
@@ -2164,7 +2164,7 @@ describe('Parse.User testing', () => {
user.set('foo', 'baz');
return user.signUp();
})
.then(function() {
.then(function () {
return Parse.User.logOut();
})
.then(() => {
@@ -2172,7 +2172,7 @@ describe('Parse.User testing', () => {
return query.find({ sessionToken: null });
})
.then(
function(users) {
function (users) {
equal(users.length, 2);
users.forEach(user => {
expect(user.getSessionToken()).toBeUndefined();
@@ -2183,7 +2183,7 @@ describe('Parse.User testing', () => {
});
done();
},
function(error) {
function (error) {
ok(false, error);
done();
}
@@ -2214,19 +2214,19 @@ describe('Parse.User testing', () => {
user.setUsername('zxcv');
let currentSessionToken = '';
Promise.resolve()
.then(function() {
.then(function () {
return user.signUp();
})
.then(function() {
.then(function () {
currentSessionToken = user.getSessionToken();
return user.fetch();
})
.then(
function(u) {
function (u) {
expect(currentSessionToken).toEqual(u.getSessionToken());
done();
},
function(error) {
function (error) {
ok(false, error);
done();
}
@@ -2739,25 +2739,25 @@ describe('Parse.User testing', () => {
let sessionToken = null;
Promise.resolve()
.then(function() {
.then(function () {
return Parse.User.signUp('fosco', 'parse');
})
.then(function(newUser) {
.then(function (newUser) {
equal(Parse.User.current(), newUser);
sessionToken = newUser.getSessionToken();
ok(sessionToken);
newUser.set('password', 'facebook');
return newUser.save();
})
.then(function() {
.then(function () {
return Parse.User.become(sessionToken);
})
.then(
function() {
function () {
fail('Session should have been invalidated');
done();
},
function(err) {
function (err) {
expect(err.code).toBe(Parse.Error.INVALID_SESSION_TOKEN);
expect(err.message).toBe('Invalid session token');
done();
@@ -2768,25 +2768,25 @@ describe('Parse.User testing', () => {
it('test parse user become', done => {
let sessionToken = null;
Promise.resolve()
.then(function() {
.then(function () {
return Parse.User.signUp('flessard', 'folo', { foo: 1 });
})
.then(function(newUser) {
.then(function (newUser) {
equal(Parse.User.current(), newUser);
sessionToken = newUser.getSessionToken();
ok(sessionToken);
newUser.set('foo', 2);
return newUser.save();
})
.then(function() {
.then(function () {
return Parse.User.become(sessionToken);
})
.then(
function(newUser) {
function (newUser) {
equal(newUser.get('foo'), 2);
done();
},
function() {
function () {
fail('The session should still be valid');
done();
}
@@ -2798,7 +2798,7 @@ describe('Parse.User testing', () => {
let sessionToken = null;
Promise.resolve()
.then(function() {
.then(function () {
return Parse.User.signUp('log', 'out');
})
.then(newUser => {
@@ -3362,10 +3362,7 @@ describe('Parse.User testing', () => {
user
.signUp()
.then(() => {
return Parse.User.current()
.relation('relation')
.query()
.find();
return Parse.User.current().relation('relation').query().find();
})
.then(res => {
expect(res.length).toBe(0);
@@ -3401,9 +3398,7 @@ describe('Parse.User testing', () => {
return user.signUp();
})
.then(() => {
return Parse.User.current()
.set('emailVerified', true)
.save();
return Parse.User.current().set('emailVerified', true).save();
})
.then(() => {
fail('Should not be able to update emailVerified');
@@ -3574,9 +3569,7 @@ describe('Parse.User testing', () => {
return user.signUp();
})
.then(() => {
return Parse.User.current()
.set('_email_verify_token', 'bad')
.save();
return Parse.User.current().set('_email_verify_token', 'bad').save();
})
.then(() => {
fail('Should not be able to update email verification token');
@@ -4081,7 +4074,7 @@ describe('Parse.User testing', () => {
});
});
describe('Security Advisory GHSA-8w3j-g983-8jh5', function() {
describe('Security Advisory GHSA-8w3j-g983-8jh5', function () {
it_only_db('mongo')(
'should validate credentials first and check if account already linked afterwards ()',
async done => {

View File

@@ -219,9 +219,9 @@ describe('batch', () => {
expect(databaseAdapter.createObject.calls.argsFor(0)[3]).toBe(
databaseAdapter.createObject.calls.argsFor(1)[3]
);
expect(results.map(result => result.get('key')).sort()).toEqual(
['value1', 'value2']
);
expect(
results.map(result => result.get('key')).sort()
).toEqual(['value1', 'value2']);
done();
});
});

View File

@@ -473,7 +473,7 @@ describe('defaultGraphQLTypes', () => {
it('should serialize date', () => {
const date = new Date();
expect(serialize(date)).toBe(date.toUTCString());
expect(serialize(date)).toBe(date.toISOString());
});
it('should return iso value if object', () => {

View File

@@ -200,7 +200,7 @@ beforeEach(done => {
.catch(done.fail);
});
afterEach(function(done) {
afterEach(function (done) {
const afterLogOut = () => {
if (Object.keys(openConnections).length > 0) {
fail(
@@ -230,7 +230,7 @@ afterEach(function(done) {
'_Session',
'_Product',
'_Audience',
'_Idempotency'
'_Idempotency',
].indexOf(className) >= 0
);
}
@@ -327,13 +327,13 @@ function range(n) {
function mockCustomAuthenticator(id, password) {
const custom = {};
custom.validateAuthData = function(authData) {
custom.validateAuthData = function (authData) {
if (authData.id === id && authData.password.startsWith(password)) {
return Promise.resolve();
}
throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'not validated');
};
custom.validateAppId = function() {
custom.validateAppId = function () {
return Promise.resolve();
};
return custom;
@@ -345,14 +345,14 @@ function mockCustom() {
function mockFacebookAuthenticator(id, token) {
const facebook = {};
facebook.validateAuthData = function(authData) {
facebook.validateAuthData = function (authData) {
if (authData.id === id && authData.access_token.startsWith(token)) {
return Promise.resolve();
} else {
throw undefined;
}
};
facebook.validateAppId = function(appId, authData) {
facebook.validateAppId = function (appId, authData) {
if (authData.access_token.startsWith(token)) {
return Promise.resolve();
} else {
@@ -369,17 +369,17 @@ function mockFacebook() {
function mockShortLivedAuth() {
const auth = {};
let accessToken;
auth.setValidAccessToken = function(validAccessToken) {
auth.setValidAccessToken = function (validAccessToken) {
accessToken = validAccessToken;
};
auth.validateAuthData = function(authData) {
auth.validateAuthData = function (authData) {
if (authData.access_token == accessToken) {
return Promise.resolve();
} else {
return Promise.reject('Invalid access token');
}
};
auth.validateAppId = function() {
auth.validateAppId = function () {
return Promise.resolve();
};
return auth;
@@ -404,7 +404,7 @@ global.defaultConfiguration = defaultConfiguration;
global.mockCustomAuthenticator = mockCustomAuthenticator;
global.mockFacebookAuthenticator = mockFacebookAuthenticator;
global.databaseAdapter = databaseAdapter;
global.jfail = function(err) {
global.jfail = function (err) {
fail(JSON.stringify(err));
};
@@ -454,7 +454,7 @@ global.describe_only = validator => {
};
const libraryCache = {};
jasmine.mockLibrary = function(library, name, mock) {
jasmine.mockLibrary = function (library, name, mock) {
const original = require(library)[name];
if (!libraryCache[library]) {
libraryCache[library] = {};
@@ -463,7 +463,7 @@ jasmine.mockLibrary = function(library, name, mock) {
libraryCache[library][name] = original;
};
jasmine.restoreLibrary = function(library, name) {
jasmine.restoreLibrary = function (library, name) {
if (!libraryCache[library] || !libraryCache[library][name]) {
throw 'Can not find library ' + library + ' ' + name;
}

View File

@@ -1,4 +1,4 @@
"use strict";
'use strict';
// Helper functions for accessing the google API.
var Parse = require('parse/node').Parse;
@@ -11,7 +11,6 @@ const HTTPS_TOKEN_ISSUER = 'https://accounts.google.com';
let cache = {};
// Retrieve Google Signin Keys (with cache control)
function getGoogleKeyByKeyId(keyId) {
if (cache[keyId] && cache.expiresAt > new Date()) {
@@ -19,42 +18,60 @@ function getGoogleKeyByKeyId(keyId) {
}
return new Promise((resolve, reject) => {
https.get(`https://www.googleapis.com/oauth2/v3/certs`, res => {
let data = '';
res.on('data', chunk => {
data += chunk.toString('utf8');
});
res.on('end', () => {
const {keys} = JSON.parse(data);
const pems = keys.reduce((pems, {n: modulus, e: exposant, kid}) => Object.assign(pems, {[kid]: rsaPublicKeyToPEM(modulus, exposant)}), {});
https
.get(`https://www.googleapis.com/oauth2/v3/certs`, res => {
let data = '';
res.on('data', chunk => {
data += chunk.toString('utf8');
});
res.on('end', () => {
const { keys } = JSON.parse(data);
const pems = keys.reduce(
(pems, { n: modulus, e: exposant, kid }) =>
Object.assign(pems, {
[kid]: rsaPublicKeyToPEM(modulus, exposant),
}),
{}
);
if (res.headers['cache-control']) {
var expire = res.headers['cache-control'].match(/max-age=([0-9]+)/);
if (res.headers['cache-control']) {
var expire = res.headers['cache-control'].match(/max-age=([0-9]+)/);
if (expire) {
cache = Object.assign({}, pems, {expiresAt: new Date((new Date()).getTime() + Number(expire[1]) * 1000)});
if (expire) {
cache = Object.assign({}, pems, {
expiresAt: new Date(
new Date().getTime() + Number(expire[1]) * 1000
),
});
}
}
}
resolve(pems[keyId]);
});
}).on('error', reject);
resolve(pems[keyId]);
});
})
.on('error', reject);
});
}
function getHeaderFromToken(token) {
const decodedToken = jwt.decode(token, {complete: true});
const decodedToken = jwt.decode(token, { complete: true });
if (!decodedToken) {
throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, `provided token does not decode as JWT`);
throw new Parse.Error(
Parse.Error.OBJECT_NOT_FOUND,
`provided token does not decode as JWT`
);
}
return decodedToken.header;
}
async function verifyIdToken({id_token: token, id}, {clientId}) {
async function verifyIdToken({ id_token: token, id }, { clientId }) {
if (!token) {
throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, `id token is invalid for this user.`);
throw new Parse.Error(
Parse.Error.OBJECT_NOT_FOUND,
`id token is invalid for this user.`
);
}
const { kid: keyId, alg: algorithm } = getHeaderFromToken(token);
@@ -62,22 +79,34 @@ async function verifyIdToken({id_token: token, id}, {clientId}) {
const googleKey = await getGoogleKeyByKeyId(keyId);
try {
jwtClaims = jwt.verify(token, googleKey, { algorithms: algorithm, audience: clientId });
jwtClaims = jwt.verify(token, googleKey, {
algorithms: algorithm,
audience: clientId,
});
} catch (exception) {
const message = exception.message;
throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, `${message}`);
}
if (jwtClaims.iss !== TOKEN_ISSUER && jwtClaims.iss !== HTTPS_TOKEN_ISSUER) {
throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, `id token not issued by correct provider - expected: ${TOKEN_ISSUER} or ${HTTPS_TOKEN_ISSUER} | from: ${jwtClaims.iss}`);
throw new Parse.Error(
Parse.Error.OBJECT_NOT_FOUND,
`id token not issued by correct provider - expected: ${TOKEN_ISSUER} or ${HTTPS_TOKEN_ISSUER} | from: ${jwtClaims.iss}`
);
}
if (jwtClaims.sub !== id) {
throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, `auth data is invalid for this user.`);
throw new Parse.Error(
Parse.Error.OBJECT_NOT_FOUND,
`auth data is invalid for this user.`
);
}
if (clientId && jwtClaims.aud !== clientId) {
throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, `id token not authorized for this clientId.`);
throw new Parse.Error(
Parse.Error.OBJECT_NOT_FOUND,
`id token not authorized for this clientId.`
);
}
return jwtClaims;
@@ -95,10 +124,9 @@ function validateAppId() {
module.exports = {
validateAppId: validateAppId,
validateAuthData: validateAuthData
validateAuthData: validateAuthData,
};
// Helpers functions to convert the RSA certs to PEM (from jwks-rsa)
function rsaPublicKeyToPEM(modulusB64, exponentB64) {
const modulus = new Buffer(modulusB64, 'base64');
@@ -110,13 +138,19 @@ function rsaPublicKeyToPEM(modulusB64, exponentB64) {
const encodedModlen = encodeLengthHex(modlen);
const encodedExplen = encodeLengthHex(explen);
const encodedPubkey = '30' +
encodeLengthHex(modlen + explen + encodedModlen.length / 2 + encodedExplen.length / 2 + 2) +
'02' + encodedModlen + modulusHex +
'02' + encodedExplen + exponentHex;
const encodedPubkey =
'30' +
encodeLengthHex(
modlen + explen + encodedModlen.length / 2 + encodedExplen.length / 2 + 2
) +
'02' +
encodedModlen +
modulusHex +
'02' +
encodedExplen +
exponentHex;
const der = new Buffer(encodedPubkey, 'hex')
.toString('base64');
const der = new Buffer(encodedPubkey, 'hex').toString('base64');
let pem = '-----BEGIN RSA PUBLIC KEY-----\n';
pem += `${der.match(/.{1,64}/g).join('\n')}`;

View File

@@ -46,7 +46,7 @@ export class GridFSBucketAdapter extends FilesAdapter {
this._connectionPromise = MongoClient.connect(
this._databaseURI,
this._mongoOptions
).then((client) => {
).then(client => {
this._client = client;
return client.db(client.s.options.dbName);
});
@@ -55,7 +55,7 @@ export class GridFSBucketAdapter extends FilesAdapter {
}
_getBucket() {
return this._connect().then((database) => new GridFSBucket(database));
return this._connect().then(database => new GridFSBucket(database));
}
// For a given config object, filename, and data, store a file
@@ -92,7 +92,7 @@ export class GridFSBucketAdapter extends FilesAdapter {
throw new Error('FileNotFound');
}
return Promise.all(
documents.map((doc) => {
documents.map(doc => {
return bucket.delete(doc._id);
})
);
@@ -104,7 +104,7 @@ export class GridFSBucketAdapter extends FilesAdapter {
stream.read();
return new Promise((resolve, reject) => {
const chunks = [];
stream.on('data', (data) => {
stream.on('data', data => {
chunks.push(data);
});
stream.on('end', () => {
@@ -127,7 +127,7 @@ export class GridFSBucketAdapter extends FilesAdapter {
}
resolve(data);
});
stream.on('error', (err) => {
stream.on('error', err => {
reject(err);
});
});
@@ -177,7 +177,7 @@ export class GridFSBucketAdapter extends FilesAdapter {
});
const stream = bucket.openDownloadStreamByName(filename);
stream.start(start);
stream.on('data', (chunk) => {
stream.on('data', chunk => {
res.write(chunk);
});
stream.on('error', () => {

View File

@@ -24,7 +24,7 @@ const debug = function (...args: any) {
import { StorageAdapter } from '../StorageAdapter';
import type { SchemaType, QueryType, QueryOptions } from '../StorageAdapter';
const parseTypeToPostgresType = (type) => {
const parseTypeToPostgresType = type => {
switch (type.type) {
case 'String':
return 'text';
@@ -79,7 +79,7 @@ const mongoAggregateToPostgres = {
$year: 'YEAR',
};
const toPostgresValue = (value) => {
const toPostgresValue = value => {
if (typeof value === 'object') {
if (value.__type === 'Date') {
return value.iso;
@@ -91,7 +91,7 @@ const toPostgresValue = (value) => {
return value;
};
const transformValue = (value) => {
const transformValue = value => {
if (typeof value === 'object' && value.__type === 'Pointer') {
return value.objectId;
}
@@ -121,7 +121,7 @@ const defaultCLPS = Object.freeze({
protectedFields: { '*': [] },
});
const toParseSchema = (schema) => {
const toParseSchema = schema => {
if (schema.className === '_User') {
delete schema.fields._hashed_password;
}
@@ -145,7 +145,7 @@ const toParseSchema = (schema) => {
};
};
const toPostgresSchema = (schema) => {
const toPostgresSchema = schema => {
if (!schema) {
return schema;
}
@@ -159,8 +159,8 @@ const toPostgresSchema = (schema) => {
return schema;
};
const handleDotFields = (object) => {
Object.keys(object).forEach((fieldName) => {
const handleDotFields = object => {
Object.keys(object).forEach(fieldName => {
if (fieldName.indexOf('.') > -1) {
const components = fieldName.split('.');
const first = components.shift();
@@ -186,7 +186,7 @@ const handleDotFields = (object) => {
return object;
};
const transformDotFieldToComponents = (fieldName) => {
const transformDotFieldToComponents = fieldName => {
return fieldName.split('.').map((cmpt, index) => {
if (index === 0) {
return `"${cmpt}"`;
@@ -195,7 +195,7 @@ const transformDotFieldToComponents = (fieldName) => {
});
};
const transformDotField = (fieldName) => {
const transformDotField = fieldName => {
if (fieldName.indexOf('.') === -1) {
return `"${fieldName}"`;
}
@@ -205,7 +205,7 @@ const transformDotField = (fieldName) => {
return name;
};
const transformAggregateField = (fieldName) => {
const transformAggregateField = fieldName => {
if (typeof fieldName !== 'string') {
return fieldName;
}
@@ -218,7 +218,7 @@ const transformAggregateField = (fieldName) => {
return fieldName.substr(1);
};
const validateKeys = (object) => {
const validateKeys = object => {
if (typeof object == 'object') {
for (const key in object) {
if (typeof object[key] == 'object') {
@@ -236,10 +236,10 @@ const validateKeys = (object) => {
};
// Returns the list of join tables on a schema
const joinTablesForSchema = (schema) => {
const joinTablesForSchema = schema => {
const list = [];
if (schema) {
Object.keys(schema.fields).forEach((field) => {
Object.keys(schema.fields).forEach(field => {
if (schema.fields[field].type === 'Relation') {
list.push(`_Join:${field}:${schema.className}`);
}
@@ -343,7 +343,7 @@ const buildWhereClause = ({
} else if (['$or', '$nor', '$and'].includes(fieldName)) {
const clauses = [];
const clauseValues = [];
fieldValue.forEach((subQuery) => {
fieldValue.forEach(subQuery => {
const clause = buildWhereClause({
schema,
query: subQuery,
@@ -490,13 +490,13 @@ const buildWhereClause = ({
};
if (fieldValue.$in) {
createConstraint(
_.flatMap(fieldValue.$in, (elt) => elt),
_.flatMap(fieldValue.$in, elt => elt),
false
);
}
if (fieldValue.$nin) {
createConstraint(
_.flatMap(fieldValue.$nin, (elt) => elt),
_.flatMap(fieldValue.$nin, elt => elt),
true
);
}
@@ -711,7 +711,7 @@ const buildWhereClause = ({
);
}
points = points
.map((point) => {
.map(point => {
if (point instanceof Array && point.length === 2) {
Parse.GeoPoint._validate(point[1], point[0]);
return `(${point[0]}, ${point[1]})`;
@@ -799,7 +799,7 @@ const buildWhereClause = ({
index += 2;
}
Object.keys(ParseToPosgresComparator).forEach((cmp) => {
Object.keys(ParseToPosgresComparator).forEach(cmp => {
if (fieldValue[cmp] || fieldValue[cmp] === 0) {
const pgComparator = ParseToPosgresComparator[cmp];
const postgresValue = toPostgresValue(fieldValue[cmp]);
@@ -879,7 +879,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
.none(
'CREATE TABLE IF NOT EXISTS "_SCHEMA" ( "className" varChar(120), "schema" jsonb, "isParseClass" bool, PRIMARY KEY ("className") )'
)
.catch((error) => {
.catch(error => {
if (
error.code === PostgresDuplicateRelationError ||
error.code === PostgresUniqueIndexViolationError ||
@@ -896,13 +896,13 @@ export class PostgresStorageAdapter implements StorageAdapter {
return this._client.one(
'SELECT EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = $1)',
[name],
(a) => a.exists
a => a.exists
);
}
async setClassLevelPermissions(className: string, CLPs: any) {
const self = this;
await this._client.task('set-class-level-permissions', async (t) => {
await this._client.task('set-class-level-permissions', async t => {
await self._ensureSchemaCollectionExists(t);
const values = [
className,
@@ -934,7 +934,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
}
const deletedIndexes = [];
const insertedIndexes = [];
Object.keys(submittedIndexes).forEach((name) => {
Object.keys(submittedIndexes).forEach(name => {
const field = submittedIndexes[name];
if (existingIndexes[name] && field.__op !== 'Delete') {
throw new Parse.Error(
@@ -952,7 +952,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
deletedIndexes.push(name);
delete existingIndexes[name];
} else {
Object.keys(field).forEach((key) => {
Object.keys(field).forEach(key => {
if (!Object.prototype.hasOwnProperty.call(fields, key)) {
throw new Parse.Error(
Parse.Error.INVALID_QUERY,
@@ -967,7 +967,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
});
}
});
await conn.tx('set-indexes-with-schema-format', async (t) => {
await conn.tx('set-indexes-with-schema-format', async t => {
if (insertedIndexes.length > 0) {
await self.createIndexes(className, insertedIndexes, t);
}
@@ -985,7 +985,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
async createClass(className: string, schema: SchemaType, conn: ?any) {
conn = conn || this._client;
return conn
.tx('create-class', async (t) => {
.tx('create-class', async t => {
const q1 = this.createTable(className, schema, t);
const q2 = t.none(
'INSERT INTO "_SCHEMA" ("className", "schema", "isParseClass") VALUES ($<className>, $<schema>, true)',
@@ -1005,7 +1005,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
.then(() => {
return toParseSchema(schema);
})
.catch((err) => {
.catch(err => {
if (err.data[0].result.code === PostgresTransactionAbortedError) {
err = err.data[1].result;
}
@@ -1042,7 +1042,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
}
let index = 2;
const relations = [];
Object.keys(fields).forEach((fieldName) => {
Object.keys(fields).forEach(fieldName => {
const parseType = fields[fieldName];
// Skip when it's a relation
// We'll create the tables later
@@ -1065,7 +1065,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
const values = [className, ...valuesArray];
debug(qs, values);
return conn.task('create-table', async (t) => {
return conn.task('create-table', async t => {
try {
await self._ensureSchemaCollectionExists(t);
await t.none(qs, values);
@@ -1075,9 +1075,9 @@ export class PostgresStorageAdapter implements StorageAdapter {
}
// ELSE: Table already exists, must have been created by a different request. Ignore the error.
}
await t.tx('create-table-tx', (tx) => {
await t.tx('create-table-tx', tx => {
return tx.batch(
relations.map((fieldName) => {
relations.map(fieldName => {
return tx.none(
'CREATE TABLE IF NOT EXISTS $<joinTable:name> ("relatedId" varChar(120), "owningId" varChar(120), PRIMARY KEY("relatedId", "owningId") )',
{ joinTable: `_Join:${fieldName}:${className}` }
@@ -1093,15 +1093,15 @@ export class PostgresStorageAdapter implements StorageAdapter {
conn = conn || this._client;
const self = this;
await conn.tx('schema-upgrade', async (t) => {
await conn.tx('schema-upgrade', async t => {
const columns = await t.map(
'SELECT column_name FROM information_schema.columns WHERE table_name = $<className>',
{ className },
(a) => a.column_name
a => a.column_name
);
const newColumns = Object.keys(schema.fields)
.filter((item) => columns.indexOf(item) === -1)
.map((fieldName) =>
.filter(item => columns.indexOf(item) === -1)
.map(fieldName =>
self.addFieldIfNotExists(
className,
fieldName,
@@ -1124,7 +1124,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
debug('addFieldIfNotExists', { className, fieldName, type });
conn = conn || this._client;
const self = this;
await conn.tx('add-field-if-not-exists', async (t) => {
await conn.tx('add-field-if-not-exists', async t => {
if (type.type !== 'Relation') {
try {
await t.none(
@@ -1183,7 +1183,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
},
];
return this._client
.tx((t) => t.none(this._pgp.helpers.concat(operations)))
.tx(t => t.none(this._pgp.helpers.concat(operations)))
.then(() => className.indexOf('_Join:') != 0); // resolves with false when _Join table
}
@@ -1194,7 +1194,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
debug('deleteAllClasses');
await this._client
.task('delete-all-classes', async (t) => {
.task('delete-all-classes', async t => {
try {
const results = await t.any('SELECT * FROM "_SCHEMA"');
const joins = results.reduce((list: Array<string>, schema: any) => {
@@ -1210,14 +1210,14 @@ export class PostgresStorageAdapter implements StorageAdapter {
'_GraphQLConfig',
'_Audience',
'_Idempotency',
...results.map((result) => result.className),
...results.map(result => result.className),
...joins,
];
const queries = classes.map((className) => ({
const queries = classes.map(className => ({
query: 'DROP TABLE IF EXISTS $<className:name>',
values: { className },
}));
await t.tx((tx) => tx.none(helpers.concat(queries)));
await t.tx(tx => tx.none(helpers.concat(queries)));
} catch (error) {
if (error.code !== PostgresRelationDoesNotExistError) {
throw error;
@@ -1265,7 +1265,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
})
.join(', DROP COLUMN');
await this._client.tx('delete-fields', async (t) => {
await this._client.tx('delete-fields', async t => {
await t.none(
'UPDATE "_SCHEMA" SET "schema" = $<schema> WHERE "className" = $<className>',
{ schema, className }
@@ -1281,9 +1281,9 @@ export class PostgresStorageAdapter implements StorageAdapter {
// rejection reason are TBD.
async getAllClasses() {
const self = this;
return this._client.task('get-all-classes', async (t) => {
return this._client.task('get-all-classes', async t => {
await self._ensureSchemaCollectionExists(t);
return await t.map('SELECT * FROM "_SCHEMA"', null, (row) =>
return await t.map('SELECT * FROM "_SCHEMA"', null, row =>
toParseSchema({ className: row.className, ...row.schema })
);
});
@@ -1298,7 +1298,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
.any('SELECT * FROM "_SCHEMA" WHERE "className" = $<className>', {
className,
})
.then((result) => {
.then(result => {
if (result.length !== 1) {
throw undefined;
}
@@ -1324,7 +1324,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
validateKeys(object);
Object.keys(object).forEach((fieldName) => {
Object.keys(object).forEach(fieldName => {
if (object[fieldName] === null) {
return;
}
@@ -1426,7 +1426,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
}
return `$${index + 2 + columnsArray.length}${termination}`;
});
const geoPointsInjects = Object.keys(geoPoints).map((key) => {
const geoPointsInjects = Object.keys(geoPoints).map(key => {
const value = geoPoints[key];
valuesArray.push(value.longitude, value.latitude);
const l = valuesArray.length + columnsArray.length;
@@ -1447,7 +1447,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
)
.none(qs, values)
.then(() => ({ ops: [object] }))
.catch((error) => {
.catch(error => {
if (error.code === PostgresUniqueIndexViolationError) {
const err = new Parse.Error(
Parse.Error.DUPLICATE_VALUE,
@@ -1498,8 +1498,8 @@ export class PostgresStorageAdapter implements StorageAdapter {
? transactionalSession.t
: this._client
)
.one(qs, values, (a) => +a.count)
.then((count) => {
.one(qs, values, a => +a.count)
.then(count => {
if (count === 0) {
throw new Parse.Error(
Parse.Error.OBJECT_NOT_FOUND,
@@ -1509,7 +1509,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
return count;
}
})
.catch((error) => {
.catch(error => {
if (error.code !== PostgresRelationDoesNotExistError) {
throw error;
}
@@ -1535,7 +1535,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
query,
update,
transactionalSession
).then((val) => val[0]);
).then(val => val[0]);
}
// Apply the update to all objects that match the given Parse Query.
@@ -1556,7 +1556,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
// Set flag for dot notation fields
const dotNotationOptions = {};
Object.keys(update).forEach((fieldName) => {
Object.keys(update).forEach(fieldName => {
if (fieldName.indexOf('.') > -1) {
const components = fieldName.split('.');
const first = components.shift();
@@ -1707,7 +1707,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
) {
// Gather keys to increment
const keysToIncrement = Object.keys(originalUpdate)
.filter((k) => {
.filter(k => {
// choose top level fields that have a delete operation set
// Note that Object.keys is iterating over the **original** update object
// and that some of the keys of the original update could be null or undefined:
@@ -1720,26 +1720,26 @@ export class PostgresStorageAdapter implements StorageAdapter {
k.split('.')[0] === fieldName
);
})
.map((k) => k.split('.')[1]);
.map(k => k.split('.')[1]);
let incrementPatterns = '';
if (keysToIncrement.length > 0) {
incrementPatterns =
' || ' +
keysToIncrement
.map((c) => {
.map(c => {
const amount = fieldValue[c].amount;
return `CONCAT('{"${c}":', COALESCE($${index}:name->>'${c}','0')::int + ${amount}, '}')::jsonb`;
})
.join(' || ');
// Strip the keys
keysToIncrement.forEach((key) => {
keysToIncrement.forEach(key => {
delete fieldValue[key];
});
}
const keysToDelete: Array<string> = Object.keys(originalUpdate)
.filter((k) => {
.filter(k => {
// choose top level fields that have a delete operation set.
const value = originalUpdate[k];
return (
@@ -1749,7 +1749,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
k.split('.')[0] === fieldName
);
})
.map((k) => k.split('.')[1]);
.map(k => k.split('.')[1]);
const deletePatterns = keysToDelete.reduce(
(p: string, c: string, i: number) => {
@@ -1834,7 +1834,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
schema,
createValue,
transactionalSession
).catch((error) => {
).catch(error => {
// ignore duplicate value errors as it's upsert
if (error.code !== Parse.Error.DUPLICATE_VALUE) {
throw error;
@@ -1889,7 +1889,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
if (sort) {
const sortCopy: any = sort;
const sorting = Object.keys(sort)
.map((key) => {
.map(key => {
const transformKey = transformDotFieldToComponents(key).join('->');
// Using $idx pattern gives: non-integer constant in ORDER BY
if (sortCopy[key] === 1) {
@@ -1938,18 +1938,18 @@ export class PostgresStorageAdapter implements StorageAdapter {
debug(qs, values);
return this._client
.any(qs, values)
.catch((error) => {
.catch(error => {
// Query on non existing table, don't crash
if (error.code !== PostgresRelationDoesNotExistError) {
throw error;
}
return [];
})
.then((results) => {
.then(results => {
if (explain) {
return results;
}
return results.map((object) =>
return results.map(object =>
this.postgresObjectToParseObject(className, object, schema)
);
});
@@ -1958,7 +1958,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
// Converts from a postgres-format object to a REST-format object.
// Does not strip out anything based on a lack of authentication.
postgresObjectToParseObject(className: string, object: any, schema: any) {
Object.keys(schema.fields).forEach((fieldName) => {
Object.keys(schema.fields).forEach(fieldName => {
if (schema.fields[fieldName].type === 'Pointer' && object[fieldName]) {
object[fieldName] = {
objectId: object[fieldName],
@@ -1982,7 +1982,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
if (object[fieldName] && schema.fields[fieldName].type === 'Polygon') {
let coords = object[fieldName];
coords = coords.substr(2, coords.length - 4).split('),(');
coords = coords.map((point) => {
coords = coords.map(point => {
return [
parseFloat(point.split(',')[1]),
parseFloat(point.split(',')[0]),
@@ -2072,7 +2072,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
const qs = `ALTER TABLE $1:name ADD CONSTRAINT $2:name UNIQUE (${constraintPatterns.join()})`;
return this._client
.none(qs, [className, constraintName, ...fieldNames])
.catch((error) => {
.catch(error => {
if (
error.code === PostgresDuplicateRelationError &&
error.message.includes(constraintName)
@@ -2123,14 +2123,14 @@ export class PostgresStorageAdapter implements StorageAdapter {
}
return this._client
.one(qs, values, (a) => {
.one(qs, values, a => {
if (a.approximate_row_count != null) {
return +a.approximate_row_count;
} else {
return +a.count;
}
})
.catch((error) => {
.catch(error => {
if (error.code !== PostgresRelationDoesNotExistError) {
throw error;
}
@@ -2179,16 +2179,16 @@ export class PostgresStorageAdapter implements StorageAdapter {
debug(qs, values);
return this._client
.any(qs, values)
.catch((error) => {
.catch(error => {
if (error.code === PostgresMissingColumnError) {
return [];
}
throw error;
})
.then((results) => {
.then(results => {
if (!isNested) {
results = results.filter((object) => object[field] !== null);
return results.map((object) => {
results = results.filter(object => object[field] !== null);
return results.map(object => {
if (!isPointerField) {
return object[field];
}
@@ -2200,10 +2200,10 @@ export class PostgresStorageAdapter implements StorageAdapter {
});
}
const child = fieldName.split('.')[1];
return results.map((object) => object[column][child]);
return results.map(object => object[column][child]);
})
.then((results) =>
results.map((object) =>
.then(results =>
results.map(object =>
this.postgresObjectToParseObject(className, object, schema)
)
);
@@ -2340,7 +2340,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
if (stage.$match.$or) {
const collapse = {};
stage.$match.$or.forEach((element) => {
stage.$match.$or.forEach(element => {
for (const key in element) {
collapse[key] = element[key];
}
@@ -2350,7 +2350,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
for (const field in stage.$match) {
const value = stage.$match[field];
const matchPatterns = [];
Object.keys(ParseToPosgresComparator).forEach((cmp) => {
Object.keys(ParseToPosgresComparator).forEach(cmp => {
if (value[cmp]) {
const pgComparator = ParseToPosgresComparator[cmp];
matchPatterns.push(
@@ -2390,7 +2390,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
const sort = stage.$sort;
const keys = Object.keys(sort);
const sorting = keys
.map((key) => {
.map(key => {
const transformer = sort[key] === 1 ? 'ASC' : 'DESC';
const order = `$${index}:name ${transformer}`;
index += 1;
@@ -2418,14 +2418,14 @@ export class PostgresStorageAdapter implements StorageAdapter {
? this.createExplainableQuery(originalQuery)
: originalQuery;
debug(qs, values);
return this._client.any(qs, values).then((a) => {
return this._client.any(qs, values).then(a => {
if (explain) {
return a;
}
const results = a.map((object) =>
const results = a.map(object =>
this.postgresObjectToParseObject(className, object, schema)
);
results.forEach((result) => {
results.forEach(result => {
if (!Object.prototype.hasOwnProperty.call(result, 'objectId')) {
result.objectId = null;
}
@@ -2447,9 +2447,9 @@ export class PostgresStorageAdapter implements StorageAdapter {
async performInitialization({ VolatileClassesSchemas }: any) {
// TODO: This method needs to be rewritten to make proper use of connections (@vitaly-t)
debug('performInitialization');
const promises = VolatileClassesSchemas.map((schema) => {
const promises = VolatileClassesSchemas.map(schema => {
return this.createTable(schema.className, schema)
.catch((err) => {
.catch(err => {
if (
err.code === PostgresDuplicateRelationError ||
err.code === Parse.Error.INVALID_CLASS_NAME
@@ -2462,7 +2462,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
});
return Promise.all(promises)
.then(() => {
return this._client.tx('perform-initialization', (t) => {
return this._client.tx('perform-initialization', t => {
return t.batch([
t.none(sql.misc.jsonObjectSetKeys),
t.none(sql.array.add),
@@ -2474,10 +2474,10 @@ export class PostgresStorageAdapter implements StorageAdapter {
]);
});
})
.then((data) => {
.then(data => {
debug(`initializationDone in ${data.duration}`);
})
.catch((error) => {
.catch(error => {
/* eslint-disable no-console */
console.error(error);
});
@@ -2488,9 +2488,9 @@ export class PostgresStorageAdapter implements StorageAdapter {
indexes: any,
conn: ?any
): Promise<void> {
return (conn || this._client).tx((t) =>
return (conn || this._client).tx(t =>
t.batch(
indexes.map((i) => {
indexes.map(i => {
return t.none('CREATE INDEX $1:name ON $2:name ($3:name)', [
i.name,
className,
@@ -2517,11 +2517,11 @@ export class PostgresStorageAdapter implements StorageAdapter {
}
async dropIndexes(className: string, indexes: any, conn: any): Promise<void> {
const queries = indexes.map((i) => ({
const queries = indexes.map(i => ({
query: 'DROP INDEX $1:name',
values: i,
}));
await (conn || this._client).tx((t) =>
await (conn || this._client).tx(t =>
t.none(this._pgp.helpers.concat(queries))
);
}
@@ -2541,11 +2541,11 @@ export class PostgresStorageAdapter implements StorageAdapter {
}
async createTransactionalSession(): Promise<any> {
return new Promise((resolve) => {
return new Promise(resolve => {
const transactionalSession = {};
transactionalSession.result = this._client.tx((t) => {
transactionalSession.result = this._client.tx(t => {
transactionalSession.t = t;
transactionalSession.promise = new Promise((resolve) => {
transactionalSession.promise = new Promise(resolve => {
transactionalSession.resolve = resolve;
});
transactionalSession.batch = [];
@@ -2577,7 +2577,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
fieldNames: string[],
indexName: ?string,
caseInsensitive: boolean = false,
options?: Object = {},
options?: Object = {}
): Promise<any> {
const conn = options.conn !== undefined ? options.conn : this._client;
const defaultIndexName = `parse_default_${fieldNames.sort().join('_')}`;
@@ -2591,7 +2591,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
const qs = `CREATE INDEX $1:name ON $2:name (${constraintPatterns.join()})`;
await conn
.none(qs, [indexNameOptions.name, className, ...fieldNames])
.catch((error) => {
.catch(error => {
if (
error.code === PostgresDuplicateRelationError &&
error.message.includes(indexNameOptions.name)
@@ -2644,7 +2644,7 @@ function convertPolygonToSQL(polygon) {
);
}
const points = polygon
.map((point) => {
.map(point => {
Parse.GeoPoint._validate(parseFloat(point[1]), parseFloat(point[0]));
return `(${point[1]}, ${point[0]})`;
})
@@ -2721,7 +2721,7 @@ function isAnyValueRegexStartsWith(values) {
function createLiteralRegex(remaining) {
return remaining
.split('')
.map((c) => {
.map(c => {
const regex = RegExp('[0-9 ]|\\p{L}', 'u'); // Support all unicode letter chars
if (c.match(regex) !== null) {
// don't escape alphanumeric characters

View File

@@ -93,7 +93,7 @@ export interface StorageAdapter {
fieldNames: string[],
indexName?: string,
caseSensitive?: boolean,
options?: Object,
options?: Object
): Promise<any>;
ensureUniqueness(
className: string,

View File

@@ -114,7 +114,9 @@ export class Config {
}
static validateIdempotencyOptions(idempotencyOptions) {
if (!idempotencyOptions) { return; }
if (!idempotencyOptions) {
return;
}
if (idempotencyOptions.ttl === undefined) {
idempotencyOptions.ttl = IdempotencyOptions.ttl.default;
} else if (!isNaN(idempotencyOptions.ttl) && idempotencyOptions.ttl <= 0) {

View File

@@ -147,7 +147,7 @@ const defaultColumns: { [string]: SchemaFields } = Object.freeze({
_Idempotency: {
reqId: { type: 'String' },
expire: { type: 'Date' },
}
},
});
const requiredColumns = Object.freeze({
@@ -165,7 +165,7 @@ const systemClasses = Object.freeze([
'_JobStatus',
'_JobSchedule',
'_Audience',
'_Idempotency'
'_Idempotency',
]);
const volatileClasses = Object.freeze([
@@ -176,7 +176,7 @@ const volatileClasses = Object.freeze([
'_GraphQLConfig',
'_JobSchedule',
'_Audience',
'_Idempotency'
'_Idempotency',
]);
// Anything that start with role
@@ -681,7 +681,7 @@ const VolatileClassesSchemas = [
_GlobalConfigSchema,
_GraphQLConfigSchema,
_AudienceSchema,
_IdempotencySchema
_IdempotencySchema,
];
const dbTypeMatchesObjectType = (

View File

@@ -5,8 +5,16 @@ const createObject = async (className, fields, config, auth, info) => {
fields = {};
}
return (await rest.create(config, auth, className, fields, info.clientSDK, info.context))
.response;
return (
await rest.create(
config,
auth,
className,
fields,
info.clientSDK,
info.context
)
).response;
};
const updateObject = async (
@@ -21,15 +29,17 @@ const updateObject = async (
fields = {};
}
return (await rest.update(
config,
auth,
className,
{ objectId },
fields,
info.clientSDK,
info.context
)).response;
return (
await rest.update(
config,
auth,
className,
{ objectId },
fields,
info.clientSDK,
info.context
)
).response;
};
const deleteObject = async (className, objectId, config, auth, info) => {

View File

@@ -7,7 +7,7 @@ import { transformQueryInputToParse } from '../transformers/query';
/* eslint-disable*/
const needToGetAllKeys = (fields, keys, parseClasses) =>
keys
? keys.split(',').some((keyName) => {
? keys.split(',').some(keyName => {
const key = keyName.split('.');
if (fields[key[0]]) {
if (fields[key[0]].type === 'Pointer') {
@@ -19,7 +19,11 @@ const needToGetAllKeys = (fields, keys, parseClasses) =>
// Current sub key is not custom
return false;
}
} else if (!key[1]) {
} else if (
!key[1] ||
fields[key[0]].type === 'Array' ||
fields[key[0]].type === 'Object'
) {
// current key is not custom
return false;
}
@@ -156,7 +160,7 @@ const findObjects = async (
if (
selectedFields.find(
(field) => field.startsWith('edges.') || field.startsWith('pageInfo.')
field => field.startsWith('edges.') || field.startsWith('pageInfo.')
)
) {
if (limit || limit === 0) {

View File

@@ -15,7 +15,7 @@ import {
GraphQLUnionType,
} from 'graphql';
import { toGlobalId } from 'graphql-relay';
import { GraphQLUpload } from 'graphql-upload';
import { GraphQLUpload } from '@graphql-tools/links';
class TypeValidationError extends Error {
constructor(value, type) {
@@ -162,7 +162,7 @@ const serializeDateIso = (value) => {
return value;
}
if (value instanceof Date) {
return value.toUTCString();
return value.toISOString();
}
throw new TypeValidationError(value, 'Date');

View File

@@ -1,6 +1,6 @@
import { GraphQLNonNull } from 'graphql';
import { mutationWithClientMutationId } from 'graphql-relay';
import { GraphQLUpload } from 'graphql-upload';
import { GraphQLUpload } from '@graphql-tools/links';
import Parse from 'parse/node';
import * as defaultGraphQLTypes from './defaultGraphQLTypes';
import logger from '../../logger';
@@ -14,7 +14,7 @@ const handleUpload = async (upload, config) => {
const chunks = [];
stream
.on('error', reject)
.on('data', chunk => chunks.push(chunk))
.on('data', (chunk) => chunks.push(chunk))
.on('end', () => resolve(Buffer.concat(chunks)));
});
}
@@ -52,7 +52,7 @@ const handleUpload = async (upload, config) => {
}
};
const load = parseGraphQLSchema => {
const load = (parseGraphQLSchema) => {
const createMutation = mutationWithClientMutationId({
name: 'CreateFile',
description:

View File

@@ -22,13 +22,12 @@ export function toGraphQLError(error) {
return new ApolloError(message, code);
}
export const extractKeysAndInclude = selectedFields => {
export const extractKeysAndInclude = (selectedFields) => {
selectedFields = selectedFields.filter(
field => !field.includes('__typename')
(field) => !field.includes('__typename')
);
// Handles "id" field for both current and included objects
selectedFields = selectedFields.map(field => {
selectedFields = selectedFields.map((field) => {
if (field === 'id') return 'objectId';
return field.endsWith('.id')
? `${field.substring(0, field.lastIndexOf('.id'))}.objectId`
@@ -36,25 +35,21 @@ export const extractKeysAndInclude = selectedFields => {
});
let keys = undefined;
let include = undefined;
if (selectedFields.length > 0) {
keys = selectedFields.join(',');
include = selectedFields
.reduce((fields, field) => {
fields = fields.slice();
let pointIndex = field.lastIndexOf('.');
while (pointIndex > 0) {
const lastField = field.slice(pointIndex + 1);
field = field.slice(0, pointIndex);
if (!fields.includes(field) && lastField !== 'objectId') {
fields.push(field);
}
pointIndex = field.lastIndexOf('.');
}
return fields;
}, [])
.join(',');
keys = [...new Set(selectedFields)].join(',');
// We can use this shortcut since optimization is handled
// later on RestQuery, avoid overhead here.
include = keys;
}
return { keys, include };
return {
// If authData is detected keys will not work properly
// since authData has a special storage behavior
// so we need to skip keys currently
keys: keys && keys.indexOf('authData') === -1 ? keys : undefined,
include,
};
};
export const getParseClassMutationConfig = function (parseClassConfig) {

File diff suppressed because it is too large Load Diff

View File

@@ -118,4 +118,3 @@
* @property {String[]} paths An array of paths for which the feature should be enabled. The mount path must not be included, for example instead of `/parse/functions/myFunction` specifiy `functions/myFunction`. The entries are interpreted as regular expression, for example `functions/.*` matches all functions, `jobs/.*` matches all jobs, `classes/.*` matches all classes, `.*` matches all paths.
* @property {Number} ttl The duration in seconds after which a request record is discarded from the database, defaults to 300s.
*/

View File

@@ -85,7 +85,7 @@ class ParseServer {
serverStartComplete();
}
})
.catch((error) => {
.catch(error => {
if (serverStartComplete) {
serverStartComplete(error);
} else {
@@ -183,7 +183,7 @@ class ParseServer {
if (!process.env.TESTING) {
//This causes tests to spew some useless warnings, so disable in test
/* istanbul ignore next */
process.on('uncaughtException', (err) => {
process.on('uncaughtException', err => {
if (err.code === 'EADDRINUSE') {
// user-friendly message for this common error
process.stderr.write(
@@ -270,7 +270,10 @@ class ParseServer {
graphQLCustomTypeDefs = parse(
fs.readFileSync(options.graphQLSchema, 'utf8')
);
} else if (typeof options.graphQLSchema === 'object' || typeof options.graphQLSchema === 'function') {
} else if (
typeof options.graphQLSchema === 'object' ||
typeof options.graphQLSchema === 'function'
) {
graphQLCustomTypeDefs = options.graphQLSchema;
}
@@ -338,8 +341,8 @@ class ParseServer {
if (Parse.serverURL) {
const request = require('./request');
request({ url: Parse.serverURL.replace(/\/$/, '') + '/health' })
.catch((response) => response)
.then((response) => {
.catch(response => response)
.then(response => {
const json = response.data || null;
if (
response.status !== 200 ||
@@ -372,7 +375,7 @@ function addParseCloud() {
}
function injectDefaults(options: ParseServerOptions) {
Object.keys(defaults).forEach((key) => {
Object.keys(defaults).forEach(key => {
if (!Object.prototype.hasOwnProperty.call(options, key)) {
options[key] = defaults[key];
}
@@ -428,12 +431,12 @@ function injectDefaults(options: ParseServerOptions) {
}
// Merge protectedFields options with defaults.
Object.keys(defaults.protectedFields).forEach((c) => {
Object.keys(defaults.protectedFields).forEach(c => {
const cur = options.protectedFields[c];
if (!cur) {
options.protectedFields[c] = defaults.protectedFields[c];
} else {
Object.keys(defaults.protectedFields[c]).forEach((r) => {
Object.keys(defaults.protectedFields[c]).forEach(r => {
const unq = new Set([
...(options.protectedFields[c][r] || []),
...defaults.protectedFields[c][r],
@@ -457,7 +460,7 @@ function configureListeners(parseServer) {
const sockets = {};
/* Currently, express doesn't shut down immediately after receiving SIGINT/SIGTERM if it has client connections that haven't timed out. (This is a known issue with node - https://github.com/nodejs/node/issues/2642)
This function, along with `destroyAliveConnections()`, intend to fix this behavior such that parse server will close all open connections and initiate the shutdown process as soon as it receives a SIGINT/SIGTERM signal. */
server.on('connection', (socket) => {
server.on('connection', socket => {
const socketId = socket.remoteAddress + ':' + socket.remotePort;
sockets[socketId] = socket;
socket.on('close', () => {

View File

@@ -106,7 +106,7 @@ function RestWrite(
// write, in order.
// Returns a promise for a {response, status, location} object.
// status and location are optional.
RestWrite.prototype.execute = function() {
RestWrite.prototype.execute = function () {
return Promise.resolve()
.then(() => {
return this.getUserAndRoleACL();
@@ -166,7 +166,7 @@ RestWrite.prototype.execute = function() {
};
// Uses the Auth object to get the list of roles, adds the user id
RestWrite.prototype.getUserAndRoleACL = function() {
RestWrite.prototype.getUserAndRoleACL = function () {
if (this.auth.isMaster) {
return Promise.resolve();
}
@@ -186,7 +186,7 @@ RestWrite.prototype.getUserAndRoleACL = function() {
};
// Validates this operation against the allowClientClassCreation config.
RestWrite.prototype.validateClientClassCreation = function() {
RestWrite.prototype.validateClientClassCreation = function () {
if (
this.config.allowClientClassCreation === false &&
!this.auth.isMaster &&
@@ -211,7 +211,7 @@ RestWrite.prototype.validateClientClassCreation = function() {
};
// Validates this operation against the schema.
RestWrite.prototype.validateSchema = function() {
RestWrite.prototype.validateSchema = function () {
return this.config.database.validateObject(
this.className,
this.data,
@@ -222,7 +222,7 @@ RestWrite.prototype.validateSchema = function() {
// Runs any beforeSave triggers against this operation.
// Any change leads to our data being mutated.
RestWrite.prototype.runBeforeSaveTrigger = function() {
RestWrite.prototype.runBeforeSaveTrigger = function () {
if (this.response) {
return;
}
@@ -315,7 +315,7 @@ RestWrite.prototype.runBeforeSaveTrigger = function() {
});
};
RestWrite.prototype.runBeforeLoginTrigger = async function(userData) {
RestWrite.prototype.runBeforeLoginTrigger = async function (userData) {
// Avoid doing any setup for triggers if there is no 'beforeLogin' trigger
if (
!triggers.triggerExists(
@@ -346,7 +346,7 @@ RestWrite.prototype.runBeforeLoginTrigger = async function(userData) {
);
};
RestWrite.prototype.setRequiredFieldsIfNeeded = function() {
RestWrite.prototype.setRequiredFieldsIfNeeded = function () {
if (this.data) {
return this.validSchemaController.getAllClasses().then(allClasses => {
const schema = allClasses.find(
@@ -416,7 +416,7 @@ RestWrite.prototype.setRequiredFieldsIfNeeded = function() {
// Transforms auth data for a user object.
// Does nothing if this isn't a user object.
// Returns a promise for when we're done if it can't finish this tick.
RestWrite.prototype.validateAuthData = function() {
RestWrite.prototype.validateAuthData = function () {
if (this.className !== '_User') {
return;
}
@@ -477,7 +477,7 @@ RestWrite.prototype.validateAuthData = function() {
);
};
RestWrite.prototype.handleAuthDataValidation = function(authData) {
RestWrite.prototype.handleAuthDataValidation = function (authData) {
const validations = Object.keys(authData).map(provider => {
if (authData[provider] === null) {
return Promise.resolve();
@@ -496,7 +496,7 @@ RestWrite.prototype.handleAuthDataValidation = function(authData) {
return Promise.all(validations);
};
RestWrite.prototype.findUsersWithAuthData = function(authData) {
RestWrite.prototype.findUsersWithAuthData = function (authData) {
const providers = Object.keys(authData);
const query = providers
.reduce((memo, provider) => {
@@ -521,7 +521,7 @@ RestWrite.prototype.findUsersWithAuthData = function(authData) {
return findPromise;
};
RestWrite.prototype.filteredObjectsByACL = function(objects) {
RestWrite.prototype.filteredObjectsByACL = function (objects) {
if (this.auth.isMaster) {
return objects;
}
@@ -534,7 +534,7 @@ RestWrite.prototype.filteredObjectsByACL = function(objects) {
});
};
RestWrite.prototype.handleAuthData = function(authData) {
RestWrite.prototype.handleAuthData = function (authData) {
let results;
return this.findUsersWithAuthData(authData).then(async r => {
results = this.filteredObjectsByACL(r);
@@ -638,7 +638,7 @@ RestWrite.prototype.handleAuthData = function(authData) {
};
// The non-third-party parts of User transformation
RestWrite.prototype.transformUser = function() {
RestWrite.prototype.transformUser = function () {
var promise = Promise.resolve();
if (this.className !== '_User') {
@@ -700,7 +700,7 @@ RestWrite.prototype.transformUser = function() {
});
};
RestWrite.prototype._validateUserName = function() {
RestWrite.prototype._validateUserName = function () {
// Check for username uniqueness
if (!this.data.username) {
if (!this.query) {
@@ -750,7 +750,7 @@ RestWrite.prototype._validateUserName = function() {
Given that this lookup should be a relatively low use case and that the case sensitive
unique index will be used by the db for the query, this is an adequate solution.
*/
RestWrite.prototype._validateEmail = function() {
RestWrite.prototype._validateEmail = function () {
if (!this.data.email || this.data.email.__op === 'Delete') {
return Promise.resolve();
}
@@ -795,14 +795,14 @@ RestWrite.prototype._validateEmail = function() {
});
};
RestWrite.prototype._validatePasswordPolicy = function() {
RestWrite.prototype._validatePasswordPolicy = function () {
if (!this.config.passwordPolicy) return Promise.resolve();
return this._validatePasswordRequirements().then(() => {
return this._validatePasswordHistory();
});
};
RestWrite.prototype._validatePasswordRequirements = function() {
RestWrite.prototype._validatePasswordRequirements = function () {
// check if the password conforms to the defined password policy if configured
// If we specified a custom error in our configuration use it.
// Example: "Passwords must include a Capital Letter, Lowercase Letter, and a number."
@@ -858,7 +858,7 @@ RestWrite.prototype._validatePasswordRequirements = function() {
return Promise.resolve();
};
RestWrite.prototype._validatePasswordHistory = function() {
RestWrite.prototype._validatePasswordHistory = function () {
// check whether password is repeating from specified history
if (this.query && this.config.passwordPolicy.maxPasswordHistory) {
return this.config.database
@@ -881,7 +881,7 @@ RestWrite.prototype._validatePasswordHistory = function() {
oldPasswords.push(user.password);
const newPassword = this.data.password;
// compare the new password hash with all old password hashes
const promises = oldPasswords.map(function(hash) {
const promises = oldPasswords.map(function (hash) {
return passwordCrypto.compare(newPassword, hash).then(result => {
if (result)
// reject if there is a match
@@ -910,7 +910,7 @@ RestWrite.prototype._validatePasswordHistory = function() {
return Promise.resolve();
};
RestWrite.prototype.createSessionTokenIfNeeded = function() {
RestWrite.prototype.createSessionTokenIfNeeded = function () {
if (this.className !== '_User') {
return;
}
@@ -933,7 +933,7 @@ RestWrite.prototype.createSessionTokenIfNeeded = function() {
return this.createSessionToken();
};
RestWrite.prototype.createSessionToken = async function() {
RestWrite.prototype.createSessionToken = async function () {
// cloud installationId from Cloud Code,
// never create session tokens from there.
if (this.auth.installationId && this.auth.installationId === 'cloud') {
@@ -957,7 +957,7 @@ RestWrite.prototype.createSessionToken = async function() {
};
// Delete email reset tokens if user is changing password or email.
RestWrite.prototype.deleteEmailResetTokenIfNeeded = function() {
RestWrite.prototype.deleteEmailResetTokenIfNeeded = function () {
if (this.className !== '_User' || this.query === null) {
// null query means create
return;
@@ -972,7 +972,7 @@ RestWrite.prototype.deleteEmailResetTokenIfNeeded = function() {
}
};
RestWrite.prototype.destroyDuplicatedSessions = function() {
RestWrite.prototype.destroyDuplicatedSessions = function () {
// Only for _Session, and at creation time
if (this.className != '_Session' || this.query) {
return;
@@ -998,7 +998,7 @@ RestWrite.prototype.destroyDuplicatedSessions = function() {
};
// Handles any followup logic
RestWrite.prototype.handleFollowup = function() {
RestWrite.prototype.handleFollowup = function () {
if (
this.storage &&
this.storage['clearSessions'] &&
@@ -1032,7 +1032,7 @@ RestWrite.prototype.handleFollowup = function() {
// Handles the _Session class specialness.
// Does nothing if this isn't an _Session object.
RestWrite.prototype.handleSession = function() {
RestWrite.prototype.handleSession = function () {
if (this.response || this.className !== '_Session') {
return;
}
@@ -1105,7 +1105,7 @@ RestWrite.prototype.handleSession = function() {
// If an installation is found, this can mutate this.query and turn a create
// into an update.
// Returns a promise for when we're done if it can't finish this tick.
RestWrite.prototype.handleInstallation = function() {
RestWrite.prototype.handleInstallation = function () {
if (this.response || this.className !== '_Installation') {
return;
}
@@ -1394,7 +1394,7 @@ RestWrite.prototype.handleInstallation = function() {
// If we short-circuted the object response - then we need to make sure we expand all the files,
// since this might not have a query, meaning it won't return the full result back.
// TODO: (nlutsenko) This should die when we move to per-class based controllers on _Session/_User
RestWrite.prototype.expandFilesForExistingObjects = function() {
RestWrite.prototype.expandFilesForExistingObjects = function () {
// Check whether we have a short-circuited response - only then run expansion.
if (this.response && this.response.response) {
this.config.filesController.expandFilesInObject(
@@ -1404,7 +1404,7 @@ RestWrite.prototype.expandFilesForExistingObjects = function() {
}
};
RestWrite.prototype.runDatabaseOperation = function() {
RestWrite.prototype.runDatabaseOperation = function () {
if (this.response) {
return;
}
@@ -1630,7 +1630,7 @@ RestWrite.prototype.runDatabaseOperation = function() {
};
// Returns nothing - doesn't wait for the trigger.
RestWrite.prototype.runAfterSaveTrigger = function() {
RestWrite.prototype.runAfterSaveTrigger = function () {
if (!this.response || !this.response.response) {
return;
}
@@ -1695,13 +1695,13 @@ RestWrite.prototype.runAfterSaveTrigger = function() {
this.response.response = result;
}
})
.catch(function(err) {
.catch(function (err) {
logger.warn('afterSave caught an error', err);
});
};
// A helper to figure out what location this operation happens at.
RestWrite.prototype.location = function() {
RestWrite.prototype.location = function () {
var middle =
this.className === '_User' ? '/users/' : '/classes/' + this.className + '/';
return this.config.mount + middle + this.data.objectId;
@@ -1709,12 +1709,12 @@ RestWrite.prototype.location = function() {
// A helper to get the object id for this operation.
// Because it could be either on the query or on the data
RestWrite.prototype.objectId = function() {
RestWrite.prototype.objectId = function () {
return this.data.objectId || this.query.objectId;
};
// Returns a copy of the data and delete bad keys (_auth_data, _hashed_password...)
RestWrite.prototype.sanitizedData = function() {
RestWrite.prototype.sanitizedData = function () {
const data = Object.keys(this.data).reduce((data, key) => {
// Regexp comes from Parse.Object.prototype.validate
if (!/^[A-Za-z][0-9A-Za-z_]*$/.test(key)) {
@@ -1726,9 +1726,9 @@ RestWrite.prototype.sanitizedData = function() {
};
// Returns an updated copy of the object
RestWrite.prototype.buildUpdatedObject = function(extraData) {
RestWrite.prototype.buildUpdatedObject = function (extraData) {
const updatedObject = triggers.inflate(extraData, this.originalData);
Object.keys(this.data).reduce(function(data, key) {
Object.keys(this.data).reduce(function (data, key) {
if (key.indexOf('.') > 0) {
// subdocument key with dot notation ('x.y':v => 'x':{'y':v})
const splittedKey = key.split('.');
@@ -1748,7 +1748,7 @@ RestWrite.prototype.buildUpdatedObject = function(extraData) {
return updatedObject;
};
RestWrite.prototype.cleanUserAuthData = function() {
RestWrite.prototype.cleanUserAuthData = function () {
if (this.response && this.response.response && this.className === '_User') {
const user = this.response.response;
if (user.authData) {
@@ -1764,7 +1764,7 @@ RestWrite.prototype.cleanUserAuthData = function() {
}
};
RestWrite.prototype._updateResponseWithData = function(response, data) {
RestWrite.prototype._updateResponseWithData = function (response, data) {
if (_.isEmpty(this.storage.fieldsChangedByTrigger)) {
return response;
}

View File

@@ -251,9 +251,14 @@ export class ClassesRouter extends PromiseRouter {
this.route('POST', '/classes/:className', promiseEnsureIdempotency, req => {
return this.handleCreate(req);
});
this.route('PUT', '/classes/:className/:objectId', promiseEnsureIdempotency, req => {
return this.handleUpdate(req);
});
this.route(
'PUT',
'/classes/:className/:objectId',
promiseEnsureIdempotency,
req => {
return this.handleUpdate(req);
}
);
this.route('DELETE', '/classes/:className/:objectId', req => {
return this.handleDelete(req);
});

View File

@@ -4,7 +4,10 @@ var Parse = require('parse/node').Parse,
triggers = require('../triggers');
import PromiseRouter from '../PromiseRouter';
import { promiseEnforceMasterKeyAccess, promiseEnsureIdempotency } from '../middlewares';
import {
promiseEnforceMasterKeyAccess,
promiseEnsureIdempotency,
} from '../middlewares';
import { jobStatusHandler } from '../StatusHandler';
import _ from 'lodash';
import { logger } from '../logger';

View File

@@ -40,9 +40,14 @@ export class InstallationsRouter extends ClassesRouter {
this.route('POST', '/installations', promiseEnsureIdempotency, req => {
return this.handleCreate(req);
});
this.route('PUT', '/installations/:objectId', promiseEnsureIdempotency, req => {
return this.handleUpdate(req);
});
this.route(
'PUT',
'/installations/:objectId',
promiseEnsureIdempotency,
req => {
return this.handleUpdate(req);
}
);
this.route('DELETE', '/installations/:objectId', req => {
return this.handleDelete(req);
});

View File

@@ -25,7 +25,7 @@ function makeBatchRoutingPathFunction(originalUrl, serverURL, publicServerURL) {
const apiPrefixLength = originalUrl.length - batchPath.length;
let apiPrefix = originalUrl.slice(0, apiPrefixLength);
const makeRoutablePath = function(requestPath) {
const makeRoutablePath = function (requestPath) {
// The routablePath is the path minus the api prefix
if (requestPath.slice(0, apiPrefix.length) != apiPrefix) {
throw new Parse.Error(
@@ -41,7 +41,7 @@ function makeBatchRoutingPathFunction(originalUrl, serverURL, publicServerURL) {
const publicPath = publicServerURL.path;
// Override the api prefix
apiPrefix = localPath;
return function(requestPath) {
return function (requestPath) {
// Build the new path by removing the public path
// and joining with the local path
const newPath = path.posix.join(

View File

@@ -239,13 +239,13 @@ export function handleParseHeaders(req, res, next) {
});
}
})
.then((auth) => {
.then(auth => {
if (auth) {
req.auth = auth;
next();
}
})
.catch((error) => {
.catch(error => {
if (error instanceof Parse.Error) {
next(error);
return;
@@ -416,12 +416,16 @@ export function promiseEnforceMasterKeyAccess(request) {
*/
export function promiseEnsureIdempotency(req) {
// Enable feature only for MongoDB
if (!(req.config.database.adapter instanceof MongoStorageAdapter)) { return Promise.resolve(); }
if (!(req.config.database.adapter instanceof MongoStorageAdapter)) {
return Promise.resolve();
}
// Get parameters
const config = req.config;
const requestId = ((req || {}).headers || {})["x-parse-request-id"];
const requestId = ((req || {}).headers || {})['x-parse-request-id'];
const { paths, ttl } = config.idempotencyOptions;
if (!requestId || !config.idempotencyOptions) { return Promise.resolve(); }
if (!requestId || !config.idempotencyOptions) {
return Promise.resolve();
}
// Request path may contain trailing slashes, depending on the original request, so remove
// leading and trailing slashes to make it easier to specify paths in the configuration
const reqPath = req.path.replace(/^\/|\/$/, '');
@@ -435,23 +439,27 @@ export function promiseEnsureIdempotency(req) {
break;
}
}
if (!match) { return Promise.resolve(); }
if (!match) {
return Promise.resolve();
}
// Try to store request
const expiryDate = new Date(new Date().setSeconds(new Date().getSeconds() + ttl));
return rest.create(
config,
auth.master(config),
'_Idempotency',
{ reqId: requestId, expire: Parse._encode(expiryDate) }
).catch (e => {
if (e.code == Parse.Error.DUPLICATE_VALUE) {
throw new Parse.Error(
Parse.Error.DUPLICATE_REQUEST,
'Duplicate request'
);
}
throw e;
});
const expiryDate = new Date(
new Date().setSeconds(new Date().getSeconds() + ttl)
);
return rest
.create(config, auth.master(config), '_Idempotency', {
reqId: requestId,
expire: Parse._encode(expiryDate),
})
.catch(e => {
if (e.code == Parse.Error.DUPLICATE_VALUE) {
throw new Parse.Error(
Parse.Error.DUPLICATE_REQUEST,
'Duplicate request'
);
}
throw e;
});
}
function invalidRequest(req, res) {

View File

@@ -31,7 +31,15 @@ function checkLiveQuery(className, config) {
}
// Returns a promise for an object with optional keys 'results' and 'count'.
function find(config, auth, className, restWhere, restOptions, clientSDK, context) {
function find(
config,
auth,
className,
restWhere,
restOptions,
clientSDK,
context
) {
enforceRoleSecurity('find', className, auth);
return triggers
.maybeRunQueryTrigger(
@@ -59,7 +67,15 @@ function find(config, auth, className, restWhere, restOptions, clientSDK, contex
}
// get is just like find but only queries an objectId.
const get = (config, auth, className, objectId, restOptions, clientSDK, context) => {
const get = (
config,
auth,
className,
objectId,
restOptions,
clientSDK,
context
) => {
var restWhere = { objectId };
enforceRoleSecurity('get', className, auth);
return triggers
@@ -218,7 +234,15 @@ function create(config, auth, className, restObject, clientSDK, context) {
// Returns a promise that contains the fields of the update that the
// REST API is supposed to return.
// Usually, this is just updatedAt.
function update(config, auth, className, restWhere, restObject, clientSDK, context) {
function update(
config,
auth,
className,
restWhere,
restObject,
clientSDK,
context
) {
enforceRoleSecurity('update', className, auth);
return Promise.resolve()