Prettier some files + opti object relation (#7044)

This commit is contained in:
Antoine Cormouls
2020-12-07 05:25:08 +01:00
committed by GitHub
parent de9c7dc12e
commit 88e958a75f
4 changed files with 40 additions and 133 deletions

View File

@@ -44,9 +44,7 @@ describe_only_db('mongo')('GridFSBucket and GridStore interop', () => {
await expectMissingFile(encryptedAdapter, 'myFileName');
const originalString = 'abcdefghi';
await encryptedAdapter.createFile('myFileName', originalString);
const unencryptedResult = await unencryptedAdapter.getFileData(
'myFileName'
);
const unencryptedResult = await unencryptedAdapter.getFileData('myFileName');
expect(unencryptedResult.toString('utf8')).not.toBe(originalString);
const encryptedResult = await encryptedAdapter.getFileData('myFileName');
expect(encryptedResult.toString('utf8')).toBe(originalString);
@@ -71,10 +69,7 @@ describe_only_db('mongo')('GridFSBucket and GridStore interop', () => {
const unencryptedResult2 = await unencryptedAdapter.getFileData(fileName2);
expect(unencryptedResult2.toString('utf8')).toBe(data2);
//Check if encrypted adapter can read data and make sure it's not the same as unEncrypted adapter
const {
rotated,
notRotated,
} = await encryptedAdapter.rotateEncryptionKey();
const { rotated, notRotated } = await encryptedAdapter.rotateEncryptionKey();
expect(rotated.length).toEqual(2);
expect(
rotated.filter(function (value) {
@@ -101,30 +96,18 @@ describe_only_db('mongo')('GridFSBucket and GridStore interop', () => {
it('should rotate key of all old encrypted GridFS files to encrypted files', async () => {
const oldEncryptionKey = 'oldKeyThatILoved';
const oldEncryptedAdapter = new GridFSBucketAdapter(
databaseURI,
{},
oldEncryptionKey
);
const encryptedAdapter = new GridFSBucketAdapter(
databaseURI,
{},
'newKeyThatILove'
);
const oldEncryptedAdapter = new GridFSBucketAdapter(databaseURI, {}, oldEncryptionKey);
const encryptedAdapter = new GridFSBucketAdapter(databaseURI, {}, 'newKeyThatILove');
const fileName1 = 'file1.txt';
const data1 = 'hello world';
const fileName2 = 'file2.txt';
const data2 = 'hello new world';
//Store unencrypted files
await oldEncryptedAdapter.createFile(fileName1, data1);
const oldEncryptedResult1 = await oldEncryptedAdapter.getFileData(
fileName1
);
const oldEncryptedResult1 = await oldEncryptedAdapter.getFileData(fileName1);
expect(oldEncryptedResult1.toString('utf8')).toBe(data1);
await oldEncryptedAdapter.createFile(fileName2, data2);
const oldEncryptedResult2 = await oldEncryptedAdapter.getFileData(
fileName2
);
const oldEncryptedResult2 = await oldEncryptedAdapter.getFileData(fileName2);
expect(oldEncryptedResult2.toString('utf8')).toBe(data2);
//Check if encrypted adapter can read data and make sure it's not the same as unEncrypted adapter
const { rotated, notRotated } = await encryptedAdapter.rotateEncryptionKey({
@@ -170,11 +153,7 @@ describe_only_db('mongo')('GridFSBucket and GridStore interop', () => {
it('should rotate key of all old encrypted GridFS files to unencrypted files', async () => {
const oldEncryptionKey = 'oldKeyThatILoved';
const oldEncryptedAdapter = new GridFSBucketAdapter(
databaseURI,
{},
oldEncryptionKey
);
const oldEncryptedAdapter = new GridFSBucketAdapter(databaseURI, {}, oldEncryptionKey);
const unEncryptedAdapter = new GridFSBucketAdapter(databaseURI);
const fileName1 = 'file1.txt';
const data1 = 'hello world';
@@ -182,20 +161,13 @@ describe_only_db('mongo')('GridFSBucket and GridStore interop', () => {
const data2 = 'hello new world';
//Store unencrypted files
await oldEncryptedAdapter.createFile(fileName1, data1);
const oldEncryptedResult1 = await oldEncryptedAdapter.getFileData(
fileName1
);
const oldEncryptedResult1 = await oldEncryptedAdapter.getFileData(fileName1);
expect(oldEncryptedResult1.toString('utf8')).toBe(data1);
await oldEncryptedAdapter.createFile(fileName2, data2);
const oldEncryptedResult2 = await oldEncryptedAdapter.getFileData(
fileName2
);
const oldEncryptedResult2 = await oldEncryptedAdapter.getFileData(fileName2);
expect(oldEncryptedResult2.toString('utf8')).toBe(data2);
//Check if unEncrypted adapter can read data and make sure it's not the same as oldEncrypted adapter
const {
rotated,
notRotated,
} = await unEncryptedAdapter.rotateEncryptionKey({
const { rotated, notRotated } = await unEncryptedAdapter.rotateEncryptionKey({
oldKey: oldEncryptionKey,
});
expect(rotated.length).toEqual(2);
@@ -238,16 +210,8 @@ describe_only_db('mongo')('GridFSBucket and GridStore interop', () => {
it('should only encrypt specified fileNames', async () => {
const oldEncryptionKey = 'oldKeyThatILoved';
const oldEncryptedAdapter = new GridFSBucketAdapter(
databaseURI,
{},
oldEncryptionKey
);
const encryptedAdapter = new GridFSBucketAdapter(
databaseURI,
{},
'newKeyThatILove'
);
const oldEncryptedAdapter = new GridFSBucketAdapter(databaseURI, {}, oldEncryptionKey);
const encryptedAdapter = new GridFSBucketAdapter(databaseURI, {}, 'newKeyThatILove');
const unEncryptedAdapter = new GridFSBucketAdapter(databaseURI);
const fileName1 = 'file1.txt';
const data1 = 'hello world';
@@ -255,14 +219,10 @@ describe_only_db('mongo')('GridFSBucket and GridStore interop', () => {
const data2 = 'hello new world';
//Store unencrypted files
await oldEncryptedAdapter.createFile(fileName1, data1);
const oldEncryptedResult1 = await oldEncryptedAdapter.getFileData(
fileName1
);
const oldEncryptedResult1 = await oldEncryptedAdapter.getFileData(fileName1);
expect(oldEncryptedResult1.toString('utf8')).toBe(data1);
await oldEncryptedAdapter.createFile(fileName2, data2);
const oldEncryptedResult2 = await oldEncryptedAdapter.getFileData(
fileName2
);
const oldEncryptedResult2 = await oldEncryptedAdapter.getFileData(fileName2);
expect(oldEncryptedResult2.toString('utf8')).toBe(data2);
//Inject unencrypted file to see if it causes an issue
const fileName3 = 'file3.txt';
@@ -318,16 +278,8 @@ describe_only_db('mongo')('GridFSBucket and GridStore interop', () => {
it("should return fileNames of those it can't encrypt with the new key", async () => {
const oldEncryptionKey = 'oldKeyThatILoved';
const oldEncryptedAdapter = new GridFSBucketAdapter(
databaseURI,
{},
oldEncryptionKey
);
const encryptedAdapter = new GridFSBucketAdapter(
databaseURI,
{},
'newKeyThatILove'
);
const oldEncryptedAdapter = new GridFSBucketAdapter(databaseURI, {}, oldEncryptionKey);
const encryptedAdapter = new GridFSBucketAdapter(databaseURI, {}, 'newKeyThatILove');
const unEncryptedAdapter = new GridFSBucketAdapter(databaseURI);
const fileName1 = 'file1.txt';
const data1 = 'hello world';
@@ -335,14 +287,10 @@ describe_only_db('mongo')('GridFSBucket and GridStore interop', () => {
const data2 = 'hello new world';
//Store unencrypted files
await oldEncryptedAdapter.createFile(fileName1, data1);
const oldEncryptedResult1 = await oldEncryptedAdapter.getFileData(
fileName1
);
const oldEncryptedResult1 = await oldEncryptedAdapter.getFileData(fileName1);
expect(oldEncryptedResult1.toString('utf8')).toBe(data1);
await oldEncryptedAdapter.createFile(fileName2, data2);
const oldEncryptedResult2 = await oldEncryptedAdapter.getFileData(
fileName2
);
const oldEncryptedResult2 = await oldEncryptedAdapter.getFileData(fileName2);
expect(oldEncryptedResult2.toString('utf8')).toBe(data2);
//Inject unencrypted file to see if it causes an issue
const fileName3 = 'file3.txt';

View File

@@ -39,7 +39,7 @@ it('Should succeed with right credentials when LDAPS is used and certifcate is n
suffix: 'o=example',
url: `ldaps://localhost:${sslport}`,
dn: 'uid={{id}}, o=example',
tlsOptions: { rejectUnauthorized: false }
tlsOptions: { rejectUnauthorized: false },
};
ldap
.validateAuthData({ id: 'testuser', password: 'secret' }, options)
@@ -57,8 +57,8 @@ it('Should succeed when LDAPS is used and the presented certificate is the expec
dn: 'uid={{id}}, o=example',
tlsOptions: {
ca: fs.readFileSync(__dirname + '/support/cert/cert.pem'),
rejectUnauthorized: true
}
rejectUnauthorized: true,
},
};
ldap
.validateAuthData({ id: 'testuser', password: 'secret' }, options)
@@ -76,8 +76,8 @@ it('Should fail when LDAPS is used and the presented certificate is not the expe
dn: 'uid={{id}}, o=example',
tlsOptions: {
ca: fs.readFileSync(__dirname + '/support/cert/anothercert.pem'),
rejectUnauthorized: true
}
rejectUnauthorized: true,
},
};
ldap
.validateAuthData({ id: 'testuser', password: 'secret' }, options)
@@ -98,8 +98,8 @@ it('Should fail when LDAPS is used certifcate matches but credentials are wrong'
dn: 'uid={{id}}, o=example',
tlsOptions: {
ca: fs.readFileSync(__dirname + '/support/cert/cert.pem'),
rejectUnauthorized: true
}
rejectUnauthorized: true,
},
};
ldap
.validateAuthData({ id: 'testuser', password: 'wrong!' }, options)
@@ -112,7 +112,6 @@ it('Should fail when LDAPS is used certifcate matches but credentials are wrong'
});
});
it('Should fail with wrong credentials', done => {
mockLdapServer(port, 'uid=testuser, o=example').then(server => {
const options = {

View File

@@ -3,8 +3,8 @@ const fs = require('fs');
const tlsOptions = {
key: fs.readFileSync(__dirname + '/support/cert/key.pem'),
certificate: fs.readFileSync(__dirname + '/support/cert/cert.pem')
}
certificate: fs.readFileSync(__dirname + '/support/cert/cert.pem'),
};
function newServer(port, dn, provokeSearchError = false, ssl = false) {
const server = ssl ? ldapjs.createServer(tlsOptions) : ldapjs.createServer();

View File

@@ -10,10 +10,10 @@ const needToGetAllKeys = (fields, keys, parseClasses) =>
? keys.split(',').some(keyName => {
const key = keyName.split('.');
if (fields[key[0]]) {
if (fields[key[0]].type === 'Relation') return false;
if (fields[key[0]].type === 'Pointer') {
const subClass = parseClasses.find(
({ className: parseClassName }) =>
fields[key[0]].targetClass === parseClassName
({ className: parseClassName }) => fields[key[0]].targetClass === parseClassName
);
if (subClass && subClass.fields[key[1]]) {
// Current sub key is not custom
@@ -50,9 +50,7 @@ const getObject = async (
try {
if (
!needToGetAllKeys(
parseClasses.find(
({ className: parseClassName }) => className === parseClassName
).fields,
parseClasses.find(({ className: parseClassName }) => className === parseClassName).fields,
keys,
parseClasses
)
@@ -141,15 +139,7 @@ const findObjects = async (
preCountOptions.subqueryReadPreference = subqueryReadPreference;
}
preCount = (
await rest.find(
config,
auth,
className,
where,
preCountOptions,
info.clientSDK,
info.context
)
await rest.find(config, auth, className, where, preCountOptions, info.clientSDK, info.context)
).count;
if ((skip || 0) + limit < preCount) {
skip = preCount - limit;
@@ -158,11 +148,7 @@ const findObjects = async (
const options = {};
if (
selectedFields.find(
field => field.startsWith('edges.') || field.startsWith('pageInfo.')
)
) {
if (selectedFields.find(field => field.startsWith('edges.') || field.startsWith('pageInfo.'))) {
if (limit || limit === 0) {
options.limit = limit;
} else {
@@ -181,9 +167,7 @@ const findObjects = async (
}
if (
!needToGetAllKeys(
parseClasses.find(
({ className: parseClassName }) => className === parseClassName
).fields,
parseClasses.find(({ className: parseClassName }) => className === parseClassName).fields,
keys,
parseClasses
)
@@ -245,9 +229,7 @@ const findObjects = async (
pageInfo = {
hasPreviousPage:
((preCount && preCount > 0) || (count && count > 0)) &&
skip !== undefined &&
skip > 0,
((preCount && preCount > 0) || (count && count > 0)) && skip !== undefined && skip > 0,
startCursor: offsetToCursor(skip || 0),
endCursor: offsetToCursor((skip || 0) + (results.length || 1) - 1),
hasNextPage: (preCount || count) > (skip || 0) + results.length,
@@ -261,14 +243,7 @@ const findObjects = async (
};
};
const calculateSkipAndLimit = (
skipInput,
first,
after,
last,
before,
maxLimit
) => {
const calculateSkipAndLimit = (skipInput, first, after, last, before, maxLimit) => {
let skip = undefined;
let limit = undefined;
let needToPreCount = false;
@@ -276,10 +251,7 @@ const calculateSkipAndLimit = (
// Validates the skip input
if (skipInput || skipInput === 0) {
if (skipInput < 0) {
throw new Parse.Error(
Parse.Error.INVALID_QUERY,
'Skip should be a positive number'
);
throw new Parse.Error(Parse.Error.INVALID_QUERY, 'Skip should be a positive number');
}
skip = skipInput;
}
@@ -288,10 +260,7 @@ const calculateSkipAndLimit = (
if (after) {
after = cursorToOffset(after);
if ((!after && after !== 0) || after < 0) {
throw new Parse.Error(
Parse.Error.INVALID_QUERY,
'After is not a valid cursor'
);
throw new Parse.Error(Parse.Error.INVALID_QUERY, 'After is not a valid cursor');
}
// If skip and after are passed, a new skip is calculated by adding them
@@ -301,10 +270,7 @@ const calculateSkipAndLimit = (
// Validates the first param
if (first || first === 0) {
if (first < 0) {
throw new Parse.Error(
Parse.Error.INVALID_QUERY,
'First should be a positive number'
);
throw new Parse.Error(Parse.Error.INVALID_QUERY, 'First should be a positive number');
}
// The first param is translated to the limit param of the Parse legacy API
@@ -316,10 +282,7 @@ const calculateSkipAndLimit = (
// This method converts the cursor to the index of the object
before = cursorToOffset(before);
if ((!before && before !== 0) || before < 0) {
throw new Parse.Error(
Parse.Error.INVALID_QUERY,
'Before is not a valid cursor'
);
throw new Parse.Error(Parse.Error.INVALID_QUERY, 'Before is not a valid cursor');
}
if ((skip || 0) >= before) {
@@ -334,10 +297,7 @@ const calculateSkipAndLimit = (
// Validates the last param
if (last || last === 0) {
if (last < 0) {
throw new Parse.Error(
Parse.Error.INVALID_QUERY,
'Last should be a positive number'
);
throw new Parse.Error(Parse.Error.INVALID_QUERY, 'Last should be a positive number');
}
if (last > maxLimit) {