Add tests against multiple MongoDB versions (#7161)

* added tests environment with mongodb 4.4.3

* added CI test for mongodb 4.4.3

* added CI tests for MongoDB versions 4.0, 4.2

* improved flaky test (seems to max out the limit of simultaneous connections)

* added spec helpers to run tests only for specific MongoDB version

* added npm scripts to run tests against relevant mongodb versions

* added spec helper function to exclude specific mongodb version

* added test for changed aggregate query planner results

* fixed regex test with incorrect regex syntax

* fixed test where query has select no keys (empty array)

* added changelog entry and ordered list

* fixed test that tried to simultaneously delete and build index on same collection

* added MongoDB compatibility table to readme

* updated default local tests to use MongoDB 4.4.3

* added MongoDB badges for new versions to README

* fixed typo in readme

* added new test helper filter to contribution guide

* fixed incorrect storage engine for mongodb 4.4

* changed CI to test MongoDB 3.6 with mmapv1 storage engine and standalone

* improved CI test description

* added CI self check for new MongoDB versions

* fixed CI

* removed CI

* added CI

* added throwing error if any of the checks failed

* added github action connector

* improved error message

* improved error messages

* improved error message

* updated CI environment to MongoDB 3.6.22

* improved error messages

* update CI env name

* updated CI env name

* improved error message

* removed patch versions from CI env description

* improved status message

* removed version range from core lib

* added explicit mongodb version to redis test and node 12 test

* bumped Node 12 test to 12.20.1 (version currently recommended by AWS Elastic Beanstalk)
This commit is contained in:
Manuel
2021-02-07 23:16:46 +01:00
committed by GitHub
parent 6097e82194
commit b59517fd68
14 changed files with 524 additions and 116 deletions

View File

@@ -16,6 +16,10 @@
"expectAsync": true,
"notEqual": true,
"it_only_db": true,
"it_only_mongodb_version": true,
"fit_only_mongodb_version": true,
"it_exclude_mongodb_version": true,
"fit_exclude_mongodb_version": true,
"it_exclude_dbs": true,
"describe_only_db": true,
"describe_only": true,

View File

@@ -1,3 +1,5 @@
'use strict';
describe('Auth', () => {
const { Auth, getAuthForSessionToken } = require('../lib/Auth.js');
const Config = require('../lib/Config');
@@ -151,7 +153,7 @@ describe('Auth', () => {
});
describe('getRolesForUser', () => {
const rolesNumber = 300;
const rolesNumber = 100;
it('should load all roles without config', async () => {
const user = new Parse.User();
@@ -201,7 +203,6 @@ describe('Auth', () => {
});
it('should load all roles for different users with config', async () => {
const rolesNumber = 100;
const user = new Parse.User();
await user.signUp({
username: 'hello',

View File

@@ -54,7 +54,7 @@ describe_only_db('mongo')('Parse.Query hint', () => {
});
});
it('query aggregate with hint string', async () => {
it_only_mongodb_version('<4.4')('query aggregate with hint string', async () => {
const object = new TestObject({ foo: 'bar' });
await object.save();
@@ -74,7 +74,31 @@ describe_only_db('mongo')('Parse.Query hint', () => {
expect(queryPlanner.winningPlan.inputStage.indexName).toBe('_id_');
});
it('query aggregate with hint object', async () => {
// MongoDB >= 4.4 variant of the aggregate-with-string-hint test: on 4.4+ the
// explain output reports the winning plan stage as PROJECTION_SIMPLE, so the
// pre-4.4 expectations cannot be shared and this spec only runs when the
// MONGODB_VERSION env var satisfies '>=4.4'.
it_only_mongodb_version('>=4.4')('query aggregate with hint string', async () => {
const object = new TestObject({ foo: 'bar' });
await object.save();
// NOTE(review): _adaptiveCollection is a project adapter helper — presumably
// returns the raw MongoDB collection wrapper; verify against the Mongo adapter.
const collection = await config.database.adapter._adaptiveCollection('TestObject');
// Explain without a hint: planner should pick a full collection scan and
// there should be no nested input stage below COLLSCAN.
let result = await collection.aggregate([{ $group: { _id: '$foo' } }], {
explain: true,
});
let { queryPlanner } = result[0].stages[0].$cursor;
expect(queryPlanner.winningPlan.stage).toBe('PROJECTION_SIMPLE');
expect(queryPlanner.winningPlan.inputStage.stage).toBe('COLLSCAN');
expect(queryPlanner.winningPlan.inputStage.inputStage).toBeUndefined();
// Explain again with the '_id_' index hinted by name: planner should now
// FETCH from an IXSCAN over the _id_ index.
result = await collection.aggregate([{ $group: { _id: '$foo' } }], {
hint: '_id_',
explain: true,
});
queryPlanner = result[0].stages[0].$cursor.queryPlanner;
expect(queryPlanner.winningPlan.stage).toBe('PROJECTION_SIMPLE');
expect(queryPlanner.winningPlan.inputStage.stage).toBe('FETCH');
expect(queryPlanner.winningPlan.inputStage.inputStage.stage).toBe('IXSCAN');
expect(queryPlanner.winningPlan.inputStage.inputStage.indexName).toBe('_id_');
});
it_only_mongodb_version('<4.4')('query aggregate with hint object', async () => {
const object = new TestObject({ foo: 'bar' });
await object.save();
@@ -94,6 +118,31 @@ describe_only_db('mongo')('Parse.Query hint', () => {
expect(queryPlanner.winningPlan.inputStage.keyPattern).toEqual({ _id: 1 });
});
// MongoDB >= 4.4 variant of the aggregate-with-object-hint test: same flow as
// the string-hint spec but the hint is given as an index key pattern object
// ({ _id: 1 }) instead of an index name, and the keyPattern is asserted too.
it_only_mongodb_version('>=4.4')('query aggregate with hint object', async () => {
const object = new TestObject({ foo: 'bar' });
await object.save();
// NOTE(review): _adaptiveCollection is a project adapter helper — presumably
// returns the raw MongoDB collection wrapper; verify against the Mongo adapter.
const collection = await config.database.adapter._adaptiveCollection('TestObject');
// Baseline explain without a hint: expect a plain COLLSCAN.
let result = await collection.aggregate([{ $group: { _id: '$foo' } }], {
explain: true,
});
let { queryPlanner } = result[0].stages[0].$cursor;
expect(queryPlanner.winningPlan.stage).toBe('PROJECTION_SIMPLE');
expect(queryPlanner.winningPlan.inputStage.stage).toBe('COLLSCAN');
expect(queryPlanner.winningPlan.inputStage.inputStage).toBeUndefined();
// Explain with the index hinted by key pattern: expect FETCH -> IXSCAN on
// the _id_ index, with the matching key pattern reported.
result = await collection.aggregate([{ $group: { _id: '$foo' } }], {
hint: { _id: 1 },
explain: true,
});
queryPlanner = result[0].stages[0].$cursor.queryPlanner;
expect(queryPlanner.winningPlan.stage).toBe('PROJECTION_SIMPLE');
expect(queryPlanner.winningPlan.inputStage.stage).toBe('FETCH');
expect(queryPlanner.winningPlan.inputStage.inputStage.stage).toBe('IXSCAN');
expect(queryPlanner.winningPlan.inputStage.inputStage.indexName).toBe('_id_');
expect(queryPlanner.winningPlan.inputStage.inputStage.keyPattern).toEqual({ _id: 1 });
});
it('query find with hint (rest)', async () => {
const object = new TestObject();
await object.save();
@@ -119,7 +168,7 @@ describe_only_db('mongo')('Parse.Query hint', () => {
expect(explain.queryPlanner.winningPlan.inputStage.inputStage.indexName).toBe('_id_');
});
it('query aggregate with hint (rest)', async () => {
it_only_mongodb_version('<4.4')('query aggregate with hint (rest)', async () => {
const object = new TestObject({ foo: 'bar' });
await object.save();
let options = Object.assign({}, masterKeyOptions, {
@@ -145,4 +194,37 @@ describe_only_db('mongo')('Parse.Query hint', () => {
queryPlanner = response.data.results[0].stages[0].$cursor.queryPlanner;
expect(queryPlanner.winningPlan.inputStage.keyPattern).toEqual({ _id: 1 });
});
// MongoDB >= 4.4 variant of the REST aggregate-with-hint test: exercises the
// same explain/hint behavior through the HTTP /aggregate endpoint instead of
// the adapter's collection object.
it_only_mongodb_version('>=4.4')('query aggregate with hint (rest)', async () => {
const object = new TestObject({ foo: 'bar' });
await object.save();
// First request: explain without a hint — expect a plain COLLSCAN.
let options = Object.assign({}, masterKeyOptions, {
url: Parse.serverURL + '/aggregate/TestObject',
qs: {
explain: true,
group: JSON.stringify({ objectId: '$foo' }),
},
});
let response = await request(options);
let { queryPlanner } = response.data.results[0].stages[0].$cursor;
expect(queryPlanner.winningPlan.stage).toBe('PROJECTION_SIMPLE');
expect(queryPlanner.winningPlan.inputStage.stage).toBe('COLLSCAN');
expect(queryPlanner.winningPlan.inputStage.inputStage).toBeUndefined();
// Second request: same aggregate with hint '_id_' — expect FETCH -> IXSCAN
// on the _id_ index, with the matching key pattern reported.
options = Object.assign({}, masterKeyOptions, {
url: Parse.serverURL + '/aggregate/TestObject',
qs: {
explain: true,
hint: '_id_',
group: JSON.stringify({ objectId: '$foo' }),
},
});
response = await request(options);
queryPlanner = response.data.results[0].stages[0].$cursor.queryPlanner;
expect(queryPlanner.winningPlan.stage).toBe('PROJECTION_SIMPLE');
expect(queryPlanner.winningPlan.inputStage.stage).toBe('FETCH');
expect(queryPlanner.winningPlan.inputStage.inputStage.stage).toBe('IXSCAN');
expect(queryPlanner.winningPlan.inputStage.inputStage.indexName).toBe('_id_');
expect(queryPlanner.winningPlan.inputStage.inputStage.keyPattern).toEqual({ _id: 1 });
});
});

View File

@@ -2048,9 +2048,9 @@ describe('Parse.Query testing', () => {
const query = new Parse.Query(TestObject);
query.matches(
'myString',
"parse # First fragment. We'll write this in one case but match " +
'insensitively\n.com # Second fragment. This can be separated by any ' +
'character, including newline',
'parse # First fragment. We\'ll write this in one case but match insensitively\n' +
'.com # Second fragment. This can be separated by any character, including newline;' +
'however, this comment must end with a newline to recognize it as a comment\n',
'mixs'
);
query.find().then(
@@ -3209,6 +3209,7 @@ describe('Parse.Query testing', () => {
}
);
});
it('exclude keys', async () => {
const obj = new TestObject({ foo: 'baz', hello: 'world' });
await obj.save();

View File

@@ -1,4 +1,6 @@
'use strict';
const semver = require('semver');
// Sets up a Parse API server for testing.
jasmine.DEFAULT_TIMEOUT_INTERVAL = process.env.PARSE_SERVER_TEST_TIMEOUT || 5000;
@@ -417,6 +419,42 @@ global.it_only_db = db => {
}
};
// Spec helper: returns jasmine's `it` when the MongoDB version under test
// satisfies the given semver range, otherwise `xit` so the spec is registered
// but skipped. When MONGODB_VERSION is unset, the spec always runs.
// NOTE(review): assumes MONGODB_VERSION is a full semver string (e.g. 3.6.22);
// a partial value such as "4.4" would not satisfy any range — TODO confirm.
global.it_only_mongodb_version = version => {
  const current = process.env.MONGODB_VERSION;
  const matches = !current || semver.satisfies(current, version);
  return matches ? it : xit;
};
// Focused variant of it_only_mongodb_version: returns jasmine's `fit` (run
// exclusively) when the MongoDB version under test satisfies the given semver
// range — or when MONGODB_VERSION is unset — otherwise `xit` (skipped).
global.fit_only_mongodb_version = version => {
  const current = process.env.MONGODB_VERSION;
  const matches = !current || semver.satisfies(current, version);
  return matches ? fit : xit;
};
// Spec helper: inverse of it_only_mongodb_version. Returns jasmine's `it`
// unless the MongoDB version under test falls inside the excluded semver
// range, in which case it returns `xit` (skipped). When MONGODB_VERSION is
// unset, the spec always runs.
global.it_exclude_mongodb_version = version => {
  const current = process.env.MONGODB_VERSION;
  const excluded = current && semver.satisfies(current, version);
  return excluded ? xit : it;
};
// Focused variant of it_exclude_mongodb_version: returns jasmine's `fit`
// (run exclusively) unless the MongoDB version under test falls inside the
// excluded semver range, in which case it returns `xit` (skipped). When
// MONGODB_VERSION is unset, the spec always runs focused.
global.fit_exclude_mongodb_version = version => {
  const current = process.env.MONGODB_VERSION;
  const excluded = current && semver.satisfies(current, version);
  return excluded ? xit : fit;
};
global.fit_exclude_dbs = excluded => {
if (excluded.indexOf(process.env.PARSE_SERVER_TEST_DB) >= 0) {
return xit;

View File

@@ -3340,94 +3340,130 @@ describe('schemas', () => {
});
});
it('lets you add and delete indexes', done => {
request({
it('lets you add and delete indexes', async () => {
// Wait due to index building in MongoDB on background process with collection lock
const waitForIndexBuild = new Promise(r => setTimeout(r, 500));
await request({
url: 'http://localhost:8378/1/schemas/NewClass',
method: 'POST',
headers: masterKeyHeaders,
json: true,
body: {},
}).then(() => {
request({
url: 'http://localhost:8378/1/schemas/NewClass',
method: 'PUT',
headers: masterKeyHeaders,
json: true,
body: {
fields: {
aString: { type: 'String' },
bString: { type: 'String' },
cString: { type: 'String' },
dString: { type: 'String' },
},
indexes: {
name1: { aString: 1 },
name2: { bString: 1 },
name3: { cString: 1 },
},
},
}).then(response => {
expect(
dd(response.data, {
className: 'NewClass',
fields: {
ACL: { type: 'ACL' },
createdAt: { type: 'Date' },
updatedAt: { type: 'Date' },
objectId: { type: 'String' },
aString: { type: 'String' },
bString: { type: 'String' },
cString: { type: 'String' },
dString: { type: 'String' },
},
classLevelPermissions: defaultClassLevelPermissions,
indexes: {
_id_: { _id: 1 },
name1: { aString: 1 },
name2: { bString: 1 },
name3: { cString: 1 },
},
})
).toEqual(undefined);
request({
url: 'http://localhost:8378/1/schemas/NewClass',
method: 'PUT',
headers: masterKeyHeaders,
json: true,
body: {
indexes: {
name1: { __op: 'Delete' },
name2: { __op: 'Delete' },
name4: { dString: 1 },
},
},
}).then(response => {
expect(response.data).toEqual({
className: 'NewClass',
fields: {
ACL: { type: 'ACL' },
createdAt: { type: 'Date' },
updatedAt: { type: 'Date' },
objectId: { type: 'String' },
aString: { type: 'String' },
bString: { type: 'String' },
cString: { type: 'String' },
dString: { type: 'String' },
},
classLevelPermissions: defaultClassLevelPermissions,
indexes: {
_id_: { _id: 1 },
name3: { cString: 1 },
name4: { dString: 1 },
},
});
config.database.adapter.getIndexes('NewClass').then(indexes => {
expect(indexes.length).toEqual(3);
done();
});
});
});
});
let response = await request({
url: 'http://localhost:8378/1/schemas/NewClass',
method: 'PUT',
headers: masterKeyHeaders,
json: true,
body: {
fields: {
aString: { type: 'String' },
bString: { type: 'String' },
cString: { type: 'String' },
dString: { type: 'String' },
},
indexes: {
name1: { aString: 1 },
name2: { bString: 1 },
name3: { cString: 1 },
},
},
});
expect(
dd(response.data, {
className: 'NewClass',
fields: {
ACL: { type: 'ACL' },
createdAt: { type: 'Date' },
updatedAt: { type: 'Date' },
objectId: { type: 'String' },
aString: { type: 'String' },
bString: { type: 'String' },
cString: { type: 'String' },
dString: { type: 'String' },
},
classLevelPermissions: defaultClassLevelPermissions,
indexes: {
_id_: { _id: 1 },
name1: { aString: 1 },
name2: { bString: 1 },
name3: { cString: 1 },
},
})
).toEqual(undefined);
await waitForIndexBuild;
response = await request({
url: 'http://localhost:8378/1/schemas/NewClass',
method: 'PUT',
headers: masterKeyHeaders,
json: true,
body: {
indexes: {
name1: { __op: 'Delete' },
name2: { __op: 'Delete' },
},
},
});
expect(response.data).toEqual({
className: 'NewClass',
fields: {
ACL: { type: 'ACL' },
createdAt: { type: 'Date' },
updatedAt: { type: 'Date' },
objectId: { type: 'String' },
aString: { type: 'String' },
bString: { type: 'String' },
cString: { type: 'String' },
dString: { type: 'String' },
},
classLevelPermissions: defaultClassLevelPermissions,
indexes: {
_id_: { _id: 1 },
name3: { cString: 1 },
},
});
await waitForIndexBuild;
response = await request({
url: 'http://localhost:8378/1/schemas/NewClass',
method: 'PUT',
headers: masterKeyHeaders,
json: true,
body: {
indexes: {
name4: { dString: 1 },
},
},
});
expect(response.data).toEqual({
className: 'NewClass',
fields: {
ACL: { type: 'ACL' },
createdAt: { type: 'Date' },
updatedAt: { type: 'Date' },
objectId: { type: 'String' },
aString: { type: 'String' },
bString: { type: 'String' },
cString: { type: 'String' },
dString: { type: 'String' },
},
classLevelPermissions: defaultClassLevelPermissions,
indexes: {
_id_: { _id: 1 },
name3: { cString: 1 },
name4: { dString: 1 },
},
});
await waitForIndexBuild;
const indexes = await config.database.adapter.getIndexes('NewClass');
expect(indexes.length).toEqual(3);
});
it('cannot delete index that does not exist', done => {