Batch transaction (#5849)

* Batch transaction boilerplate

* Refactoring transaction boilerplate

* Independent sessions test

* Transactions - partial

* Missing only one test

* All tests passing for mongo db

* Tests on Travis

* Transactions on postgres

* Fix travis to restart mongodb

* Remove mongodb service and keep only mongodb runner

* MongoDB service back

* Initialize replicaset

* Remove mongodb runner again

* Again only with mongodb-runner and removing cache

* Trying with pretest and posttest

* WiredTiger

* Pretest and posttest again

* Removing nonexistent scripts

* wiredTiger

* One more attempt

* Trying another way to run mongodb-runner

* Fixing tests

* Include batch transaction on direct access

* Add tests to direct access
This commit is contained in:
Antonio Davi Macedo Coelho de Castro
2019-07-31 02:41:07 -07:00
committed by GitHub
parent fe18fe0f61
commit 8b97c1380b
15 changed files with 931 additions and 106 deletions

View File

@@ -1,7 +1,6 @@
language: node_js
dist: trusty
services:
- mongodb
- postgresql
- redis-server
- docker
@@ -19,13 +18,12 @@ branches:
cache:
directories:
- "$HOME/.npm"
- "$HOME/.mongodb/versions"
stage: test
env:
global:
- COVERAGE_OPTION='./node_modules/.bin/nyc'
matrix:
- MONGODB_VERSION=4.0.4
- MONGODB_VERSION=4.0.4 MONGODB_TOPOLOGY=replicaset MONGODB_STORAGE_ENGINE=wiredTiger
- MONGODB_VERSION=3.6.9
- PARSE_SERVER_TEST_DB=postgres
- PARSE_SERVER_TEST_CACHE=redis
@@ -42,7 +40,6 @@ before_script:
- psql -c 'create database parse_server_postgres_adapter_test_database;' -U postgres
- psql -c 'CREATE EXTENSION postgis;' -U postgres -d parse_server_postgres_adapter_test_database
- psql -c 'CREATE EXTENSION postgis_topology;' -U postgres -d parse_server_postgres_adapter_test_database
- silent=1 mongodb-runner --start
- greenkeeper-lockfile-update
script:
- npm run lint

View File

@@ -95,8 +95,10 @@
"lint": "flow && eslint --cache ./",
"build": "babel src/ -d lib/ --copy-files",
"watch": "babel --watch src/ -d lib/ --copy-files",
"test": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=4.0.4} MONGODB_STORAGE_ENGINE=mmapv1 TESTING=1 jasmine",
"coverage": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=4.0.4} MONGODB_STORAGE_ENGINE=mmapv1 TESTING=1 nyc jasmine",
"pretest": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=4.0.4} MONGODB_TOPOLOGY=${MONGODB_TOPOLOGY:=standalone} MONGODB_STORAGE_ENGINE=${MONGODB_STORAGE_ENGINE:=mmapv1} mongodb-runner start",
"test": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=4.0.4} MONGODB_TOPOLOGY=${MONGODB_TOPOLOGY:=standalone} MONGODB_STORAGE_ENGINE=${MONGODB_STORAGE_ENGINE:=mmapv1} TESTING=1 jasmine",
"posttest": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=4.0.4} MONGODB_TOPOLOGY=${MONGODB_TOPOLOGY:=standalone} MONGODB_STORAGE_ENGINE=${MONGODB_STORAGE_ENGINE:=mmapv1} mongodb-runner stop",
"coverage": "npm run pretest && cross-env MONGODB_VERSION=${MONGODB_VERSION:=4.0.4} MONGODB_TOPOLOGY=${MONGODB_TOPOLOGY:=standalone} MONGODB_STORAGE_ENGINE=${MONGODB_STORAGE_ENGINE:=mmapv1} TESTING=1 nyc jasmine && npm run posttest",
"start": "node ./bin/parse-server",
"prepare": "npm run build",
"postinstall": "node -p 'require(\"./postinstall.js\")()'"

View File

@@ -25,7 +25,8 @@
"jequal": true,
"create": true,
"arrayContains": true,
"expectAsync": true
"expectAsync": true,
"databaseAdapter": true
},
"rules": {
"no-console": [0],

View File

@@ -18,7 +18,7 @@ describe('GridFSBucket and GridStore interop', () => {
beforeEach(async () => {
const gsAdapter = new GridStoreAdapter(databaseURI);
const db = await gsAdapter._connect();
db.dropDatabase();
await db.dropDatabase();
});
it('a file created in GridStore should be available in GridFS', async () => {

View File

@@ -13,7 +13,7 @@ describe_only_db('mongo')('GridStoreAdapter', () => {
const config = Config.get(Parse.applicationId);
const gridStoreAdapter = new GridStoreAdapter(databaseURI);
const db = await gridStoreAdapter._connect();
db.dropDatabase();
await db.dropDatabase();
const filesController = new FilesController(
gridStoreAdapter,
Parse.applicationId,

View File

@@ -2,6 +2,7 @@ const ParseServerRESTController = require('../lib/ParseServerRESTController')
.ParseServerRESTController;
const ParseServer = require('../lib/ParseServer').default;
const Parse = require('parse/node').Parse;
const TestUtils = require('../lib/TestUtils');
let RESTController;
@@ -40,7 +41,7 @@ describe('ParseServerRESTController', () => {
);
});
it('should handle a POST batch', done => {
it('should handle a POST batch without transaction', done => {
RESTController.request('POST', 'batch', {
requests: [
{
@@ -69,6 +70,272 @@ describe('ParseServerRESTController', () => {
);
});
it('should handle a POST batch with transaction=false', done => {
RESTController.request('POST', 'batch', {
requests: [
{
method: 'GET',
path: '/classes/MyObject',
},
{
method: 'POST',
path: '/classes/MyObject',
body: { key: 'value' },
},
{
method: 'GET',
path: '/classes/MyObject',
},
],
transaction: false,
}).then(
res => {
expect(res.length).toBe(3);
done();
},
err => {
jfail(err);
done();
}
);
});
if (
(process.env.MONGODB_VERSION === '4.0.4' &&
process.env.MONGODB_TOPOLOGY === 'replicaset' &&
process.env.MONGODB_STORAGE_ENGINE === 'wiredTiger') ||
process.env.PARSE_SERVER_TEST_DB === 'postgres'
) {
describe('transactions', () => {
beforeAll(async () => {
if (
process.env.MONGODB_VERSION === '4.0.4' &&
process.env.MONGODB_TOPOLOGY === 'replicaset' &&
process.env.MONGODB_STORAGE_ENGINE === 'wiredTiger'
) {
await reconfigureServer({
databaseAdapter: undefined,
databaseURI:
'mongodb://localhost:27017/parseServerMongoAdapterTestDatabase?replicaSet=replicaset',
});
}
});
beforeEach(async () => {
await TestUtils.destroyAllDataPermanently(true);
});
it('should handle a batch request with transaction = true', done => {
const myObject = new Parse.Object('MyObject'); // This is important because transaction only works on pre-existing collections
myObject
.save()
.then(() => {
return myObject.destroy();
})
.then(() => {
spyOn(databaseAdapter, 'createObject').and.callThrough();
RESTController.request('POST', 'batch', {
requests: [
{
method: 'POST',
path: '/1/classes/MyObject',
body: { key: 'value1' },
},
{
method: 'POST',
path: '/1/classes/MyObject',
body: { key: 'value2' },
},
],
transaction: true,
}).then(response => {
expect(response.length).toEqual(2);
expect(response[0].success.objectId).toBeDefined();
expect(response[0].success.createdAt).toBeDefined();
expect(response[1].success.objectId).toBeDefined();
expect(response[1].success.createdAt).toBeDefined();
const query = new Parse.Query('MyObject');
query.find().then(results => {
expect(databaseAdapter.createObject.calls.count()).toBe(2);
expect(databaseAdapter.createObject.calls.argsFor(0)[3]).toBe(
databaseAdapter.createObject.calls.argsFor(1)[3]
);
expect(results.map(result => result.get('key')).sort()).toEqual(
['value1', 'value2']
);
done();
});
});
});
});
it('should not save anything when one operation fails in a transaction', done => {
const myObject = new Parse.Object('MyObject'); // This is important because transaction only works on pre-existing collections
myObject
.save()
.then(() => {
return myObject.destroy();
})
.then(() => {
RESTController.request('POST', 'batch', {
requests: [
{
method: 'POST',
path: '/1/classes/MyObject',
body: { key: 'value1' },
},
{
method: 'POST',
path: '/1/classes/MyObject',
body: { key: 10 },
},
],
transaction: true,
}).catch(error => {
expect(error.message).toBeDefined();
const query = new Parse.Query('MyObject');
query.find().then(results => {
expect(results.length).toBe(0);
done();
});
});
});
});
it('should generate separate session for each call', async () => {
const myObject = new Parse.Object('MyObject'); // This is important because transaction only works on pre-existing collections
await myObject.save();
await myObject.destroy();
const myObject2 = new Parse.Object('MyObject2'); // This is important because transaction only works on pre-existing collections
await myObject2.save();
await myObject2.destroy();
spyOn(databaseAdapter, 'createObject').and.callThrough();
let myObjectCalls = 0;
Parse.Cloud.beforeSave('MyObject', async () => {
myObjectCalls++;
if (myObjectCalls === 2) {
try {
await RESTController.request('POST', 'batch', {
requests: [
{
method: 'POST',
path: '/1/classes/MyObject2',
body: { key: 'value1' },
},
{
method: 'POST',
path: '/1/classes/MyObject2',
body: { key: 10 },
},
],
transaction: true,
});
fail('should fail');
} catch (e) {
expect(e).toBeDefined();
}
}
});
const response = await RESTController.request('POST', 'batch', {
requests: [
{
method: 'POST',
path: '/1/classes/MyObject',
body: { key: 'value1' },
},
{
method: 'POST',
path: '/1/classes/MyObject',
body: { key: 'value2' },
},
],
transaction: true,
});
expect(response.length).toEqual(2);
expect(response[0].success.objectId).toBeDefined();
expect(response[0].success.createdAt).toBeDefined();
expect(response[1].success.objectId).toBeDefined();
expect(response[1].success.createdAt).toBeDefined();
await RESTController.request('POST', 'batch', {
requests: [
{
method: 'POST',
path: '/1/classes/MyObject3',
body: { key: 'value1' },
},
{
method: 'POST',
path: '/1/classes/MyObject3',
body: { key: 'value2' },
},
],
});
const query = new Parse.Query('MyObject');
const results = await query.find();
expect(results.map(result => result.get('key')).sort()).toEqual([
'value1',
'value2',
]);
const query2 = new Parse.Query('MyObject2');
const results2 = await query2.find();
expect(results2.length).toEqual(0);
const query3 = new Parse.Query('MyObject3');
const results3 = await query3.find();
expect(results3.map(result => result.get('key')).sort()).toEqual([
'value1',
'value2',
]);
expect(databaseAdapter.createObject.calls.count()).toBe(5);
let transactionalSession;
let transactionalSession2;
let myObjectDBCalls = 0;
let myObject2DBCalls = 0;
let myObject3DBCalls = 0;
for (let i = 0; i < 5; i++) {
const args = databaseAdapter.createObject.calls.argsFor(i);
switch (args[0]) {
case 'MyObject':
myObjectDBCalls++;
if (!transactionalSession) {
transactionalSession = args[3];
} else {
expect(transactionalSession).toBe(args[3]);
}
if (transactionalSession2) {
expect(transactionalSession2).not.toBe(args[3]);
}
break;
case 'MyObject2':
myObject2DBCalls++;
transactionalSession2 = args[3];
if (transactionalSession) {
expect(transactionalSession).not.toBe(args[3]);
}
break;
case 'MyObject3':
myObject3DBCalls++;
expect(args[3]).toEqual(null);
break;
}
}
expect(myObjectDBCalls).toEqual(2);
expect(myObject2DBCalls).toEqual(1);
expect(myObject3DBCalls).toEqual(2);
});
});
}
it('should handle a POST request', done => {
RESTController.request('POST', '/classes/MyObject', { key: 'value' })
.then(() => {

View File

@@ -1,4 +1,6 @@
const batch = require('../lib/batch');
const request = require('../lib/request');
const TestUtils = require('../lib/TestUtils');
const originalURL = '/parse/batch';
const serverURL = 'http://localhost:1234/parse';
@@ -7,6 +9,13 @@ const serverURLNaked = 'http://localhost:1234/';
const publicServerURL = 'http://domain.com/parse';
const publicServerURLNaked = 'http://domain.com/';
const headers = {
'Content-Type': 'application/json',
'X-Parse-Application-Id': 'test',
'X-Parse-REST-API-Key': 'rest',
'X-Parse-Installation-Id': 'yolo',
};
describe('batch', () => {
it('should return the proper url', () => {
const internalURL = batch.makeBatchRoutingPathFunction(originalURL)(
@@ -59,4 +68,348 @@ describe('batch', () => {
expect(internalURL).toEqual('/classes/Object');
});
it('should handle a batch request without transaction', done => {
spyOn(databaseAdapter, 'createObject').and.callThrough();
request({
method: 'POST',
headers: headers,
url: 'http://localhost:8378/1/batch',
body: JSON.stringify({
requests: [
{
method: 'POST',
path: '/1/classes/MyObject',
body: { key: 'value1' },
},
{
method: 'POST',
path: '/1/classes/MyObject',
body: { key: 'value2' },
},
],
}),
}).then(response => {
expect(response.data.length).toEqual(2);
expect(response.data[0].success.objectId).toBeDefined();
expect(response.data[0].success.createdAt).toBeDefined();
expect(response.data[1].success.objectId).toBeDefined();
expect(response.data[1].success.createdAt).toBeDefined();
const query = new Parse.Query('MyObject');
query.find().then(results => {
expect(databaseAdapter.createObject.calls.count()).toBe(2);
expect(databaseAdapter.createObject.calls.argsFor(0)[3]).toEqual(null);
expect(databaseAdapter.createObject.calls.argsFor(1)[3]).toEqual(null);
expect(results.map(result => result.get('key')).sort()).toEqual([
'value1',
'value2',
]);
done();
});
});
});
it('should handle a batch request with transaction = false', done => {
spyOn(databaseAdapter, 'createObject').and.callThrough();
request({
method: 'POST',
headers: headers,
url: 'http://localhost:8378/1/batch',
body: JSON.stringify({
requests: [
{
method: 'POST',
path: '/1/classes/MyObject',
body: { key: 'value1' },
},
{
method: 'POST',
path: '/1/classes/MyObject',
body: { key: 'value2' },
},
],
transaction: false,
}),
}).then(response => {
expect(response.data.length).toEqual(2);
expect(response.data[0].success.objectId).toBeDefined();
expect(response.data[0].success.createdAt).toBeDefined();
expect(response.data[1].success.objectId).toBeDefined();
expect(response.data[1].success.createdAt).toBeDefined();
const query = new Parse.Query('MyObject');
query.find().then(results => {
expect(databaseAdapter.createObject.calls.count()).toBe(2);
expect(databaseAdapter.createObject.calls.argsFor(0)[3]).toEqual(null);
expect(databaseAdapter.createObject.calls.argsFor(1)[3]).toEqual(null);
expect(results.map(result => result.get('key')).sort()).toEqual([
'value1',
'value2',
]);
done();
});
});
});
if (
(process.env.MONGODB_VERSION === '4.0.4' &&
process.env.MONGODB_TOPOLOGY === 'replicaset' &&
process.env.MONGODB_STORAGE_ENGINE === 'wiredTiger') ||
process.env.PARSE_SERVER_TEST_DB === 'postgres'
) {
describe('transactions', () => {
beforeAll(async () => {
if (
process.env.MONGODB_VERSION === '4.0.4' &&
process.env.MONGODB_TOPOLOGY === 'replicaset' &&
process.env.MONGODB_STORAGE_ENGINE === 'wiredTiger'
) {
await reconfigureServer({
databaseAdapter: undefined,
databaseURI:
'mongodb://localhost:27017/parseServerMongoAdapterTestDatabase?replicaSet=replicaset',
});
}
});
beforeEach(async () => {
await TestUtils.destroyAllDataPermanently(true);
});
it('should handle a batch request with transaction = true', done => {
const myObject = new Parse.Object('MyObject'); // This is important because transaction only works on pre-existing collections
myObject
.save()
.then(() => {
return myObject.destroy();
})
.then(() => {
spyOn(databaseAdapter, 'createObject').and.callThrough();
request({
method: 'POST',
headers: headers,
url: 'http://localhost:8378/1/batch',
body: JSON.stringify({
requests: [
{
method: 'POST',
path: '/1/classes/MyObject',
body: { key: 'value1' },
},
{
method: 'POST',
path: '/1/classes/MyObject',
body: { key: 'value2' },
},
],
transaction: true,
}),
}).then(response => {
expect(response.data.length).toEqual(2);
expect(response.data[0].success.objectId).toBeDefined();
expect(response.data[0].success.createdAt).toBeDefined();
expect(response.data[1].success.objectId).toBeDefined();
expect(response.data[1].success.createdAt).toBeDefined();
const query = new Parse.Query('MyObject');
query.find().then(results => {
expect(databaseAdapter.createObject.calls.count()).toBe(2);
expect(databaseAdapter.createObject.calls.argsFor(0)[3]).toBe(
databaseAdapter.createObject.calls.argsFor(1)[3]
);
expect(results.map(result => result.get('key')).sort()).toEqual(
['value1', 'value2']
);
done();
});
});
});
});
it('should not save anything when one operation fails in a transaction', done => {
const myObject = new Parse.Object('MyObject'); // This is important because transaction only works on pre-existing collections
myObject
.save()
.then(() => {
return myObject.destroy();
})
.then(() => {
request({
method: 'POST',
headers: headers,
url: 'http://localhost:8378/1/batch',
body: JSON.stringify({
requests: [
{
method: 'POST',
path: '/1/classes/MyObject',
body: { key: 'value1' },
},
{
method: 'POST',
path: '/1/classes/MyObject',
body: { key: 10 },
},
],
transaction: true,
}),
}).catch(error => {
expect(error.data).toBeDefined();
const query = new Parse.Query('MyObject');
query.find().then(results => {
expect(results.length).toBe(0);
done();
});
});
});
});
it('should generate separate session for each call', async () => {
const myObject = new Parse.Object('MyObject'); // This is important because transaction only works on pre-existing collections
await myObject.save();
await myObject.destroy();
const myObject2 = new Parse.Object('MyObject2'); // This is important because transaction only works on pre-existing collections
await myObject2.save();
await myObject2.destroy();
spyOn(databaseAdapter, 'createObject').and.callThrough();
let myObjectCalls = 0;
Parse.Cloud.beforeSave('MyObject', async () => {
myObjectCalls++;
if (myObjectCalls === 2) {
try {
await request({
method: 'POST',
headers: headers,
url: 'http://localhost:8378/1/batch',
body: JSON.stringify({
requests: [
{
method: 'POST',
path: '/1/classes/MyObject2',
body: { key: 'value1' },
},
{
method: 'POST',
path: '/1/classes/MyObject2',
body: { key: 10 },
},
],
transaction: true,
}),
});
fail('should fail');
} catch (e) {
expect(e).toBeDefined();
}
}
});
const response = await request({
method: 'POST',
headers: headers,
url: 'http://localhost:8378/1/batch',
body: JSON.stringify({
requests: [
{
method: 'POST',
path: '/1/classes/MyObject',
body: { key: 'value1' },
},
{
method: 'POST',
path: '/1/classes/MyObject',
body: { key: 'value2' },
},
],
transaction: true,
}),
});
expect(response.data.length).toEqual(2);
expect(response.data[0].success.objectId).toBeDefined();
expect(response.data[0].success.createdAt).toBeDefined();
expect(response.data[1].success.objectId).toBeDefined();
expect(response.data[1].success.createdAt).toBeDefined();
await request({
method: 'POST',
headers: headers,
url: 'http://localhost:8378/1/batch',
body: JSON.stringify({
requests: [
{
method: 'POST',
path: '/1/classes/MyObject3',
body: { key: 'value1' },
},
{
method: 'POST',
path: '/1/classes/MyObject3',
body: { key: 'value2' },
},
],
}),
});
const query = new Parse.Query('MyObject');
const results = await query.find();
expect(results.map(result => result.get('key')).sort()).toEqual([
'value1',
'value2',
]);
const query2 = new Parse.Query('MyObject2');
const results2 = await query2.find();
expect(results2.length).toEqual(0);
const query3 = new Parse.Query('MyObject3');
const results3 = await query3.find();
expect(results3.map(result => result.get('key')).sort()).toEqual([
'value1',
'value2',
]);
expect(databaseAdapter.createObject.calls.count()).toBe(5);
let transactionalSession;
let transactionalSession2;
let myObjectDBCalls = 0;
let myObject2DBCalls = 0;
let myObject3DBCalls = 0;
for (let i = 0; i < 5; i++) {
const args = databaseAdapter.createObject.calls.argsFor(i);
switch (args[0]) {
case 'MyObject':
myObjectDBCalls++;
if (!transactionalSession) {
transactionalSession = args[3];
} else {
expect(transactionalSession).toBe(args[3]);
}
if (transactionalSession2) {
expect(transactionalSession2).not.toBe(args[3]);
}
break;
case 'MyObject2':
myObject2DBCalls++;
transactionalSession2 = args[3];
if (transactionalSession) {
expect(transactionalSession).not.toBe(args[3]);
}
break;
case 'MyObject3':
myObject3DBCalls++;
expect(args[3]).toEqual(null);
break;
}
}
expect(myObjectDBCalls).toEqual(2);
expect(myObject2DBCalls).toEqual(1);
expect(myObject3DBCalls).toEqual(2);
});
});
}
});

View File

@@ -40,20 +40,12 @@ const postgresURI =
let databaseAdapter;
// need to bind for mocking mocha
let startDB = () => {};
let stopDB = () => {};
if (process.env.PARSE_SERVER_TEST_DB === 'postgres') {
databaseAdapter = new PostgresStorageAdapter({
uri: process.env.PARSE_SERVER_TEST_DATABASE_URI || postgresURI,
collectionPrefix: 'test_',
});
} else {
startDB = require('mongodb-runner/mocha/before').bind({
timeout: () => {},
slow: () => {},
});
stopDB = require('mongodb-runner/mocha/after');
databaseAdapter = new MongoStorageAdapter({
uri: mongoURI,
collectionPrefix: 'test_',
@@ -177,11 +169,6 @@ const reconfigureServer = changedConfiguration => {
const Parse = require('parse/node');
Parse.serverURL = 'http://localhost:' + port + '/1';
// 10 minutes timeout
beforeAll(startDB, 10 * 60 * 1000);
afterAll(stopDB);
beforeEach(done => {
try {
Parse.User.enableUnsafeCurrentUser();
@@ -417,6 +404,7 @@ global.reconfigureServer = reconfigureServer;
global.defaultConfiguration = defaultConfiguration;
global.mockCustomAuthenticator = mockCustomAuthenticator;
global.mockFacebookAuthenticator = mockFacebookAuthenticator;
global.databaseAdapter = databaseAdapter;
global.jfail = function(err) {
fail(JSON.stringify(err));
};

View File

@@ -111,27 +111,30 @@ export default class MongoCollection {
.toArray();
}
insertOne(object) {
return this._mongoCollection.insertOne(object);
insertOne(object, session) {
return this._mongoCollection.insertOne(object, { session });
}
// Atomically updates data in the database for a single (first) object that matched the query
// If there is nothing that matches the query - does insert
// Postgres Note: `INSERT ... ON CONFLICT UPDATE` that is available since 9.5.
upsertOne(query, update) {
return this._mongoCollection.updateOne(query, update, { upsert: true });
upsertOne(query, update, session) {
return this._mongoCollection.updateOne(query, update, {
upsert: true,
session,
});
}
updateOne(query, update) {
return this._mongoCollection.updateOne(query, update);
}
updateMany(query, update) {
return this._mongoCollection.updateMany(query, update);
updateMany(query, update, session) {
return this._mongoCollection.updateMany(query, update, { session });
}
deleteMany(query) {
return this._mongoCollection.deleteMany(query);
deleteMany(query, session) {
return this._mongoCollection.deleteMany(query, { session });
}
_ensureSparseUniqueIndexInBackground(indexRequest) {

View File

@@ -472,7 +472,12 @@ export class MongoStorageAdapter implements StorageAdapter {
// TODO: As yet not particularly well specified. Creates an object. Maybe shouldn't even need the schema,
// and should infer from the type. Or maybe does need the schema for validations. Or maybe needs
// the schema only for the legacy mongo format. We'll figure that out later.
createObject(className: string, schema: SchemaType, object: any) {
createObject(
className: string,
schema: SchemaType,
object: any,
transactionalSession: ?any
) {
schema = convertParseSchemaToMongoSchema(schema);
const mongoObject = parseObjectToMongoObjectForCreate(
className,
@@ -480,7 +485,9 @@ export class MongoStorageAdapter implements StorageAdapter {
schema
);
return this._adaptiveCollection(className)
.then(collection => collection.insertOne(mongoObject))
.then(collection =>
collection.insertOne(mongoObject, transactionalSession)
)
.catch(error => {
if (error.code === 11000) {
// Duplicate value
@@ -510,13 +517,14 @@ export class MongoStorageAdapter implements StorageAdapter {
deleteObjectsByQuery(
className: string,
schema: SchemaType,
query: QueryType
query: QueryType,
transactionalSession: ?any
) {
schema = convertParseSchemaToMongoSchema(schema);
return this._adaptiveCollection(className)
.then(collection => {
const mongoWhere = transformWhere(className, query, schema);
return collection.deleteMany(mongoWhere);
return collection.deleteMany(mongoWhere, transactionalSession);
})
.catch(err => this.handleError(err))
.then(
@@ -543,13 +551,16 @@ export class MongoStorageAdapter implements StorageAdapter {
className: string,
schema: SchemaType,
query: QueryType,
update: any
update: any,
transactionalSession: ?any
) {
schema = convertParseSchemaToMongoSchema(schema);
const mongoUpdate = transformUpdate(className, update, schema);
const mongoWhere = transformWhere(className, query, schema);
return this._adaptiveCollection(className)
.then(collection => collection.updateMany(mongoWhere, mongoUpdate))
.then(collection =>
collection.updateMany(mongoWhere, mongoUpdate, transactionalSession)
)
.catch(err => this.handleError(err));
}
@@ -559,7 +570,8 @@ export class MongoStorageAdapter implements StorageAdapter {
className: string,
schema: SchemaType,
query: QueryType,
update: any
update: any,
transactionalSession: ?any
) {
schema = convertParseSchemaToMongoSchema(schema);
const mongoUpdate = transformUpdate(className, update, schema);
@@ -568,6 +580,7 @@ export class MongoStorageAdapter implements StorageAdapter {
.then(collection =>
collection._mongoCollection.findOneAndUpdate(mongoWhere, mongoUpdate, {
returnOriginal: false,
session: transactionalSession || undefined,
})
)
.then(result => mongoObjectToParseObject(className, result.value, schema))
@@ -588,13 +601,16 @@ export class MongoStorageAdapter implements StorageAdapter {
className: string,
schema: SchemaType,
query: QueryType,
update: any
update: any,
transactionalSession: ?any
) {
schema = convertParseSchemaToMongoSchema(schema);
const mongoUpdate = transformUpdate(className, update, schema);
const mongoWhere = transformWhere(className, query, schema);
return this._adaptiveCollection(className)
.then(collection => collection.upsertOne(mongoWhere, mongoUpdate))
.then(collection =>
collection.upsertOne(mongoWhere, mongoUpdate, transactionalSession)
)
.catch(err => this.handleError(err));
}
@@ -1059,6 +1075,24 @@ export class MongoStorageAdapter implements StorageAdapter {
})
.catch(err => this.handleError(err));
}
createTransactionalSession(): Promise<any> {
const transactionalSection = this.client.startSession();
transactionalSection.startTransaction();
return Promise.resolve(transactionalSection);
}
commitTransactionalSession(transactionalSection: any): Promise<void> {
return transactionalSection.commitTransaction().then(() => {
transactionalSection.endSession();
});
}
abortTransactionalSession(transactionalSection: any): Promise<void> {
return transactionalSection.abortTransaction().then(() => {
transactionalSection.endSession();
});
}
}
export default MongoStorageAdapter;

View File

@@ -1223,7 +1223,12 @@ export class PostgresStorageAdapter implements StorageAdapter {
}
// TODO: remove the mongo format dependency in the return value
createObject(className: string, schema: SchemaType, object: any) {
createObject(
className: string,
schema: SchemaType,
object: any,
transactionalSession: ?any
) {
debug('createObject', className, object);
let columnsArray = [];
const valuesArray = [];
@@ -1351,7 +1356,10 @@ export class PostgresStorageAdapter implements StorageAdapter {
const qs = `INSERT INTO $1:name (${columnsPattern}) VALUES (${valuesPattern})`;
const values = [className, ...columnsArray, ...valuesArray];
debug(qs, values);
return this._client
const promise = (transactionalSession
? transactionalSession.t
: this._client
)
.none(qs, values)
.then(() => ({ ops: [object] }))
.catch(error => {
@@ -1371,6 +1379,10 @@ export class PostgresStorageAdapter implements StorageAdapter {
}
throw error;
});
if (transactionalSession) {
transactionalSession.batch.push(promise);
}
return promise;
}
// Remove all objects that match the given Parse Query.
@@ -1379,7 +1391,8 @@ export class PostgresStorageAdapter implements StorageAdapter {
deleteObjectsByQuery(
className: string,
schema: SchemaType,
query: QueryType
query: QueryType,
transactionalSession: ?any
) {
debug('deleteObjectsByQuery', className, query);
const values = [className];
@@ -1391,7 +1404,10 @@ export class PostgresStorageAdapter implements StorageAdapter {
}
const qs = `WITH deleted AS (DELETE FROM $1:name WHERE ${where.pattern} RETURNING *) SELECT count(*) FROM deleted`;
debug(qs, values);
return this._client
const promise = (transactionalSession
? transactionalSession.t
: this._client
)
.one(qs, values, a => +a.count)
.then(count => {
if (count === 0) {
@@ -1409,18 +1425,27 @@ export class PostgresStorageAdapter implements StorageAdapter {
}
// ELSE: Don't delete anything if doesn't exist
});
if (transactionalSession) {
transactionalSession.batch.push(promise);
}
return promise;
}
// Return value not currently well specified.
findOneAndUpdate(
className: string,
schema: SchemaType,
query: QueryType,
update: any
update: any,
transactionalSession: ?any
): Promise<any> {
debug('findOneAndUpdate', className, query, update);
return this.updateObjectsByQuery(className, schema, query, update).then(
val => val[0]
);
return this.updateObjectsByQuery(
className,
schema,
query,
update,
transactionalSession
).then(val => val[0]);
}
// Apply the update to all objects that match the given Parse Query.
@@ -1428,7 +1453,8 @@ export class PostgresStorageAdapter implements StorageAdapter {
className: string,
schema: SchemaType,
query: QueryType,
update: any
update: any,
transactionalSession: ?any
): Promise<[any]> {
debug('updateObjectsByQuery', className, query, update);
const updatePatterns = [];
@@ -1685,7 +1711,14 @@ export class PostgresStorageAdapter implements StorageAdapter {
where.pattern.length > 0 ? `WHERE ${where.pattern}` : '';
const qs = `UPDATE $1:name SET ${updatePatterns.join()} ${whereClause} RETURNING *`;
debug('update: ', qs, values);
return this._client.any(qs, values);
const promise = (transactionalSession
? transactionalSession.t
: this._client
).any(qs, values);
if (transactionalSession) {
transactionalSession.batch.push(promise);
}
return promise;
}
// Hopefully, we can get rid of this. It's only used for config and hooks.
@@ -1693,16 +1726,28 @@ export class PostgresStorageAdapter implements StorageAdapter {
className: string,
schema: SchemaType,
query: QueryType,
update: any
update: any,
transactionalSession: ?any
) {
debug('upsertOneObject', { className, query, update });
const createValue = Object.assign({}, query, update);
return this.createObject(className, schema, createValue).catch(error => {
return this.createObject(
className,
schema,
createValue,
transactionalSession
).catch(error => {
// ignore duplicate value errors as it's upsert
if (error.code !== Parse.Error.DUPLICATE_VALUE) {
throw error;
}
return this.findOneAndUpdate(className, schema, query, update);
return this.findOneAndUpdate(
className,
schema,
query,
update,
transactionalSession
);
});
}
@@ -2323,6 +2368,37 @@ export class PostgresStorageAdapter implements StorageAdapter {
updateEstimatedCount(className: string) {
return this._client.none('ANALYZE $1:name', [className]);
}
createTransactionalSession(): Promise<any> {
return new Promise(resolve => {
const transactionalSession = {};
transactionalSession.result = this._client.tx(t => {
transactionalSession.t = t;
transactionalSession.promise = new Promise(resolve => {
transactionalSession.resolve = resolve;
});
transactionalSession.batch = [];
resolve(transactionalSession);
return transactionalSession.promise;
});
});
}
commitTransactionalSession(transactionalSession: any): Promise<void> {
transactionalSession.resolve(
transactionalSession.t.batch(transactionalSession.batch)
);
return transactionalSession.result;
}
abortTransactionalSession(transactionalSession: any): Promise<void> {
const result = transactionalSession.result.catch();
transactionalSession.batch.push(Promise.reject());
transactionalSession.resolve(
transactionalSession.t.batch(transactionalSession.batch)
);
return result;
}
}
function convertPolygonToSQL(polygon) {

View File

@@ -46,30 +46,35 @@ export interface StorageAdapter {
createObject(
className: string,
schema: SchemaType,
object: any
object: any,
transactionalSession: ?any
): Promise<any>;
deleteObjectsByQuery(
className: string,
schema: SchemaType,
query: QueryType
query: QueryType,
transactionalSession: ?any
): Promise<void>;
updateObjectsByQuery(
className: string,
schema: SchemaType,
query: QueryType,
update: any
update: any,
transactionalSession: ?any
): Promise<[any]>;
findOneAndUpdate(
className: string,
schema: SchemaType,
query: QueryType,
update: any
update: any,
transactionalSession: ?any
): Promise<any>;
upsertOneObject(
className: string,
schema: SchemaType,
query: QueryType,
update: any
update: any,
transactionalSession: ?any
): Promise<any>;
find(
className: string,
@@ -114,4 +119,7 @@ export interface StorageAdapter {
fields: any,
conn: ?any
): Promise<void>;
createTransactionalSession(): Promise<any>;
commitTransactionalSession(transactionalSession: any): Promise<void>;
abortTransactionalSession(transactionalSession: any): Promise<void>;
}

View File

@@ -411,6 +411,7 @@ class DatabaseController {
schemaCache: any;
schemaPromise: ?Promise<SchemaController.SchemaController>;
skipMongoDBServer13732Workaround: boolean;
_transactionalSession: ?any;
constructor(
adapter: StorageAdapter,
@@ -424,6 +425,7 @@ class DatabaseController {
// it. Instead, use loadSchema to get a schema.
this.schemaPromise = null;
this.skipMongoDBServer13732Workaround = skipMongoDBServer13732Workaround;
this._transactionalSession = null;
}
collectionExists(className: string): Promise<boolean> {
@@ -624,21 +626,24 @@ class DatabaseController {
className,
schema,
query,
update
update,
this._transactionalSession
);
} else if (upsert) {
return this.adapter.upsertOneObject(
className,
schema,
query,
update
update,
this._transactionalSession
);
} else {
return this.adapter.findOneAndUpdate(
className,
schema,
query,
update
update,
this._transactionalSession
);
}
});
@@ -760,7 +765,8 @@ class DatabaseController {
`_Join:${key}:${fromClassName}`,
relationSchema,
doc,
doc
doc,
this._transactionalSession
);
}
@@ -781,7 +787,8 @@ class DatabaseController {
.deleteObjectsByQuery(
`_Join:${key}:${fromClassName}`,
relationSchema,
doc
doc,
this._transactionalSession
)
.catch(error => {
// We don't care if they try to delete a non-existent relation.
@@ -848,7 +855,8 @@ class DatabaseController {
this.adapter.deleteObjectsByQuery(
className,
parseFormatSchema,
query
query,
this._transactionalSession
)
)
.catch(error => {
@@ -908,7 +916,8 @@ class DatabaseController {
return this.adapter.createObject(
className,
SchemaController.convertSchemaToAdapterSchema(schema),
object
object,
this._transactionalSession
);
})
.then(result => {
@@ -1531,6 +1540,36 @@ class DatabaseController {
return protectedKeys;
}
createTransactionalSession() {
return this.adapter
.createTransactionalSession()
.then(transactionalSession => {
this._transactionalSession = transactionalSession;
});
}
commitTransactionalSession() {
if (!this._transactionalSession) {
throw new Error('There is no transactional session to commit');
}
return this.adapter
.commitTransactionalSession(this._transactionalSession)
.then(() => {
this._transactionalSession = null;
});
}
abortTransactionalSession() {
if (!this._transactionalSession) {
throw new Error('There is no transactional session to abort');
}
return this.adapter
.abortTransactionalSession(this._transactionalSession)
.then(() => {
this._transactionalSession = null;
});
}
// TODO: create indexes on first creation of a _User object. Otherwise it's impossible to
// have a Parse app without it having a _User collection.
performInitialization() {

View File

@@ -33,11 +33,13 @@ function getAuth(options = {}, config) {
}
function ParseServerRESTController(applicationId, router) {
function handleRequest(method, path, data = {}, options = {}) {
function handleRequest(method, path, data = {}, options = {}, config) {
// Store the arguments, for later use if internal fails
const args = arguments;
const config = Config.get(applicationId);
if (!config) {
config = Config.get(applicationId);
}
const serverURL = URL.parse(config.serverURL);
if (path.indexOf(serverURL.path) === 0) {
path = path.slice(serverURL.path.length, path.length);
@@ -48,24 +50,52 @@ function ParseServerRESTController(applicationId, router) {
}
if (path === '/batch') {
const promises = data.requests.map(request => {
return handleRequest(
request.method,
request.path,
request.body,
options
).then(
response => {
return Promise.resolve({ success: response });
},
error => {
return Promise.resolve({
error: { code: error.code, error: error.message },
});
}
);
let initialPromise = Promise.resolve();
if (data.transaction === true) {
initialPromise = config.database.createTransactionalSession();
}
return initialPromise.then(() => {
const promises = data.requests.map(request => {
return handleRequest(
request.method,
request.path,
request.body,
options,
config
).then(
response => {
return Promise.resolve({ success: response });
},
error => {
if (data.transaction === true) {
return Promise.reject(error);
}
return Promise.resolve({
error: { code: error.code, error: error.message },
});
}
);
});
return Promise.all(promises)
.catch(error => {
if (data.transaction === true) {
return config.database.abortTransactionalSession().then(() => {
throw error;
});
} else {
throw error;
}
})
.then(result => {
if (data.transaction === true) {
return config.database.commitTransactionalSession().then(() => {
return result;
});
} else {
return result;
}
});
});
return Promise.all(promises);
}
let query;

View File

@@ -83,30 +83,57 @@ function handleBatch(router, req) {
req.config.publicServerURL
);
const promises = req.body.requests.map(restRequest => {
const routablePath = makeRoutablePath(restRequest.path);
// Construct a request that we can send to a handler
const request = {
body: restRequest.body,
config: req.config,
auth: req.auth,
info: req.info,
};
let initialPromise = Promise.resolve();
if (req.body.transaction === true) {
initialPromise = req.config.database.createTransactionalSession();
}
return router
.tryRouteRequest(restRequest.method, routablePath, request)
.then(
response => {
return { success: response.response };
},
error => {
return { error: { code: error.code, error: error.message } };
return initialPromise.then(() => {
const promises = req.body.requests.map(restRequest => {
const routablePath = makeRoutablePath(restRequest.path);
// Construct a request that we can send to a handler
const request = {
body: restRequest.body,
config: req.config,
auth: req.auth,
info: req.info,
};
return router
.tryRouteRequest(restRequest.method, routablePath, request)
.then(
response => {
return { success: response.response };
},
error => {
if (req.body.transaction === true) {
return Promise.reject(error);
}
return { error: { code: error.code, error: error.message } };
}
);
});
return Promise.all(promises)
.catch(error => {
if (req.body.transaction === true) {
return req.config.database.abortTransactionalSession().then(() => {
throw error;
});
} else {
throw error;
}
);
});
return Promise.all(promises).then(results => {
return { response: results };
})
.then(results => {
if (req.body.transaction === true) {
return req.config.database.commitTransactionalSession().then(() => {
return { response: results };
});
} else {
return { response: results };
}
});
});
}