diff --git a/.travis.yml b/.travis.yml index d44f41c8e1..56ec8cc958 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,7 +1,6 @@ language: node_js dist: trusty services: -- mongodb - postgresql - redis-server - docker @@ -19,13 +18,12 @@ branches: cache: directories: - "$HOME/.npm" - - "$HOME/.mongodb/versions" stage: test env: global: - COVERAGE_OPTION='./node_modules/.bin/nyc' matrix: - - MONGODB_VERSION=4.0.4 + - MONGODB_VERSION=4.0.4 MONGODB_TOPOLOGY=replicaset MONGODB_STORAGE_ENGINE=wiredTiger - MONGODB_VERSION=3.6.9 - PARSE_SERVER_TEST_DB=postgres - PARSE_SERVER_TEST_CACHE=redis @@ -42,7 +40,6 @@ before_script: - psql -c 'create database parse_server_postgres_adapter_test_database;' -U postgres - psql -c 'CREATE EXTENSION postgis;' -U postgres -d parse_server_postgres_adapter_test_database - psql -c 'CREATE EXTENSION postgis_topology;' -U postgres -d parse_server_postgres_adapter_test_database -- silent=1 mongodb-runner --start - greenkeeper-lockfile-update script: - npm run lint diff --git a/package.json b/package.json index cce983c1cd..dfcdb33002 100644 --- a/package.json +++ b/package.json @@ -95,8 +95,10 @@ "lint": "flow && eslint --cache ./", "build": "babel src/ -d lib/ --copy-files", "watch": "babel --watch src/ -d lib/ --copy-files", - "test": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=4.0.4} MONGODB_STORAGE_ENGINE=mmapv1 TESTING=1 jasmine", - "coverage": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=4.0.4} MONGODB_STORAGE_ENGINE=mmapv1 TESTING=1 nyc jasmine", + "pretest": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=4.0.4} MONGODB_TOPOLOGY=${MONGODB_TOPOLOGY:=standalone} MONGODB_STORAGE_ENGINE=${MONGODB_STORAGE_ENGINE:=mmapv1} mongodb-runner start", + "test": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=4.0.4} MONGODB_TOPOLOGY=${MONGODB_TOPOLOGY:=standalone} MONGODB_STORAGE_ENGINE=${MONGODB_STORAGE_ENGINE:=mmapv1} TESTING=1 jasmine", + "posttest": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=4.0.4} MONGODB_TOPOLOGY=${MONGODB_TOPOLOGY:=standalone} MONGODB_STORAGE_ENGINE=${MONGODB_STORAGE_ENGINE:=mmapv1} mongodb-runner stop", + "coverage": "npm run pretest && cross-env MONGODB_VERSION=${MONGODB_VERSION:=4.0.4} MONGODB_TOPOLOGY=${MONGODB_TOPOLOGY:=standalone} MONGODB_STORAGE_ENGINE=${MONGODB_STORAGE_ENGINE:=mmapv1} TESTING=1 nyc jasmine && npm run posttest", "start": "node ./bin/parse-server", "prepare": "npm run build", "postinstall": "node -p 'require(\"./postinstall.js\")()'" diff --git a/spec/.eslintrc.json b/spec/.eslintrc.json index e11949b773..7031e96d6f 100644 --- a/spec/.eslintrc.json +++ b/spec/.eslintrc.json @@ -25,7 +25,8 @@ "jequal": true, "create": true, "arrayContains": true, - "expectAsync": true + "expectAsync": true, + "databaseAdapter": true }, "rules": { "no-console": [0], diff --git a/spec/GridFSBucketStorageAdapter.spec.js b/spec/GridFSBucketStorageAdapter.spec.js index 7baae8d03a..6ed4a92ed0 100644 --- a/spec/GridFSBucketStorageAdapter.spec.js +++ b/spec/GridFSBucketStorageAdapter.spec.js @@ -18,7 +18,7 @@ describe('GridFSBucket and GridStore interop', () => { beforeEach(async () => { const gsAdapter = new GridStoreAdapter(databaseURI); const db = await gsAdapter._connect(); - db.dropDatabase(); + await db.dropDatabase(); }); it('a file created in GridStore should be available in GridFS', async () => { diff --git a/spec/GridStoreAdapter.spec.js b/spec/GridStoreAdapter.spec.js index 2565f7bee0..1b6d61256c 100644 --- a/spec/GridStoreAdapter.spec.js +++ b/spec/GridStoreAdapter.spec.js @@ -13,7 +13,7 @@ describe_only_db('mongo')('GridStoreAdapter', () => 
{ const config = Config.get(Parse.applicationId); const gridStoreAdapter = new GridStoreAdapter(databaseURI); const db = await gridStoreAdapter._connect(); - db.dropDatabase(); + await db.dropDatabase(); const filesController = new FilesController( gridStoreAdapter, Parse.applicationId, diff --git a/spec/ParseServerRESTController.spec.js b/spec/ParseServerRESTController.spec.js index 6fa128ec4c..11c23bdd71 100644 --- a/spec/ParseServerRESTController.spec.js +++ b/spec/ParseServerRESTController.spec.js @@ -2,6 +2,7 @@ const ParseServerRESTController = require('../lib/ParseServerRESTController') .ParseServerRESTController; const ParseServer = require('../lib/ParseServer').default; const Parse = require('parse/node').Parse; +const TestUtils = require('../lib/TestUtils'); let RESTController; @@ -40,7 +41,7 @@ describe('ParseServerRESTController', () => { ); }); - it('should handle a POST batch', done => { + it('should handle a POST batch without transaction', done => { RESTController.request('POST', 'batch', { requests: [ { @@ -69,6 +70,272 @@ describe('ParseServerRESTController', () => { ); }); + it('should handle a POST batch with transaction=false', done => { + RESTController.request('POST', 'batch', { + requests: [ + { + method: 'GET', + path: '/classes/MyObject', + }, + { + method: 'POST', + path: '/classes/MyObject', + body: { key: 'value' }, + }, + { + method: 'GET', + path: '/classes/MyObject', + }, + ], + transaction: false, + }).then( + res => { + expect(res.length).toBe(3); + done(); + }, + err => { + jfail(err); + done(); + } + ); + }); + + if ( + (process.env.MONGODB_VERSION === '4.0.4' && + process.env.MONGODB_TOPOLOGY === 'replicaset' && + process.env.MONGODB_STORAGE_ENGINE === 'wiredTiger') || + process.env.PARSE_SERVER_TEST_DB === 'postgres' + ) { + describe('transactions', () => { + beforeAll(async () => { + if ( + process.env.MONGODB_VERSION === '4.0.4' && + process.env.MONGODB_TOPOLOGY === 'replicaset' && + process.env.MONGODB_STORAGE_ENGINE === 'wiredTiger' + ) { + await reconfigureServer({ + databaseAdapter: undefined, + databaseURI: + 'mongodb://localhost:27017/parseServerMongoAdapterTestDatabase?replicaSet=replicaset', + }); + } + }); + + beforeEach(async () => { + await TestUtils.destroyAllDataPermanently(true); + }); + + it('should handle a batch request with transaction = true', done => { + const myObject = new Parse.Object('MyObject'); // This is important because transaction only works on pre-existing collections + myObject + .save() + .then(() => { + return myObject.destroy(); + }) + .then(() => { + spyOn(databaseAdapter, 'createObject').and.callThrough(); + + RESTController.request('POST', 'batch', { + requests: [ + { + method: 'POST', + path: '/1/classes/MyObject', + body: { key: 'value1' }, + }, + { + method: 'POST', + path: '/1/classes/MyObject', + body: { key: 'value2' }, + }, + ], + transaction: true, + }).then(response => { + expect(response.length).toEqual(2); + expect(response[0].success.objectId).toBeDefined(); + expect(response[0].success.createdAt).toBeDefined(); + expect(response[1].success.objectId).toBeDefined(); + expect(response[1].success.createdAt).toBeDefined(); + const query = new Parse.Query('MyObject'); + query.find().then(results => { + expect(databaseAdapter.createObject.calls.count()).toBe(2); + expect(databaseAdapter.createObject.calls.argsFor(0)[3]).toBe( + databaseAdapter.createObject.calls.argsFor(1)[3] + ); + expect(results.map(result => result.get('key')).sort()).toEqual( + ['value1', 'value2'] + ); + done(); + }); + }); + }); + }); 
+ + it('should not save anything when one operation fails in a transaction', done => { + const myObject = new Parse.Object('MyObject'); // This is important because transaction only works on pre-existing collections + myObject + .save() + .then(() => { + return myObject.destroy(); + }) + .then(() => { + RESTController.request('POST', 'batch', { + requests: [ + { + method: 'POST', + path: '/1/classes/MyObject', + body: { key: 'value1' }, + }, + { + method: 'POST', + path: '/1/classes/MyObject', + body: { key: 10 }, + }, + ], + transaction: true, + }).catch(error => { + expect(error.message).toBeDefined(); + const query = new Parse.Query('MyObject'); + query.find().then(results => { + expect(results.length).toBe(0); + done(); + }); + }); + }); + }); + + it('should generate separate session for each call', async () => { + const myObject = new Parse.Object('MyObject'); // This is important because transaction only works on pre-existing collections + await myObject.save(); + await myObject.destroy(); + + const myObject2 = new Parse.Object('MyObject2'); // This is important because transaction only works on pre-existing collections + await myObject2.save(); + await myObject2.destroy(); + + spyOn(databaseAdapter, 'createObject').and.callThrough(); + + let myObjectCalls = 0; + Parse.Cloud.beforeSave('MyObject', async () => { + myObjectCalls++; + if (myObjectCalls === 2) { + try { + await RESTController.request('POST', 'batch', { + requests: [ + { + method: 'POST', + path: '/1/classes/MyObject2', + body: { key: 'value1' }, + }, + { + method: 'POST', + path: '/1/classes/MyObject2', + body: { key: 10 }, + }, + ], + transaction: true, + }); + fail('should fail'); + } catch (e) { + expect(e).toBeDefined(); + } + } + }); + + const response = await RESTController.request('POST', 'batch', { + requests: [ + { + method: 'POST', + path: '/1/classes/MyObject', + body: { key: 'value1' }, + }, + { + method: 'POST', + path: '/1/classes/MyObject', + body: { key: 'value2' }, + }, + ], + transaction: true, + }); + + expect(response.length).toEqual(2); + expect(response[0].success.objectId).toBeDefined(); + expect(response[0].success.createdAt).toBeDefined(); + expect(response[1].success.objectId).toBeDefined(); + expect(response[1].success.createdAt).toBeDefined(); + + await RESTController.request('POST', 'batch', { + requests: [ + { + method: 'POST', + path: '/1/classes/MyObject3', + body: { key: 'value1' }, + }, + { + method: 'POST', + path: '/1/classes/MyObject3', + body: { key: 'value2' }, + }, + ], + }); + + const query = new Parse.Query('MyObject'); + const results = await query.find(); + expect(results.map(result => result.get('key')).sort()).toEqual([ + 'value1', + 'value2', + ]); + + const query2 = new Parse.Query('MyObject2'); + const results2 = await query2.find(); + expect(results2.length).toEqual(0); + + const query3 = new Parse.Query('MyObject3'); + const results3 = await query3.find(); + expect(results3.map(result => result.get('key')).sort()).toEqual([ + 'value1', + 'value2', + ]); + + expect(databaseAdapter.createObject.calls.count()).toBe(5); + let transactionalSession; + let transactionalSession2; + let myObjectDBCalls = 0; + let myObject2DBCalls = 0; + let myObject3DBCalls = 0; + for (let i = 0; i < 5; i++) { + const args = databaseAdapter.createObject.calls.argsFor(i); + switch (args[0]) { + case 'MyObject': + myObjectDBCalls++; + if (!transactionalSession) { + transactionalSession = args[3]; + } else { + expect(transactionalSession).toBe(args[3]); + } + if (transactionalSession2) { + 
expect(transactionalSession2).not.toBe(args[3]); + } + break; + case 'MyObject2': + myObject2DBCalls++; + transactionalSession2 = args[3]; + if (transactionalSession) { + expect(transactionalSession).not.toBe(args[3]); + } + break; + case 'MyObject3': + myObject3DBCalls++; + expect(args[3]).toEqual(null); + break; + } + } + expect(myObjectDBCalls).toEqual(2); + expect(myObject2DBCalls).toEqual(1); + expect(myObject3DBCalls).toEqual(2); + }); + }); + } + it('should handle a POST request', done => { RESTController.request('POST', '/classes/MyObject', { key: 'value' }) .then(() => { diff --git a/spec/batch.spec.js b/spec/batch.spec.js index 5f002d4ca1..68ee0c01a2 100644 --- a/spec/batch.spec.js +++ b/spec/batch.spec.js @@ -1,4 +1,6 @@ const batch = require('../lib/batch'); +const request = require('../lib/request'); +const TestUtils = require('../lib/TestUtils'); const originalURL = '/parse/batch'; const serverURL = 'http://localhost:1234/parse'; @@ -7,6 +9,13 @@ const serverURLNaked = 'http://localhost:1234/'; const publicServerURL = 'http://domain.com/parse'; const publicServerURLNaked = 'http://domain.com/'; +const headers = { + 'Content-Type': 'application/json', + 'X-Parse-Application-Id': 'test', + 'X-Parse-REST-API-Key': 'rest', + 'X-Parse-Installation-Id': 'yolo', +}; + describe('batch', () => { it('should return the proper url', () => { const internalURL = batch.makeBatchRoutingPathFunction(originalURL)( @@ -59,4 +68,348 @@ describe('batch', () => { expect(internalURL).toEqual('/classes/Object'); }); + + it('should handle a batch request without transaction', done => { + spyOn(databaseAdapter, 'createObject').and.callThrough(); + + request({ + method: 'POST', + headers: headers, + url: 'http://localhost:8378/1/batch', + body: JSON.stringify({ + requests: [ + { + method: 'POST', + path: '/1/classes/MyObject', + body: { key: 'value1' }, + }, + { + method: 'POST', + path: '/1/classes/MyObject', + body: { key: 'value2' }, + }, + ], + }), + }).then(response => { + expect(response.data.length).toEqual(2); + expect(response.data[0].success.objectId).toBeDefined(); + expect(response.data[0].success.createdAt).toBeDefined(); + expect(response.data[1].success.objectId).toBeDefined(); + expect(response.data[1].success.createdAt).toBeDefined(); + const query = new Parse.Query('MyObject'); + query.find().then(results => { + expect(databaseAdapter.createObject.calls.count()).toBe(2); + expect(databaseAdapter.createObject.calls.argsFor(0)[3]).toEqual(null); + expect(databaseAdapter.createObject.calls.argsFor(1)[3]).toEqual(null); + expect(results.map(result => result.get('key')).sort()).toEqual([ + 'value1', + 'value2', + ]); + done(); + }); + }); + }); + + it('should handle a batch request with transaction = false', done => { + spyOn(databaseAdapter, 'createObject').and.callThrough(); + + request({ + method: 'POST', + headers: headers, + url: 'http://localhost:8378/1/batch', + body: JSON.stringify({ + requests: [ + { + method: 'POST', + path: '/1/classes/MyObject', + body: { key: 'value1' }, + }, + { + method: 'POST', + path: '/1/classes/MyObject', + body: { key: 'value2' }, + }, + ], + transaction: false, + }), + }).then(response => { + expect(response.data.length).toEqual(2); + expect(response.data[0].success.objectId).toBeDefined(); + expect(response.data[0].success.createdAt).toBeDefined(); + expect(response.data[1].success.objectId).toBeDefined(); + expect(response.data[1].success.createdAt).toBeDefined(); + const query = new Parse.Query('MyObject'); + query.find().then(results => { + 
expect(databaseAdapter.createObject.calls.count()).toBe(2); + expect(databaseAdapter.createObject.calls.argsFor(0)[3]).toEqual(null); + expect(databaseAdapter.createObject.calls.argsFor(1)[3]).toEqual(null); + expect(results.map(result => result.get('key')).sort()).toEqual([ + 'value1', + 'value2', + ]); + done(); + }); + }); + }); + + if ( + (process.env.MONGODB_VERSION === '4.0.4' && + process.env.MONGODB_TOPOLOGY === 'replicaset' && + process.env.MONGODB_STORAGE_ENGINE === 'wiredTiger') || + process.env.PARSE_SERVER_TEST_DB === 'postgres' + ) { + describe('transactions', () => { + beforeAll(async () => { + if ( + process.env.MONGODB_VERSION === '4.0.4' && + process.env.MONGODB_TOPOLOGY === 'replicaset' && + process.env.MONGODB_STORAGE_ENGINE === 'wiredTiger' + ) { + await reconfigureServer({ + databaseAdapter: undefined, + databaseURI: + 'mongodb://localhost:27017/parseServerMongoAdapterTestDatabase?replicaSet=replicaset', + }); + } + }); + + beforeEach(async () => { + await TestUtils.destroyAllDataPermanently(true); + }); + + it('should handle a batch request with transaction = true', done => { + const myObject = new Parse.Object('MyObject'); // This is important because transaction only works on pre-existing collections + myObject + .save() + .then(() => { + return myObject.destroy(); + }) + .then(() => { + spyOn(databaseAdapter, 'createObject').and.callThrough(); + + request({ + method: 'POST', + headers: headers, + url: 'http://localhost:8378/1/batch', + body: JSON.stringify({ + requests: [ + { + method: 'POST', + path: '/1/classes/MyObject', + body: { key: 'value1' }, + }, + { + method: 'POST', + path: '/1/classes/MyObject', + body: { key: 'value2' }, + }, + ], + transaction: true, + }), + }).then(response => { + expect(response.data.length).toEqual(2); + expect(response.data[0].success.objectId).toBeDefined(); + expect(response.data[0].success.createdAt).toBeDefined(); + expect(response.data[1].success.objectId).toBeDefined(); + expect(response.data[1].success.createdAt).toBeDefined(); + const query = new Parse.Query('MyObject'); + query.find().then(results => { + expect(databaseAdapter.createObject.calls.count()).toBe(2); + expect(databaseAdapter.createObject.calls.argsFor(0)[3]).toBe( + databaseAdapter.createObject.calls.argsFor(1)[3] + ); + expect(results.map(result => result.get('key')).sort()).toEqual( + ['value1', 'value2'] + ); + done(); + }); + }); + }); + }); + + it('should not save anything when one operation fails in a transaction', done => { + const myObject = new Parse.Object('MyObject'); // This is important because transaction only works on pre-existing collections + myObject + .save() + .then(() => { + return myObject.destroy(); + }) + .then(() => { + request({ + method: 'POST', + headers: headers, + url: 'http://localhost:8378/1/batch', + body: JSON.stringify({ + requests: [ + { + method: 'POST', + path: '/1/classes/MyObject', + body: { key: 'value1' }, + }, + { + method: 'POST', + path: '/1/classes/MyObject', + body: { key: 10 }, + }, + ], + transaction: true, + }), + }).catch(error => { + expect(error.data).toBeDefined(); + const query = new Parse.Query('MyObject'); + query.find().then(results => { + expect(results.length).toBe(0); + done(); + }); + }); + }); + }); + + it('should generate separate session for each call', async () => { + const myObject = new Parse.Object('MyObject'); // This is important because transaction only works on pre-existing collections + await myObject.save(); + await myObject.destroy(); + + const myObject2 = new 
Parse.Object('MyObject2'); // This is important because transaction only works on pre-existing collections + await myObject2.save(); + await myObject2.destroy(); + + spyOn(databaseAdapter, 'createObject').and.callThrough(); + + let myObjectCalls = 0; + Parse.Cloud.beforeSave('MyObject', async () => { + myObjectCalls++; + if (myObjectCalls === 2) { + try { + await request({ + method: 'POST', + headers: headers, + url: 'http://localhost:8378/1/batch', + body: JSON.stringify({ + requests: [ + { + method: 'POST', + path: '/1/classes/MyObject2', + body: { key: 'value1' }, + }, + { + method: 'POST', + path: '/1/classes/MyObject2', + body: { key: 10 }, + }, + ], + transaction: true, + }), + }); + fail('should fail'); + } catch (e) { + expect(e).toBeDefined(); + } + } + }); + + const response = await request({ + method: 'POST', + headers: headers, + url: 'http://localhost:8378/1/batch', + body: JSON.stringify({ + requests: [ + { + method: 'POST', + path: '/1/classes/MyObject', + body: { key: 'value1' }, + }, + { + method: 'POST', + path: '/1/classes/MyObject', + body: { key: 'value2' }, + }, + ], + transaction: true, + }), + }); + + expect(response.data.length).toEqual(2); + expect(response.data[0].success.objectId).toBeDefined(); + expect(response.data[0].success.createdAt).toBeDefined(); + expect(response.data[1].success.objectId).toBeDefined(); + expect(response.data[1].success.createdAt).toBeDefined(); + + await request({ + method: 'POST', + headers: headers, + url: 'http://localhost:8378/1/batch', + body: JSON.stringify({ + requests: [ + { + method: 'POST', + path: '/1/classes/MyObject3', + body: { key: 'value1' }, + }, + { + method: 'POST', + path: '/1/classes/MyObject3', + body: { key: 'value2' }, + }, + ], + }), + }); + + const query = new Parse.Query('MyObject'); + const results = await query.find(); + expect(results.map(result => result.get('key')).sort()).toEqual([ + 'value1', + 'value2', + ]); + + const query2 = new Parse.Query('MyObject2'); + const results2 = await query2.find(); + expect(results2.length).toEqual(0); + + const query3 = new Parse.Query('MyObject3'); + const results3 = await query3.find(); + expect(results3.map(result => result.get('key')).sort()).toEqual([ + 'value1', + 'value2', + ]); + + expect(databaseAdapter.createObject.calls.count()).toBe(5); + let transactionalSession; + let transactionalSession2; + let myObjectDBCalls = 0; + let myObject2DBCalls = 0; + let myObject3DBCalls = 0; + for (let i = 0; i < 5; i++) { + const args = databaseAdapter.createObject.calls.argsFor(i); + switch (args[0]) { + case 'MyObject': + myObjectDBCalls++; + if (!transactionalSession) { + transactionalSession = args[3]; + } else { + expect(transactionalSession).toBe(args[3]); + } + if (transactionalSession2) { + expect(transactionalSession2).not.toBe(args[3]); + } + break; + case 'MyObject2': + myObject2DBCalls++; + transactionalSession2 = args[3]; + if (transactionalSession) { + expect(transactionalSession).not.toBe(args[3]); + } + break; + case 'MyObject3': + myObject3DBCalls++; + expect(args[3]).toEqual(null); + break; + } + } + expect(myObjectDBCalls).toEqual(2); + expect(myObject2DBCalls).toEqual(1); + expect(myObject3DBCalls).toEqual(2); + }); + }); + } }); diff --git a/spec/helper.js b/spec/helper.js index 93801c3a0d..0800128ced 100644 --- a/spec/helper.js +++ b/spec/helper.js @@ -40,20 +40,12 @@ const postgresURI = let databaseAdapter; // need to bind for mocking mocha -let startDB = () => {}; -let stopDB = () => {}; - if (process.env.PARSE_SERVER_TEST_DB === 'postgres') { 
databaseAdapter = new PostgresStorageAdapter({ uri: process.env.PARSE_SERVER_TEST_DATABASE_URI || postgresURI, collectionPrefix: 'test_', }); } else { - startDB = require('mongodb-runner/mocha/before').bind({ - timeout: () => {}, - slow: () => {}, - }); - stopDB = require('mongodb-runner/mocha/after'); databaseAdapter = new MongoStorageAdapter({ uri: mongoURI, collectionPrefix: 'test_', @@ -177,11 +169,6 @@ const reconfigureServer = changedConfiguration => { const Parse = require('parse/node'); Parse.serverURL = 'http://localhost:' + port + '/1'; -// 10 minutes timeout -beforeAll(startDB, 10 * 60 * 1000); - -afterAll(stopDB); - beforeEach(done => { try { Parse.User.enableUnsafeCurrentUser(); @@ -417,6 +404,7 @@ global.reconfigureServer = reconfigureServer; global.defaultConfiguration = defaultConfiguration; global.mockCustomAuthenticator = mockCustomAuthenticator; global.mockFacebookAuthenticator = mockFacebookAuthenticator; +global.databaseAdapter = databaseAdapter; global.jfail = function(err) { fail(JSON.stringify(err)); }; diff --git a/src/Adapters/Storage/Mongo/MongoCollection.js b/src/Adapters/Storage/Mongo/MongoCollection.js index 50e7a41123..91c28b407d 100644 --- a/src/Adapters/Storage/Mongo/MongoCollection.js +++ b/src/Adapters/Storage/Mongo/MongoCollection.js @@ -111,27 +111,30 @@ export default class MongoCollection { .toArray(); } - insertOne(object) { - return this._mongoCollection.insertOne(object); + insertOne(object, session) { + return this._mongoCollection.insertOne(object, { session }); } // Atomically updates data in the database for a single (first) object that matched the query // If there is nothing that matches the query - does insert // Postgres Note: `INSERT ... ON CONFLICT UPDATE` that is available since 9.5. - upsertOne(query, update) { - return this._mongoCollection.updateOne(query, update, { upsert: true }); + upsertOne(query, update, session) { + return this._mongoCollection.updateOne(query, update, { + upsert: true, + session, + }); } updateOne(query, update) { return this._mongoCollection.updateOne(query, update); } - updateMany(query, update) { - return this._mongoCollection.updateMany(query, update); + updateMany(query, update, session) { + return this._mongoCollection.updateMany(query, update, { session }); } - deleteMany(query) { - return this._mongoCollection.deleteMany(query); + deleteMany(query, session) { + return this._mongoCollection.deleteMany(query, { session }); } _ensureSparseUniqueIndexInBackground(indexRequest) { diff --git a/src/Adapters/Storage/Mongo/MongoStorageAdapter.js b/src/Adapters/Storage/Mongo/MongoStorageAdapter.js index 14128bbc54..5ab92a2a31 100644 --- a/src/Adapters/Storage/Mongo/MongoStorageAdapter.js +++ b/src/Adapters/Storage/Mongo/MongoStorageAdapter.js @@ -472,7 +472,12 @@ export class MongoStorageAdapter implements StorageAdapter { // TODO: As yet not particularly well specified. Creates an object. Maybe shouldn't even need the schema, // and should infer from the type. Or maybe does need the schema for validations. Or maybe needs // the schema only for the legacy mongo format. We'll figure that out later. 
- createObject(className: string, schema: SchemaType, object: any) { + createObject( + className: string, + schema: SchemaType, + object: any, + transactionalSession: ?any + ) { schema = convertParseSchemaToMongoSchema(schema); const mongoObject = parseObjectToMongoObjectForCreate( className, @@ -480,7 +485,9 @@ export class MongoStorageAdapter implements StorageAdapter { schema ); return this._adaptiveCollection(className) - .then(collection => collection.insertOne(mongoObject)) + .then(collection => + collection.insertOne(mongoObject, transactionalSession) + ) .catch(error => { if (error.code === 11000) { // Duplicate value @@ -510,13 +517,14 @@ export class MongoStorageAdapter implements StorageAdapter { deleteObjectsByQuery( className: string, schema: SchemaType, - query: QueryType + query: QueryType, + transactionalSession: ?any ) { schema = convertParseSchemaToMongoSchema(schema); return this._adaptiveCollection(className) .then(collection => { const mongoWhere = transformWhere(className, query, schema); - return collection.deleteMany(mongoWhere); + return collection.deleteMany(mongoWhere, transactionalSession); }) .catch(err => this.handleError(err)) .then( @@ -543,13 +551,16 @@ export class MongoStorageAdapter implements StorageAdapter { className: string, schema: SchemaType, query: QueryType, - update: any + update: any, + transactionalSession: ?any ) { schema = convertParseSchemaToMongoSchema(schema); const mongoUpdate = transformUpdate(className, update, schema); const mongoWhere = transformWhere(className, query, schema); return this._adaptiveCollection(className) - .then(collection => collection.updateMany(mongoWhere, mongoUpdate)) + .then(collection => + collection.updateMany(mongoWhere, mongoUpdate, transactionalSession) + ) .catch(err => this.handleError(err)); } @@ -559,7 +570,8 @@ export class MongoStorageAdapter implements StorageAdapter { className: string, schema: SchemaType, query: QueryType, - update: any + update: any, + transactionalSession: ?any ) { schema = convertParseSchemaToMongoSchema(schema); const mongoUpdate = transformUpdate(className, update, schema); @@ -568,6 +580,7 @@ export class MongoStorageAdapter implements StorageAdapter { .then(collection => collection._mongoCollection.findOneAndUpdate(mongoWhere, mongoUpdate, { returnOriginal: false, + session: transactionalSession || undefined, }) ) .then(result => mongoObjectToParseObject(className, result.value, schema)) @@ -588,13 +601,16 @@ export class MongoStorageAdapter implements StorageAdapter { className: string, schema: SchemaType, query: QueryType, - update: any + update: any, + transactionalSession: ?any ) { schema = convertParseSchemaToMongoSchema(schema); const mongoUpdate = transformUpdate(className, update, schema); const mongoWhere = transformWhere(className, query, schema); return this._adaptiveCollection(className) - .then(collection => collection.upsertOne(mongoWhere, mongoUpdate)) + .then(collection => + collection.upsertOne(mongoWhere, mongoUpdate, transactionalSession) + ) .catch(err => this.handleError(err)); } @@ -1059,6 +1075,24 @@ export class MongoStorageAdapter implements StorageAdapter { }) .catch(err => this.handleError(err)); } + + createTransactionalSession(): Promise { + const transactionalSection = this.client.startSession(); + transactionalSection.startTransaction(); + return Promise.resolve(transactionalSection); + } + + commitTransactionalSession(transactionalSection: any): Promise { + return transactionalSection.commitTransaction().then(() => { + 
transactionalSection.endSession(); + }); + } + + abortTransactionalSession(transactionalSection: any): Promise { + return transactionalSection.abortTransaction().then(() => { + transactionalSection.endSession(); + }); + } } export default MongoStorageAdapter; diff --git a/src/Adapters/Storage/Postgres/PostgresStorageAdapter.js b/src/Adapters/Storage/Postgres/PostgresStorageAdapter.js index 9290a6d72f..28cbb67748 100644 --- a/src/Adapters/Storage/Postgres/PostgresStorageAdapter.js +++ b/src/Adapters/Storage/Postgres/PostgresStorageAdapter.js @@ -1223,7 +1223,12 @@ export class PostgresStorageAdapter implements StorageAdapter { } // TODO: remove the mongo format dependency in the return value - createObject(className: string, schema: SchemaType, object: any) { + createObject( + className: string, + schema: SchemaType, + object: any, + transactionalSession: ?any + ) { debug('createObject', className, object); let columnsArray = []; const valuesArray = []; @@ -1351,7 +1356,10 @@ export class PostgresStorageAdapter implements StorageAdapter { const qs = `INSERT INTO $1:name (${columnsPattern}) VALUES (${valuesPattern})`; const values = [className, ...columnsArray, ...valuesArray]; debug(qs, values); - return this._client + const promise = (transactionalSession + ? transactionalSession.t + : this._client + ) .none(qs, values) .then(() => ({ ops: [object] })) .catch(error => { @@ -1371,6 +1379,10 @@ export class PostgresStorageAdapter implements StorageAdapter { } throw error; }); + if (transactionalSession) { + transactionalSession.batch.push(promise); + } + return promise; } // Remove all objects that match the given Parse Query. @@ -1379,7 +1391,8 @@ export class PostgresStorageAdapter implements StorageAdapter { deleteObjectsByQuery( className: string, schema: SchemaType, - query: QueryType + query: QueryType, + transactionalSession: ?any ) { debug('deleteObjectsByQuery', className, query); const values = [className]; @@ -1391,7 +1404,10 @@ export class PostgresStorageAdapter implements StorageAdapter { } const qs = `WITH deleted AS (DELETE FROM $1:name WHERE ${where.pattern} RETURNING *) SELECT count(*) FROM deleted`; debug(qs, values); - return this._client + const promise = (transactionalSession + ? transactionalSession.t + : this._client + ) .one(qs, values, a => +a.count) .then(count => { if (count === 0) { @@ -1409,18 +1425,27 @@ export class PostgresStorageAdapter implements StorageAdapter { } // ELSE: Don't delete anything if doesn't exist }); + if (transactionalSession) { + transactionalSession.batch.push(promise); + } + return promise; } // Return value not currently well specified. findOneAndUpdate( className: string, schema: SchemaType, query: QueryType, - update: any + update: any, + transactionalSession: ?any ): Promise { debug('findOneAndUpdate', className, query, update); - return this.updateObjectsByQuery(className, schema, query, update).then( - val => val[0] - ); + return this.updateObjectsByQuery( + className, + schema, + query, + update, + transactionalSession + ).then(val => val[0]); } // Apply the update to all objects that match the given Parse Query. @@ -1428,7 +1453,8 @@ export class PostgresStorageAdapter implements StorageAdapter { className: string, schema: SchemaType, query: QueryType, - update: any + update: any, + transactionalSession: ?any ): Promise<[any]> { debug('updateObjectsByQuery', className, query, update); const updatePatterns = []; @@ -1685,7 +1711,14 @@ export class PostgresStorageAdapter implements StorageAdapter { where.pattern.length > 0 ? 
`WHERE ${where.pattern}` : ''; const qs = `UPDATE $1:name SET ${updatePatterns.join()} ${whereClause} RETURNING *`; debug('update: ', qs, values); - return this._client.any(qs, values); + const promise = (transactionalSession + ? transactionalSession.t + : this._client + ).any(qs, values); + if (transactionalSession) { + transactionalSession.batch.push(promise); + } + return promise; } // Hopefully, we can get rid of this. It's only used for config and hooks. @@ -1693,16 +1726,28 @@ export class PostgresStorageAdapter implements StorageAdapter { className: string, schema: SchemaType, query: QueryType, - update: any + update: any, + transactionalSession: ?any ) { debug('upsertOneObject', { className, query, update }); const createValue = Object.assign({}, query, update); - return this.createObject(className, schema, createValue).catch(error => { + return this.createObject( + className, + schema, + createValue, + transactionalSession + ).catch(error => { // ignore duplicate value errors as it's upsert if (error.code !== Parse.Error.DUPLICATE_VALUE) { throw error; } - return this.findOneAndUpdate(className, schema, query, update); + return this.findOneAndUpdate( + className, + schema, + query, + update, + transactionalSession + ); }); } @@ -2323,6 +2368,37 @@ export class PostgresStorageAdapter implements StorageAdapter { updateEstimatedCount(className: string) { return this._client.none('ANALYZE $1:name', [className]); } + + createTransactionalSession(): Promise { + return new Promise(resolve => { + const transactionalSession = {}; + transactionalSession.result = this._client.tx(t => { + transactionalSession.t = t; + transactionalSession.promise = new Promise(resolve => { + transactionalSession.resolve = resolve; + }); + transactionalSession.batch = []; + resolve(transactionalSession); + return transactionalSession.promise; + }); + }); + } + + commitTransactionalSession(transactionalSession: any): Promise { + transactionalSession.resolve( + transactionalSession.t.batch(transactionalSession.batch) + ); + return transactionalSession.result; + } + + abortTransactionalSession(transactionalSession: any): Promise { + const result = transactionalSession.result.catch(); + transactionalSession.batch.push(Promise.reject()); + transactionalSession.resolve( + transactionalSession.t.batch(transactionalSession.batch) + ); + return result; + } } function convertPolygonToSQL(polygon) { diff --git a/src/Adapters/Storage/StorageAdapter.js b/src/Adapters/Storage/StorageAdapter.js index 31afe569c9..6de3ea3cbd 100644 --- a/src/Adapters/Storage/StorageAdapter.js +++ b/src/Adapters/Storage/StorageAdapter.js @@ -46,30 +46,35 @@ export interface StorageAdapter { createObject( className: string, schema: SchemaType, - object: any + object: any, + transactionalSession: ?any ): Promise; deleteObjectsByQuery( className: string, schema: SchemaType, - query: QueryType + query: QueryType, + transactionalSession: ?any ): Promise; updateObjectsByQuery( className: string, schema: SchemaType, query: QueryType, - update: any + update: any, + transactionalSession: ?any ): Promise<[any]>; findOneAndUpdate( className: string, schema: SchemaType, query: QueryType, - update: any + update: any, + transactionalSession: ?any ): Promise; upsertOneObject( className: string, schema: SchemaType, query: QueryType, - update: any + update: any, + transactionalSession: ?any ): Promise; find( className: string, @@ -114,4 +119,7 @@ export interface StorageAdapter { fields: any, conn: ?any ): Promise; + createTransactionalSession(): Promise; + 
commitTransactionalSession(transactionalSession: any): Promise; + abortTransactionalSession(transactionalSession: any): Promise; } diff --git a/src/Controllers/DatabaseController.js b/src/Controllers/DatabaseController.js index 222c2facf4..0dc89c412a 100644 --- a/src/Controllers/DatabaseController.js +++ b/src/Controllers/DatabaseController.js @@ -411,6 +411,7 @@ class DatabaseController { schemaCache: any; schemaPromise: ?Promise; skipMongoDBServer13732Workaround: boolean; + _transactionalSession: ?any; constructor( adapter: StorageAdapter, @@ -424,6 +425,7 @@ class DatabaseController { // it. Instead, use loadSchema to get a schema. this.schemaPromise = null; this.skipMongoDBServer13732Workaround = skipMongoDBServer13732Workaround; + this._transactionalSession = null; } collectionExists(className: string): Promise { @@ -624,21 +626,24 @@ class DatabaseController { className, schema, query, - update + update, + this._transactionalSession ); } else if (upsert) { return this.adapter.upsertOneObject( className, schema, query, - update + update, + this._transactionalSession ); } else { return this.adapter.findOneAndUpdate( className, schema, query, - update + update, + this._transactionalSession ); } }); @@ -760,7 +765,8 @@ class DatabaseController { `_Join:${key}:${fromClassName}`, relationSchema, doc, - doc + doc, + this._transactionalSession ); } @@ -781,7 +787,8 @@ class DatabaseController { .deleteObjectsByQuery( `_Join:${key}:${fromClassName}`, relationSchema, - doc + doc, + this._transactionalSession ) .catch(error => { // We don't care if they try to delete a non-existent relation. @@ -848,7 +855,8 @@ class DatabaseController { this.adapter.deleteObjectsByQuery( className, parseFormatSchema, - query + query, + this._transactionalSession ) ) .catch(error => { @@ -908,7 +916,8 @@ class DatabaseController { return this.adapter.createObject( className, SchemaController.convertSchemaToAdapterSchema(schema), - object + object, + this._transactionalSession ); }) .then(result => { @@ -1531,6 +1540,36 @@ class DatabaseController { return protectedKeys; } + createTransactionalSession() { + return this.adapter + .createTransactionalSession() + .then(transactionalSession => { + this._transactionalSession = transactionalSession; + }); + } + + commitTransactionalSession() { + if (!this._transactionalSession) { + throw new Error('There is no transactional session to commit'); + } + return this.adapter + .commitTransactionalSession(this._transactionalSession) + .then(() => { + this._transactionalSession = null; + }); + } + + abortTransactionalSession() { + if (!this._transactionalSession) { + throw new Error('There is no transactional session to abort'); + } + return this.adapter + .abortTransactionalSession(this._transactionalSession) + .then(() => { + this._transactionalSession = null; + }); + } + // TODO: create indexes on first creation of a _User object. Otherwise it's impossible to // have a Parse app without it having a _User collection. 
performInitialization() { diff --git a/src/ParseServerRESTController.js b/src/ParseServerRESTController.js index b91fbdbd1e..4bcd7bf066 100644 --- a/src/ParseServerRESTController.js +++ b/src/ParseServerRESTController.js @@ -33,11 +33,13 @@ function getAuth(options = {}, config) { } function ParseServerRESTController(applicationId, router) { - function handleRequest(method, path, data = {}, options = {}) { + function handleRequest(method, path, data = {}, options = {}, config) { // Store the arguments, for later use if internal fails const args = arguments; - const config = Config.get(applicationId); + if (!config) { + config = Config.get(applicationId); + } const serverURL = URL.parse(config.serverURL); if (path.indexOf(serverURL.path) === 0) { path = path.slice(serverURL.path.length, path.length); @@ -48,24 +50,52 @@ function ParseServerRESTController(applicationId, router) { } if (path === '/batch') { - const promises = data.requests.map(request => { - return handleRequest( - request.method, - request.path, - request.body, - options - ).then( - response => { - return Promise.resolve({ success: response }); - }, - error => { - return Promise.resolve({ - error: { code: error.code, error: error.message }, - }); - } - ); + let initialPromise = Promise.resolve(); + if (data.transaction === true) { + initialPromise = config.database.createTransactionalSession(); + } + return initialPromise.then(() => { + const promises = data.requests.map(request => { + return handleRequest( + request.method, + request.path, + request.body, + options, + config + ).then( + response => { + return Promise.resolve({ success: response }); + }, + error => { + if (data.transaction === true) { + return Promise.reject(error); + } + return Promise.resolve({ + error: { code: error.code, error: error.message }, + }); + } + ); + }); + return Promise.all(promises) + .catch(error => { + if (data.transaction === true) { + return config.database.abortTransactionalSession().then(() => { + throw error; + }); + } else { + throw error; + } + }) + .then(result => { + if (data.transaction === true) { + return config.database.commitTransactionalSession().then(() => { + return result; + }); + } else { + return result; + } + }); }); - return Promise.all(promises); } let query; diff --git a/src/batch.js b/src/batch.js index 0a584d1dae..10ae294a36 100644 --- a/src/batch.js +++ b/src/batch.js @@ -83,30 +83,57 @@ function handleBatch(router, req) { req.config.publicServerURL ); - const promises = req.body.requests.map(restRequest => { - const routablePath = makeRoutablePath(restRequest.path); - // Construct a request that we can send to a handler - const request = { - body: restRequest.body, - config: req.config, - auth: req.auth, - info: req.info, - }; + let initialPromise = Promise.resolve(); + if (req.body.transaction === true) { + initialPromise = req.config.database.createTransactionalSession(); + } - return router - .tryRouteRequest(restRequest.method, routablePath, request) - .then( - response => { - return { success: response.response }; - }, - error => { - return { error: { code: error.code, error: error.message } }; - } - ); - }); + return initialPromise.then(() => { + const promises = req.body.requests.map(restRequest => { + const routablePath = makeRoutablePath(restRequest.path); + // Construct a request that we can send to a handler - return Promise.all(promises).then(results => { - return { response: results }; + const request = { + body: restRequest.body, + config: req.config, + auth: req.auth, + info: req.info, + }; + + 
return router + .tryRouteRequest(restRequest.method, routablePath, request) + .then( + response => { + return { success: response.response }; + }, + error => { + if (req.body.transaction === true) { + return Promise.reject(error); + } + return { error: { code: error.code, error: error.message } }; + } + ); + }); + + return Promise.all(promises) + .catch(error => { + if (req.body.transaction === true) { + return req.config.database.abortTransactionalSession().then(() => { + throw error; + }); + } else { + throw error; + } + }) + .then(results => { + if (req.body.transaction === true) { + return req.config.database.commitTransactionalSession().then(() => { + return { response: results }; + }); + } else { + return { response: results }; + } + }); }); }