From 8222855124ddda2822a08de47c5405ca87c46f04 Mon Sep 17 00:00:00 2001 From: = Date: Tue, 23 Jul 2019 11:31:55 -0700 Subject: [PATCH 01/24] Batch transaction boilerplate --- spec/batch.spec.js | 118 ++++++++++++++++++ .../Storage/Mongo/MongoStorageAdapter.js | 18 +++ .../Postgres/PostgresStorageAdapter.js | 12 ++ src/Adapters/Storage/StorageAdapter.js | 3 + src/Controllers/DatabaseController.js | 15 ++- src/batch.js | 76 +++++++---- 6 files changed, 219 insertions(+), 23 deletions(-) diff --git a/spec/batch.spec.js b/spec/batch.spec.js index 5f002d4ca1..218aa011a7 100644 --- a/spec/batch.spec.js +++ b/spec/batch.spec.js @@ -1,4 +1,5 @@ const batch = require('../lib/batch'); +const request = require('../lib/request'); const originalURL = '/parse/batch'; const serverURL = 'http://localhost:1234/parse'; @@ -7,6 +8,13 @@ const serverURLNaked = 'http://localhost:1234/'; const publicServerURL = 'http://domain.com/parse'; const publicServerURLNaked = 'http://domain.com/'; +const headers = { + 'Content-Type': 'application/json', + 'X-Parse-Application-Id': 'test', + 'X-Parse-REST-API-Key': 'rest', + 'X-Parse-Installation-Id': 'yolo', +}; + describe('batch', () => { it('should return the proper url', () => { const internalURL = batch.makeBatchRoutingPathFunction(originalURL)( @@ -59,4 +67,114 @@ describe('batch', () => { expect(internalURL).toEqual('/classes/Object'); }); + + it('should handle a batch request without transaction', done => { + request({ + method: 'POST', + headers: headers, + url: 'http://localhost:8378/1/batch', + body: JSON.stringify({ + requests: [ + { + method: 'POST', + path: '/1/classes/MyObject', + body: { key: 'value1' }, + }, + { + method: 'POST', + path: '/1/classes/MyObject', + body: { key: 'value2' }, + }, + ], + }), + }).then(response => { + expect(response.data.length).toEqual(2); + expect(response.data[0].success.objectId).toBeDefined(); + expect(response.data[0].success.createdAt).toBeDefined(); + expect(response.data[1].success.objectId).toBeDefined(); + expect(response.data[1].success.createdAt).toBeDefined(); + const query = new Parse.Query('MyObject'); + query.find().then(results => { + expect(results.map(result => result.get('key')).sort()).toEqual([ + 'value1', + 'value2', + ]); + done(); + }); + }); + }); + + it('should handle a batch request with transaction = false', done => { + request({ + method: 'POST', + headers: headers, + url: 'http://localhost:8378/1/batch', + body: JSON.stringify({ + requests: [ + { + method: 'POST', + path: '/1/classes/MyObject', + body: { key: 'value1' }, + }, + { + method: 'POST', + path: '/1/classes/MyObject', + body: { key: 'value2' }, + }, + ], + transaction: false, + }), + }).then(response => { + expect(response.data.length).toEqual(2); + expect(response.data[0].success.objectId).toBeDefined(); + expect(response.data[0].success.createdAt).toBeDefined(); + expect(response.data[1].success.objectId).toBeDefined(); + expect(response.data[1].success.createdAt).toBeDefined(); + const query = new Parse.Query('MyObject'); + query.find().then(results => { + expect(results.map(result => result.get('key')).sort()).toEqual([ + 'value1', + 'value2', + ]); + done(); + }); + }); + }); + + it('should handle a batch request with transaction = true', done => { + request({ + method: 'POST', + headers: headers, + url: 'http://localhost:8378/1/batch', + body: JSON.stringify({ + requests: [ + { + method: 'POST', + path: '/1/classes/MyObject', + body: { key: 'value1' }, + }, + { + method: 'POST', + path: '/1/classes/MyObject', + body: { key: 
'value2' }, + }, + ], + transaction: true, + }), + }).then(response => { + expect(response.data.length).toEqual(2); + expect(response.data[0].success.objectId).toBeDefined(); + expect(response.data[0].success.createdAt).toBeDefined(); + expect(response.data[1].success.objectId).toBeDefined(); + expect(response.data[1].success.createdAt).toBeDefined(); + const query = new Parse.Query('MyObject'); + query.find().then(results => { + expect(results.map(result => result.get('key')).sort()).toEqual([ + 'value1', + 'value2', + ]); + done(); + }); + }); + }); }); diff --git a/src/Adapters/Storage/Mongo/MongoStorageAdapter.js b/src/Adapters/Storage/Mongo/MongoStorageAdapter.js index 33282ce2b9..5ff3a17023 100644 --- a/src/Adapters/Storage/Mongo/MongoStorageAdapter.js +++ b/src/Adapters/Storage/Mongo/MongoStorageAdapter.js @@ -1048,6 +1048,24 @@ export class MongoStorageAdapter implements StorageAdapter { }) .catch(err => this.handleError(err)); } + + createTransactionalSession(): Promise { + const transactionalSection = this.client.startSession(); + transactionalSection.startTransaction(); + return Promise.resolve(transactionalSection); + } + + commitTransactionalSession(transactionalSection): Promise { + return transactionalSection.commitTransaction().then(() => { + transactionalSection.endSession(); + }); + } + + abortTransactionalSession(transactionalSection): Promise { + return transactionalSection.abortTransaction().then(() => { + transactionalSection.endSession(); + }); + } } export default MongoStorageAdapter; diff --git a/src/Adapters/Storage/Postgres/PostgresStorageAdapter.js b/src/Adapters/Storage/Postgres/PostgresStorageAdapter.js index a449dbb39f..513de31158 100644 --- a/src/Adapters/Storage/Postgres/PostgresStorageAdapter.js +++ b/src/Adapters/Storage/Postgres/PostgresStorageAdapter.js @@ -2330,6 +2330,18 @@ export class PostgresStorageAdapter implements StorageAdapter { updateEstimatedCount(className: string) { return this._client.none('ANALYZE $1:name', [className]); } + + createTransactionalSession(): Promise { + return Promise.resolve(); + } + + commitTransactionalSession(): Promise { + return Promise.resolve(); + } + + abortTransactionalSession(): Promise { + return Promise.resolve(); + } } function convertPolygonToSQL(polygon) { diff --git a/src/Adapters/Storage/StorageAdapter.js b/src/Adapters/Storage/StorageAdapter.js index 31afe569c9..c87106c8c8 100644 --- a/src/Adapters/Storage/StorageAdapter.js +++ b/src/Adapters/Storage/StorageAdapter.js @@ -114,4 +114,7 @@ export interface StorageAdapter { fields: any, conn: ?any ): Promise; + createTransactionalSession(): Promise; + commitTransactionalSession(transactionalSession: string): Promise; + abortTransactionalSession(transactionalSession: string): Promise; } diff --git a/src/Controllers/DatabaseController.js b/src/Controllers/DatabaseController.js index cfe2c3570d..bb956a8531 100644 --- a/src/Controllers/DatabaseController.js +++ b/src/Controllers/DatabaseController.js @@ -119,7 +119,7 @@ const validateQuery = ( */ Object.keys(query).forEach(key => { const noCollisions = !query.$or.some(subq => - subq.hasOwnProperty(key) + Object.hasOwnProperty.call(subq, key) ); let hasNears = false; if (query[key] != null && typeof query[key] == 'object') { @@ -411,6 +411,7 @@ class DatabaseController { schemaCache: any; schemaPromise: ?Promise; skipMongoDBServer13732Workaround: boolean; + transactionalSession: ?any; constructor( adapter: StorageAdapter, @@ -1531,6 +1532,18 @@ class DatabaseController { return protectedKeys; } + 
createTransactionalSession() { + return this.adapter.createTransactionalSession(); + } + + commitTransactionalSession(transactionalSession) { + return this.adapter.commitTransactionalSession(transactionalSession); + } + + abortTransactionalSession(transactionalSession) { + return this.adapter.abortTransactionalSession(transactionalSession); + } + // TODO: create indexes on first creation of a _User object. Otherwise it's impossible to // have a Parse app without it having a _User collection. performInitialization() { diff --git a/src/batch.js b/src/batch.js index 0a584d1dae..83ef4f6e13 100644 --- a/src/batch.js +++ b/src/batch.js @@ -83,30 +83,62 @@ function handleBatch(router, req) { req.config.publicServerURL ); - const promises = req.body.requests.map(restRequest => { - const routablePath = makeRoutablePath(restRequest.path); - // Construct a request that we can send to a handler - const request = { - body: restRequest.body, - config: req.config, - auth: req.auth, - info: req.info, - }; + let initialPromise = Promise.resolve(); + if (req.body.transaction === true) { + initialPromise = req.config.database.createTransactionalSession(); + } - return router - .tryRouteRequest(restRequest.method, routablePath, request) - .then( - response => { - return { success: response.response }; - }, - error => { - return { error: { code: error.code, error: error.message } }; - } - ); - }); + return initialPromise.then(transactionalSession => { + const promises = req.body.requests.map(restRequest => { + const routablePath = makeRoutablePath(restRequest.path); + // Construct a request that we can send to a handler - return Promise.all(promises).then(results => { - return { response: results }; + if (transactionalSession) { + req.config.database.transactionalSession = transactionalSession; + } + + const request = { + body: restRequest.body, + config: req.config, + auth: req.auth, + info: req.info, + }; + + return router + .tryRouteRequest(restRequest.method, routablePath, request) + .then( + response => { + return { success: response.response }; + }, + error => { + return { error: { code: error.code, error: error.message } }; + } + ); + }); + + return Promise.all(promises) + .then(results => { + if (transactionalSession) { + return req.config.database + .commitTransactionalSession(transactionalSession) + .then(() => { + return { response: results }; + }); + } else { + return { response: results }; + } + }) + .catch(error => { + if (transactionalSession) { + return req.config.database + .abortTransactionalSession(transactionalSession) + .then(() => { + throw error; + }); + } else { + throw error; + } + }); }); } From 47be348aa238a10e1e2bd47c716664b1a1549872 Mon Sep 17 00:00:00 2001 From: = Date: Tue, 23 Jul 2019 12:34:12 -0700 Subject: [PATCH 02/24] Refactoring transaction boilerplate --- .../Storage/Mongo/MongoStorageAdapter.js | 4 +-- src/Adapters/Storage/StorageAdapter.js | 4 +-- src/Controllers/DatabaseController.js | 31 +++++++++++++++---- src/batch.js | 26 ++++++---------- 4 files changed, 38 insertions(+), 27 deletions(-) diff --git a/src/Adapters/Storage/Mongo/MongoStorageAdapter.js b/src/Adapters/Storage/Mongo/MongoStorageAdapter.js index 5ff3a17023..f42fd69101 100644 --- a/src/Adapters/Storage/Mongo/MongoStorageAdapter.js +++ b/src/Adapters/Storage/Mongo/MongoStorageAdapter.js @@ -1055,13 +1055,13 @@ export class MongoStorageAdapter implements StorageAdapter { return Promise.resolve(transactionalSection); } - commitTransactionalSession(transactionalSection): Promise { + 
commitTransactionalSession(transactionalSection: any): Promise { return transactionalSection.commitTransaction().then(() => { transactionalSection.endSession(); }); } - abortTransactionalSession(transactionalSection): Promise { + abortTransactionalSession(transactionalSection: any): Promise { return transactionalSection.abortTransaction().then(() => { transactionalSection.endSession(); }); diff --git a/src/Adapters/Storage/StorageAdapter.js b/src/Adapters/Storage/StorageAdapter.js index c87106c8c8..3e446cf25c 100644 --- a/src/Adapters/Storage/StorageAdapter.js +++ b/src/Adapters/Storage/StorageAdapter.js @@ -115,6 +115,6 @@ export interface StorageAdapter { conn: ?any ): Promise; createTransactionalSession(): Promise; - commitTransactionalSession(transactionalSession: string): Promise; - abortTransactionalSession(transactionalSession: string): Promise; + commitTransactionalSession(transactionalSession: any): Promise; + abortTransactionalSession(transactionalSession: any): Promise; } diff --git a/src/Controllers/DatabaseController.js b/src/Controllers/DatabaseController.js index bb956a8531..3dc989bc4b 100644 --- a/src/Controllers/DatabaseController.js +++ b/src/Controllers/DatabaseController.js @@ -411,7 +411,7 @@ class DatabaseController { schemaCache: any; schemaPromise: ?Promise; skipMongoDBServer13732Workaround: boolean; - transactionalSession: ?any; + _transactionalSession: ?any; constructor( adapter: StorageAdapter, @@ -425,6 +425,7 @@ class DatabaseController { // it. Instead, use loadSchema to get a schema. this.schemaPromise = null; this.skipMongoDBServer13732Workaround = skipMongoDBServer13732Workaround; + this._transactionalSession = null; } collectionExists(className: string): Promise { @@ -1533,15 +1534,33 @@ class DatabaseController { } createTransactionalSession() { - return this.adapter.createTransactionalSession(); + return this.adapter + .createTransactionalSession() + .then(transactionalSession => { + this._transactionalSession = transactionalSession; + }); } - commitTransactionalSession(transactionalSession) { - return this.adapter.commitTransactionalSession(transactionalSession); + commitTransactionalSession() { + if (!this._transactionalSession) { + throw new Error('There is no transactional session to commit'); + } + return this.adapter + .commitTransactionalSession(this._transactionalSession) + .then(() => { + this._transactionalSession = null; + }); } - abortTransactionalSession(transactionalSession) { - return this.adapter.abortTransactionalSession(transactionalSession); + abortTransactionalSession() { + if (!this._transactionalSession) { + throw new Error('There is no transactional session to abort'); + } + return this.adapter + .abortTransactionalSession(this._transactionalSession) + .then(() => { + this._transactionalSession = null; + }); } // TODO: create indexes on first creation of a _User object. 
Otherwise it's impossible to diff --git a/src/batch.js b/src/batch.js index 83ef4f6e13..ba0e92789d 100644 --- a/src/batch.js +++ b/src/batch.js @@ -88,15 +88,11 @@ function handleBatch(router, req) { initialPromise = req.config.database.createTransactionalSession(); } - return initialPromise.then(transactionalSession => { + return initialPromise.then(() => { const promises = req.body.requests.map(restRequest => { const routablePath = makeRoutablePath(restRequest.path); // Construct a request that we can send to a handler - if (transactionalSession) { - req.config.database.transactionalSession = transactionalSession; - } - const request = { body: restRequest.body, config: req.config, @@ -118,23 +114,19 @@ function handleBatch(router, req) { return Promise.all(promises) .then(results => { - if (transactionalSession) { - return req.config.database - .commitTransactionalSession(transactionalSession) - .then(() => { - return { response: results }; - }); + if (req.body.transaction) { + return req.config.database.commitTransactionalSession().then(() => { + return { response: results }; + }); } else { return { response: results }; } }) .catch(error => { - if (transactionalSession) { - return req.config.database - .abortTransactionalSession(transactionalSession) - .then(() => { - throw error; - }); + if (req.body.transaction) { + return req.config.database.abortTransactionalSession().then(() => { + throw error; + }); } else { throw error; } From 5404ba9d3ff5f10040d912197f6cbd4415870549 Mon Sep 17 00:00:00 2001 From: = Date: Tue, 23 Jul 2019 13:33:15 -0700 Subject: [PATCH 03/24] Independent sessions test --- spec/batch.spec.js | 158 +++++++++++++++++++++++++++++++++++++++++++++ src/triggers.js | 1 + 2 files changed, 159 insertions(+) diff --git a/spec/batch.spec.js b/spec/batch.spec.js index 218aa011a7..c352c1faa3 100644 --- a/spec/batch.spec.js +++ b/spec/batch.spec.js @@ -69,6 +69,12 @@ describe('batch', () => { }); it('should handle a batch request without transaction', done => { + let calls = 0; + Parse.Cloud.beforeSave('MyObject', ({ config }) => { + calls++; + expect(config.database._transactionalSession).toEqual(null); + }); + request({ method: 'POST', headers: headers, @@ -95,6 +101,7 @@ describe('batch', () => { expect(response.data[1].success.createdAt).toBeDefined(); const query = new Parse.Query('MyObject'); query.find().then(results => { + expect(calls).toBe(2); expect(results.map(result => result.get('key')).sort()).toEqual([ 'value1', 'value2', @@ -105,6 +112,12 @@ describe('batch', () => { }); it('should handle a batch request with transaction = false', done => { + let calls = 0; + Parse.Cloud.beforeSave('MyObject', ({ config }) => { + calls++; + expect(config.database._transactionalSession).toEqual(null); + }); + request({ method: 'POST', headers: headers, @@ -132,6 +145,7 @@ describe('batch', () => { expect(response.data[1].success.createdAt).toBeDefined(); const query = new Parse.Query('MyObject'); query.find().then(results => { + expect(calls).toBe(2); expect(results.map(result => result.get('key')).sort()).toEqual([ 'value1', 'value2', @@ -142,6 +156,20 @@ describe('batch', () => { }); it('should handle a batch request with transaction = true', done => { + let calls = 0; + let transactionalSession = null; + Parse.Cloud.beforeSave('MyObject', ({ config }) => { + calls++; + expect(config.database._transactionalSession).not.toEqual(null); + if (transactionalSession) { + expect(config.database._transactionalSession).toBe( + transactionalSession + ); + } else { + transactionalSession 
= config.database._transactionalSession; + } + }); + request({ method: 'POST', headers: headers, @@ -169,6 +197,7 @@ describe('batch', () => { expect(response.data[1].success.createdAt).toBeDefined(); const query = new Parse.Query('MyObject'); query.find().then(results => { + expect(calls).toBe(2); expect(results.map(result => result.get('key')).sort()).toEqual([ 'value1', 'value2', @@ -177,4 +206,133 @@ describe('batch', () => { }); }); }); + + it('should generate separate session for each call', done => { + let myObjectCalls = 0; + let myObjectTransactionalSession = null; + + Parse.Cloud.beforeSave('MyObject', ({ config }) => { + myObjectCalls++; + expect(config.database._transactionalSession).not.toEqual(null); + if (myObjectTransactionalSession) { + expect(config.database._transactionalSession).toBe( + myObjectTransactionalSession + ); + } else { + myObjectTransactionalSession = config.database._transactionalSession; + } + + if (myObjectCalls === 1) { + return request({ + method: 'POST', + headers: headers, + url: 'http://localhost:8378/1/batch', + body: JSON.stringify({ + requests: [ + { + method: 'POST', + path: '/1/classes/MyObject2', + body: { key: 'value1' }, + }, + { + method: 'POST', + path: '/1/classes/MyObject2', + body: { key: 'value2' }, + }, + ], + transaction: true, + }), + }).then(() => Promise.resolve()); + } + }); + + let myObject2Calls = 0; + let myObject2TransactionalSession = null; + Parse.Cloud.beforeSave('MyObject2', ({ config }) => { + myObject2Calls++; + expect(config.database._transactionalSession).not.toEqual(null); + if (myObject2TransactionalSession) { + expect(config.database._transactionalSession).toBe( + myObject2TransactionalSession + ); + } else { + myObject2TransactionalSession = config.database._transactionalSession; + } + + if (myObject2Calls === 1) { + return request({ + method: 'POST', + headers: headers, + url: 'http://localhost:8378/1/batch', + body: JSON.stringify({ + requests: [ + { + method: 'POST', + path: '/1/classes/MyObject3', + body: { key: 'value1' }, + }, + { + method: 'POST', + path: '/1/classes/MyObject3', + body: { key: 'value2' }, + }, + ], + }), + }).then(() => Promise.resolve()); + } + }); + + let myObject3Calls = 0; + Parse.Cloud.beforeSave('MyObject3', ({ config }) => { + myObject3Calls++; + expect(config.database._transactionalSession).toEqual(null); + }); + + request({ + method: 'POST', + headers: headers, + url: 'http://localhost:8378/1/batch', + body: JSON.stringify({ + requests: [ + { + method: 'POST', + path: '/1/classes/MyObject', + body: { key: 'value1' }, + }, + { + method: 'POST', + path: '/1/classes/MyObject', + body: { key: 'value2' }, + }, + ], + transaction: true, + }), + }).then(() => { + const query = new Parse.Query('MyObject'); + query.find().then(results => { + expect(myObjectCalls).toBe(2); + expect(results.map(result => result.get('key')).sort()).toEqual([ + 'value1', + 'value2', + ]); + const query = new Parse.Query('MyObject2'); + query.find().then(results => { + expect(myObject2Calls).toBe(2); + expect(results.map(result => result.get('key')).sort()).toEqual([ + 'value1', + 'value2', + ]); + const query = new Parse.Query('MyObject3'); + query.find().then(results => { + expect(myObject3Calls).toBe(2); + expect(results.map(result => result.get('key')).sort()).toEqual([ + 'value1', + 'value2', + ]); + done(); + }); + }); + }); + }); + }); }); diff --git a/src/triggers.js b/src/triggers.js index c0a94c5edd..73829d6920 100644 --- a/src/triggers.js +++ b/src/triggers.js @@ -179,6 +179,7 @@ export function 
getRequestObject( log: config.loggerController, headers: config.headers, ip: config.ip, + config: config, }; if (originalParseObject) { From c99fe13d2904475da41a46094f21b95f1221ccb7 Mon Sep 17 00:00:00 2001 From: = Date: Wed, 24 Jul 2019 14:32:12 -0700 Subject: [PATCH 04/24] Transactions - partial --- spec/GridFSBucketStorageAdapter.spec.js | 2 +- spec/batch.spec.js | 307 ++++++++++-------- src/Adapters/Storage/Mongo/MongoCollection.js | 19 +- .../Storage/Mongo/MongoStorageAdapter.js | 34 +- .../Postgres/PostgresStorageAdapter.js | 40 ++- src/Adapters/Storage/StorageAdapter.js | 15 +- src/Controllers/DatabaseController.js | 21 +- src/batch.js | 16 +- src/triggers.js | 2 +- 9 files changed, 271 insertions(+), 185 deletions(-) diff --git a/spec/GridFSBucketStorageAdapter.spec.js b/spec/GridFSBucketStorageAdapter.spec.js index 7baae8d03a..6ed4a92ed0 100644 --- a/spec/GridFSBucketStorageAdapter.spec.js +++ b/spec/GridFSBucketStorageAdapter.spec.js @@ -18,7 +18,7 @@ describe('GridFSBucket and GridStore interop', () => { beforeEach(async () => { const gsAdapter = new GridStoreAdapter(databaseURI); const db = await gsAdapter._connect(); - db.dropDatabase(); + await db.dropDatabase(); }); it('a file created in GridStore should be available in GridFS', async () => { diff --git a/spec/batch.spec.js b/spec/batch.spec.js index c352c1faa3..406bed7f41 100644 --- a/spec/batch.spec.js +++ b/spec/batch.spec.js @@ -1,5 +1,6 @@ const batch = require('../lib/batch'); const request = require('../lib/request'); +const TestUtils = require('../lib/TestUtils'); const originalURL = '/parse/batch'; const serverURL = 'http://localhost:1234/parse'; @@ -70,9 +71,9 @@ describe('batch', () => { it('should handle a batch request without transaction', done => { let calls = 0; - Parse.Cloud.beforeSave('MyObject', ({ config }) => { + Parse.Cloud.beforeSave('MyObject', ({ database }) => { calls++; - expect(config.database._transactionalSession).toEqual(null); + expect(database._transactionalSession).toEqual(null); }); request({ @@ -113,9 +114,9 @@ describe('batch', () => { it('should handle a batch request with transaction = false', done => { let calls = 0; - Parse.Cloud.beforeSave('MyObject', ({ config }) => { + Parse.Cloud.beforeSave('MyObject', ({ database }) => { calls++; - expect(config.database._transactionalSession).toEqual(null); + expect(database._transactionalSession).toEqual(null); }); request({ @@ -155,75 +156,33 @@ describe('batch', () => { }); }); - it('should handle a batch request with transaction = true', done => { - let calls = 0; - let transactionalSession = null; - Parse.Cloud.beforeSave('MyObject', ({ config }) => { - calls++; - expect(config.database._transactionalSession).not.toEqual(null); - if (transactionalSession) { - expect(config.database._transactionalSession).toBe( - transactionalSession - ); - } else { - transactionalSession = config.database._transactionalSession; - } - }); - - request({ - method: 'POST', - headers: headers, - url: 'http://localhost:8378/1/batch', - body: JSON.stringify({ - requests: [ - { - method: 'POST', - path: '/1/classes/MyObject', - body: { key: 'value1' }, - }, - { - method: 'POST', - path: '/1/classes/MyObject', - body: { key: 'value2' }, - }, - ], - transaction: true, - }), - }).then(response => { - expect(response.data.length).toEqual(2); - expect(response.data[0].success.objectId).toBeDefined(); - expect(response.data[0].success.createdAt).toBeDefined(); - expect(response.data[1].success.objectId).toBeDefined(); - 
expect(response.data[1].success.createdAt).toBeDefined(); - const query = new Parse.Query('MyObject'); - query.find().then(results => { - expect(calls).toBe(2); - expect(results.map(result => result.get('key')).sort()).toEqual([ - 'value1', - 'value2', - ]); - done(); + if (process.env.PARSE_SERVER_TEST_DATABASE_URI_TRANSACTIONS) { + describe('transactions', () => { + beforeAll(async () => { + await reconfigureServer({ + databaseAdapter: undefined, + databaseURI: process.env.PARSE_SERVER_TEST_DATABASE_URI_TRANSACTIONS, + }); }); - }); - }); - it('should generate separate session for each call', done => { - let myObjectCalls = 0; - let myObjectTransactionalSession = null; + beforeEach(async () => { + await TestUtils.destroyAllDataPermanently(true); + }); - Parse.Cloud.beforeSave('MyObject', ({ config }) => { - myObjectCalls++; - expect(config.database._transactionalSession).not.toEqual(null); - if (myObjectTransactionalSession) { - expect(config.database._transactionalSession).toBe( - myObjectTransactionalSession - ); - } else { - myObjectTransactionalSession = config.database._transactionalSession; - } + it('should handle a batch request with transaction = true', done => { + let calls = 0; + let transactionalSession = null; + Parse.Cloud.beforeSave('MyObject', ({ database }) => { + calls++; + expect(database._transactionalSession).not.toEqual(null); + if (transactionalSession) { + expect(database._transactionalSession).toBe(transactionalSession); + } else { + transactionalSession = database._transactionalSession; + } + }); - if (myObjectCalls === 1) { - return request({ + request({ method: 'POST', headers: headers, url: 'http://localhost:8378/1/batch', @@ -231,36 +190,134 @@ describe('batch', () => { requests: [ { method: 'POST', - path: '/1/classes/MyObject2', + path: '/1/classes/MyObject', body: { key: 'value1' }, }, { method: 'POST', - path: '/1/classes/MyObject2', + path: '/1/classes/MyObject', body: { key: 'value2' }, }, ], transaction: true, }), - }).then(() => Promise.resolve()); - } - }); + }).then(response => { + expect(response.data.length).toEqual(2); + expect(response.data[0].success.objectId).toBeDefined(); + expect(response.data[0].success.createdAt).toBeDefined(); + expect(response.data[1].success.objectId).toBeDefined(); + expect(response.data[1].success.createdAt).toBeDefined(); + const query = new Parse.Query('MyObject'); + query.find().then(results => { + expect(calls).toBe(2); + expect(results.map(result => result.get('key')).sort()).toEqual([ + 'value1', + 'value2', + ]); + done(); + }); + }); + }); + + it('should generate separate session for each call', done => { + let myObjectCalls = 0; + //let myObjectTransactionalSession = null; + + Parse.Cloud.afterSave('MyObject', () => { + console.log(1); + }); + Parse.Cloud.afterSave('MyObject2', () => { + console.log(2); + }); + Parse.Cloud.afterSave('MyObject3', () => { + console.log(3); + }); - let myObject2Calls = 0; - let myObject2TransactionalSession = null; - Parse.Cloud.beforeSave('MyObject2', ({ config }) => { - myObject2Calls++; - expect(config.database._transactionalSession).not.toEqual(null); - if (myObject2TransactionalSession) { - expect(config.database._transactionalSession).toBe( - myObject2TransactionalSession - ); - } else { - myObject2TransactionalSession = config.database._transactionalSession; - } + Parse.Cloud.beforeSave('MyObject', (/*{ database }*/) => { + myObjectCalls++; + // expect(database._transactionalSession).not.toEqual(null); + // if (myObjectTransactionalSession) { + // 
expect(database._transactionalSession).toBe( + // myObjectTransactionalSession + // ); + // } else { + // myObjectTransactionalSession = + // database._transactionalSession; + // } - if (myObject2Calls === 1) { - return request({ + if (myObjectCalls === 1) { + return request({ + method: 'POST', + headers: headers, + url: 'http://localhost:8378/1/batch', + body: JSON.stringify({ + requests: [ + { + method: 'POST', + path: '/1/classes/MyObject2', + body: { key: 'value1' }, + }, + { + method: 'POST', + path: '/1/classes/MyObject2', + body: { key: 'value2' }, + }, + ], + transaction: false, + }), + }).then(response => { + console.log(response.data[0].error); + return Promise.resolve(); + }); + } + }); + + let myObject2Calls = 0; + //let myObject2TransactionalSession = null; + Parse.Cloud.beforeSave('MyObject2', (/*{ database }*/) => { + myObject2Calls++; + // expect(database._transactionalSession).not.toEqual(null); + // if (myObject2TransactionalSession) { + // expect(database._transactionalSession).toBe( + // myObject2TransactionalSession + // ); + // } else { + // myObject2TransactionalSession = + // database._transactionalSession; + // } + if (myObject2Calls === 1) { + return request({ + method: 'POST', + headers: headers, + url: 'http://localhost:8378/1/batch', + body: JSON.stringify({ + requests: [ + { + method: 'POST', + path: '/1/classes/MyObject3', + body: { key: 'value1' }, + }, + { + method: 'POST', + path: '/1/classes/MyObject3', + body: { key: 'value2' }, + }, + ], + }), + }).then(response => { + console.log(response.data); + return Promise.resolve(); + }); + } + }); + + let myObject3Calls = 0; + Parse.Cloud.beforeSave('MyObject3', ({ database }) => { + myObject3Calls++; + expect(database._transactionalSession).toEqual(null); + }); + + request({ method: 'POST', headers: headers, url: 'http://localhost:8378/1/batch', @@ -268,71 +325,45 @@ describe('batch', () => { requests: [ { method: 'POST', - path: '/1/classes/MyObject3', + path: '/1/classes/MyObject', body: { key: 'value1' }, }, { method: 'POST', - path: '/1/classes/MyObject3', + path: '/1/classes/MyObject', body: { key: 'value2' }, }, ], + transaction: true, }), - }).then(() => Promise.resolve()); - } - }); - - let myObject3Calls = 0; - Parse.Cloud.beforeSave('MyObject3', ({ config }) => { - myObject3Calls++; - expect(config.database._transactionalSession).toEqual(null); - }); - - request({ - method: 'POST', - headers: headers, - url: 'http://localhost:8378/1/batch', - body: JSON.stringify({ - requests: [ - { - method: 'POST', - path: '/1/classes/MyObject', - body: { key: 'value1' }, - }, - { - method: 'POST', - path: '/1/classes/MyObject', - body: { key: 'value2' }, - }, - ], - transaction: true, - }), - }).then(() => { - const query = new Parse.Query('MyObject'); - query.find().then(results => { - expect(myObjectCalls).toBe(2); - expect(results.map(result => result.get('key')).sort()).toEqual([ - 'value1', - 'value2', - ]); - const query = new Parse.Query('MyObject2'); - query.find().then(results => { - expect(myObject2Calls).toBe(2); - expect(results.map(result => result.get('key')).sort()).toEqual([ - 'value1', - 'value2', - ]); - const query = new Parse.Query('MyObject3'); + }).then(response => { + console.log(response.data); + const query = new Parse.Query('MyObject'); query.find().then(results => { - expect(myObject3Calls).toBe(2); + expect(myObjectCalls).toBe(2); expect(results.map(result => result.get('key')).sort()).toEqual([ 'value1', 'value2', ]); - done(); + const query = new Parse.Query('MyObject2'); + 
query.find().then(results => { + expect(myObject2Calls).toBe(2); + expect(results.map(result => result.get('key')).sort()).toEqual([ + 'value1', + 'value2', + ]); + const query = new Parse.Query('MyObject3'); + query.find().then(results => { + expect(myObject3Calls).toBe(2); + expect(results.map(result => result.get('key')).sort()).toEqual( + ['value1', 'value2'] + ); + done(); + }); + }); }); }); }); }); - }); + } }); diff --git a/src/Adapters/Storage/Mongo/MongoCollection.js b/src/Adapters/Storage/Mongo/MongoCollection.js index 50e7a41123..91c28b407d 100644 --- a/src/Adapters/Storage/Mongo/MongoCollection.js +++ b/src/Adapters/Storage/Mongo/MongoCollection.js @@ -111,27 +111,30 @@ export default class MongoCollection { .toArray(); } - insertOne(object) { - return this._mongoCollection.insertOne(object); + insertOne(object, session) { + return this._mongoCollection.insertOne(object, { session }); } // Atomically updates data in the database for a single (first) object that matched the query // If there is nothing that matches the query - does insert // Postgres Note: `INSERT ... ON CONFLICT UPDATE` that is available since 9.5. - upsertOne(query, update) { - return this._mongoCollection.updateOne(query, update, { upsert: true }); + upsertOne(query, update, session) { + return this._mongoCollection.updateOne(query, update, { + upsert: true, + session, + }); } updateOne(query, update) { return this._mongoCollection.updateOne(query, update); } - updateMany(query, update) { - return this._mongoCollection.updateMany(query, update); + updateMany(query, update, session) { + return this._mongoCollection.updateMany(query, update, { session }); } - deleteMany(query) { - return this._mongoCollection.deleteMany(query); + deleteMany(query, session) { + return this._mongoCollection.deleteMany(query, { session }); } _ensureSparseUniqueIndexInBackground(indexRequest) { diff --git a/src/Adapters/Storage/Mongo/MongoStorageAdapter.js b/src/Adapters/Storage/Mongo/MongoStorageAdapter.js index f42fd69101..599f3e9b4e 100644 --- a/src/Adapters/Storage/Mongo/MongoStorageAdapter.js +++ b/src/Adapters/Storage/Mongo/MongoStorageAdapter.js @@ -461,7 +461,12 @@ export class MongoStorageAdapter implements StorageAdapter { // TODO: As yet not particularly well specified. Creates an object. Maybe shouldn't even need the schema, // and should infer from the type. Or maybe does need the schema for validations. Or maybe needs // the schema only for the legacy mongo format. We'll figure that out later. 
- createObject(className: string, schema: SchemaType, object: any) { + createObject( + className: string, + schema: SchemaType, + object: any, + transactionalSession: ?any + ) { schema = convertParseSchemaToMongoSchema(schema); const mongoObject = parseObjectToMongoObjectForCreate( className, @@ -469,7 +474,9 @@ export class MongoStorageAdapter implements StorageAdapter { schema ); return this._adaptiveCollection(className) - .then(collection => collection.insertOne(mongoObject)) + .then(collection => + collection.insertOne(mongoObject, transactionalSession) + ) .catch(error => { if (error.code === 11000) { // Duplicate value @@ -499,13 +506,14 @@ export class MongoStorageAdapter implements StorageAdapter { deleteObjectsByQuery( className: string, schema: SchemaType, - query: QueryType + query: QueryType, + transactionalSession: ?any ) { schema = convertParseSchemaToMongoSchema(schema); return this._adaptiveCollection(className) .then(collection => { const mongoWhere = transformWhere(className, query, schema); - return collection.deleteMany(mongoWhere); + return collection.deleteMany(mongoWhere, transactionalSession); }) .catch(err => this.handleError(err)) .then( @@ -532,13 +540,16 @@ export class MongoStorageAdapter implements StorageAdapter { className: string, schema: SchemaType, query: QueryType, - update: any + update: any, + transactionalSession: ?any ) { schema = convertParseSchemaToMongoSchema(schema); const mongoUpdate = transformUpdate(className, update, schema); const mongoWhere = transformWhere(className, query, schema); return this._adaptiveCollection(className) - .then(collection => collection.updateMany(mongoWhere, mongoUpdate)) + .then(collection => + collection.updateMany(mongoWhere, mongoUpdate, transactionalSession) + ) .catch(err => this.handleError(err)); } @@ -548,7 +559,8 @@ export class MongoStorageAdapter implements StorageAdapter { className: string, schema: SchemaType, query: QueryType, - update: any + update: any, + transactionalSession: ?any ) { schema = convertParseSchemaToMongoSchema(schema); const mongoUpdate = transformUpdate(className, update, schema); @@ -557,6 +569,7 @@ export class MongoStorageAdapter implements StorageAdapter { .then(collection => collection._mongoCollection.findOneAndUpdate(mongoWhere, mongoUpdate, { returnOriginal: false, + session: transactionalSession || undefined, }) ) .then(result => mongoObjectToParseObject(className, result.value, schema)) @@ -577,13 +590,16 @@ export class MongoStorageAdapter implements StorageAdapter { className: string, schema: SchemaType, query: QueryType, - update: any + update: any, + transactionalSession: ?any ) { schema = convertParseSchemaToMongoSchema(schema); const mongoUpdate = transformUpdate(className, update, schema); const mongoWhere = transformWhere(className, query, schema); return this._adaptiveCollection(className) - .then(collection => collection.upsertOne(mongoWhere, mongoUpdate)) + .then(collection => + collection.upsertOne(mongoWhere, mongoUpdate, transactionalSession) + ) .catch(err => this.handleError(err)); } diff --git a/src/Adapters/Storage/Postgres/PostgresStorageAdapter.js b/src/Adapters/Storage/Postgres/PostgresStorageAdapter.js index 513de31158..7ee336862c 100644 --- a/src/Adapters/Storage/Postgres/PostgresStorageAdapter.js +++ b/src/Adapters/Storage/Postgres/PostgresStorageAdapter.js @@ -1230,7 +1230,12 @@ export class PostgresStorageAdapter implements StorageAdapter { } // TODO: remove the mongo format dependency in the return value - createObject(className: string, schema: 
SchemaType, object: any) { + createObject( + className: string, + schema: SchemaType, + object: any + /* transactionalSession: ?any */ + ) { debug('createObject', className, object); let columnsArray = []; const valuesArray = []; @@ -1387,6 +1392,7 @@ export class PostgresStorageAdapter implements StorageAdapter { className: string, schema: SchemaType, query: QueryType + /* transactionalSession: ?any */ ) { debug('deleteObjectsByQuery', className, query); const values = [className]; @@ -1422,12 +1428,17 @@ export class PostgresStorageAdapter implements StorageAdapter { className: string, schema: SchemaType, query: QueryType, - update: any + update: any, + transactionalSession: ?any ): Promise { debug('findOneAndUpdate', className, query, update); - return this.updateObjectsByQuery(className, schema, query, update).then( - val => val[0] - ); + return this.updateObjectsByQuery( + className, + schema, + query, + update, + transactionalSession + ).then(val => val[0]); } // Apply the update to all objects that match the given Parse Query. @@ -1436,6 +1447,7 @@ export class PostgresStorageAdapter implements StorageAdapter { schema: SchemaType, query: QueryType, update: any + /* transactionalSession: ?any */ ): Promise<[any]> { debug('updateObjectsByQuery', className, query, update); const updatePatterns = []; @@ -1700,16 +1712,28 @@ export class PostgresStorageAdapter implements StorageAdapter { className: string, schema: SchemaType, query: QueryType, - update: any + update: any, + transactionalSession: ?any ) { debug('upsertOneObject', { className, query, update }); const createValue = Object.assign({}, query, update); - return this.createObject(className, schema, createValue).catch(error => { + return this.createObject( + className, + schema, + createValue, + transactionalSession + ).catch(error => { // ignore duplicate value errors as it's upsert if (error.code !== Parse.Error.DUPLICATE_VALUE) { throw error; } - return this.findOneAndUpdate(className, schema, query, update); + return this.findOneAndUpdate( + className, + schema, + query, + update, + transactionalSession + ); }); } diff --git a/src/Adapters/Storage/StorageAdapter.js b/src/Adapters/Storage/StorageAdapter.js index 3e446cf25c..6de3ea3cbd 100644 --- a/src/Adapters/Storage/StorageAdapter.js +++ b/src/Adapters/Storage/StorageAdapter.js @@ -46,30 +46,35 @@ export interface StorageAdapter { createObject( className: string, schema: SchemaType, - object: any + object: any, + transactionalSession: ?any ): Promise; deleteObjectsByQuery( className: string, schema: SchemaType, - query: QueryType + query: QueryType, + transactionalSession: ?any ): Promise; updateObjectsByQuery( className: string, schema: SchemaType, query: QueryType, - update: any + update: any, + transactionalSession: ?any ): Promise<[any]>; findOneAndUpdate( className: string, schema: SchemaType, query: QueryType, - update: any + update: any, + transactionalSession: ?any ): Promise; upsertOneObject( className: string, schema: SchemaType, query: QueryType, - update: any + update: any, + transactionalSession: ?any ): Promise; find( className: string, diff --git a/src/Controllers/DatabaseController.js b/src/Controllers/DatabaseController.js index 3dc989bc4b..0dc89c412a 100644 --- a/src/Controllers/DatabaseController.js +++ b/src/Controllers/DatabaseController.js @@ -626,21 +626,24 @@ class DatabaseController { className, schema, query, - update + update, + this._transactionalSession ); } else if (upsert) { return this.adapter.upsertOneObject( className, schema, query, - 
update + update, + this._transactionalSession ); } else { return this.adapter.findOneAndUpdate( className, schema, query, - update + update, + this._transactionalSession ); } }); @@ -762,7 +765,8 @@ class DatabaseController { `_Join:${key}:${fromClassName}`, relationSchema, doc, - doc + doc, + this._transactionalSession ); } @@ -783,7 +787,8 @@ class DatabaseController { .deleteObjectsByQuery( `_Join:${key}:${fromClassName}`, relationSchema, - doc + doc, + this._transactionalSession ) .catch(error => { // We don't care if they try to delete a non-existent relation. @@ -850,7 +855,8 @@ class DatabaseController { this.adapter.deleteObjectsByQuery( className, parseFormatSchema, - query + query, + this._transactionalSession ) ) .catch(error => { @@ -910,7 +916,8 @@ class DatabaseController { return this.adapter.createObject( className, SchemaController.convertSchemaToAdapterSchema(schema), - object + object, + this._transactionalSession ); }) .then(result => { diff --git a/src/batch.js b/src/batch.js index ba0e92789d..c081e5cafe 100644 --- a/src/batch.js +++ b/src/batch.js @@ -113,22 +113,22 @@ function handleBatch(router, req) { }); return Promise.all(promises) - .then(results => { + .catch(error => { if (req.body.transaction) { - return req.config.database.commitTransactionalSession().then(() => { - return { response: results }; + return req.config.database.abortTransactionalSession().then(() => { + throw error; }); } else { - return { response: results }; + throw error; } }) - .catch(error => { + .then(results => { if (req.body.transaction) { - return req.config.database.abortTransactionalSession().then(() => { - throw error; + return req.config.database.commitTransactionalSession().then(() => { + return { response: results }; }); } else { - throw error; + return { response: results }; } }); }); diff --git a/src/triggers.js b/src/triggers.js index 73829d6920..7eefcd383e 100644 --- a/src/triggers.js +++ b/src/triggers.js @@ -179,7 +179,7 @@ export function getRequestObject( log: config.loggerController, headers: config.headers, ip: config.ip, - config: config, + database: config.database, }; if (originalParseObject) { From 863b23511cc52336426b61c70f1664fed673942c Mon Sep 17 00:00:00 2001 From: = Date: Thu, 25 Jul 2019 18:33:03 -0700 Subject: [PATCH 05/24] Missing only one test --- spec/.eslintrc.json | 3 +- spec/batch.spec.js | 69 +++++++++++++++++++++++++++++---------------- spec/helper.js | 1 + src/batch.js | 3 ++ src/triggers.js | 1 - 5 files changed, 50 insertions(+), 27 deletions(-) diff --git a/spec/.eslintrc.json b/spec/.eslintrc.json index e11949b773..7031e96d6f 100644 --- a/spec/.eslintrc.json +++ b/spec/.eslintrc.json @@ -25,7 +25,8 @@ "jequal": true, "create": true, "arrayContains": true, - "expectAsync": true + "expectAsync": true, + "databaseAdapter": true }, "rules": { "no-console": [0], diff --git a/spec/batch.spec.js b/spec/batch.spec.js index 406bed7f41..14418afee2 100644 --- a/spec/batch.spec.js +++ b/spec/batch.spec.js @@ -70,11 +70,7 @@ describe('batch', () => { }); it('should handle a batch request without transaction', done => { - let calls = 0; - Parse.Cloud.beforeSave('MyObject', ({ database }) => { - calls++; - expect(database._transactionalSession).toEqual(null); - }); + spyOn(databaseAdapter, 'createObject').and.callThrough(); request({ method: 'POST', @@ -102,7 +98,9 @@ describe('batch', () => { expect(response.data[1].success.createdAt).toBeDefined(); const query = new Parse.Query('MyObject'); query.find().then(results => { - expect(calls).toBe(2); + 
expect(databaseAdapter.createObject.calls.count()).toBe(2); + expect(databaseAdapter.createObject.calls.argsFor(0)[3]).toEqual(null); + expect(databaseAdapter.createObject.calls.argsFor(1)[3]).toEqual(null); expect(results.map(result => result.get('key')).sort()).toEqual([ 'value1', 'value2', @@ -113,11 +111,7 @@ describe('batch', () => { }); it('should handle a batch request with transaction = false', done => { - let calls = 0; - Parse.Cloud.beforeSave('MyObject', ({ database }) => { - calls++; - expect(database._transactionalSession).toEqual(null); - }); + spyOn(databaseAdapter, 'createObject').and.callThrough(); request({ method: 'POST', @@ -146,7 +140,9 @@ describe('batch', () => { expect(response.data[1].success.createdAt).toBeDefined(); const query = new Parse.Query('MyObject'); query.find().then(results => { - expect(calls).toBe(2); + expect(databaseAdapter.createObject.calls.count()).toBe(2); + expect(databaseAdapter.createObject.calls.argsFor(0)[3]).toEqual(null); + expect(databaseAdapter.createObject.calls.argsFor(1)[3]).toEqual(null); expect(results.map(result => result.get('key')).sort()).toEqual([ 'value1', 'value2', @@ -170,17 +166,7 @@ describe('batch', () => { }); it('should handle a batch request with transaction = true', done => { - let calls = 0; - let transactionalSession = null; - Parse.Cloud.beforeSave('MyObject', ({ database }) => { - calls++; - expect(database._transactionalSession).not.toEqual(null); - if (transactionalSession) { - expect(database._transactionalSession).toBe(transactionalSession); - } else { - transactionalSession = database._transactionalSession; - } - }); + spyOn(databaseAdapter, 'createObject').and.callThrough(); request({ method: 'POST', @@ -209,7 +195,10 @@ describe('batch', () => { expect(response.data[1].success.createdAt).toBeDefined(); const query = new Parse.Query('MyObject'); query.find().then(results => { - expect(calls).toBe(2); + expect(databaseAdapter.createObject.calls.count()).toBe(2); + expect(databaseAdapter.createObject.calls.argsFor(0)[3]).toBe( + databaseAdapter.createObject.calls.argsFor(1)[3] + ); expect(results.map(result => result.get('key')).sort()).toEqual([ 'value1', 'value2', @@ -219,7 +208,37 @@ describe('batch', () => { }); }); - it('should generate separate session for each call', done => { + it('should not save anything when one operation fails in a transaction', done => { + request({ + method: 'POST', + headers: headers, + url: 'http://localhost:8378/1/batch', + body: JSON.stringify({ + requests: [ + { + method: 'POST', + path: '/1/classes/MyObject', + body: { key: 'value1' }, + }, + { + method: 'POST', + path: '/1/classes/MyObject', + body: { key: 10 }, + }, + ], + transaction: true, + }), + }).catch(error => { + expect(error.data.error).toEqual('Could not add field key'); + const query = new Parse.Query('MyObject'); + query.find().then(results => { + expect(results.length).toBe(0); + done(); + }); + }); + }); + + xit('should generate separate session for each call', done => { let myObjectCalls = 0; //let myObjectTransactionalSession = null; diff --git a/spec/helper.js b/spec/helper.js index 93801c3a0d..f5dcc70438 100644 --- a/spec/helper.js +++ b/spec/helper.js @@ -417,6 +417,7 @@ global.reconfigureServer = reconfigureServer; global.defaultConfiguration = defaultConfiguration; global.mockCustomAuthenticator = mockCustomAuthenticator; global.mockFacebookAuthenticator = mockFacebookAuthenticator; +global.databaseAdapter = databaseAdapter; global.jfail = function(err) { fail(JSON.stringify(err)); }; diff --git 
a/src/batch.js b/src/batch.js index c081e5cafe..7367da1303 100644 --- a/src/batch.js +++ b/src/batch.js @@ -107,6 +107,9 @@ function handleBatch(router, req) { return { success: response.response }; }, error => { + if (req.body.transaction) { + return Promise.reject(error); + } return { error: { code: error.code, error: error.message } }; } ); diff --git a/src/triggers.js b/src/triggers.js index 681ac3c21c..f2917ce6fd 100644 --- a/src/triggers.js +++ b/src/triggers.js @@ -179,7 +179,6 @@ export function getRequestObject( log: config.loggerController, headers: config.headers, ip: config.ip, - database: config.database, }; if (originalParseObject) { From e1f7caf6092be2c294c4ddd50027623ac33e915e Mon Sep 17 00:00:00 2001 From: = Date: Thu, 25 Jul 2019 19:44:53 -0700 Subject: [PATCH 06/24] All tests passing for mongo db --- spec/batch.spec.js | 194 ++++++++++++++++++++++----------------------- 1 file changed, 96 insertions(+), 98 deletions(-) diff --git a/spec/batch.spec.js b/spec/batch.spec.js index 14418afee2..402363ae39 100644 --- a/spec/batch.spec.js +++ b/spec/batch.spec.js @@ -229,7 +229,7 @@ describe('batch', () => { transaction: true, }), }).catch(error => { - expect(error.data.error).toEqual('Could not add field key'); + expect(error.data).toBeDefined(); const query = new Parse.Query('MyObject'); query.find().then(results => { expect(results.length).toBe(0); @@ -238,74 +238,46 @@ describe('batch', () => { }); }); - xit('should generate separate session for each call', done => { - let myObjectCalls = 0; - //let myObjectTransactionalSession = null; - - Parse.Cloud.afterSave('MyObject', () => { - console.log(1); - }); - Parse.Cloud.afterSave('MyObject2', () => { - console.log(2); - }); - Parse.Cloud.afterSave('MyObject3', () => { - console.log(3); - }); + it('should generate separate session for each call', async () => { + spyOn(databaseAdapter, 'createObject').and.callThrough(); - Parse.Cloud.beforeSave('MyObject', (/*{ database }*/) => { + let myObjectCalls = 0; + Parse.Cloud.beforeSave('MyObject', async () => { myObjectCalls++; - // expect(database._transactionalSession).not.toEqual(null); - // if (myObjectTransactionalSession) { - // expect(database._transactionalSession).toBe( - // myObjectTransactionalSession - // ); - // } else { - // myObjectTransactionalSession = - // database._transactionalSession; - // } - - if (myObjectCalls === 1) { - return request({ - method: 'POST', - headers: headers, - url: 'http://localhost:8378/1/batch', - body: JSON.stringify({ - requests: [ - { - method: 'POST', - path: '/1/classes/MyObject2', - body: { key: 'value1' }, - }, - { - method: 'POST', - path: '/1/classes/MyObject2', - body: { key: 'value2' }, - }, - ], - transaction: false, - }), - }).then(response => { - console.log(response.data[0].error); - return Promise.resolve(); - }); + if (myObjectCalls === 2) { + try { + await request({ + method: 'POST', + headers: headers, + url: 'http://localhost:8378/1/batch', + body: JSON.stringify({ + requests: [ + { + method: 'POST', + path: '/1/classes/MyObject2', + body: { key: 'value1' }, + }, + { + method: 'POST', + path: '/1/classes/MyObject2', + body: { key: 10 }, + }, + ], + transaction: true, + }), + }); + fail('should fail'); + } catch (e) { + expect(e).toBeDefined(); + } } }); let myObject2Calls = 0; - //let myObject2TransactionalSession = null; - Parse.Cloud.beforeSave('MyObject2', (/*{ database }*/) => { + Parse.Cloud.beforeSave('MyObject2', async () => { myObject2Calls++; - // expect(database._transactionalSession).not.toEqual(null); - // if 
(myObject2TransactionalSession) { - // expect(database._transactionalSession).toBe( - // myObject2TransactionalSession - // ); - // } else { - // myObject2TransactionalSession = - // database._transactionalSession; - // } - if (myObject2Calls === 1) { - return request({ + if (myObject2Calls === 2) { + await request({ method: 'POST', headers: headers, url: 'http://localhost:8378/1/batch', @@ -323,20 +295,11 @@ describe('batch', () => { }, ], }), - }).then(response => { - console.log(response.data); - return Promise.resolve(); }); } }); - let myObject3Calls = 0; - Parse.Cloud.beforeSave('MyObject3', ({ database }) => { - myObject3Calls++; - expect(database._transactionalSession).toEqual(null); - }); - - request({ + const response = await request({ method: 'POST', headers: headers, url: 'http://localhost:8378/1/batch', @@ -355,33 +318,68 @@ describe('batch', () => { ], transaction: true, }), - }).then(response => { - console.log(response.data); - const query = new Parse.Query('MyObject'); - query.find().then(results => { - expect(myObjectCalls).toBe(2); - expect(results.map(result => result.get('key')).sort()).toEqual([ - 'value1', - 'value2', - ]); - const query = new Parse.Query('MyObject2'); - query.find().then(results => { - expect(myObject2Calls).toBe(2); - expect(results.map(result => result.get('key')).sort()).toEqual([ - 'value1', - 'value2', - ]); - const query = new Parse.Query('MyObject3'); - query.find().then(results => { - expect(myObject3Calls).toBe(2); - expect(results.map(result => result.get('key')).sort()).toEqual( - ['value1', 'value2'] - ); - done(); - }); - }); - }); }); + + expect(response.data.length).toEqual(2); + expect(response.data[0].success.objectId).toBeDefined(); + expect(response.data[0].success.createdAt).toBeDefined(); + expect(response.data[1].success.objectId).toBeDefined(); + expect(response.data[1].success.createdAt).toBeDefined(); + + const query = new Parse.Query('MyObject'); + const results = await query.find(); + expect(results.map(result => result.get('key')).sort()).toEqual([ + 'value1', + 'value2', + ]); + + const query2 = new Parse.Query('MyObject2'); + const results2 = await query2.find(); + expect(results2.length).toEqual(0); + + const query3 = new Parse.Query('MyObject3'); + const results3 = await query3.find(); + expect(results3.map(result => result.get('key')).sort()).toEqual([ + 'value1', + 'value2', + ]); + + expect(databaseAdapter.createObject.calls.count()).toBe(5); + let transactionalSession; + let transactionalSession2; + let myObjectDBCalls = 0; + let myObject2DBCalls = 0; + let myObject3DBCalls = 0; + for (let i = 0; i < 5; i++) { + const args = databaseAdapter.createObject.calls.argsFor(i); + switch (args[0]) { + case 'MyObject': + myObjectDBCalls++; + if (!transactionalSession) { + transactionalSession = args[3]; + } else { + expect(transactionalSession).toBe(args[3]); + } + if (transactionalSession2) { + expect(transactionalSession2).not.toBe(args[3]); + } + break; + case 'MyObject2': + myObject2DBCalls++; + transactionalSession2 = args[3]; + if (transactionalSession) { + expect(transactionalSession).not.toBe(args[3]); + } + break; + case 'MyObject3': + myObject3DBCalls++; + expect(args[3]).toEqual(null); + break; + } + } + expect(myObjectDBCalls).toEqual(2); + expect(myObject2DBCalls).toEqual(1); + expect(myObject3DBCalls).toEqual(2); }); }); } From b6b5cc7bd571806a129a67cb6b085e77438cf1a6 Mon Sep 17 00:00:00 2001 From: = Date: Thu, 25 Jul 2019 20:01:17 -0700 Subject: [PATCH 07/24] Tests on Travis --- .travis.yml | 5 +++++ 
spec/batch.spec.js | 20 +++++++++++++++----- 2 files changed, 20 insertions(+), 5 deletions(-) diff --git a/.travis.yml b/.travis.yml index d44f41c8e1..8334640e5b 100644 --- a/.travis.yml +++ b/.travis.yml @@ -38,6 +38,11 @@ before_install: - nvm use $NODE_VERSION - npm install -g greenkeeper-lockfile@1 before_script: +- echo "replSet = rs0" | sudo tee -a /etc/mongodb.conf +- sudo service mongodb restart +- sleep 20 +- mongo --eval 'rs.initiate()' +- sleep 20 - node -e 'require("./lib/index.js")' - psql -c 'create database parse_server_postgres_adapter_test_database;' -U postgres - psql -c 'CREATE EXTENSION postgis;' -U postgres -d parse_server_postgres_adapter_test_database diff --git a/spec/batch.spec.js b/spec/batch.spec.js index 402363ae39..6ff208e2b7 100644 --- a/spec/batch.spec.js +++ b/spec/batch.spec.js @@ -152,13 +152,19 @@ describe('batch', () => { }); }); - if (process.env.PARSE_SERVER_TEST_DATABASE_URI_TRANSACTIONS) { + if ( + process.env.MONGODB_VERSION === '4.0.4' || + process.env.PARSE_SERVER_TEST_DB === 'postgres' + ) { describe('transactions', () => { beforeAll(async () => { - await reconfigureServer({ - databaseAdapter: undefined, - databaseURI: process.env.PARSE_SERVER_TEST_DATABASE_URI_TRANSACTIONS, - }); + if (process.env.MONGODB_VERSION === '4.0.4') { + await reconfigureServer({ + databaseAdapter: undefined, + databaseURI: + 'mongodb://localhost:27017/parseServerMongoAdapterTestDatabase?replicaSet=rs0', + }); + } }); beforeEach(async () => { @@ -239,6 +245,10 @@ describe('batch', () => { }); it('should generate separate session for each call', async () => { + const myObject = new Parse.Object('MyObject2'); // This is important because transaction only works on pre-existing collections + await myObject.save(); + await myObject.destroy(); + spyOn(databaseAdapter, 'createObject').and.callThrough(); let myObjectCalls = 0; From e1dbd71bcb93af85695eb77ba2456b858e11100c Mon Sep 17 00:00:00 2001 From: = Date: Thu, 25 Jul 2019 23:08:30 -0700 Subject: [PATCH 08/24] Transactions on postgres --- .../Postgres/PostgresStorageAdapter.js | 68 +++++++++++++++---- 1 file changed, 54 insertions(+), 14 deletions(-) diff --git a/src/Adapters/Storage/Postgres/PostgresStorageAdapter.js b/src/Adapters/Storage/Postgres/PostgresStorageAdapter.js index 04af605814..ceb69ae26a 100644 --- a/src/Adapters/Storage/Postgres/PostgresStorageAdapter.js +++ b/src/Adapters/Storage/Postgres/PostgresStorageAdapter.js @@ -1236,8 +1236,8 @@ export class PostgresStorageAdapter implements StorageAdapter { createObject( className: string, schema: SchemaType, - object: any - /* transactionalSession: ?any */ + object: any, + transactionalSession: ?any ) { debug('createObject', className, object); let columnsArray = []; @@ -1366,7 +1366,10 @@ export class PostgresStorageAdapter implements StorageAdapter { const qs = `INSERT INTO $1:name (${columnsPattern}) VALUES (${valuesPattern})`; const values = [className, ...columnsArray, ...valuesArray]; debug(qs, values); - return this._client + const promise = (transactionalSession + ? transactionalSession.t + : this._client + ) .none(qs, values) .then(() => ({ ops: [object] })) .catch(error => { @@ -1386,6 +1389,10 @@ export class PostgresStorageAdapter implements StorageAdapter { } throw error; }); + if (transactionalSession) { + transactionalSession.batch.push(promise); + } + return promise; } // Remove all objects that match the given Parse Query. 
@@ -1394,8 +1401,8 @@ export class PostgresStorageAdapter implements StorageAdapter { deleteObjectsByQuery( className: string, schema: SchemaType, - query: QueryType - /* transactionalSession: ?any */ + query: QueryType, + transactionalSession: ?any ) { debug('deleteObjectsByQuery', className, query); const values = [className]; @@ -1407,7 +1414,10 @@ export class PostgresStorageAdapter implements StorageAdapter { } const qs = `WITH deleted AS (DELETE FROM $1:name WHERE ${where.pattern} RETURNING *) SELECT count(*) FROM deleted`; debug(qs, values); - return this._client + const promise = (transactionalSession + ? transactionalSession.t + : this._client + ) .one(qs, values, a => +a.count) .then(count => { if (count === 0) { @@ -1425,6 +1435,10 @@ export class PostgresStorageAdapter implements StorageAdapter { } // ELSE: Don't delete anything if doesn't exist }); + if (transactionalSession) { + transactionalSession.batch.push(promise); + } + return promise; } // Return value not currently well specified. findOneAndUpdate( @@ -1449,8 +1463,8 @@ export class PostgresStorageAdapter implements StorageAdapter { className: string, schema: SchemaType, query: QueryType, - update: any - /* transactionalSession: ?any */ + update: any, + transactionalSession: ?any ): Promise<[any]> { debug('updateObjectsByQuery', className, query, update); const updatePatterns = []; @@ -1707,7 +1721,14 @@ export class PostgresStorageAdapter implements StorageAdapter { where.pattern.length > 0 ? `WHERE ${where.pattern}` : ''; const qs = `UPDATE $1:name SET ${updatePatterns.join()} ${whereClause} RETURNING *`; debug('update: ', qs, values); - return this._client.any(qs, values); + const promise = (transactionalSession + ? transactionalSession.t + : this._client + ).any(qs, values); + if (transactionalSession) { + transactionalSession.batch.push(promise); + } + return promise; } // Hopefully, we can get rid of this. It's only used for config and hooks. 
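The createTransactionalSession / commitTransactionalSession / abortTransactionalSession hunk below leans on pg-promise's tx API: the adapter opens a transaction, exposes the task object t through the resolved session, and keeps the transaction open by returning a deferred promise that is only settled on commit or abort. A minimal caller-side sketch, assuming an already configured PostgresStorageAdapter instance; the function name, object fields and variable names are illustrative, not part of the patch:

// Illustrative only: drive the adapter's transactional session API directly.
async function createTwoObjectsAtomically(adapter, schema) {
  const session = await adapter.createTransactionalSession();
  try {
    // Both inserts run on the same pg-promise task (session.t) and their
    // promises are pushed onto session.batch by createObject.
    // Object payloads are abbreviated for illustration.
    await adapter.createObject('MyObject', schema, { objectId: 'a1', key: 'value1' }, session);
    await adapter.createObject('MyObject', schema, { objectId: 'a2', key: 'value2' }, session);
    // commit settles the deferred promise with t.batch(session.batch),
    // letting the tx callback return and the transaction COMMIT.
    await adapter.commitTransactionalSession(session);
  } catch (e) {
    // abort pushes a rejection into the batch so t.batch rejects and
    // pg-promise rolls the transaction back.
    await adapter.abortTransactionalSession(session);
    throw e;
  }
}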
@@ -2359,15 +2380,34 @@ export class PostgresStorageAdapter implements StorageAdapter { } createTransactionalSession(): Promise { - return Promise.resolve(); + return new Promise(resolve => { + const transactionalSession = {}; + transactionalSession.result = this._client.tx(t => { + transactionalSession.t = t; + transactionalSession.promise = new Promise(resolve => { + transactionalSession.resolve = resolve; + }); + transactionalSession.batch = []; + resolve(transactionalSession); + return transactionalSession.promise; + }); + }); } - commitTransactionalSession(): Promise { - return Promise.resolve(); + commitTransactionalSession(transactionalSession: any): Promise { + transactionalSession.resolve( + transactionalSession.t.batch(transactionalSession.batch) + ); + return transactionalSession.result; } - abortTransactionalSession(): Promise { - return Promise.resolve(); + abortTransactionalSession(transactionalSession: any): Promise { + const result = transactionalSession.result.catch(); + transactionalSession.batch.push(Promise.reject()); + transactionalSession.resolve( + transactionalSession.t.batch(transactionalSession.batch) + ); + return result; } } From 142acdeb623899c28c7a9ee9ff3eaeb46365e56a Mon Sep 17 00:00:00 2001 From: = Date: Thu, 25 Jul 2019 23:26:25 -0700 Subject: [PATCH 09/24] Fix travis to restart mongodb --- .travis.yml | 7 +------ spec/batch.spec.js | 10 +++++++--- 2 files changed, 8 insertions(+), 9 deletions(-) diff --git a/.travis.yml b/.travis.yml index 8334640e5b..ce198a2b9d 100644 --- a/.travis.yml +++ b/.travis.yml @@ -25,7 +25,7 @@ env: global: - COVERAGE_OPTION='./node_modules/.bin/nyc' matrix: - - MONGODB_VERSION=4.0.4 + - MONGODB_VERSION=4.0.4 MONGODB_TOPOLOGY=replicaset - MONGODB_VERSION=3.6.9 - PARSE_SERVER_TEST_DB=postgres - PARSE_SERVER_TEST_CACHE=redis @@ -38,11 +38,6 @@ before_install: - nvm use $NODE_VERSION - npm install -g greenkeeper-lockfile@1 before_script: -- echo "replSet = rs0" | sudo tee -a /etc/mongodb.conf -- sudo service mongodb restart -- sleep 20 -- mongo --eval 'rs.initiate()' -- sleep 20 - node -e 'require("./lib/index.js")' - psql -c 'create database parse_server_postgres_adapter_test_database;' -U postgres - psql -c 'CREATE EXTENSION postgis;' -U postgres -d parse_server_postgres_adapter_test_database diff --git a/spec/batch.spec.js b/spec/batch.spec.js index 6ff208e2b7..d5b235d3c2 100644 --- a/spec/batch.spec.js +++ b/spec/batch.spec.js @@ -153,16 +153,20 @@ describe('batch', () => { }); if ( - process.env.MONGODB_VERSION === '4.0.4' || + (process.env.MONGODB_VERSION === '4.0.4' && + process.env.MONGODB_TOPOLOGY === 'replicaset') || process.env.PARSE_SERVER_TEST_DB === 'postgres' ) { describe('transactions', () => { beforeAll(async () => { - if (process.env.MONGODB_VERSION === '4.0.4') { + if ( + process.env.MONGODB_VERSION === '4.0.4' && + process.env.MONGODB_TOPOLOGY === 'replicaset' + ) { await reconfigureServer({ databaseAdapter: undefined, databaseURI: - 'mongodb://localhost:27017/parseServerMongoAdapterTestDatabase?replicaSet=rs0', + 'mongodb://localhost:27017/parseServerMongoAdapterTestDatabase?replicaSet=replicaset', }); } }); From fa087c38d8d8c4b5da4607e0086df087350944b5 Mon Sep 17 00:00:00 2001 From: = Date: Thu, 25 Jul 2019 23:48:20 -0700 Subject: [PATCH 10/24] Remove mongodb service and keep only mongodb runner --- .travis.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index ce198a2b9d..8b3aad48a4 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,7 +1,6 @@ language: node_js dist: trusty 
services: -- mongodb - postgresql - redis-server - docker From 801e859118e907058c5349a3b59f7e0eb9d027f6 Mon Sep 17 00:00:00 2001 From: = Date: Thu, 25 Jul 2019 23:54:58 -0700 Subject: [PATCH 11/24] MongoDB service back --- .travis.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 8b3aad48a4..4c8952a9a3 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,6 +1,7 @@ language: node_js dist: trusty services: +- mongodb - postgresql - redis-server - docker @@ -23,8 +24,9 @@ stage: test env: global: - COVERAGE_OPTION='./node_modules/.bin/nyc' + - MONGODB_TOPOLOGY=replicaset matrix: - - MONGODB_VERSION=4.0.4 MONGODB_TOPOLOGY=replicaset + - MONGODB_VERSION=4.0.4 - MONGODB_VERSION=3.6.9 - PARSE_SERVER_TEST_DB=postgres - PARSE_SERVER_TEST_CACHE=redis From 5ef981b4627350330370345f27ce1eecee7021c5 Mon Sep 17 00:00:00 2001 From: = Date: Fri, 26 Jul 2019 00:01:07 -0700 Subject: [PATCH 12/24] Initialize replicaset --- .travis.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.travis.yml b/.travis.yml index 4c8952a9a3..d0c4a195c9 100644 --- a/.travis.yml +++ b/.travis.yml @@ -44,6 +44,9 @@ before_script: - psql -c 'CREATE EXTENSION postgis;' -U postgres -d parse_server_postgres_adapter_test_database - psql -c 'CREATE EXTENSION postgis_topology;' -U postgres -d parse_server_postgres_adapter_test_database - silent=1 mongodb-runner --start +- sleep 20 +- mongo --eval 'rs.initiate()' +- sleep 20 - greenkeeper-lockfile-update script: - npm run lint From 264fc8429eb8d2b74ee4dadb1030f74c3827fe41 Mon Sep 17 00:00:00 2001 From: = Date: Fri, 26 Jul 2019 00:13:30 -0700 Subject: [PATCH 13/24] Remove mongodb runner again --- .travis.yml | 4 ---- 1 file changed, 4 deletions(-) diff --git a/.travis.yml b/.travis.yml index d0c4a195c9..92a293a387 100644 --- a/.travis.yml +++ b/.travis.yml @@ -43,10 +43,6 @@ before_script: - psql -c 'create database parse_server_postgres_adapter_test_database;' -U postgres - psql -c 'CREATE EXTENSION postgis;' -U postgres -d parse_server_postgres_adapter_test_database - psql -c 'CREATE EXTENSION postgis_topology;' -U postgres -d parse_server_postgres_adapter_test_database -- silent=1 mongodb-runner --start -- sleep 20 -- mongo --eval 'rs.initiate()' -- sleep 20 - greenkeeper-lockfile-update script: - npm run lint From 1e5753b1a46384fcf4496bff57bcc0a6dd2270fe Mon Sep 17 00:00:00 2001 From: = Date: Fri, 26 Jul 2019 00:27:45 -0700 Subject: [PATCH 14/24] Again only with mongodb-runner and removing cache --- .travis.yml | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/.travis.yml b/.travis.yml index 92a293a387..c427c9fd7f 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,7 +1,6 @@ language: node_js dist: trusty services: -- mongodb - postgresql - redis-server - docker @@ -19,14 +18,12 @@ branches: cache: directories: - "$HOME/.npm" - - "$HOME/.mongodb/versions" stage: test env: global: - COVERAGE_OPTION='./node_modules/.bin/nyc' - - MONGODB_TOPOLOGY=replicaset matrix: - - MONGODB_VERSION=4.0.4 + - MONGODB_VERSION=4.0.4 MONGODB_TOPOLOGY=replicaset - MONGODB_VERSION=3.6.9 - PARSE_SERVER_TEST_DB=postgres - PARSE_SERVER_TEST_CACHE=redis @@ -43,6 +40,7 @@ before_script: - psql -c 'create database parse_server_postgres_adapter_test_database;' -U postgres - psql -c 'CREATE EXTENSION postgis;' -U postgres -d parse_server_postgres_adapter_test_database - psql -c 'CREATE EXTENSION postgis_topology;' -U postgres -d parse_server_postgres_adapter_test_database +- silent=1 mongodb-runner --start - greenkeeper-lockfile-update 
script: - npm run lint From 93cc1c26bef05d45c89be3511b1fdbcfa9d4c95f Mon Sep 17 00:00:00 2001 From: = Date: Fri, 26 Jul 2019 00:58:28 -0700 Subject: [PATCH 15/24] Trying with pretest and posttest --- .travis.yml | 1 - package.json | 2 ++ 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index c427c9fd7f..04f3d0b6ac 100644 --- a/.travis.yml +++ b/.travis.yml @@ -40,7 +40,6 @@ before_script: - psql -c 'create database parse_server_postgres_adapter_test_database;' -U postgres - psql -c 'CREATE EXTENSION postgis;' -U postgres -d parse_server_postgres_adapter_test_database - psql -c 'CREATE EXTENSION postgis_topology;' -U postgres -d parse_server_postgres_adapter_test_database -- silent=1 mongodb-runner --start - greenkeeper-lockfile-update script: - npm run lint diff --git a/package.json b/package.json index 01ca3ff30b..fe5f3704e0 100644 --- a/package.json +++ b/package.json @@ -95,7 +95,9 @@ "lint": "flow && eslint --cache ./", "build": "babel src/ -d lib/ --copy-files", "watch": "babel --watch src/ -d lib/ --copy-files", + "pretest": "mongodb-runner start", "test": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=4.0.4} MONGODB_STORAGE_ENGINE=mmapv1 TESTING=1 jasmine", + "posttest": "mongodb-runner stop", "coverage": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=4.0.4} MONGODB_STORAGE_ENGINE=mmapv1 TESTING=1 nyc jasmine", "start": "node ./bin/parse-server", "prepare": "npm run build", From cd6b2edd78c479f61c13f99799d08f65a9f74483 Mon Sep 17 00:00:00 2001 From: = Date: Fri, 26 Jul 2019 01:27:11 -0700 Subject: [PATCH 16/24] WiredTiger --- .travis.yml | 4 +++- package.json | 8 ++++---- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/.travis.yml b/.travis.yml index 04f3d0b6ac..b5c31dfe44 100644 --- a/.travis.yml +++ b/.travis.yml @@ -23,7 +23,7 @@ env: global: - COVERAGE_OPTION='./node_modules/.bin/nyc' matrix: - - MONGODB_VERSION=4.0.4 MONGODB_TOPOLOGY=replicaset + - MONGODB_VERSION=4.0.4 MONGODB_TOPOLOGY=replicaset MONGODB_STORAGE_ENGINE=wiredtiger - MONGODB_VERSION=3.6.9 - PARSE_SERVER_TEST_DB=postgres - PARSE_SERVER_TEST_CACHE=redis @@ -42,8 +42,10 @@ before_script: - psql -c 'CREATE EXTENSION postgis_topology;' -U postgres -d parse_server_postgres_adapter_test_database - greenkeeper-lockfile-update script: +- npm run startmongo - npm run lint - npm run coverage +- npm run stopmongo after_script: - greenkeeper-lockfile-upload - bash <(curl -s https://codecov.io/bash) diff --git a/package.json b/package.json index fe5f3704e0..80df7ce298 100644 --- a/package.json +++ b/package.json @@ -95,10 +95,10 @@ "lint": "flow && eslint --cache ./", "build": "babel src/ -d lib/ --copy-files", "watch": "babel --watch src/ -d lib/ --copy-files", - "pretest": "mongodb-runner start", - "test": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=4.0.4} MONGODB_STORAGE_ENGINE=mmapv1 TESTING=1 jasmine", - "posttest": "mongodb-runner stop", - "coverage": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=4.0.4} MONGODB_STORAGE_ENGINE=mmapv1 TESTING=1 nyc jasmine", + "startmongo": "mongodb-runner start", + "test": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=4.0.4} MONGODB_STORAGE_ENGINE=${MONGODB_STORAGE_ENGINE:=mmapv1} TESTING=1 jasmine", + "stopmongo": "mongodb-runner stop", + "coverage": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=4.0.4} MONGODB_STORAGE_ENGINE=${MONGODB_STORAGE_ENGINE:=mmapv1} TESTING=1 nyc jasmine", "start": "node ./bin/parse-server", "prepare": "npm run build", "postinstall": "node -p 'require(\"./postinstall.js\")()'" From 
16637330d5c93f92b54e51bd8c93129e984422a3 Mon Sep 17 00:00:00 2001 From: = Date: Fri, 26 Jul 2019 01:32:48 -0700 Subject: [PATCH 17/24] Pretest and posttest again --- package.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/package.json b/package.json index 80df7ce298..c96e780427 100644 --- a/package.json +++ b/package.json @@ -95,9 +95,9 @@ "lint": "flow && eslint --cache ./", "build": "babel src/ -d lib/ --copy-files", "watch": "babel --watch src/ -d lib/ --copy-files", - "startmongo": "mongodb-runner start", + "pretest": "mongodb-runner start", "test": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=4.0.4} MONGODB_STORAGE_ENGINE=${MONGODB_STORAGE_ENGINE:=mmapv1} TESTING=1 jasmine", - "stopmongo": "mongodb-runner stop", + "posttest": "mongodb-runner stop", "coverage": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=4.0.4} MONGODB_STORAGE_ENGINE=${MONGODB_STORAGE_ENGINE:=mmapv1} TESTING=1 nyc jasmine", "start": "node ./bin/parse-server", "prepare": "npm run build", From afa6ed39dda6457358bd22eba0016a44097686cd Mon Sep 17 00:00:00 2001 From: = Date: Fri, 26 Jul 2019 01:37:14 -0700 Subject: [PATCH 18/24] Removing inexistent scripts --- .travis.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index b5c31dfe44..81dd8bc025 100644 --- a/.travis.yml +++ b/.travis.yml @@ -42,10 +42,8 @@ before_script: - psql -c 'CREATE EXTENSION postgis_topology;' -U postgres -d parse_server_postgres_adapter_test_database - greenkeeper-lockfile-update script: -- npm run startmongo - npm run lint - npm run coverage -- npm run stopmongo after_script: - greenkeeper-lockfile-upload - bash <(curl -s https://codecov.io/bash) From 560c9fd37125df5a8bbfe75c929b3cc2630e16e5 Mon Sep 17 00:00:00 2001 From: = Date: Fri, 26 Jul 2019 01:49:31 -0700 Subject: [PATCH 19/24] wiredTiger --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 81dd8bc025..56ec8cc958 100644 --- a/.travis.yml +++ b/.travis.yml @@ -23,7 +23,7 @@ env: global: - COVERAGE_OPTION='./node_modules/.bin/nyc' matrix: - - MONGODB_VERSION=4.0.4 MONGODB_TOPOLOGY=replicaset MONGODB_STORAGE_ENGINE=wiredtiger + - MONGODB_VERSION=4.0.4 MONGODB_TOPOLOGY=replicaset MONGODB_STORAGE_ENGINE=wiredTiger - MONGODB_VERSION=3.6.9 - PARSE_SERVER_TEST_DB=postgres - PARSE_SERVER_TEST_CACHE=redis From 145c28d0fc9d687d0a6121cddccb2c7fc8581861 Mon Sep 17 00:00:00 2001 From: = Date: Fri, 26 Jul 2019 02:05:33 -0700 Subject: [PATCH 20/24] One more attempt --- package.json | 6 ++---- spec/batch.spec.js | 6 ++++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/package.json b/package.json index c96e780427..43c47238d2 100644 --- a/package.json +++ b/package.json @@ -95,10 +95,8 @@ "lint": "flow && eslint --cache ./", "build": "babel src/ -d lib/ --copy-files", "watch": "babel --watch src/ -d lib/ --copy-files", - "pretest": "mongodb-runner start", - "test": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=4.0.4} MONGODB_STORAGE_ENGINE=${MONGODB_STORAGE_ENGINE:=mmapv1} TESTING=1 jasmine", - "posttest": "mongodb-runner stop", - "coverage": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=4.0.4} MONGODB_STORAGE_ENGINE=${MONGODB_STORAGE_ENGINE:=mmapv1} TESTING=1 nyc jasmine", + "test": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=4.0.4} MONGODB_TOPOLOGY=${MONGODB_TOPOLOGY:=standalone} MONGODB_STORAGE_ENGINE=${MONGODB_STORAGE_ENGINE:=mmapv1} TESTING=1 jasmine", + "coverage": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=4.0.4} MONGODB_TOPOLOGY=${MONGODB_TOPOLOGY:=standalone} 
MONGODB_STORAGE_ENGINE=${MONGODB_STORAGE_ENGINE:=mmapv1} TESTING=1 nyc jasmine", "start": "node ./bin/parse-server", "prepare": "npm run build", "postinstall": "node -p 'require(\"./postinstall.js\")()'" diff --git a/spec/batch.spec.js b/spec/batch.spec.js index d5b235d3c2..7b62ddef4d 100644 --- a/spec/batch.spec.js +++ b/spec/batch.spec.js @@ -154,14 +154,16 @@ describe('batch', () => { if ( (process.env.MONGODB_VERSION === '4.0.4' && - process.env.MONGODB_TOPOLOGY === 'replicaset') || + process.env.MONGODB_TOPOLOGY === 'replicaset' && + process.env.MONGODB_STORAGE_ENGINE === 'wiredTiger') || process.env.PARSE_SERVER_TEST_DB === 'postgres' ) { describe('transactions', () => { beforeAll(async () => { if ( process.env.MONGODB_VERSION === '4.0.4' && - process.env.MONGODB_TOPOLOGY === 'replicaset' + process.env.MONGODB_TOPOLOGY === 'replicaset' && + process.env.MONGODB_STORAGE_ENGINE === 'wiredTiger' ) { await reconfigureServer({ databaseAdapter: undefined, From e3b42cd56e9f2a0a82d1946704a0c5407c85de9c Mon Sep 17 00:00:00 2001 From: = Date: Sun, 28 Jul 2019 22:37:51 -0700 Subject: [PATCH 21/24] Trying another way to run mongodb-runner --- package.json | 2 ++ spec/helper.js | 13 ------------- 2 files changed, 2 insertions(+), 13 deletions(-) diff --git a/package.json b/package.json index 522889e462..e678763cbd 100644 --- a/package.json +++ b/package.json @@ -95,7 +95,9 @@ "lint": "flow && eslint --cache ./", "build": "babel src/ -d lib/ --copy-files", "watch": "babel --watch src/ -d lib/ --copy-files", + "pretest": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=4.0.4} MONGODB_TOPOLOGY=${MONGODB_TOPOLOGY:=standalone} MONGODB_STORAGE_ENGINE=${MONGODB_STORAGE_ENGINE:=mmapv1} mongodb-runner start", "test": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=4.0.4} MONGODB_TOPOLOGY=${MONGODB_TOPOLOGY:=standalone} MONGODB_STORAGE_ENGINE=${MONGODB_STORAGE_ENGINE:=mmapv1} TESTING=1 jasmine", + "posttest": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=4.0.4} MONGODB_TOPOLOGY=${MONGODB_TOPOLOGY:=standalone} MONGODB_STORAGE_ENGINE=${MONGODB_STORAGE_ENGINE:=mmapv1} mongodb-runner stop", "coverage": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=4.0.4} MONGODB_TOPOLOGY=${MONGODB_TOPOLOGY:=standalone} MONGODB_STORAGE_ENGINE=${MONGODB_STORAGE_ENGINE:=mmapv1} TESTING=1 nyc jasmine", "start": "node ./bin/parse-server", "prepare": "npm run build", diff --git a/spec/helper.js b/spec/helper.js index f5dcc70438..0800128ced 100644 --- a/spec/helper.js +++ b/spec/helper.js @@ -40,20 +40,12 @@ const postgresURI = let databaseAdapter; // need to bind for mocking mocha -let startDB = () => {}; -let stopDB = () => {}; - if (process.env.PARSE_SERVER_TEST_DB === 'postgres') { databaseAdapter = new PostgresStorageAdapter({ uri: process.env.PARSE_SERVER_TEST_DATABASE_URI || postgresURI, collectionPrefix: 'test_', }); } else { - startDB = require('mongodb-runner/mocha/before').bind({ - timeout: () => {}, - slow: () => {}, - }); - stopDB = require('mongodb-runner/mocha/after'); databaseAdapter = new MongoStorageAdapter({ uri: mongoURI, collectionPrefix: 'test_', @@ -177,11 +169,6 @@ const reconfigureServer = changedConfiguration => { const Parse = require('parse/node'); Parse.serverURL = 'http://localhost:' + port + '/1'; -// 10 minutes timeout -beforeAll(startDB, 10 * 60 * 1000); - -afterAll(stopDB); - beforeEach(done => { try { Parse.User.enableUnsafeCurrentUser(); From dd214beacdc4398918d0803d3df428cb3495a733 Mon Sep 17 00:00:00 2001 From: = Date: Mon, 29 Jul 2019 00:06:22 -0700 Subject: [PATCH 22/24] Fixing tests --- package.json 
| 2 +- spec/GridStoreAdapter.spec.js | 2 +- spec/batch.spec.js | 195 ++++++++++++++++++---------------- 3 files changed, 106 insertions(+), 93 deletions(-) diff --git a/package.json b/package.json index e5673c33fb..f17926a26d 100644 --- a/package.json +++ b/package.json @@ -98,7 +98,7 @@ "pretest": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=4.0.4} MONGODB_TOPOLOGY=${MONGODB_TOPOLOGY:=standalone} MONGODB_STORAGE_ENGINE=${MONGODB_STORAGE_ENGINE:=mmapv1} mongodb-runner start", "test": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=4.0.4} MONGODB_TOPOLOGY=${MONGODB_TOPOLOGY:=standalone} MONGODB_STORAGE_ENGINE=${MONGODB_STORAGE_ENGINE:=mmapv1} TESTING=1 jasmine", "posttest": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=4.0.4} MONGODB_TOPOLOGY=${MONGODB_TOPOLOGY:=standalone} MONGODB_STORAGE_ENGINE=${MONGODB_STORAGE_ENGINE:=mmapv1} mongodb-runner stop", - "coverage": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=4.0.4} MONGODB_TOPOLOGY=${MONGODB_TOPOLOGY:=standalone} MONGODB_STORAGE_ENGINE=${MONGODB_STORAGE_ENGINE:=mmapv1} TESTING=1 nyc jasmine", + "coverage": "npm run pretest && cross-env MONGODB_VERSION=${MONGODB_VERSION:=4.0.4} MONGODB_TOPOLOGY=${MONGODB_TOPOLOGY:=standalone} MONGODB_STORAGE_ENGINE=${MONGODB_STORAGE_ENGINE:=mmapv1} TESTING=1 nyc jasmine && npm run posttest", "start": "node ./bin/parse-server", "prepare": "npm run build", "postinstall": "node -p 'require(\"./postinstall.js\")()'" diff --git a/spec/GridStoreAdapter.spec.js b/spec/GridStoreAdapter.spec.js index 2565f7bee0..1b6d61256c 100644 --- a/spec/GridStoreAdapter.spec.js +++ b/spec/GridStoreAdapter.spec.js @@ -13,7 +13,7 @@ describe_only_db('mongo')('GridStoreAdapter', () => { const config = Config.get(Parse.applicationId); const gridStoreAdapter = new GridStoreAdapter(databaseURI); const db = await gridStoreAdapter._connect(); - db.dropDatabase(); + await db.dropDatabase(); const filesController = new FilesController( gridStoreAdapter, Parse.applicationId, diff --git a/spec/batch.spec.js b/spec/batch.spec.js index 7b62ddef4d..68ee0c01a2 100644 --- a/spec/batch.spec.js +++ b/spec/batch.spec.js @@ -178,83 +178,102 @@ describe('batch', () => { }); it('should handle a batch request with transaction = true', done => { - spyOn(databaseAdapter, 'createObject').and.callThrough(); + const myObject = new Parse.Object('MyObject'); // This is important because transaction only works on pre-existing collections + myObject + .save() + .then(() => { + return myObject.destroy(); + }) + .then(() => { + spyOn(databaseAdapter, 'createObject').and.callThrough(); - request({ - method: 'POST', - headers: headers, - url: 'http://localhost:8378/1/batch', - body: JSON.stringify({ - requests: [ - { - method: 'POST', - path: '/1/classes/MyObject', - body: { key: 'value1' }, - }, - { - method: 'POST', - path: '/1/classes/MyObject', - body: { key: 'value2' }, - }, - ], - transaction: true, - }), - }).then(response => { - expect(response.data.length).toEqual(2); - expect(response.data[0].success.objectId).toBeDefined(); - expect(response.data[0].success.createdAt).toBeDefined(); - expect(response.data[1].success.objectId).toBeDefined(); - expect(response.data[1].success.createdAt).toBeDefined(); - const query = new Parse.Query('MyObject'); - query.find().then(results => { - expect(databaseAdapter.createObject.calls.count()).toBe(2); - expect(databaseAdapter.createObject.calls.argsFor(0)[3]).toBe( - databaseAdapter.createObject.calls.argsFor(1)[3] - ); - expect(results.map(result => result.get('key')).sort()).toEqual([ - 'value1', - 'value2', - ]); - 
done(); + request({ + method: 'POST', + headers: headers, + url: 'http://localhost:8378/1/batch', + body: JSON.stringify({ + requests: [ + { + method: 'POST', + path: '/1/classes/MyObject', + body: { key: 'value1' }, + }, + { + method: 'POST', + path: '/1/classes/MyObject', + body: { key: 'value2' }, + }, + ], + transaction: true, + }), + }).then(response => { + expect(response.data.length).toEqual(2); + expect(response.data[0].success.objectId).toBeDefined(); + expect(response.data[0].success.createdAt).toBeDefined(); + expect(response.data[1].success.objectId).toBeDefined(); + expect(response.data[1].success.createdAt).toBeDefined(); + const query = new Parse.Query('MyObject'); + query.find().then(results => { + expect(databaseAdapter.createObject.calls.count()).toBe(2); + expect(databaseAdapter.createObject.calls.argsFor(0)[3]).toBe( + databaseAdapter.createObject.calls.argsFor(1)[3] + ); + expect(results.map(result => result.get('key')).sort()).toEqual( + ['value1', 'value2'] + ); + done(); + }); + }); }); - }); }); it('should not save anything when one operation fails in a transaction', done => { - request({ - method: 'POST', - headers: headers, - url: 'http://localhost:8378/1/batch', - body: JSON.stringify({ - requests: [ - { - method: 'POST', - path: '/1/classes/MyObject', - body: { key: 'value1' }, - }, - { - method: 'POST', - path: '/1/classes/MyObject', - body: { key: 10 }, - }, - ], - transaction: true, - }), - }).catch(error => { - expect(error.data).toBeDefined(); - const query = new Parse.Query('MyObject'); - query.find().then(results => { - expect(results.length).toBe(0); - done(); + const myObject = new Parse.Object('MyObject'); // This is important because transaction only works on pre-existing collections + myObject + .save() + .then(() => { + return myObject.destroy(); + }) + .then(() => { + request({ + method: 'POST', + headers: headers, + url: 'http://localhost:8378/1/batch', + body: JSON.stringify({ + requests: [ + { + method: 'POST', + path: '/1/classes/MyObject', + body: { key: 'value1' }, + }, + { + method: 'POST', + path: '/1/classes/MyObject', + body: { key: 10 }, + }, + ], + transaction: true, + }), + }).catch(error => { + expect(error.data).toBeDefined(); + const query = new Parse.Query('MyObject'); + query.find().then(results => { + expect(results.length).toBe(0); + done(); + }); + }); }); - }); }); it('should generate separate session for each call', async () => { - const myObject = new Parse.Object('MyObject2'); // This is important because transaction only works on pre-existing collections + const myObject = new Parse.Object('MyObject'); // This is important because transaction only works on pre-existing collections await myObject.save(); await myObject.destroy(); + const myObject2 = new Parse.Object('MyObject2'); // This is important because transaction only works on pre-existing collections + await myObject2.save(); + await myObject2.destroy(); + spyOn(databaseAdapter, 'createObject').and.callThrough(); let myObjectCalls = 0; @@ -289,32 +308,6 @@ describe('batch', () => { } }); - let myObject2Calls = 0; - Parse.Cloud.beforeSave('MyObject2', async () => { - myObject2Calls++; - if (myObject2Calls === 2) { - await request({ - method: 'POST', - headers: headers, - url: 'http://localhost:8378/1/batch', - body: JSON.stringify({ - requests: [ - { - method: 'POST', - path: '/1/classes/MyObject3', - body: { key: 'value1' }, - }, - { - method: 'POST', - path: '/1/classes/MyObject3', - body: { key: 'value2' }, - }, - ], - }), - }); - } - }); - const response = await 
request({ method: 'POST', headers: headers, @@ -342,6 +335,26 @@ describe('batch', () => { expect(response.data[1].success.objectId).toBeDefined(); expect(response.data[1].success.createdAt).toBeDefined(); + await request({ + method: 'POST', + headers: headers, + url: 'http://localhost:8378/1/batch', + body: JSON.stringify({ + requests: [ + { + method: 'POST', + path: '/1/classes/MyObject3', + body: { key: 'value1' }, + }, + { + method: 'POST', + path: '/1/classes/MyObject3', + body: { key: 'value2' }, + }, + ], + }), + }); + const query = new Parse.Query('MyObject'); const results = await query.find(); expect(results.map(result => result.get('key')).sort()).toEqual([ From 1499519b71235dc19b8242a2b605ab7dc9ebf8d4 Mon Sep 17 00:00:00 2001 From: Antonio Davi Macedo Coelho de Castro Date: Tue, 30 Jul 2019 20:46:27 -0700 Subject: [PATCH 23/24] Include batch transaction on direct access --- src/ParseServerRESTController.js | 68 +++++++++++++++++++++++--------- src/batch.js | 2 +- 2 files changed, 50 insertions(+), 20 deletions(-) diff --git a/src/ParseServerRESTController.js b/src/ParseServerRESTController.js index b91fbdbd1e..4bcd7bf066 100644 --- a/src/ParseServerRESTController.js +++ b/src/ParseServerRESTController.js @@ -33,11 +33,13 @@ function getAuth(options = {}, config) { } function ParseServerRESTController(applicationId, router) { - function handleRequest(method, path, data = {}, options = {}) { + function handleRequest(method, path, data = {}, options = {}, config) { // Store the arguments, for later use if internal fails const args = arguments; - const config = Config.get(applicationId); + if (!config) { + config = Config.get(applicationId); + } const serverURL = URL.parse(config.serverURL); if (path.indexOf(serverURL.path) === 0) { path = path.slice(serverURL.path.length, path.length); @@ -48,24 +50,52 @@ function ParseServerRESTController(applicationId, router) { } if (path === '/batch') { - const promises = data.requests.map(request => { - return handleRequest( - request.method, - request.path, - request.body, - options - ).then( - response => { - return Promise.resolve({ success: response }); - }, - error => { - return Promise.resolve({ - error: { code: error.code, error: error.message }, - }); - } - ); + let initialPromise = Promise.resolve(); + if (data.transaction === true) { + initialPromise = config.database.createTransactionalSession(); + } + return initialPromise.then(() => { + const promises = data.requests.map(request => { + return handleRequest( + request.method, + request.path, + request.body, + options, + config + ).then( + response => { + return Promise.resolve({ success: response }); + }, + error => { + if (data.transaction === true) { + return Promise.reject(error); + } + return Promise.resolve({ + error: { code: error.code, error: error.message }, + }); + } + ); + }); + return Promise.all(promises) + .catch(error => { + if (data.transaction === true) { + return config.database.abortTransactionalSession().then(() => { + throw error; + }); + } else { + throw error; + } + }) + .then(result => { + if (data.transaction === true) { + return config.database.commitTransactionalSession().then(() => { + return result; + }); + } else { + return result; + } + }); }); - return Promise.all(promises); } let query; diff --git a/src/batch.js b/src/batch.js index 7367da1303..f22795df54 100644 --- a/src/batch.js +++ b/src/batch.js @@ -107,7 +107,7 @@ function handleBatch(router, req) { return { success: response.response }; }, error => { - if (req.body.transaction) { + if 
(req.body.transaction === true) { return Promise.reject(error); } return { error: { code: error.code, error: error.message } }; From 20cb7611834e3deaee0d97e736b4cf0c41b1ec38 Mon Sep 17 00:00:00 2001 From: Antonio Davi Macedo Coelho de Castro Date: Tue, 30 Jul 2019 21:21:55 -0700 Subject: [PATCH 24/24] Add tests to direct access --- spec/ParseServerRESTController.spec.js | 269 ++++++++++++++++++++++++- src/batch.js | 4 +- 2 files changed, 270 insertions(+), 3 deletions(-) diff --git a/spec/ParseServerRESTController.spec.js b/spec/ParseServerRESTController.spec.js index 6fa128ec4c..11c23bdd71 100644 --- a/spec/ParseServerRESTController.spec.js +++ b/spec/ParseServerRESTController.spec.js @@ -2,6 +2,7 @@ const ParseServerRESTController = require('../lib/ParseServerRESTController') .ParseServerRESTController; const ParseServer = require('../lib/ParseServer').default; const Parse = require('parse/node').Parse; +const TestUtils = require('../lib/TestUtils'); let RESTController; @@ -40,7 +41,7 @@ describe('ParseServerRESTController', () => { ); }); - it('should handle a POST batch', done => { + it('should handle a POST batch without transaction', done => { RESTController.request('POST', 'batch', { requests: [ { @@ -69,6 +70,272 @@ describe('ParseServerRESTController', () => { ); }); + it('should handle a POST batch with transaction=false', done => { + RESTController.request('POST', 'batch', { + requests: [ + { + method: 'GET', + path: '/classes/MyObject', + }, + { + method: 'POST', + path: '/classes/MyObject', + body: { key: 'value' }, + }, + { + method: 'GET', + path: '/classes/MyObject', + }, + ], + transaction: false, + }).then( + res => { + expect(res.length).toBe(3); + done(); + }, + err => { + jfail(err); + done(); + } + ); + }); + + if ( + (process.env.MONGODB_VERSION === '4.0.4' && + process.env.MONGODB_TOPOLOGY === 'replicaset' && + process.env.MONGODB_STORAGE_ENGINE === 'wiredTiger') || + process.env.PARSE_SERVER_TEST_DB === 'postgres' + ) { + describe('transactions', () => { + beforeAll(async () => { + if ( + process.env.MONGODB_VERSION === '4.0.4' && + process.env.MONGODB_TOPOLOGY === 'replicaset' && + process.env.MONGODB_STORAGE_ENGINE === 'wiredTiger' + ) { + await reconfigureServer({ + databaseAdapter: undefined, + databaseURI: + 'mongodb://localhost:27017/parseServerMongoAdapterTestDatabase?replicaSet=replicaset', + }); + } + }); + + beforeEach(async () => { + await TestUtils.destroyAllDataPermanently(true); + }); + + it('should handle a batch request with transaction = true', done => { + const myObject = new Parse.Object('MyObject'); // This is important because transaction only works on pre-existing collections + myObject + .save() + .then(() => { + return myObject.destroy(); + }) + .then(() => { + spyOn(databaseAdapter, 'createObject').and.callThrough(); + + RESTController.request('POST', 'batch', { + requests: [ + { + method: 'POST', + path: '/1/classes/MyObject', + body: { key: 'value1' }, + }, + { + method: 'POST', + path: '/1/classes/MyObject', + body: { key: 'value2' }, + }, + ], + transaction: true, + }).then(response => { + expect(response.length).toEqual(2); + expect(response[0].success.objectId).toBeDefined(); + expect(response[0].success.createdAt).toBeDefined(); + expect(response[1].success.objectId).toBeDefined(); + expect(response[1].success.createdAt).toBeDefined(); + const query = new Parse.Query('MyObject'); + query.find().then(results => { + expect(databaseAdapter.createObject.calls.count()).toBe(2); + 
expect(databaseAdapter.createObject.calls.argsFor(0)[3]).toBe( + databaseAdapter.createObject.calls.argsFor(1)[3] + ); + expect(results.map(result => result.get('key')).sort()).toEqual( + ['value1', 'value2'] + ); + done(); + }); + }); + }); + }); + + it('should not save anything when one operation fails in a transaction', done => { + const myObject = new Parse.Object('MyObject'); // This is important because transaction only works on pre-existing collections + myObject + .save() + .then(() => { + return myObject.destroy(); + }) + .then(() => { + RESTController.request('POST', 'batch', { + requests: [ + { + method: 'POST', + path: '/1/classes/MyObject', + body: { key: 'value1' }, + }, + { + method: 'POST', + path: '/1/classes/MyObject', + body: { key: 10 }, + }, + ], + transaction: true, + }).catch(error => { + expect(error.message).toBeDefined(); + const query = new Parse.Query('MyObject'); + query.find().then(results => { + expect(results.length).toBe(0); + done(); + }); + }); + }); + }); + + it('should generate separate session for each call', async () => { + const myObject = new Parse.Object('MyObject'); // This is important because transaction only works on pre-existing collections + await myObject.save(); + await myObject.destroy(); + + const myObject2 = new Parse.Object('MyObject2'); // This is important because transaction only works on pre-existing collections + await myObject2.save(); + await myObject2.destroy(); + + spyOn(databaseAdapter, 'createObject').and.callThrough(); + + let myObjectCalls = 0; + Parse.Cloud.beforeSave('MyObject', async () => { + myObjectCalls++; + if (myObjectCalls === 2) { + try { + await RESTController.request('POST', 'batch', { + requests: [ + { + method: 'POST', + path: '/1/classes/MyObject2', + body: { key: 'value1' }, + }, + { + method: 'POST', + path: '/1/classes/MyObject2', + body: { key: 10 }, + }, + ], + transaction: true, + }); + fail('should fail'); + } catch (e) { + expect(e).toBeDefined(); + } + } + }); + + const response = await RESTController.request('POST', 'batch', { + requests: [ + { + method: 'POST', + path: '/1/classes/MyObject', + body: { key: 'value1' }, + }, + { + method: 'POST', + path: '/1/classes/MyObject', + body: { key: 'value2' }, + }, + ], + transaction: true, + }); + + expect(response.length).toEqual(2); + expect(response[0].success.objectId).toBeDefined(); + expect(response[0].success.createdAt).toBeDefined(); + expect(response[1].success.objectId).toBeDefined(); + expect(response[1].success.createdAt).toBeDefined(); + + await RESTController.request('POST', 'batch', { + requests: [ + { + method: 'POST', + path: '/1/classes/MyObject3', + body: { key: 'value1' }, + }, + { + method: 'POST', + path: '/1/classes/MyObject3', + body: { key: 'value2' }, + }, + ], + }); + + const query = new Parse.Query('MyObject'); + const results = await query.find(); + expect(results.map(result => result.get('key')).sort()).toEqual([ + 'value1', + 'value2', + ]); + + const query2 = new Parse.Query('MyObject2'); + const results2 = await query2.find(); + expect(results2.length).toEqual(0); + + const query3 = new Parse.Query('MyObject3'); + const results3 = await query3.find(); + expect(results3.map(result => result.get('key')).sort()).toEqual([ + 'value1', + 'value2', + ]); + + expect(databaseAdapter.createObject.calls.count()).toBe(5); + let transactionalSession; + let transactionalSession2; + let myObjectDBCalls = 0; + let myObject2DBCalls = 0; + let myObject3DBCalls = 0; + for (let i = 0; i < 5; i++) { + const args = 
databaseAdapter.createObject.calls.argsFor(i); + switch (args[0]) { + case 'MyObject': + myObjectDBCalls++; + if (!transactionalSession) { + transactionalSession = args[3]; + } else { + expect(transactionalSession).toBe(args[3]); + } + if (transactionalSession2) { + expect(transactionalSession2).not.toBe(args[3]); + } + break; + case 'MyObject2': + myObject2DBCalls++; + transactionalSession2 = args[3]; + if (transactionalSession) { + expect(transactionalSession).not.toBe(args[3]); + } + break; + case 'MyObject3': + myObject3DBCalls++; + expect(args[3]).toEqual(null); + break; + } + } + expect(myObjectDBCalls).toEqual(2); + expect(myObject2DBCalls).toEqual(1); + expect(myObject3DBCalls).toEqual(2); + }); + }); + } + it('should handle a POST request', done => { RESTController.request('POST', '/classes/MyObject', { key: 'value' }) .then(() => { diff --git a/src/batch.js b/src/batch.js index f22795df54..10ae294a36 100644 --- a/src/batch.js +++ b/src/batch.js @@ -117,7 +117,7 @@ function handleBatch(router, req) { return Promise.all(promises) .catch(error => { - if (req.body.transaction) { + if (req.body.transaction === true) { return req.config.database.abortTransactionalSession().then(() => { throw error; }); @@ -126,7 +126,7 @@ function handleBatch(router, req) { } }) .then(results => { - if (req.body.transaction) { + if (req.body.transaction === true) { return req.config.database.commitTransactionalSession().then(() => { return { response: results }; });