perf: Allow covering relation queries with minimal index (#6581)
* Apply linter changes on files I'm about to update
My actual changes were quite difficult to find when buried in this sea
of style changes, which were getting automatically applied during a
pre-commit hook. Here I just run the hooks against the files I'm going
to be touching in the following commit, so that a reviewer can ignore
these automatically generated diffs and just view the meaningful commit.
* perf: Allow covering relation queries with minimal index
When finding objects through a relation, we're sending Mongo queries
that look like this:
```
db.getCollection('_Join:foo:bar').find({ relatedId: { $in: [...] } });
```
From the result of that query, we're only reading the `owningId` field,
so we can start by adding it as a projection:
```
db.getCollection('_Join:foo:bar')
.find({ relatedId: { $in: [...] } })
.project({ owningId: 1 });
```
This seems like the perfect example of a query that could be satisfied
with an index scan: we are querying on one field, and only need one
field from the matching document.
For example, this can allow users to speed up the fetching of user roles
in authentication, because they query a `roles` relation on the `_Role`
collection. To add a covering index on that, you could now add an index
like the following:
```
db.getCollection('_Join:roles:_Role').createIndex(
{ relatedId: 1, owningId: 1 },
{ background: true }
);
```
One caveat there is that the index I propose above doesn't include the
`_id` field. For the query in question, we don't actually care about
the ID of the document in the join table, just the `owningId` field, so
we can avoid some overhead of putting the `_id` field into the index if
we can also drop it from the projection. This requires adding a small
special case to the MongoStorageAdapter, because the `_id` field is
special: you have to opt out of using it by projecting `{ _id: 0 }`.
This commit is contained in:
@@ -34,12 +34,12 @@ const ReadPreference = mongodb.ReadPreference;
|
|||||||
|
|
||||||
const MongoSchemaCollectionName = '_SCHEMA';
|
const MongoSchemaCollectionName = '_SCHEMA';
|
||||||
|
|
||||||
const storageAdapterAllCollections = mongoAdapter => {
|
const storageAdapterAllCollections = (mongoAdapter) => {
|
||||||
return mongoAdapter
|
return mongoAdapter
|
||||||
.connect()
|
.connect()
|
||||||
.then(() => mongoAdapter.database.collections())
|
.then(() => mongoAdapter.database.collections())
|
||||||
.then(collections => {
|
.then((collections) => {
|
||||||
return collections.filter(collection => {
|
return collections.filter((collection) => {
|
||||||
if (collection.namespace.match(/\.system\./)) {
|
if (collection.namespace.match(/\.system\./)) {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
@@ -164,7 +164,7 @@ export class MongoStorageAdapter implements StorageAdapter {
|
|||||||
const encodedUri = formatUrl(parseUrl(this._uri));
|
const encodedUri = formatUrl(parseUrl(this._uri));
|
||||||
|
|
||||||
this.connectionPromise = MongoClient.connect(encodedUri, this._mongoOptions)
|
this.connectionPromise = MongoClient.connect(encodedUri, this._mongoOptions)
|
||||||
.then(client => {
|
.then((client) => {
|
||||||
// Starting mongoDB 3.0, the MongoClient.connect don't return a DB anymore but a client
|
// Starting mongoDB 3.0, the MongoClient.connect don't return a DB anymore but a client
|
||||||
// Fortunately, we can get back the options and use them to select the proper DB.
|
// Fortunately, we can get back the options and use them to select the proper DB.
|
||||||
// https://github.com/mongodb/node-mongodb-native/blob/2c35d76f08574225b8db02d7bef687123e6bb018/lib/mongo_client.js#L885
|
// https://github.com/mongodb/node-mongodb-native/blob/2c35d76f08574225b8db02d7bef687123e6bb018/lib/mongo_client.js#L885
|
||||||
@@ -183,7 +183,7 @@ export class MongoStorageAdapter implements StorageAdapter {
|
|||||||
this.client = client;
|
this.client = client;
|
||||||
this.database = database;
|
this.database = database;
|
||||||
})
|
})
|
||||||
.catch(err => {
|
.catch((err) => {
|
||||||
delete this.connectionPromise;
|
delete this.connectionPromise;
|
||||||
return Promise.reject(err);
|
return Promise.reject(err);
|
||||||
});
|
});
|
||||||
@@ -212,14 +212,14 @@ export class MongoStorageAdapter implements StorageAdapter {
|
|||||||
_adaptiveCollection(name: string) {
|
_adaptiveCollection(name: string) {
|
||||||
return this.connect()
|
return this.connect()
|
||||||
.then(() => this.database.collection(this._collectionPrefix + name))
|
.then(() => this.database.collection(this._collectionPrefix + name))
|
||||||
.then(rawCollection => new MongoCollection(rawCollection))
|
.then((rawCollection) => new MongoCollection(rawCollection))
|
||||||
.catch(err => this.handleError(err));
|
.catch((err) => this.handleError(err));
|
||||||
}
|
}
|
||||||
|
|
||||||
_schemaCollection(): Promise<MongoSchemaCollection> {
|
_schemaCollection(): Promise<MongoSchemaCollection> {
|
||||||
return this.connect()
|
return this.connect()
|
||||||
.then(() => this._adaptiveCollection(MongoSchemaCollectionName))
|
.then(() => this._adaptiveCollection(MongoSchemaCollectionName))
|
||||||
.then(collection => new MongoSchemaCollection(collection));
|
.then((collection) => new MongoSchemaCollection(collection));
|
||||||
}
|
}
|
||||||
|
|
||||||
classExists(name: string) {
|
classExists(name: string) {
|
||||||
@@ -229,20 +229,20 @@ export class MongoStorageAdapter implements StorageAdapter {
|
|||||||
.listCollections({ name: this._collectionPrefix + name })
|
.listCollections({ name: this._collectionPrefix + name })
|
||||||
.toArray();
|
.toArray();
|
||||||
})
|
})
|
||||||
.then(collections => {
|
.then((collections) => {
|
||||||
return collections.length > 0;
|
return collections.length > 0;
|
||||||
})
|
})
|
||||||
.catch(err => this.handleError(err));
|
.catch((err) => this.handleError(err));
|
||||||
}
|
}
|
||||||
|
|
||||||
setClassLevelPermissions(className: string, CLPs: any): Promise<void> {
|
setClassLevelPermissions(className: string, CLPs: any): Promise<void> {
|
||||||
return this._schemaCollection()
|
return this._schemaCollection()
|
||||||
.then(schemaCollection =>
|
.then((schemaCollection) =>
|
||||||
schemaCollection.updateSchema(className, {
|
schemaCollection.updateSchema(className, {
|
||||||
$set: { '_metadata.class_permissions': CLPs },
|
$set: { '_metadata.class_permissions': CLPs },
|
||||||
})
|
})
|
||||||
)
|
)
|
||||||
.catch(err => this.handleError(err));
|
.catch((err) => this.handleError(err));
|
||||||
}
|
}
|
||||||
|
|
||||||
setIndexesWithSchemaFormat(
|
setIndexesWithSchemaFormat(
|
||||||
@@ -259,7 +259,7 @@ export class MongoStorageAdapter implements StorageAdapter {
|
|||||||
}
|
}
|
||||||
const deletePromises = [];
|
const deletePromises = [];
|
||||||
const insertedIndexes = [];
|
const insertedIndexes = [];
|
||||||
Object.keys(submittedIndexes).forEach(name => {
|
Object.keys(submittedIndexes).forEach((name) => {
|
||||||
const field = submittedIndexes[name];
|
const field = submittedIndexes[name];
|
||||||
if (existingIndexes[name] && field.__op !== 'Delete') {
|
if (existingIndexes[name] && field.__op !== 'Delete') {
|
||||||
throw new Parse.Error(
|
throw new Parse.Error(
|
||||||
@@ -278,7 +278,7 @@ export class MongoStorageAdapter implements StorageAdapter {
|
|||||||
deletePromises.push(promise);
|
deletePromises.push(promise);
|
||||||
delete existingIndexes[name];
|
delete existingIndexes[name];
|
||||||
} else {
|
} else {
|
||||||
Object.keys(field).forEach(key => {
|
Object.keys(field).forEach((key) => {
|
||||||
if (!Object.prototype.hasOwnProperty.call(fields, key)) {
|
if (!Object.prototype.hasOwnProperty.call(fields, key)) {
|
||||||
throw new Parse.Error(
|
throw new Parse.Error(
|
||||||
Parse.Error.INVALID_QUERY,
|
Parse.Error.INVALID_QUERY,
|
||||||
@@ -300,17 +300,17 @@ export class MongoStorageAdapter implements StorageAdapter {
|
|||||||
return Promise.all(deletePromises)
|
return Promise.all(deletePromises)
|
||||||
.then(() => insertPromise)
|
.then(() => insertPromise)
|
||||||
.then(() => this._schemaCollection())
|
.then(() => this._schemaCollection())
|
||||||
.then(schemaCollection =>
|
.then((schemaCollection) =>
|
||||||
schemaCollection.updateSchema(className, {
|
schemaCollection.updateSchema(className, {
|
||||||
$set: { '_metadata.indexes': existingIndexes },
|
$set: { '_metadata.indexes': existingIndexes },
|
||||||
})
|
})
|
||||||
)
|
)
|
||||||
.catch(err => this.handleError(err));
|
.catch((err) => this.handleError(err));
|
||||||
}
|
}
|
||||||
|
|
||||||
setIndexesFromMongo(className: string) {
|
setIndexesFromMongo(className: string) {
|
||||||
return this.getIndexes(className)
|
return this.getIndexes(className)
|
||||||
.then(indexes => {
|
.then((indexes) => {
|
||||||
indexes = indexes.reduce((obj, index) => {
|
indexes = indexes.reduce((obj, index) => {
|
||||||
if (index.key._fts) {
|
if (index.key._fts) {
|
||||||
delete index.key._fts;
|
delete index.key._fts;
|
||||||
@@ -322,13 +322,13 @@ export class MongoStorageAdapter implements StorageAdapter {
|
|||||||
obj[index.name] = index.key;
|
obj[index.name] = index.key;
|
||||||
return obj;
|
return obj;
|
||||||
}, {});
|
}, {});
|
||||||
return this._schemaCollection().then(schemaCollection =>
|
return this._schemaCollection().then((schemaCollection) =>
|
||||||
schemaCollection.updateSchema(className, {
|
schemaCollection.updateSchema(className, {
|
||||||
$set: { '_metadata.indexes': indexes },
|
$set: { '_metadata.indexes': indexes },
|
||||||
})
|
})
|
||||||
);
|
);
|
||||||
})
|
})
|
||||||
.catch(err => this.handleError(err))
|
.catch((err) => this.handleError(err))
|
||||||
.catch(() => {
|
.catch(() => {
|
||||||
// Ignore if collection not found
|
// Ignore if collection not found
|
||||||
return Promise.resolve();
|
return Promise.resolve();
|
||||||
@@ -351,8 +351,8 @@ export class MongoStorageAdapter implements StorageAdapter {
|
|||||||
schema.fields
|
schema.fields
|
||||||
)
|
)
|
||||||
.then(() => this._schemaCollection())
|
.then(() => this._schemaCollection())
|
||||||
.then(schemaCollection => schemaCollection.insertSchema(mongoObject))
|
.then((schemaCollection) => schemaCollection.insertSchema(mongoObject))
|
||||||
.catch(err => this.handleError(err));
|
.catch((err) => this.handleError(err));
|
||||||
}
|
}
|
||||||
|
|
||||||
addFieldIfNotExists(
|
addFieldIfNotExists(
|
||||||
@@ -361,11 +361,11 @@ export class MongoStorageAdapter implements StorageAdapter {
|
|||||||
type: any
|
type: any
|
||||||
): Promise<void> {
|
): Promise<void> {
|
||||||
return this._schemaCollection()
|
return this._schemaCollection()
|
||||||
.then(schemaCollection =>
|
.then((schemaCollection) =>
|
||||||
schemaCollection.addFieldIfNotExists(className, fieldName, type)
|
schemaCollection.addFieldIfNotExists(className, fieldName, type)
|
||||||
)
|
)
|
||||||
.then(() => this.createIndexesIfNeeded(className, fieldName, type))
|
.then(() => this.createIndexesIfNeeded(className, fieldName, type))
|
||||||
.catch(err => this.handleError(err));
|
.catch((err) => this.handleError(err));
|
||||||
}
|
}
|
||||||
|
|
||||||
// Drops a collection. Resolves with true if it was a Parse Schema (eg. _User, Custom, etc.)
|
// Drops a collection. Resolves with true if it was a Parse Schema (eg. _User, Custom, etc.)
|
||||||
@@ -373,8 +373,8 @@ export class MongoStorageAdapter implements StorageAdapter {
|
|||||||
deleteClass(className: string) {
|
deleteClass(className: string) {
|
||||||
return (
|
return (
|
||||||
this._adaptiveCollection(className)
|
this._adaptiveCollection(className)
|
||||||
.then(collection => collection.drop())
|
.then((collection) => collection.drop())
|
||||||
.catch(error => {
|
.catch((error) => {
|
||||||
// 'ns not found' means collection was already gone. Ignore deletion attempt.
|
// 'ns not found' means collection was already gone. Ignore deletion attempt.
|
||||||
if (error.message == 'ns not found') {
|
if (error.message == 'ns not found') {
|
||||||
return;
|
return;
|
||||||
@@ -383,17 +383,17 @@ export class MongoStorageAdapter implements StorageAdapter {
|
|||||||
})
|
})
|
||||||
// We've dropped the collection, now remove the _SCHEMA document
|
// We've dropped the collection, now remove the _SCHEMA document
|
||||||
.then(() => this._schemaCollection())
|
.then(() => this._schemaCollection())
|
||||||
.then(schemaCollection =>
|
.then((schemaCollection) =>
|
||||||
schemaCollection.findAndDeleteSchema(className)
|
schemaCollection.findAndDeleteSchema(className)
|
||||||
)
|
)
|
||||||
.catch(err => this.handleError(err))
|
.catch((err) => this.handleError(err))
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
deleteAllClasses(fast: boolean) {
|
deleteAllClasses(fast: boolean) {
|
||||||
return storageAdapterAllCollections(this).then(collections =>
|
return storageAdapterAllCollections(this).then((collections) =>
|
||||||
Promise.all(
|
Promise.all(
|
||||||
collections.map(collection =>
|
collections.map((collection) =>
|
||||||
fast ? collection.deleteMany({}) : collection.drop()
|
fast ? collection.deleteMany({}) : collection.drop()
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
@@ -421,7 +421,7 @@ export class MongoStorageAdapter implements StorageAdapter {
|
|||||||
|
|
||||||
// Returns a Promise.
|
// Returns a Promise.
|
||||||
deleteFields(className: string, schema: SchemaType, fieldNames: string[]) {
|
deleteFields(className: string, schema: SchemaType, fieldNames: string[]) {
|
||||||
const mongoFormatNames = fieldNames.map(fieldName => {
|
const mongoFormatNames = fieldNames.map((fieldName) => {
|
||||||
if (schema.fields[fieldName].type === 'Pointer') {
|
if (schema.fields[fieldName].type === 'Pointer') {
|
||||||
return `_p_${fieldName}`;
|
return `_p_${fieldName}`;
|
||||||
} else {
|
} else {
|
||||||
@@ -429,23 +429,23 @@ export class MongoStorageAdapter implements StorageAdapter {
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
const collectionUpdate = { $unset: {} };
|
const collectionUpdate = { $unset: {} };
|
||||||
mongoFormatNames.forEach(name => {
|
mongoFormatNames.forEach((name) => {
|
||||||
collectionUpdate['$unset'][name] = null;
|
collectionUpdate['$unset'][name] = null;
|
||||||
});
|
});
|
||||||
|
|
||||||
const schemaUpdate = { $unset: {} };
|
const schemaUpdate = { $unset: {} };
|
||||||
fieldNames.forEach(name => {
|
fieldNames.forEach((name) => {
|
||||||
schemaUpdate['$unset'][name] = null;
|
schemaUpdate['$unset'][name] = null;
|
||||||
schemaUpdate['$unset'][`_metadata.fields_options.${name}`] = null;
|
schemaUpdate['$unset'][`_metadata.fields_options.${name}`] = null;
|
||||||
});
|
});
|
||||||
|
|
||||||
return this._adaptiveCollection(className)
|
return this._adaptiveCollection(className)
|
||||||
.then(collection => collection.updateMany({}, collectionUpdate))
|
.then((collection) => collection.updateMany({}, collectionUpdate))
|
||||||
.then(() => this._schemaCollection())
|
.then(() => this._schemaCollection())
|
||||||
.then(schemaCollection =>
|
.then((schemaCollection) =>
|
||||||
schemaCollection.updateSchema(className, schemaUpdate)
|
schemaCollection.updateSchema(className, schemaUpdate)
|
||||||
)
|
)
|
||||||
.catch(err => this.handleError(err));
|
.catch((err) => this.handleError(err));
|
||||||
}
|
}
|
||||||
|
|
||||||
// Return a promise for all schemas known to this adapter, in Parse format. In case the
|
// Return a promise for all schemas known to this adapter, in Parse format. In case the
|
||||||
@@ -453,10 +453,10 @@ export class MongoStorageAdapter implements StorageAdapter {
|
|||||||
// rejection reason are TBD.
|
// rejection reason are TBD.
|
||||||
getAllClasses(): Promise<StorageClass[]> {
|
getAllClasses(): Promise<StorageClass[]> {
|
||||||
return this._schemaCollection()
|
return this._schemaCollection()
|
||||||
.then(schemasCollection =>
|
.then((schemasCollection) =>
|
||||||
schemasCollection._fetchAllSchemasFrom_SCHEMA()
|
schemasCollection._fetchAllSchemasFrom_SCHEMA()
|
||||||
)
|
)
|
||||||
.catch(err => this.handleError(err));
|
.catch((err) => this.handleError(err));
|
||||||
}
|
}
|
||||||
|
|
||||||
// Return a promise for the schema with the given name, in Parse format. If
|
// Return a promise for the schema with the given name, in Parse format. If
|
||||||
@@ -464,10 +464,10 @@ export class MongoStorageAdapter implements StorageAdapter {
|
|||||||
// undefined as the reason.
|
// undefined as the reason.
|
||||||
getClass(className: string): Promise<StorageClass> {
|
getClass(className: string): Promise<StorageClass> {
|
||||||
return this._schemaCollection()
|
return this._schemaCollection()
|
||||||
.then(schemasCollection =>
|
.then((schemasCollection) =>
|
||||||
schemasCollection._fetchOneSchemaFrom_SCHEMA(className)
|
schemasCollection._fetchOneSchemaFrom_SCHEMA(className)
|
||||||
)
|
)
|
||||||
.catch(err => this.handleError(err));
|
.catch((err) => this.handleError(err));
|
||||||
}
|
}
|
||||||
|
|
||||||
// TODO: As yet not particularly well specified. Creates an object. Maybe shouldn't even need the schema,
|
// TODO: As yet not particularly well specified. Creates an object. Maybe shouldn't even need the schema,
|
||||||
@@ -486,10 +486,10 @@ export class MongoStorageAdapter implements StorageAdapter {
|
|||||||
schema
|
schema
|
||||||
);
|
);
|
||||||
return this._adaptiveCollection(className)
|
return this._adaptiveCollection(className)
|
||||||
.then(collection =>
|
.then((collection) =>
|
||||||
collection.insertOne(mongoObject, transactionalSession)
|
collection.insertOne(mongoObject, transactionalSession)
|
||||||
)
|
)
|
||||||
.catch(error => {
|
.catch((error) => {
|
||||||
if (error.code === 11000) {
|
if (error.code === 11000) {
|
||||||
// Duplicate value
|
// Duplicate value
|
||||||
const err = new Parse.Error(
|
const err = new Parse.Error(
|
||||||
@@ -509,7 +509,7 @@ export class MongoStorageAdapter implements StorageAdapter {
|
|||||||
}
|
}
|
||||||
throw error;
|
throw error;
|
||||||
})
|
})
|
||||||
.catch(err => this.handleError(err));
|
.catch((err) => this.handleError(err));
|
||||||
}
|
}
|
||||||
|
|
||||||
// Remove all objects that match the given Parse Query.
|
// Remove all objects that match the given Parse Query.
|
||||||
@@ -523,11 +523,11 @@ export class MongoStorageAdapter implements StorageAdapter {
|
|||||||
) {
|
) {
|
||||||
schema = convertParseSchemaToMongoSchema(schema);
|
schema = convertParseSchemaToMongoSchema(schema);
|
||||||
return this._adaptiveCollection(className)
|
return this._adaptiveCollection(className)
|
||||||
.then(collection => {
|
.then((collection) => {
|
||||||
const mongoWhere = transformWhere(className, query, schema);
|
const mongoWhere = transformWhere(className, query, schema);
|
||||||
return collection.deleteMany(mongoWhere, transactionalSession);
|
return collection.deleteMany(mongoWhere, transactionalSession);
|
||||||
})
|
})
|
||||||
.catch(err => this.handleError(err))
|
.catch((err) => this.handleError(err))
|
||||||
.then(
|
.then(
|
||||||
({ result }) => {
|
({ result }) => {
|
||||||
if (result.n === 0) {
|
if (result.n === 0) {
|
||||||
@@ -559,10 +559,10 @@ export class MongoStorageAdapter implements StorageAdapter {
|
|||||||
const mongoUpdate = transformUpdate(className, update, schema);
|
const mongoUpdate = transformUpdate(className, update, schema);
|
||||||
const mongoWhere = transformWhere(className, query, schema);
|
const mongoWhere = transformWhere(className, query, schema);
|
||||||
return this._adaptiveCollection(className)
|
return this._adaptiveCollection(className)
|
||||||
.then(collection =>
|
.then((collection) =>
|
||||||
collection.updateMany(mongoWhere, mongoUpdate, transactionalSession)
|
collection.updateMany(mongoWhere, mongoUpdate, transactionalSession)
|
||||||
)
|
)
|
||||||
.catch(err => this.handleError(err));
|
.catch((err) => this.handleError(err));
|
||||||
}
|
}
|
||||||
|
|
||||||
// Atomically finds and updates an object based on query.
|
// Atomically finds and updates an object based on query.
|
||||||
@@ -578,14 +578,16 @@ export class MongoStorageAdapter implements StorageAdapter {
|
|||||||
const mongoUpdate = transformUpdate(className, update, schema);
|
const mongoUpdate = transformUpdate(className, update, schema);
|
||||||
const mongoWhere = transformWhere(className, query, schema);
|
const mongoWhere = transformWhere(className, query, schema);
|
||||||
return this._adaptiveCollection(className)
|
return this._adaptiveCollection(className)
|
||||||
.then(collection =>
|
.then((collection) =>
|
||||||
collection._mongoCollection.findOneAndUpdate(mongoWhere, mongoUpdate, {
|
collection._mongoCollection.findOneAndUpdate(mongoWhere, mongoUpdate, {
|
||||||
returnOriginal: false,
|
returnOriginal: false,
|
||||||
session: transactionalSession || undefined,
|
session: transactionalSession || undefined,
|
||||||
})
|
})
|
||||||
)
|
)
|
||||||
.then(result => mongoObjectToParseObject(className, result.value, schema))
|
.then((result) =>
|
||||||
.catch(error => {
|
mongoObjectToParseObject(className, result.value, schema)
|
||||||
|
)
|
||||||
|
.catch((error) => {
|
||||||
if (error.code === 11000) {
|
if (error.code === 11000) {
|
||||||
throw new Parse.Error(
|
throw new Parse.Error(
|
||||||
Parse.Error.DUPLICATE_VALUE,
|
Parse.Error.DUPLICATE_VALUE,
|
||||||
@@ -594,7 +596,7 @@ export class MongoStorageAdapter implements StorageAdapter {
|
|||||||
}
|
}
|
||||||
throw error;
|
throw error;
|
||||||
})
|
})
|
||||||
.catch(err => this.handleError(err));
|
.catch((err) => this.handleError(err));
|
||||||
}
|
}
|
||||||
|
|
||||||
// Hopefully we can get rid of this. It's only used for config and hooks.
|
// Hopefully we can get rid of this. It's only used for config and hooks.
|
||||||
@@ -609,10 +611,10 @@ export class MongoStorageAdapter implements StorageAdapter {
|
|||||||
const mongoUpdate = transformUpdate(className, update, schema);
|
const mongoUpdate = transformUpdate(className, update, schema);
|
||||||
const mongoWhere = transformWhere(className, query, schema);
|
const mongoWhere = transformWhere(className, query, schema);
|
||||||
return this._adaptiveCollection(className)
|
return this._adaptiveCollection(className)
|
||||||
.then(collection =>
|
.then((collection) =>
|
||||||
collection.upsertOne(mongoWhere, mongoUpdate, transactionalSession)
|
collection.upsertOne(mongoWhere, mongoUpdate, transactionalSession)
|
||||||
)
|
)
|
||||||
.catch(err => this.handleError(err));
|
.catch((err) => this.handleError(err));
|
||||||
}
|
}
|
||||||
|
|
||||||
// Executes a find. Accepts: className, query in Parse format, and { skip, limit, sort }.
|
// Executes a find. Accepts: className, query in Parse format, and { skip, limit, sort }.
|
||||||
@@ -650,10 +652,17 @@ export class MongoStorageAdapter implements StorageAdapter {
|
|||||||
{}
|
{}
|
||||||
);
|
);
|
||||||
|
|
||||||
|
// If we aren't requesting the `_id` field, we need to explicitly opt out
|
||||||
|
// of it. Doing so in parse-server is unusual, but it can allow us to
|
||||||
|
// optimize some queries with covering indexes.
|
||||||
|
if (keys && !mongoKeys._id) {
|
||||||
|
mongoKeys._id = 0;
|
||||||
|
}
|
||||||
|
|
||||||
readPreference = this._parseReadPreference(readPreference);
|
readPreference = this._parseReadPreference(readPreference);
|
||||||
return this.createTextIndexesIfNeeded(className, query, schema)
|
return this.createTextIndexesIfNeeded(className, query, schema)
|
||||||
.then(() => this._adaptiveCollection(className))
|
.then(() => this._adaptiveCollection(className))
|
||||||
.then(collection =>
|
.then((collection) =>
|
||||||
collection.find(mongoWhere, {
|
collection.find(mongoWhere, {
|
||||||
skip,
|
skip,
|
||||||
limit,
|
limit,
|
||||||
@@ -666,15 +675,15 @@ export class MongoStorageAdapter implements StorageAdapter {
|
|||||||
explain,
|
explain,
|
||||||
})
|
})
|
||||||
)
|
)
|
||||||
.then(objects => {
|
.then((objects) => {
|
||||||
if (explain) {
|
if (explain) {
|
||||||
return objects;
|
return objects;
|
||||||
}
|
}
|
||||||
return objects.map(object =>
|
return objects.map((object) =>
|
||||||
mongoObjectToParseObject(className, object, schema)
|
mongoObjectToParseObject(className, object, schema)
|
||||||
);
|
);
|
||||||
})
|
})
|
||||||
.catch(err => this.handleError(err));
|
.catch((err) => this.handleError(err));
|
||||||
}
|
}
|
||||||
|
|
||||||
ensureIndex(
|
ensureIndex(
|
||||||
@@ -687,10 +696,10 @@ export class MongoStorageAdapter implements StorageAdapter {
|
|||||||
): Promise<any> {
|
): Promise<any> {
|
||||||
schema = convertParseSchemaToMongoSchema(schema);
|
schema = convertParseSchemaToMongoSchema(schema);
|
||||||
const indexCreationRequest = {};
|
const indexCreationRequest = {};
|
||||||
const mongoFieldNames = fieldNames.map(fieldName =>
|
const mongoFieldNames = fieldNames.map((fieldName) =>
|
||||||
transformKey(className, fieldName, schema)
|
transformKey(className, fieldName, schema)
|
||||||
);
|
);
|
||||||
mongoFieldNames.forEach(fieldName => {
|
mongoFieldNames.forEach((fieldName) => {
|
||||||
indexCreationRequest[fieldName] = indexType;
|
indexCreationRequest[fieldName] = indexType;
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -707,16 +716,16 @@ export class MongoStorageAdapter implements StorageAdapter {
|
|||||||
|
|
||||||
return this._adaptiveCollection(className)
|
return this._adaptiveCollection(className)
|
||||||
.then(
|
.then(
|
||||||
collection =>
|
(collection) =>
|
||||||
new Promise((resolve, reject) =>
|
new Promise((resolve, reject) =>
|
||||||
collection._mongoCollection.createIndex(
|
collection._mongoCollection.createIndex(
|
||||||
indexCreationRequest,
|
indexCreationRequest,
|
||||||
indexOptions,
|
indexOptions,
|
||||||
error => (error ? reject(error) : resolve())
|
(error) => (error ? reject(error) : resolve())
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
.catch(err => this.handleError(err));
|
.catch((err) => this.handleError(err));
|
||||||
}
|
}
|
||||||
|
|
||||||
// Create a unique index. Unique indexes on nullable fields are not allowed. Since we don't
|
// Create a unique index. Unique indexes on nullable fields are not allowed. Since we don't
|
||||||
@@ -731,17 +740,17 @@ export class MongoStorageAdapter implements StorageAdapter {
|
|||||||
) {
|
) {
|
||||||
schema = convertParseSchemaToMongoSchema(schema);
|
schema = convertParseSchemaToMongoSchema(schema);
|
||||||
const indexCreationRequest = {};
|
const indexCreationRequest = {};
|
||||||
const mongoFieldNames = fieldNames.map(fieldName =>
|
const mongoFieldNames = fieldNames.map((fieldName) =>
|
||||||
transformKey(className, fieldName, schema)
|
transformKey(className, fieldName, schema)
|
||||||
);
|
);
|
||||||
mongoFieldNames.forEach(fieldName => {
|
mongoFieldNames.forEach((fieldName) => {
|
||||||
indexCreationRequest[fieldName] = 1;
|
indexCreationRequest[fieldName] = 1;
|
||||||
});
|
});
|
||||||
return this._adaptiveCollection(className)
|
return this._adaptiveCollection(className)
|
||||||
.then(collection =>
|
.then((collection) =>
|
||||||
collection._ensureSparseUniqueIndexInBackground(indexCreationRequest)
|
collection._ensureSparseUniqueIndexInBackground(indexCreationRequest)
|
||||||
)
|
)
|
||||||
.catch(error => {
|
.catch((error) => {
|
||||||
if (error.code === 11000) {
|
if (error.code === 11000) {
|
||||||
throw new Parse.Error(
|
throw new Parse.Error(
|
||||||
Parse.Error.DUPLICATE_VALUE,
|
Parse.Error.DUPLICATE_VALUE,
|
||||||
@@ -750,18 +759,18 @@ export class MongoStorageAdapter implements StorageAdapter {
|
|||||||
}
|
}
|
||||||
throw error;
|
throw error;
|
||||||
})
|
})
|
||||||
.catch(err => this.handleError(err));
|
.catch((err) => this.handleError(err));
|
||||||
}
|
}
|
||||||
|
|
||||||
// Used in tests
|
// Used in tests
|
||||||
_rawFind(className: string, query: QueryType) {
|
_rawFind(className: string, query: QueryType) {
|
||||||
return this._adaptiveCollection(className)
|
return this._adaptiveCollection(className)
|
||||||
.then(collection =>
|
.then((collection) =>
|
||||||
collection.find(query, {
|
collection.find(query, {
|
||||||
maxTimeMS: this._maxTimeMS,
|
maxTimeMS: this._maxTimeMS,
|
||||||
})
|
})
|
||||||
)
|
)
|
||||||
.catch(err => this.handleError(err));
|
.catch((err) => this.handleError(err));
|
||||||
}
|
}
|
||||||
|
|
||||||
// Executes a count.
|
// Executes a count.
|
||||||
@@ -775,14 +784,14 @@ export class MongoStorageAdapter implements StorageAdapter {
|
|||||||
schema = convertParseSchemaToMongoSchema(schema);
|
schema = convertParseSchemaToMongoSchema(schema);
|
||||||
readPreference = this._parseReadPreference(readPreference);
|
readPreference = this._parseReadPreference(readPreference);
|
||||||
return this._adaptiveCollection(className)
|
return this._adaptiveCollection(className)
|
||||||
.then(collection =>
|
.then((collection) =>
|
||||||
collection.count(transformWhere(className, query, schema, true), {
|
collection.count(transformWhere(className, query, schema, true), {
|
||||||
maxTimeMS: this._maxTimeMS,
|
maxTimeMS: this._maxTimeMS,
|
||||||
readPreference,
|
readPreference,
|
||||||
hint,
|
hint,
|
||||||
})
|
})
|
||||||
)
|
)
|
||||||
.catch(err => this.handleError(err));
|
.catch((err) => this.handleError(err));
|
||||||
}
|
}
|
||||||
|
|
||||||
distinct(
|
distinct(
|
||||||
@@ -797,22 +806,22 @@ export class MongoStorageAdapter implements StorageAdapter {
|
|||||||
const transformField = transformKey(className, fieldName, schema);
|
const transformField = transformKey(className, fieldName, schema);
|
||||||
|
|
||||||
return this._adaptiveCollection(className)
|
return this._adaptiveCollection(className)
|
||||||
.then(collection =>
|
.then((collection) =>
|
||||||
collection.distinct(
|
collection.distinct(
|
||||||
transformField,
|
transformField,
|
||||||
transformWhere(className, query, schema)
|
transformWhere(className, query, schema)
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
.then(objects => {
|
.then((objects) => {
|
||||||
objects = objects.filter(obj => obj != null);
|
objects = objects.filter((obj) => obj != null);
|
||||||
return objects.map(object => {
|
return objects.map((object) => {
|
||||||
if (isPointerField) {
|
if (isPointerField) {
|
||||||
return transformPointerString(schema, fieldName, object);
|
return transformPointerString(schema, fieldName, object);
|
||||||
}
|
}
|
||||||
return mongoObjectToParseObject(className, object, schema);
|
return mongoObjectToParseObject(className, object, schema);
|
||||||
});
|
});
|
||||||
})
|
})
|
||||||
.catch(err => this.handleError(err));
|
.catch((err) => this.handleError(err));
|
||||||
}
|
}
|
||||||
|
|
||||||
aggregate(
|
aggregate(
|
||||||
@@ -824,7 +833,7 @@ export class MongoStorageAdapter implements StorageAdapter {
|
|||||||
explain?: boolean
|
explain?: boolean
|
||||||
) {
|
) {
|
||||||
let isPointerField = false;
|
let isPointerField = false;
|
||||||
pipeline = pipeline.map(stage => {
|
pipeline = pipeline.map((stage) => {
|
||||||
if (stage.$group) {
|
if (stage.$group) {
|
||||||
stage.$group = this._parseAggregateGroupArgs(schema, stage.$group);
|
stage.$group = this._parseAggregateGroupArgs(schema, stage.$group);
|
||||||
if (
|
if (
|
||||||
@@ -845,13 +854,16 @@ export class MongoStorageAdapter implements StorageAdapter {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
if (stage.$geoNear) {
|
if (stage.$geoNear) {
|
||||||
stage.$geoNear.query = this._parseAggregateArgs(schema, stage.$geoNear.query);
|
stage.$geoNear.query = this._parseAggregateArgs(
|
||||||
|
schema,
|
||||||
|
stage.$geoNear.query
|
||||||
|
);
|
||||||
}
|
}
|
||||||
return stage;
|
return stage;
|
||||||
});
|
});
|
||||||
readPreference = this._parseReadPreference(readPreference);
|
readPreference = this._parseReadPreference(readPreference);
|
||||||
return this._adaptiveCollection(className)
|
return this._adaptiveCollection(className)
|
||||||
.then(collection =>
|
.then((collection) =>
|
||||||
collection.aggregate(pipeline, {
|
collection.aggregate(pipeline, {
|
||||||
readPreference,
|
readPreference,
|
||||||
maxTimeMS: this._maxTimeMS,
|
maxTimeMS: this._maxTimeMS,
|
||||||
@@ -859,8 +871,8 @@ export class MongoStorageAdapter implements StorageAdapter {
|
|||||||
explain,
|
explain,
|
||||||
})
|
})
|
||||||
)
|
)
|
||||||
.then(results => {
|
.then((results) => {
|
||||||
results.forEach(result => {
|
results.forEach((result) => {
|
||||||
if (Object.prototype.hasOwnProperty.call(result, '_id')) {
|
if (Object.prototype.hasOwnProperty.call(result, '_id')) {
|
||||||
if (isPointerField && result._id) {
|
if (isPointerField && result._id) {
|
||||||
result._id = result._id.split('$')[1];
|
result._id = result._id.split('$')[1];
|
||||||
@@ -879,12 +891,12 @@ export class MongoStorageAdapter implements StorageAdapter {
|
|||||||
});
|
});
|
||||||
return results;
|
return results;
|
||||||
})
|
})
|
||||||
.then(objects =>
|
.then((objects) =>
|
||||||
objects.map(object =>
|
objects.map((object) =>
|
||||||
mongoObjectToParseObject(className, object, schema)
|
mongoObjectToParseObject(className, object, schema)
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
.catch(err => this.handleError(err));
|
.catch((err) => this.handleError(err));
|
||||||
}
|
}
|
||||||
|
|
||||||
// This function will recursively traverse the pipeline and convert any Pointer or Date columns.
|
// This function will recursively traverse the pipeline and convert any Pointer or Date columns.
|
||||||
@@ -910,7 +922,7 @@ export class MongoStorageAdapter implements StorageAdapter {
|
|||||||
if (pipeline === null) {
|
if (pipeline === null) {
|
||||||
return null;
|
return null;
|
||||||
} else if (Array.isArray(pipeline)) {
|
} else if (Array.isArray(pipeline)) {
|
||||||
return pipeline.map(value => this._parseAggregateArgs(schema, value));
|
return pipeline.map((value) => this._parseAggregateArgs(schema, value));
|
||||||
} else if (typeof pipeline === 'object') {
|
} else if (typeof pipeline === 'object') {
|
||||||
const returnValue = {};
|
const returnValue = {};
|
||||||
for (const field in pipeline) {
|
for (const field in pipeline) {
|
||||||
@@ -985,7 +997,7 @@ export class MongoStorageAdapter implements StorageAdapter {
|
|||||||
// updatedAt or objectId and change it accordingly.
|
// updatedAt or objectId and change it accordingly.
|
||||||
_parseAggregateGroupArgs(schema: any, pipeline: any): any {
|
_parseAggregateGroupArgs(schema: any, pipeline: any): any {
|
||||||
if (Array.isArray(pipeline)) {
|
if (Array.isArray(pipeline)) {
|
||||||
return pipeline.map(value =>
|
return pipeline.map((value) =>
|
||||||
this._parseAggregateGroupArgs(schema, value)
|
this._parseAggregateGroupArgs(schema, value)
|
||||||
);
|
);
|
||||||
} else if (typeof pipeline === 'object') {
|
} else if (typeof pipeline === 'object') {
|
||||||
@@ -1065,14 +1077,14 @@ export class MongoStorageAdapter implements StorageAdapter {
|
|||||||
|
|
||||||
createIndex(className: string, index: any) {
|
createIndex(className: string, index: any) {
|
||||||
return this._adaptiveCollection(className)
|
return this._adaptiveCollection(className)
|
||||||
.then(collection => collection._mongoCollection.createIndex(index))
|
.then((collection) => collection._mongoCollection.createIndex(index))
|
||||||
.catch(err => this.handleError(err));
|
.catch((err) => this.handleError(err));
|
||||||
}
|
}
|
||||||
|
|
||||||
createIndexes(className: string, indexes: any) {
|
createIndexes(className: string, indexes: any) {
|
||||||
return this._adaptiveCollection(className)
|
return this._adaptiveCollection(className)
|
||||||
.then(collection => collection._mongoCollection.createIndexes(indexes))
|
.then((collection) => collection._mongoCollection.createIndexes(indexes))
|
||||||
.catch(err => this.handleError(err));
|
.catch((err) => this.handleError(err));
|
||||||
}
|
}
|
||||||
|
|
||||||
createIndexesIfNeeded(className: string, fieldName: string, type: any) {
|
createIndexesIfNeeded(className: string, fieldName: string, type: any) {
|
||||||
@@ -1110,7 +1122,7 @@ export class MongoStorageAdapter implements StorageAdapter {
|
|||||||
textIndex,
|
textIndex,
|
||||||
existingIndexes,
|
existingIndexes,
|
||||||
schema.fields
|
schema.fields
|
||||||
).catch(error => {
|
).catch((error) => {
|
||||||
if (error.code === 85) {
|
if (error.code === 85) {
|
||||||
// Index exist with different options
|
// Index exist with different options
|
||||||
return this.setIndexesFromMongo(className);
|
return this.setIndexesFromMongo(className);
|
||||||
@@ -1123,31 +1135,31 @@ export class MongoStorageAdapter implements StorageAdapter {
|
|||||||
|
|
||||||
getIndexes(className: string) {
|
getIndexes(className: string) {
|
||||||
return this._adaptiveCollection(className)
|
return this._adaptiveCollection(className)
|
||||||
.then(collection => collection._mongoCollection.indexes())
|
.then((collection) => collection._mongoCollection.indexes())
|
||||||
.catch(err => this.handleError(err));
|
.catch((err) => this.handleError(err));
|
||||||
}
|
}
|
||||||
|
|
||||||
dropIndex(className: string, index: any) {
|
dropIndex(className: string, index: any) {
|
||||||
return this._adaptiveCollection(className)
|
return this._adaptiveCollection(className)
|
||||||
.then(collection => collection._mongoCollection.dropIndex(index))
|
.then((collection) => collection._mongoCollection.dropIndex(index))
|
||||||
.catch(err => this.handleError(err));
|
.catch((err) => this.handleError(err));
|
||||||
}
|
}
|
||||||
|
|
||||||
dropAllIndexes(className: string) {
|
dropAllIndexes(className: string) {
|
||||||
return this._adaptiveCollection(className)
|
return this._adaptiveCollection(className)
|
||||||
.then(collection => collection._mongoCollection.dropIndexes())
|
.then((collection) => collection._mongoCollection.dropIndexes())
|
||||||
.catch(err => this.handleError(err));
|
.catch((err) => this.handleError(err));
|
||||||
}
|
}
|
||||||
|
|
||||||
updateSchemaWithIndexes(): Promise<any> {
|
updateSchemaWithIndexes(): Promise<any> {
|
||||||
return this.getAllClasses()
|
return this.getAllClasses()
|
||||||
.then(classes => {
|
.then((classes) => {
|
||||||
const promises = classes.map(schema => {
|
const promises = classes.map((schema) => {
|
||||||
return this.setIndexesFromMongo(schema.className);
|
return this.setIndexesFromMongo(schema.className);
|
||||||
});
|
});
|
||||||
return Promise.all(promises);
|
return Promise.all(promises);
|
||||||
})
|
})
|
||||||
.catch(err => this.handleError(err));
|
.catch((err) => this.handleError(err));
|
||||||
}
|
}
|
||||||
|
|
||||||
createTransactionalSession(): Promise<any> {
|
createTransactionalSession(): Promise<any> {
|
||||||
|
|||||||
@@ -65,7 +65,7 @@ const specialQuerykeys = [
|
|||||||
'_failed_login_count',
|
'_failed_login_count',
|
||||||
];
|
];
|
||||||
|
|
||||||
const isSpecialQueryKey = key => {
|
const isSpecialQueryKey = (key) => {
|
||||||
return specialQuerykeys.indexOf(key) >= 0;
|
return specialQuerykeys.indexOf(key) >= 0;
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -107,7 +107,7 @@ const validateQuery = (query: any): void => {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
Object.keys(query).forEach(key => {
|
Object.keys(query).forEach((key) => {
|
||||||
if (query && query[key] && query[key].$regex) {
|
if (query && query[key] && query[key].$regex) {
|
||||||
if (typeof query[key].$options === 'string') {
|
if (typeof query[key].$options === 'string') {
|
||||||
if (!query[key].$options.match(/^[imxs]+$/)) {
|
if (!query[key].$options.match(/^[imxs]+$/)) {
|
||||||
@@ -149,8 +149,8 @@ const filterSensitiveData = (
|
|||||||
if (isReadOperation && perms.protectedFields) {
|
if (isReadOperation && perms.protectedFields) {
|
||||||
// extract protectedFields added with the pointer-permission prefix
|
// extract protectedFields added with the pointer-permission prefix
|
||||||
const protectedFieldsPointerPerm = Object.keys(perms.protectedFields)
|
const protectedFieldsPointerPerm = Object.keys(perms.protectedFields)
|
||||||
.filter(key => key.startsWith('userField:'))
|
.filter((key) => key.startsWith('userField:'))
|
||||||
.map(key => {
|
.map((key) => {
|
||||||
return { key: key.substring(10), value: perms.protectedFields[key] };
|
return { key: key.substring(10), value: perms.protectedFields[key] };
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -158,13 +158,13 @@ const filterSensitiveData = (
|
|||||||
let overrideProtectedFields = false;
|
let overrideProtectedFields = false;
|
||||||
|
|
||||||
// check if the object grants the current user access based on the extracted fields
|
// check if the object grants the current user access based on the extracted fields
|
||||||
protectedFieldsPointerPerm.forEach(pointerPerm => {
|
protectedFieldsPointerPerm.forEach((pointerPerm) => {
|
||||||
let pointerPermIncludesUser = false;
|
let pointerPermIncludesUser = false;
|
||||||
const readUserFieldValue = object[pointerPerm.key];
|
const readUserFieldValue = object[pointerPerm.key];
|
||||||
if (readUserFieldValue) {
|
if (readUserFieldValue) {
|
||||||
if (Array.isArray(readUserFieldValue)) {
|
if (Array.isArray(readUserFieldValue)) {
|
||||||
pointerPermIncludesUser = readUserFieldValue.some(
|
pointerPermIncludesUser = readUserFieldValue.some(
|
||||||
user => user.objectId && user.objectId === userId
|
(user) => user.objectId && user.objectId === userId
|
||||||
);
|
);
|
||||||
} else {
|
} else {
|
||||||
pointerPermIncludesUser =
|
pointerPermIncludesUser =
|
||||||
@@ -186,14 +186,14 @@ const filterSensitiveData = (
|
|||||||
newProtectedFields.push(protectedFields);
|
newProtectedFields.push(protectedFields);
|
||||||
}
|
}
|
||||||
// intersect all sets of protectedFields
|
// intersect all sets of protectedFields
|
||||||
newProtectedFields.forEach(fields => {
|
newProtectedFields.forEach((fields) => {
|
||||||
if (fields) {
|
if (fields) {
|
||||||
// if there're no protctedFields by other criteria ( id / role / auth)
|
// if there're no protctedFields by other criteria ( id / role / auth)
|
||||||
// then we must intersect each set (per userField)
|
// then we must intersect each set (per userField)
|
||||||
if (!protectedFields) {
|
if (!protectedFields) {
|
||||||
protectedFields = fields;
|
protectedFields = fields;
|
||||||
} else {
|
} else {
|
||||||
protectedFields = protectedFields.filter(v => fields.includes(v));
|
protectedFields = protectedFields.filter((v) => fields.includes(v));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
@@ -205,13 +205,13 @@ const filterSensitiveData = (
|
|||||||
/* special treat for the user class: don't filter protectedFields if currently loggedin user is
|
/* special treat for the user class: don't filter protectedFields if currently loggedin user is
|
||||||
the retrieved user */
|
the retrieved user */
|
||||||
if (!(isUserClass && userId && object.objectId === userId)) {
|
if (!(isUserClass && userId && object.objectId === userId)) {
|
||||||
protectedFields && protectedFields.forEach(k => delete object[k]);
|
protectedFields && protectedFields.forEach((k) => delete object[k]);
|
||||||
|
|
||||||
// fields not requested by client (excluded),
|
// fields not requested by client (excluded),
|
||||||
//but were needed to apply protecttedFields
|
//but were needed to apply protecttedFields
|
||||||
perms.protectedFields &&
|
perms.protectedFields &&
|
||||||
perms.protectedFields.temporaryKeys &&
|
perms.protectedFields.temporaryKeys &&
|
||||||
perms.protectedFields.temporaryKeys.forEach(k => delete object[k]);
|
perms.protectedFields.temporaryKeys.forEach((k) => delete object[k]);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!isUserClass) {
|
if (!isUserClass) {
|
||||||
@@ -265,7 +265,7 @@ const specialKeysForUpdate = [
|
|||||||
'_password_history',
|
'_password_history',
|
||||||
];
|
];
|
||||||
|
|
||||||
const isSpecialUpdateKey = key => {
|
const isSpecialUpdateKey = (key) => {
|
||||||
return specialKeysForUpdate.indexOf(key) >= 0;
|
return specialKeysForUpdate.indexOf(key) >= 0;
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -291,7 +291,7 @@ function sanitizeDatabaseResult(originalObject, result): Promise<any> {
|
|||||||
if (!result) {
|
if (!result) {
|
||||||
return Promise.resolve(response);
|
return Promise.resolve(response);
|
||||||
}
|
}
|
||||||
Object.keys(originalObject).forEach(key => {
|
Object.keys(originalObject).forEach((key) => {
|
||||||
const keyUpdate = originalObject[key];
|
const keyUpdate = originalObject[key];
|
||||||
// determine if that was an op
|
// determine if that was an op
|
||||||
if (
|
if (
|
||||||
@@ -312,7 +312,7 @@ function joinTableName(className, key) {
|
|||||||
return `_Join:${key}:${className}`;
|
return `_Join:${key}:${className}`;
|
||||||
}
|
}
|
||||||
|
|
||||||
const flattenUpdateOperatorsForCreate = object => {
|
const flattenUpdateOperatorsForCreate = (object) => {
|
||||||
for (const key in object) {
|
for (const key in object) {
|
||||||
if (object[key] && object[key].__op) {
|
if (object[key] && object[key].__op) {
|
||||||
switch (object[key].__op) {
|
switch (object[key].__op) {
|
||||||
@@ -367,7 +367,7 @@ const flattenUpdateOperatorsForCreate = object => {
|
|||||||
|
|
||||||
const transformAuthData = (className, object, schema) => {
|
const transformAuthData = (className, object, schema) => {
|
||||||
if (object.authData && className === '_User') {
|
if (object.authData && className === '_User') {
|
||||||
Object.keys(object.authData).forEach(provider => {
|
Object.keys(object.authData).forEach((provider) => {
|
||||||
const providerData = object.authData[provider];
|
const providerData = object.authData[provider];
|
||||||
const fieldName = `_auth_data_${provider}`;
|
const fieldName = `_auth_data_${provider}`;
|
||||||
if (providerData == null) {
|
if (providerData == null) {
|
||||||
@@ -387,7 +387,7 @@ const untransformObjectACL = ({ _rperm, _wperm, ...output }) => {
|
|||||||
if (_rperm || _wperm) {
|
if (_rperm || _wperm) {
|
||||||
output.ACL = {};
|
output.ACL = {};
|
||||||
|
|
||||||
(_rperm || []).forEach(entry => {
|
(_rperm || []).forEach((entry) => {
|
||||||
if (!output.ACL[entry]) {
|
if (!output.ACL[entry]) {
|
||||||
output.ACL[entry] = { read: true };
|
output.ACL[entry] = { read: true };
|
||||||
} else {
|
} else {
|
||||||
@@ -395,7 +395,7 @@ const untransformObjectACL = ({ _rperm, _wperm, ...output }) => {
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
(_wperm || []).forEach(entry => {
|
(_wperm || []).forEach((entry) => {
|
||||||
if (!output.ACL[entry]) {
|
if (!output.ACL[entry]) {
|
||||||
output.ACL[entry] = { write: true };
|
output.ACL[entry] = { write: true };
|
||||||
} else {
|
} else {
|
||||||
@@ -442,8 +442,10 @@ class DatabaseController {
|
|||||||
|
|
||||||
purgeCollection(className: string): Promise<void> {
|
purgeCollection(className: string): Promise<void> {
|
||||||
return this.loadSchema()
|
return this.loadSchema()
|
||||||
.then(schemaController => schemaController.getOneSchema(className))
|
.then((schemaController) => schemaController.getOneSchema(className))
|
||||||
.then(schema => this.adapter.deleteObjectsByQuery(className, schema, {}));
|
.then((schema) =>
|
||||||
|
this.adapter.deleteObjectsByQuery(className, schema, {})
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
validateClassName(className: string): Promise<void> {
|
validateClassName(className: string): Promise<void> {
|
||||||
@@ -490,7 +492,7 @@ class DatabaseController {
|
|||||||
// classname through the key.
|
// classname through the key.
|
||||||
// TODO: make this not in the DatabaseController interface
|
// TODO: make this not in the DatabaseController interface
|
||||||
redirectClassNameForKey(className: string, key: string): Promise<?string> {
|
redirectClassNameForKey(className: string, key: string): Promise<?string> {
|
||||||
return this.loadSchema().then(schema => {
|
return this.loadSchema().then((schema) => {
|
||||||
var t = schema.getExpectedType(className, key);
|
var t = schema.getExpectedType(className, key);
|
||||||
if (t != null && typeof t !== 'string' && t.type === 'Relation') {
|
if (t != null && typeof t !== 'string' && t.type === 'Relation') {
|
||||||
return t.targetClass;
|
return t.targetClass;
|
||||||
@@ -514,7 +516,7 @@ class DatabaseController {
|
|||||||
const isMaster = acl === undefined;
|
const isMaster = acl === undefined;
|
||||||
var aclGroup: string[] = acl || [];
|
var aclGroup: string[] = acl || [];
|
||||||
return this.loadSchema()
|
return this.loadSchema()
|
||||||
.then(s => {
|
.then((s) => {
|
||||||
schema = s;
|
schema = s;
|
||||||
if (isMaster) {
|
if (isMaster) {
|
||||||
return Promise.resolve();
|
return Promise.resolve();
|
||||||
@@ -550,7 +552,7 @@ class DatabaseController {
|
|||||||
var aclGroup = acl || [];
|
var aclGroup = acl || [];
|
||||||
|
|
||||||
return this.loadSchemaIfNeeded(validSchemaController).then(
|
return this.loadSchemaIfNeeded(validSchemaController).then(
|
||||||
schemaController => {
|
(schemaController) => {
|
||||||
return (isMaster
|
return (isMaster
|
||||||
? Promise.resolve()
|
? Promise.resolve()
|
||||||
: schemaController.validatePermission(className, aclGroup, 'update')
|
: schemaController.validatePermission(className, aclGroup, 'update')
|
||||||
@@ -594,7 +596,7 @@ class DatabaseController {
|
|||||||
validateQuery(query);
|
validateQuery(query);
|
||||||
return schemaController
|
return schemaController
|
||||||
.getOneSchema(className, true)
|
.getOneSchema(className, true)
|
||||||
.catch(error => {
|
.catch((error) => {
|
||||||
// If the schema doesn't exist, pretend it exists with no fields. This behavior
|
// If the schema doesn't exist, pretend it exists with no fields. This behavior
|
||||||
// will likely need revisiting.
|
// will likely need revisiting.
|
||||||
if (error === undefined) {
|
if (error === undefined) {
|
||||||
@@ -602,8 +604,8 @@ class DatabaseController {
|
|||||||
}
|
}
|
||||||
throw error;
|
throw error;
|
||||||
})
|
})
|
||||||
.then(schema => {
|
.then((schema) => {
|
||||||
Object.keys(update).forEach(fieldName => {
|
Object.keys(update).forEach((fieldName) => {
|
||||||
if (fieldName.match(/^authData\.([a-zA-Z0-9_]+)\.id$/)) {
|
if (fieldName.match(/^authData\.([a-zA-Z0-9_]+)\.id$/)) {
|
||||||
throw new Parse.Error(
|
throw new Parse.Error(
|
||||||
Parse.Error.INVALID_KEY_NAME,
|
Parse.Error.INVALID_KEY_NAME,
|
||||||
@@ -626,7 +628,7 @@ class DatabaseController {
|
|||||||
update[updateOperation] &&
|
update[updateOperation] &&
|
||||||
typeof update[updateOperation] === 'object' &&
|
typeof update[updateOperation] === 'object' &&
|
||||||
Object.keys(update[updateOperation]).some(
|
Object.keys(update[updateOperation]).some(
|
||||||
innerKey =>
|
(innerKey) =>
|
||||||
innerKey.includes('$') || innerKey.includes('.')
|
innerKey.includes('$') || innerKey.includes('.')
|
||||||
)
|
)
|
||||||
) {
|
) {
|
||||||
@@ -641,7 +643,7 @@ class DatabaseController {
|
|||||||
if (validateOnly) {
|
if (validateOnly) {
|
||||||
return this.adapter
|
return this.adapter
|
||||||
.find(className, schema, query, {})
|
.find(className, schema, query, {})
|
||||||
.then(result => {
|
.then((result) => {
|
||||||
if (!result || !result.length) {
|
if (!result || !result.length) {
|
||||||
throw new Parse.Error(
|
throw new Parse.Error(
|
||||||
Parse.Error.OBJECT_NOT_FOUND,
|
Parse.Error.OBJECT_NOT_FOUND,
|
||||||
@@ -697,7 +699,7 @@ class DatabaseController {
|
|||||||
return result;
|
return result;
|
||||||
});
|
});
|
||||||
})
|
})
|
||||||
.then(result => {
|
.then((result) => {
|
||||||
if (skipSanitization) {
|
if (skipSanitization) {
|
||||||
return Promise.resolve(result);
|
return Promise.resolve(result);
|
||||||
}
|
}
|
||||||
@@ -820,7 +822,7 @@ class DatabaseController {
|
|||||||
doc,
|
doc,
|
||||||
this._transactionalSession
|
this._transactionalSession
|
||||||
)
|
)
|
||||||
.catch(error => {
|
.catch((error) => {
|
||||||
// We don't care if they try to delete a non-existent relation.
|
// We don't care if they try to delete a non-existent relation.
|
||||||
if (error.code == Parse.Error.OBJECT_NOT_FOUND) {
|
if (error.code == Parse.Error.OBJECT_NOT_FOUND) {
|
||||||
return;
|
return;
|
||||||
@@ -846,7 +848,7 @@ class DatabaseController {
|
|||||||
const aclGroup = acl || [];
|
const aclGroup = acl || [];
|
||||||
|
|
||||||
return this.loadSchemaIfNeeded(validSchemaController).then(
|
return this.loadSchemaIfNeeded(validSchemaController).then(
|
||||||
schemaController => {
|
(schemaController) => {
|
||||||
return (isMaster
|
return (isMaster
|
||||||
? Promise.resolve()
|
? Promise.resolve()
|
||||||
: schemaController.validatePermission(className, aclGroup, 'delete')
|
: schemaController.validatePermission(className, aclGroup, 'delete')
|
||||||
@@ -873,7 +875,7 @@ class DatabaseController {
|
|||||||
validateQuery(query);
|
validateQuery(query);
|
||||||
return schemaController
|
return schemaController
|
||||||
.getOneSchema(className)
|
.getOneSchema(className)
|
||||||
.catch(error => {
|
.catch((error) => {
|
||||||
// If the schema doesn't exist, pretend it exists with no fields. This behavior
|
// If the schema doesn't exist, pretend it exists with no fields. This behavior
|
||||||
// will likely need revisiting.
|
// will likely need revisiting.
|
||||||
if (error === undefined) {
|
if (error === undefined) {
|
||||||
@@ -881,7 +883,7 @@ class DatabaseController {
|
|||||||
}
|
}
|
||||||
throw error;
|
throw error;
|
||||||
})
|
})
|
||||||
.then(parseFormatSchema =>
|
.then((parseFormatSchema) =>
|
||||||
this.adapter.deleteObjectsByQuery(
|
this.adapter.deleteObjectsByQuery(
|
||||||
className,
|
className,
|
||||||
parseFormatSchema,
|
parseFormatSchema,
|
||||||
@@ -889,7 +891,7 @@ class DatabaseController {
|
|||||||
this._transactionalSession
|
this._transactionalSession
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
.catch(error => {
|
.catch((error) => {
|
||||||
// When deleting sessions while changing passwords, don't throw an error if they don't have any sessions.
|
// When deleting sessions while changing passwords, don't throw an error if they don't have any sessions.
|
||||||
if (
|
if (
|
||||||
className === '_Session' &&
|
className === '_Session' &&
|
||||||
@@ -930,14 +932,14 @@ class DatabaseController {
|
|||||||
|
|
||||||
return this.validateClassName(className)
|
return this.validateClassName(className)
|
||||||
.then(() => this.loadSchemaIfNeeded(validSchemaController))
|
.then(() => this.loadSchemaIfNeeded(validSchemaController))
|
||||||
.then(schemaController => {
|
.then((schemaController) => {
|
||||||
return (isMaster
|
return (isMaster
|
||||||
? Promise.resolve()
|
? Promise.resolve()
|
||||||
: schemaController.validatePermission(className, aclGroup, 'create')
|
: schemaController.validatePermission(className, aclGroup, 'create')
|
||||||
)
|
)
|
||||||
.then(() => schemaController.enforceClassExists(className))
|
.then(() => schemaController.enforceClassExists(className))
|
||||||
.then(() => schemaController.getOneSchema(className, true))
|
.then(() => schemaController.getOneSchema(className, true))
|
||||||
.then(schema => {
|
.then((schema) => {
|
||||||
transformAuthData(className, object, schema);
|
transformAuthData(className, object, schema);
|
||||||
flattenUpdateOperatorsForCreate(object);
|
flattenUpdateOperatorsForCreate(object);
|
||||||
if (validateOnly) {
|
if (validateOnly) {
|
||||||
@@ -950,7 +952,7 @@ class DatabaseController {
|
|||||||
this._transactionalSession
|
this._transactionalSession
|
||||||
);
|
);
|
||||||
})
|
})
|
||||||
.then(result => {
|
.then((result) => {
|
||||||
if (validateOnly) {
|
if (validateOnly) {
|
||||||
return originalObject;
|
return originalObject;
|
||||||
}
|
}
|
||||||
@@ -979,7 +981,7 @@ class DatabaseController {
|
|||||||
}
|
}
|
||||||
const fields = Object.keys(object);
|
const fields = Object.keys(object);
|
||||||
const schemaFields = Object.keys(classSchema.fields);
|
const schemaFields = Object.keys(classSchema.fields);
|
||||||
const newKeys = fields.filter(field => {
|
const newKeys = fields.filter((field) => {
|
||||||
// Skip fields that are unset
|
// Skip fields that are unset
|
||||||
if (
|
if (
|
||||||
object[field] &&
|
object[field] &&
|
||||||
@@ -1038,7 +1040,7 @@ class DatabaseController {
|
|||||||
{ owningId },
|
{ owningId },
|
||||||
findOptions
|
findOptions
|
||||||
)
|
)
|
||||||
.then(results => results.map(result => result.relatedId));
|
.then((results) => results.map((result) => result.relatedId));
|
||||||
}
|
}
|
||||||
|
|
||||||
// Returns a promise for a list of owning ids given some related ids.
|
// Returns a promise for a list of owning ids given some related ids.
|
||||||
@@ -1053,9 +1055,9 @@ class DatabaseController {
|
|||||||
joinTableName(className, key),
|
joinTableName(className, key),
|
||||||
relationSchema,
|
relationSchema,
|
||||||
{ relatedId: { $in: relatedIds } },
|
{ relatedId: { $in: relatedIds } },
|
||||||
{}
|
{ keys: ['owningId'] }
|
||||||
)
|
)
|
||||||
.then(results => results.map(result => result.owningId));
|
.then((results) => results.map((result) => result.owningId));
|
||||||
}
|
}
|
||||||
|
|
||||||
// Modifies query so that it no longer has $in on relation fields, or
|
// Modifies query so that it no longer has $in on relation fields, or
|
||||||
@@ -1069,7 +1071,7 @@ class DatabaseController {
|
|||||||
return Promise.all(
|
return Promise.all(
|
||||||
ors.map((aQuery, index) => {
|
ors.map((aQuery, index) => {
|
||||||
return this.reduceInRelation(className, aQuery, schema).then(
|
return this.reduceInRelation(className, aQuery, schema).then(
|
||||||
aQuery => {
|
(aQuery) => {
|
||||||
query['$or'][index] = aQuery;
|
query['$or'][index] = aQuery;
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
@@ -1079,7 +1081,7 @@ class DatabaseController {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
const promises = Object.keys(query).map(key => {
|
const promises = Object.keys(query).map((key) => {
|
||||||
const t = schema.getExpectedType(className, key);
|
const t = schema.getExpectedType(className, key);
|
||||||
if (!t || t.type !== 'Relation') {
|
if (!t || t.type !== 'Relation') {
|
||||||
return Promise.resolve(query);
|
return Promise.resolve(query);
|
||||||
@@ -1093,16 +1095,16 @@ class DatabaseController {
|
|||||||
query[key].__type == 'Pointer')
|
query[key].__type == 'Pointer')
|
||||||
) {
|
) {
|
||||||
// Build the list of queries
|
// Build the list of queries
|
||||||
queries = Object.keys(query[key]).map(constraintKey => {
|
queries = Object.keys(query[key]).map((constraintKey) => {
|
||||||
let relatedIds;
|
let relatedIds;
|
||||||
let isNegation = false;
|
let isNegation = false;
|
||||||
if (constraintKey === 'objectId') {
|
if (constraintKey === 'objectId') {
|
||||||
relatedIds = [query[key].objectId];
|
relatedIds = [query[key].objectId];
|
||||||
} else if (constraintKey == '$in') {
|
} else if (constraintKey == '$in') {
|
||||||
relatedIds = query[key]['$in'].map(r => r.objectId);
|
relatedIds = query[key]['$in'].map((r) => r.objectId);
|
||||||
} else if (constraintKey == '$nin') {
|
} else if (constraintKey == '$nin') {
|
||||||
isNegation = true;
|
isNegation = true;
|
||||||
relatedIds = query[key]['$nin'].map(r => r.objectId);
|
relatedIds = query[key]['$nin'].map((r) => r.objectId);
|
||||||
} else if (constraintKey == '$ne') {
|
} else if (constraintKey == '$ne') {
|
||||||
isNegation = true;
|
isNegation = true;
|
||||||
relatedIds = [query[key]['$ne'].objectId];
|
relatedIds = [query[key]['$ne'].objectId];
|
||||||
@@ -1122,11 +1124,11 @@ class DatabaseController {
|
|||||||
delete query[key];
|
delete query[key];
|
||||||
// execute each query independently to build the list of
|
// execute each query independently to build the list of
|
||||||
// $in / $nin
|
// $in / $nin
|
||||||
const promises = queries.map(q => {
|
const promises = queries.map((q) => {
|
||||||
if (!q) {
|
if (!q) {
|
||||||
return Promise.resolve();
|
return Promise.resolve();
|
||||||
}
|
}
|
||||||
return this.owningIds(className, key, q.relatedIds).then(ids => {
|
return this.owningIds(className, key, q.relatedIds).then((ids) => {
|
||||||
if (q.isNegation) {
|
if (q.isNegation) {
|
||||||
this.addNotInObjectIdsIds(ids, query);
|
this.addNotInObjectIdsIds(ids, query);
|
||||||
} else {
|
} else {
|
||||||
@@ -1155,7 +1157,7 @@ class DatabaseController {
|
|||||||
): ?Promise<void> {
|
): ?Promise<void> {
|
||||||
if (query['$or']) {
|
if (query['$or']) {
|
||||||
return Promise.all(
|
return Promise.all(
|
||||||
query['$or'].map(aQuery => {
|
query['$or'].map((aQuery) => {
|
||||||
return this.reduceRelationKeys(className, aQuery, queryOptions);
|
return this.reduceRelationKeys(className, aQuery, queryOptions);
|
||||||
})
|
})
|
||||||
);
|
);
|
||||||
@@ -1169,7 +1171,7 @@ class DatabaseController {
|
|||||||
relatedTo.object.objectId,
|
relatedTo.object.objectId,
|
||||||
queryOptions
|
queryOptions
|
||||||
)
|
)
|
||||||
.then(ids => {
|
.then((ids) => {
|
||||||
delete query['$relatedTo'];
|
delete query['$relatedTo'];
|
||||||
this.addInObjectIdsIds(ids, query);
|
this.addInObjectIdsIds(ids, query);
|
||||||
return this.reduceRelationKeys(className, query, queryOptions);
|
return this.reduceRelationKeys(className, query, queryOptions);
|
||||||
@@ -1192,7 +1194,7 @@ class DatabaseController {
|
|||||||
idsFromEq,
|
idsFromEq,
|
||||||
idsFromIn,
|
idsFromIn,
|
||||||
ids,
|
ids,
|
||||||
].filter(list => list !== null);
|
].filter((list) => list !== null);
|
||||||
const totalLength = allIds.reduce((memo, list) => memo + list.length, 0);
|
const totalLength = allIds.reduce((memo, list) => memo + list.length, 0);
|
||||||
|
|
||||||
let idsIntersection = [];
|
let idsIntersection = [];
|
||||||
@@ -1221,7 +1223,7 @@ class DatabaseController {
|
|||||||
addNotInObjectIdsIds(ids: string[] = [], query: any) {
|
addNotInObjectIdsIds(ids: string[] = [], query: any) {
|
||||||
const idsFromNin =
|
const idsFromNin =
|
||||||
query.objectId && query.objectId['$nin'] ? query.objectId['$nin'] : [];
|
query.objectId && query.objectId['$nin'] ? query.objectId['$nin'] : [];
|
||||||
let allIds = [...idsFromNin, ...ids].filter(list => list !== null);
|
let allIds = [...idsFromNin, ...ids].filter((list) => list !== null);
|
||||||
|
|
||||||
// make a set and spread to remove duplicates
|
// make a set and spread to remove duplicates
|
||||||
allIds = [...new Set(allIds)];
|
allIds = [...new Set(allIds)];
|
||||||
@@ -1290,13 +1292,13 @@ class DatabaseController {
|
|||||||
|
|
||||||
let classExists = true;
|
let classExists = true;
|
||||||
return this.loadSchemaIfNeeded(validSchemaController).then(
|
return this.loadSchemaIfNeeded(validSchemaController).then(
|
||||||
schemaController => {
|
(schemaController) => {
|
||||||
//Allow volatile classes if querying with Master (for _PushStatus)
|
//Allow volatile classes if querying with Master (for _PushStatus)
|
||||||
//TODO: Move volatile classes concept into mongo adapter, postgres adapter shouldn't care
|
//TODO: Move volatile classes concept into mongo adapter, postgres adapter shouldn't care
|
||||||
//that api.parse.com breaks when _PushStatus exists in mongo.
|
//that api.parse.com breaks when _PushStatus exists in mongo.
|
||||||
return schemaController
|
return schemaController
|
||||||
.getOneSchema(className, isMaster)
|
.getOneSchema(className, isMaster)
|
||||||
.catch(error => {
|
.catch((error) => {
|
||||||
// Behavior for non-existent classes is kinda weird on Parse.com. Probably doesn't matter too much.
|
// Behavior for non-existent classes is kinda weird on Parse.com. Probably doesn't matter too much.
|
||||||
// For now, pretend the class exists but has no objects,
|
// For now, pretend the class exists but has no objects,
|
||||||
if (error === undefined) {
|
if (error === undefined) {
|
||||||
@@ -1305,7 +1307,7 @@ class DatabaseController {
|
|||||||
}
|
}
|
||||||
throw error;
|
throw error;
|
||||||
})
|
})
|
||||||
.then(schema => {
|
.then((schema) => {
|
||||||
// Parse.com treats queries on _created_at and _updated_at as if they were queries on createdAt and updatedAt,
|
// Parse.com treats queries on _created_at and _updated_at as if they were queries on createdAt and updatedAt,
|
||||||
// so duplicate that behavior here. If both are specified, the correct behavior to match Parse.com is to
|
// so duplicate that behavior here. If both are specified, the correct behavior to match Parse.com is to
|
||||||
// use the one that appears first in the sort list.
|
// use the one that appears first in the sort list.
|
||||||
@@ -1327,7 +1329,7 @@ class DatabaseController {
|
|||||||
caseInsensitive,
|
caseInsensitive,
|
||||||
explain,
|
explain,
|
||||||
};
|
};
|
||||||
Object.keys(sort).forEach(fieldName => {
|
Object.keys(sort).forEach((fieldName) => {
|
||||||
if (fieldName.match(/^authData\.([a-zA-Z0-9_]+)\.id$/)) {
|
if (fieldName.match(/^authData\.([a-zA-Z0-9_]+)\.id$/)) {
|
||||||
throw new Parse.Error(
|
throw new Parse.Error(
|
||||||
Parse.Error.INVALID_KEY_NAME,
|
Parse.Error.INVALID_KEY_NAME,
|
||||||
@@ -1439,8 +1441,8 @@ class DatabaseController {
|
|||||||
} else {
|
} else {
|
||||||
return this.adapter
|
return this.adapter
|
||||||
.find(className, schema, query, queryOptions)
|
.find(className, schema, query, queryOptions)
|
||||||
.then(objects =>
|
.then((objects) =>
|
||||||
objects.map(object => {
|
objects.map((object) => {
|
||||||
object = untransformObjectACL(object);
|
object = untransformObjectACL(object);
|
||||||
return filterSensitiveData(
|
return filterSensitiveData(
|
||||||
isMaster,
|
isMaster,
|
||||||
@@ -1454,7 +1456,7 @@ class DatabaseController {
|
|||||||
);
|
);
|
||||||
})
|
})
|
||||||
)
|
)
|
||||||
.catch(error => {
|
.catch((error) => {
|
||||||
throw new Parse.Error(
|
throw new Parse.Error(
|
||||||
Parse.Error.INTERNAL_SERVER_ERROR,
|
Parse.Error.INTERNAL_SERVER_ERROR,
|
||||||
error
|
error
|
||||||
@@ -1469,8 +1471,10 @@ class DatabaseController {
|
|||||||
|
|
||||||
deleteSchema(className: string): Promise<void> {
|
deleteSchema(className: string): Promise<void> {
|
||||||
return this.loadSchema({ clearCache: true })
|
return this.loadSchema({ clearCache: true })
|
||||||
.then(schemaController => schemaController.getOneSchema(className, true))
|
.then((schemaController) =>
|
||||||
.catch(error => {
|
schemaController.getOneSchema(className, true)
|
||||||
|
)
|
||||||
|
.catch((error) => {
|
||||||
if (error === undefined) {
|
if (error === undefined) {
|
||||||
return { fields: {} };
|
return { fields: {} };
|
||||||
} else {
|
} else {
|
||||||
@@ -1482,7 +1486,7 @@ class DatabaseController {
|
|||||||
.then(() =>
|
.then(() =>
|
||||||
this.adapter.count(className, { fields: {} }, null, '', false)
|
this.adapter.count(className, { fields: {} }, null, '', false)
|
||||||
)
|
)
|
||||||
.then(count => {
|
.then((count) => {
|
||||||
if (count > 0) {
|
if (count > 0) {
|
||||||
throw new Parse.Error(
|
throw new Parse.Error(
|
||||||
255,
|
255,
|
||||||
@@ -1491,13 +1495,13 @@ class DatabaseController {
|
|||||||
}
|
}
|
||||||
return this.adapter.deleteClass(className);
|
return this.adapter.deleteClass(className);
|
||||||
})
|
})
|
||||||
.then(wasParseCollection => {
|
.then((wasParseCollection) => {
|
||||||
if (wasParseCollection) {
|
if (wasParseCollection) {
|
||||||
const relationFieldNames = Object.keys(schema.fields).filter(
|
const relationFieldNames = Object.keys(schema.fields).filter(
|
||||||
fieldName => schema.fields[fieldName].type === 'Relation'
|
(fieldName) => schema.fields[fieldName].type === 'Relation'
|
||||||
);
|
);
|
||||||
return Promise.all(
|
return Promise.all(
|
||||||
relationFieldNames.map(name =>
|
relationFieldNames.map((name) =>
|
||||||
this.adapter.deleteClass(joinTableName(className, name))
|
this.adapter.deleteClass(joinTableName(className, name))
|
||||||
)
|
)
|
||||||
).then(() => {
|
).then(() => {
|
||||||
@@ -1529,7 +1533,7 @@ class DatabaseController {
|
|||||||
}
|
}
|
||||||
const perms = schema.getClassLevelPermissions(className);
|
const perms = schema.getClassLevelPermissions(className);
|
||||||
|
|
||||||
const userACL = aclGroup.filter(acl => {
|
const userACL = aclGroup.filter((acl) => {
|
||||||
return acl.indexOf('role:') != 0 && acl != '*';
|
return acl.indexOf('role:') != 0 && acl != '*';
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -1566,7 +1570,7 @@ class DatabaseController {
|
|||||||
objectId: userId,
|
objectId: userId,
|
||||||
};
|
};
|
||||||
|
|
||||||
const ors = permFields.flatMap(key => {
|
const ors = permFields.flatMap((key) => {
|
||||||
// constraint for single pointer setup
|
// constraint for single pointer setup
|
||||||
const q = {
|
const q = {
|
||||||
[key]: userPointer,
|
[key]: userPointer,
|
||||||
@@ -1682,9 +1686,9 @@ class DatabaseController {
|
|||||||
}, []);
|
}, []);
|
||||||
|
|
||||||
// intersect all sets of protectedFields
|
// intersect all sets of protectedFields
|
||||||
protectedKeysSets.forEach(fields => {
|
protectedKeysSets.forEach((fields) => {
|
||||||
if (fields) {
|
if (fields) {
|
||||||
protectedKeys = protectedKeys.filter(v => fields.includes(v));
|
protectedKeys = protectedKeys.filter((v) => fields.includes(v));
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -1694,7 +1698,7 @@ class DatabaseController {
|
|||||||
createTransactionalSession() {
|
createTransactionalSession() {
|
||||||
return this.adapter
|
return this.adapter
|
||||||
.createTransactionalSession()
|
.createTransactionalSession()
|
||||||
.then(transactionalSession => {
|
.then((transactionalSession) => {
|
||||||
this._transactionalSession = transactionalSession;
|
this._transactionalSession = transactionalSession;
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
@@ -1737,10 +1741,10 @@ class DatabaseController {
|
|||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
const userClassPromise = this.loadSchema().then(schema =>
|
const userClassPromise = this.loadSchema().then((schema) =>
|
||||||
schema.enforceClassExists('_User')
|
schema.enforceClassExists('_User')
|
||||||
);
|
);
|
||||||
const roleClassPromise = this.loadSchema().then(schema =>
|
const roleClassPromise = this.loadSchema().then((schema) =>
|
||||||
schema.enforceClassExists('_Role')
|
schema.enforceClassExists('_Role')
|
||||||
);
|
);
|
||||||
|
|
||||||
@@ -1748,7 +1752,7 @@ class DatabaseController {
|
|||||||
.then(() =>
|
.then(() =>
|
||||||
this.adapter.ensureUniqueness('_User', requiredUserFields, ['username'])
|
this.adapter.ensureUniqueness('_User', requiredUserFields, ['username'])
|
||||||
)
|
)
|
||||||
.catch(error => {
|
.catch((error) => {
|
||||||
logger.warn('Unable to ensure uniqueness for usernames: ', error);
|
logger.warn('Unable to ensure uniqueness for usernames: ', error);
|
||||||
throw error;
|
throw error;
|
||||||
});
|
});
|
||||||
@@ -1763,7 +1767,7 @@ class DatabaseController {
|
|||||||
true
|
true
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
.catch(error => {
|
.catch((error) => {
|
||||||
logger.warn(
|
logger.warn(
|
||||||
'Unable to create case insensitive username index: ',
|
'Unable to create case insensitive username index: ',
|
||||||
error
|
error
|
||||||
@@ -1775,7 +1779,7 @@ class DatabaseController {
|
|||||||
.then(() =>
|
.then(() =>
|
||||||
this.adapter.ensureUniqueness('_User', requiredUserFields, ['email'])
|
this.adapter.ensureUniqueness('_User', requiredUserFields, ['email'])
|
||||||
)
|
)
|
||||||
.catch(error => {
|
.catch((error) => {
|
||||||
logger.warn(
|
logger.warn(
|
||||||
'Unable to ensure uniqueness for user email addresses: ',
|
'Unable to ensure uniqueness for user email addresses: ',
|
||||||
error
|
error
|
||||||
@@ -1793,7 +1797,7 @@ class DatabaseController {
|
|||||||
true
|
true
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
.catch(error => {
|
.catch((error) => {
|
||||||
logger.warn('Unable to create case insensitive email index: ', error);
|
logger.warn('Unable to create case insensitive email index: ', error);
|
||||||
throw error;
|
throw error;
|
||||||
});
|
});
|
||||||
@@ -1802,7 +1806,7 @@ class DatabaseController {
|
|||||||
.then(() =>
|
.then(() =>
|
||||||
this.adapter.ensureUniqueness('_Role', requiredRoleFields, ['name'])
|
this.adapter.ensureUniqueness('_Role', requiredRoleFields, ['name'])
|
||||||
)
|
)
|
||||||
.catch(error => {
|
.catch((error) => {
|
||||||
logger.warn('Unable to ensure uniqueness for role name: ', error);
|
logger.warn('Unable to ensure uniqueness for role name: ', error);
|
||||||
throw error;
|
throw error;
|
||||||
});
|
});
|
||||||
@@ -1824,7 +1828,7 @@ class DatabaseController {
|
|||||||
]);
|
]);
|
||||||
}
|
}
|
||||||
|
|
||||||
static _validateQuery: any => void;
|
static _validateQuery: (any) => void;
|
||||||
}
|
}
|
||||||
|
|
||||||
module.exports = DatabaseController;
|
module.exports = DatabaseController;
|
||||||
|
|||||||
Reference in New Issue
Block a user