GraphQL Object constraints (#5715)
* GraphQL Object constraints
Implements the GraphQL Object constraints, which allow us to filter query results using the `$eq`, `$lt`, `$gt`, `$in`, and other Parse-supported constraints.
Example:
```
query objects {
findMyClass(where: {
objField: {
_eq: {
key: 'foo.bar',
value: 'hello'
},
_gt: {
key: 'foo.number',
value: 10
},
_lt: {
key: 'anotherNumber',
value: 5
}
}
}) {
results {
objectId
}
}
}
```
In the example above, we have the `findMyClass` query (automatically generated for the `MyClass` class) and a field named `objField` whose type is Object. The object shown below is a valid `objField` value and would satisfy all of the constraints:
```
{
"foo": {
"bar": "hello",
"number": 11
},
"anotherNumber": 4
}
```
The Object constraint is applied only when using Parse class object type queries. When using "generic" queries such as `get` and `find`, this type of constraint is not available.
* Objects constraints not working on Postgres
Fixes the `$eq`, `$ne`, `$gt`, and `$lt` constraints when applied to an Object-type field.
* Fix object constraint field name
* Fix Postgres constraints indexes
* fix: Object type composed constraints not working
* fix: Rename key and value fields
* refactor: Object constraints for generic queries
* fix: Object constraints not working on Postgres
This commit is contained in:
committed by
Antonio Davi Macedo Coelho de Castro
parent
e0690d0c56
commit
ef14ca530d
@@ -1178,7 +1178,9 @@ describe('phant auth adapter', () => {
|
|||||||
};
|
};
|
||||||
const { adapter } = authenticationLoader.loadAuthAdapter('phantauth', {});
|
const { adapter } = authenticationLoader.loadAuthAdapter('phantauth', {});
|
||||||
|
|
||||||
spyOn(httpsRequest, 'get').and.callFake(() => Promise.resolve({ sub: 'invalidID' }));
|
spyOn(httpsRequest, 'get').and.callFake(() =>
|
||||||
|
Promise.resolve({ sub: 'invalidID' })
|
||||||
|
);
|
||||||
try {
|
try {
|
||||||
await adapter.validateAuthData(authData);
|
await adapter.validateAuthData(authData);
|
||||||
fail();
|
fail();
|
||||||
|
|||||||
@@ -5273,7 +5273,10 @@ describe('ParseGraphQLServer', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
it('should support object values', async () => {
|
it('should support object values', async () => {
|
||||||
const someFieldValue = { foo: 'bar' };
|
const someFieldValue = {
|
||||||
|
foo: { bar: 'baz' },
|
||||||
|
number: 10,
|
||||||
|
};
|
||||||
|
|
||||||
const createResult = await apolloClient.mutate({
|
const createResult = await apolloClient.mutate({
|
||||||
mutation: gql`
|
mutation: gql`
|
||||||
@@ -5314,30 +5317,179 @@ describe('ParseGraphQLServer', () => {
|
|||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
const getResult = await apolloClient.query({
|
const where = {
|
||||||
|
someField: {
|
||||||
|
_eq: { _key: 'foo.bar', _value: 'baz' },
|
||||||
|
_ne: { _key: 'foo.bar', _value: 'bat' },
|
||||||
|
_gt: { _key: 'number', _value: 9 },
|
||||||
|
_lt: { _key: 'number', _value: 11 },
|
||||||
|
},
|
||||||
|
};
|
||||||
|
const queryResult = await apolloClient.query({
|
||||||
query: gql`
|
query: gql`
|
||||||
query GetSomeObject($objectId: ID!) {
|
query GetSomeObject(
|
||||||
|
$objectId: ID!
|
||||||
|
$where: SomeClassConstraints
|
||||||
|
$genericWhere: Object
|
||||||
|
) {
|
||||||
objects {
|
objects {
|
||||||
get(className: "SomeClass", objectId: $objectId)
|
get(className: "SomeClass", objectId: $objectId)
|
||||||
findSomeClass(where: { someField: { _exists: true } }) {
|
findSomeClass(where: $where) {
|
||||||
results {
|
results {
|
||||||
objectId
|
objectId
|
||||||
|
someField
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
find(className: "SomeClass", where: $genericWhere) {
|
||||||
|
results
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
`,
|
`,
|
||||||
variables: {
|
variables: {
|
||||||
objectId: createResult.data.objects.create.objectId,
|
objectId: createResult.data.objects.create.objectId,
|
||||||
|
where,
|
||||||
|
genericWhere: where, // where and genericWhere types are different
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
const { someField } = getResult.data.objects.get;
|
const {
|
||||||
|
get: getResult,
|
||||||
|
findSomeClass,
|
||||||
|
find,
|
||||||
|
} = queryResult.data.objects;
|
||||||
|
|
||||||
|
const { someField } = getResult;
|
||||||
expect(typeof someField).toEqual('object');
|
expect(typeof someField).toEqual('object');
|
||||||
expect(someField).toEqual(someFieldValue);
|
expect(someField).toEqual(someFieldValue);
|
||||||
expect(getResult.data.objects.findSomeClass.results.length).toEqual(
|
|
||||||
2
|
// Checks class query results
|
||||||
);
|
expect(findSomeClass.results.length).toEqual(2);
|
||||||
|
expect(findSomeClass.results[0].someField).toEqual(someFieldValue);
|
||||||
|
expect(findSomeClass.results[1].someField).toEqual(someFieldValue);
|
||||||
|
|
||||||
|
// Checks generic query results
|
||||||
|
expect(find.results.length).toEqual(2);
|
||||||
|
expect(find.results[0].someField).toEqual(someFieldValue);
|
||||||
|
expect(find.results[1].someField).toEqual(someFieldValue);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should support object composed queries', async () => {
|
||||||
|
const someFieldValue = {
|
||||||
|
lorem: 'ipsum',
|
||||||
|
number: 10,
|
||||||
|
};
|
||||||
|
const someFieldValue2 = {
|
||||||
|
foo: {
|
||||||
|
test: 'bar',
|
||||||
|
},
|
||||||
|
number: 10,
|
||||||
|
};
|
||||||
|
|
||||||
|
const createResult = await apolloClient.mutate({
|
||||||
|
mutation: gql`
|
||||||
|
mutation CreateSomeObject($fields: Object, $fields2: Object) {
|
||||||
|
objects {
|
||||||
|
create1: create(className: "SomeClass", fields: $fields) {
|
||||||
|
objectId
|
||||||
|
}
|
||||||
|
create2: create(className: "SomeClass", fields: $fields2) {
|
||||||
|
objectId
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
`,
|
||||||
|
variables: {
|
||||||
|
fields: {
|
||||||
|
someField: someFieldValue,
|
||||||
|
},
|
||||||
|
fields2: {
|
||||||
|
someField: someFieldValue2,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
await parseGraphQLServer.parseGraphQLSchema.databaseController.schemaCache.clear();
|
||||||
|
|
||||||
|
const where = {
|
||||||
|
_and: [
|
||||||
|
{
|
||||||
|
someField: {
|
||||||
|
_gt: { _key: 'number', _value: 9 },
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
someField: {
|
||||||
|
_lt: { _key: 'number', _value: 11 },
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
_or: [
|
||||||
|
{
|
||||||
|
someField: {
|
||||||
|
_eq: { _key: 'lorem', _value: 'ipsum' },
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
someField: {
|
||||||
|
_eq: { _key: 'foo.test', _value: 'bar' },
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
};
|
||||||
|
const findResult = await apolloClient.query({
|
||||||
|
query: gql`
|
||||||
|
query FindSomeObject(
|
||||||
|
$where: SomeClassConstraints
|
||||||
|
$genericWhere: Object
|
||||||
|
) {
|
||||||
|
objects {
|
||||||
|
findSomeClass(where: $where) {
|
||||||
|
results {
|
||||||
|
objectId
|
||||||
|
someField
|
||||||
|
}
|
||||||
|
}
|
||||||
|
find(className: "SomeClass", where: $genericWhere) {
|
||||||
|
results
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
`,
|
||||||
|
variables: {
|
||||||
|
where,
|
||||||
|
genericWhere: where, // where and genericWhere types are different
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
const { create1, create2 } = createResult.data.objects;
|
||||||
|
const { findSomeClass, find } = findResult.data.objects;
|
||||||
|
|
||||||
|
// Checks class query results
|
||||||
|
const { results } = findSomeClass;
|
||||||
|
expect(results.length).toEqual(2);
|
||||||
|
expect(
|
||||||
|
results.find(result => result.objectId === create1.objectId)
|
||||||
|
.someField
|
||||||
|
).toEqual(someFieldValue);
|
||||||
|
expect(
|
||||||
|
results.find(result => result.objectId === create2.objectId)
|
||||||
|
.someField
|
||||||
|
).toEqual(someFieldValue2);
|
||||||
|
|
||||||
|
// Checks generic query results
|
||||||
|
const { results: genericResults } = find;
|
||||||
|
expect(genericResults.length).toEqual(2);
|
||||||
|
expect(
|
||||||
|
genericResults.find(result => result.objectId === create1.objectId)
|
||||||
|
.someField
|
||||||
|
).toEqual(someFieldValue);
|
||||||
|
expect(
|
||||||
|
genericResults.find(result => result.objectId === create2.objectId)
|
||||||
|
.someField
|
||||||
|
).toEqual(someFieldValue2);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should support array values', async () => {
|
it('should support array values', async () => {
|
||||||
|
|||||||
@@ -369,7 +369,7 @@ describe('Parse.Query testing', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
it('nested containedIn string with single quote', async () => {
|
it('nested containedIn string with single quote', async () => {
|
||||||
const obj = new TestObject({ nested: { foo: ["single'quote"]} });
|
const obj = new TestObject({ nested: { foo: ["single'quote"] } });
|
||||||
await obj.save();
|
await obj.save();
|
||||||
const query = new Parse.Query(TestObject);
|
const query = new Parse.Query(TestObject);
|
||||||
query.containedIn('nested.foo', ["single'quote"]);
|
query.containedIn('nested.foo', ["single'quote"]);
|
||||||
|
|||||||
@@ -1,4 +1,6 @@
|
|||||||
const { ParseWebSocketServer } = require('../lib/LiveQuery/ParseWebSocketServer');
|
const {
|
||||||
|
ParseWebSocketServer,
|
||||||
|
} = require('../lib/LiveQuery/ParseWebSocketServer');
|
||||||
|
|
||||||
describe('ParseWebSocketServer', function() {
|
describe('ParseWebSocketServer', function() {
|
||||||
beforeEach(function(done) {
|
beforeEach(function(done) {
|
||||||
|
|||||||
@@ -451,7 +451,9 @@ describe('SchemaController', () => {
|
|||||||
)
|
)
|
||||||
)
|
)
|
||||||
.then(actualSchema => {
|
.then(actualSchema => {
|
||||||
expect(dd(actualSchema.classLevelPermissions, newLevelPermissions)).toEqual(undefined);
|
expect(
|
||||||
|
dd(actualSchema.classLevelPermissions, newLevelPermissions)
|
||||||
|
).toEqual(undefined);
|
||||||
done();
|
done();
|
||||||
})
|
})
|
||||||
.catch(error => {
|
.catch(error => {
|
||||||
|
|||||||
@@ -425,8 +425,12 @@ describe('schemas', () => {
|
|||||||
foo4: { type: 'Date', required: true },
|
foo4: { type: 'Date', required: true },
|
||||||
foo5: { type: 'Number', defaultValue: 5 },
|
foo5: { type: 'Number', defaultValue: 5 },
|
||||||
ptr: { type: 'Pointer', targetClass: 'SomeClass', required: false },
|
ptr: { type: 'Pointer', targetClass: 'SomeClass', required: false },
|
||||||
defaultFalse: { type: 'Boolean', required: true, defaultValue: false },
|
defaultFalse: {
|
||||||
defaultZero: { type: 'Number', defaultValue: 0 }
|
type: 'Boolean',
|
||||||
|
required: true,
|
||||||
|
defaultValue: false,
|
||||||
|
},
|
||||||
|
defaultZero: { type: 'Number', defaultValue: 0 },
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
}).then(async response => {
|
}).then(async response => {
|
||||||
@@ -447,8 +451,12 @@ describe('schemas', () => {
|
|||||||
foo4: { type: 'Date', required: true },
|
foo4: { type: 'Date', required: true },
|
||||||
foo5: { type: 'Number', defaultValue: 5 },
|
foo5: { type: 'Number', defaultValue: 5 },
|
||||||
ptr: { type: 'Pointer', targetClass: 'SomeClass', required: false },
|
ptr: { type: 'Pointer', targetClass: 'SomeClass', required: false },
|
||||||
defaultFalse: { type: 'Boolean', required: true, defaultValue: false },
|
defaultFalse: {
|
||||||
defaultZero: { type: 'Number', defaultValue: 0 }
|
type: 'Boolean',
|
||||||
|
required: true,
|
||||||
|
defaultValue: false,
|
||||||
|
},
|
||||||
|
defaultZero: { type: 'Number', defaultValue: 0 },
|
||||||
},
|
},
|
||||||
classLevelPermissions: defaultClassLevelPermissions,
|
classLevelPermissions: defaultClassLevelPermissions,
|
||||||
});
|
});
|
||||||
@@ -468,8 +476,8 @@ describe('schemas', () => {
|
|||||||
expect(obj.get('foo4')).toEqual(date);
|
expect(obj.get('foo4')).toEqual(date);
|
||||||
expect(obj.get('foo5')).toEqual(5);
|
expect(obj.get('foo5')).toEqual(5);
|
||||||
expect(obj.get('ptr')).toBeUndefined();
|
expect(obj.get('ptr')).toBeUndefined();
|
||||||
expect(obj.get('defaultFalse')).toEqual(false)
|
expect(obj.get('defaultFalse')).toEqual(false);
|
||||||
expect(obj.get('defaultZero')).toEqual(0)
|
expect(obj.get('defaultZero')).toEqual(0);
|
||||||
expect(obj.get('ptr')).toBeUndefined();
|
expect(obj.get('ptr')).toBeUndefined();
|
||||||
done();
|
done();
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -288,7 +288,7 @@ const buildWhereClause = ({ schema, query, index }): WhereClause => {
|
|||||||
index += 2;
|
index += 2;
|
||||||
} else if (fieldValue.$regex) {
|
} else if (fieldValue.$regex) {
|
||||||
// Handle later
|
// Handle later
|
||||||
} else {
|
} else if (typeof fieldValue !== 'object') {
|
||||||
patterns.push(`$${index}:raw = $${index + 1}::text`);
|
patterns.push(`$${index}:raw = $${index + 1}::text`);
|
||||||
values.push(name, fieldValue);
|
values.push(name, fieldValue);
|
||||||
index += 2;
|
index += 2;
|
||||||
@@ -358,9 +358,16 @@ const buildWhereClause = ({ schema, query, index }): WhereClause => {
|
|||||||
2}) OR $${index}:name IS NULL)`
|
2}) OR $${index}:name IS NULL)`
|
||||||
);
|
);
|
||||||
} else {
|
} else {
|
||||||
patterns.push(
|
if (fieldName.indexOf('.') >= 0) {
|
||||||
`($${index}:name <> $${index + 1} OR $${index}:name IS NULL)`
|
const constraintFieldName = transformDotField(fieldName);
|
||||||
);
|
patterns.push(
|
||||||
|
`(${constraintFieldName} <> $${index} OR ${constraintFieldName} IS NULL)`
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
patterns.push(
|
||||||
|
`($${index}:name <> $${index + 1} OR $${index}:name IS NULL)`
|
||||||
|
);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -380,9 +387,14 @@ const buildWhereClause = ({ schema, query, index }): WhereClause => {
|
|||||||
values.push(fieldName);
|
values.push(fieldName);
|
||||||
index += 1;
|
index += 1;
|
||||||
} else {
|
} else {
|
||||||
patterns.push(`$${index}:name = $${index + 1}`);
|
if (fieldName.indexOf('.') >= 0) {
|
||||||
values.push(fieldName, fieldValue.$eq);
|
values.push(fieldValue.$eq);
|
||||||
index += 2;
|
patterns.push(`${transformDotField(fieldName)} = $${index++}`);
|
||||||
|
} else {
|
||||||
|
values.push(fieldName, fieldValue.$eq);
|
||||||
|
patterns.push(`$${index}:name = $${index + 1}`);
|
||||||
|
index += 2;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
const isInOrNin =
|
const isInOrNin =
|
||||||
@@ -749,9 +761,29 @@ const buildWhereClause = ({ schema, query, index }): WhereClause => {
|
|||||||
Object.keys(ParseToPosgresComparator).forEach(cmp => {
|
Object.keys(ParseToPosgresComparator).forEach(cmp => {
|
||||||
if (fieldValue[cmp] || fieldValue[cmp] === 0) {
|
if (fieldValue[cmp] || fieldValue[cmp] === 0) {
|
||||||
const pgComparator = ParseToPosgresComparator[cmp];
|
const pgComparator = ParseToPosgresComparator[cmp];
|
||||||
patterns.push(`$${index}:name ${pgComparator} $${index + 1}`);
|
const postgresValue = toPostgresValue(fieldValue[cmp]);
|
||||||
values.push(fieldName, toPostgresValue(fieldValue[cmp]));
|
let constraintFieldName;
|
||||||
index += 2;
|
if (fieldName.indexOf('.') >= 0) {
|
||||||
|
let castType;
|
||||||
|
switch (typeof postgresValue) {
|
||||||
|
case 'number':
|
||||||
|
castType = 'double precision';
|
||||||
|
break;
|
||||||
|
case 'boolean':
|
||||||
|
castType = 'boolean';
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
castType = undefined;
|
||||||
|
}
|
||||||
|
constraintFieldName = castType
|
||||||
|
? `CAST ((${transformDotField(fieldName)}) AS ${castType})`
|
||||||
|
: transformDotField(fieldName);
|
||||||
|
} else {
|
||||||
|
constraintFieldName = `$${index++}:name`;
|
||||||
|
values.push(fieldName);
|
||||||
|
}
|
||||||
|
values.push(postgresValue);
|
||||||
|
patterns.push(`${constraintFieldName} ${pgComparator} $${index++}`);
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -820,7 +852,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
|
|||||||
|
|
||||||
setClassLevelPermissions(className: string, CLPs: any) {
|
setClassLevelPermissions(className: string, CLPs: any) {
|
||||||
const self = this;
|
const self = this;
|
||||||
return this._client.task('set-class-level-permissions', async (t) => {
|
return this._client.task('set-class-level-permissions', async t => {
|
||||||
await self._ensureSchemaCollectionExists(t);
|
await self._ensureSchemaCollectionExists(t);
|
||||||
const values = [
|
const values = [
|
||||||
className,
|
className,
|
||||||
@@ -885,7 +917,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
return conn.tx('set-indexes-with-schema-format', async (t) => {
|
return conn.tx('set-indexes-with-schema-format', async t => {
|
||||||
if (insertedIndexes.length > 0) {
|
if (insertedIndexes.length > 0) {
|
||||||
await self.createIndexes(className, insertedIndexes, t);
|
await self.createIndexes(className, insertedIndexes, t);
|
||||||
}
|
}
|
||||||
@@ -981,7 +1013,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
|
|||||||
const values = [className, ...valuesArray];
|
const values = [className, ...valuesArray];
|
||||||
|
|
||||||
debug(qs, values);
|
debug(qs, values);
|
||||||
return conn.task('create-table', async (t) => {
|
return conn.task('create-table', async t => {
|
||||||
try {
|
try {
|
||||||
await self._ensureSchemaCollectionExists(t);
|
await self._ensureSchemaCollectionExists(t);
|
||||||
await t.none(qs, values);
|
await t.none(qs, values);
|
||||||
@@ -1009,7 +1041,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
|
|||||||
conn = conn || this._client;
|
conn = conn || this._client;
|
||||||
const self = this;
|
const self = this;
|
||||||
|
|
||||||
return conn.tx('schema-upgrade', async (t) => {
|
return conn.tx('schema-upgrade', async t => {
|
||||||
const columns = await t.map(
|
const columns = await t.map(
|
||||||
'SELECT column_name FROM information_schema.columns WHERE table_name = $<className>',
|
'SELECT column_name FROM information_schema.columns WHERE table_name = $<className>',
|
||||||
{ className },
|
{ className },
|
||||||
@@ -1040,7 +1072,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
|
|||||||
debug('addFieldIfNotExists', { className, fieldName, type });
|
debug('addFieldIfNotExists', { className, fieldName, type });
|
||||||
conn = conn || this._client;
|
conn = conn || this._client;
|
||||||
const self = this;
|
const self = this;
|
||||||
return conn.tx('add-field-if-not-exists', async (t) => {
|
return conn.tx('add-field-if-not-exists', async t => {
|
||||||
if (type.type !== 'Relation') {
|
if (type.type !== 'Relation') {
|
||||||
try {
|
try {
|
||||||
await t.none(
|
await t.none(
|
||||||
@@ -1110,7 +1142,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
|
|||||||
debug('deleteAllClasses');
|
debug('deleteAllClasses');
|
||||||
|
|
||||||
return this._client
|
return this._client
|
||||||
.task('delete-all-classes', async (t) => {
|
.task('delete-all-classes', async t => {
|
||||||
try {
|
try {
|
||||||
const results = await t.any('SELECT * FROM "_SCHEMA"');
|
const results = await t.any('SELECT * FROM "_SCHEMA"');
|
||||||
const joins = results.reduce((list: Array<string>, schema: any) => {
|
const joins = results.reduce((list: Array<string>, schema: any) => {
|
||||||
@@ -1180,7 +1212,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
|
|||||||
})
|
})
|
||||||
.join(', DROP COLUMN');
|
.join(', DROP COLUMN');
|
||||||
|
|
||||||
return this._client.tx('delete-fields', async (t) => {
|
return this._client.tx('delete-fields', async t => {
|
||||||
await t.none(
|
await t.none(
|
||||||
'UPDATE "_SCHEMA" SET "schema"=$<schema> WHERE "className"=$<className>',
|
'UPDATE "_SCHEMA" SET "schema"=$<schema> WHERE "className"=$<className>',
|
||||||
{ schema, className }
|
{ schema, className }
|
||||||
@@ -1196,7 +1228,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
|
|||||||
// rejection reason are TBD.
|
// rejection reason are TBD.
|
||||||
getAllClasses() {
|
getAllClasses() {
|
||||||
const self = this;
|
const self = this;
|
||||||
return this._client.task('get-all-classes', async (t) => {
|
return this._client.task('get-all-classes', async t => {
|
||||||
await self._ensureSchemaCollectionExists(t);
|
await self._ensureSchemaCollectionExists(t);
|
||||||
return await t.map('SELECT * FROM "_SCHEMA"', null, row =>
|
return await t.map('SELECT * FROM "_SCHEMA"', null, row =>
|
||||||
toParseSchema({ className: row.className, ...row.schema })
|
toParseSchema({ className: row.className, ...row.schema })
|
||||||
|
|||||||
@@ -20,8 +20,8 @@ export class WSSAdapter {
|
|||||||
* @param {Object} options - {http.Server|https.Server} server
|
* @param {Object} options - {http.Server|https.Server} server
|
||||||
*/
|
*/
|
||||||
constructor(options) {
|
constructor(options) {
|
||||||
this.onListen = () => {}
|
this.onListen = () => {};
|
||||||
this.onConnection = () => {}
|
this.onConnection = () => {};
|
||||||
}
|
}
|
||||||
|
|
||||||
// /**
|
// /**
|
||||||
|
|||||||
@@ -768,7 +768,11 @@ export default class SchemaController {
|
|||||||
})
|
})
|
||||||
.then(results => {
|
.then(results => {
|
||||||
enforceFields = results.filter(result => !!result);
|
enforceFields = results.filter(result => !!result);
|
||||||
return this.setPermissions(className, classLevelPermissions, newSchema);
|
return this.setPermissions(
|
||||||
|
className,
|
||||||
|
classLevelPermissions,
|
||||||
|
newSchema
|
||||||
|
);
|
||||||
})
|
})
|
||||||
.then(() =>
|
.then(() =>
|
||||||
this._dbAdapter.setIndexesWithSchemaFormat(
|
this._dbAdapter.setIndexesWithSchemaFormat(
|
||||||
|
|||||||
@@ -846,15 +846,34 @@ const ARRAY_CONSTRAINT = new GraphQLInputObjectType({
|
|||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
|
const KEY_VALUE = new GraphQLInputObjectType({
|
||||||
|
name: 'KeyValue',
|
||||||
|
description: 'An entry from an object, i.e., a pair of key and value.',
|
||||||
|
fields: {
|
||||||
|
_key: {
|
||||||
|
description: 'The key used to retrieve the value of this entry.',
|
||||||
|
type: new GraphQLNonNull(GraphQLString),
|
||||||
|
},
|
||||||
|
_value: {
|
||||||
|
description: 'The value of the entry. Could be any type of scalar data.',
|
||||||
|
type: new GraphQLNonNull(ANY),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
const OBJECT_CONSTRAINT = new GraphQLInputObjectType({
|
const OBJECT_CONSTRAINT = new GraphQLInputObjectType({
|
||||||
name: 'ObjectConstraint',
|
name: 'ObjectConstraint',
|
||||||
description:
|
description:
|
||||||
'The ObjectConstraint input type is used in operations that involve filtering objects by a field of type Object.',
|
'The ObjectConstraint input type is used in operations that involve filtering result by a field of type Object.',
|
||||||
fields: {
|
fields: {
|
||||||
_eq: _eq(OBJECT),
|
_eq: _eq(KEY_VALUE),
|
||||||
_ne: _ne(OBJECT),
|
_ne: _ne(KEY_VALUE),
|
||||||
_in: _in(OBJECT),
|
_in: _in(KEY_VALUE),
|
||||||
_nin: _nin(OBJECT),
|
_nin: _nin(KEY_VALUE),
|
||||||
|
_lt: _lt(KEY_VALUE),
|
||||||
|
_lte: _lte(KEY_VALUE),
|
||||||
|
_gt: _gt(KEY_VALUE),
|
||||||
|
_gte: _gte(KEY_VALUE),
|
||||||
_exists,
|
_exists,
|
||||||
_select,
|
_select,
|
||||||
_dontSelect,
|
_dontSelect,
|
||||||
|
|||||||
@@ -96,13 +96,51 @@ const parseMap = {
|
|||||||
_point: '$point',
|
_point: '$point',
|
||||||
};
|
};
|
||||||
|
|
||||||
const transformToParse = constraints => {
|
const transformToParse = (constraints, parentFieldName, parentConstraints) => {
|
||||||
if (!constraints || typeof constraints !== 'object') {
|
if (!constraints || typeof constraints !== 'object') {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
Object.keys(constraints).forEach(fieldName => {
|
Object.keys(constraints).forEach(fieldName => {
|
||||||
let fieldValue = constraints[fieldName];
|
let fieldValue = constraints[fieldName];
|
||||||
if (parseMap[fieldName]) {
|
|
||||||
|
/**
|
||||||
|
* If we have a key-value pair, we need to change the way the constraint is structured.
|
||||||
|
*
|
||||||
|
* Example:
|
||||||
|
* From:
|
||||||
|
* {
|
||||||
|
* "someField": {
|
||||||
|
* "_lt": {
|
||||||
|
* "_key":"foo.bar",
|
||||||
|
* "_value": 100
|
||||||
|
* },
|
||||||
|
* "_gt": {
|
||||||
|
* "_key":"foo.bar",
|
||||||
|
* "_value": 10
|
||||||
|
* }
|
||||||
|
* }
|
||||||
|
* }
|
||||||
|
*
|
||||||
|
* To:
|
||||||
|
* {
|
||||||
|
* "someField.foo.bar": {
|
||||||
|
* "$lt": 100,
|
||||||
|
* "$gt": 10
|
||||||
|
* }
|
||||||
|
* }
|
||||||
|
*/
|
||||||
|
if (
|
||||||
|
fieldValue._key &&
|
||||||
|
fieldValue._value &&
|
||||||
|
parentConstraints &&
|
||||||
|
parentFieldName
|
||||||
|
) {
|
||||||
|
delete parentConstraints[parentFieldName];
|
||||||
|
parentConstraints[`${parentFieldName}.${fieldValue._key}`] = {
|
||||||
|
...parentConstraints[`${parentFieldName}.${fieldValue._key}`],
|
||||||
|
[parseMap[fieldName]]: fieldValue._value,
|
||||||
|
};
|
||||||
|
} else if (parseMap[fieldName]) {
|
||||||
delete constraints[fieldName];
|
delete constraints[fieldName];
|
||||||
fieldName = parseMap[fieldName];
|
fieldName = parseMap[fieldName];
|
||||||
constraints[fieldName] = fieldValue;
|
constraints[fieldName] = fieldValue;
|
||||||
@@ -160,7 +198,7 @@ const transformToParse = constraints => {
|
|||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
if (typeof fieldValue === 'object') {
|
if (typeof fieldValue === 'object') {
|
||||||
transformToParse(fieldValue);
|
transformToParse(fieldValue, fieldName, constraints);
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -678,9 +678,7 @@ class ParseLiveQueryServer {
|
|||||||
client.pushSubscribe(request.requestId);
|
client.pushSubscribe(request.requestId);
|
||||||
|
|
||||||
logger.verbose(
|
logger.verbose(
|
||||||
`Create client ${parseWebsocket.clientId} new subscription: ${
|
`Create client ${parseWebsocket.clientId} new subscription: ${request.requestId}`
|
||||||
request.requestId
|
|
||||||
}`
|
|
||||||
);
|
);
|
||||||
logger.verbose('Current client number: %d', this.clients.size);
|
logger.verbose('Current client number: %d', this.clients.size);
|
||||||
runLiveQueryEventHandlers({
|
runLiveQueryEventHandlers({
|
||||||
@@ -774,9 +772,7 @@ class ParseLiveQueryServer {
|
|||||||
client.pushUnsubscribe(request.requestId);
|
client.pushUnsubscribe(request.requestId);
|
||||||
|
|
||||||
logger.verbose(
|
logger.verbose(
|
||||||
`Delete client: ${parseWebsocket.clientId} | subscription: ${
|
`Delete client: ${parseWebsocket.clientId} | subscription: ${request.requestId}`
|
||||||
request.requestId
|
|
||||||
}`
|
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -3,480 +3,504 @@
|
|||||||
This code has been generated by resources/buildConfigDefinitions.js
|
This code has been generated by resources/buildConfigDefinitions.js
|
||||||
Do not edit manually, but update Options/index.js
|
Do not edit manually, but update Options/index.js
|
||||||
*/
|
*/
|
||||||
var parsers = require("./parsers");
|
var parsers = require('./parsers');
|
||||||
|
|
||||||
module.exports.ParseServerOptions = {
|
module.exports.ParseServerOptions = {
|
||||||
"accountLockout": {
|
accountLockout: {
|
||||||
"env": "PARSE_SERVER_ACCOUNT_LOCKOUT",
|
env: 'PARSE_SERVER_ACCOUNT_LOCKOUT',
|
||||||
"help": "account lockout policy for failed login attempts",
|
help: 'account lockout policy for failed login attempts',
|
||||||
"action": parsers.objectParser
|
action: parsers.objectParser,
|
||||||
},
|
},
|
||||||
"allowClientClassCreation": {
|
allowClientClassCreation: {
|
||||||
"env": "PARSE_SERVER_ALLOW_CLIENT_CLASS_CREATION",
|
env: 'PARSE_SERVER_ALLOW_CLIENT_CLASS_CREATION',
|
||||||
"help": "Enable (or disable) client class creation, defaults to true",
|
help: 'Enable (or disable) client class creation, defaults to true',
|
||||||
"action": parsers.booleanParser,
|
action: parsers.booleanParser,
|
||||||
"default": true
|
default: true,
|
||||||
},
|
},
|
||||||
"analyticsAdapter": {
|
analyticsAdapter: {
|
||||||
"env": "PARSE_SERVER_ANALYTICS_ADAPTER",
|
env: 'PARSE_SERVER_ANALYTICS_ADAPTER',
|
||||||
"help": "Adapter module for the analytics",
|
help: 'Adapter module for the analytics',
|
||||||
"action": parsers.moduleOrObjectParser
|
action: parsers.moduleOrObjectParser,
|
||||||
},
|
},
|
||||||
"appId": {
|
appId: {
|
||||||
"env": "PARSE_SERVER_APPLICATION_ID",
|
env: 'PARSE_SERVER_APPLICATION_ID',
|
||||||
"help": "Your Parse Application ID",
|
help: 'Your Parse Application ID',
|
||||||
"required": true
|
required: true,
|
||||||
},
|
},
|
||||||
"appName": {
|
appName: {
|
||||||
"env": "PARSE_SERVER_APP_NAME",
|
env: 'PARSE_SERVER_APP_NAME',
|
||||||
"help": "Sets the app name"
|
help: 'Sets the app name',
|
||||||
},
|
},
|
||||||
"auth": {
|
auth: {
|
||||||
"env": "PARSE_SERVER_AUTH_PROVIDERS",
|
env: 'PARSE_SERVER_AUTH_PROVIDERS',
|
||||||
"help": "Configuration for your authentication providers, as stringified JSON. See http://docs.parseplatform.org/parse-server/guide/#oauth-and-3rd-party-authentication",
|
help:
|
||||||
"action": parsers.objectParser
|
'Configuration for your authentication providers, as stringified JSON. See http://docs.parseplatform.org/parse-server/guide/#oauth-and-3rd-party-authentication',
|
||||||
},
|
action: parsers.objectParser,
|
||||||
"cacheAdapter": {
|
},
|
||||||
"env": "PARSE_SERVER_CACHE_ADAPTER",
|
cacheAdapter: {
|
||||||
"help": "Adapter module for the cache",
|
env: 'PARSE_SERVER_CACHE_ADAPTER',
|
||||||
"action": parsers.moduleOrObjectParser
|
help: 'Adapter module for the cache',
|
||||||
},
|
action: parsers.moduleOrObjectParser,
|
||||||
"cacheMaxSize": {
|
},
|
||||||
"env": "PARSE_SERVER_CACHE_MAX_SIZE",
|
cacheMaxSize: {
|
||||||
"help": "Sets the maximum size for the in memory cache, defaults to 10000",
|
env: 'PARSE_SERVER_CACHE_MAX_SIZE',
|
||||||
"action": parsers.numberParser("cacheMaxSize"),
|
help: 'Sets the maximum size for the in memory cache, defaults to 10000',
|
||||||
"default": 10000
|
action: parsers.numberParser('cacheMaxSize'),
|
||||||
},
|
default: 10000,
|
||||||
"cacheTTL": {
|
},
|
||||||
"env": "PARSE_SERVER_CACHE_TTL",
|
cacheTTL: {
|
||||||
"help": "Sets the TTL for the in memory cache (in ms), defaults to 5000 (5 seconds)",
|
env: 'PARSE_SERVER_CACHE_TTL',
|
||||||
"action": parsers.numberParser("cacheTTL"),
|
help:
|
||||||
"default": 5000
|
'Sets the TTL for the in memory cache (in ms), defaults to 5000 (5 seconds)',
|
||||||
},
|
action: parsers.numberParser('cacheTTL'),
|
||||||
"clientKey": {
|
default: 5000,
|
||||||
"env": "PARSE_SERVER_CLIENT_KEY",
|
},
|
||||||
"help": "Key for iOS, MacOS, tvOS clients"
|
clientKey: {
|
||||||
},
|
env: 'PARSE_SERVER_CLIENT_KEY',
|
||||||
"cloud": {
|
help: 'Key for iOS, MacOS, tvOS clients',
|
||||||
"env": "PARSE_SERVER_CLOUD",
|
},
|
||||||
"help": "Full path to your cloud code main.js"
|
cloud: {
|
||||||
},
|
env: 'PARSE_SERVER_CLOUD',
|
||||||
"cluster": {
|
help: 'Full path to your cloud code main.js',
|
||||||
"env": "PARSE_SERVER_CLUSTER",
|
},
|
||||||
"help": "Run with cluster, optionally set the number of processes default to os.cpus().length",
|
cluster: {
|
||||||
"action": parsers.numberOrBooleanParser
|
env: 'PARSE_SERVER_CLUSTER',
|
||||||
},
|
help:
|
||||||
"collectionPrefix": {
|
'Run with cluster, optionally set the number of processes default to os.cpus().length',
|
||||||
"env": "PARSE_SERVER_COLLECTION_PREFIX",
|
action: parsers.numberOrBooleanParser,
|
||||||
"help": "A collection prefix for the classes",
|
},
|
||||||
"default": ""
|
collectionPrefix: {
|
||||||
},
|
env: 'PARSE_SERVER_COLLECTION_PREFIX',
|
||||||
"customPages": {
|
help: 'A collection prefix for the classes',
|
||||||
"env": "PARSE_SERVER_CUSTOM_PAGES",
|
default: '',
|
||||||
"help": "custom pages for password validation and reset",
|
},
|
||||||
"action": parsers.objectParser,
|
customPages: {
|
||||||
"default": {}
|
env: 'PARSE_SERVER_CUSTOM_PAGES',
|
||||||
},
|
help: 'custom pages for password validation and reset',
|
||||||
"databaseAdapter": {
|
action: parsers.objectParser,
|
||||||
"env": "PARSE_SERVER_DATABASE_ADAPTER",
|
default: {},
|
||||||
"help": "Adapter module for the database",
|
},
|
||||||
"action": parsers.moduleOrObjectParser
|
databaseAdapter: {
|
||||||
},
|
env: 'PARSE_SERVER_DATABASE_ADAPTER',
|
||||||
"databaseOptions": {
|
help: 'Adapter module for the database',
|
||||||
"env": "PARSE_SERVER_DATABASE_OPTIONS",
|
action: parsers.moduleOrObjectParser,
|
||||||
"help": "Options to pass to the mongodb client",
|
},
|
||||||
"action": parsers.objectParser
|
databaseOptions: {
|
||||||
},
|
env: 'PARSE_SERVER_DATABASE_OPTIONS',
|
||||||
"databaseURI": {
|
help: 'Options to pass to the mongodb client',
|
||||||
"env": "PARSE_SERVER_DATABASE_URI",
|
action: parsers.objectParser,
|
||||||
"help": "The full URI to your database. Supported databases are mongodb or postgres.",
|
},
|
||||||
"required": true,
|
databaseURI: {
|
||||||
"default": "mongodb://localhost:27017/parse"
|
env: 'PARSE_SERVER_DATABASE_URI',
|
||||||
},
|
help:
|
||||||
"directAccess": {
|
'The full URI to your database. Supported databases are mongodb or postgres.',
|
||||||
"env": "PARSE_SERVER_ENABLE_EXPERIMENTAL_DIRECT_ACCESS",
|
required: true,
|
||||||
"help": "Replace HTTP Interface when using JS SDK in current node runtime, defaults to false. Caution, this is an experimental feature that may not be appropriate for production.",
|
default: 'mongodb://localhost:27017/parse',
|
||||||
"action": parsers.booleanParser,
|
},
|
||||||
"default": false
|
directAccess: {
|
||||||
},
|
env: 'PARSE_SERVER_ENABLE_EXPERIMENTAL_DIRECT_ACCESS',
|
||||||
"dotNetKey": {
|
help:
|
||||||
"env": "PARSE_SERVER_DOT_NET_KEY",
|
'Replace HTTP Interface when using JS SDK in current node runtime, defaults to false. Caution, this is an experimental feature that may not be appropriate for production.',
|
||||||
"help": "Key for Unity and .Net SDK"
|
action: parsers.booleanParser,
|
||||||
},
|
default: false,
|
||||||
"emailAdapter": {
|
},
|
||||||
"env": "PARSE_SERVER_EMAIL_ADAPTER",
|
dotNetKey: {
|
||||||
"help": "Adapter module for email sending",
|
env: 'PARSE_SERVER_DOT_NET_KEY',
|
||||||
"action": parsers.moduleOrObjectParser
|
help: 'Key for Unity and .Net SDK',
|
||||||
},
|
},
|
||||||
"emailVerifyTokenValidityDuration": {
|
emailAdapter: {
|
||||||
"env": "PARSE_SERVER_EMAIL_VERIFY_TOKEN_VALIDITY_DURATION",
|
env: 'PARSE_SERVER_EMAIL_ADAPTER',
|
||||||
"help": "Email verification token validity duration, in seconds",
|
help: 'Adapter module for email sending',
|
||||||
"action": parsers.numberParser("emailVerifyTokenValidityDuration")
|
action: parsers.moduleOrObjectParser,
|
||||||
},
|
},
|
||||||
"enableAnonymousUsers": {
|
emailVerifyTokenValidityDuration: {
|
||||||
"env": "PARSE_SERVER_ENABLE_ANON_USERS",
|
env: 'PARSE_SERVER_EMAIL_VERIFY_TOKEN_VALIDITY_DURATION',
|
||||||
"help": "Enable (or disable) anon users, defaults to true",
|
help: 'Email verification token validity duration, in seconds',
|
||||||
"action": parsers.booleanParser,
|
action: parsers.numberParser('emailVerifyTokenValidityDuration'),
|
||||||
"default": true
|
},
|
||||||
},
|
enableAnonymousUsers: {
|
||||||
"enableExpressErrorHandler": {
|
env: 'PARSE_SERVER_ENABLE_ANON_USERS',
|
||||||
"env": "PARSE_SERVER_ENABLE_EXPRESS_ERROR_HANDLER",
|
help: 'Enable (or disable) anon users, defaults to true',
|
||||||
"help": "Enables the default express error handler for all errors",
|
action: parsers.booleanParser,
|
||||||
"action": parsers.booleanParser,
|
default: true,
|
||||||
"default": false
|
},
|
||||||
},
|
enableExpressErrorHandler: {
|
||||||
"enableSingleSchemaCache": {
|
env: 'PARSE_SERVER_ENABLE_EXPRESS_ERROR_HANDLER',
|
||||||
"env": "PARSE_SERVER_ENABLE_SINGLE_SCHEMA_CACHE",
|
help: 'Enables the default express error handler for all errors',
|
||||||
"help": "Use a single schema cache shared across requests. Reduces number of queries made to _SCHEMA, defaults to false, i.e. unique schema cache per request.",
|
action: parsers.booleanParser,
|
||||||
"action": parsers.booleanParser,
|
default: false,
|
||||||
"default": false
|
},
|
||||||
},
|
enableSingleSchemaCache: {
|
||||||
"expireInactiveSessions": {
|
env: 'PARSE_SERVER_ENABLE_SINGLE_SCHEMA_CACHE',
|
||||||
"env": "PARSE_SERVER_EXPIRE_INACTIVE_SESSIONS",
|
help:
|
||||||
"help": "Sets wether we should expire the inactive sessions, defaults to true",
|
'Use a single schema cache shared across requests. Reduces number of queries made to _SCHEMA, defaults to false, i.e. unique schema cache per request.',
|
||||||
"action": parsers.booleanParser,
|
action: parsers.booleanParser,
|
||||||
"default": true
|
default: false,
|
||||||
},
|
},
|
||||||
"fileKey": {
|
expireInactiveSessions: {
|
||||||
"env": "PARSE_SERVER_FILE_KEY",
|
env: 'PARSE_SERVER_EXPIRE_INACTIVE_SESSIONS',
|
||||||
"help": "Key for your files"
|
help:
|
||||||
},
|
'Sets wether we should expire the inactive sessions, defaults to true',
|
||||||
"filesAdapter": {
|
action: parsers.booleanParser,
|
||||||
"env": "PARSE_SERVER_FILES_ADAPTER",
|
default: true,
|
||||||
"help": "Adapter module for the files sub-system",
|
},
|
||||||
"action": parsers.moduleOrObjectParser
|
fileKey: {
|
||||||
},
|
env: 'PARSE_SERVER_FILE_KEY',
|
||||||
"graphQLPath": {
|
help: 'Key for your files',
|
||||||
"env": "PARSE_SERVER_GRAPHQL_PATH",
|
},
|
||||||
"help": "Mount path for the GraphQL endpoint, defaults to /graphql",
|
filesAdapter: {
|
||||||
"default": "/graphql"
|
env: 'PARSE_SERVER_FILES_ADAPTER',
|
||||||
},
|
help: 'Adapter module for the files sub-system',
|
||||||
"graphQLSchema": {
|
action: parsers.moduleOrObjectParser,
|
||||||
"env": "PARSE_SERVER_GRAPH_QLSCHEMA",
|
},
|
||||||
"help": "Full path to your GraphQL custom schema.graphql file"
|
graphQLPath: {
|
||||||
},
|
env: 'PARSE_SERVER_GRAPHQL_PATH',
|
||||||
"host": {
|
help: 'Mount path for the GraphQL endpoint, defaults to /graphql',
|
||||||
"env": "PARSE_SERVER_HOST",
|
default: '/graphql',
|
||||||
"help": "The host to serve ParseServer on, defaults to 0.0.0.0",
|
},
|
||||||
"default": "0.0.0.0"
|
graphQLSchema: {
|
||||||
},
|
env: 'PARSE_SERVER_GRAPH_QLSCHEMA',
|
||||||
"javascriptKey": {
|
help: 'Full path to your GraphQL custom schema.graphql file',
|
||||||
"env": "PARSE_SERVER_JAVASCRIPT_KEY",
|
},
|
||||||
"help": "Key for the Javascript SDK"
|
host: {
|
||||||
},
|
env: 'PARSE_SERVER_HOST',
|
||||||
"jsonLogs": {
|
help: 'The host to serve ParseServer on, defaults to 0.0.0.0',
|
||||||
"env": "JSON_LOGS",
|
default: '0.0.0.0',
|
||||||
"help": "Log as structured JSON objects",
|
},
|
||||||
"action": parsers.booleanParser
|
javascriptKey: {
|
||||||
},
|
env: 'PARSE_SERVER_JAVASCRIPT_KEY',
|
||||||
"liveQuery": {
|
help: 'Key for the Javascript SDK',
|
||||||
"env": "PARSE_SERVER_LIVE_QUERY",
|
},
|
||||||
"help": "parse-server's LiveQuery configuration object",
|
jsonLogs: {
|
||||||
"action": parsers.objectParser
|
env: 'JSON_LOGS',
|
||||||
},
|
help: 'Log as structured JSON objects',
|
||||||
"liveQueryServerOptions": {
|
action: parsers.booleanParser,
|
||||||
"env": "PARSE_SERVER_LIVE_QUERY_SERVER_OPTIONS",
|
},
|
||||||
"help": "Live query server configuration options (will start the liveQuery server)",
|
liveQuery: {
|
||||||
"action": parsers.objectParser
|
env: 'PARSE_SERVER_LIVE_QUERY',
|
||||||
},
|
help: "parse-server's LiveQuery configuration object",
|
||||||
"loggerAdapter": {
|
action: parsers.objectParser,
|
||||||
"env": "PARSE_SERVER_LOGGER_ADAPTER",
|
},
|
||||||
"help": "Adapter module for the logging sub-system",
|
liveQueryServerOptions: {
|
||||||
"action": parsers.moduleOrObjectParser
|
env: 'PARSE_SERVER_LIVE_QUERY_SERVER_OPTIONS',
|
||||||
},
|
help:
|
||||||
"logLevel": {
|
'Live query server configuration options (will start the liveQuery server)',
|
||||||
"env": "PARSE_SERVER_LOG_LEVEL",
|
action: parsers.objectParser,
|
||||||
"help": "Sets the level for logs"
|
},
|
||||||
},
|
loggerAdapter: {
|
||||||
"logsFolder": {
|
env: 'PARSE_SERVER_LOGGER_ADAPTER',
|
||||||
"env": "PARSE_SERVER_LOGS_FOLDER",
|
help: 'Adapter module for the logging sub-system',
|
||||||
"help": "Folder for the logs (defaults to './logs'); set to null to disable file based logging",
|
action: parsers.moduleOrObjectParser,
|
||||||
"default": "./logs"
|
},
|
||||||
},
|
logLevel: {
|
||||||
"masterKey": {
|
env: 'PARSE_SERVER_LOG_LEVEL',
|
||||||
"env": "PARSE_SERVER_MASTER_KEY",
|
help: 'Sets the level for logs',
|
||||||
"help": "Your Parse Master Key",
|
},
|
||||||
"required": true
|
logsFolder: {
|
||||||
},
|
env: 'PARSE_SERVER_LOGS_FOLDER',
|
||||||
"masterKeyIps": {
|
help:
|
||||||
"env": "PARSE_SERVER_MASTER_KEY_IPS",
|
"Folder for the logs (defaults to './logs'); set to null to disable file based logging",
|
||||||
"help": "Restrict masterKey to be used by only these ips, defaults to [] (allow all ips)",
|
default: './logs',
|
||||||
"action": parsers.arrayParser,
|
},
|
||||||
"default": []
|
masterKey: {
|
||||||
},
|
env: 'PARSE_SERVER_MASTER_KEY',
|
||||||
"maxLimit": {
|
help: 'Your Parse Master Key',
|
||||||
"env": "PARSE_SERVER_MAX_LIMIT",
|
required: true,
|
||||||
"help": "Max value for limit option on queries, defaults to unlimited",
|
},
|
||||||
"action": parsers.numberParser("maxLimit")
|
masterKeyIps: {
|
||||||
},
|
env: 'PARSE_SERVER_MASTER_KEY_IPS',
|
||||||
"maxUploadSize": {
|
help:
|
||||||
"env": "PARSE_SERVER_MAX_UPLOAD_SIZE",
|
'Restrict masterKey to be used by only these ips, defaults to [] (allow all ips)',
|
||||||
"help": "Max file size for uploads, defaults to 20mb",
|
action: parsers.arrayParser,
|
||||||
"default": "20mb"
|
default: [],
|
||||||
},
|
},
|
||||||
"middleware": {
|
maxLimit: {
|
||||||
"env": "PARSE_SERVER_MIDDLEWARE",
|
env: 'PARSE_SERVER_MAX_LIMIT',
|
||||||
"help": "middleware for express server, can be string or function"
|
help: 'Max value for limit option on queries, defaults to unlimited',
|
||||||
},
|
action: parsers.numberParser('maxLimit'),
|
||||||
"mountGraphQL": {
|
},
|
||||||
"env": "PARSE_SERVER_MOUNT_GRAPHQL",
|
maxUploadSize: {
|
||||||
"help": "Mounts the GraphQL endpoint",
|
env: 'PARSE_SERVER_MAX_UPLOAD_SIZE',
|
||||||
"action": parsers.booleanParser,
|
help: 'Max file size for uploads, defaults to 20mb',
|
||||||
"default": false
|
default: '20mb',
|
||||||
},
|
},
|
||||||
"mountPath": {
|
middleware: {
|
||||||
"env": "PARSE_SERVER_MOUNT_PATH",
|
env: 'PARSE_SERVER_MIDDLEWARE',
|
||||||
"help": "Mount path for the server, defaults to /parse",
|
help: 'middleware for express server, can be string or function',
|
||||||
"default": "/parse"
|
},
|
||||||
},
|
mountGraphQL: {
|
||||||
"mountPlayground": {
|
env: 'PARSE_SERVER_MOUNT_GRAPHQL',
|
||||||
"env": "PARSE_SERVER_MOUNT_PLAYGROUND",
|
help: 'Mounts the GraphQL endpoint',
|
||||||
"help": "Mounts the GraphQL Playground - never use this option in production",
|
action: parsers.booleanParser,
|
||||||
"action": parsers.booleanParser,
|
default: false,
|
||||||
"default": false
|
},
|
||||||
},
|
mountPath: {
|
||||||
"objectIdSize": {
|
env: 'PARSE_SERVER_MOUNT_PATH',
|
||||||
"env": "PARSE_SERVER_OBJECT_ID_SIZE",
|
help: 'Mount path for the server, defaults to /parse',
|
||||||
"help": "Sets the number of characters in generated object id's, default 10",
|
default: '/parse',
|
||||||
"action": parsers.numberParser("objectIdSize"),
|
},
|
||||||
"default": 10
|
mountPlayground: {
|
||||||
},
|
env: 'PARSE_SERVER_MOUNT_PLAYGROUND',
|
||||||
"passwordPolicy": {
|
help: 'Mounts the GraphQL Playground - never use this option in production',
|
||||||
"env": "PARSE_SERVER_PASSWORD_POLICY",
|
action: parsers.booleanParser,
|
||||||
"help": "Password policy for enforcing password related rules",
|
default: false,
|
||||||
"action": parsers.objectParser
|
},
|
||||||
},
|
objectIdSize: {
|
||||||
"playgroundPath": {
|
env: 'PARSE_SERVER_OBJECT_ID_SIZE',
|
||||||
"env": "PARSE_SERVER_PLAYGROUND_PATH",
|
help: "Sets the number of characters in generated object id's, default 10",
|
||||||
"help": "Mount path for the GraphQL Playground, defaults to /playground",
|
action: parsers.numberParser('objectIdSize'),
|
||||||
"default": "/playground"
|
default: 10,
|
||||||
},
|
},
|
||||||
"port": {
|
passwordPolicy: {
|
||||||
"env": "PORT",
|
env: 'PARSE_SERVER_PASSWORD_POLICY',
|
||||||
"help": "The port to run the ParseServer, defaults to 1337.",
|
help: 'Password policy for enforcing password related rules',
|
||||||
"action": parsers.numberParser("port"),
|
action: parsers.objectParser,
|
||||||
"default": 1337
|
},
|
||||||
},
|
playgroundPath: {
|
||||||
"preserveFileName": {
|
env: 'PARSE_SERVER_PLAYGROUND_PATH',
|
||||||
"env": "PARSE_SERVER_PRESERVE_FILE_NAME",
|
help: 'Mount path for the GraphQL Playground, defaults to /playground',
|
||||||
"help": "Enable (or disable) the addition of a unique hash to the file names",
|
default: '/playground',
|
||||||
"action": parsers.booleanParser,
|
},
|
||||||
"default": false
|
port: {
|
||||||
},
|
env: 'PORT',
|
||||||
"preventLoginWithUnverifiedEmail": {
|
help: 'The port to run the ParseServer, defaults to 1337.',
|
||||||
"env": "PARSE_SERVER_PREVENT_LOGIN_WITH_UNVERIFIED_EMAIL",
|
action: parsers.numberParser('port'),
|
||||||
"help": "Prevent user from login if email is not verified and PARSE_SERVER_VERIFY_USER_EMAILS is true, defaults to false",
|
default: 1337,
|
||||||
"action": parsers.booleanParser,
|
},
|
||||||
"default": false
|
preserveFileName: {
|
||||||
},
|
env: 'PARSE_SERVER_PRESERVE_FILE_NAME',
|
||||||
"protectedFields": {
|
help: 'Enable (or disable) the addition of a unique hash to the file names',
|
||||||
"env": "PARSE_SERVER_PROTECTED_FIELDS",
|
action: parsers.booleanParser,
|
||||||
"help": "Protected fields that should be treated with extra security when fetching details.",
|
default: false,
|
||||||
"action": parsers.objectParser,
|
},
|
||||||
"default": {
|
preventLoginWithUnverifiedEmail: {
|
||||||
"_User": {
|
env: 'PARSE_SERVER_PREVENT_LOGIN_WITH_UNVERIFIED_EMAIL',
|
||||||
"*": ["email"]
|
help:
|
||||||
}
|
'Prevent user from login if email is not verified and PARSE_SERVER_VERIFY_USER_EMAILS is true, defaults to false',
|
||||||
}
|
action: parsers.booleanParser,
|
||||||
},
|
default: false,
|
||||||
"publicServerURL": {
|
},
|
||||||
"env": "PARSE_PUBLIC_SERVER_URL",
|
protectedFields: {
|
||||||
"help": "Public URL to your parse server with http:// or https://."
|
env: 'PARSE_SERVER_PROTECTED_FIELDS',
|
||||||
},
|
help:
|
||||||
"push": {
|
'Protected fields that should be treated with extra security when fetching details.',
|
||||||
"env": "PARSE_SERVER_PUSH",
|
action: parsers.objectParser,
|
||||||
"help": "Configuration for push, as stringified JSON. See http://docs.parseplatform.org/parse-server/guide/#push-notifications",
|
default: {
|
||||||
"action": parsers.objectParser
|
_User: {
|
||||||
},
|
'*': ['email'],
|
||||||
"readOnlyMasterKey": {
|
},
|
||||||
"env": "PARSE_SERVER_READ_ONLY_MASTER_KEY",
|
},
|
||||||
"help": "Read-only key, which has the same capabilities as MasterKey without writes"
|
},
|
||||||
},
|
publicServerURL: {
|
||||||
"restAPIKey": {
|
env: 'PARSE_PUBLIC_SERVER_URL',
|
||||||
"env": "PARSE_SERVER_REST_API_KEY",
|
help: 'Public URL to your parse server with http:// or https://.',
|
||||||
"help": "Key for REST calls"
|
},
|
||||||
},
|
push: {
|
||||||
"revokeSessionOnPasswordReset": {
|
env: 'PARSE_SERVER_PUSH',
|
||||||
"env": "PARSE_SERVER_REVOKE_SESSION_ON_PASSWORD_RESET",
|
help:
|
||||||
"help": "When a user changes their password, either through the reset password email or while logged in, all sessions are revoked if this is true. Set to false if you don't want to revoke sessions.",
|
'Configuration for push, as stringified JSON. See http://docs.parseplatform.org/parse-server/guide/#push-notifications',
|
||||||
"action": parsers.booleanParser,
|
action: parsers.objectParser,
|
||||||
"default": true
|
},
|
||||||
},
|
readOnlyMasterKey: {
|
||||||
"scheduledPush": {
|
env: 'PARSE_SERVER_READ_ONLY_MASTER_KEY',
|
||||||
"env": "PARSE_SERVER_SCHEDULED_PUSH",
|
help:
|
||||||
"help": "Configuration for push scheduling, defaults to false.",
|
'Read-only key, which has the same capabilities as MasterKey without writes',
|
||||||
"action": parsers.booleanParser,
|
},
|
||||||
"default": false
|
restAPIKey: {
|
||||||
},
|
env: 'PARSE_SERVER_REST_API_KEY',
|
||||||
"schemaCacheTTL": {
|
help: 'Key for REST calls',
|
||||||
"env": "PARSE_SERVER_SCHEMA_CACHE_TTL",
|
},
|
||||||
"help": "The TTL for caching the schema for optimizing read/write operations. You should put a long TTL when your DB is in production. default to 5000; set 0 to disable.",
|
revokeSessionOnPasswordReset: {
|
||||||
"action": parsers.numberParser("schemaCacheTTL"),
|
env: 'PARSE_SERVER_REVOKE_SESSION_ON_PASSWORD_RESET',
|
||||||
"default": 5000
|
help:
|
||||||
},
|
"When a user changes their password, either through the reset password email or while logged in, all sessions are revoked if this is true. Set to false if you don't want to revoke sessions.",
|
||||||
"serverURL": {
|
action: parsers.booleanParser,
|
||||||
"env": "PARSE_SERVER_URL",
|
default: true,
|
||||||
"help": "URL to your parse server with http:// or https://.",
|
},
|
||||||
"required": true
|
scheduledPush: {
|
||||||
},
|
env: 'PARSE_SERVER_SCHEDULED_PUSH',
|
||||||
"sessionLength": {
|
help: 'Configuration for push scheduling, defaults to false.',
|
||||||
"env": "PARSE_SERVER_SESSION_LENGTH",
|
action: parsers.booleanParser,
|
||||||
"help": "Session duration, in seconds, defaults to 1 year",
|
default: false,
|
||||||
"action": parsers.numberParser("sessionLength"),
|
},
|
||||||
"default": 31536000
|
schemaCacheTTL: {
|
||||||
},
|
env: 'PARSE_SERVER_SCHEMA_CACHE_TTL',
|
||||||
"silent": {
|
help:
|
||||||
"env": "SILENT",
|
'The TTL for caching the schema for optimizing read/write operations. You should put a long TTL when your DB is in production. default to 5000; set 0 to disable.',
|
||||||
"help": "Disables console output",
|
action: parsers.numberParser('schemaCacheTTL'),
|
||||||
"action": parsers.booleanParser
|
default: 5000,
|
||||||
},
|
},
|
||||||
"skipMongoDBServer13732Workaround": {
|
serverURL: {
|
||||||
"env": "PARSE_SKIP_MONGODB_SERVER_13732_WORKAROUND",
|
env: 'PARSE_SERVER_URL',
|
||||||
"help": "Circumvent Parse workaround for historical MongoDB bug SERVER-13732",
|
help: 'URL to your parse server with http:// or https://.',
|
||||||
"action": parsers.booleanParser,
|
required: true,
|
||||||
"default": false
|
},
|
||||||
},
|
sessionLength: {
|
||||||
"startLiveQueryServer": {
|
env: 'PARSE_SERVER_SESSION_LENGTH',
|
||||||
"env": "PARSE_SERVER_START_LIVE_QUERY_SERVER",
|
help: 'Session duration, in seconds, defaults to 1 year',
|
||||||
"help": "Starts the liveQuery server",
|
action: parsers.numberParser('sessionLength'),
|
||||||
"action": parsers.booleanParser
|
default: 31536000,
|
||||||
},
|
},
|
||||||
"userSensitiveFields": {
|
silent: {
|
||||||
"env": "PARSE_SERVER_USER_SENSITIVE_FIELDS",
|
env: 'SILENT',
|
||||||
"help": "Personally identifiable information fields in the user table the should be removed for non-authorized users. Deprecated @see protectedFields",
|
help: 'Disables console output',
|
||||||
"action": parsers.arrayParser
|
action: parsers.booleanParser,
|
||||||
},
|
},
|
||||||
"verbose": {
|
skipMongoDBServer13732Workaround: {
|
||||||
"env": "VERBOSE",
|
env: 'PARSE_SKIP_MONGODB_SERVER_13732_WORKAROUND',
|
||||||
"help": "Set the logging to verbose",
|
help: 'Circumvent Parse workaround for historical MongoDB bug SERVER-13732',
|
||||||
"action": parsers.booleanParser
|
action: parsers.booleanParser,
|
||||||
},
|
default: false,
|
||||||
"verifyUserEmails": {
|
},
|
||||||
"env": "PARSE_SERVER_VERIFY_USER_EMAILS",
|
startLiveQueryServer: {
|
||||||
"help": "Enable (or disable) user email validation, defaults to false",
|
env: 'PARSE_SERVER_START_LIVE_QUERY_SERVER',
|
||||||
"action": parsers.booleanParser,
|
help: 'Starts the liveQuery server',
|
||||||
"default": false
|
action: parsers.booleanParser,
|
||||||
},
|
},
|
||||||
"webhookKey": {
|
userSensitiveFields: {
|
||||||
"env": "PARSE_SERVER_WEBHOOK_KEY",
|
env: 'PARSE_SERVER_USER_SENSITIVE_FIELDS',
|
||||||
"help": "Key sent with outgoing webhook calls"
|
help:
|
||||||
}
|
'Personally identifiable information fields in the user table the should be removed for non-authorized users. Deprecated @see protectedFields',
|
||||||
|
action: parsers.arrayParser,
|
||||||
|
},
|
||||||
|
verbose: {
|
||||||
|
env: 'VERBOSE',
|
||||||
|
help: 'Set the logging to verbose',
|
||||||
|
action: parsers.booleanParser,
|
||||||
|
},
|
||||||
|
verifyUserEmails: {
|
||||||
|
env: 'PARSE_SERVER_VERIFY_USER_EMAILS',
|
||||||
|
help: 'Enable (or disable) user email validation, defaults to false',
|
||||||
|
action: parsers.booleanParser,
|
||||||
|
default: false,
|
||||||
|
},
|
||||||
|
webhookKey: {
|
||||||
|
env: 'PARSE_SERVER_WEBHOOK_KEY',
|
||||||
|
help: 'Key sent with outgoing webhook calls',
|
||||||
|
},
|
||||||
};
|
};
|
||||||
module.exports.CustomPagesOptions = {
|
module.exports.CustomPagesOptions = {
|
||||||
"choosePassword": {
|
choosePassword: {
|
||||||
"env": "PARSE_SERVER_CUSTOM_PAGES_CHOOSE_PASSWORD",
|
env: 'PARSE_SERVER_CUSTOM_PAGES_CHOOSE_PASSWORD',
|
||||||
"help": "choose password page path"
|
help: 'choose password page path',
|
||||||
},
|
},
|
||||||
"invalidLink": {
|
invalidLink: {
|
||||||
"env": "PARSE_SERVER_CUSTOM_PAGES_INVALID_LINK",
|
env: 'PARSE_SERVER_CUSTOM_PAGES_INVALID_LINK',
|
||||||
"help": "invalid link page path"
|
help: 'invalid link page path',
|
||||||
},
|
},
|
||||||
"invalidVerificationLink": {
|
invalidVerificationLink: {
|
||||||
"env": "PARSE_SERVER_CUSTOM_PAGES_INVALID_VERIFICATION_LINK",
|
env: 'PARSE_SERVER_CUSTOM_PAGES_INVALID_VERIFICATION_LINK',
|
||||||
"help": "invalid verification link page path"
|
help: 'invalid verification link page path',
|
||||||
},
|
},
|
||||||
"linkSendFail": {
|
linkSendFail: {
|
||||||
"env": "PARSE_SERVER_CUSTOM_PAGES_LINK_SEND_FAIL",
|
env: 'PARSE_SERVER_CUSTOM_PAGES_LINK_SEND_FAIL',
|
||||||
"help": "verification link send fail page path"
|
help: 'verification link send fail page path',
|
||||||
},
|
},
|
||||||
"linkSendSuccess": {
|
linkSendSuccess: {
|
||||||
"env": "PARSE_SERVER_CUSTOM_PAGES_LINK_SEND_SUCCESS",
|
env: 'PARSE_SERVER_CUSTOM_PAGES_LINK_SEND_SUCCESS',
|
||||||
"help": "verification link send success page path"
|
help: 'verification link send success page path',
|
||||||
},
|
},
|
||||||
"parseFrameURL": {
|
parseFrameURL: {
|
||||||
"env": "PARSE_SERVER_CUSTOM_PAGES_PARSE_FRAME_URL",
|
env: 'PARSE_SERVER_CUSTOM_PAGES_PARSE_FRAME_URL',
|
||||||
"help": "for masking user-facing pages"
|
help: 'for masking user-facing pages',
|
||||||
},
|
},
|
||||||
"passwordResetSuccess": {
|
passwordResetSuccess: {
|
||||||
"env": "PARSE_SERVER_CUSTOM_PAGES_PASSWORD_RESET_SUCCESS",
|
env: 'PARSE_SERVER_CUSTOM_PAGES_PASSWORD_RESET_SUCCESS',
|
||||||
"help": "password reset success page path"
|
help: 'password reset success page path',
|
||||||
|
},
|
||||||
|
verifyEmailSuccess: {
|
||||||
|
env: 'PARSE_SERVER_CUSTOM_PAGES_VERIFY_EMAIL_SUCCESS',
|
||||||
|
help: 'verify email success page path',
|
||||||
},
|
},
|
||||||
"verifyEmailSuccess": {
|
|
||||||
"env": "PARSE_SERVER_CUSTOM_PAGES_VERIFY_EMAIL_SUCCESS",
|
|
||||||
"help": "verify email success page path"
|
|
||||||
}
|
|
||||||
};
|
};
|
||||||
module.exports.LiveQueryOptions = {
|
module.exports.LiveQueryOptions = {
|
||||||
"classNames": {
|
classNames: {
|
||||||
"env": "PARSE_SERVER_LIVEQUERY_CLASSNAMES",
|
env: 'PARSE_SERVER_LIVEQUERY_CLASSNAMES',
|
||||||
"help": "parse-server's LiveQuery classNames",
|
help: "parse-server's LiveQuery classNames",
|
||||||
"action": parsers.arrayParser
|
action: parsers.arrayParser,
|
||||||
},
|
},
|
||||||
"pubSubAdapter": {
|
pubSubAdapter: {
|
||||||
"env": "PARSE_SERVER_LIVEQUERY_PUB_SUB_ADAPTER",
|
env: 'PARSE_SERVER_LIVEQUERY_PUB_SUB_ADAPTER',
|
||||||
"help": "LiveQuery pubsub adapter",
|
help: 'LiveQuery pubsub adapter',
|
||||||
"action": parsers.moduleOrObjectParser
|
action: parsers.moduleOrObjectParser,
|
||||||
},
|
},
|
||||||
"redisOptions": {
|
redisOptions: {
|
||||||
"env": "PARSE_SERVER_LIVEQUERY_REDIS_OPTIONS",
|
env: 'PARSE_SERVER_LIVEQUERY_REDIS_OPTIONS',
|
||||||
"help": "parse-server's LiveQuery redisOptions",
|
help: "parse-server's LiveQuery redisOptions",
|
||||||
"action": parsers.objectParser
|
action: parsers.objectParser,
|
||||||
},
|
},
|
||||||
"redisURL": {
|
redisURL: {
|
||||||
"env": "PARSE_SERVER_LIVEQUERY_REDIS_URL",
|
env: 'PARSE_SERVER_LIVEQUERY_REDIS_URL',
|
||||||
"help": "parse-server's LiveQuery redisURL"
|
help: "parse-server's LiveQuery redisURL",
|
||||||
|
},
|
||||||
|
wssAdapter: {
|
||||||
|
env: 'PARSE_SERVER_LIVEQUERY_WSS_ADAPTER',
|
||||||
|
help: 'Adapter module for the WebSocketServer',
|
||||||
|
action: parsers.moduleOrObjectParser,
|
||||||
},
|
},
|
||||||
"wssAdapter": {
|
|
||||||
"env": "PARSE_SERVER_LIVEQUERY_WSS_ADAPTER",
|
|
||||||
"help": "Adapter module for the WebSocketServer",
|
|
||||||
"action": parsers.moduleOrObjectParser
|
|
||||||
}
|
|
||||||
};
|
};
|
||||||
module.exports.LiveQueryServerOptions = {
|
module.exports.LiveQueryServerOptions = {
|
||||||
"appId": {
|
appId: {
|
||||||
"env": "PARSE_LIVE_QUERY_SERVER_APP_ID",
|
env: 'PARSE_LIVE_QUERY_SERVER_APP_ID',
|
||||||
"help": "This string should match the appId in use by your Parse Server. If you deploy the LiveQuery server alongside Parse Server, the LiveQuery server will try to use the same appId."
|
help:
|
||||||
|
'This string should match the appId in use by your Parse Server. If you deploy the LiveQuery server alongside Parse Server, the LiveQuery server will try to use the same appId.',
|
||||||
},
|
},
|
||||||
"cacheTimeout": {
|
cacheTimeout: {
|
||||||
"env": "PARSE_LIVE_QUERY_SERVER_CACHE_TIMEOUT",
|
env: 'PARSE_LIVE_QUERY_SERVER_CACHE_TIMEOUT',
|
||||||
"help": "Number in milliseconds. When clients provide the sessionToken to the LiveQuery server, the LiveQuery server will try to fetch its ParseUser's objectId from parse server and store it in the cache. The value defines the duration of the cache. Check the following Security section and our protocol specification for details, defaults to 30 * 24 * 60 * 60 * 1000 ms (~30 days).",
|
help:
|
||||||
"action": parsers.numberParser("cacheTimeout")
|
"Number in milliseconds. When clients provide the sessionToken to the LiveQuery server, the LiveQuery server will try to fetch its ParseUser's objectId from parse server and store it in the cache. The value defines the duration of the cache. Check the following Security section and our protocol specification for details, defaults to 30 * 24 * 60 * 60 * 1000 ms (~30 days).",
|
||||||
|
action: parsers.numberParser('cacheTimeout'),
|
||||||
},
|
},
|
||||||
"keyPairs": {
|
keyPairs: {
|
||||||
"env": "PARSE_LIVE_QUERY_SERVER_KEY_PAIRS",
|
env: 'PARSE_LIVE_QUERY_SERVER_KEY_PAIRS',
|
||||||
"help": "A JSON object that serves as a whitelist of keys. It is used for validating clients when they try to connect to the LiveQuery server. Check the following Security section and our protocol specification for details.",
|
help:
|
||||||
"action": parsers.objectParser
|
'A JSON object that serves as a whitelist of keys. It is used for validating clients when they try to connect to the LiveQuery server. Check the following Security section and our protocol specification for details.',
|
||||||
|
action: parsers.objectParser,
|
||||||
},
|
},
|
||||||
"logLevel": {
|
logLevel: {
|
||||||
"env": "PARSE_LIVE_QUERY_SERVER_LOG_LEVEL",
|
env: 'PARSE_LIVE_QUERY_SERVER_LOG_LEVEL',
|
||||||
"help": "This string defines the log level of the LiveQuery server. We support VERBOSE, INFO, ERROR, NONE, defaults to INFO."
|
help:
|
||||||
|
'This string defines the log level of the LiveQuery server. We support VERBOSE, INFO, ERROR, NONE, defaults to INFO.',
|
||||||
},
|
},
|
||||||
"masterKey": {
|
masterKey: {
|
||||||
"env": "PARSE_LIVE_QUERY_SERVER_MASTER_KEY",
|
env: 'PARSE_LIVE_QUERY_SERVER_MASTER_KEY',
|
||||||
"help": "This string should match the masterKey in use by your Parse Server. If you deploy the LiveQuery server alongside Parse Server, the LiveQuery server will try to use the same masterKey."
|
help:
|
||||||
|
'This string should match the masterKey in use by your Parse Server. If you deploy the LiveQuery server alongside Parse Server, the LiveQuery server will try to use the same masterKey.',
|
||||||
},
|
},
|
||||||
"port": {
|
port: {
|
||||||
"env": "PARSE_LIVE_QUERY_SERVER_PORT",
|
env: 'PARSE_LIVE_QUERY_SERVER_PORT',
|
||||||
"help": "The port to run the LiveQuery server, defaults to 1337.",
|
help: 'The port to run the LiveQuery server, defaults to 1337.',
|
||||||
"action": parsers.numberParser("port"),
|
action: parsers.numberParser('port'),
|
||||||
"default": 1337
|
default: 1337,
|
||||||
},
|
},
|
||||||
"pubSubAdapter": {
|
pubSubAdapter: {
|
||||||
"env": "PARSE_LIVE_QUERY_SERVER_PUB_SUB_ADAPTER",
|
env: 'PARSE_LIVE_QUERY_SERVER_PUB_SUB_ADAPTER',
|
||||||
"help": "LiveQuery pubsub adapter",
|
help: 'LiveQuery pubsub adapter',
|
||||||
"action": parsers.moduleOrObjectParser
|
action: parsers.moduleOrObjectParser,
|
||||||
},
|
},
|
||||||
"redisOptions": {
|
redisOptions: {
|
||||||
"env": "PARSE_LIVE_QUERY_SERVER_REDIS_OPTIONS",
|
env: 'PARSE_LIVE_QUERY_SERVER_REDIS_OPTIONS',
|
||||||
"help": "parse-server's LiveQuery redisOptions",
|
help: "parse-server's LiveQuery redisOptions",
|
||||||
"action": parsers.objectParser
|
action: parsers.objectParser,
|
||||||
},
|
},
|
||||||
"redisURL": {
|
redisURL: {
|
||||||
"env": "PARSE_LIVE_QUERY_SERVER_REDIS_URL",
|
env: 'PARSE_LIVE_QUERY_SERVER_REDIS_URL',
|
||||||
"help": "parse-server's LiveQuery redisURL"
|
help: "parse-server's LiveQuery redisURL",
|
||||||
},
|
},
|
||||||
"serverURL": {
|
serverURL: {
|
||||||
"env": "PARSE_LIVE_QUERY_SERVER_SERVER_URL",
|
env: 'PARSE_LIVE_QUERY_SERVER_SERVER_URL',
|
||||||
"help": "This string should match the serverURL in use by your Parse Server. If you deploy the LiveQuery server alongside Parse Server, the LiveQuery server will try to use the same serverURL."
|
help:
|
||||||
|
'This string should match the serverURL in use by your Parse Server. If you deploy the LiveQuery server alongside Parse Server, the LiveQuery server will try to use the same serverURL.',
|
||||||
},
|
},
|
||||||
"websocketTimeout": {
|
websocketTimeout: {
|
||||||
"env": "PARSE_LIVE_QUERY_SERVER_WEBSOCKET_TIMEOUT",
|
env: 'PARSE_LIVE_QUERY_SERVER_WEBSOCKET_TIMEOUT',
|
||||||
"help": "Number of milliseconds between ping/pong frames. The WebSocket server sends ping/pong frames to the clients to keep the WebSocket alive. This value defines the interval of the ping/pong frame from the server to clients, defaults to 10 * 1000 ms (10 s).",
|
help:
|
||||||
"action": parsers.numberParser("websocketTimeout")
|
'Number of milliseconds between ping/pong frames. The WebSocket server sends ping/pong frames to the clients to keep the WebSocket alive. This value defines the interval of the ping/pong frame from the server to clients, defaults to 10 * 1000 ms (10 s).',
|
||||||
|
action: parsers.numberParser('websocketTimeout'),
|
||||||
|
},
|
||||||
|
wssAdapter: {
|
||||||
|
env: 'PARSE_LIVE_QUERY_SERVER_WSS_ADAPTER',
|
||||||
|
help: 'Adapter module for the WebSocketServer',
|
||||||
|
action: parsers.moduleOrObjectParser,
|
||||||
},
|
},
|
||||||
"wssAdapter": {
|
|
||||||
"env": "PARSE_LIVE_QUERY_SERVER_WSS_ADAPTER",
|
|
||||||
"help": "Adapter module for the WebSocketServer",
|
|
||||||
"action": parsers.moduleOrObjectParser
|
|
||||||
}
|
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -106,4 +106,3 @@
|
|||||||
* @property {Number} websocketTimeout Number of milliseconds between ping/pong frames. The WebSocket server sends ping/pong frames to the clients to keep the WebSocket alive. This value defines the interval of the ping/pong frame from the server to clients, defaults to 10 * 1000 ms (10 s).
|
* @property {Number} websocketTimeout Number of milliseconds between ping/pong frames. The WebSocket server sends ping/pong frames to the clients to keep the WebSocket alive. This value defines the interval of the ping/pong frame from the server to clients, defaults to 10 * 1000 ms (10 s).
|
||||||
* @property {Adapter<WSSAdapter>} wssAdapter Adapter module for the WebSocketServer
|
* @property {Adapter<WSSAdapter>} wssAdapter Adapter module for the WebSocketServer
|
||||||
*/
|
*/
|
||||||
|
|
||||||
|
|||||||
Reference in New Issue
Block a user