Enable prefer-const lint rule (#3202)

This commit is contained in:
Arthur Cinader
2016-12-07 15:17:05 -08:00
committed by Florent Vilmart
parent a6c988176e
commit ca286b7108
106 changed files with 1183 additions and 1183 deletions

View File

@@ -57,11 +57,11 @@ function authDataValidator(adapter, appIds, options) {
module.exports = function(authOptions = {}, enableAnonymousUsers = true) {
let _enableAnonymousUsers = enableAnonymousUsers;
let setEnableAnonymousUsers = function(enable) {
const setEnableAnonymousUsers = function(enable) {
_enableAnonymousUsers = enable;
}
// To handle the test cases on configuration
let getValidatorForProvider = function(provider) {
const getValidatorForProvider = function(provider) {
if (provider === 'anonymous' && !_enableAnonymousUsers) {
return;

View File

@@ -28,7 +28,7 @@ function validateAppId() {
function handleMultipleConfigurations(authData, options) {
if (Array.isArray(options)) {
let consumer_key = authData.consumer_key;
const consumer_key = authData.consumer_key;
if (!consumer_key) {
logger.error('Twitter Auth', 'Multiple twitter configurations are available, by no consumer_key was sent by the client.');
throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'Twitter auth is invalid for this user.');

View File

@@ -10,7 +10,7 @@ export class InMemoryCache {
}
get(key) {
let record = this.cache[key];
const record = this.cache[key];
if (record == null) {
return null;
}

View File

@@ -8,7 +8,7 @@ export class InMemoryCacheAdapter {
get(key) {
return new Promise((resolve) => {
let record = this.cache.get(key);
const record = this.cache.get(key);
if (record == null) {
return resolve(null);
}

View File

@@ -30,7 +30,7 @@ export class GridStoreAdapter extends FilesAdapter {
// Returns a promise
createFile(filename: string, data) {
return this._connect().then(database => {
let gridStore = new GridStore(database, filename, 'w');
const gridStore = new GridStore(database, filename, 'w');
return gridStore.open();
}).then(gridStore => {
return gridStore.write(data);
@@ -41,7 +41,7 @@ export class GridStoreAdapter extends FilesAdapter {
deleteFile(filename: string) {
return this._connect().then(database => {
let gridStore = new GridStore(database, filename, 'r');
const gridStore = new GridStore(database, filename, 'r');
return gridStore.open();
}).then((gridStore) => {
return gridStore.unlink();
@@ -54,7 +54,7 @@ export class GridStoreAdapter extends FilesAdapter {
return this._connect().then(database => {
return GridStore.exist(database, filename)
.then(() => {
let gridStore = new GridStore(database, filename, 'r');
const gridStore = new GridStore(database, filename, 'r');
return gridStore.open();
});
}).then(gridStore => {
@@ -69,7 +69,7 @@ export class GridStoreAdapter extends FilesAdapter {
getFileStream(filename: string) {
return this._connect().then(database => {
return GridStore.exist(database, filename).then(() => {
let gridStore = new GridStore(database, filename, 'r');
const gridStore = new GridStore(database, filename, 'r');
return gridStore.open();
});
});

View File

@@ -9,9 +9,9 @@ const logger = new winston.Logger();
const additionalTransports = [];
function updateTransports(options) {
let transports = Object.assign({}, logger.transports);
const transports = Object.assign({}, logger.transports);
if (options) {
let silent = options.silent;
const silent = options.silent;
delete options.silent;
if (_.isNull(options.dirname)) {
delete transports['parse-server'];
@@ -84,8 +84,8 @@ export function addTransport(transport) {
}
export function removeTransport(transport) {
let transportName = typeof transport == 'string' ? transport : transport.name;
let transports = Object.assign({}, logger.transports);
const transportName = typeof transport == 'string' ? transport : transport.name;
const transports = Object.assign({}, logger.transports);
delete transports[transportName];
logger.configure({
transports: _.values(transports)

View File

@@ -28,11 +28,11 @@ export class WinstonLoggerAdapter extends LoggerAdapter {
options = {};
}
// defaults to 7 days prior
let from = options.from || new Date(Date.now() - (7 * MILLISECONDS_IN_A_DAY));
let until = options.until || new Date();
let limit = options.size || 10;
let order = options.order || 'desc';
let level = options.level || 'info';
const from = options.from || new Date(Date.now() - (7 * MILLISECONDS_IN_A_DAY));
const until = options.until || new Date();
const limit = options.size || 10;
const order = options.order || 'desc';
const level = options.level || 'info';
const queryOptions = {
from,

View File

@@ -1,6 +1,6 @@
import events from 'events';
let emitter = new events.EventEmitter();
const emitter = new events.EventEmitter();
class Publisher {
emitter: any;
@@ -25,7 +25,7 @@ class Subscriber extends events.EventEmitter {
}
subscribe(channel: string): void {
let handler = (message) => {
const handler = (message) => {
this.emit('message', channel, message);
}
this.subscriptions.set(channel, handler);
@@ -49,7 +49,7 @@ function createSubscriber(): any {
return new Subscriber(emitter);
}
let EventEmitterPubSub = {
const EventEmitterPubSub = {
createPublisher,
createSubscriber
}

View File

@@ -1,5 +1,5 @@
let mongodb = require('mongodb');
let Collection = mongodb.Collection;
const mongodb = require('mongodb');
const Collection = mongodb.Collection;
export default class MongoCollection {
_mongoCollection:Collection;
@@ -21,7 +21,7 @@ export default class MongoCollection {
throw error;
}
// Figure out what key needs an index
let key = error.message.match(/field=([A-Za-z_0-9]+) /)[1];
const key = error.message.match(/field=([A-Za-z_0-9]+) /)[1];
if (!key) {
throw error;
}
@@ -50,7 +50,7 @@ export default class MongoCollection {
}
count(query, { skip, limit, sort, maxTimeMS } = {}) {
let countOperation = this._mongoCollection.count(query, { skip, limit, sort, maxTimeMS });
const countOperation = this._mongoCollection.count(query, { skip, limit, sort, maxTimeMS });
return countOperation;
}

View File

@@ -73,7 +73,7 @@ function mongoSchemaToParseSchema(mongoSchema) {
}
function _mongoSchemaQueryFromNameQuery(name: string, query) {
let object = { _id: name };
const object = { _id: name };
if (query) {
Object.keys(query).forEach(key => {
object[key] = query[key];

View File

@@ -15,8 +15,8 @@ import Parse from 'parse/node';
import _ from 'lodash';
import defaults from '../../../defaults';
let mongodb = require('mongodb');
let MongoClient = mongodb.MongoClient;
const mongodb = require('mongodb');
const MongoClient = mongodb.MongoClient;
const MongoSchemaCollectionName = '_SCHEMA';
@@ -53,14 +53,14 @@ const convertParseSchemaToMongoSchema = ({...schema}) => {
// Returns { code, error } if invalid, or { result }, an object
// suitable for inserting into _SCHEMA collection, otherwise.
const mongoSchemaFromFieldsAndClassNameAndCLP = (fields, className, classLevelPermissions) => {
let mongoObject = {
const mongoObject = {
_id: className,
objectId: 'string',
updatedAt: 'string',
createdAt: 'string'
};
for (let fieldName in fields) {
for (const fieldName in fields) {
mongoObject[fieldName] = MongoSchemaCollection.parseFieldTypeToMongoFieldType(fields[fieldName]);
}
@@ -157,7 +157,7 @@ export class MongoStorageAdapter {
createClass(className, schema) {
schema = convertParseSchemaToMongoSchema(schema);
let mongoObject = mongoSchemaFromFieldsAndClassNameAndCLP(schema.fields, className, schema.classLevelPermissions);
const mongoObject = mongoSchemaFromFieldsAndClassNameAndCLP(schema.fields, className, schema.classLevelPermissions);
mongoObject._id = className;
return this._schemaCollection()
.then(schemaCollection => schemaCollection._collection.insertOne(mongoObject))
@@ -282,7 +282,7 @@ export class MongoStorageAdapter {
schema = convertParseSchemaToMongoSchema(schema);
return this._adaptiveCollection(className)
.then(collection => {
let mongoWhere = transformWhere(className, query, schema);
const mongoWhere = transformWhere(className, query, schema);
return collection.deleteMany(mongoWhere)
})
.then(({ result }) => {
@@ -327,9 +327,9 @@ export class MongoStorageAdapter {
// Executes a find. Accepts: className, query in Parse format, and { skip, limit, sort }.
find(className, schema, query, { skip, limit, sort, keys }) {
schema = convertParseSchemaToMongoSchema(schema);
let mongoWhere = transformWhere(className, query, schema);
let mongoSort = _.mapKeys(sort, (value, fieldName) => transformKey(className, fieldName, schema));
let mongoKeys = _.reduce(keys, (memo, key) => {
const mongoWhere = transformWhere(className, query, schema);
const mongoSort = _.mapKeys(sort, (value, fieldName) => transformKey(className, fieldName, schema));
const mongoKeys = _.reduce(keys, (memo, key) => {
memo[transformKey(className, key, schema)] = 1;
return memo;
}, {});
@@ -351,8 +351,8 @@ export class MongoStorageAdapter {
// which is why we use sparse indexes.
ensureUniqueness(className, schema, fieldNames) {
schema = convertParseSchemaToMongoSchema(schema);
let indexCreationRequest = {};
let mongoFieldNames = fieldNames.map(fieldName => transformKey(className, fieldName, schema));
const indexCreationRequest = {};
const mongoFieldNames = fieldNames.map(fieldName => transformKey(className, fieldName, schema));
mongoFieldNames.forEach(fieldName => {
indexCreationRequest[fieldName] = 1;
});

View File

@@ -243,9 +243,9 @@ function transformQueryKeyValue(className, key, value, schema) {
// restWhere is the "where" clause in REST API form.
// Returns the mongo form of the query.
function transformWhere(className, restWhere, schema) {
let mongoWhere = {};
for (let restKey in restWhere) {
let out = transformQueryKeyValue(className, restKey, restWhere[restKey], schema);
const mongoWhere = {};
for (const restKey in restWhere) {
const out = transformQueryKeyValue(className, restKey, restWhere[restKey], schema);
mongoWhere[out.key] = out.value;
}
return mongoWhere;
@@ -331,12 +331,12 @@ const parseObjectKeyValueToMongoObjectKeyValue = (restKey, restValue, schema) =>
const parseObjectToMongoObjectForCreate = (className, restCreate, schema) => {
restCreate = addLegacyACL(restCreate);
let mongoCreate = {}
for (let restKey in restCreate) {
const mongoCreate = {}
for (const restKey in restCreate) {
if (restCreate[restKey] && restCreate[restKey].__type === 'Relation') {
continue;
}
let { key, value } = parseObjectKeyValueToMongoObjectKeyValue(
const { key, value } = parseObjectKeyValueToMongoObjectKeyValue(
restKey,
restCreate[restKey],
schema
@@ -361,8 +361,8 @@ const parseObjectToMongoObjectForCreate = (className, restCreate, schema) => {
// Main exposed method to help update old objects.
const transformUpdate = (className, restUpdate, parseFormatSchema) => {
let mongoUpdate = {};
let acl = addLegacyACL(restUpdate);
const mongoUpdate = {};
const acl = addLegacyACL(restUpdate);
if (acl._rperm || acl._wperm || acl._acl) {
mongoUpdate.$set = {};
if (acl._rperm) {
@@ -398,8 +398,8 @@ const transformUpdate = (className, restUpdate, parseFormatSchema) => {
// Add the legacy _acl format.
const addLegacyACL = restObject => {
let restObjectCopy = {...restObject};
let _acl = {};
const restObjectCopy = {...restObject};
const _acl = {};
if (restObject._wperm) {
restObject._wperm.forEach(entry => {
@@ -532,12 +532,12 @@ function transformConstraint(constraint, inArray) {
case '$in':
case '$nin': {
let arr = constraint[key];
const arr = constraint[key];
if (!(arr instanceof Array)) {
throw new Parse.Error(Parse.Error.INVALID_JSON, 'bad ' + key + ' value');
}
answer[key] = arr.map(value => {
let result = inArray ? transformInteriorAtom(value) : transformTopLevelAtom(value);
const result = inArray ? transformInteriorAtom(value) : transformTopLevelAtom(value);
if (result === CannotTransform) {
throw new Parse.Error(Parse.Error.INVALID_JSON, `bad atom: ${value}`);
}
@@ -546,7 +546,7 @@ function transformConstraint(constraint, inArray) {
break;
}
case '$all': {
let arr = constraint[key];
const arr = constraint[key];
if (!(arr instanceof Array)) {
throw new Parse.Error(Parse.Error.INVALID_JSON,
'bad ' + key + ' value');
@@ -761,7 +761,7 @@ const mongoObjectToParseObject = (className, mongoObject, schema) => {
return BytesCoder.databaseToJSON(mongoObject);
}
let restObject = {};
const restObject = {};
if (mongoObject._rperm || mongoObject._wperm) {
restObject._rperm = mongoObject._rperm || [];
restObject._wperm = mongoObject._wperm || [];
@@ -861,7 +861,7 @@ const mongoObjectToParseObject = (className, mongoObject, schema) => {
}
const relationFieldNames = Object.keys(schema.fields).filter(fieldName => schema.fields[fieldName].type === 'Relation');
let relationFields = {};
const relationFields = {};
relationFieldNames.forEach(relationFieldName => {
relationFields[relationFieldName] = {
__type: 'Relation',

View File

@@ -2,7 +2,6 @@ const pgp = require('pg-promise')();
const parser = require('./PostgresConfigParser');
export function createClient(uri, databaseOptions) {
let client;
let dbOptions = {};
databaseOptions = databaseOptions || {};
@@ -14,7 +13,7 @@ export function createClient(uri, databaseOptions) {
dbOptions[key] = databaseOptions[key];
}
client = pgp(dbOptions);
const client = pgp(dbOptions);
if (dbOptions.pgOptions) {
for (const key in dbOptions.pgOptions) {

View File

@@ -11,7 +11,7 @@ const logger = require('../../../logger');
const debug = function(){
let args = [...arguments];
args = ['PG: '+arguments[0]].concat(args.slice(1, args.length));
let log = logger.getLogger();
const log = logger.getLogger();
log.debug.apply(log, args);
}
@@ -116,8 +116,8 @@ const toPostgresSchema = (schema) => {
const handleDotFields = (object) => {
Object.keys(object).forEach(fieldName => {
if (fieldName.indexOf('.') > -1) {
let components = fieldName.split('.');
let first = components.shift();
const components = fieldName.split('.');
const first = components.shift();
object[first] = object[first] || {};
let currentObj = object[first];
let next;
@@ -156,7 +156,7 @@ const validateKeys = (object) => {
// Returns the list of join tables on a schema
const joinTablesForSchema = (schema) => {
let list = [];
const list = [];
if (schema) {
Object.keys(schema.fields).forEach((field) => {
if (schema.fields[field].type === 'Relation') {
@@ -168,17 +168,17 @@ const joinTablesForSchema = (schema) => {
}
const buildWhereClause = ({ schema, query, index }) => {
let patterns = [];
const patterns = [];
let values = [];
let sorts = [];
const sorts = [];
schema = toPostgresSchema(schema);
for (let fieldName in query) {
let isArrayField = schema.fields
for (const fieldName in query) {
const isArrayField = schema.fields
&& schema.fields[fieldName]
&& schema.fields[fieldName].type === 'Array';
let initialPatternsLength = patterns.length;
let fieldValue = query[fieldName];
const initialPatternsLength = patterns.length;
const fieldValue = query[fieldName];
// nothing in the schema, it's gonna blow up
if (!schema.fields[fieldName]) {
@@ -189,7 +189,7 @@ const buildWhereClause = ({ schema, query, index }) => {
}
if (fieldName.indexOf('.') >= 0) {
let components = fieldName.split('.').map((cmpt, index) => {
const components = fieldName.split('.').map((cmpt, index) => {
if (index === 0) {
return `"${cmpt}"`;
}
@@ -211,17 +211,17 @@ const buildWhereClause = ({ schema, query, index }) => {
values.push(fieldName, fieldValue);
index += 2;
} else if (fieldName === '$or' || fieldName === '$and') {
let clauses = [];
let clauseValues = [];
const clauses = [];
const clauseValues = [];
fieldValue.forEach((subQuery) => {
let clause = buildWhereClause({ schema, query: subQuery, index });
const clause = buildWhereClause({ schema, query: subQuery, index });
if (clause.pattern.length > 0) {
clauses.push(clause.pattern);
clauseValues.push(...clause.values);
index += clause.values.length;
}
});
let orOrAnd = fieldName === '$or' ? ' OR ' : ' AND ';
const orOrAnd = fieldName === '$or' ? ' OR ' : ' AND ';
patterns.push(`(${clauses.join(orOrAnd)})`);
values.push(...clauseValues);
}
@@ -254,7 +254,7 @@ const buildWhereClause = ({ schema, query, index }) => {
isArrayField &&
schema.fields[fieldName].contents &&
schema.fields[fieldName].contents.type === 'String') {
let inPatterns = [];
const inPatterns = [];
let allowNull = false;
values.push(fieldName);
fieldValue.$in.forEach((listElem, listIndex) => {
@@ -274,13 +274,13 @@ const buildWhereClause = ({ schema, query, index }) => {
} else if (isInOrNin) {
var createConstraint = (baseArray, notIn) => {
if (baseArray.length > 0) {
let not = notIn ? ' NOT ' : '';
const not = notIn ? ' NOT ' : '';
if (isArrayField) {
patterns.push(`${not} array_contains($${index}:name, $${index+1})`);
values.push(fieldName, JSON.stringify(baseArray));
index += 2;
} else {
let inPatterns = [];
const inPatterns = [];
values.push(fieldName);
baseArray.forEach((listElem, listIndex) => {
values.push(listElem);
@@ -320,9 +320,9 @@ const buildWhereClause = ({ schema, query, index }) => {
}
if (fieldValue.$nearSphere) {
let point = fieldValue.$nearSphere;
let distance = fieldValue.$maxDistance;
let distanceInKM = distance*6371*1000;
const point = fieldValue.$nearSphere;
const distance = fieldValue.$maxDistance;
const distanceInKM = distance*6371*1000;
patterns.push(`ST_distance_sphere($${index}:name::geometry, POINT($${index+1}, $${index+2})::geometry) <= $${index+3}`);
sorts.push(`ST_distance_sphere($${index}:name::geometry, POINT($${index+1}, $${index+2})::geometry) ASC`)
values.push(fieldName, point.longitude, point.latitude, distanceInKM);
@@ -330,11 +330,11 @@ const buildWhereClause = ({ schema, query, index }) => {
}
if (fieldValue.$within && fieldValue.$within.$box) {
let box = fieldValue.$within.$box;
let left = box[0].longitude;
let bottom = box[0].latitude;
let right = box[1].longitude;
let top = box[1].latitude;
const box = fieldValue.$within.$box;
const left = box[0].longitude;
const bottom = box[0].latitude;
const right = box[1].longitude;
const top = box[1].latitude;
patterns.push(`$${index}:name::point <@ $${index+1}::box`);
values.push(fieldName, `((${left}, ${bottom}), (${right}, ${top}))`);
@@ -344,7 +344,7 @@ const buildWhereClause = ({ schema, query, index }) => {
if (fieldValue.$regex) {
let regex = fieldValue.$regex;
let operator = '~';
let opts = fieldValue.$options;
const opts = fieldValue.$options;
if (opts) {
if (opts.indexOf('i') >= 0) {
operator = '~*';
@@ -381,7 +381,7 @@ const buildWhereClause = ({ schema, query, index }) => {
Object.keys(ParseToPosgresComparator).forEach(cmp => {
if (fieldValue[cmp]) {
let pgComparator = ParseToPosgresComparator[cmp];
const pgComparator = ParseToPosgresComparator[cmp];
patterns.push(`$${index}:name ${pgComparator} $${index + 1}`);
values.push(fieldName, toPostgresValue(fieldValue[cmp]));
index += 2;
@@ -461,9 +461,9 @@ export class PostgresStorageAdapter {
createTable(className, schema, conn) {
conn = conn || this._client;
debug('createTable', className, schema);
let valuesArray = [];
let patternsArray = [];
let fields = Object.assign({}, schema.fields);
const valuesArray = [];
const patternsArray = [];
const fields = Object.assign({}, schema.fields);
if (className === '_User') {
fields._email_verify_token_expires_at = {type: 'Date'};
fields._email_verify_token = {type: 'String'};
@@ -475,9 +475,9 @@ export class PostgresStorageAdapter {
fields._password_history = { type: 'Array'};
}
let index = 2;
let relations = [];
const relations = [];
Object.keys(fields).forEach((fieldName) => {
let parseType = fields[fieldName];
const parseType = fields[fieldName];
// Skip when it's a relation
// We'll create the tables later
if (parseType.type === 'Relation') {
@@ -557,7 +557,7 @@ export class PostgresStorageAdapter {
// and resolves with false if it wasn't (eg. a join table). Rejects if deletion was impossible.
deleteClass(className) {
return Promise.resolve().then(() => {
let operations = [[`DROP TABLE IF EXISTS $1:name`, [className]],
const operations = [[`DROP TABLE IF EXISTS $1:name`, [className]],
[`DELETE FROM "_SCHEMA" WHERE "className"=$1`, [className]]];
return this._client.tx(t=>t.batch(operations.map(statement=>t.none(statement[0], statement[1]))));
}).then(() => {
@@ -568,11 +568,11 @@ export class PostgresStorageAdapter {
// Delete all data known to this adapter. Used for testing.
deleteAllClasses() {
let now = new Date().getTime();
const now = new Date().getTime();
debug('deleteAllClasses');
return this._client.any('SELECT * FROM "_SCHEMA"')
.then(results => {
let joins = results.reduce((list, schema) => {
const joins = results.reduce((list, schema) => {
return list.concat(joinTablesForSchema(schema.schema));
}, []);
const classes = ['_SCHEMA','_PushStatus','_JobStatus','_Hooks','_GlobalConfig', ...results.map(result => result.className), ...joins];
@@ -607,7 +607,7 @@ export class PostgresStorageAdapter {
return Promise.resolve()
.then(() => {
fieldNames = fieldNames.reduce((list, fieldName) => {
let field = schema.fields[fieldName]
const field = schema.fields[fieldName]
if (field.type !== 'Relation') {
list.push(fieldName);
}
@@ -615,13 +615,13 @@ export class PostgresStorageAdapter {
return list;
}, []);
let values = [className, ...fieldNames];
let columns = fieldNames.map((name, idx) => {
const values = [className, ...fieldNames];
const columns = fieldNames.map((name, idx) => {
return `$${idx+2}:name`;
}).join(',');
let doBatch = (t) => {
let batch = [
const doBatch = (t) => {
const batch = [
t.none('UPDATE "_SCHEMA" SET "schema"=$<schema> WHERE "className"=$<className>', {schema, className})
];
if (values.length > 1) {
@@ -663,9 +663,9 @@ export class PostgresStorageAdapter {
createObject(className, schema, object) {
debug('createObject', className, object);
let columnsArray = [];
let valuesArray = [];
const valuesArray = [];
schema = toPostgresSchema(schema);
let geoPoints = {};
const geoPoints = {};
object = handleDotFields(object);
@@ -747,9 +747,9 @@ export class PostgresStorageAdapter {
});
columnsArray = columnsArray.concat(Object.keys(geoPoints));
let initialValues = valuesArray.map((val, index) => {
const initialValues = valuesArray.map((val, index) => {
let termination = '';
let fieldName = columnsArray[index];
const fieldName = columnsArray[index];
if (['_rperm','_wperm'].indexOf(fieldName) >= 0) {
termination = '::text[]';
} else if (schema.fields[fieldName] && schema.fields[fieldName].type === 'Array') {
@@ -757,18 +757,18 @@ export class PostgresStorageAdapter {
}
return `$${index + 2 + columnsArray.length}${termination}`;
});
let geoPointsInjects = Object.keys(geoPoints).map((key) => {
let value = geoPoints[key];
const geoPointsInjects = Object.keys(geoPoints).map((key) => {
const value = geoPoints[key];
valuesArray.push(value.longitude, value.latitude);
let l = valuesArray.length + columnsArray.length;
const l = valuesArray.length + columnsArray.length;
return `POINT($${l}, $${l+1})`;
});
let columnsPattern = columnsArray.map((col, index) => `$${index + 2}:name`).join(',');
let valuesPattern = initialValues.concat(geoPointsInjects).join(',')
const columnsPattern = columnsArray.map((col, index) => `$${index + 2}:name`).join(',');
const valuesPattern = initialValues.concat(geoPointsInjects).join(',')
let qs = `INSERT INTO $1:name (${columnsPattern}) VALUES (${valuesPattern})`
let values = [className, ...columnsArray, ...valuesArray]
const qs = `INSERT INTO $1:name (${columnsPattern}) VALUES (${valuesPattern})`
const values = [className, ...columnsArray, ...valuesArray]
debug(qs, values);
return this._client.any(qs, values)
.then(() => ({ ops: [object] }))
@@ -786,14 +786,14 @@ export class PostgresStorageAdapter {
// If there is some other error, reject with INTERNAL_SERVER_ERROR.
deleteObjectsByQuery(className, schema, query) {
debug('deleteObjectsByQuery', className, query);
let values = [className];
let index = 2;
let where = buildWhereClause({ schema, index, query })
const values = [className];
const index = 2;
const where = buildWhereClause({ schema, index, query })
values.push(...where.values);
if (Object.keys(query).length === 0) {
where.pattern = 'TRUE';
}
let qs = `WITH deleted AS (DELETE FROM $1:name WHERE ${where.pattern} RETURNING *) SELECT count(*) FROM deleted`;
const qs = `WITH deleted AS (DELETE FROM $1:name WHERE ${where.pattern} RETURNING *) SELECT count(*) FROM deleted`;
debug(qs, values);
return this._client.one(qs, values , a => +a.count)
.then(count => {
@@ -813,8 +813,8 @@ export class PostgresStorageAdapter {
// Apply the update to all objects that match the given Parse Query.
updateObjectsByQuery(className, schema, query, update) {
debug('updateObjectsByQuery', className, query, update);
let updatePatterns = [];
let values = [className]
const updatePatterns = [];
const values = [className]
let index = 2;
schema = toPostgresSchema(schema);
@@ -822,19 +822,19 @@ export class PostgresStorageAdapter {
update = handleDotFields(update);
// Resolve authData first,
// So we don't end up with multiple key updates
for (let fieldName in update) {
let authDataMatch = fieldName.match(/^_auth_data_([a-zA-Z0-9_]+)$/);
for (const fieldName in update) {
const authDataMatch = fieldName.match(/^_auth_data_([a-zA-Z0-9_]+)$/);
if (authDataMatch) {
var provider = authDataMatch[1];
let value = update[fieldName];
const value = update[fieldName];
delete update[fieldName];
update['authData'] = update['authData'] || {};
update['authData'][provider] = value;
}
}
for (let fieldName in update) {
let fieldValue = update[fieldName];
for (const fieldName in update) {
const fieldValue = update[fieldName];
if (fieldValue === null) {
updatePatterns.push(`$${index}:name = NULL`);
values.push(fieldName);
@@ -842,15 +842,15 @@ export class PostgresStorageAdapter {
} else if (fieldName == 'authData') {
// This recursively sets the json_object
// Only 1 level deep
let generate = (jsonb, key, value) => {
const generate = (jsonb, key, value) => {
return `json_object_set_key(COALESCE(${jsonb}, '{}'::jsonb), ${key}, ${value})::jsonb`;
}
let lastKey = `$${index}:name`;
let fieldNameIndex = index;
const lastKey = `$${index}:name`;
const fieldNameIndex = index;
index+=1;
values.push(fieldName);
let update = Object.keys(fieldValue).reduce((lastKey, key) => {
let str = generate(lastKey, `$${index}::text`, `$${index+1}::jsonb`)
const update = Object.keys(fieldValue).reduce((lastKey, key) => {
const str = generate(lastKey, `$${index}::text`, `$${index+1}::jsonb`)
index+=2;
let value = fieldValue[key];
if (value) {
@@ -941,12 +941,12 @@ export class PostgresStorageAdapter {
} else if (Array.isArray(fieldValue)
&& schema.fields[fieldName]
&& schema.fields[fieldName].type === 'Array') {
let expectedType = parseTypeToPostgresType(schema.fields[fieldName]);
const expectedType = parseTypeToPostgresType(schema.fields[fieldName]);
if (expectedType === 'text[]') {
updatePatterns.push(`$${index}:name = $${index + 1}::text[]`);
} else {
let type = 'text';
for (let elt of fieldValue) {
for (const elt of fieldValue) {
if (typeof elt == 'object') {
type = 'json';
break;
@@ -962,10 +962,10 @@ export class PostgresStorageAdapter {
}
}
let where = buildWhereClause({ schema, index, query })
const where = buildWhereClause({ schema, index, query })
values.push(...where.values);
let qs = `UPDATE $1:name SET ${updatePatterns.join(',')} WHERE ${where.pattern} RETURNING *`;
const qs = `UPDATE $1:name SET ${updatePatterns.join(',')} WHERE ${where.pattern} RETURNING *`;
debug('update: ', qs, values);
return this._client.any(qs, values); // TODO: This is unsafe, verification is needed, or a different query method;
}
@@ -973,7 +973,7 @@ export class PostgresStorageAdapter {
// Hopefully, we can get rid of this. It's only used for config and hooks.
upsertOneObject(className, schema, query, update) {
debug('upsertOneObject', {className, query, update});
let createValue = Object.assign({}, query, update);
const createValue = Object.assign({}, query, update);
return this.createObject(className, schema, createValue).catch((err) => {
// ignore duplicate value errors as it's upsert
if (err.code === Parse.Error.DUPLICATE_VALUE) {
@@ -988,7 +988,7 @@ export class PostgresStorageAdapter {
const hasLimit = limit !== undefined;
const hasSkip = skip !== undefined;
let values = [className];
let where = buildWhereClause({ schema, query, index: 2 })
const where = buildWhereClause({ schema, query, index: 2 })
values.push(...where.values);
const wherePattern = where.pattern.length > 0 ? `WHERE ${where.pattern}` : '';
@@ -1003,7 +1003,7 @@ export class PostgresStorageAdapter {
let sortPattern = '';
if (sort) {
let sorting = Object.keys(sort).map((key) => {
const sorting = Object.keys(sort).map((key) => {
// Using $idx pattern gives: non-integer constant in ORDER BY
if (sort[key] === 1) {
return `"${key}" ASC`;
@@ -1085,7 +1085,7 @@ export class PostgresStorageAdapter {
object._password_changed_at = { __type: 'Date', iso: object._password_changed_at.toISOString() };
}
for (let fieldName in object) {
for (const fieldName in object) {
if (object[fieldName] === null) {
delete object[fieldName];
}
@@ -1125,8 +1125,8 @@ export class PostgresStorageAdapter {
// Executes a count.
count(className, schema, query) {
debug('count', className, query);
let values = [className];
let where = buildWhereClause({ schema, query, index: 2 });
const values = [className];
const where = buildWhereClause({ schema, query, index: 2 });
values.push(...where.values);
const wherePattern = where.pattern.length > 0 ? `WHERE ${where.pattern}` : '';
@@ -1140,7 +1140,7 @@ export class PostgresStorageAdapter {
}
performInitialization({ VolatileClassesSchemas }) {
let now = new Date().getTime();
const now = new Date().getTime();
debug('performInitialization');
let promises = VolatileClassesSchemas.map((schema) => {
return this.createTable(schema.className, schema).catch((err) =>{