build: release

This commit is contained in:
Manuel Trezza
2022-03-18 15:16:09 +01:00
44 changed files with 2175 additions and 1953 deletions

View File

@@ -14,20 +14,23 @@ const authData = {
const { Parse } = require('parse/node');
const crypto = require('crypto');
const https = require('https');
const url = require('url');
const cache = {}; // (publicKey -> cert) cache
function verifyPublicKeyUrl(publicKeyUrl) {
const parsedUrl = url.parse(publicKeyUrl);
if (parsedUrl.protocol !== 'https:') {
try {
const parsedUrl = new URL(publicKeyUrl);
if (parsedUrl.protocol !== 'https:') {
return false;
}
const hostnameParts = parsedUrl.hostname.split('.');
const length = hostnameParts.length;
const domainParts = hostnameParts.slice(length - 2, length);
const domain = domainParts.join('.');
return domain === 'apple.com';
} catch (error) {
return false;
}
const hostnameParts = parsedUrl.hostname.split('.');
const length = hostnameParts.length;
const domainParts = hostnameParts.slice(length - 2, length);
const domain = domainParts.join('.');
return domain === 'apple.com';
}
function convertX509CertToPEM(X509Cert) {

View File

@@ -54,7 +54,6 @@
*/
const Parse = require('parse/node').Parse;
const url = require('url');
const querystring = require('querystring');
const httpsRequest = require('./httpsRequest');
@@ -112,7 +111,7 @@ function requestTokenInfo(options, access_token) {
if (!options || !options.tokenIntrospectionEndpointUrl) {
throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, MISSING_URL);
}
const parsedUrl = url.parse(options.tokenIntrospectionEndpointUrl);
const parsedUrl = new URL(options.tokenIntrospectionEndpointUrl);
const postData = querystring.stringify({
token: access_token,
});

View File

@@ -1,6 +1,6 @@
/**
GridFSBucketAdapter
Stores files in Mongo using GridStore
Stores files in Mongo using GridFS
Requires the database adapter to be based on mongoclient
@flow weak

View File

@@ -1,181 +1,4 @@
/**
GridStoreAdapter
Stores files in Mongo using GridStore
Requires the database adapter to be based on mongoclient
(GridStore is deprecated, please use GridFSBucket instead)
@flow weak
*/
// @flow-disable-next
import { MongoClient, GridStore, Db } from 'mongodb';
import { FilesAdapter, validateFilename } from './FilesAdapter';
import defaults from '../../defaults';
// Legacy FilesAdapter that stores file blobs in MongoDB through the
// deprecated GridStore API. Superseded by GridFSBucketAdapter; kept here
// only for reference.
export class GridStoreAdapter extends FilesAdapter {
_databaseURI: string;
// Lazily-created, shared connection promise (see _connect below).
_connectionPromise: Promise<Db>;
_mongoOptions: Object;
// mongoDatabaseURI: Mongo connection string (defaults to the server-wide
// default URI). mongoOptions are merged OVER the defaults below and passed
// straight to MongoClient.connect.
constructor(mongoDatabaseURI = defaults.DefaultMongoURI, mongoOptions = {}) {
super();
this._databaseURI = mongoDatabaseURI;
const defaultMongoOptions = {
useNewUrlParser: true,
useUnifiedTopology: true,
};
this._mongoOptions = Object.assign(defaultMongoOptions, mongoOptions);
}
// Opens (at most once) a client connection and resolves with the Db handle.
// Subsequent calls reuse the cached promise, so concurrent callers share
// one connection attempt.
_connect() {
if (!this._connectionPromise) {
this._connectionPromise = MongoClient.connect(this._databaseURI, this._mongoOptions).then(
client => {
// NOTE(review): this._client is assigned here but never declared as a
// class property above — used later by handleShutdown.
this._client = client;
// client.s.options.dbName reaches into driver internals to recover the
// db name from the URI — presumably stable for the pinned driver
// version; confirm before upgrading mongodb.
return client.db(client.s.options.dbName);
}
);
}
return this._connectionPromise;
}
// For a given config object, filename, and data, store a file
// Returns a promise
// NOTE(review): the chain below assumes gridStore.open()/write() each
// resolve with the GridStore instance so the next step can keep using it —
// confirm against the mongodb driver's GridStore docs.
createFile(filename: string, data) {
return this._connect()
.then(database => {
const gridStore = new GridStore(database, filename, 'w');
return gridStore.open();
})
.then(gridStore => {
return gridStore.write(data);
})
.then(gridStore => {
return gridStore.close();
});
}
// Removes the stored file (unlink), then closes the GridStore handle.
deleteFile(filename: string) {
return this._connect()
.then(database => {
const gridStore = new GridStore(database, filename, 'r');
return gridStore.open();
})
.then(gridStore => {
return gridStore.unlink();
})
.then(gridStore => {
return gridStore.close();
});
}
// Reads the whole file into memory and resolves with its contents.
// NOTE(review): the GridStore.exist result is ignored (`.then(() => …)`);
// a missing file is only surfaced if exist/open rejects — verify.
getFileData(filename: string) {
return this._connect()
.then(database => {
return GridStore.exist(database, filename).then(() => {
const gridStore = new GridStore(database, filename, 'r');
return gridStore.open();
});
})
.then(gridStore => {
return gridStore.read();
});
}
// Builds the public download URL for a stored file; the filename is
// URI-encoded so it is safe to embed in a path segment.
getFileLocation(config, filename) {
return config.mount + '/files/' + config.applicationId + '/' + encodeURIComponent(filename);
}
// Serves an HTTP Range request for the file by opening a GridStore read
// handle and delegating the partial-content streaming to handleRangeRequest.
async handleFileStream(filename: string, req, res, contentType) {
const stream = await this._connect().then(database => {
return GridStore.exist(database, filename).then(() => {
const gridStore = new GridStore(database, filename, 'r');
return gridStore.open();
});
});
handleRangeRequest(stream, req, res, contentType);
}
// Closes the underlying MongoClient if a connection was ever opened;
// resolves immediately otherwise.
handleShutdown() {
if (!this._client) {
return Promise.resolve();
}
return this._client.close(false);
}
// Delegates to the shared filename validation from FilesAdapter.
validateFilename(filename) {
return validateFilename(filename);
}
}
// handleRangeRequest is licensed under Creative Commons Attribution 4.0 International License (https://creativecommons.org/licenses/by/4.0/).
// Author: LEROIB at weightingformypizza (https://weightingformypizza.wordpress.com/2015/06/24/stream-html5-media-content-like-video-audio-from-mongodb-using-express-and-gridstore/).
function handleRangeRequest(stream, req, res, contentType) {
const buffer_size = 1024 * 1024; //1024Kb
// Range request, partial stream the file
const parts = req
.get('Range')
.replace(/bytes=/, '')
.split('-');
let [start, end] = parts;
const notEnded = !end && end !== 0;
const notStarted = !start && start !== 0;
// No end provided, we want all bytes
if (notEnded) {
end = stream.length - 1;
}
// No start provided, we're reading backwards
if (notStarted) {
start = stream.length - end;
end = start + end - 1;
}
// Data exceeds the buffer_size, cap
if (end - start >= buffer_size) {
end = start + buffer_size - 1;
}
const contentLength = end - start + 1;
res.writeHead(206, {
'Content-Range': 'bytes ' + start + '-' + end + '/' + stream.length,
'Accept-Ranges': 'bytes',
'Content-Length': contentLength,
'Content-Type': contentType,
});
stream.seek(start, function () {
// Get gridFile stream
const gridFileStream = stream.stream(true);
let bufferAvail = 0;
let remainingBytesToWrite = contentLength;
let totalBytesWritten = 0;
// Write to response
gridFileStream.on('data', function (data) {
bufferAvail += data.length;
if (bufferAvail > 0) {
// slice returns the same buffer if overflowing
// safe to call in any case
const buffer = data.slice(0, remainingBytesToWrite);
// Write the buffer
res.write(buffer);
// Increment total
totalBytesWritten += buffer.length;
// Decrement remaining
remainingBytesToWrite -= data.length;
// Decrement the available buffer
bufferAvail -= buffer.length;
}
// In case of small slices, all values will be good at that point
// we've written enough, end...
if (totalBytesWritten >= contentLength) {
stream.close();
res.end();
this.destroy();
}
});
});
}
export default GridStoreAdapter;
// Note: GridStore was replaced by GridFSBucketAdapter by default in 2018 by @flovilmart
throw new Error(
'GridStoreAdapter: GridStore is no longer supported by parse server and mongodb, use GridFSBucketAdapter instead.'
);

View File

@@ -177,7 +177,7 @@ class MongoSchemaCollection {
insertSchema(schema: any) {
return this._collection
.insertOne(schema)
.then(result => mongoSchemaToParseSchema(result.ops[0]))
.then(() => mongoSchemaToParseSchema(schema))
.catch(error => {
if (error.code === 11000) {
//Mongo's duplicate key error

View File

@@ -479,6 +479,7 @@ export class MongoStorageAdapter implements StorageAdapter {
const mongoObject = parseObjectToMongoObjectForCreate(className, object, schema);
return this._adaptiveCollection(className)
.then(collection => collection.insertOne(mongoObject, transactionalSession))
.then(() => ({ ops: [mongoObject] }))
.catch(error => {
if (error.code === 11000) {
// Duplicate value
@@ -517,8 +518,8 @@ export class MongoStorageAdapter implements StorageAdapter {
})
.catch(err => this.handleError(err))
.then(
({ result }) => {
if (result.n === 0) {
({ deletedCount }) => {
if (deletedCount === 0) {
throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'Object not found.');
}
return Promise.resolve();

View File

@@ -2,6 +2,7 @@ import log from '../../../logger';
import _ from 'lodash';
var mongodb = require('mongodb');
var Parse = require('parse/node').Parse;
const Utils = require('../../../Utils');
const transformKey = (className, fieldName, schema) => {
// Check if the schema is known since it's a built-in field.
@@ -634,133 +635,6 @@ function transformTopLevelAtom(atom, field) {
}
}
function relativeTimeToDate(text, now = new Date()) {
text = text.toLowerCase();
let parts = text.split(' ');
// Filter out whitespace
parts = parts.filter(part => part !== '');
const future = parts[0] === 'in';
const past = parts[parts.length - 1] === 'ago';
if (!future && !past && text !== 'now') {
return {
status: 'error',
info: "Time should either start with 'in' or end with 'ago'",
};
}
if (future && past) {
return {
status: 'error',
info: "Time cannot have both 'in' and 'ago'",
};
}
// strip the 'ago' or 'in'
if (future) {
parts = parts.slice(1);
} else {
// past
parts = parts.slice(0, parts.length - 1);
}
if (parts.length % 2 !== 0 && text !== 'now') {
return {
status: 'error',
info: 'Invalid time string. Dangling unit or number.',
};
}
const pairs = [];
while (parts.length) {
pairs.push([parts.shift(), parts.shift()]);
}
let seconds = 0;
for (const [num, interval] of pairs) {
const val = Number(num);
if (!Number.isInteger(val)) {
return {
status: 'error',
info: `'${num}' is not an integer.`,
};
}
switch (interval) {
case 'yr':
case 'yrs':
case 'year':
case 'years':
seconds += val * 31536000; // 365 * 24 * 60 * 60
break;
case 'wk':
case 'wks':
case 'week':
case 'weeks':
seconds += val * 604800; // 7 * 24 * 60 * 60
break;
case 'd':
case 'day':
case 'days':
seconds += val * 86400; // 24 * 60 * 60
break;
case 'hr':
case 'hrs':
case 'hour':
case 'hours':
seconds += val * 3600; // 60 * 60
break;
case 'min':
case 'mins':
case 'minute':
case 'minutes':
seconds += val * 60;
break;
case 'sec':
case 'secs':
case 'second':
case 'seconds':
seconds += val;
break;
default:
return {
status: 'error',
info: `Invalid interval: '${interval}'`,
};
}
}
const milliseconds = seconds * 1000;
if (future) {
return {
status: 'success',
info: 'future',
result: new Date(now.valueOf() + milliseconds),
};
} else if (past) {
return {
status: 'success',
info: 'past',
result: new Date(now.valueOf() - milliseconds),
};
} else {
return {
status: 'success',
info: 'present',
result: new Date(now.valueOf()),
};
}
}
// Transforms a query constraint from REST API format to Mongo format.
// A constraint is something with fields like $lt.
// If it is not a valid constraint but it could be a valid something
@@ -813,7 +687,7 @@ function transformConstraint(constraint, field, count = false) {
);
}
const parserResult = relativeTimeToDate(val.$relativeTime);
const parserResult = Utils.relativeTimeToDate(val.$relativeTime);
if (parserResult.status === 'success') {
answer[key] = parserResult.result;
break;
@@ -1556,7 +1430,6 @@ module.exports = {
transformUpdate,
transformWhere,
mongoObjectToParseObject,
relativeTimeToDate,
transformConstraint,
transformPointerString,
};

View File

@@ -1,18 +1,16 @@
const url = require('url');
const fs = require('fs');
function getDatabaseOptionsFromURI(uri) {
const databaseOptions = {};
const parsedURI = url.parse(uri);
const queryParams = parseQueryParams(parsedURI.query);
const authParts = parsedURI.auth ? parsedURI.auth.split(':') : [];
const parsedURI = new URL(uri);
const queryParams = parseQueryParams(parsedURI.searchParams.toString());
databaseOptions.host = parsedURI.hostname || 'localhost';
databaseOptions.port = parsedURI.port ? parseInt(parsedURI.port) : 5432;
databaseOptions.database = parsedURI.pathname ? parsedURI.pathname.substr(1) : undefined;
databaseOptions.user = authParts.length > 0 ? authParts[0] : '';
databaseOptions.password = authParts.length > 1 ? authParts[1] : '';
databaseOptions.user = parsedURI.username;
databaseOptions.password = parsedURI.password;
if (queryParams.ssl && queryParams.ssl.toLowerCase() === 'true') {
databaseOptions.ssl = true;

View File

@@ -7,12 +7,14 @@ import _ from 'lodash';
// @flow-disable-next
import { v4 as uuidv4 } from 'uuid';
import sql from './sql';
import { StorageAdapter } from '../StorageAdapter';
import type { SchemaType, QueryType, QueryOptions } from '../StorageAdapter';
const Utils = require('../../../Utils');
const PostgresRelationDoesNotExistError = '42P01';
const PostgresDuplicateRelationError = '42P07';
const PostgresDuplicateColumnError = '42701';
const PostgresMissingColumnError = '42703';
const PostgresDuplicateObjectError = '42710';
const PostgresUniqueIndexViolationError = '23505';
const logger = require('../../../logger');
@@ -22,9 +24,6 @@ const debug = function (...args: any) {
log.debug.apply(log, args);
};
import { StorageAdapter } from '../StorageAdapter';
import type { SchemaType, QueryType, QueryOptions } from '../StorageAdapter';
const parseTypeToPostgresType = type => {
switch (type.type) {
case 'String':
@@ -374,6 +373,11 @@ const buildWhereClause = ({ schema, query, index, caseInsensitive }): WhereClaus
patterns.push(
`(${constraintFieldName} <> $${index} OR ${constraintFieldName} IS NULL)`
);
} else if (typeof fieldValue.$ne === 'object' && fieldValue.$ne.$relativeTime) {
throw new Parse.Error(
Parse.Error.INVALID_JSON,
'$relativeTime can only be used with the $lt, $lte, $gt, and $gte operators'
);
} else {
patterns.push(`($${index}:name <> $${index + 1} OR $${index}:name IS NULL)`);
}
@@ -399,6 +403,11 @@ const buildWhereClause = ({ schema, query, index, caseInsensitive }): WhereClaus
if (fieldName.indexOf('.') >= 0) {
values.push(fieldValue.$eq);
patterns.push(`${transformDotField(fieldName)} = $${index++}`);
} else if (typeof fieldValue.$eq === 'object' && fieldValue.$eq.$relativeTime) {
throw new Parse.Error(
Parse.Error.INVALID_JSON,
'$relativeTime can only be used with the $lt, $lte, $gt, and $gte operators'
);
} else {
values.push(fieldName, fieldValue.$eq);
patterns.push(`$${index}:name = $${index + 1}`);
@@ -513,7 +522,12 @@ const buildWhereClause = ({ schema, query, index, caseInsensitive }): WhereClaus
}
if (typeof fieldValue.$exists !== 'undefined') {
if (fieldValue.$exists) {
if (typeof fieldValue.$exists === 'object' && fieldValue.$exists.$relativeTime) {
throw new Parse.Error(
Parse.Error.INVALID_JSON,
'$relativeTime can only be used with the $lt, $lte, $gt, and $gte operators'
);
} else if (fieldValue.$exists) {
patterns.push(`$${index}:name IS NOT NULL`);
} else {
patterns.push(`$${index}:name IS NULL`);
@@ -757,7 +771,7 @@ const buildWhereClause = ({ schema, query, index, caseInsensitive }): WhereClaus
Object.keys(ParseToPosgresComparator).forEach(cmp => {
if (fieldValue[cmp] || fieldValue[cmp] === 0) {
const pgComparator = ParseToPosgresComparator[cmp];
const postgresValue = toPostgresValue(fieldValue[cmp]);
let postgresValue = toPostgresValue(fieldValue[cmp]);
let constraintFieldName;
if (fieldName.indexOf('.') >= 0) {
let castType;
@@ -775,6 +789,24 @@ const buildWhereClause = ({ schema, query, index, caseInsensitive }): WhereClaus
? `CAST ((${transformDotField(fieldName)}) AS ${castType})`
: transformDotField(fieldName);
} else {
if (typeof postgresValue === 'object' && postgresValue.$relativeTime) {
if (schema.fields[fieldName].type !== 'Date') {
throw new Parse.Error(
Parse.Error.INVALID_JSON,
'$relativeTime can only be used with Date field'
);
}
const parserResult = Utils.relativeTimeToDate(postgresValue.$relativeTime);
if (parserResult.status === 'success') {
postgresValue = toPostgresValue(parserResult.result);
} else {
console.error('Error while parsing relative date', parserResult);
throw new Parse.Error(
Parse.Error.INVALID_JSON,
`bad $relativeTime (${postgresValue.$relativeTime}) value. ${parserResult.info}`
);
}
}
constraintFieldName = `$${index++}:name`;
values.push(fieldName);
}
@@ -873,15 +905,7 @@ export class PostgresStorageAdapter implements StorageAdapter {
'CREATE TABLE IF NOT EXISTS "_SCHEMA" ( "className" varChar(120), "schema" jsonb, "isParseClass" bool, PRIMARY KEY ("className") )'
)
.catch(error => {
if (
error.code === PostgresDuplicateRelationError ||
error.code === PostgresUniqueIndexViolationError ||
error.code === PostgresDuplicateObjectError
) {
// Table already exists, must have been created by a different request. Ignore error.
} else {
throw error;
}
throw error;
});
}
@@ -2416,6 +2440,11 @@ export class PostgresStorageAdapter implements StorageAdapter {
? fieldNames.map((fieldName, index) => `lower($${index + 3}:name) varchar_pattern_ops`)
: fieldNames.map((fieldName, index) => `$${index + 3}:name`);
const qs = `CREATE INDEX IF NOT EXISTS $1:name ON $2:name (${constraintPatterns.join()})`;
const setIdempotencyFunction =
options.setIdempotencyFunction !== undefined ? options.setIdempotencyFunction : false;
if (setIdempotencyFunction) {
await this.ensureIdempotencyFunctionExists(options);
}
await conn.none(qs, [indexNameOptions.name, className, ...fieldNames]).catch(error => {
if (
error.code === PostgresDuplicateRelationError &&
@@ -2436,6 +2465,24 @@ export class PostgresStorageAdapter implements StorageAdapter {
}
});
}
async deleteIdempotencyFunction(options?: Object = {}): Promise<any> {
const conn = options.conn !== undefined ? options.conn : this._client;
const qs = 'DROP FUNCTION IF EXISTS idempotency_delete_expired_records()';
return conn.none(qs).catch(error => {
throw error;
});
}
async ensureIdempotencyFunctionExists(options?: Object = {}): Promise<any> {
const conn = options.conn !== undefined ? options.conn : this._client;
const ttlOptions = options.ttl !== undefined ? `${options.ttl} seconds` : '60 seconds';
const qs =
'CREATE OR REPLACE FUNCTION idempotency_delete_expired_records() RETURNS void LANGUAGE plpgsql AS $$ BEGIN DELETE FROM "_Idempotency" WHERE expire < NOW() - INTERVAL $1; END; $$;';
return conn.none(qs, [ttlOptions]).catch(error => {
throw error;
});
}
}
function convertPolygonToSQL(polygon) {

View File

@@ -14,6 +14,7 @@ import logger from '../logger';
import * as SchemaController from './SchemaController';
import { StorageAdapter } from '../Adapters/Storage/StorageAdapter';
import MongoStorageAdapter from '../Adapters/Storage/Mongo/MongoStorageAdapter';
import PostgresStorageAdapter from '../Adapters/Storage/Postgres/PostgresStorageAdapter';
import SchemaCache from '../Adapters/Cache/SchemaCache';
import type { LoadSchemaOptions } from './types';
import type { ParseServerOptions } from '../Options';
@@ -364,12 +365,14 @@ class DatabaseController {
schemaPromise: ?Promise<SchemaController.SchemaController>;
_transactionalSession: ?any;
options: ParseServerOptions;
idempotencyOptions: any;
constructor(adapter: StorageAdapter, options: ParseServerOptions) {
this.adapter = adapter;
// We don't want a mutable this.schema, because then you could have
// one request that uses different schemas for different parts of
// it. Instead, use loadSchema to get a schema.
this.options = options || {};
this.idempotencyOptions = this.options.idempotencyOptions || {};
// Prevent mutable this.schema, otherwise one request could use
// multiple schemas, so instead use loadSchema to get a schema.
this.schemaPromise = null;
this._transactionalSession = null;
this.options = options;
@@ -862,7 +865,7 @@ class DatabaseController {
if (object[field] && object[field].__op && object[field].__op === 'Delete') {
return false;
}
return schemaFields.indexOf(field) < 0;
return schemaFields.indexOf(getRootFieldName(field)) < 0;
});
if (newKeys.length > 0) {
// adds a marker that new field is being adding during update
@@ -940,9 +943,9 @@ class DatabaseController {
});
}
if (query['$and']) {
const ors = query['$and'];
const ands = query['$and'];
return Promise.all(
ors.map((aQuery, index) => {
ands.map((aQuery, index) => {
return this.reduceInRelation(className, aQuery, schema).then(aQuery => {
query['$and'][index] = aQuery;
});
@@ -1681,9 +1684,7 @@ class DatabaseController {
};
await this.loadSchema().then(schema => schema.enforceClassExists('_User'));
await this.loadSchema().then(schema => schema.enforceClassExists('_Role'));
if (this.adapter instanceof MongoStorageAdapter) {
await this.loadSchema().then(schema => schema.enforceClassExists('_Idempotency'));
}
await this.loadSchema().then(schema => schema.enforceClassExists('_Idempotency'));
await this.adapter.ensureUniqueness('_User', requiredUserFields, ['username']).catch(error => {
logger.warn('Unable to ensure uniqueness for usernames: ', error);
@@ -1719,18 +1720,28 @@ class DatabaseController {
logger.warn('Unable to ensure uniqueness for role name: ', error);
throw error;
});
if (this.adapter instanceof MongoStorageAdapter) {
await this.adapter
.ensureUniqueness('_Idempotency', requiredIdempotencyFields, ['reqId'])
.catch(error => {
logger.warn('Unable to ensure uniqueness for idempotency request ID: ', error);
throw error;
});
await this.adapter
.ensureIndex('_Idempotency', requiredIdempotencyFields, ['expire'], 'ttl', false, {
await this.adapter
.ensureUniqueness('_Idempotency', requiredIdempotencyFields, ['reqId'])
.catch(error => {
logger.warn('Unable to ensure uniqueness for idempotency request ID: ', error);
throw error;
});
const isMongoAdapter = this.adapter instanceof MongoStorageAdapter;
const isPostgresAdapter = this.adapter instanceof PostgresStorageAdapter;
if (isMongoAdapter || isPostgresAdapter) {
let options = {};
if (isMongoAdapter) {
options = {
ttl: 0,
})
};
} else if (isPostgresAdapter) {
options = this.idempotencyOptions;
options.setIdempotencyFunction = true;
}
await this.adapter
.ensureIndex('_Idempotency', requiredIdempotencyFields, ['expire'], 'ttl', false, options)
.catch(error => {
logger.warn('Unable to create TTL index for idempotency expire date: ', error);
throw error;

View File

@@ -1,7 +1,6 @@
import { Parse } from 'parse/node';
import AdaptableController from './AdaptableController';
import { LoggerAdapter } from '../Adapters/Logger/LoggerAdapter';
import url from 'url';
const MILLISECONDS_IN_A_DAY = 24 * 60 * 60 * 1000;
const LOG_STRING_TRUNCATE_LENGTH = 1000;
@@ -38,15 +37,16 @@ export class LoggerController extends AdaptableController {
});
}
maskSensitiveUrl(urlString) {
const urlObj = url.parse(urlString, true);
const query = urlObj.query;
maskSensitiveUrl(path) {
const urlString = 'http://localhost' + path; // prepend dummy string to make a real URL
const urlObj = new URL(urlString);
const query = urlObj.searchParams;
let sanitizedQuery = '?';
for (const key in query) {
for (const [key, value] of query) {
if (key !== 'password') {
// normal value
sanitizedQuery += key + '=' + query[key] + '&';
sanitizedQuery += key + '=' + value + '&';
} else {
// password value, redact it
sanitizedQuery += key + '=' + '********' + '&';

View File

@@ -2,7 +2,6 @@ import authDataManager from '../Adapters/Auth';
import { ParseServerOptions } from '../Options';
import { loadAdapter } from '../Adapters/AdapterLoader';
import defaults from '../defaults';
import url from 'url';
// Controllers
import { LoggerController } from './LoggerController';
import { FilesController } from './FilesController';
@@ -220,13 +219,14 @@ export function getAuthDataManager(options: ParseServerOptions) {
export function getDatabaseAdapter(databaseURI, collectionPrefix, databaseOptions) {
let protocol;
try {
const parsedURI = url.parse(databaseURI);
const parsedURI = new URL(databaseURI);
protocol = parsedURI.protocol ? parsedURI.protocol.toLowerCase() : null;
} catch (e) {
/* */
}
switch (protocol) {
case 'postgres:':
case 'postgresql:':
return new PostgresStorageAdapter({
uri: databaseURI,
collectionPrefix,

View File

@@ -1,7 +1,6 @@
const Config = require('./Config');
const Auth = require('./Auth');
const RESTController = require('parse/lib/node/RESTController');
const URL = require('url');
const Parse = require('parse/node');
function getSessionToken(options) {
@@ -38,9 +37,9 @@ function ParseServerRESTController(applicationId, router) {
if (!config) {
config = Config.get(applicationId);
}
const serverURL = URL.parse(config.serverURL);
if (path.indexOf(serverURL.path) === 0) {
path = path.slice(serverURL.path.length, path.length);
const serverURL = new URL(config.serverURL);
if (path.indexOf(serverURL.pathname) === 0) {
path = path.slice(serverURL.pathname.length, path.length);
}
if (path[0] !== '/') {

View File

@@ -83,7 +83,8 @@ export class ClassesRouter extends PromiseRouter {
this.className(req),
req.params.objectId,
options,
req.info.clientSDK
req.info.clientSDK,
req.info.context
)
.then(response => {
if (!response.results || response.results.length == 0) {

View File

@@ -201,6 +201,138 @@ class Utils {
}
}
/**
* Computes the relative date based on a string.
* @param {String} text The string to interpret the date from.
* @param {Date} now The date the string is comparing against.
* @returns {Object} The relative date object.
**/
static relativeTimeToDate(text, now = new Date()) {
text = text.toLowerCase();
let parts = text.split(' ');
// Filter out whitespace
parts = parts.filter(part => part !== '');
const future = parts[0] === 'in';
const past = parts[parts.length - 1] === 'ago';
if (!future && !past && text !== 'now') {
return {
status: 'error',
info: "Time should either start with 'in' or end with 'ago'",
};
}
if (future && past) {
return {
status: 'error',
info: "Time cannot have both 'in' and 'ago'",
};
}
// strip the 'ago' or 'in'
if (future) {
parts = parts.slice(1);
} else {
// past
parts = parts.slice(0, parts.length - 1);
}
if (parts.length % 2 !== 0 && text !== 'now') {
return {
status: 'error',
info: 'Invalid time string. Dangling unit or number.',
};
}
const pairs = [];
while (parts.length) {
pairs.push([parts.shift(), parts.shift()]);
}
let seconds = 0;
for (const [num, interval] of pairs) {
const val = Number(num);
if (!Number.isInteger(val)) {
return {
status: 'error',
info: `'${num}' is not an integer.`,
};
}
switch (interval) {
case 'yr':
case 'yrs':
case 'year':
case 'years':
seconds += val * 31536000; // 365 * 24 * 60 * 60
break;
case 'wk':
case 'wks':
case 'week':
case 'weeks':
seconds += val * 604800; // 7 * 24 * 60 * 60
break;
case 'd':
case 'day':
case 'days':
seconds += val * 86400; // 24 * 60 * 60
break;
case 'hr':
case 'hrs':
case 'hour':
case 'hours':
seconds += val * 3600; // 60 * 60
break;
case 'min':
case 'mins':
case 'minute':
case 'minutes':
seconds += val * 60;
break;
case 'sec':
case 'secs':
case 'second':
case 'seconds':
seconds += val;
break;
default:
return {
status: 'error',
info: `Invalid interval: '${interval}'`,
};
}
}
const milliseconds = seconds * 1000;
if (future) {
return {
status: 'success',
info: 'future',
result: new Date(now.valueOf() + milliseconds),
};
} else if (past) {
return {
status: 'success',
info: 'past',
result: new Date(now.valueOf() - milliseconds),
};
} else {
return {
status: 'success',
info: 'present',
result: new Date(now.valueOf()),
};
}
}
/**
* Deep-scans an object for a matching key/value definition.
* @param {Object} obj The object to scan.

View File

@@ -1,5 +1,4 @@
const Parse = require('parse/node').Parse;
const url = require('url');
const path = require('path');
// These methods handle batch requests.
const batchPath = '/batch';
@@ -11,11 +10,12 @@ function mountOnto(router) {
});
}
function parseURL(URL) {
if (typeof URL === 'string') {
return url.parse(URL);
function parseURL(urlString) {
try {
return new URL(urlString);
} catch (error) {
return undefined;
}
return undefined;
}
function makeBatchRoutingPathFunction(originalUrl, serverURL, publicServerURL) {
@@ -33,9 +33,9 @@ function makeBatchRoutingPathFunction(originalUrl, serverURL, publicServerURL) {
return path.posix.join('/', requestPath.slice(apiPrefix.length));
};
if (serverURL && publicServerURL && serverURL.path != publicServerURL.path) {
const localPath = serverURL.path;
const publicPath = publicServerURL.path;
if (serverURL && publicServerURL && serverURL.pathname != publicServerURL.pathname) {
const localPath = serverURL.pathname;
const publicPath = publicServerURL.pathname;
// Override the api prefix
apiPrefix = localPath;

View File

@@ -6,6 +6,7 @@ import ClientSDK from './ClientSDK';
import defaultLogger from './logger';
import rest from './rest';
import MongoStorageAdapter from './Adapters/Storage/Mongo/MongoStorageAdapter';
import PostgresStorageAdapter from './Adapters/Storage/Postgres/PostgresStorageAdapter';
export const DEFAULT_ALLOWED_HEADERS =
'X-Parse-Master-Key, X-Parse-REST-API-Key, X-Parse-Javascript-Key, X-Parse-Application-Id, X-Parse-Client-Version, X-Parse-Session-Token, X-Requested-With, X-Parse-Revocable-Session, X-Parse-Request-Id, Content-Type, Pragma, Cache-Control';
@@ -431,7 +432,12 @@ export function promiseEnforceMasterKeyAccess(request) {
*/
export function promiseEnsureIdempotency(req) {
// Enable feature only for MongoDB and PostgreSQL
if (!(req.config.database.adapter instanceof MongoStorageAdapter)) {
if (
!(
req.config.database.adapter instanceof MongoStorageAdapter ||
req.config.database.adapter instanceof PostgresStorageAdapter
)
) {
return Promise.resolve();
}
// Get parameters