Use Prettier JS (#5017)

* Adds prettier

* Run lint before tests
This commit is contained in:
Florent Vilmart
2018-09-01 13:58:06 -04:00
committed by GitHub
parent 189cd259ee
commit d83a0b6808
240 changed files with 41098 additions and 29020 deletions

View File

@@ -16,10 +16,10 @@ export function loadAdapter<T>(adapter, defaultAdapter, options): T {
}
// Load from the default adapter when no adapter is set
return loadAdapter(defaultAdapter, undefined, options);
} else if (typeof adapter === "function") {
} else if (typeof adapter === 'function') {
try {
return adapter(options);
} catch(e) {
} catch (e) {
if (e.name === 'TypeError') {
var Adapter = adapter;
return new Adapter(options);
@@ -27,7 +27,7 @@ export function loadAdapter<T>(adapter, defaultAdapter, options): T {
throw e;
}
}
} else if (typeof adapter === "string") {
} else if (typeof adapter === 'string') {
/* eslint-disable */
adapter = require(adapter);
// If it's defined as a module, get the default

View File

@@ -6,7 +6,6 @@
* @interface AnalyticsAdapter
*/
export class AnalyticsAdapter {
/**
@param {any} parameters: the analytics request body, analytics info will be in the dimensions property
@param {Request} req: the original http request

View File

@@ -1,6 +1,5 @@
/*eslint no-unused-vars: "off"*/
export class AuthAdapter {
/*
@param appIds: the specified app ids in the configuration
@param authData: the client provided authData

View File

@@ -3,8 +3,11 @@ var https = require('https'),
var Parse = require('parse/node').Parse;
var OAuth = function(options) {
if(!options) {
throw new Parse.Error(Parse.Error.INTERNAL_SERVER_ERROR, 'No options passed to OAuth');
if (!options) {
throw new Parse.Error(
Parse.Error.INTERNAL_SERVER_ERROR,
'No options passed to OAuth'
);
}
this.consumer_key = options.consumer_key;
this.consumer_secret = options.consumer_secret;
@@ -14,23 +17,24 @@ var OAuth = function(options) {
this.oauth_params = options.oauth_params || {};
};
OAuth.prototype.send = function(method, path, params, body){
OAuth.prototype.send = function(method, path, params, body) {
var request = this.buildRequest(method, path, params, body);
// Encode the body properly; the current Parse implementation doesn't do it properly
return new Promise(function(resolve, reject) {
var httpRequest = https.request(request, function(res) {
var data = '';
res.on('data', function(chunk) {
data += chunk;
var httpRequest = https
.request(request, function(res) {
var data = '';
res.on('data', function(chunk) {
data += chunk;
});
res.on('end', function() {
data = JSON.parse(data);
resolve(data);
});
})
.on('error', function() {
reject('Failed to make an OAuth request');
});
res.on('end', function() {
data = JSON.parse(data);
resolve(data);
});
}).on('error', function() {
reject('Failed to make an OAuth request');
});
if (request.body) {
httpRequest.write(request.body);
}
@@ -39,40 +43,45 @@ OAuth.prototype.send = function(method, path, params, body){
};
OAuth.prototype.buildRequest = function(method, path, params, body) {
if (path.indexOf("/") != 0) {
path = "/" + path;
if (path.indexOf('/') != 0) {
path = '/' + path;
}
if (params && Object.keys(params).length > 0) {
path += "?" + OAuth.buildParameterString(params);
path += '?' + OAuth.buildParameterString(params);
}
var request = {
host: this.host,
path: path,
method: method.toUpperCase()
host: this.host,
path: path,
method: method.toUpperCase(),
};
var oauth_params = this.oauth_params || {};
oauth_params.oauth_consumer_key = this.consumer_key;
if(this.auth_token){
oauth_params["oauth_token"] = this.auth_token;
if (this.auth_token) {
oauth_params['oauth_token'] = this.auth_token;
}
request = OAuth.signRequest(request, oauth_params, this.consumer_secret, this.auth_token_secret);
request = OAuth.signRequest(
request,
oauth_params,
this.consumer_secret,
this.auth_token_secret
);
if (body && Object.keys(body).length > 0) {
request.body = OAuth.buildParameterString(body);
}
return request;
}
};
OAuth.prototype.get = function(path, params) {
return this.send("GET", path, params);
}
return this.send('GET', path, params);
};
OAuth.prototype.post = function(path, params, body) {
return this.send("POST", path, params, body);
}
return this.send('POST', path, params, body);
};
/*
Proper string %escape encoding
@@ -99,8 +108,7 @@ OAuth.encode = function(str) {
// example 3: rawurlencode('http://www.google.nl/search?q=php.js&ie=utf-8&oe=utf-8&aq=t&rls=com.ubuntu:en-US:unofficial&client=firefox-a');
// returns 3: 'http%3A%2F%2Fwww.google.nl%2Fsearch%3Fq%3Dphp.js%26ie%3Dutf-8%26oe%3Dutf-8%26aq%3Dt%26rls%3Dcom.ubuntu%3Aen-US%3Aunofficial%26client%3Dfirefox-a'
str = (str + '')
.toString();
str = (str + '').toString();
// Tilde should be allowed unescaped in future versions of PHP (as reflected below), but if you want to reflect current
// PHP behavior, you would need to add ".replace(/~/g, '%7E');" to the following.
@@ -110,55 +118,72 @@ OAuth.encode = function(str) {
.replace(/\(/g, '%28')
.replace(/\)/g, '%29')
.replace(/\*/g, '%2A');
}
};
OAuth.signatureMethod = "HMAC-SHA1";
OAuth.version = "1.0";
OAuth.signatureMethod = 'HMAC-SHA1';
OAuth.version = '1.0';
/*
Generate a nonce
*/
OAuth.nonce = function(){
var text = "";
var possible = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
OAuth.nonce = function() {
var text = '';
var possible =
'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';
for(var i = 0; i < 30; i++)
for (var i = 0; i < 30; i++)
text += possible.charAt(Math.floor(Math.random() * possible.length));
return text;
}
};
OAuth.buildParameterString = function(obj){
OAuth.buildParameterString = function(obj) {
// Sort keys and encode values
if (obj) {
var keys = Object.keys(obj).sort();
// Map key=value, join them by &
return keys.map(function(key){
return key + "=" + OAuth.encode(obj[key]);
}).join("&");
return keys
.map(function(key) {
return key + '=' + OAuth.encode(obj[key]);
})
.join('&');
}
return "";
}
return '';
};
/*
Build the signature string from the object
*/
OAuth.buildSignatureString = function(method, url, parameters){
return [method.toUpperCase(), OAuth.encode(url), OAuth.encode(parameters)].join("&");
}
OAuth.buildSignatureString = function(method, url, parameters) {
return [
method.toUpperCase(),
OAuth.encode(url),
OAuth.encode(parameters),
].join('&');
};
/*
Returns encoded HMAC-SHA1 from key and text
*/
OAuth.signature = function(text, key){
crypto = require("crypto");
return OAuth.encode(crypto.createHmac('sha1', key).update(text).digest('base64'));
}
OAuth.signature = function(text, key) {
crypto = require('crypto');
return OAuth.encode(
crypto
.createHmac('sha1', key)
.update(text)
.digest('base64')
);
};
OAuth.signRequest = function(request, oauth_parameters, consumer_secret, auth_token_secret){
OAuth.signRequest = function(
request,
oauth_parameters,
consumer_secret,
auth_token_secret
) {
oauth_parameters = oauth_parameters || {};
// Set default values
@@ -175,20 +200,20 @@ OAuth.signRequest = function(request, oauth_parameters, consumer_secret, auth_to
oauth_parameters.oauth_version = OAuth.version;
}
if(!auth_token_secret){
auth_token_secret = "";
if (!auth_token_secret) {
auth_token_secret = '';
}
// Force GET method if unset
if (!request.method) {
request.method = "GET"
request.method = 'GET';
}
// Collect all the parameters in one signatureParameters object
var signatureParams = {};
var parametersToMerge = [request.params, request.body, oauth_parameters];
for(var i in parametersToMerge) {
for (var i in parametersToMerge) {
var parameters = parametersToMerge[i];
for(var k in parameters) {
for (var k in parameters) {
signatureParams[k] = parameters[k];
}
}
@@ -197,32 +222,41 @@ OAuth.signRequest = function(request, oauth_parameters, consumer_secret, auth_to
var parameterString = OAuth.buildParameterString(signatureParams);
// Build the signature string
var url = "https://" + request.host + "" + request.path;
var url = 'https://' + request.host + '' + request.path;
var signatureString = OAuth.buildSignatureString(request.method, url, parameterString);
var signatureString = OAuth.buildSignatureString(
request.method,
url,
parameterString
);
// Hash the signature string
var signatureKey = [OAuth.encode(consumer_secret), OAuth.encode(auth_token_secret)].join("&");
var signatureKey = [
OAuth.encode(consumer_secret),
OAuth.encode(auth_token_secret),
].join('&');
var signature = OAuth.signature(signatureString, signatureKey);
// Set the signature in the params
oauth_parameters.oauth_signature = signature;
if(!request.headers){
if (!request.headers) {
request.headers = {};
}
// Set the authorization header
var authHeader = Object.keys(oauth_parameters).sort().map(function(key){
var value = oauth_parameters[key];
return key + '="' + value + '"';
}).join(", ")
var authHeader = Object.keys(oauth_parameters)
.sort()
.map(function(key) {
var value = oauth_parameters[key];
return key + '="' + value + '"';
})
.join(', ');
request.headers.Authorization = 'OAuth ' + authHeader;
// Set the content type header
request.headers["Content-Type"] = "application/x-www-form-urlencoded";
request.headers['Content-Type'] = 'application/x-www-form-urlencoded';
return request;
}
};
module.exports = OAuth;

View File

@@ -4,15 +4,17 @@ var Parse = require('parse/node').Parse;
// Returns a promise that fulfills iff this user id is valid.
function validateAuthData(authData) {
return graphRequest('me?fields=id&access_token=' + authData.access_token)
.then((data) => {
if (data && data.id == authData.id) {
return;
}
throw new Parse.Error(
Parse.Error.OBJECT_NOT_FOUND,
'Facebook auth is invalid for this user.');
});
return graphRequest(
'me?fields=id&access_token=' + authData.access_token
).then(data => {
if (data && data.id == authData.id) {
return;
}
throw new Parse.Error(
Parse.Error.OBJECT_NOT_FOUND,
'Facebook auth is invalid for this user.'
);
});
}
// Returns a promise that fulfills iff this app id is valid.
@@ -21,17 +23,18 @@ function validateAppId(appIds, authData) {
if (!appIds.length) {
throw new Parse.Error(
Parse.Error.OBJECT_NOT_FOUND,
'Facebook auth is not configured.');
'Facebook auth is not configured.'
);
}
return graphRequest('app?access_token=' + access_token)
.then((data) => {
if (data && appIds.indexOf(data.id) != -1) {
return;
}
throw new Parse.Error(
Parse.Error.OBJECT_NOT_FOUND,
'Facebook auth is invalid for this user.');
});
return graphRequest('app?access_token=' + access_token).then(data => {
if (data && appIds.indexOf(data.id) != -1) {
return;
}
throw new Parse.Error(
Parse.Error.OBJECT_NOT_FOUND,
'Facebook auth is invalid for this user.'
);
});
}
// A promisey wrapper for FB graph requests.
@@ -41,5 +44,5 @@ function graphRequest(path) {
module.exports = {
validateAppId: validateAppId,
validateAuthData: validateAuthData
validateAuthData: validateAuthData,
};

View File

@@ -1,16 +1,20 @@
const crypto = require('crypto');
const httpsRequest = require('./httpsRequest');
const Parse = require('parse/node').Parse;
const Parse = require('parse/node').Parse;
const graphRequest = (path) => {
const graphRequest = path => {
return httpsRequest.get(`https://graph.accountkit.com/v1.1/${path}`);
};
function getRequestPath(authData, options) {
const access_token = authData.access_token, appSecret = options && options.appSecret;
const access_token = authData.access_token,
appSecret = options && options.appSecret;
if (appSecret) {
const appsecret_proof = crypto.createHmac("sha256", appSecret).update(access_token).digest('hex');
return `me?access_token=${access_token}&appsecret_proof=${appsecret_proof}`
const appsecret_proof = crypto
.createHmac('sha256', appSecret)
.update(access_token)
.digest('hex');
return `me?access_token=${access_token}&appsecret_proof=${appsecret_proof}`;
}
return `me?access_token=${access_token}`;
}
@@ -20,36 +24,37 @@ function validateAppId(appIds, authData, options) {
return Promise.reject(
new Parse.Error(
Parse.Error.OBJECT_NOT_FOUND,
'Facebook app id for Account Kit is not configured.')
)
'Facebook app id for Account Kit is not configured.'
)
);
}
return graphRequest(getRequestPath(authData, options))
.then(data => {
if (data && data.application && appIds.indexOf(data.application.id) != -1) {
return;
}
throw new Parse.Error(
Parse.Error.OBJECT_NOT_FOUND,
'Facebook app id for Account Kit is invalid for this user.');
})
return graphRequest(getRequestPath(authData, options)).then(data => {
if (data && data.application && appIds.indexOf(data.application.id) != -1) {
return;
}
throw new Parse.Error(
Parse.Error.OBJECT_NOT_FOUND,
'Facebook app id for Account Kit is invalid for this user.'
);
});
}
function validateAuthData(authData, options) {
return graphRequest(getRequestPath(authData, options))
.then(data => {
if (data && data.error) {
throw data.error;
}
if (data && data.id == authData.id) {
return;
}
throw new Parse.Error(
Parse.Error.OBJECT_NOT_FOUND,
'Facebook Account Kit auth is invalid for this user.');
})
return graphRequest(getRequestPath(authData, options)).then(data => {
if (data && data.error) {
throw data.error;
}
if (data && data.id == authData.id) {
return;
}
throw new Parse.Error(
Parse.Error.OBJECT_NOT_FOUND,
'Facebook Account Kit auth is invalid for this user.'
);
});
}
module.exports = {
validateAppId,
validateAuthData
validateAuthData,
};

View File

@@ -4,15 +4,15 @@ const httpsRequest = require('./httpsRequest');
// Returns a promise that fulfills iff this user id is valid.
function validateAuthData(authData) {
return request('user', authData.access_token)
.then((data) => {
if (data && data.id == authData.id) {
return;
}
throw new Parse.Error(
Parse.Error.OBJECT_NOT_FOUND,
'Github auth is invalid for this user.');
});
return request('user', authData.access_token).then(data => {
if (data && data.id == authData.id) {
return;
}
throw new Parse.Error(
Parse.Error.OBJECT_NOT_FOUND,
'Github auth is invalid for this user.'
);
});
}
// Returns a promise that fulfills iff this app id is valid.
@@ -26,13 +26,13 @@ function request(path, access_token) {
host: 'api.github.com',
path: '/' + path,
headers: {
'Authorization': 'bearer ' + access_token,
'User-Agent': 'parse-server'
}
Authorization: 'bearer ' + access_token,
'User-Agent': 'parse-server',
},
});
}
module.exports = {
validateAppId: validateAppId,
validateAuthData: validateAuthData
validateAuthData: validateAuthData,
};

View File

@@ -3,27 +3,27 @@ var Parse = require('parse/node').Parse;
const httpsRequest = require('./httpsRequest');
function validateIdToken(id, token) {
return googleRequest("tokeninfo?id_token=" + token)
.then((response) => {
if (response && (response.sub == id || response.user_id == id)) {
return;
}
throw new Parse.Error(
Parse.Error.OBJECT_NOT_FOUND,
'Google auth is invalid for this user.');
});
return googleRequest('tokeninfo?id_token=' + token).then(response => {
if (response && (response.sub == id || response.user_id == id)) {
return;
}
throw new Parse.Error(
Parse.Error.OBJECT_NOT_FOUND,
'Google auth is invalid for this user.'
);
});
}
function validateAuthToken(id, token) {
return googleRequest("tokeninfo?access_token=" + token)
.then((response) => {
if (response && (response.sub == id || response.user_id == id)) {
return;
}
throw new Parse.Error(
Parse.Error.OBJECT_NOT_FOUND,
'Google auth is invalid for this user.');
});
return googleRequest('tokeninfo?access_token=' + token).then(response => {
if (response && (response.sub == id || response.user_id == id)) {
return;
}
throw new Parse.Error(
Parse.Error.OBJECT_NOT_FOUND,
'Google auth is invalid for this user.'
);
});
}
// Returns a promise that fulfills if this user id is valid.
@@ -31,13 +31,16 @@ function validateAuthData(authData) {
if (authData.id_token) {
return validateIdToken(authData.id, authData.id_token);
} else {
return validateAuthToken(authData.id, authData.access_token).then(() => {
// Validation with auth token worked
return;
}, () => {
// Try with the id_token param
return validateIdToken(authData.id, authData.access_token);
});
return validateAuthToken(authData.id, authData.access_token).then(
() => {
// Validation with auth token worked
return;
},
() => {
// Try with the id_token param
return validateIdToken(authData.id, authData.access_token);
}
);
}
}
@@ -48,10 +51,10 @@ function validateAppId() {
// A promisey wrapper for api requests
function googleRequest(path) {
return httpsRequest.get("https://www.googleapis.com/oauth2/v3/" + path);
return httpsRequest.get('https://www.googleapis.com/oauth2/v3/' + path);
}
module.exports = {
validateAppId: validateAppId,
validateAuthData: validateAuthData
validateAuthData: validateAuthData,
};

View File

@@ -3,7 +3,7 @@ const https = require('https');
function makeCallback(resolve, reject, noJSON) {
return function(res) {
let data = '';
res.on('data', (chunk) => {
res.on('data', chunk => {
data += chunk;
});
res.on('end', () => {
@@ -12,7 +12,7 @@ function makeCallback(resolve, reject, noJSON) {
}
try {
data = JSON.parse(data);
} catch(e) {
} catch (e) {
return reject(e);
}
resolve(data);

View File

@@ -2,20 +2,20 @@ import loadAdapter from '../AdapterLoader';
const facebook = require('./facebook');
const facebookaccountkit = require('./facebookaccountkit');
const instagram = require("./instagram");
const linkedin = require("./linkedin");
const meetup = require("./meetup");
const google = require("./google");
const github = require("./github");
const twitter = require("./twitter");
const spotify = require("./spotify");
const digits = require("./twitter"); // digits tokens are validated by twitter
const janrainengage = require("./janrainengage");
const janraincapture = require("./janraincapture");
const vkontakte = require("./vkontakte");
const qq = require("./qq");
const wechat = require("./wechat");
const weibo = require("./weibo");
const instagram = require('./instagram');
const linkedin = require('./linkedin');
const meetup = require('./meetup');
const google = require('./google');
const github = require('./github');
const twitter = require('./twitter');
const spotify = require('./spotify');
const digits = require('./twitter'); // digits tokens are validated by twitter
const janrainengage = require('./janrainengage');
const janraincapture = require('./janraincapture');
const vkontakte = require('./vkontakte');
const qq = require('./qq');
const wechat = require('./wechat');
const weibo = require('./weibo');
const anonymous = {
validateAuthData: () => {
@@ -23,8 +23,8 @@ const anonymous = {
},
validateAppId: () => {
return Promise.resolve();
}
}
},
};
const providers = {
facebook,
@@ -43,8 +43,8 @@ const providers = {
vkontakte,
qq,
wechat,
weibo
}
weibo,
};
function authDataValidator(adapter, appIds, options) {
return function(authData) {
return adapter.validateAuthData(authData, options).then(() => {
@@ -53,7 +53,7 @@ function authDataValidator(adapter, appIds, options) {
}
return Promise.resolve();
});
}
};
}
function loadAuthAdapter(provider, authOptions) {
@@ -69,9 +69,13 @@ function loadAuthAdapter(provider, authOptions) {
// Try the configuration methods
if (providerOptions) {
const optionalAdapter = loadAdapter(providerOptions, undefined, providerOptions);
const optionalAdapter = loadAdapter(
providerOptions,
undefined,
providerOptions
);
if (optionalAdapter) {
['validateAuthData', 'validateAppId'].forEach((key) => {
['validateAuthData', 'validateAppId'].forEach(key => {
if (optionalAdapter[key]) {
adapter[key] = optionalAdapter[key];
}
@@ -83,34 +87,32 @@ function loadAuthAdapter(provider, authOptions) {
return;
}
return {adapter, appIds, providerOptions};
return { adapter, appIds, providerOptions };
}
module.exports = function(authOptions = {}, enableAnonymousUsers = true) {
let _enableAnonymousUsers = enableAnonymousUsers;
const setEnableAnonymousUsers = function(enable) {
_enableAnonymousUsers = enable;
}
};
// To handle the test cases on configuration
const getValidatorForProvider = function(provider) {
if (provider === 'anonymous' && !_enableAnonymousUsers) {
return;
}
const {
adapter,
appIds,
providerOptions
} = loadAuthAdapter(provider, authOptions);
const { adapter, appIds, providerOptions } = loadAuthAdapter(
provider,
authOptions
);
return authDataValidator(adapter, appIds, providerOptions);
}
};
return Object.freeze({
getValidatorForProvider,
setEnableAnonymousUsers
})
}
setEnableAnonymousUsers,
});
};
module.exports.loadAuthAdapter = loadAuthAdapter;

View File

@@ -4,15 +4,17 @@ const httpsRequest = require('./httpsRequest');
// Returns a promise that fulfills iff this user id is valid.
function validateAuthData(authData) {
return request("users/self/?access_token=" + authData.access_token)
.then((response) => {
return request('users/self/?access_token=' + authData.access_token).then(
response => {
if (response && response.data && response.data.id == authData.id) {
return;
}
throw new Parse.Error(
Parse.Error.OBJECT_NOT_FOUND,
'Instagram auth is invalid for this user.');
});
'Instagram auth is invalid for this user.'
);
}
);
}
// Returns a promise that fulfills iff this app id is valid.
@@ -22,10 +24,10 @@ function validateAppId() {
// A promisey wrapper for api requests
function request(path) {
return httpsRequest.get("https://api.instagram.com/v1/" + path);
return httpsRequest.get('https://api.instagram.com/v1/' + path);
}
module.exports = {
validateAppId: validateAppId,
validateAuthData: validateAuthData
validateAuthData: validateAuthData,
};

View File

@@ -5,15 +5,19 @@ const httpsRequest = require('./httpsRequest');
// Returns a promise that fulfills iff this user id is valid.
function validateAuthData(authData, options) {
return request(options.janrain_capture_host, authData.access_token)
.then((data) => {
return request(options.janrain_capture_host, authData.access_token).then(
data => {
//successful response will have a "stat" (status) of 'ok' and a result node that stores the uuid, because that's all we asked for
//see: https://docs.janrain.com/api/registration/entity/#entity
if (data && data.stat == 'ok' && data.result == authData.id) {
return;
}
throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'Janrain capture auth is invalid for this user.');
});
throw new Parse.Error(
Parse.Error.OBJECT_NOT_FOUND,
'Janrain capture auth is invalid for this user.'
);
}
);
}
// Returns a promise that fulfills iff this app id is valid.
@@ -24,10 +28,9 @@ function validateAppId() {
// A promisey wrapper for api requests
function request(host, access_token) {
var query_string_data = querystring.stringify({
'access_token': access_token,
'attribute_name': 'uuid' // we only need to pull the uuid for this access token to make sure it matches
access_token: access_token,
attribute_name: 'uuid', // we only need to pull the uuid for this access token to make sure it matches
});
return httpsRequest.get({ host: host, path: '/entity?' + query_string_data });
@@ -35,5 +38,5 @@ function request(host, access_token) {
module.exports = {
validateAppId: validateAppId,
validateAuthData: validateAuthData
validateAuthData: validateAuthData,
};

View File

@@ -5,15 +5,17 @@ var querystring = require('querystring');
// Returns a promise that fulfills iff this user id is valid.
function validateAuthData(authData, options) {
return apiRequest(options.api_key, authData.auth_token)
.then((data) => {
//successful response will have a "stat" (status) of 'ok' and a profile node with an identifier
//see: http://developers.janrain.com/overview/social-login/identity-providers/user-profile-data/#normalized-user-profile-data
if (data && data.stat == 'ok' && data.profile.identifier == authData.id) {
return;
}
throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'Janrain engage auth is invalid for this user.');
});
return apiRequest(options.api_key, authData.auth_token).then(data => {
//successful response will have a "stat" (status) of 'ok' and a profile node with an identifier
//see: http://developers.janrain.com/overview/social-login/identity-providers/user-profile-data/#normalized-user-profile-data
if (data && data.stat == 'ok' && data.profile.identifier == authData.id) {
return;
}
throw new Parse.Error(
Parse.Error.OBJECT_NOT_FOUND,
'Janrain engage auth is invalid for this user.'
);
});
}
// Returns a promise that fulfills iff this app id is valid.
@@ -24,11 +26,10 @@ function validateAppId() {
// A promisey wrapper for api requests
function apiRequest(api_key, auth_token) {
var post_data = querystring.stringify({
'token': auth_token,
'apiKey': api_key,
'format': 'json'
token: auth_token,
apiKey: api_key,
format: 'json',
});
var post_options = {
@@ -37,8 +38,8 @@ function apiRequest(api_key, auth_token) {
method: 'POST',
headers: {
'Content-Type': 'application/x-www-form-urlencoded',
'Content-Length': post_data.length
}
'Content-Length': post_data.length,
},
};
return httpsRequest.request(post_options, post_data);
@@ -46,5 +47,5 @@ function apiRequest(api_key, auth_token) {
module.exports = {
validateAppId: validateAppId,
validateAuthData: validateAuthData
validateAuthData: validateAuthData,
};

View File

@@ -4,15 +4,19 @@ const httpsRequest = require('./httpsRequest');
// Returns a promise that fulfills iff this user id is valid.
function validateAuthData(authData) {
return request('people/~:(id)', authData.access_token, authData.is_mobile_sdk)
.then((data) => {
if (data && data.id == authData.id) {
return;
}
throw new Parse.Error(
Parse.Error.OBJECT_NOT_FOUND,
'Linkedin auth is invalid for this user.');
});
return request(
'people/~:(id)',
authData.access_token,
authData.is_mobile_sdk
).then(data => {
if (data && data.id == authData.id) {
return;
}
throw new Parse.Error(
Parse.Error.OBJECT_NOT_FOUND,
'Linkedin auth is invalid for this user.'
);
});
}
// Returns a promise that fulfills iff this app id is valid.
@@ -23,21 +27,21 @@ function validateAppId() {
// A promisey wrapper for api requests
function request(path, access_token, is_mobile_sdk) {
var headers = {
'Authorization': 'Bearer ' + access_token,
Authorization: 'Bearer ' + access_token,
'x-li-format': 'json',
}
};
if(is_mobile_sdk) {
if (is_mobile_sdk) {
headers['x-li-src'] = 'msdk';
}
return httpsRequest.get({
host: 'api.linkedin.com',
path: '/v1/' + path,
headers: headers
headers: headers,
});
}
module.exports = {
validateAppId: validateAppId,
validateAuthData: validateAuthData
validateAuthData: validateAuthData,
};

View File

@@ -4,15 +4,15 @@ const httpsRequest = require('./httpsRequest');
// Returns a promise that fulfills iff this user id is valid.
function validateAuthData(authData) {
return request('member/self', authData.access_token)
.then((data) => {
if (data && data.id == authData.id) {
return;
}
throw new Parse.Error(
Parse.Error.OBJECT_NOT_FOUND,
'Meetup auth is invalid for this user.');
});
return request('member/self', authData.access_token).then(data => {
if (data && data.id == authData.id) {
return;
}
throw new Parse.Error(
Parse.Error.OBJECT_NOT_FOUND,
'Meetup auth is invalid for this user.'
);
});
}
// Returns a promise that fulfills iff this app id is valid.
@@ -26,12 +26,12 @@ function request(path, access_token) {
host: 'api.meetup.com',
path: '/2/' + path,
headers: {
'Authorization': 'bearer ' + access_token
}
Authorization: 'bearer ' + access_token,
},
});
}
module.exports = {
validateAppId: validateAppId,
validateAuthData: validateAuthData
validateAuthData: validateAuthData,
};

View File

@@ -4,11 +4,16 @@ var Parse = require('parse/node').Parse;
// Returns a promise that fulfills iff this user id is valid.
function validateAuthData(authData) {
return graphRequest('me?access_token=' + authData.access_token).then(function (data) {
return graphRequest('me?access_token=' + authData.access_token).then(function(
data
) {
if (data && data.openid == authData.id) {
return;
}
throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'qq auth is invalid for this user.');
throw new Parse.Error(
Parse.Error.OBJECT_NOT_FOUND,
'qq auth is invalid for this user.'
);
});
}
@@ -19,18 +24,23 @@ function validateAppId() {
// A promisey wrapper for qq graph requests.
function graphRequest(path) {
return httpsRequest.get('https://graph.qq.com/oauth2.0/' + path, true).then((data) => {
return parseResponseData(data);
});
return httpsRequest
.get('https://graph.qq.com/oauth2.0/' + path, true)
.then(data => {
return parseResponseData(data);
});
}
function parseResponseData(data) {
const starPos = data.indexOf("(");
const endPos = data.indexOf(")");
if(starPos == -1 || endPos == -1){
throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'qq auth is invalid for this user.');
const starPos = data.indexOf('(');
const endPos = data.indexOf(')');
if (starPos == -1 || endPos == -1) {
throw new Parse.Error(
Parse.Error.OBJECT_NOT_FOUND,
'qq auth is invalid for this user.'
);
}
data = data.substring(starPos + 1,endPos - 1);
data = data.substring(starPos + 1, endPos - 1);
return JSON.parse(data);
}

View File

@@ -4,15 +4,15 @@ var Parse = require('parse/node').Parse;
// Returns a promise that fulfills iff this user id is valid.
function validateAuthData(authData) {
return request('me', authData.access_token)
.then((data) => {
if (data && data.id == authData.id) {
return;
}
throw new Parse.Error(
Parse.Error.OBJECT_NOT_FOUND,
'Spotify auth is invalid for this user.');
});
return request('me', authData.access_token).then(data => {
if (data && data.id == authData.id) {
return;
}
throw new Parse.Error(
Parse.Error.OBJECT_NOT_FOUND,
'Spotify auth is invalid for this user.'
);
});
}
// Returns a promise that fulfills if this app id is valid.
@@ -21,17 +21,18 @@ function validateAppId(appIds, authData) {
if (!appIds.length) {
throw new Parse.Error(
Parse.Error.OBJECT_NOT_FOUND,
'Spotify auth is not configured.');
'Spotify auth is not configured.'
);
}
return request('me', access_token)
.then((data) => {
if (data && appIds.indexOf(data.id) != -1) {
return;
}
throw new Parse.Error(
Parse.Error.OBJECT_NOT_FOUND,
'Spotify auth is invalid for this user.');
});
return request('me', access_token).then(data => {
if (data && appIds.indexOf(data.id) != -1) {
return;
}
throw new Parse.Error(
Parse.Error.OBJECT_NOT_FOUND,
'Spotify auth is invalid for this user.'
);
});
}
// A promisey wrapper for Spotify API requests.
@@ -40,12 +41,12 @@ function request(path, access_token) {
host: 'api.spotify.com',
path: '/v1/' + path,
headers: {
'Authorization': 'Bearer ' + access_token
}
Authorization: 'Bearer ' + access_token,
},
});
}
module.exports = {
validateAppId: validateAppId,
validateAuthData: validateAuthData
validateAuthData: validateAuthData,
};

View File

@@ -5,22 +5,26 @@ var logger = require('../../logger').default;
// Returns a promise that fulfills iff this user id is valid.
function validateAuthData(authData, options) {
if(!options) {
throw new Parse.Error(Parse.Error.INTERNAL_SERVER_ERROR, 'Twitter auth configuration missing');
if (!options) {
throw new Parse.Error(
Parse.Error.INTERNAL_SERVER_ERROR,
'Twitter auth configuration missing'
);
}
options = handleMultipleConfigurations(authData, options);
var client = new OAuth(options);
client.host = "api.twitter.com";
client.host = 'api.twitter.com';
client.auth_token = authData.auth_token;
client.auth_token_secret = authData.auth_token_secret;
return client.get("/1.1/account/verify_credentials.json").then((data) => {
return client.get('/1.1/account/verify_credentials.json').then(data => {
if (data && data.id_str == '' + authData.id) {
return;
}
throw new Parse.Error(
Parse.Error.OBJECT_NOT_FOUND,
'Twitter auth is invalid for this user.');
'Twitter auth is invalid for this user.'
);
});
}
@@ -33,16 +37,28 @@ function handleMultipleConfigurations(authData, options) {
if (Array.isArray(options)) {
const consumer_key = authData.consumer_key;
if (!consumer_key) {
logger.error('Twitter Auth', 'Multiple twitter configurations are available, by no consumer_key was sent by the client.');
throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'Twitter auth is invalid for this user.');
logger.error(
'Twitter Auth',
'Multiple twitter configurations are available, by no consumer_key was sent by the client.'
);
throw new Parse.Error(
Parse.Error.OBJECT_NOT_FOUND,
'Twitter auth is invalid for this user.'
);
}
options = options.filter((option) => {
options = options.filter(option => {
return option.consumer_key == consumer_key;
});
if (options.length == 0) {
logger.error('Twitter Auth','Cannot find a configuration for the provided consumer_key');
throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'Twitter auth is invalid for this user.');
logger.error(
'Twitter Auth',
'Cannot find a configuration for the provided consumer_key'
);
throw new Parse.Error(
Parse.Error.OBJECT_NOT_FOUND,
'Twitter auth is invalid for this user.'
);
}
options = options[0];
}
@@ -52,5 +68,5 @@ function handleMultipleConfigurations(authData, options) {
module.exports = {
validateAppId,
validateAuthData,
handleMultipleConfigurations
handleMultipleConfigurations,
};

View File

@@ -8,29 +8,62 @@ var logger = require('../../logger').default;
// Returns a promise that fulfills iff this user id is valid.
function validateAuthData(authData, params) {
return vkOAuth2Request(params).then(function (response) {
return vkOAuth2Request(params).then(function(response) {
if (response && response.access_token) {
return request("api.vk.com", "method/users.get?access_token=" + authData.access_token + "&v=5.8").then(function (response) {
if (response && response.response && response.response.length && response.response[0].id == authData.id) {
return request(
'api.vk.com',
'method/users.get?access_token=' + authData.access_token + '&v=5.8'
).then(function(response) {
if (
response &&
response.response &&
response.response.length &&
response.response[0].id == authData.id
) {
return;
}
throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'Vk auth is invalid for this user.');
throw new Parse.Error(
Parse.Error.OBJECT_NOT_FOUND,
'Vk auth is invalid for this user.'
);
});
}
logger.error('Vk Auth', 'Vk appIds or appSecret is incorrect.');
throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'Vk appIds or appSecret is incorrect.');
throw new Parse.Error(
Parse.Error.OBJECT_NOT_FOUND,
'Vk appIds or appSecret is incorrect.'
);
});
}
function vkOAuth2Request(params) {
return new Promise(function (resolve) {
if (!params || !params.appIds || !params.appIds.length || !params.appSecret || !params.appSecret.length) {
logger.error('Vk Auth', 'Vk auth is not configured. Missing appIds or appSecret.');
throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'Vk auth is not configured. Missing appIds or appSecret.');
return new Promise(function(resolve) {
if (
!params ||
!params.appIds ||
!params.appIds.length ||
!params.appSecret ||
!params.appSecret.length
) {
logger.error(
'Vk Auth',
'Vk auth is not configured. Missing appIds or appSecret.'
);
throw new Parse.Error(
Parse.Error.OBJECT_NOT_FOUND,
'Vk auth is not configured. Missing appIds or appSecret.'
);
}
resolve();
}).then(function () {
return request("oauth.vk.com", "access_token?client_id=" + params.appIds + "&client_secret=" + params.appSecret + "&v=5.59&grant_type=client_credentials");
}).then(function() {
return request(
'oauth.vk.com',
'access_token?client_id=' +
params.appIds +
'&client_secret=' +
params.appSecret +
'&v=5.59&grant_type=client_credentials'
);
});
}
@@ -41,10 +74,10 @@ function validateAppId() {
// A promisey wrapper for api requests
function request(host, path) {
return httpsRequest.get("https://" + host + "/" + path);
return httpsRequest.get('https://' + host + '/' + path);
}
module.exports = {
validateAppId: validateAppId,
validateAuthData: validateAuthData
validateAuthData: validateAuthData,
};

View File

@@ -4,11 +4,16 @@ var Parse = require('parse/node').Parse;
// Returns a promise that fulfills iff this user id is valid.
function validateAuthData(authData) {
return graphRequest('auth?access_token=' + authData.access_token + '&openid=' + authData.id).then(function (data) {
return graphRequest(
'auth?access_token=' + authData.access_token + '&openid=' + authData.id
).then(function(data) {
if (data.errcode == 0) {
return;
}
throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'wechat auth is invalid for this user.');
throw new Parse.Error(
Parse.Error.OBJECT_NOT_FOUND,
'wechat auth is invalid for this user.'
);
});
}
@@ -24,5 +29,5 @@ function graphRequest(path) {
module.exports = {
validateAppId,
validateAuthData
validateAuthData,
};

View File

@@ -5,11 +5,14 @@ var querystring = require('querystring');
// Returns a promise that fulfills iff this user id is valid.
function validateAuthData(authData) {
return graphRequest(authData.access_token).then(function (data) {
return graphRequest(authData.access_token).then(function(data) {
if (data && data.uid == authData.id) {
return;
}
throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'weibo auth is invalid for this user.');
throw new Parse.Error(
Parse.Error.OBJECT_NOT_FOUND,
'weibo auth is invalid for this user.'
);
});
}
@@ -21,7 +24,7 @@ function validateAppId() {
// A promisey wrapper for weibo graph requests.
function graphRequest(access_token) {
var postData = querystring.stringify({
"access_token": access_token
access_token: access_token,
});
var options = {
hostname: 'api.weibo.com',
@@ -29,13 +32,13 @@ function graphRequest(access_token) {
method: 'POST',
headers: {
'Content-Type': 'application/x-www-form-urlencoded',
'Content-Length': Buffer.byteLength(postData)
}
'Content-Length': Buffer.byteLength(postData),
},
};
return httpsRequest.request(options, postData);
}
module.exports = {
validateAppId,
validateAuthData
validateAuthData,
};

View File

@@ -1,10 +1,7 @@
const DEFAULT_CACHE_TTL = 5 * 1000;
export class InMemoryCache {
constructor({
ttl = DEFAULT_CACHE_TTL
}) {
constructor({ ttl = DEFAULT_CACHE_TTL }) {
this.ttl = ttl;
this.cache = Object.create(null);
}
@@ -32,8 +29,8 @@ export class InMemoryCache {
var record = {
value: value,
expire: ttl + Date.now()
}
expire: ttl + Date.now(),
};
if (!isNaN(record.expire)) {
record.timeout = setTimeout(() => {
@@ -59,7 +56,6 @@ export class InMemoryCache {
clear() {
this.cache = Object.create(null);
}
}
export default InMemoryCache;

View File

@@ -1,9 +1,8 @@
import {LRUCache} from './LRUCache';
import { LRUCache } from './LRUCache';
export class InMemoryCacheAdapter {
constructor(ctx) {
this.cache = new LRUCache(ctx)
this.cache = new LRUCache(ctx);
}
get(key) {

View File

@@ -1,14 +1,11 @@
import LRU from 'lru-cache';
import defaults from '../../defaults';
import defaults from '../../defaults';
export class LRUCache {
constructor({
ttl = defaults.cacheTTL,
maxSize = defaults.cacheMaxSize,
}) {
constructor({ ttl = defaults.cacheTTL, maxSize = defaults.cacheMaxSize }) {
this.cache = new LRU({
max: maxSize,
maxAge: ttl
maxAge: ttl,
});
}
@@ -27,7 +24,6 @@ export class LRUCache {
clear() {
this.cache.reset();
}
}
export default LRUCache;

View File

@@ -1,11 +1,10 @@
export class NullCacheAdapter {
constructor() {}
get() {
return new Promise((resolve) => {
return new Promise(resolve => {
return resolve(null);
})
});
}
put() {

View File

@@ -8,7 +8,6 @@ function debug() {
}
export class RedisCacheAdapter {
constructor(redisCtx, ttl = DEFAULT_REDIS_TTL) {
this.client = redis.createClient(redisCtx);
this.p = Promise.resolve();
@@ -18,10 +17,10 @@ export class RedisCacheAdapter {
get(key) {
debug('get', key);
this.p = this.p.then(() => {
return new Promise((resolve) => {
return new Promise(resolve => {
this.client.get(key, function(err, res) {
debug('-> get', key, res);
if(!res) {
if (!res) {
return resolve(null);
}
resolve(JSON.parse(res));
@@ -41,7 +40,7 @@ export class RedisCacheAdapter {
ttl = DEFAULT_REDIS_TTL;
}
this.p = this.p.then(() => {
return new Promise((resolve) => {
return new Promise(resolve => {
if (ttl === Infinity) {
this.client.set(key, value, function() {
resolve();
@@ -59,7 +58,7 @@ export class RedisCacheAdapter {
del(key) {
debug('del', key);
this.p = this.p.then(() => {
return new Promise((resolve) => {
return new Promise(resolve => {
this.client.del(key, function() {
resolve();
});
@@ -71,7 +70,7 @@ export class RedisCacheAdapter {
clear() {
debug('clear');
this.p = this.p.then(() => {
return new Promise((resolve) => {
return new Promise(resolve => {
this.client.flushdb(function() {
resolve();
});

View File

@@ -13,7 +13,7 @@
// and for the API server to be using the DatabaseController with Mongo
// database adapter.
import type { Config } from '../../Config'
import type { Config } from '../../Config';
/**
* @module Adapters
*/
@@ -21,7 +21,6 @@ import type { Config } from '../../Config'
* @interface FilesAdapter
*/
export class FilesAdapter {
/** Responsible for storing the file in order to be retrieved later by its filename
*
* @param {string} filename - the filename to save
@@ -31,7 +30,7 @@ export class FilesAdapter {
*
* @return {Promise} a promise that should fail if the storage didn't succeed
*/
createFile(filename: string, data, contentType: string): Promise { }
createFile(filename: string, data, contentType: string): Promise {}
/** Responsible for deleting the specified file
*
@@ -39,7 +38,7 @@ export class FilesAdapter {
*
* @return {Promise} a promise that should fail if the deletion didn't succeed
*/
deleteFile(filename: string): Promise { }
deleteFile(filename: string): Promise {}
/** Responsible for retrieving the data of the specified file
*
@@ -47,7 +46,7 @@ export class FilesAdapter {
*
* @return {Promise} a promise that should pass with the file data or fail on error
*/
getFileData(filename: string): Promise<any> { }
getFileData(filename: string): Promise<any> {}
/** Returns an absolute URL where the file can be accessed
*
@@ -56,7 +55,7 @@ export class FilesAdapter {
*
* @return {string} Absolute URL
*/
getFileLocation(config: Config, filename: string): string { }
getFileLocation(config: Config, filename: string): string {}
}
export default FilesAdapter;

View File

@@ -7,9 +7,9 @@
*/
// @flow-disable-next
import { MongoClient, GridStore, Db} from 'mongodb';
import { FilesAdapter } from './FilesAdapter';
import defaults from '../../defaults';
import { MongoClient, GridStore, Db } from 'mongodb';
import { FilesAdapter } from './FilesAdapter';
import defaults from '../../defaults';
export class GridStoreAdapter extends FilesAdapter {
_databaseURI: string;
@@ -22,8 +22,9 @@ export class GridStoreAdapter extends FilesAdapter {
_connect() {
if (!this._connectionPromise) {
this._connectionPromise = MongoClient.connect(this._databaseURI)
.then((client) => client.db(client.s.options.dbName));
this._connectionPromise = MongoClient.connect(this._databaseURI).then(
client => client.db(client.s.options.dbName)
);
}
return this._connectionPromise;
}
@@ -31,41 +32,54 @@ export class GridStoreAdapter extends FilesAdapter {
// For a given config object, filename, and data, store a file
// Returns a promise
createFile(filename: string, data) {
return this._connect().then((database) => {
const gridStore = new GridStore(database, filename, 'w');
return gridStore.open();
}).then(gridStore => {
return gridStore.write(data);
}).then(gridStore => {
return gridStore.close();
});
return this._connect()
.then(database => {
const gridStore = new GridStore(database, filename, 'w');
return gridStore.open();
})
.then(gridStore => {
return gridStore.write(data);
})
.then(gridStore => {
return gridStore.close();
});
}
deleteFile(filename: string) {
return this._connect().then(database => {
const gridStore = new GridStore(database, filename, 'r');
return gridStore.open();
}).then((gridStore) => {
return gridStore.unlink();
}).then((gridStore) => {
return gridStore.close();
});
return this._connect()
.then(database => {
const gridStore = new GridStore(database, filename, 'r');
return gridStore.open();
})
.then(gridStore => {
return gridStore.unlink();
})
.then(gridStore => {
return gridStore.close();
});
}
getFileData(filename: string) {
return this._connect().then(database => {
return GridStore.exist(database, filename)
.then(() => {
return this._connect()
.then(database => {
return GridStore.exist(database, filename).then(() => {
const gridStore = new GridStore(database, filename, 'r');
return gridStore.open();
});
}).then(gridStore => {
return gridStore.read();
});
})
.then(gridStore => {
return gridStore.read();
});
}
getFileLocation(config, filename) {
return (config.mount + '/files/' + config.applicationId + '/' + encodeURIComponent(filename));
return (
config.mount +
'/files/' +
config.applicationId +
'/' +
encodeURIComponent(filename)
);
}
getFileStream(filename: string) {

View File

@@ -16,7 +16,7 @@ export class LoggerAdapter {
* @param {String} message
* @param {Object} metadata
*/
log(level, message, /* meta */) {}
log(level, message /* meta */) {}
}
export default LoggerAdapter;

View File

@@ -3,7 +3,7 @@ import fs from 'fs';
import path from 'path';
import DailyRotateFile from 'winston-daily-rotate-file';
import _ from 'lodash';
import defaults from '../../defaults';
import defaults from '../../defaults';
const logger = new winston.Logger();
const additionalTransports = [];
@@ -17,31 +17,47 @@ function updateTransports(options) {
delete transports['parse-server'];
delete transports['parse-server-error'];
} else if (!_.isUndefined(options.dirname)) {
transports['parse-server'] = new (DailyRotateFile)(
Object.assign({}, {
filename: 'parse-server.info',
name: 'parse-server',
}, options, { timestamp: true }));
transports['parse-server-error'] = new (DailyRotateFile)(
Object.assign({}, {
filename: 'parse-server.err',
name: 'parse-server-error',
}, options, { level: 'error', timestamp: true }));
transports['parse-server'] = new DailyRotateFile(
Object.assign(
{},
{
filename: 'parse-server.info',
name: 'parse-server',
},
options,
{ timestamp: true }
)
);
transports['parse-server-error'] = new DailyRotateFile(
Object.assign(
{},
{
filename: 'parse-server.err',
name: 'parse-server-error',
},
options,
{ level: 'error', timestamp: true }
)
);
}
transports.console = new (winston.transports.Console)(
Object.assign({
colorize: true,
name: 'console',
silent
}, options));
transports.console = new winston.transports.Console(
Object.assign(
{
colorize: true,
name: 'console',
silent,
},
options
)
);
}
// Mount the additional transports
additionalTransports.forEach((transport) => {
additionalTransports.forEach(transport => {
transports[transport.name] = transport;
});
logger.configure({
transports: _.values(transports)
transports: _.values(transports),
});
}
@@ -50,8 +66,8 @@ export function configureLogger({
jsonLogs = defaults.jsonLogs,
logLevel = winston.level,
verbose = defaults.verbose,
silent = defaults.silent } = {}) {
silent = defaults.silent,
} = {}) {
if (verbose) {
logLevel = 'verbose';
}
@@ -65,7 +81,9 @@ export function configureLogger({
}
try {
fs.mkdirSync(logsFolder);
} catch (e) { /* */ }
} catch (e) {
/* */
}
}
options.dirname = logsFolder;
options.level = logLevel;
@@ -84,13 +102,14 @@ export function addTransport(transport) {
}
export function removeTransport(transport) {
const transportName = typeof transport == 'string' ? transport : transport.name;
const transportName =
typeof transport == 'string' ? transport : transport.name;
const transports = Object.assign({}, logger.transports);
delete transports[transportName];
logger.configure({
transports: _.values(transports)
transports: _.values(transports),
});
_.remove(additionalTransports, (transport) => {
_.remove(additionalTransports, transport => {
return transport.name === transportName;
});
}

View File

@@ -28,7 +28,8 @@ export class WinstonLoggerAdapter extends LoggerAdapter {
options = {};
}
// defaults to 7 days prior
const from = options.from || new Date(Date.now() - (7 * MILLISECONDS_IN_A_DAY));
const from =
options.from || new Date(Date.now() - 7 * MILLISECONDS_IN_A_DAY);
const until = options.until || new Date();
const limit = options.size || 10;
const order = options.order || 'desc';
@@ -38,7 +39,7 @@ export class WinstonLoggerAdapter extends LoggerAdapter {
from,
until,
limit,
order
order,
};
return new Promise((resolve, reject) => {
@@ -54,7 +55,7 @@ export class WinstonLoggerAdapter extends LoggerAdapter {
callback(res['parse-server']);
resolve(res['parse-server']);
}
})
});
});
}
}

View File

@@ -35,9 +35,9 @@ class Consumer extends events.EventEmitter {
subscribe(channel: string): void {
unsubscribe(channel);
const handler = (message) => {
const handler = message => {
this.emit('message', channel, message);
}
};
subscriptions.set(channel, handler);
this.emitter.on(channel, handler);
}
@@ -57,9 +57,7 @@ function createSubscriber(): any {
const EventEmitterMQ = {
createPublisher,
createSubscriber
}
createSubscriber,
};
export {
EventEmitterMQ
}
export { EventEmitterMQ };

View File

@@ -25,9 +25,9 @@ class Subscriber extends events.EventEmitter {
}
subscribe(channel: string): void {
const handler = (message) => {
const handler = message => {
this.emit('message', channel, message);
}
};
this.subscriptions.set(channel, handler);
this.emitter.on(channel, handler);
}
@@ -51,9 +51,7 @@ function createSubscriber(): any {
const EventEmitterPubSub = {
createPublisher,
createSubscriber
}
createSubscriber,
};
export {
EventEmitterPubSub
}
export { EventEmitterPubSub };

View File

@@ -25,7 +25,7 @@ interface Publisher {
* @param {String} channel the channel in which to publish
* @param {String} message the message to publish
*/
publish(channel: string, message: string):void;
publish(channel: string, message: string): void;
}
/**

View File

@@ -1,18 +1,16 @@
import redis from 'redis';
function createPublisher({redisURL}): any {
function createPublisher({ redisURL }): any {
return redis.createClient(redisURL, { no_ready_check: true });
}
function createSubscriber({redisURL}): any {
function createSubscriber({ redisURL }): any {
return redis.createClient(redisURL, { no_ready_check: true });
}
const RedisPubSub = {
createPublisher,
createSubscriber
}
createSubscriber,
};
export {
RedisPubSub
}
export { RedisPubSub };

View File

@@ -31,7 +31,7 @@ export class PushAdapter {
* @returns {Array} An array of valid push types
*/
getValidPushTypes(): string[] {
return []
return [];
}
}

View File

@@ -2,9 +2,9 @@ const mongodb = require('mongodb');
const Collection = mongodb.Collection;
export default class MongoCollection {
_mongoCollection:Collection;
_mongoCollection: Collection;
constructor(mongoCollection:Collection) {
constructor(mongoCollection: Collection) {
this._mongoCollection = mongoCollection;
}
@@ -15,33 +15,58 @@ export default class MongoCollection {
// idea. Or even if this behavior is a good idea.
find(query, { skip, limit, sort, keys, maxTimeMS, readPreference } = {}) {
// Support for Full Text Search - $text
if(keys && keys.$score) {
if (keys && keys.$score) {
delete keys.$score;
keys.score = {$meta: 'textScore'};
keys.score = { $meta: 'textScore' };
}
return this._rawFind(query, { skip, limit, sort, keys, maxTimeMS, readPreference })
.catch(error => {
// Check for "no geoindex" error
if (error.code != 17007 && !error.message.match(/unable to find index for .geoNear/)) {
throw error;
}
// Figure out what key needs an index
const key = error.message.match(/field=([A-Za-z_0-9]+) /)[1];
if (!key) {
throw error;
}
return this._rawFind(query, {
skip,
limit,
sort,
keys,
maxTimeMS,
readPreference,
}).catch(error => {
// Check for "no geoindex" error
if (
error.code != 17007 &&
!error.message.match(/unable to find index for .geoNear/)
) {
throw error;
}
// Figure out what key needs an index
const key = error.message.match(/field=([A-Za-z_0-9]+) /)[1];
if (!key) {
throw error;
}
var index = {};
index[key] = '2d';
return this._mongoCollection.createIndex(index)
var index = {};
index[key] = '2d';
return (
this._mongoCollection
.createIndex(index)
// Retry, but just once.
.then(() => this._rawFind(query, { skip, limit, sort, keys, maxTimeMS, readPreference }));
});
.then(() =>
this._rawFind(query, {
skip,
limit,
sort,
keys,
maxTimeMS,
readPreference,
})
)
);
});
}
_rawFind(query, { skip, limit, sort, keys, maxTimeMS, readPreference } = {}) {
let findOperation = this._mongoCollection
.find(query, { skip, limit, sort, readPreference })
let findOperation = this._mongoCollection.find(query, {
skip,
limit,
sort,
readPreference,
});
if (keys) {
findOperation = findOperation.project(keys);
@@ -55,7 +80,13 @@ export default class MongoCollection {
}
count(query, { skip, limit, sort, maxTimeMS, readPreference } = {}) {
const countOperation = this._mongoCollection.count(query, { skip, limit, sort, maxTimeMS, readPreference });
const countOperation = this._mongoCollection.count(query, {
skip,
limit,
sort,
maxTimeMS,
readPreference,
});
return countOperation;
}
@@ -65,7 +96,9 @@ export default class MongoCollection {
}
aggregate(pipeline, { maxTimeMS, readPreference } = {}) {
return this._mongoCollection.aggregate(pipeline, { maxTimeMS, readPreference }).toArray();
return this._mongoCollection
.aggregate(pipeline, { maxTimeMS, readPreference })
.toArray();
}
insertOne(object) {
@@ -76,7 +109,7 @@ export default class MongoCollection {
// If there is nothing that matches the query - does insert
// Postgres Note: `INSERT ... ON CONFLICT UPDATE` that is available since 9.5.
upsertOne(query, update) {
return this._mongoCollection.update(query, update, { upsert: true })
return this._mongoCollection.update(query, update, { upsert: true });
}
updateOne(query, update) {
@@ -93,13 +126,17 @@ export default class MongoCollection {
_ensureSparseUniqueIndexInBackground(indexRequest) {
return new Promise((resolve, reject) => {
this._mongoCollection.ensureIndex(indexRequest, { unique: true, background: true, sparse: true }, (error) => {
if (error) {
reject(error);
} else {
resolve();
this._mongoCollection.ensureIndex(
indexRequest,
{ unique: true, background: true, sparse: true },
error => {
if (error) {
reject(error);
} else {
resolve();
}
}
});
);
});
}

View File

@@ -1,5 +1,5 @@
import MongoCollection from './MongoCollection';
import Parse from 'parse/node';
import Parse from 'parse/node';
function mongoFieldToParseSchemaField(type) {
if (type[0] === '*') {
@@ -15,31 +15,43 @@ function mongoFieldToParseSchemaField(type) {
};
}
switch (type) {
case 'number': return {type: 'Number'};
case 'string': return {type: 'String'};
case 'boolean': return {type: 'Boolean'};
case 'date': return {type: 'Date'};
case 'map':
case 'object': return {type: 'Object'};
case 'array': return {type: 'Array'};
case 'geopoint': return {type: 'GeoPoint'};
case 'file': return {type: 'File'};
case 'bytes': return {type: 'Bytes'};
case 'polygon': return {type: 'Polygon'};
case 'number':
return { type: 'Number' };
case 'string':
return { type: 'String' };
case 'boolean':
return { type: 'Boolean' };
case 'date':
return { type: 'Date' };
case 'map':
case 'object':
return { type: 'Object' };
case 'array':
return { type: 'Array' };
case 'geopoint':
return { type: 'GeoPoint' };
case 'file':
return { type: 'File' };
case 'bytes':
return { type: 'Bytes' };
case 'polygon':
return { type: 'Polygon' };
}
}
const nonFieldSchemaKeys = ['_id', '_metadata', '_client_permissions'];
function mongoSchemaFieldsToParseSchemaFields(schema) {
var fieldNames = Object.keys(schema).filter(key => nonFieldSchemaKeys.indexOf(key) === -1);
var fieldNames = Object.keys(schema).filter(
key => nonFieldSchemaKeys.indexOf(key) === -1
);
var response = fieldNames.reduce((obj, fieldName) => {
obj[fieldName] = mongoFieldToParseSchemaField(schema[fieldName])
obj[fieldName] = mongoFieldToParseSchemaField(schema[fieldName]);
return obj;
}, {});
response.ACL = {type: 'ACL'};
response.createdAt = {type: 'Date'};
response.updatedAt = {type: 'Date'};
response.objectId = {type: 'String'};
response.ACL = { type: 'ACL' };
response.createdAt = { type: 'Date' };
response.updatedAt = { type: 'Date' };
response.objectId = { type: 'String' };
return response;
}
@@ -53,23 +65,23 @@ const emptyCLPS = Object.freeze({
});
const defaultCLPS = Object.freeze({
find: {'*': true},
get: {'*': true},
create: {'*': true},
update: {'*': true},
delete: {'*': true},
addField: {'*': true},
find: { '*': true },
get: { '*': true },
create: { '*': true },
update: { '*': true },
delete: { '*': true },
addField: { '*': true },
});
function mongoSchemaToParseSchema(mongoSchema) {
let clps = defaultCLPS;
let indexes = {}
let indexes = {};
if (mongoSchema._metadata) {
if (mongoSchema._metadata.class_permissions) {
clps = {...emptyCLPS, ...mongoSchema._metadata.class_permissions};
clps = { ...emptyCLPS, ...mongoSchema._metadata.class_permissions };
}
if (mongoSchema._metadata.indexes) {
indexes = {...mongoSchema._metadata.indexes};
indexes = { ...mongoSchema._metadata.indexes };
}
}
return {
@@ -90,23 +102,34 @@ function _mongoSchemaQueryFromNameQuery(name: string, query) {
return object;
}
// Returns a type suitable for inserting into mongo _SCHEMA collection.
// Does no validation. That is expected to be done in Parse Server.
function parseFieldTypeToMongoFieldType({ type, targetClass }) {
switch (type) {
case 'Pointer': return `*${targetClass}`;
case 'Relation': return `relation<${targetClass}>`;
case 'Number': return 'number';
case 'String': return 'string';
case 'Boolean': return 'boolean';
case 'Date': return 'date';
case 'Object': return 'object';
case 'Array': return 'array';
case 'GeoPoint': return 'geopoint';
case 'File': return 'file';
case 'Bytes': return 'bytes';
case 'Polygon': return 'polygon';
case 'Pointer':
return `*${targetClass}`;
case 'Relation':
return `relation<${targetClass}>`;
case 'Number':
return 'number';
case 'String':
return 'string';
case 'Boolean':
return 'boolean';
case 'Date':
return 'date';
case 'Object':
return 'object';
case 'Array':
return 'array';
case 'GeoPoint':
return 'geopoint';
case 'File':
return 'file';
case 'Bytes':
return 'bytes';
case 'Polygon':
return 'polygon';
}
}
@@ -118,43 +141,60 @@ class MongoSchemaCollection {
}
_fetchAllSchemasFrom_SCHEMA() {
return this._collection._rawFind({})
return this._collection
._rawFind({})
.then(schemas => schemas.map(mongoSchemaToParseSchema));
}
_fetchOneSchemaFrom_SCHEMA(name: string) {
return this._collection._rawFind(_mongoSchemaQueryFromNameQuery(name), { limit: 1 }).then(results => {
if (results.length === 1) {
return mongoSchemaToParseSchema(results[0]);
} else {
throw undefined;
}
});
return this._collection
._rawFind(_mongoSchemaQueryFromNameQuery(name), { limit: 1 })
.then(results => {
if (results.length === 1) {
return mongoSchemaToParseSchema(results[0]);
} else {
throw undefined;
}
});
}
// Atomically find and delete an object based on query.
findAndDeleteSchema(name: string) {
return this._collection._mongoCollection.findAndRemove(_mongoSchemaQueryFromNameQuery(name), []);
return this._collection._mongoCollection.findAndRemove(
_mongoSchemaQueryFromNameQuery(name),
[]
);
}
insertSchema(schema: any) {
return this._collection.insertOne(schema)
return this._collection
.insertOne(schema)
.then(result => mongoSchemaToParseSchema(result.ops[0]))
.catch(error => {
if (error.code === 11000) { //Mongo's duplicate key error
throw new Parse.Error(Parse.Error.DUPLICATE_VALUE, 'Class already exists.');
if (error.code === 11000) {
//Mongo's duplicate key error
throw new Parse.Error(
Parse.Error.DUPLICATE_VALUE,
'Class already exists.'
);
} else {
throw error;
}
})
});
}
updateSchema(name: string, update) {
return this._collection.updateOne(_mongoSchemaQueryFromNameQuery(name), update);
return this._collection.updateOne(
_mongoSchemaQueryFromNameQuery(name),
update
);
}
upsertSchema(name: string, query: string, update) {
return this._collection.upsertOne(_mongoSchemaQueryFromNameQuery(name, query), update);
return this._collection.upsertOne(
_mongoSchemaQueryFromNameQuery(name, query),
update
);
}
// Add a field to the schema. If database does not support the field
@@ -170,34 +210,45 @@ class MongoSchemaCollection {
// TODO: don't spend an extra query on finding the schema if the type we are trying to add isn't a GeoPoint.
addFieldIfNotExists(className: string, fieldName: string, type: string) {
return this._fetchOneSchemaFrom_SCHEMA(className)
.then(schema => {
// If a field with this name already exists, it will be handled elsewhere.
if (schema.fields[fieldName] != undefined) {
return;
}
// The schema exists. Check for existing GeoPoints.
if (type.type === 'GeoPoint') {
// Make sure there are not other geopoint fields
if (Object.keys(schema.fields).some(existingField => schema.fields[existingField].type === 'GeoPoint')) {
throw new Parse.Error(Parse.Error.INCORRECT_TYPE, 'MongoDB only supports one GeoPoint field in a class.');
.then(
schema => {
// If a field with this name already exists, it will be handled elsewhere.
if (schema.fields[fieldName] != undefined) {
return;
}
// The schema exists. Check for existing GeoPoints.
if (type.type === 'GeoPoint') {
// Make sure there are not other geopoint fields
if (
Object.keys(schema.fields).some(
existingField =>
schema.fields[existingField].type === 'GeoPoint'
)
) {
throw new Parse.Error(
Parse.Error.INCORRECT_TYPE,
'MongoDB only supports one GeoPoint field in a class.'
);
}
}
}
return;
}, error => {
// If error is undefined, the schema doesn't exist, and we can create the schema with the field.
// If some other error, reject with it.
if (error === undefined) {
return;
},
error => {
// If error is undefined, the schema doesn't exist, and we can create the schema with the field.
// If some other error, reject with it.
if (error === undefined) {
return;
}
throw error;
}
throw error;
})
)
.then(() => {
// We use $exists and $set to avoid overwriting the field type if it
// already exists. (it could have added inbetween the last query and the update)
// We use $exists and $set to avoid overwriting the field type if it
// already exists. (it could have added inbetween the last query and the update)
return this.upsertSchema(
className,
{ [fieldName]: { '$exists': false } },
{ '$set' : { [fieldName]: parseFieldTypeToMongoFieldType(type) } }
{ [fieldName]: { $exists: false } },
{ $set: { [fieldName]: parseFieldTypeToMongoFieldType(type) } }
);
});
}
@@ -205,7 +256,7 @@ class MongoSchemaCollection {
// Exported for testing reasons and because we haven't moved all mongo schema format
// related logic into the database adapter yet.
MongoSchemaCollection._TESTmongoSchemaToParseSchema = mongoSchemaToParseSchema
MongoSchemaCollection.parseFieldTypeToMongoFieldType = parseFieldTypeToMongoFieldType
MongoSchemaCollection._TESTmongoSchemaToParseSchema = mongoSchemaToParseSchema;
MongoSchemaCollection.parseFieldTypeToMongoFieldType = parseFieldTypeToMongoFieldType;
export default MongoSchemaCollection
export default MongoSchemaCollection;

View File

@@ -1,11 +1,13 @@
// @flow
import MongoCollection from './MongoCollection';
import MongoCollection from './MongoCollection';
import MongoSchemaCollection from './MongoSchemaCollection';
import { StorageAdapter } from '../StorageAdapter';
import type { SchemaType,
import { StorageAdapter } from '../StorageAdapter';
import type {
SchemaType,
QueryType,
StorageClass,
QueryOptions } from '../StorageAdapter';
QueryOptions,
} from '../StorageAdapter';
import {
parse as parseUrl,
format as formatUrl,
@@ -19,11 +21,11 @@ import {
transformPointerString,
} from './MongoTransform';
// @flow-disable-next
import Parse from 'parse/node';
import Parse from 'parse/node';
// @flow-disable-next
import _ from 'lodash';
import defaults from '../../../defaults';
import logger from '../../../logger';
import _ from 'lodash';
import defaults from '../../../defaults';
import logger from '../../../logger';
// @flow-disable-next
const mongodb = require('mongodb');
@@ -33,7 +35,8 @@ const ReadPreference = mongodb.ReadPreference;
const MongoSchemaCollectionName = '_SCHEMA';
const storageAdapterAllCollections = mongoAdapter => {
return mongoAdapter.connect()
return mongoAdapter
.connect()
.then(() => mongoAdapter.database.collections())
.then(collections => {
return collections.filter(collection => {
@@ -42,12 +45,14 @@ const storageAdapterAllCollections = mongoAdapter => {
}
// TODO: If you have one app with a collection prefix that happens to be a prefix of another
// apps prefix, this will go very very badly. We should fix that somehow.
return (collection.collectionName.indexOf(mongoAdapter._collectionPrefix) == 0);
return (
collection.collectionName.indexOf(mongoAdapter._collectionPrefix) == 0
);
});
});
}
};
const convertParseSchemaToMongoSchema = ({...schema}) => {
const convertParseSchemaToMongoSchema = ({ ...schema }) => {
delete schema.fields._rperm;
delete schema.fields._wperm;
@@ -60,11 +65,16 @@ const convertParseSchemaToMongoSchema = ({...schema}) => {
}
return schema;
}
};
// Returns { code, error } if invalid, or { result }, an object
// suitable for inserting into _SCHEMA collection, otherwise.
const mongoSchemaFromFieldsAndClassNameAndCLP = (fields, className, classLevelPermissions, indexes) => {
const mongoSchemaFromFieldsAndClassNameAndCLP = (
fields,
className,
classLevelPermissions,
indexes
) => {
const mongoObject = {
_id: className,
objectId: 'string',
@@ -74,7 +84,9 @@ const mongoSchemaFromFieldsAndClassNameAndCLP = (fields, className, classLevelPe
};
for (const fieldName in fields) {
mongoObject[fieldName] = MongoSchemaCollection.parseFieldTypeToMongoFieldType(fields[fieldName]);
mongoObject[
fieldName
] = MongoSchemaCollection.parseFieldTypeToMongoFieldType(fields[fieldName]);
}
if (typeof classLevelPermissions !== 'undefined') {
@@ -86,18 +98,22 @@ const mongoSchemaFromFieldsAndClassNameAndCLP = (fields, className, classLevelPe
}
}
if (indexes && typeof indexes === 'object' && Object.keys(indexes).length > 0) {
if (
indexes &&
typeof indexes === 'object' &&
Object.keys(indexes).length > 0
) {
mongoObject._metadata = mongoObject._metadata || {};
mongoObject._metadata.indexes = indexes;
}
if (!mongoObject._metadata) { // cleanup the unused _metadata
if (!mongoObject._metadata) {
// cleanup the unused _metadata
delete mongoObject._metadata;
}
return mongoObject;
}
};
export class MongoStorageAdapter implements StorageAdapter {
// Private
@@ -135,34 +151,40 @@ export class MongoStorageAdapter implements StorageAdapter {
// encoded
const encodedUri = formatUrl(parseUrl(this._uri));
this.connectionPromise = MongoClient.connect(encodedUri, this._mongoOptions).then(client => {
// Starting mongoDB 3.0, the MongoClient.connect don't return a DB anymore but a client
// Fortunately, we can get back the options and use them to select the proper DB.
// https://github.com/mongodb/node-mongodb-native/blob/2c35d76f08574225b8db02d7bef687123e6bb018/lib/mongo_client.js#L885
const options = client.s.options;
const database = client.db(options.dbName);
if (!database) {
delete this.connectionPromise;
return;
}
database.on('error', () => {
this.connectionPromise = MongoClient.connect(
encodedUri,
this._mongoOptions
)
.then(client => {
// Starting mongoDB 3.0, the MongoClient.connect don't return a DB anymore but a client
// Fortunately, we can get back the options and use them to select the proper DB.
// https://github.com/mongodb/node-mongodb-native/blob/2c35d76f08574225b8db02d7bef687123e6bb018/lib/mongo_client.js#L885
const options = client.s.options;
const database = client.db(options.dbName);
if (!database) {
delete this.connectionPromise;
return;
}
database.on('error', () => {
delete this.connectionPromise;
});
database.on('close', () => {
delete this.connectionPromise;
});
this.client = client;
this.database = database;
})
.catch(err => {
delete this.connectionPromise;
return Promise.reject(err);
});
database.on('close', () => {
delete this.connectionPromise;
});
this.client = client;
this.database = database;
}).catch((err) => {
delete this.connectionPromise;
return Promise.reject(err);
});
return this.connectionPromise;
}
handleError<T>(error: ?(Error | Parse.Error)): Promise<T> {
if (error && error.code === 13) { // Unauthorized error
if (error && error.code === 13) {
// Unauthorized error
delete this.client;
delete this.database;
delete this.connectionPromise;
@@ -192,36 +214,55 @@ export class MongoStorageAdapter implements StorageAdapter {
}
classExists(name: string) {
return this.connect().then(() => {
return this.database.listCollections({ name: this._collectionPrefix + name }).toArray();
}).then(collections => {
return collections.length > 0;
}).catch(err => this.handleError(err));
return this.connect()
.then(() => {
return this.database
.listCollections({ name: this._collectionPrefix + name })
.toArray();
})
.then(collections => {
return collections.length > 0;
})
.catch(err => this.handleError(err));
}
setClassLevelPermissions(className: string, CLPs: any): Promise<void> {
return this._schemaCollection()
.then(schemaCollection => schemaCollection.updateSchema(className, {
$set: { '_metadata.class_permissions': CLPs }
})).catch(err => this.handleError(err));
.then(schemaCollection =>
schemaCollection.updateSchema(className, {
$set: { '_metadata.class_permissions': CLPs },
})
)
.catch(err => this.handleError(err));
}
setIndexesWithSchemaFormat(className: string, submittedIndexes: any, existingIndexes: any = {}, fields: any): Promise<void> {
setIndexesWithSchemaFormat(
className: string,
submittedIndexes: any,
existingIndexes: any = {},
fields: any
): Promise<void> {
if (submittedIndexes === undefined) {
return Promise.resolve();
}
if (Object.keys(existingIndexes).length === 0) {
existingIndexes = { _id_: { _id: 1} };
existingIndexes = { _id_: { _id: 1 } };
}
const deletePromises = [];
const insertedIndexes = [];
Object.keys(submittedIndexes).forEach(name => {
const field = submittedIndexes[name];
if (existingIndexes[name] && field.__op !== 'Delete') {
throw new Parse.Error(Parse.Error.INVALID_QUERY, `Index ${name} exists, cannot update.`);
throw new Parse.Error(
Parse.Error.INVALID_QUERY,
`Index ${name} exists, cannot update.`
);
}
if (!existingIndexes[name] && field.__op === 'Delete') {
throw new Parse.Error(Parse.Error.INVALID_QUERY, `Index ${name} does not exist, cannot delete.`);
throw new Parse.Error(
Parse.Error.INVALID_QUERY,
`Index ${name} does not exist, cannot delete.`
);
}
if (field.__op === 'Delete') {
const promise = this.dropIndex(className, name);
@@ -230,7 +271,10 @@ export class MongoStorageAdapter implements StorageAdapter {
} else {
Object.keys(field).forEach(key => {
if (!fields.hasOwnProperty(key)) {
throw new Parse.Error(Parse.Error.INVALID_QUERY, `Field ${key} does not exist, cannot add index.`);
throw new Parse.Error(
Parse.Error.INVALID_QUERY,
`Field ${key} does not exist, cannot add index.`
);
}
});
existingIndexes[name] = field;
@@ -247,30 +291,34 @@ export class MongoStorageAdapter implements StorageAdapter {
return Promise.all(deletePromises)
.then(() => insertPromise)
.then(() => this._schemaCollection())
.then(schemaCollection => schemaCollection.updateSchema(className, {
$set: { '_metadata.indexes': existingIndexes }
}))
.then(schemaCollection =>
schemaCollection.updateSchema(className, {
$set: { '_metadata.indexes': existingIndexes },
})
)
.catch(err => this.handleError(err));
}
setIndexesFromMongo(className: string) {
return this.getIndexes(className).then((indexes) => {
indexes = indexes.reduce((obj, index) => {
if (index.key._fts) {
delete index.key._fts;
delete index.key._ftsx;
for (const field in index.weights) {
index.key[field] = 'text';
return this.getIndexes(className)
.then(indexes => {
indexes = indexes.reduce((obj, index) => {
if (index.key._fts) {
delete index.key._fts;
delete index.key._ftsx;
for (const field in index.weights) {
index.key[field] = 'text';
}
}
}
obj[index.name] = index.key;
return obj;
}, {});
return this._schemaCollection()
.then(schemaCollection => schemaCollection.updateSchema(className, {
$set: { '_metadata.indexes': indexes }
}));
})
obj[index.name] = index.key;
return obj;
}, {});
return this._schemaCollection().then(schemaCollection =>
schemaCollection.updateSchema(className, {
$set: { '_metadata.indexes': indexes },
})
);
})
.catch(err => this.handleError(err))
.catch(() => {
// Ignore if collection not found
@@ -280,17 +328,33 @@ export class MongoStorageAdapter implements StorageAdapter {
createClass(className: string, schema: SchemaType): Promise<void> {
schema = convertParseSchemaToMongoSchema(schema);
const mongoObject = mongoSchemaFromFieldsAndClassNameAndCLP(schema.fields, className, schema.classLevelPermissions, schema.indexes);
const mongoObject = mongoSchemaFromFieldsAndClassNameAndCLP(
schema.fields,
className,
schema.classLevelPermissions,
schema.indexes
);
mongoObject._id = className;
return this.setIndexesWithSchemaFormat(className, schema.indexes, {}, schema.fields)
return this.setIndexesWithSchemaFormat(
className,
schema.indexes,
{},
schema.fields
)
.then(() => this._schemaCollection())
.then(schemaCollection => schemaCollection.insertSchema(mongoObject))
.catch(err => this.handleError(err));
}
addFieldIfNotExists(className: string, fieldName: string, type: any): Promise<void> {
addFieldIfNotExists(
className: string,
fieldName: string,
type: any
): Promise<void> {
return this._schemaCollection()
.then(schemaCollection => schemaCollection.addFieldIfNotExists(className, fieldName, type))
.then(schemaCollection =>
schemaCollection.addFieldIfNotExists(className, fieldName, type)
)
.then(() => this.createIndexesIfNeeded(className, fieldName, type))
.catch(err => this.handleError(err));
}
@@ -298,24 +362,33 @@ export class MongoStorageAdapter implements StorageAdapter {
// Drops a collection. Resolves with true if it was a Parse Schema (eg. _User, Custom, etc.)
// and resolves with false if it wasn't (eg. a join table). Rejects if deletion was impossible.
deleteClass(className: string) {
return this._adaptiveCollection(className)
.then(collection => collection.drop())
.catch(error => {
// 'ns not found' means collection was already gone. Ignore deletion attempt.
if (error.message == 'ns not found') {
return;
}
throw error;
})
// We've dropped the collection, now remove the _SCHEMA document
.then(() => this._schemaCollection())
.then(schemaCollection => schemaCollection.findAndDeleteSchema(className))
.catch(err => this.handleError(err));
return (
this._adaptiveCollection(className)
.then(collection => collection.drop())
.catch(error => {
// 'ns not found' means collection was already gone. Ignore deletion attempt.
if (error.message == 'ns not found') {
return;
}
throw error;
})
// We've dropped the collection, now remove the _SCHEMA document
.then(() => this._schemaCollection())
.then(schemaCollection =>
schemaCollection.findAndDeleteSchema(className)
)
.catch(err => this.handleError(err))
);
}
deleteAllClasses(fast: boolean) {
return storageAdapterAllCollections(this)
.then(collections => Promise.all(collections.map(collection => fast ? collection.remove({}) : collection.drop())));
return storageAdapterAllCollections(this).then(collections =>
Promise.all(
collections.map(
collection => (fast ? collection.remove({}) : collection.drop())
)
)
);
}
// Remove the column and all the data. For Relations, the _Join collection is handled
@@ -341,17 +414,17 @@ export class MongoStorageAdapter implements StorageAdapter {
deleteFields(className: string, schema: SchemaType, fieldNames: string[]) {
const mongoFormatNames = fieldNames.map(fieldName => {
if (schema.fields[fieldName].type === 'Pointer') {
return `_p_${fieldName}`
return `_p_${fieldName}`;
} else {
return fieldName;
}
});
const collectionUpdate = { '$unset' : {} };
const collectionUpdate = { $unset: {} };
mongoFormatNames.forEach(name => {
collectionUpdate['$unset'][name] = null;
});
const schemaUpdate = { '$unset' : {} };
const schemaUpdate = { $unset: {} };
fieldNames.forEach(name => {
schemaUpdate['$unset'][name] = null;
});
@@ -359,7 +432,9 @@ export class MongoStorageAdapter implements StorageAdapter {
return this._adaptiveCollection(className)
.then(collection => collection.updateMany({}, collectionUpdate))
.then(() => this._schemaCollection())
.then(schemaCollection => schemaCollection.updateSchema(className, schemaUpdate))
.then(schemaCollection =>
schemaCollection.updateSchema(className, schemaUpdate)
)
.catch(err => this.handleError(err));
}
@@ -367,7 +442,10 @@ export class MongoStorageAdapter implements StorageAdapter {
// schemas cannot be retrieved, returns a promise that rejects. Requirements for the
// rejection reason are TBD.
getAllClasses(): Promise<StorageClass[]> {
return this._schemaCollection().then(schemasCollection => schemasCollection._fetchAllSchemasFrom_SCHEMA())
return this._schemaCollection()
.then(schemasCollection =>
schemasCollection._fetchAllSchemasFrom_SCHEMA()
)
.catch(err => this.handleError(err));
}
@@ -376,7 +454,9 @@ export class MongoStorageAdapter implements StorageAdapter {
// undefined as the reason.
getClass(className: string): Promise<StorageClass> {
return this._schemaCollection()
.then(schemasCollection => schemasCollection._fetchOneSchemaFrom_SCHEMA(className))
.then(schemasCollection =>
schemasCollection._fetchOneSchemaFrom_SCHEMA(className)
)
.catch(err => this.handleError(err));
}
@@ -385,15 +465,25 @@ export class MongoStorageAdapter implements StorageAdapter {
// the schema only for the legacy mongo format. We'll figure that out later.
createObject(className: string, schema: SchemaType, object: any) {
schema = convertParseSchemaToMongoSchema(schema);
const mongoObject = parseObjectToMongoObjectForCreate(className, object, schema);
const mongoObject = parseObjectToMongoObjectForCreate(
className,
object,
schema
);
return this._adaptiveCollection(className)
.then(collection => collection.insertOne(mongoObject))
.catch(error => {
if (error.code === 11000) { // Duplicate value
const err = new Parse.Error(Parse.Error.DUPLICATE_VALUE, 'A duplicate value for a field with unique values was provided');
if (error.code === 11000) {
// Duplicate value
const err = new Parse.Error(
Parse.Error.DUPLICATE_VALUE,
'A duplicate value for a field with unique values was provided'
);
err.underlyingError = error;
if (error.message) {
const matches = error.message.match(/index:[\sa-zA-Z0-9_\-\.]+\$?([a-zA-Z_-]+)_1/);
const matches = error.message.match(
/index:[\sa-zA-Z0-9_\-\.]+\$?([a-zA-Z_-]+)_1/
);
if (matches && Array.isArray(matches)) {
err.userInfo = { duplicated_field: matches[1] };
}
@@ -408,26 +498,44 @@ export class MongoStorageAdapter implements StorageAdapter {
// Remove all objects that match the given Parse Query.
// If no objects match, reject with OBJECT_NOT_FOUND. If objects are found and deleted, resolve with undefined.
// If there is some other error, reject with INTERNAL_SERVER_ERROR.
deleteObjectsByQuery(className: string, schema: SchemaType, query: QueryType) {
deleteObjectsByQuery(
className: string,
schema: SchemaType,
query: QueryType
) {
schema = convertParseSchemaToMongoSchema(schema);
return this._adaptiveCollection(className)
.then(collection => {
const mongoWhere = transformWhere(className, query, schema);
return collection.deleteMany(mongoWhere)
return collection.deleteMany(mongoWhere);
})
.catch(err => this.handleError(err))
.then(({ result }) => {
if (result.n === 0) {
throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'Object not found.');
.then(
({ result }) => {
if (result.n === 0) {
throw new Parse.Error(
Parse.Error.OBJECT_NOT_FOUND,
'Object not found.'
);
}
return Promise.resolve();
},
() => {
throw new Parse.Error(
Parse.Error.INTERNAL_SERVER_ERROR,
'Database adapter error'
);
}
return Promise.resolve();
}, () => {
throw new Parse.Error(Parse.Error.INTERNAL_SERVER_ERROR, 'Database adapter error');
});
);
}
// Apply the update to all objects that match the given Parse Query.
updateObjectsByQuery(className: string, schema: SchemaType, query: QueryType, update: any) {
updateObjectsByQuery(
className: string,
schema: SchemaType,
query: QueryType,
update: any
) {
schema = convertParseSchemaToMongoSchema(schema);
const mongoUpdate = transformUpdate(className, update, schema);
const mongoWhere = transformWhere(className, query, schema);
@@ -438,16 +546,28 @@ export class MongoStorageAdapter implements StorageAdapter {
// Atomically finds and updates an object based on query.
// Return value not currently well specified.
findOneAndUpdate(className: string, schema: SchemaType, query: QueryType, update: any) {
findOneAndUpdate(
className: string,
schema: SchemaType,
query: QueryType,
update: any
) {
schema = convertParseSchemaToMongoSchema(schema);
const mongoUpdate = transformUpdate(className, update, schema);
const mongoWhere = transformWhere(className, query, schema);
return this._adaptiveCollection(className)
.then(collection => collection._mongoCollection.findAndModify(mongoWhere, [], mongoUpdate, { new: true }))
.then(collection =>
collection._mongoCollection.findAndModify(mongoWhere, [], mongoUpdate, {
new: true,
})
)
.then(result => mongoObjectToParseObject(className, result.value, schema))
.catch(error => {
if (error.code === 11000) {
throw new Parse.Error(Parse.Error.DUPLICATE_VALUE, 'A duplicate value for a field with unique values was provided');
throw new Parse.Error(
Parse.Error.DUPLICATE_VALUE,
'A duplicate value for a field with unique values was provided'
);
}
throw error;
})
@@ -455,7 +575,12 @@ export class MongoStorageAdapter implements StorageAdapter {
}
// Hopefully we can get rid of this. It's only used for config and hooks.
upsertOneObject(className: string, schema: SchemaType, query: QueryType, update: any) {
upsertOneObject(
className: string,
schema: SchemaType,
query: QueryType,
update: any
) {
schema = convertParseSchemaToMongoSchema(schema);
const mongoUpdate = transformUpdate(className, update, schema);
const mongoWhere = transformWhere(className, query, schema);
@@ -465,32 +590,49 @@ export class MongoStorageAdapter implements StorageAdapter {
}
// Executes a find. Accepts: className, query in Parse format, and { skip, limit, sort }.
find(className: string, schema: SchemaType, query: QueryType, { skip, limit, sort, keys, readPreference }: QueryOptions): Promise<any> {
find(
className: string,
schema: SchemaType,
query: QueryType,
{ skip, limit, sort, keys, readPreference }: QueryOptions
): Promise<any> {
schema = convertParseSchemaToMongoSchema(schema);
const mongoWhere = transformWhere(className, query, schema);
const mongoSort = _.mapKeys(sort, (value, fieldName) => transformKey(className, fieldName, schema));
const mongoKeys = _.reduce(keys, (memo, key) => {
if (key === 'ACL') {
memo['_rperm'] = 1;
memo['_wperm'] = 1;
} else {
memo[transformKey(className, key, schema)] = 1;
}
return memo;
}, {});
const mongoSort = _.mapKeys(sort, (value, fieldName) =>
transformKey(className, fieldName, schema)
);
const mongoKeys = _.reduce(
keys,
(memo, key) => {
if (key === 'ACL') {
memo['_rperm'] = 1;
memo['_wperm'] = 1;
} else {
memo[transformKey(className, key, schema)] = 1;
}
return memo;
},
{}
);
readPreference = this._parseReadPreference(readPreference);
return this.createTextIndexesIfNeeded(className, query, schema)
.then(() => this._adaptiveCollection(className))
.then(collection => collection.find(mongoWhere, {
skip,
limit,
sort: mongoSort,
keys: mongoKeys,
maxTimeMS: this._maxTimeMS,
readPreference,
}))
.then(objects => objects.map(object => mongoObjectToParseObject(className, object, schema)))
.then(collection =>
collection.find(mongoWhere, {
skip,
limit,
sort: mongoSort,
keys: mongoKeys,
maxTimeMS: this._maxTimeMS,
readPreference,
})
)
.then(objects =>
objects.map(object =>
mongoObjectToParseObject(className, object, schema)
)
)
.catch(err => this.handleError(err));
}
@@ -499,18 +641,29 @@ export class MongoStorageAdapter implements StorageAdapter {
// As such, we shouldn't expose this function to users of parse until we have an out-of-band
// Way of determining if a field is nullable. Undefined doesn't count against uniqueness,
// which is why we use sparse indexes.
ensureUniqueness(className: string, schema: SchemaType, fieldNames: string[]) {
ensureUniqueness(
className: string,
schema: SchemaType,
fieldNames: string[]
) {
schema = convertParseSchemaToMongoSchema(schema);
const indexCreationRequest = {};
const mongoFieldNames = fieldNames.map(fieldName => transformKey(className, fieldName, schema));
const mongoFieldNames = fieldNames.map(fieldName =>
transformKey(className, fieldName, schema)
);
mongoFieldNames.forEach(fieldName => {
indexCreationRequest[fieldName] = 1;
});
return this._adaptiveCollection(className)
.then(collection => collection._ensureSparseUniqueIndexInBackground(indexCreationRequest))
.then(collection =>
collection._ensureSparseUniqueIndexInBackground(indexCreationRequest)
)
.catch(error => {
if (error.code === 11000) {
throw new Parse.Error(Parse.Error.DUPLICATE_VALUE, 'Tried to ensure field uniqueness for a class that already has duplicates.');
throw new Parse.Error(
Parse.Error.DUPLICATE_VALUE,
'Tried to ensure field uniqueness for a class that already has duplicates.'
);
}
throw error;
})
@@ -519,33 +672,52 @@ export class MongoStorageAdapter implements StorageAdapter {
// Used in tests
_rawFind(className: string, query: QueryType) {
return this._adaptiveCollection(className).then(collection => collection.find(query, {
maxTimeMS: this._maxTimeMS,
})).catch(err => this.handleError(err));
}
// Executes a count.
count(className: string, schema: SchemaType, query: QueryType, readPreference: ?string) {
schema = convertParseSchemaToMongoSchema(schema);
readPreference = this._parseReadPreference(readPreference);
return this._adaptiveCollection(className)
.then(collection => collection.count(transformWhere(className, query, schema), {
maxTimeMS: this._maxTimeMS,
readPreference,
}))
.then(collection =>
collection.find(query, {
maxTimeMS: this._maxTimeMS,
})
)
.catch(err => this.handleError(err));
}
distinct(className: string, schema: SchemaType, query: QueryType, fieldName: string) {
// Executes a count.
count(
className: string,
schema: SchemaType,
query: QueryType,
readPreference: ?string
) {
schema = convertParseSchemaToMongoSchema(schema);
const isPointerField = schema.fields[fieldName] && schema.fields[fieldName].type === 'Pointer';
readPreference = this._parseReadPreference(readPreference);
return this._adaptiveCollection(className)
.then(collection =>
collection.count(transformWhere(className, query, schema), {
maxTimeMS: this._maxTimeMS,
readPreference,
})
)
.catch(err => this.handleError(err));
}
distinct(
className: string,
schema: SchemaType,
query: QueryType,
fieldName: string
) {
schema = convertParseSchemaToMongoSchema(schema);
const isPointerField =
schema.fields[fieldName] && schema.fields[fieldName].type === 'Pointer';
if (isPointerField) {
fieldName = `_p_${fieldName}`
fieldName = `_p_${fieldName}`;
}
return this._adaptiveCollection(className)
.then(collection => collection.distinct(fieldName, transformWhere(className, query, schema)))
.then(collection =>
collection.distinct(fieldName, transformWhere(className, query, schema))
)
.then(objects => {
objects = objects.filter((obj) => obj != null);
objects = objects.filter(obj => obj != null);
return objects.map(object => {
if (isPointerField) {
const field = fieldName.substring(3);
@@ -557,12 +729,21 @@ export class MongoStorageAdapter implements StorageAdapter {
.catch(err => this.handleError(err));
}
aggregate(className: string, schema: any, pipeline: any, readPreference: ?string) {
aggregate(
className: string,
schema: any,
pipeline: any,
readPreference: ?string
) {
let isPointerField = false;
pipeline = pipeline.map((stage) => {
pipeline = pipeline.map(stage => {
if (stage.$group) {
stage.$group = this._parseAggregateGroupArgs(schema, stage.$group);
if (stage.$group._id && (typeof stage.$group._id === 'string') && stage.$group._id.indexOf('$_p_') >= 0) {
if (
stage.$group._id &&
typeof stage.$group._id === 'string' &&
stage.$group._id.indexOf('$_p_') >= 0
) {
isPointerField = true;
}
}
@@ -570,13 +751,21 @@ export class MongoStorageAdapter implements StorageAdapter {
stage.$match = this._parseAggregateArgs(schema, stage.$match);
}
if (stage.$project) {
stage.$project = this._parseAggregateProjectArgs(schema, stage.$project);
stage.$project = this._parseAggregateProjectArgs(
schema,
stage.$project
);
}
return stage;
});
readPreference = this._parseReadPreference(readPreference);
return this._adaptiveCollection(className)
.then(collection => collection.aggregate(pipeline, { readPreference, maxTimeMS: this._maxTimeMS }))
.then(collection =>
collection.aggregate(pipeline, {
readPreference,
maxTimeMS: this._maxTimeMS,
})
)
.catch(error => {
if (error.code === 16006) {
throw new Parse.Error(Parse.Error.INVALID_QUERY, error.message);
@@ -598,7 +787,11 @@ export class MongoStorageAdapter implements StorageAdapter {
});
return results;
})
.then(objects => objects.map(object => mongoObjectToParseObject(className, object, schema)))
.then(objects =>
objects.map(object =>
mongoObjectToParseObject(className, object, schema)
)
)
.catch(err => this.handleError(err));
}
@@ -623,7 +816,7 @@ export class MongoStorageAdapter implements StorageAdapter {
// down a tree to find a "leaf node" and checking to see if it needs to be converted.
_parseAggregateArgs(schema: any, pipeline: any): any {
if (Array.isArray(pipeline)) {
return pipeline.map((value) => this._parseAggregateArgs(schema, value));
return pipeline.map(value => this._parseAggregateArgs(schema, value));
} else if (typeof pipeline === 'object') {
const returnValue = {};
for (const field in pipeline) {
@@ -632,12 +825,20 @@ export class MongoStorageAdapter implements StorageAdapter {
// Pass objects down to MongoDB...this is more than likely an $exists operator.
returnValue[`_p_${field}`] = pipeline[field];
} else {
returnValue[`_p_${field}`] = `${schema.fields[field].targetClass}$${pipeline[field]}`;
returnValue[`_p_${field}`] = `${schema.fields[field].targetClass}$${
pipeline[field]
}`;
}
} else if (schema.fields[field] && schema.fields[field].type === 'Date') {
} else if (
schema.fields[field] &&
schema.fields[field].type === 'Date'
) {
returnValue[field] = this._convertToDate(pipeline[field]);
} else {
returnValue[field] = this._parseAggregateArgs(schema, pipeline[field]);
returnValue[field] = this._parseAggregateArgs(
schema,
pipeline[field]
);
}
if (field === 'objectId') {
@@ -690,11 +891,16 @@ export class MongoStorageAdapter implements StorageAdapter {
// updatedAt or objectId and change it accordingly.
_parseAggregateGroupArgs(schema: any, pipeline: any): any {
if (Array.isArray(pipeline)) {
return pipeline.map((value) => this._parseAggregateGroupArgs(schema, value));
return pipeline.map(value =>
this._parseAggregateGroupArgs(schema, value)
);
} else if (typeof pipeline === 'object') {
const returnValue = {};
for (const field in pipeline) {
returnValue[field] = this._parseAggregateGroupArgs(schema, pipeline[field]);
returnValue[field] = this._parseAggregateGroupArgs(
schema,
pipeline[field]
);
}
return returnValue;
} else if (typeof pipeline === 'string') {
@@ -719,34 +925,37 @@ export class MongoStorageAdapter implements StorageAdapter {
return new Date(value);
}
const returnValue = {}
const returnValue = {};
for (const field in value) {
returnValue[field] = this._convertToDate(value[field])
returnValue[field] = this._convertToDate(value[field]);
}
return returnValue;
}
_parseReadPreference(readPreference: ?string): ?string {
switch (readPreference) {
case 'PRIMARY':
readPreference = ReadPreference.PRIMARY;
break;
case 'PRIMARY_PREFERRED':
readPreference = ReadPreference.PRIMARY_PREFERRED;
break;
case 'SECONDARY':
readPreference = ReadPreference.SECONDARY;
break;
case 'SECONDARY_PREFERRED':
readPreference = ReadPreference.SECONDARY_PREFERRED;
break;
case 'NEAREST':
readPreference = ReadPreference.NEAREST;
break;
case undefined:
break;
default:
throw new Parse.Error(Parse.Error.INVALID_QUERY, 'Not supported read preference.');
case 'PRIMARY':
readPreference = ReadPreference.PRIMARY;
break;
case 'PRIMARY_PREFERRED':
readPreference = ReadPreference.PRIMARY_PREFERRED;
break;
case 'SECONDARY':
readPreference = ReadPreference.SECONDARY;
break;
case 'SECONDARY_PREFERRED':
readPreference = ReadPreference.SECONDARY_PREFERRED;
break;
case 'NEAREST':
readPreference = ReadPreference.NEAREST;
break;
case undefined:
break;
default:
throw new Parse.Error(
Parse.Error.INVALID_QUERY,
'Not supported read preference.'
);
}
return readPreference;
}
@@ -770,15 +979,19 @@ export class MongoStorageAdapter implements StorageAdapter {
createIndexesIfNeeded(className: string, fieldName: string, type: any) {
if (type && type.type === 'Polygon') {
const index = {
[fieldName]: '2dsphere'
[fieldName]: '2dsphere',
};
return this.createIndex(className, index);
}
return Promise.resolve();
}
createTextIndexesIfNeeded(className: string, query: QueryType, schema: any): Promise<void> {
for(const fieldName in query) {
createTextIndexesIfNeeded(
className: string,
query: QueryType,
schema: any
): Promise<void> {
for (const fieldName in query) {
if (!query[fieldName] || !query[fieldName].$text) {
continue;
}
@@ -791,15 +1004,20 @@ export class MongoStorageAdapter implements StorageAdapter {
}
const indexName = `${fieldName}_text`;
const textIndex = {
[indexName]: { [fieldName]: 'text' }
[indexName]: { [fieldName]: 'text' },
};
return this.setIndexesWithSchemaFormat(className, textIndex, existingIndexes, schema.fields)
.catch((error) => {
if (error.code === 85) { // Index exist with different options
return this.setIndexesFromMongo(className);
}
throw error;
});
return this.setIndexesWithSchemaFormat(
className,
textIndex,
existingIndexes,
schema.fields
).catch(error => {
if (error.code === 85) {
// Index exist with different options
return this.setIndexesFromMongo(className);
}
throw error;
});
}
return Promise.resolve();
}
@@ -824,8 +1042,8 @@ export class MongoStorageAdapter implements StorageAdapter {
updateSchemaWithIndexes(): Promise<any> {
return this.getAllClasses()
.then((classes) => {
const promises = classes.map((schema) => {
.then(classes => {
const promises = classes.map(schema => {
return this.setIndexesFromMongo(schema.className);
});
return Promise.all(promises);

File diff suppressed because it is too large Load Diff

View File

@@ -1,4 +1,3 @@
const parser = require('./PostgresConfigParser');
export function createClient(uri, databaseOptions) {

View File

@@ -19,11 +19,14 @@ function getDatabaseOptionsFromURI(uri) {
databaseOptions.ssl =
queryParams.ssl && queryParams.ssl.toLowerCase() === 'true' ? true : false;
databaseOptions.binary =
queryParams.binary && queryParams.binary.toLowerCase() === 'true' ? true : false;
queryParams.binary && queryParams.binary.toLowerCase() === 'true'
? true
: false;
databaseOptions.client_encoding = queryParams.client_encoding;
databaseOptions.application_name = queryParams.application_name;
databaseOptions.fallback_application_name = queryParams.fallback_application_name;
databaseOptions.fallback_application_name =
queryParams.fallback_application_name;
if (queryParams.poolSize) {
databaseOptions.poolSize = parseInt(queryParams.poolSize) || 10;
@@ -35,19 +38,15 @@ function getDatabaseOptionsFromURI(uri) {
function parseQueryParams(queryString) {
queryString = queryString || '';
return queryString
.split('&')
.reduce((p, c) => {
const parts = c.split('=');
p[decodeURIComponent(parts[0])] =
parts.length > 1
? decodeURIComponent(parts.slice(1).join('='))
: '';
return p;
}, {});
return queryString.split('&').reduce((p, c) => {
const parts = c.split('=');
p[decodeURIComponent(parts[0])] =
parts.length > 1 ? decodeURIComponent(parts.slice(1).join('=')) : '';
return p;
}, {});
}
module.exports = {
parseQueryParams: parseQueryParams,
getDatabaseOptionsFromURI: getDatabaseOptionsFromURI
getDatabaseOptionsFromURI: getDatabaseOptionsFromURI,
};

File diff suppressed because it is too large Load Diff

View File

@@ -10,20 +10,19 @@ module.exports = {
contains: sql('array/contains.sql'),
containsAll: sql('array/contains-all.sql'),
containsAllRegex: sql('array/contains-all-regex.sql'),
remove: sql('array/remove.sql')
remove: sql('array/remove.sql'),
},
misc: {
jsonObjectSetKeys: sql('misc/json-object-set-keys.sql')
}
jsonObjectSetKeys: sql('misc/json-object-set-keys.sql'),
},
};
///////////////////////////////////////////////
// Helper for linking to external query files;
function sql(file) {
var fullPath = path.join(__dirname, file); // generating full path;
var qf = new QueryFile(fullPath, {minify: true});
var qf = new QueryFile(fullPath, { minify: true });
if (qf.error) {
throw qf.error;

View File

@@ -7,7 +7,7 @@ export type QueryOptions = {
skip?: number,
limit?: number,
acl?: string[],
sort?: {[string]: number},
sort?: { [string]: number },
count?: boolean | number,
keys?: string[],
op?: string,
@@ -18,8 +18,8 @@ export type QueryOptions = {
export type UpdateQueryOptions = {
many?: boolean,
upsert?: boolean
}
upsert?: boolean,
};
export type FullQueryOptions = QueryOptions & UpdateQueryOptions;
@@ -29,27 +29,88 @@ export interface StorageAdapter {
classExists(className: string): Promise<boolean>;
setClassLevelPermissions(className: string, clps: any): Promise<void>;
createClass(className: string, schema: SchemaType): Promise<void>;
addFieldIfNotExists(className: string, fieldName: string, type: any): Promise<void>;
addFieldIfNotExists(
className: string,
fieldName: string,
type: any
): Promise<void>;
deleteClass(className: string): Promise<void>;
deleteAllClasses(fast: boolean): Promise<void>;
deleteFields(className: string, schema: SchemaType, fieldNames: Array<string>): Promise<void>;
deleteFields(
className: string,
schema: SchemaType,
fieldNames: Array<string>
): Promise<void>;
getAllClasses(): Promise<StorageClass[]>;
getClass(className: string): Promise<StorageClass>;
createObject(className: string, schema: SchemaType, object: any): Promise<any>;
deleteObjectsByQuery(className: string, schema: SchemaType, query: QueryType): Promise<void>;
updateObjectsByQuery(className: string, schema: SchemaType, query: QueryType, update: any): Promise<[any]>;
findOneAndUpdate(className: string, schema: SchemaType, query: QueryType, update: any): Promise<any>;
upsertOneObject(className: string, schema: SchemaType, query: QueryType, update: any): Promise<any>;
find(className: string, schema: SchemaType, query: QueryType, options: QueryOptions): Promise<[any]>;
ensureUniqueness(className: string, schema: SchemaType, fieldNames: Array<string>): Promise<void>;
count(className: string, schema: SchemaType, query: QueryType, readPreference: ?string): Promise<number>;
distinct(className: string, schema: SchemaType, query: QueryType, fieldName: string): Promise<any>;
aggregate(className: string, schema: any, pipeline: any, readPreference: ?string): Promise<any>;
createObject(
className: string,
schema: SchemaType,
object: any
): Promise<any>;
deleteObjectsByQuery(
className: string,
schema: SchemaType,
query: QueryType
): Promise<void>;
updateObjectsByQuery(
className: string,
schema: SchemaType,
query: QueryType,
update: any
): Promise<[any]>;
findOneAndUpdate(
className: string,
schema: SchemaType,
query: QueryType,
update: any
): Promise<any>;
upsertOneObject(
className: string,
schema: SchemaType,
query: QueryType,
update: any
): Promise<any>;
find(
className: string,
schema: SchemaType,
query: QueryType,
options: QueryOptions
): Promise<[any]>;
ensureUniqueness(
className: string,
schema: SchemaType,
fieldNames: Array<string>
): Promise<void>;
count(
className: string,
schema: SchemaType,
query: QueryType,
readPreference: ?string
): Promise<number>;
distinct(
className: string,
schema: SchemaType,
query: QueryType,
fieldName: string
): Promise<any>;
aggregate(
className: string,
schema: any,
pipeline: any,
readPreference: ?string
): Promise<any>;
performInitialization(options: ?any): Promise<void>;
// Indexing
createIndexes(className: string, indexes: any, conn: ?any): Promise<void>;
getIndexes(className: string, connection: ?any): Promise<void>;
updateSchemaWithIndexes(): Promise<void>;
setIndexesWithSchemaFormat(className: string, submittedIndexes: any, existingIndexes: any, fields: any, conn: ?any): Promise<void>;
setIndexesWithSchemaFormat(
className: string,
submittedIndexes: any,
existingIndexes: any,
fields: any,
conn: ?any
): Promise<void>;
}