feat: Add option to change the log level of the logs emitted by triggers (#8328)

This commit is contained in:
alljinx
2022-12-07 22:55:45 +01:00
committed by GitHub
parent 0a8670dc22
commit 8f3b694e39
10 changed files with 286 additions and 137 deletions

View File

@@ -24,31 +24,33 @@ const nestedOptionTypes = [
'PasswordPolicyOptions', 'PasswordPolicyOptions',
'SecurityOptions', 'SecurityOptions',
'SchemaOptions', 'SchemaOptions',
'LogLevels',
]; ];
/** The prefix of environment variables for nested options. */ /** The prefix of environment variables for nested options. */
const nestedOptionEnvPrefix = { const nestedOptionEnvPrefix = {
'AccountLockoutOptions': 'PARSE_SERVER_ACCOUNT_LOCKOUT_', AccountLockoutOptions: 'PARSE_SERVER_ACCOUNT_LOCKOUT_',
'CustomPagesOptions': 'PARSE_SERVER_CUSTOM_PAGES_', CustomPagesOptions: 'PARSE_SERVER_CUSTOM_PAGES_',
'DatabaseOptions': 'PARSE_SERVER_DATABASE_', DatabaseOptions: 'PARSE_SERVER_DATABASE_',
'FileUploadOptions': 'PARSE_SERVER_FILE_UPLOAD_', FileUploadOptions: 'PARSE_SERVER_FILE_UPLOAD_',
'IdempotencyOptions': 'PARSE_SERVER_EXPERIMENTAL_IDEMPOTENCY_', IdempotencyOptions: 'PARSE_SERVER_EXPERIMENTAL_IDEMPOTENCY_',
'LiveQueryOptions': 'PARSE_SERVER_LIVEQUERY_', LiveQueryOptions: 'PARSE_SERVER_LIVEQUERY_',
'LiveQueryServerOptions': 'PARSE_LIVE_QUERY_SERVER_', LiveQueryServerOptions: 'PARSE_LIVE_QUERY_SERVER_',
'PagesCustomUrlsOptions': 'PARSE_SERVER_PAGES_CUSTOM_URL_', PagesCustomUrlsOptions: 'PARSE_SERVER_PAGES_CUSTOM_URL_',
'PagesOptions': 'PARSE_SERVER_PAGES_', PagesOptions: 'PARSE_SERVER_PAGES_',
'PagesRoute': 'PARSE_SERVER_PAGES_ROUTE_', PagesRoute: 'PARSE_SERVER_PAGES_ROUTE_',
'ParseServerOptions': 'PARSE_SERVER_', ParseServerOptions: 'PARSE_SERVER_',
'PasswordPolicyOptions': 'PARSE_SERVER_PASSWORD_POLICY_', PasswordPolicyOptions: 'PARSE_SERVER_PASSWORD_POLICY_',
'SecurityOptions': 'PARSE_SERVER_SECURITY_', SecurityOptions: 'PARSE_SERVER_SECURITY_',
'SchemaOptions': 'PARSE_SERVER_SCHEMA_', SchemaOptions: 'PARSE_SERVER_SCHEMA_',
LogLevels: 'PARSE_SERVER_LOG_LEVELS_',
}; };
function last(array) { function last(array) {
return array[array.length - 1]; return array[array.length - 1];
} }
const letters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' const letters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ';
function toENV(key) { function toENV(key) {
let str = ''; let str = '';
let previousIsUpper = false; let previousIsUpper = false;
@@ -68,13 +70,15 @@ function toENV(key) {
} }
function getCommentValue(comment) { function getCommentValue(comment) {
if (!comment) { return } if (!comment) {
return;
}
return comment.value.trim(); return comment.value.trim();
} }
function getENVPrefix(iface) { function getENVPrefix(iface) {
if (nestedOptionEnvPrefix[iface.id.name]) { if (nestedOptionEnvPrefix[iface.id.name]) {
return nestedOptionEnvPrefix[iface.id.name] return nestedOptionEnvPrefix[iface.id.name];
} }
} }
@@ -86,11 +90,11 @@ function processProperty(property, iface) {
if (!firstComment) { if (!firstComment) {
return; return;
} }
const lines = firstComment.split('\n').map((line) => line.trim()); const lines = firstComment.split('\n').map(line => line.trim());
let help = ''; let help = '';
let envLine; let envLine;
let defaultLine; let defaultLine;
lines.forEach((line) => { lines.forEach(line => {
if (line.indexOf(':ENV:') === 0) { if (line.indexOf(':ENV:') === 0) {
envLine = line; envLine = line;
} else if (line.indexOf(':DEFAULT:') === 0) { } else if (line.indexOf(':DEFAULT:') === 0) {
@@ -103,7 +107,7 @@ function processProperty(property, iface) {
if (envLine) { if (envLine) {
env = envLine.split(' ')[1]; env = envLine.split(' ')[1];
} else { } else {
env = (prefix + toENV(name)); env = prefix + toENV(name);
} }
let defaultValue; let defaultValue;
if (defaultLine) { if (defaultLine) {
@@ -123,21 +127,20 @@ function processProperty(property, iface) {
defaultValue, defaultValue,
types: property.value.types, types: property.value.types,
typeAnnotation: property.value.typeAnnotation, typeAnnotation: property.value.typeAnnotation,
required: isRequired required: isRequired,
}; };
} }
function doInterface(iface) { function doInterface(iface) {
return iface.body.properties return iface.body.properties
.sort((a, b) => a.key.name.localeCompare(b.key.name)) .sort((a, b) => a.key.name.localeCompare(b.key.name))
.map((prop) => processProperty(prop, iface)) .map(prop => processProperty(prop, iface))
.filter((e) => e !== undefined); .filter(e => e !== undefined);
} }
function mapperFor(elt, t) { function mapperFor(elt, t) {
const p = t.identifier('parsers'); const p = t.identifier('parsers');
const wrap = (identifier) => t.memberExpression(p, identifier); const wrap = identifier => t.memberExpression(p, identifier);
if (t.isNumberTypeAnnotation(elt)) { if (t.isNumberTypeAnnotation(elt)) {
return t.callExpression(wrap(t.identifier('numberParser')), [t.stringLiteral(elt.name)]); return t.callExpression(wrap(t.identifier('numberParser')), [t.stringLiteral(elt.name)]);
@@ -171,27 +174,29 @@ function parseDefaultValue(elt, value, t) {
literalValue = t.numericLiteral(parsers.numberOrBoolParser('')(value)); literalValue = t.numericLiteral(parsers.numberOrBoolParser('')(value));
} else if (t.isArrayTypeAnnotation(elt)) { } else if (t.isArrayTypeAnnotation(elt)) {
const array = parsers.objectParser(value); const array = parsers.objectParser(value);
literalValue = t.arrayExpression(array.map((value) => { literalValue = t.arrayExpression(
if (typeof value == 'string') { array.map(value => {
return t.stringLiteral(value); if (typeof value == 'string') {
} else if (typeof value == 'number') { return t.stringLiteral(value);
return t.numericLiteral(value); } else if (typeof value == 'number') {
} else if (typeof value == 'object') { return t.numericLiteral(value);
const object = parsers.objectParser(value); } else if (typeof value == 'object') {
const props = Object.entries(object).map(([k, v]) => { const object = parsers.objectParser(value);
if (typeof v == 'string') { const props = Object.entries(object).map(([k, v]) => {
return t.objectProperty(t.identifier(k), t.stringLiteral(v)); if (typeof v == 'string') {
} else if (typeof v == 'number') { return t.objectProperty(t.identifier(k), t.stringLiteral(v));
return t.objectProperty(t.identifier(k), t.numericLiteral(v)); } else if (typeof v == 'number') {
} else if (typeof v == 'boolean') { return t.objectProperty(t.identifier(k), t.numericLiteral(v));
return t.objectProperty(t.identifier(k), t.booleanLiteral(v)); } else if (typeof v == 'boolean') {
} return t.objectProperty(t.identifier(k), t.booleanLiteral(v));
}); }
return t.objectExpression(props); });
} else { return t.objectExpression(props);
throw new Error('Unable to parse array'); } else {
} throw new Error('Unable to parse array');
})); }
})
);
} else if (t.isAnyTypeAnnotation(elt)) { } else if (t.isAnyTypeAnnotation(elt)) {
literalValue = t.arrayExpression([]); literalValue = t.arrayExpression([]);
} else if (t.isBooleanTypeAnnotation(elt)) { } else if (t.isBooleanTypeAnnotation(elt)) {
@@ -204,15 +209,16 @@ function parseDefaultValue(elt, value, t) {
if (nestedOptionTypes.includes(type)) { if (nestedOptionTypes.includes(type)) {
const object = parsers.objectParser(value); const object = parsers.objectParser(value);
const props = Object.keys(object).map((key) => { const props = Object.keys(object).map(key => {
return t.objectProperty(key, object[value]); return t.objectProperty(key, object[value]);
}); });
literalValue = t.objectExpression(props); literalValue = t.objectExpression(props);
} }
if (type == 'ProtectedFields') { if (type == 'ProtectedFields') {
const prop = t.objectProperty( const prop = t.objectProperty(
t.stringLiteral('_User'), t.objectPattern([ t.stringLiteral('_User'),
t.objectProperty(t.stringLiteral('*'), t.arrayExpression([t.stringLiteral('email')])) t.objectPattern([
t.objectProperty(t.stringLiteral('*'), t.arrayExpression([t.stringLiteral('email')])),
]) ])
); );
literalValue = t.objectExpression([prop]); literalValue = t.objectExpression([prop]);
@@ -223,62 +229,69 @@ function parseDefaultValue(elt, value, t) {
function inject(t, list) { function inject(t, list) {
let comments = ''; let comments = '';
const results = list.map((elt) => { const results = list
if (!elt.name) { .map(elt => {
return; if (!elt.name) {
} return;
const props = ['env', 'help'].map((key) => {
if (elt[key]) {
return t.objectProperty(t.stringLiteral(key), t.stringLiteral(elt[key]));
} }
}).filter((e) => e !== undefined); const props = ['env', 'help']
if (elt.required) { .map(key => {
props.push(t.objectProperty(t.stringLiteral('required'), t.booleanLiteral(true))) if (elt[key]) {
} return t.objectProperty(t.stringLiteral(key), t.stringLiteral(elt[key]));
const action = mapperFor(elt, t); }
if (action) { })
props.push(t.objectProperty(t.stringLiteral('action'), action)) .filter(e => e !== undefined);
} if (elt.required) {
if (elt.defaultValue) { props.push(t.objectProperty(t.stringLiteral('required'), t.booleanLiteral(true)));
const parsedValue = parseDefaultValue(elt, elt.defaultValue, t);
if (parsedValue) {
props.push(t.objectProperty(t.stringLiteral('default'), parsedValue));
} else {
throw new Error(`Unable to parse value for ${elt.name} `);
} }
} const action = mapperFor(elt, t);
let type = elt.type.replace('TypeAnnotation', ''); if (action) {
if (type === 'Generic') { props.push(t.objectProperty(t.stringLiteral('action'), action));
type = elt.typeAnnotation.id.name; }
} if (elt.defaultValue) {
if (type === 'Array') { const parsedValue = parseDefaultValue(elt, elt.defaultValue, t);
type = elt.typeAnnotation.elementType.id if (parsedValue) {
? `${elt.typeAnnotation.elementType.id.name}[]` props.push(t.objectProperty(t.stringLiteral('default'), parsedValue));
: `${elt.typeAnnotation.elementType.type.replace('TypeAnnotation', '')}[]`; } else {
} throw new Error(`Unable to parse value for ${elt.name} `);
if (type === 'NumberOrBoolean') { }
type = 'Number|Boolean'; }
} let type = elt.type.replace('TypeAnnotation', '');
if (type === 'NumberOrString') { if (type === 'Generic') {
type = 'Number|String'; type = elt.typeAnnotation.id.name;
} }
if (type === 'Adapter') { if (type === 'Array') {
const adapterType = elt.typeAnnotation.typeParameters.params[0].id.name; type = elt.typeAnnotation.elementType.id
type = `Adapter<${adapterType}>`; ? `${elt.typeAnnotation.elementType.id.name}[]`
} : `${elt.typeAnnotation.elementType.type.replace('TypeAnnotation', '')}[]`;
comments += ` * @property {${type}} ${elt.name} ${elt.help}\n`; }
const obj = t.objectExpression(props); if (type === 'NumberOrBoolean') {
return t.objectProperty(t.stringLiteral(elt.name), obj); type = 'Number|Boolean';
}).filter((elt) => { }
return elt != undefined; if (type === 'NumberOrString') {
}); type = 'Number|String';
}
if (type === 'Adapter') {
const adapterType = elt.typeAnnotation.typeParameters.params[0].id.name;
type = `Adapter<${adapterType}>`;
}
comments += ` * @property {${type}} ${elt.name} ${elt.help}\n`;
const obj = t.objectExpression(props);
return t.objectProperty(t.stringLiteral(elt.name), obj);
})
.filter(elt => {
return elt != undefined;
});
return { results, comments }; return { results, comments };
} }
const makeRequire = function (variableName, module, t) { const makeRequire = function (variableName, module, t) {
const decl = t.variableDeclarator(t.identifier(variableName), t.callExpression(t.identifier('require'), [t.stringLiteral(module)])); const decl = t.variableDeclarator(
return t.variableDeclaration('var', [decl]) t.identifier(variableName),
} t.callExpression(t.identifier('require'), [t.stringLiteral(module)])
);
return t.variableDeclaration('var', [decl]);
};
let docs = ``; let docs = ``;
const plugin = function (babel) { const plugin = function (babel) {
const t = babel.types; const t = babel.types;
@@ -294,27 +307,34 @@ const plugin = function (babel) {
}, },
ExportDeclaration: function (path) { ExportDeclaration: function (path) {
// Export declaration on an interface // Export declaration on an interface
if (path.node && path.node.declaration && path.node.declaration.type == 'InterfaceDeclaration') { if (
path.node &&
path.node.declaration &&
path.node.declaration.type == 'InterfaceDeclaration'
) {
const { results, comments } = inject(t, doInterface(path.node.declaration)); const { results, comments } = inject(t, doInterface(path.node.declaration));
const id = path.node.declaration.id.name; const id = path.node.declaration.id.name;
const exports = t.memberExpression(moduleExports, t.identifier(id)); const exports = t.memberExpression(moduleExports, t.identifier(id));
docs += `/**\n * @interface ${id}\n${comments} */\n\n`; docs += `/**\n * @interface ${id}\n${comments} */\n\n`;
path.replaceWith( path.replaceWith(t.assignmentExpression('=', exports, t.objectExpression(results)));
t.assignmentExpression('=', exports, t.objectExpression(results))
)
} }
} },
} },
} };
}; };
const auxiliaryCommentBefore = ` const auxiliaryCommentBefore = `
**** GENERATED CODE **** **** GENERATED CODE ****
This code has been generated by resources/buildConfigDefinitions.js This code has been generated by resources/buildConfigDefinitions.js
Do not edit manually, but update Options/index.js Do not edit manually, but update Options/index.js
` `;
const babel = require("@babel/core"); const babel = require('@babel/core');
const res = babel.transformFileSync('./src/Options/index.js', { plugins: [plugin, '@babel/transform-flow-strip-types'], babelrc: false, auxiliaryCommentBefore, sourceMaps: false }); const res = babel.transformFileSync('./src/Options/index.js', {
plugins: [plugin, '@babel/transform-flow-strip-types'],
babelrc: false,
auxiliaryCommentBefore,
sourceMaps: false,
});
require('fs').writeFileSync('./src/Options/Definitions.js', res.code + '\n'); require('fs').writeFileSync('./src/Options/Definitions.js', res.code + '\n');
require('fs').writeFileSync('./src/Options/docs.js', docs); require('fs').writeFileSync('./src/Options/docs.js', docs);

View File

@@ -182,6 +182,41 @@ describe('Cloud Code Logger', () => {
}); });
}); });
it('should log cloud function triggers using the custom log level', async () => {
Parse.Cloud.beforeSave('TestClass', () => {});
Parse.Cloud.afterSave('TestClass', () => {});
const execTest = async (logLevel, triggerBeforeSuccess, triggerAfter) => {
await reconfigureServer({
silent: true,
logLevel,
logLevels: {
triggerAfter,
triggerBeforeSuccess,
},
});
spy = spyOn(Config.get('test').loggerController.adapter, 'log').and.callThrough();
const obj = new Parse.Object('TestClass');
await obj.save();
return {
beforeSave: spy.calls
.allArgs()
.find(log => log[1].startsWith('beforeSave triggered for TestClass for user '))?.[0],
afterSave: spy.calls
.allArgs()
.find(log => log[1].startsWith('afterSave triggered for TestClass for user '))?.[0],
};
};
let calls = await execTest('silly', 'silly', 'debug');
expect(calls).toEqual({ beforeSave: 'silly', afterSave: 'debug' });
calls = await execTest('info', 'warn', 'debug');
expect(calls).toEqual({ beforeSave: 'warn', afterSave: undefined });
});
it('should log cloud function failure', done => { it('should log cloud function failure', done => {
Parse.Cloud.define('aFunction', () => { Parse.Cloud.define('aFunction', () => {
throw 'it failed!'; throw 'it failed!';

View File

@@ -254,7 +254,7 @@ export class GridFSBucketAdapter extends FilesAdapter {
stream.on('data', chunk => { stream.on('data', chunk => {
res.write(chunk); res.write(chunk);
}); });
stream.on('error', (e) => { stream.on('error', e => {
res.status(404); res.status(404);
res.send(e.message); res.send(e.message);
}); });

View File

@@ -2,19 +2,21 @@
// configured. // configured.
// mount is the URL for the root of the API; includes http, domain, etc. // mount is the URL for the root of the API; includes http, domain, etc.
import { isBoolean, isString } from 'lodash';
import net from 'net';
import AppCache from './cache'; import AppCache from './cache';
import DatabaseController from './Controllers/DatabaseController'; import DatabaseController from './Controllers/DatabaseController';
import net from 'net'; import { logLevels as validLogLevels } from './Controllers/LoggerController';
import { import {
IdempotencyOptions,
FileUploadOptions,
AccountLockoutOptions, AccountLockoutOptions,
FileUploadOptions,
IdempotencyOptions,
LogLevels,
PagesOptions, PagesOptions,
SecurityOptions,
SchemaOptions,
ParseServerOptions, ParseServerOptions,
SchemaOptions,
SecurityOptions,
} from './Options/Definitions'; } from './Options/Definitions';
import { isBoolean, isString } from 'lodash';
function removeTrailingSlash(str) { function removeTrailingSlash(str) {
if (!str) { if (!str) {
@@ -82,6 +84,7 @@ export class Config {
schema, schema,
requestKeywordDenylist, requestKeywordDenylist,
allowExpiredAuthDataToken, allowExpiredAuthDataToken,
logLevels,
}) { }) {
if (masterKey === readOnlyMasterKey) { if (masterKey === readOnlyMasterKey) {
throw new Error('masterKey and readOnlyMasterKey should be different'); throw new Error('masterKey and readOnlyMasterKey should be different');
@@ -123,6 +126,7 @@ export class Config {
this.validateEnforcePrivateUsers(enforcePrivateUsers); this.validateEnforcePrivateUsers(enforcePrivateUsers);
this.validateAllowExpiredAuthDataToken(allowExpiredAuthDataToken); this.validateAllowExpiredAuthDataToken(allowExpiredAuthDataToken);
this.validateRequestKeywordDenylist(requestKeywordDenylist); this.validateRequestKeywordDenylist(requestKeywordDenylist);
this.validateLogLevels(logLevels);
} }
static validateRequestKeywordDenylist(requestKeywordDenylist) { static validateRequestKeywordDenylist(requestKeywordDenylist) {
@@ -501,6 +505,18 @@ export class Config {
} }
} }
static validateLogLevels(logLevels) {
for (const key of Object.keys(LogLevels)) {
if (logLevels[key]) {
if (validLogLevels.indexOf(logLevels[key]) === -1) {
throw `'${key}' must be one of ${JSON.stringify(validLogLevels)}`;
}
} else {
logLevels[key] = LogLevels[key].default;
}
}
}
generateEmailVerifyTokenExpiresAt() { generateEmailVerifyTokenExpiresAt() {
if (!this.verifyUserEmails || !this.emailVerifyTokenValidityDuration) { if (!this.verifyUserEmails || !this.emailVerifyTokenValidityDuration) {
return undefined; return undefined;

View File

@@ -16,7 +16,7 @@ export const LogOrder = {
ASCENDING: 'asc', ASCENDING: 'asc',
}; };
const logLevels = ['error', 'warn', 'info', 'debug', 'verbose', 'silly']; export const logLevels = ['error', 'warn', 'info', 'debug', 'verbose', 'silly'];
export class LoggerController extends AdaptableController { export class LoggerController extends AdaptableController {
constructor(adapter, appId, options = { logLevel: 'info' }) { constructor(adapter, appId, options = { logLevel: 'info' }) {

View File

@@ -290,6 +290,12 @@ module.exports.ParseServerOptions = {
env: 'PARSE_SERVER_LOG_LEVEL', env: 'PARSE_SERVER_LOG_LEVEL',
help: 'Sets the level for logs', help: 'Sets the level for logs',
}, },
logLevels: {
env: 'PARSE_SERVER_LOG_LEVELS',
help: '(Optional) Overrides the log levels used internally by Parse Server to log events.',
action: parsers.objectParser,
default: {},
},
logsFolder: { logsFolder: {
env: 'PARSE_SERVER_LOGS_FOLDER', env: 'PARSE_SERVER_LOGS_FOLDER',
help: "Folder for the logs (defaults to './logs'); set to null to disable file based logging", help: "Folder for the logs (defaults to './logs'); set to null to disable file based logging",
@@ -898,3 +904,23 @@ module.exports.AuthAdapter = {
default: true, default: true,
}, },
}; };
module.exports.LogLevels = {
triggerAfter: {
env: 'PARSE_SERVER_LOG_LEVELS_TRIGGER_AFTER',
help:
'Log level used by the Cloud Code Triggers `afterSave`, `afterDelete`, `afterSaveFile`, `afterDeleteFile`, `afterFind`, `afterLogout`. Default is `info`.',
default: 'info',
},
triggerBeforeError: {
env: 'PARSE_SERVER_LOG_LEVELS_TRIGGER_BEFORE_ERROR',
help:
'Log level used by the Cloud Code Triggers `beforeSave`, `beforeSaveFile`, `beforeDeleteFile`, `beforeFind`, `beforeLogin` on error. Default is `error`.',
default: 'error',
},
triggerBeforeSuccess: {
env: 'PARSE_SERVER_LOG_LEVELS_TRIGGER_BEFORE_SUCCESS',
help:
'Log level used by the Cloud Code Triggers `beforeSave`, `beforeSaveFile`, `beforeDeleteFile`, `beforeFind`, `beforeLogin` on success. Default is `info`.',
default: 'info',
},
};

View File

@@ -56,6 +56,7 @@
* @property {LiveQueryServerOptions} liveQueryServerOptions Live query server configuration options (will start the liveQuery server) * @property {LiveQueryServerOptions} liveQueryServerOptions Live query server configuration options (will start the liveQuery server)
* @property {Adapter<LoggerAdapter>} loggerAdapter Adapter module for the logging sub-system * @property {Adapter<LoggerAdapter>} loggerAdapter Adapter module for the logging sub-system
* @property {String} logLevel Sets the level for logs * @property {String} logLevel Sets the level for logs
* @property {LogLevels} logLevels (Optional) Overrides the log levels used internally by Parse Server to log events.
* @property {String} logsFolder Folder for the logs (defaults to './logs'); set to null to disable file based logging * @property {String} logsFolder Folder for the logs (defaults to './logs'); set to null to disable file based logging
* @property {String} masterKey Your Parse Master Key * @property {String} masterKey Your Parse Master Key
* @property {String[]} masterKeyIps (Optional) Restricts the use of master key permissions to a list of IP addresses.<br><br>This option accepts a list of single IP addresses, for example:<br>`['10.0.0.1', '10.0.0.2']`<br><br>You can also use CIDR notation to specify an IP address range, for example:<br>`['10.0.1.0/24']`<br><br>Special cases:<br>- Setting an empty array `[]` means that `masterKey`` cannot be used even in Parse Server Cloud Code.<br>- Setting `['0.0.0.0/0']` means disabling the filter and the master key can be used from any IP address.<br><br>To connect Parse Dashboard from a different server requires to add the IP address of the server that hosts Parse Dashboard because Parse Dashboard uses the master key.<br><br>Defaults to `['127.0.0.1', '::1']` which means that only `localhost`, the server itself, is allowed to use the master key. * @property {String[]} masterKeyIps (Optional) Restricts the use of master key permissions to a list of IP addresses.<br><br>This option accepts a list of single IP addresses, for example:<br>`['10.0.0.1', '10.0.0.2']`<br><br>You can also use CIDR notation to specify an IP address range, for example:<br>`['10.0.1.0/24']`<br><br>Special cases:<br>- Setting an empty array `[]` means that `masterKey`` cannot be used even in Parse Server Cloud Code.<br>- Setting `['0.0.0.0/0']` means disabling the filter and the master key can be used from any IP address.<br><br>To connect Parse Dashboard from a different server requires to add the IP address of the server that hosts Parse Dashboard because Parse Dashboard uses the master key.<br><br>Defaults to `['127.0.0.1', '::1']` which means that only `localhost`, the server itself, is allowed to use the master key.
@@ -215,3 +216,10 @@
* @interface AuthAdapter * @interface AuthAdapter
* @property {Boolean} enabled Is `true` if the auth adapter is enabled, `false` otherwise. * @property {Boolean} enabled Is `true` if the auth adapter is enabled, `false` otherwise.
*/ */
/**
* @interface LogLevels
* @property {String} triggerAfter Log level used by the Cloud Code Triggers `afterSave`, `afterDelete`, `afterSaveFile`, `afterDeleteFile`, `afterFind`, `afterLogout`. Default is `info`.
 * @property {String} triggerBeforeError Log level used by the Cloud Code Triggers `beforeSave`, `beforeSaveFile`, `beforeDeleteFile`, `beforeFind`, `beforeLogin` on error. Default is `error`.
* @property {String} triggerBeforeSuccess Log level used by the Cloud Code Triggers `beforeSave`, `beforeSaveFile`, `beforeDeleteFile`, `beforeFind`, `beforeLogin` on success. Default is `info`.
*/

View File

@@ -1,11 +1,11 @@
// @flow // @flow
import { AnalyticsAdapter } from '../Adapters/Analytics/AnalyticsAdapter'; import { AnalyticsAdapter } from '../Adapters/Analytics/AnalyticsAdapter';
import { FilesAdapter } from '../Adapters/Files/FilesAdapter';
import { LoggerAdapter } from '../Adapters/Logger/LoggerAdapter';
import { StorageAdapter } from '../Adapters/Storage/StorageAdapter';
import { CacheAdapter } from '../Adapters/Cache/CacheAdapter'; import { CacheAdapter } from '../Adapters/Cache/CacheAdapter';
import { MailAdapter } from '../Adapters/Email/MailAdapter'; import { MailAdapter } from '../Adapters/Email/MailAdapter';
import { FilesAdapter } from '../Adapters/Files/FilesAdapter';
import { LoggerAdapter } from '../Adapters/Logger/LoggerAdapter';
import { PubSubAdapter } from '../Adapters/PubSub/PubSubAdapter'; import { PubSubAdapter } from '../Adapters/PubSub/PubSubAdapter';
import { StorageAdapter } from '../Adapters/Storage/StorageAdapter';
import { WSSAdapter } from '../Adapters/WebSocketServer/WSSAdapter'; import { WSSAdapter } from '../Adapters/WebSocketServer/WSSAdapter';
import { CheckGroup } from '../Security/CheckGroup'; import { CheckGroup } from '../Security/CheckGroup';
@@ -81,6 +81,9 @@ export interface ParseServerOptions {
verbose: ?boolean; verbose: ?boolean;
/* Sets the level for logs */ /* Sets the level for logs */
logLevel: ?string; logLevel: ?string;
/* (Optional) Overrides the log levels used internally by Parse Server to log events.
:DEFAULT: {} */
logLevels: ?LogLevels;
/* Maximum number of logs to keep. If not set, no logs will be removed. This can be a number of files or number of days. If using days, add 'd' as the suffix. (default: null) */ /* Maximum number of logs to keep. If not set, no logs will be removed. This can be a number of files or number of days. If using days, add 'd' as the suffix. (default: null) */
maxLogFiles: ?NumberOrString; maxLogFiles: ?NumberOrString;
/* Disables console output /* Disables console output
@@ -520,3 +523,18 @@ export interface AuthAdapter {
*/ */
enabled: ?boolean; enabled: ?boolean;
} }
export interface LogLevels {
/* Log level used by the Cloud Code Triggers `afterSave`, `afterDelete`, `afterSaveFile`, `afterDeleteFile`, `afterFind`, `afterLogout`. Default is `info`.
:DEFAULT: info
*/
triggerAfter: ?string;
/* Log level used by the Cloud Code Triggers `beforeSave`, `beforeSaveFile`, `beforeDeleteFile`, `beforeFind`, `beforeLogin` on success. Default is `info`.
:DEFAULT: info
*/
triggerBeforeSuccess: ?string;
/* Log level used by the Cloud Code Triggers `beforeSave`, `beforeSaveFile`, `beforeDeleteFile`, `beforeFind`, `beforeLogin` on error. Default is `error`.
:DEFAULT: error
*/
triggerBeforeError: ?string;
}

View File

@@ -8,9 +8,9 @@ import Config from '../../Config';
import Parse from 'parse/node'; import Parse from 'parse/node';
/** /**
* The security checks group for Parse Server configuration. * The security checks group for Parse Server configuration.
* Checks common Parse Server parameters such as access keys. * Checks common Parse Server parameters such as access keys.
*/ */
class CheckGroupDatabase extends CheckGroup { class CheckGroupDatabase extends CheckGroup {
setName() { setName() {
return 'Database'; return 'Database';
@@ -23,7 +23,8 @@ class CheckGroupDatabase extends CheckGroup {
new Check({ new Check({
title: 'Secure database password', title: 'Secure database password',
warning: 'The database password is insecure and vulnerable to brute force attacks.', warning: 'The database password is insecure and vulnerable to brute force attacks.',
solution: 'Choose a longer and/or more complex password with a combination of upper- and lowercase characters, numbers and special characters.', solution:
'Choose a longer and/or more complex password with a combination of upper- and lowercase characters, numbers and special characters.',
check: () => { check: () => {
const password = databaseUrl.match(/\/\/\S+:(\S+)@/)[1]; const password = databaseUrl.match(/\/\/\S+:(\S+)@/)[1];
const hasUpperCase = /[A-Z]/.test(password); const hasUpperCase = /[A-Z]/.test(password);

View File

@@ -373,9 +373,9 @@ function userIdForLog(auth) {
return auth && auth.user ? auth.user.id : undefined; return auth && auth.user ? auth.user.id : undefined;
} }
function logTriggerAfterHook(triggerType, className, input, auth) { function logTriggerAfterHook(triggerType, className, input, auth, logLevel) {
const cleanInput = logger.truncateLogMessage(JSON.stringify(input)); const cleanInput = logger.truncateLogMessage(JSON.stringify(input));
logger.info( logger[logLevel](
`${triggerType} triggered for ${className} for user ${userIdForLog( `${triggerType} triggered for ${className} for user ${userIdForLog(
auth auth
)}:\n Input: ${cleanInput}`, )}:\n Input: ${cleanInput}`,
@@ -387,10 +387,10 @@ function logTriggerAfterHook(triggerType, className, input, auth) {
); );
} }
function logTriggerSuccessBeforeHook(triggerType, className, input, result, auth) { function logTriggerSuccessBeforeHook(triggerType, className, input, result, auth, logLevel) {
const cleanInput = logger.truncateLogMessage(JSON.stringify(input)); const cleanInput = logger.truncateLogMessage(JSON.stringify(input));
const cleanResult = logger.truncateLogMessage(JSON.stringify(result)); const cleanResult = logger.truncateLogMessage(JSON.stringify(result));
logger.info( logger[logLevel](
`${triggerType} triggered for ${className} for user ${userIdForLog( `${triggerType} triggered for ${className} for user ${userIdForLog(
auth auth
)}:\n Input: ${cleanInput}\n Result: ${cleanResult}`, )}:\n Input: ${cleanInput}\n Result: ${cleanResult}`,
@@ -402,9 +402,9 @@ function logTriggerSuccessBeforeHook(triggerType, className, input, result, auth
); );
} }
function logTriggerErrorBeforeHook(triggerType, className, input, auth, error) { function logTriggerErrorBeforeHook(triggerType, className, input, auth, error, logLevel) {
const cleanInput = logger.truncateLogMessage(JSON.stringify(input)); const cleanInput = logger.truncateLogMessage(JSON.stringify(input));
logger.error( logger[logLevel](
`${triggerType} failed for ${className} for user ${userIdForLog( `${triggerType} failed for ${className} for user ${userIdForLog(
auth auth
)}:\n Input: ${cleanInput}\n Error: ${JSON.stringify(error)}`, )}:\n Input: ${cleanInput}\n Error: ${JSON.stringify(error)}`,
@@ -444,7 +444,14 @@ export function maybeRunAfterFindTrigger(
reject(error); reject(error);
} }
); );
logTriggerSuccessBeforeHook(triggerType, className, 'AfterFind', JSON.stringify(objects), auth); logTriggerSuccessBeforeHook(
triggerType,
className,
'AfterFind',
JSON.stringify(objects),
auth,
config.logLevels.triggerBeforeSuccess
);
request.objects = objects.map(object => { request.objects = objects.map(object => {
//setting the class name to transform into parse object //setting the class name to transform into parse object
object.className = className; object.className = className;
@@ -468,7 +475,13 @@ export function maybeRunAfterFindTrigger(
}) })
.then(success, error); .then(success, error);
}).then(results => { }).then(results => {
logTriggerAfterHook(triggerType, className, JSON.stringify(results), auth); logTriggerAfterHook(
triggerType,
className,
JSON.stringify(results),
auth,
config.logLevels.triggerAfter
);
return results; return results;
}); });
} }
@@ -842,7 +855,10 @@ export function maybeRunTrigger(
parseObject.className, parseObject.className,
parseObject.toJSON(), parseObject.toJSON(),
object, object,
auth auth,
triggerType.startsWith('after')
? config.logLevels.triggerAfter
: config.logLevels.triggerBeforeSuccess
); );
if ( if (
triggerType === Types.beforeSave || triggerType === Types.beforeSave ||
@@ -860,7 +876,8 @@ export function maybeRunTrigger(
parseObject.className, parseObject.className,
parseObject.toJSON(), parseObject.toJSON(),
auth, auth,
error error,
config.logLevels.triggerBeforeError
); );
reject(error); reject(error);
} }
@@ -885,7 +902,13 @@ export function maybeRunTrigger(
triggerType === Types.afterDelete || triggerType === Types.afterDelete ||
triggerType === Types.afterLogin triggerType === Types.afterLogin
) { ) {
logTriggerAfterHook(triggerType, parseObject.className, parseObject.toJSON(), auth); logTriggerAfterHook(
triggerType,
parseObject.className,
parseObject.toJSON(),
auth,
config.logLevels.triggerAfter
);
} }
// beforeSave is expected to return null (nothing) // beforeSave is expected to return null (nothing)
if (triggerType === Types.beforeSave) { if (triggerType === Types.beforeSave) {
@@ -965,7 +988,8 @@ export async function maybeRunFileTrigger(triggerType, fileObject, config, auth)
'Parse.File', 'Parse.File',
{ ...fileObject.file.toJSON(), fileSize: fileObject.fileSize }, { ...fileObject.file.toJSON(), fileSize: fileObject.fileSize },
result, result,
auth auth,
config.logLevels.triggerBeforeSuccess
); );
return result || fileObject; return result || fileObject;
} catch (error) { } catch (error) {
@@ -974,7 +998,8 @@ export async function maybeRunFileTrigger(triggerType, fileObject, config, auth)
'Parse.File', 'Parse.File',
{ ...fileObject.file.toJSON(), fileSize: fileObject.fileSize }, { ...fileObject.file.toJSON(), fileSize: fileObject.fileSize },
auth, auth,
error error,
config.logLevels.triggerBeforeError
); );
throw error; throw error;
} }