Better logging with winston

This commit is contained in:
Florent Vilmart
2016-03-26 13:47:44 -04:00
parent d47a756f80
commit 5c1fe3a325
15 changed files with 180 additions and 255 deletions

1
.gitignore vendored
View File

@@ -1,5 +1,6 @@
# Logs
logs
test_logs
*.log
# Runtime data

View File

@@ -43,6 +43,7 @@
"request": "^2.65.0",
"tv4": "^1.2.7",
"winston": "^2.1.1",
"winston-daily-rotate-file": "^1.0.1",
"ws": "^1.0.1"
},
"devDependencies": {

View File

@@ -1,35 +1,10 @@
var FileLoggerAdapter = require('../src/Adapters/Logger/FileLoggerAdapter').FileLoggerAdapter;
var Parse = require('parse/node').Parse;
var request = require('request');
var fs = require('fs');
var LOGS_FOLDER = './test_logs/';
// Recursively deletes the directory tree rooted at `dirPath`.
// Does nothing when the path does not exist.
var deleteFolderRecursive = function(dirPath) {
  if (!fs.existsSync(dirPath)) {
    return;
  }
  for (const entry of fs.readdirSync(dirPath)) {
    const entryPath = dirPath + "/" + entry;
    if (fs.lstatSync(entryPath).isDirectory()) {
      // descend into the subdirectory first so it is empty before removal
      deleteFolderRecursive(entryPath);
    } else {
      // plain file (or symlink): remove it directly
      fs.unlinkSync(entryPath);
    }
  }
  // all children are gone, the directory itself can be removed now
  fs.rmdirSync(dirPath);
};
describe('info logs', () => {
afterEach((done) => {
deleteFolderRecursive(LOGS_FOLDER);
done();
});
it("Verify INFO logs", (done) => {
var fileLoggerAdapter = new FileLoggerAdapter({
logsFolder: LOGS_FOLDER
});
var fileLoggerAdapter = new FileLoggerAdapter();
fileLoggerAdapter.info('testing info logs', () => {
fileLoggerAdapter.query({
size: 1,
@@ -49,11 +24,6 @@ describe('info logs', () => {
describe('error logs', () => {
afterEach((done) => {
deleteFolderRecursive(LOGS_FOLDER);
done();
});
it("Verify ERROR logs", (done) => {
var fileLoggerAdapter = new FileLoggerAdapter();
fileLoggerAdapter.error('testing error logs', () => {

View File

@@ -10,7 +10,11 @@ describe('LoggerController', () => {
expect(() => {
loggerController.getLogs(query).then(function(res) {
expect(res.length).toBe(0);
expect(res.length).not.toBe(0);
done();
}).catch((err) => {
console.error(err);
fail("should not fail");
done();
})
}).not.toThrow();
@@ -58,8 +62,8 @@ describe('LoggerController', () => {
it('can check process a query witout throwing', (done) => {
// Make mock request
var query = {
from: "2015-01-01",
until: "2016-01-01",
from: "2016-01-01",
until: "2016-01-30",
size: 5,
order: 'desc',
level: 'error'
@@ -71,13 +75,15 @@ describe('LoggerController', () => {
loggerController.getLogs(query).then(function(res) {
expect(res.length).toBe(0);
done();
}).catch((err) => {
console.error(err);
fail("should not fail");
done();
})
}).not.toThrow();
});
it('should throw without an adapter', (done) => {
expect(() => {
var loggerController = new LoggerController();
}).toThrow();

View File

@@ -9,6 +9,7 @@ import { LoggerAdapter } from './LoggerAdapter';
import winston from 'winston';
import fs from 'fs';
import { Parse } from 'parse/node';
import { logger, configure } from '../../logger';
const MILLISECONDS_IN_A_DAY = 24 * 60 * 60 * 1000;
const CACHE_TIME = 1000 * 60;
@@ -57,24 +58,6 @@ let _hasValidCache = (from, until, level) => {
return false;
}
// Re-attaches date-stamped file transports to the given loggers so that
// output is written under the current day's file name.
let _renewTransports = ({infoLogger, errorLogger, logsFolder}) => {
  const attach = (targetLogger, severity, suffix) => {
    targetLogger.add(winston.transports.File, {
      filename: logsFolder + _getFileName() + suffix,
      name: severity + '-file',
      level: severity
    });
  };
  if (infoLogger) {
    attach(infoLogger, 'info', '.info');
  }
  if (errorLogger) {
    attach(errorLogger, 'error', '.error');
  }
};
// check that log entry has valid time stamp based on query
let _isValidLogEntry = (from, until, entry) => {
var _entry = JSON.parse(entry),
@@ -84,139 +67,51 @@ let _isValidLogEntry = (from, until, entry) => {
: false
};
// Rolls the file transports over to a new file when the calendar day has
// changed since the last check; otherwise leaves the loggers untouched.
let _verifyTransports = ({infoLogger, errorLogger, logsFolder}) => {
  if (_getNearestDay(currentDate) === _getNearestDay(new Date())) {
    return; // still the same day: current transports remain valid
  }
  currentDate = new Date();
  // drop the stale day-stamped transports before attaching fresh ones
  if (infoLogger) {
    infoLogger.remove('info-file');
  }
  if (errorLogger) {
    errorLogger.remove('error-file');
  }
  _renewTransports({infoLogger, errorLogger, logsFolder});
}
export class FileLoggerAdapter extends LoggerAdapter {
constructor(options = {}) {
super();
this._logsFolder = options.logsFolder || LOGS_FOLDER;
// check logs folder exists
if (!fs.existsSync(this._logsFolder)) {
fs.mkdirSync(this._logsFolder);
}
this._errorLogger = new (winston.Logger)({
exitOnError: false,
transports: [
new (winston.transports.File)({
filename: this._logsFolder + _getFileName() + '.error',
name: 'error-file',
level: 'error'
})
]
});
this._infoLogger = new (winston.Logger)({
exitOnError: false,
transports: [
new (winston.transports.File)({
filename: this._logsFolder + _getFileName() + '.info',
name: 'info-file',
level: 'info'
})
]
});
}
info() {
_verifyTransports({infoLogger: this._infoLogger, logsFolder: this._logsFolder});
return this._infoLogger.info.apply(undefined, arguments);
return logger.info.apply(undefined, arguments);
}
error() {
_verifyTransports({errorLogger: this._errorLogger, logsFolder: this._logsFolder});
return this._errorLogger.error.apply(undefined, arguments);
return logger.error.apply(undefined, arguments);
}
// custom query as winston is currently limited
query(options, callback) {
query(options, callback = () => {}) {
if (!options) {
options = {};
}
// defaults to 7 days prior
let from = options.from || new Date(Date.now() - (7 * MILLISECONDS_IN_A_DAY));
let until = options.until || new Date();
let size = options.size || 10;
let limit = options.size || 10;
let order = options.order || 'desc';
let level = options.level || 'info';
let roundedUntil = _getNearestDay(until);
let roundedFrom = _getNearestDay(from);
if (_hasValidCache(roundedFrom, roundedUntil, level)) {
let logs = [];
if (order !== simpleCache.order) {
// reverse order of data
simpleCache.data.forEach((entry) => {
logs.unshift(entry);
});
} else {
logs = simpleCache.data;
}
callback(logs.slice(0, size));
return;
}
let curDate = roundedUntil;
let curSize = 0;
let method = order === 'desc' ? 'push' : 'unshift';
let files = [];
let promises = [];
// batch the reads: collect every file whose date falls inside the query range
while (curDate >= from) {
files[method](this._logsFolder + curDate.toISOString() + '.' + level);
curDate = _getPrevDay(curDate);
}
// read each file and split based on newline char.
// limitation is message cannot contain newline
// TODO: strip out delimiter from logged message
files.forEach(function(file, i) {
let promise = new Parse.Promise();
fs.readFile(file, 'utf8', function(err, data) {
if (err) {
promise.resolve([]);
} else {
let results = data.split('\n').filter((value) => {
return value.trim() !== '';
});
promise.resolve(results);
}
});
promises[method](promise);
});
Parse.Promise.when(promises).then((results) => {
let logs = [];
results.forEach(function(logEntries, i) {
logEntries.forEach(function(entry) {
if (_isValidLogEntry(from, until, entry)) {
logs[method](JSON.parse(entry));
}
});
});
simpleCache = {
timestamp: new Date(),
from: roundedFrom,
until: roundedUntil,
data: logs,
order,
level,
var options = {
from,
until,
limit,
order
};
callback(logs.slice(0, size));
return new Promise((resolve, reject) => {
logger.query(options, (err, res) => {
if (err) {
callback(err);
return reject(err);
}
if (level == 'error') {
callback(res['parse-server-error']);
resolve(res['parse-server-error']);
} else {
callback(res['parse-server']);
resolve(res['parse-server']);
}
})
});
}
}

View File

@@ -3,7 +3,6 @@ import PromiseRouter from '../PromiseRouter';
import AdaptableController from './AdaptableController';
import { LoggerAdapter } from '../Adapters/Logger/LoggerAdapter';
const Promise = Parse.Promise;
const MILLISECONDS_IN_A_DAY = 24 * 60 * 60 * 1000;
export const LogLevel = {
@@ -61,15 +60,8 @@ export class LoggerController extends AdaptableController {
throw new Parse.Error(Parse.Error.PUSH_MISCONFIGURED,
'Logger adapter is not availabe');
}
let promise = new Parse.Promise();
options = LoggerController.parseOptions(options);
this.adapter.query(options, (result) => {
promise.resolve(result);
});
return promise;
return this.adapter.query(options);
}
expectedAdapterType() {

View File

@@ -1,41 +1,5 @@
let LogLevel = {
'VERBOSE': 0,
'DEBUG': 1,
'INFO': 2,
'ERROR': 3,
'NONE': 4
}
import { addGroup } from '../logger';
function getCurrentLogLevel() {
if (PLog.logLevel && PLog.logLevel in LogLevel) {
return LogLevel[PLog.logLevel];
}
return LogLevel['ERROR'];
}
function verbose(): void {
if (getCurrentLogLevel() <= LogLevel['VERBOSE']) {
console.log.apply(console, arguments)
}
}
function log(): void {
if (getCurrentLogLevel() <= LogLevel['INFO']) {
console.log.apply(console, arguments)
}
}
function error(): void {
if (getCurrentLogLevel() <= LogLevel['ERROR']) {
console.error.apply(console, arguments)
}
}
let PLog = {
log: log,
error: error,
verbose: verbose,
logLevel: 'INFO'
};
let PLog = addGroup('parse-live-query-server');
module.exports = PLog;

View File

@@ -26,8 +26,7 @@ class ParseLiveQueryServer {
config = config || {};
// Set LogLevel
PLog.logLevel = config.logLevel || 'INFO';
PLog.level = config.logLevel || 'INFO';
// Store keys, convert obj to map
let keyPairs = config.keyPairs || {};
this.keyPairs = new Map();

View File

@@ -11,6 +11,8 @@ var batch = require('./batch'),
Parse = require('parse/node').Parse,
authDataManager = require('./authDataManager');
import { logger,
configureLogger } from './logger';
import cache from './cache';
import Config from './Config';
import parseServerPackage from '../package.json';
@@ -84,6 +86,7 @@ class ParseServer {
filesAdapter,
push,
loggerAdapter,
logsFolder,
databaseURI = DatabaseAdapter.defaultDatabaseURI,
databaseOptions,
cloud,
@@ -114,6 +117,12 @@ class ParseServer {
Parse.initialize(appId, javascriptKey || 'unused', masterKey);
Parse.serverURL = serverURL;
if (logsFolder) {
configureLogger({
logsFolder
})
}
if (databaseAdapter) {
DatabaseAdapter.setAdapter(databaseAdapter);
}
@@ -254,7 +263,7 @@ class ParseServer {
if (!process.env.TESTING) {
process.on('uncaughtException', (err) => {
if ( err.code === "EADDRINUSE" ) { // user-friendly message for this common error
console.log(`Unable to listen on port ${err.port}. The port is already in use.`);
log.error(`Unable to listen on port ${err.port}. The port is already in use.`);
process.exit(0);
} else {
throw err;

View File

@@ -6,6 +6,7 @@
// components that external developers may be modifying.
import express from 'express';
import log from './logger';
export default class PromiseRouter {
// Each entry should be an object with:
@@ -146,9 +147,6 @@ export default class PromiseRouter {
}
}
// Global flag. Set this to true to log every request and response.
PromiseRouter.verbose = process.env.VERBOSE || false;
// A helper function to make an express handler out of a a promise
// handler.
// Express handlers should never throw; if a promise handler throws we
@@ -156,18 +154,14 @@ PromiseRouter.verbose = process.env.VERBOSE || false;
function makeExpressHandler(promiseHandler) {
return function(req, res, next) {
try {
if (PromiseRouter.verbose) {
console.log(req.method, req.originalUrl, req.headers,
log.verbose(req.method, req.originalUrl, req.headers,
JSON.stringify(req.body, null, 2));
}
promiseHandler(req).then((result) => {
if (!result.response && !result.location && !result.text) {
console.log('BUG: the handler did not include a "response" or a "location" field');
log.error('the handler did not include a "response" or a "location" field');
throw 'control should not get here';
}
if (PromiseRouter.verbose) {
console.log('response:', JSON.stringify(result, null, 2));
}
log.verbose(JSON.stringify(result, null, 2));
var status = result.status || 200;
res.status(status);
@@ -186,15 +180,11 @@ function makeExpressHandler(promiseHandler) {
}
res.json(result.response);
}, (e) => {
if (PromiseRouter.verbose) {
console.log('error:', e);
}
log.verbose('error:', e);
next(e);
});
} catch (e) {
if (PromiseRouter.verbose) {
console.log('error:', e);
}
log.verbose('exception:', e);
next(e);
}
}

View File

@@ -2,6 +2,7 @@ import request from 'request';
import Parse from 'parse/node';
import HTTPResponse from './HTTPResponse';
import querystring from 'querystring';
import log from '../logger';
var encodeBody = function({body, headers = {}}) {
if (typeof body !== 'object') {
@@ -20,7 +21,7 @@ var encodeBody = function({body, headers = {}}) {
} else {
/* istanbul ignore next */
if (contentTypeKeys.length > 1) {
console.error('multiple content-type headers are set.');
log.error('Parse.Cloud.httpRequest', 'multiple content-type headers are set.');
}
// There maybe many, we'll just take the 1st one
var contentType = contentTypeKeys[0];

View File

@@ -1,8 +1,17 @@
import winston from 'winston';
import ParseServer from './ParseServer';
import { GCSAdapter } from 'parse-server-gcs-adapter';
import { S3Adapter } from 'parse-server-s3-adapter';
import { FileSystemAdapter } from 'parse-server-fs-adapter';
if (process.env.VERBOSE || process.env.VERBOSE_PARSE_SERVER) {
winston.level = 'silly';
}
if (process.env.DEBUG || process.env.DEBUG_PARSE_SERVER) {
winston.level = 'debug';
}
// Factory function
let _ParseServer = function(options) {
let server = new ParseServer(options);

86
src/logger.js Normal file
View File

@@ -0,0 +1,86 @@
import winston from 'winston';
import fs from 'fs';
import path from 'path';
import DailyRotateFile from 'winston-daily-rotate-file';
// Default log directory; switched to a throwaway folder under test so test
// runs do not pollute real logs.
let LOGS_FOLDER = './logs/';
if (typeof process !== 'undefined' && process.env.NODE_ENV === 'test') {
  LOGS_FOLDER = './test_logs/'
}
// Module-level state shared by configureLogger()/addGroup(): the folder and
// transport list of the most recent configuration.
let currentLogsFolder = LOGS_FOLDER;
var currentTransports;
// Shared logger instance; transports are attached via configureLogger().
const logger = new winston.Logger();
/**
 * (Re)builds the winston transports used for parse-server logging.
 *
 * @param {Object} [options]
 * @param {String} [options.logsFolder] - directory for the log files;
 *        relative paths are resolved against process.cwd(). Falls back to
 *        the folder of the previous configuration when omitted.
 *
 * Side effects: creates the folder when missing, updates the module-level
 * `currentLogsFolder` / `currentTransports` state, and reconfigures the
 * shared `logger` instance in place.
 */
export function configureLogger({ logsFolder } = {}) { // default so configureLogger() is safe
  logsFolder = logsFolder || currentLogsFolder;
  if (!path.isAbsolute(logsFolder)) {
    logsFolder = path.resolve(process.cwd(), logsFolder);
  }
  // NOTE(review): mkdirSync only creates the leaf directory — assumes the
  // parent already exists; confirm callers never pass deeper paths.
  if (!fs.existsSync(logsFolder)) {
    fs.mkdirSync(logsFolder);
  }
  currentLogsFolder = logsFolder;
  const defaultLevel = process.env.VERBOSE ? 'verbose' : 'info';
  currentTransports = [
    // mirror everything to the console for development convenience
    new (winston.transports.Console)({
      colorize: true,
      level: defaultLevel
    }),
    // daily-rotated file with all entries at the default level and above
    new (DailyRotateFile)({
      filename: 'parse-server.info',
      dirname: currentLogsFolder,
      name: 'parse-server',
      level: defaultLevel
    }),
    // separate daily-rotated file that only captures errors
    new (DailyRotateFile)({
      filename: 'parse-server.err',
      dirname: currentLogsFolder,
      name: 'parse-server-error',
      level: 'error'
    })
  ];
  logger.configure({
    transports: currentTransports
  });
}
configureLogger({logsFolder: LOGS_FOLDER});
// Registers and returns a named winston logger that writes to the console,
// to its own daily-rotated file, and to the shared parse-server log files.
export function addGroup(groupName) {
  const defaultLevel = process.env.VERBOSE ? 'verbose' : 'info';
  // small factory for the repeated daily-rotate-file configuration
  const rotatedFile = (filename, name, level) => new (DailyRotateFile)({
    filename,
    dirname: currentLogsFolder,
    name,
    level
  });
  winston.loggers.add(groupName, {
    transports: [
      new (winston.transports.Console)({
        colorize: true,
        level: defaultLevel
      }),
      rotatedFile(groupName, groupName, defaultLevel),                // group-specific file
      rotatedFile('parse-server.info', 'parse-server', defaultLevel), // shared info file
      rotatedFile('parse-server.err', 'parse-server-error', 'error')  // shared error file
    ]
  });
  return winston.loggers.get(groupName);
}
export { logger };
export default winston;

View File

@@ -1,4 +1,5 @@
import cache from './cache';
import log from './logger';
var Parse = require('parse/node').Parse;
@@ -128,7 +129,7 @@ function handleParseHeaders(req, res, next) {
})
.catch((error) => {
// TODO: Determine the correct error scenario.
console.log(error);
log.error('error getting auth for sessionToken', error);
throw new Parse.Error(Parse.Error.UNKNOWN_ERROR, error);
});
}
@@ -178,7 +179,7 @@ var handleParseErrors = function(err, req, res, next) {
res.status(err.status);
res.json({error: err.message});
} else {
console.log('Uncaught internal server error.', err, err.stack);
log.error('Uncaught internal server error.', err, err.stack);
res.status(500);
res.json({code: Parse.Error.INTERNAL_SERVER_ERROR,
message: 'Internal server error.'});

View File

@@ -1,3 +1,4 @@
import log from './logger';
var mongodb = require('mongodb');
var Parse = require('parse/node').Parse;
@@ -691,13 +692,13 @@ function untransformObject(schema, className, mongoObject, isNestedObject = fals
expected = schema.getExpectedType(className, newKey);
}
if (!expected) {
console.log(
log.info('transform.js',
'Found a pointer column not in the schema, dropping it.',
className, newKey);
break;
}
if (expected && expected[0] != '*') {
console.log('Found a pointer in a non-pointer column, dropping it.', className, key);
log.info('transform.js', 'Found a pointer in a non-pointer column, dropping it.', className, key);
break;
}
if (mongoObject[key] === null) {