Compare commits
82 Commits
| Author | SHA1 | Date |
|---|---|---|
| | a703f2dc12 | |
| | b2edbb1146 | |
| | 99e7c0981d | |
| | 06bab894af | |
| | 101739ebef | |
| | f8ffccffd5 | |
| | 7d50b4aeff | |
| | b12200fabc | |
| | f8b6cc7c39 | |
| | 6314e4a344 | |
| | d18fb466fb | |
| | e638ff7271 | |
| | 2daf29b400 | |
| | ca5272aacc | |
| | 614127bb10 | |
| | a549df44b4 | |
| | 5e0982f0b1 | |
| | f5a76d9073 | |
| | 29d941f0a6 | |
| | 0c2baa9690 | |
| | 9b538ee8ed | |
| | e4d5228f2b | |
| | 6aacb0da0b | |
| | 6e3da6f44b | |
| | 3b5eb28115 | |
| | 633ed3cddb | |
| | 8ca092cdb9 | |
| | 3ec9811b5e | |
| | c852fceaf4 | |
| | c569919160 | |
| | 28f7c87a0e | |
| | 492919b940 | |
| | 470baa6c09 | |
| | cd2ee14bde | |
| | c09c11b147 | |
| | b74a514369 | |
| | fd05d90c2f | |
| | 73344ba79f | |
| | 22c156582f | |
| | 72bfb5d980 | |
| | 83ad0babf3 | |
| | ae1a55fed9 | |
| | 94034e1226 | |
| | 9b4c7d1574 | |
| | 770f2da627 | |
| | eb51aa99be | |
| | 5286c50375 | |
| | bb644a1632 | |
| | a6efbf6273 | |
| | 2118d8f7b3 | |
| | d2f044a451 | |
| | d0661322aa | |
| | 8b8844694f | |
| | abdba8e56f | |
| | 093f693232 | |
| | b9bba00d8c | |
| | 731e217505 | |
| | 3018a49bde | |
| | a5bb94a048 | |
| | 7a1a895e46 | |
| | 48dc22eb63 | |
| | 7888381991 | |
| | cd286fa25f | |
| | 6df4753822 | |
| | 613474eb44 | |
| | 112246dd55 | |
| | 069ed31759 | |
| | 9e72189574 | |
| | 5a167d853a | |
| | 5755faa7bb | |
| | 1ed026a8d9 | |
| | 2d177d517b | |
| | 21aebbde33 | |
| | 49892f35d3 | |
| | 61beac28d3 | |
| | 8ad1cd67e2 | |
| | c67ab855bb | |
| | 4905761f60 | |
| | 2a38f460dc | |
| | 9f77734f74 | |
| | ce8b6b06b9 | |
| | eb21e10208 | |
.gitignore (vendored, 1 line changed)
@@ -4,3 +4,4 @@ build
node_modules
.bob/
test/streams/test-rolling-file-stream*
test/streams/test-rolling-stream-with-existing-files*
README.md
@@ -1,7 +1,7 @@
# log4js-node [](http://travis-ci.org/nomiddlename/log4js-node)

This is a conversion of the [log4js](http://log4js.berlios.de/index.html)
This is a conversion of the [log4js](https://github.com/stritti/log4js)
framework to work with [node](http://nodejs.org). I've mainly stripped out the browser-specific code and tidied up some of the javascript.

Out of the box it supports the following features:
@@ -12,6 +12,7 @@ Out of the box it supports the following features:
* SMTP appender
* GELF appender
* hook.io appender
* Loggly appender
* multiprocess appender (useful when you've got worker processes)
* a logger for connect/express servers
* configurable log message layout/patterns
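The feature list above is taken straight from the README. As a quick orientation (not part of this changeset), a minimal configuration that exercises two of those appenders with the 0.6.x `configure` API used throughout this diff might look like the sketch below; the file name and category are placeholders.

```js
// Minimal sketch using the 0.6.x configuration format shown in this changeset.
// 'app.log' and the 'app' category are placeholder names.
var log4js = require('log4js');

log4js.configure({
  appenders: [
    { type: 'console' },
    { type: 'file', filename: 'app.log', category: 'app' }
  ]
});

var logger = log4js.getLogger('app');
logger.setLevel('INFO');
logger.info('log4js is configured');
```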
examples/flush-on-exit.js (Normal file, 27 lines)
@@ -0,0 +1,27 @@
/**
 * run this, then "ab -c 10 -n 100 localhost:4444/" to test (in
 * another shell)
 */
var log4js = require('../lib/log4js');
log4js.configure({
  appenders: [
    { type: 'file', filename: 'cheese.log', category: 'cheese' },
    { type: 'console'}
  ]
});

var logger = log4js.getLogger('cheese');
logger.setLevel('INFO');

var http=require('http');

var server = http.createServer(function(request, response){
  response.writeHead(200, {'Content-Type': 'text/plain'});
  var rd = Math.random() * 50;
  logger.info("hello " + rd);
  response.write('hello ');
  if (Math.floor(rd) == 30){
    log4js.shutdown(function() { process.exit(1); });
  }
  response.end();
}).listen(4444);
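The example above calls `log4js.shutdown()` on a randomly chosen request. A more typical variant, sketched below as an assumption rather than something in this changeset, is to flush the appenders when the process is interrupted.

```js
// Hypothetical variation on the example above: flush appenders on Ctrl-C.
process.on('SIGINT', function() {
  log4js.shutdown(function() { process.exit(0); });
});
```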
examples/loggly-appender.js (Normal file, 24 lines)
@@ -0,0 +1,24 @@
//Note that loggly appender needs node-loggly to work.
//If you haven't got node-loggly installed, you'll get cryptic
//"cannot find module" errors when using the loggly appender
var log4js = require('../lib/log4js');

log4js.configure({
  "appenders": [
    {
      type: "console",
      category: "test"
    },
    {
      "type" : "loggly",
      "token" : "12345678901234567890",
      "subdomain": "your-subdomain",
      "tags" : ["test"],
      "category" : "loggly"
    }
  ]
});

var logger = log4js.getLogger("loggly");
logger.info("Test log message");
//logger.debug("Test log message");
lib/appenders/categoryFilter.js (Normal file, 20 lines)
@@ -0,0 +1,20 @@
"use strict";
var log4js = require('../log4js');

function categoryFilter (excludes, appender) {
  if (typeof(excludes) === 'string') excludes = [excludes];
  return function(logEvent) {
    if (excludes.indexOf(logEvent.categoryName) === -1) {
      appender(logEvent);
    }
  };
}

function configure(config) {
  log4js.loadAppender(config.appender.type);
  var appender = log4js.appenderMakers[config.appender.type](config.appender);
  return categoryFilter(config.exclude, appender);
}

exports.appender = categoryFilter;
exports.configure = configure;
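Besides the JSON-driven `configure` path, the exported `appender(excludes, appender)` function can wrap any other appender directly. The sketch below is illustrative only; the module paths and the 'noisy' category are assumptions.

```js
// Sketch: wrap the console appender so events from the 'noisy' category are dropped.
var log4js = require('log4js');
var categoryFilter = require('log4js/lib/appenders/categoryFilter');
var consoleAppender = require('log4js/lib/appenders/console').appender();

log4js.addAppender(categoryFilter.appender(['noisy'], consoleAppender), ['noisy', 'app']);

log4js.getLogger('app').info('this reaches the console');
log4js.getLogger('noisy').info('this one is filtered out');
```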
lib/appenders/clustered.js (Executable file, 129 lines)
@@ -0,0 +1,129 @@
"use strict";

var cluster = require('cluster');
var log4js = require('../log4js');

/**
 * Takes a loggingEvent object, returns string representation of it.
 */
function serializeLoggingEvent(loggingEvent) {
  // JSON.stringify(new Error('test')) returns {}, which is not really useful for us.
  // The following allows us to serialize errors correctly.
  for (var i = 0; i < loggingEvent.data.length; i++) {
    var item = loggingEvent.data[i];
    if (item && item.stack && JSON.stringify(item) === '{}') { // Validate that we really are in this case
      loggingEvent.data[i] = {stack : item.stack};
    }
  }
  return JSON.stringify(loggingEvent);
}

/**
 * Takes a string, returns an object with
 * the correct log properties.
 *
 * This method has been "borrowed" from the `multiprocess` appender
 * by `nomiddlename` (https://github.com/nomiddlename/log4js-node/blob/master/lib/appenders/multiprocess.js)
 *
 * Apparently, node.js serializes everything to strings when using `process.send()`,
 * so we need smart deserialization that will recreate log date and level for further processing by log4js internals.
 */
function deserializeLoggingEvent(loggingEventString) {

  var loggingEvent;

  try {

    loggingEvent = JSON.parse(loggingEventString);
    loggingEvent.startTime = new Date(loggingEvent.startTime);
    loggingEvent.level = log4js.levels.toLevel(loggingEvent.level.levelStr);

  } catch (e) {

    // JSON.parse failed, just log the contents - probably not a valid logging event.
    loggingEvent = {
      startTime: new Date(),
      categoryName: 'log4js',
      level: log4js.levels.ERROR,
      data: [ 'Unable to parse log:', loggingEventString ]
    };
  }
  return loggingEvent;
}

/**
 * Creates an appender.
 *
 * If the current process is a master (`cluster.isMaster`), then this will be a "master appender".
 * Otherwise this will be a worker appender, that just sends loggingEvents to the master process.
 *
 * If you are using this method directly, make sure to provide it with `config.actualAppenders` array
 * of actual appender instances.
 *
 * Or better use `configure(config, options)`
 */
function createAppender(config) {

  if (cluster.isMaster) {

    var masterAppender = function(loggingEvent) {

      if (config.actualAppenders) {
        var size = config.actualAppenders.length;
        for(var i = 0; i < size; i++) {
          if (!config.appenders[i].category || config.appenders[i].category === loggingEvent.categoryName) {
            // Relying on the index is not a good practice but otherwise the change would have been bigger.
            config.actualAppenders[i](loggingEvent);
          }
        }
      }
    }

    // Listen for new workers
    cluster.on('fork', function(worker) {

      worker.on('message', function(message) {
        if (message.type && message.type === '::log-message') {
          // console.log("master : " + cluster.isMaster + " received message: " + JSON.stringify(message.event));

          var loggingEvent = deserializeLoggingEvent(message.event);
          masterAppender(loggingEvent);
        }
      });

    });

    return masterAppender;

  } else {

    return function(loggingEvent) {
      // If inside the worker process, then send the logger event to master.
      if (cluster.isWorker) {
        // console.log("worker " + cluster.worker.id + " is sending message");
        process.send({ type: '::log-message', event: serializeLoggingEvent(loggingEvent)});
      }
    }
  }
}

function configure(config, options) {

  if (config.appenders && cluster.isMaster) {

    var size = config.appenders.length;
    config.actualAppenders = new Array(size);

    for(var i = 0; i < size; i++) {

      log4js.loadAppender(config.appenders[i].type);
      config.actualAppenders[i] = log4js.appenderMakers[config.appenders[i].type](config.appenders[i], options);

    }
  }

  return createAppender(config);
}

exports.appender = createAppender;
exports.configure = configure;
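`configure(config, options)` instantiates every entry of `config.appenders` in the master process, while workers only forward serialized events over `process.send`. A configuration sketch (an assumption about typical usage; the file name and category are placeholders) could look like this:

```js
// Sketch: run the same configure() call in master and workers; only the master writes.
var cluster = require('cluster');
var log4js = require('log4js');

log4js.configure({
  appenders: [
    { type: 'clustered',
      appenders: [
        { type: 'console' },
        { type: 'file', filename: 'master.log', category: 'app' } // placeholder path
      ]
    }
  ]
});

var logger = log4js.getLogger('app');
if (cluster.isMaster) {
  cluster.fork();
  logger.info('written directly by the master');
} else {
  logger.info('forwarded from the worker to the master appenders');
}
```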
lib/appenders/dateFile.js
@@ -1,6 +1,7 @@
"use strict";
var streams = require('../streams')
, layouts = require('../layouts')
, async = require('async')
, path = require('path')
, os = require('os')
, eol = os.EOL || '\n'
@@ -24,12 +25,12 @@ function appender(filename, pattern, alwaysIncludePattern, layout) {
layout = layout || layouts.basicLayout;

var logFile = new streams.DateRollingFileStream(
  filename,
  pattern,
  { alwaysIncludePattern: alwaysIncludePattern }
);
openFiles.push(logFile);

return function(logEvent) {
  logFile.write(layout(logEvent) + eol, "utf8");
};
@@ -38,15 +39,15 @@ function appender(filename, pattern, alwaysIncludePattern, layout) {

function configure(config, options) {
  var layout;

  if (config.layout) {
    layout = layouts.layout(config.layout.type, config.layout);
  }

  if (!config.alwaysIncludePattern) {
    config.alwaysIncludePattern = false;
  }

  if (options && options.cwd && !config.absolute) {
    config.filename = path.join(options.cwd, config.filename);
  }
@@ -54,5 +55,18 @@ function configure(config, options) {
  return appender(config.filename, config.pattern, config.alwaysIncludePattern, layout);
}

function shutdown(cb) {
  async.forEach(openFiles, function(file, done) {
    if (!file.write(eol, "utf-8")) {
      file.once('drain', function() {
        file.end(done);
      });
    } else {
      file.end(done);
    }
  }, cb);
}

exports.appender = appender;
exports.configure = configure;
exports.shutdown = shutdown;
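The hunk above threads the `alwaysIncludePattern` option through to `DateRollingFileStream` and registers open files for `shutdown`. A configuration sketch for this appender (the path and pattern are placeholders) might be:

```js
// Sketch: daily rolling file appender; 'logs/app.log' is a placeholder path.
var log4js = require('log4js');

log4js.configure({
  appenders: [
    { type: 'dateFile',
      filename: 'logs/app.log',
      pattern: '-yyyy-MM-dd',
      alwaysIncludePattern: true
    }
  ]
});
```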
lib/appenders/file.js
@@ -1,5 +1,6 @@
"use strict";
var layouts = require('../layouts')
, async = require('async')
, path = require('path')
, fs = require('fs')
, streams = require('../streams')
@@ -78,5 +79,18 @@ function configure(config, options) {
  return fileAppender(config.filename, layout, config.maxLogSize, config.backups);
}

function shutdown(cb) {
  async.forEach(openFiles, function(file, done) {
    if (!file.write(eol, "utf-8")) {
      file.once('drain', function() {
        file.end(done);
      });
    } else {
      file.end(done);
    }
  }, cb);
}

exports.appender = fileAppender;
exports.configure = configure;
exports.shutdown = shutdown;
lib/appenders/gelf.js
@@ -100,7 +100,7 @@ function gelfAppender (layout, host, port, hostname, facility) {
msg.short_message = msg.full_message;

msg.version="1.0";
msg.timestamp = msg.timestamp || new Date().getTime() / 1000 >> 0;
msg.timestamp = msg.timestamp || new Date().getTime() / 1000; // log should use millisecond
msg.host = hostname;
msg.level = levelMapping[loggingEvent.level || levels.DEBUG];
msg.facility = facility;
lib/appenders/loggly.js (Normal file, 84 lines)
@@ -0,0 +1,84 @@
'use strict';
var layouts = require('../layouts')
, loggly = require('loggly')
, os = require('os');

/**
 * Loggly Appender. Sends logging events to Loggly using node-loggly
 *
 * @param config object with loggly configuration data
 * {
 *   token: 'your-really-long-input-token',
 *   subdomain: 'your-subdomain',
 *   tags: ['loggly-tag1', 'loggly-tag2', .., 'loggly-tagn']
 * }
 * @param layout a function that takes a logevent and returns a string (defaults to objectLayout).
 */
function logglyAppender(config, layout) {
  var client = loggly.createClient(config);

  var packageMessage = function (loggingEvent) {
    var BaseItem = function(level, msg) {
      this.level = level || loggingEvent.level.toString();
      this.category = loggingEvent.categoryName;
      this.hostname = os.hostname().toString();
      if (typeof msg !== 'undefined')
        this.msg = msg;
    };

    var packageItem = function (item) {
      if (item instanceof Error)
        return new BaseItem('ERROR', item.message);

      if (['string', 'number', 'boolean'].indexOf(typeof item) > -1 )
        return new BaseItem(undefined, item);

      var obj = new BaseItem();
      if (Array.isArray(item))
        return item.unshift(obj); //add base object as first item

      if (item && Object.prototype.toString.call(item) === '[object Object]') {
        for (var key in item) {
          if (item.hasOwnProperty(key)) {
            obj[key] = item[key]; //don't do packageItem on nested items, because level, category and hostname are needed on top level items only.
          }
        }
      }

      return obj;
    };

    if (loggingEvent.data.length === 1) {
      return packageItem(loggingEvent.data[0]);
    }
    //length >1
    var msg = loggingEvent.data;
    for (var i = 0, l = msg.length; i < l; i++) {
      msg[i] = packageItem(msg[i]);
    }

    return msg;
  };

  return function(loggingEvent) {
    var a = layout ? layout(loggingEvent) : packageMessage(loggingEvent);
    //console.log('log now', a);
    client.log(a, config.tags, function(err, result) {
      if (err) {
        throw err;
      }
    });
  };
}

function configure(config) {
  var layout;
  if (config.layout) {
    layout = layouts.layout(config.layout.type, config.layout);
  }
  return logglyAppender(config, layout);
}

exports.name = 'loggly';
exports.appender = logglyAppender;
exports.configure = configure;
lib/appenders/multiprocess.js
@@ -94,6 +94,11 @@ function workerAppender(config) {
}

function write(loggingEvent) {
  // JSON.stringify(new Error('test')) returns {}, which is not really useful for us.
  // The following allows us to serialize errors correctly.
  if (loggingEvent && loggingEvent.stack && JSON.stringify(loggingEvent) === '{}') { // Validate that we really are in this case
    loggingEvent = {stack : loggingEvent.stack};
  }
  socket.write(JSON.stringify(loggingEvent), 'utf8');
  socket.write(END_MSG, 'utf8');
}
lib/connect-logger.js
@@ -120,10 +120,10 @@ function format(str, req, res) {
.replace(':referrer', req.headers.referer || req.headers.referrer || '')
.replace(':http-version', req.httpVersionMajor + '.' + req.httpVersionMinor)
.replace(
  ':remote-addr',
  ':remote-addr', req.ip || req._remoteAddress || (
    req.socket &&
    (req.socket.remoteAddress || (req.socket.socket && req.socket.socket.remoteAddress))
  )
))
.replace(':user-agent', req.headers['user-agent'] || '')
.replace(
  ':content-length',
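The `:remote-addr` change above affects the connect/express logger mentioned in the README. A hedged wiring sketch, assuming the `connectLogger` middleware of this version and an Express app:

```js
// Sketch: attach the connect/express logger; the format string uses the :remote-addr token.
var express = require('express');
var log4js = require('log4js');

var app = express();
app.use(log4js.connectLogger(log4js.getLogger('http'), {
  format: ':remote-addr ":method :url" :status'
}));
app.listen(3000);
```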
lib/date_format.js
@@ -45,7 +45,7 @@ exports.asString = function(/*format,*/ date) {
var vDay = addZero(date.getDate());
var vMonth = addZero(date.getMonth()+1);
var vYearLong = addZero(date.getFullYear());
var vYearShort = addZero(date.getFullYear().toString().substring(3,4));
var vYearShort = addZero(date.getFullYear().toString().substring(2,4));
var vYear = (format.indexOf("yyyy") > -1 ? vYearLong : vYearShort);
var vHour = addZero(date.getHours());
var vMinute = addZero(date.getMinutes());
lib/layouts.js
@@ -120,6 +120,7 @@ function messagePassThroughLayout (loggingEvent) {
 * - %r time in toLocaleTimeString format
 * - %p log level
 * - %c log category
 * - %h hostname
 * - %m log data
 * - %d date in various formats
 * - %% %
@@ -143,7 +144,7 @@ function messagePassThroughLayout (loggingEvent) {
 */
function patternLayout (pattern, tokens) {
  var TTCC_CONVERSION_PATTERN = "%r %p %c - %m%n";
  var regex = /%(-?[0-9]+)?(\.?[0-9]+)?([\[\]cdmnprx%])(\{([^\}]+)\})?|([^%]+)/;
  var regex = /%(-?[0-9]+)?(\.?[0-9]+)?([\[\]cdhmnprx%])(\{([^\}]+)\})?|([^%]+)/;

  pattern = pattern || TTCC_CONVERSION_PATTERN;

@@ -166,6 +167,8 @@ function patternLayout (pattern, tokens) {
  // Pick up special cases
  if (format == "ISO8601") {
    format = dateFormat.ISO8601_FORMAT;
  } else if (format == "ISO8601_WITH_TZ_OFFSET") {
    format = dateFormat.ISO8601_WITH_TZ_OFFSET_FORMAT;
  } else if (format == "ABSOLUTE") {
    format = dateFormat.ABSOLUTETIME_FORMAT;
  } else if (format == "DATE") {
@@ -175,6 +178,10 @@ function patternLayout (pattern, tokens) {
    // Format the date
    return dateFormat.asString(format, loggingEvent.startTime);
  }

  function hostname() {
    return os.hostname().toString();
  }

  function formatMessage(loggingEvent) {
    return formatLogData(loggingEvent.data);
@@ -218,6 +225,7 @@ function patternLayout (pattern, tokens) {
  var replacers = {
    'c': categoryName,
    'd': formatAsDate,
    'h': hostname,
    'm': formatMessage,
    'n': endOfLine,
    'p': logLevel,
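With the `%h` token added to `patternLayout` and its replacer table, a layout that includes the hostname can be configured as in this sketch (the appender choice and pattern string are illustrative):

```js
// Sketch: console appender with a pattern layout using the new %h (hostname) token.
var log4js = require('log4js');

log4js.configure({
  appenders: [
    { type: 'console',
      layout: { type: 'pattern', pattern: '%d %h %p %c - %m' }
    }
  ]
});
```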
lib/log4js.js
@@ -44,17 +44,20 @@
 * Website: http://log4js.berlios.de
 */
var events = require('events')
, async = require('async')
, fs = require('fs')
, path = require('path')
, util = require('util')
, layouts = require('./layouts')
, levels = require('./levels')
, LoggingEvent = require('./logger').LoggingEvent
, Logger = require('./logger').Logger
, loggerModule = require('./logger')
, LoggingEvent = loggerModule.LoggingEvent
, Logger = loggerModule.Logger
, ALL_CATEGORIES = '[all]'
, appenders = {}
, loggers = {}
, appenderMakers = {}
, appenderShutdowns = {}
, defaultConfig = {
    appenders: [
      { type: "console" }
@@ -62,6 +65,11 @@ var events = require('events')
  replaceConsole: false
};

function hasLogger(logger) {
  return loggers.hasOwnProperty(logger);
}

/**
 * Get a logger instance. Instance is cached on categoryName level.
 * @param {String} categoryName name of category to log to.
@@ -76,7 +84,7 @@ function getLogger (categoryName) {
}

var appenderList;
if (!loggers[categoryName]) {
if (!hasLogger(categoryName)) {
  // Create the logger for this name if it doesn't already exist
  loggers[categoryName] = new Logger(categoryName);
  if (appenders[categoryName]) {
@@ -115,7 +123,7 @@ function addAppender () {

if (category === ALL_CATEGORIES) {
  addAppenderToAllLoggers(appender);
} else if (loggers[category]) {
} else if (hasLogger(category)) {
  loggers[category].addListener("log", appender);
}
});
@@ -123,7 +131,7 @@ function addAppender () {

function addAppenderToAllLoggers(appender) {
  for (var logger in loggers) {
    if (loggers.hasOwnProperty(logger)) {
    if (hasLogger(logger)) {
      loggers[logger].addListener("log", appender);
    }
  }
@@ -139,7 +147,7 @@ function addAppenderToCategory(appender, category) {
function clearAppenders () {
  appenders = {};
  for (var logger in loggers) {
    if (loggers.hasOwnProperty(logger)) {
    if (hasLogger(logger)) {
      loggers[logger].removeAllListeners("log");
    }
  }
@@ -290,25 +298,89 @@ function restoreConsole() {
  });
}

function loadAppender(appender) {
/**
 * Load an appenderModule based on the provided appender filepath. Will first
 * check if the appender path is a subpath of the log4js "lib/appenders" directory.
 * If not, it will attempt to load the appender as a complete path.
 *
 * @param {string} appender The filepath for the appender.
 * @returns {Object|null} The required appender or null if appender could not be loaded.
 * @private
 */
function requireAppender(appender) {
  var appenderModule;
  try {
    appenderModule = require('./appenders/' + appender);
  } catch (e) {
    appenderModule = require(appender);
  }
  return appenderModule;
}

/**
 * Load an appender. Provided the appender path to be loaded. If appenderModule is defined,
 * it will be used in place of requiring the appender module.
 *
 * @param {string} appender The path to the appender module.
 * @param {Object|void} [appenderModule] The pre-required appender module. When provided,
 * instead of requiring the appender by its path, this object will be used.
 * @returns {void}
 * @private
 */
function loadAppender(appender, appenderModule) {
  appenderModule = appenderModule || requireAppender(appender);

  if (!appenderModule) {
    throw new Error("Invalid log4js appender: " + util.inspect(appender));
  }

  module.exports.appenders[appender] = appenderModule.appender.bind(appenderModule);
  if (appenderModule.shutdown) {
    appenderShutdowns[appender] = appenderModule.shutdown.bind(appenderModule);
  }
  appenderMakers[appender] = appenderModule.configure.bind(appenderModule);
}

/**
 * Shutdown all log appenders. This will first disable all writing to appenders
 * and then call the shutdown function of each appender.
 *
 * @param {Function} cb - The callback to be invoked once all appenders have
 * shutdown. If an error occurs, the callback will be given the error object
 * as the first argument.
 * @returns {void}
 */
function shutdown(cb) {
  // First, disable all writing to appenders. This prevents appenders from
  // not being able to be drained because of run-away log writes.
  loggerModule.disableAllLogWrites();

  // Next, get all the shutdown functions for appenders as an array.
  var shutdownFunctions = Object.keys(appenderShutdowns).reduce(
    function(accum, category) {
      return accum.concat(appenderShutdowns[category]);
    }, []);

  // Call each of the shutdown functions.
  async.forEach(
    shutdownFunctions,
    function(shutdownFn, done) {
      shutdownFn(done);
    },
    cb
  );
}

module.exports = {
  getLogger: getLogger,
  getDefaultLogger: getDefaultLogger,
  hasLogger: hasLogger,

  addAppender: addAppender,
  loadAppender: loadAppender,
  clearAppenders: clearAppenders,
  configure: configure,
  shutdown: shutdown,

  replaceConsole: replaceConsole,
  restoreConsole: restoreConsole,
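The new optional second argument of `loadAppender` accepts an already-required appender object, and `shutdown(cb)` drains every appender that registered a shutdown function. A usage sketch; the in-memory appender here is hypothetical:

```js
// Sketch: register a hypothetical appender object, then shut log4js down cleanly.
var log4js = require('log4js');

var memoryAppender = {
  appender: function() { return function(loggingEvent) { /* keep events in memory */ }; },
  configure: function(config) { return this.appender(); },
  shutdown: function(cb) { cb(); } // invoked by log4js.shutdown()
};

log4js.loadAppender('memory', memoryAppender);
log4js.addAppender(log4js.appenderMakers['memory']({}), 'app');

log4js.getLogger('app').info('hello');
log4js.shutdown(function(err) {
  // all appenders have been flushed and closed at this point
});
```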
lib/logger.js
@@ -4,6 +4,8 @@ var levels = require('./levels')
, events = require('events')
, DEFAULT_CATEGORY = '[default]';

var logWritesEnabled = true;

/**
 * Models a logging event.
 * @constructor
@@ -49,9 +51,12 @@ Logger.prototype.removeLevel = function() {

Logger.prototype.log = function() {
  var args = Array.prototype.slice.call(arguments)
  , logLevel = args.shift()
  , loggingEvent = new LoggingEvent(this.category, logLevel, args, this);
  this.emit("log", loggingEvent);
  , logLevel = levels.toLevel(args.shift())
  , loggingEvent;
  if (this.isLevelEnabled(logLevel)) {
    loggingEvent = new LoggingEvent(this.category, logLevel, args, this);
    this.emit("log", loggingEvent);
  }
};

Logger.prototype.isLevelEnabled = function(otherLevel) {
@@ -66,7 +71,7 @@ Logger.prototype.isLevelEnabled = function(otherLevel) {
};

Logger.prototype[levelString.toLowerCase()] = function () {
  if (this.isLevelEnabled(level)) {
  if (logWritesEnabled && this.isLevelEnabled(level)) {
    var args = Array.prototype.slice.call(arguments);
    args.unshift(level);
    Logger.prototype.log.apply(this, args);
@@ -75,6 +80,23 @@ Logger.prototype.isLevelEnabled = function(otherLevel) {
  }
);

/**
 * Disable all log writes.
 * @returns {void}
 */
function disableAllLogWrites() {
  logWritesEnabled = false;
}

/**
 * Enable log writes.
 * @returns {void}
 */
function enableAllLogWrites() {
  logWritesEnabled = true;
}

exports.LoggingEvent = LoggingEvent;
exports.Logger = Logger;
exports.disableAllLogWrites = disableAllLogWrites;
exports.enableAllLogWrites = enableAllLogWrites;
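`disableAllLogWrites` and `enableAllLogWrites` act as a global gate in front of every level method; the shutdown path uses the former and the tests re-enable writes afterwards. A small sketch (module path as used inside the repository's own tests):

```js
// Sketch: temporarily silence every logger.
var loggerModule = require('log4js/lib/logger');

loggerModule.disableAllLogWrites(); // logger.debug/info/... become no-ops
// ... run something noisy ...
loggerModule.enableAllLogWrites();  // logging resumes
```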
lib/streams/BaseRollingFileStream.js
@@ -48,7 +48,13 @@ BaseRollingFileStream.prototype._write = function(chunk, encoding, callback) {
function writeTheChunk() {
  debug("writing the chunk to the underlying stream");
  that.currentSize += chunk.length;
  that.theStream.write(chunk, encoding, callback);
  try {
    that.theStream.write(chunk, encoding, callback);
  }
  catch (err){
    debug(err);
    callback();
  }
}

debug("in _write");
lib/streams/DateRollingFileStream.js
@@ -17,7 +17,14 @@ function DateRollingFileStream(filename, pattern, options, now) {
}
this.pattern = pattern || '.yyyy-MM-dd';
this.now = now || Date.now;
this.lastTimeWeWroteSomething = format.asString(this.pattern, new Date(this.now()));

if (fs.existsSync(filename)) {
  var stat = fs.statSync(filename);
  this.lastTimeWeWroteSomething = format.asString(this.pattern, stat.mtime);
} else {
  this.lastTimeWeWroteSomething = format.asString(this.pattern, new Date(this.now()));
}

this.baseFilename = filename;
this.alwaysIncludePattern = false;
package.json
@@ -1,6 +1,6 @@
{
  "name": "log4js",
  "version": "0.6.7",
  "version": "0.6.15",
  "description": "Port of Log4js to work with node.",
  "keywords": [
    "logging",
@@ -9,7 +9,7 @@
    "node"
  ],
  "main": "./lib/log4js",
  "author": "Gareth Jones <gareth.jones@sensis.com.au>",
  "author": "Gareth Jones <gareth.nomiddlename@gmail.com>",
  "repository": {
    "type": "git",
    "url": "https://github.com/nomiddlename/log4js-node.git"
@@ -29,7 +29,6 @@
  },
  "dependencies": {
    "async": "0.1.15",
    "dequeue": "1.0.3",
    "semver": "~1.1.4",
    "readable-stream": "~1.0.2"
  },
@@ -38,5 +37,8 @@
    "sandboxed-module": "0.1.3",
    "hook.io": "0.8.10",
    "underscore": "1.2.1"
  },
  "browser": {
    "os": false
  }
}
test/categoryFilter-test.js (Normal file, 83 lines)
@@ -0,0 +1,83 @@
'use strict';

var vows = require('vows')
, fs = require('fs')
, assert = require('assert');

function remove(filename) {
  try {
    fs.unlinkSync(filename);
  } catch (e) {
    //doesn't really matter if it failed
  }
}

vows.describe('log4js categoryFilter').addBatch({
  'appender': {
    topic: function() {

      var log4js = require('../lib/log4js'), logEvents = [], webLogger, appLogger;
      log4js.clearAppenders();
      var appender = require('../lib/appenders/categoryFilter')
        .appender(
          ['app'],
          function(evt) { logEvents.push(evt); }
        );
      log4js.addAppender(appender, ["app","web"]);

      webLogger = log4js.getLogger("web");
      appLogger = log4js.getLogger("app");

      webLogger.debug('This should get logged');
      appLogger.debug('This should not');
      webLogger.debug('Hello again');
      log4js.getLogger('db').debug('This shouldn\'t be included by the appender anyway');

      return logEvents;
    },
    'should only pass matching category' : function(logEvents) {
      assert.equal(logEvents.length, 2);
      assert.equal(logEvents[0].data[0], 'This should get logged');
      assert.equal(logEvents[1].data[0], 'Hello again');
    }
  },

  'configure': {
    topic: function() {
      var log4js = require('../lib/log4js')
      , logger, weblogger;

      remove(__dirname + '/categoryFilter-web.log');
      remove(__dirname + '/categoryFilter-noweb.log');

      log4js.configure('test/with-categoryFilter.json');
      logger = log4js.getLogger("app");
      weblogger = log4js.getLogger("web");

      logger.info('Loading app');
      logger.debug('Initialising indexes');
      weblogger.info('00:00:00 GET / 200');
      weblogger.warn('00:00:00 GET / 500');
      //wait for the file system to catch up
      setTimeout(this.callback, 100);
    },
    'tmp-tests.log': {
      topic: function() {
        fs.readFile(__dirname + '/categoryFilter-noweb.log', 'utf8', this.callback);
      },
      'should contain all log messages': function(contents) {
        var messages = contents.trim().split('\n');
        assert.deepEqual(messages, ['Loading app','Initialising indexes']);
      }
    },
    'tmp-tests-web.log': {
      topic: function() {
        fs.readFile(__dirname + '/categoryFilter-web.log','utf8',this.callback);
      },
      'should contain only error and warning log messages': function(contents) {
        var messages = contents.trim().split('\n');
        assert.deepEqual(messages, ['00:00:00 GET / 200','00:00:00 GET / 500']);
      }
    }
  }
}).export(module);
test/clusteredAppender-test.js (Executable file, 128 lines)
@@ -0,0 +1,128 @@
"use strict";
var assert = require('assert');
var vows = require('vows');
var layouts = require('../lib/layouts');
var sandbox = require('sandboxed-module');
var LoggingEvent = require('../lib/logger').LoggingEvent;
var cluster = require('cluster');

vows.describe('log4js cluster appender').addBatch({
  'when in master mode': {
    topic: function() {

      var registeredClusterEvents = [];
      var loggingEvents = [];

      // Fake cluster module, so no cluster listeners be really added
      var fakeCluster = {

        on: function(event, callback) {
          registeredClusterEvents.push(event);
        },

        isMaster: true,
        isWorker: false,

      };

      var fakeActualAppender = function(loggingEvent) {
        loggingEvents.push(loggingEvent);
      }

      // Load appender and fake modules in it
      var appenderModule = sandbox.require('../lib/appenders/clustered', {
        requires: {
          'cluster': fakeCluster,
        }
      });

      var masterAppender = appenderModule.appender({
        actualAppenders: [fakeActualAppender, fakeActualAppender, fakeActualAppender],
        appenders: [{}, {category: "test"}, {category: "wovs"}]
      });

      // Actual test - log message using masterAppender
      masterAppender(new LoggingEvent('wovs', 'Info', ['masterAppender test']));

      var returnValue = {
        registeredClusterEvents: registeredClusterEvents,
        loggingEvents: loggingEvents,
      };

      return returnValue;
    },

    "should register 'fork' event listener on 'cluster'": function(topic) {
      assert.equal(topic.registeredClusterEvents[0], 'fork');
    },

    "should log using actual appender": function(topic) {
      assert.equal(topic.loggingEvents.length, 2)
      assert.equal(topic.loggingEvents[0].data[0], 'masterAppender test');
      assert.equal(topic.loggingEvents[1].data[0], 'masterAppender test');
    },

  },

  'when in worker mode': {

    topic: function() {

      var registeredProcessEvents = [];

      // Fake cluster module, to fake we're inside a worker process
      var fakeCluster = {

        isMaster: false,
        isWorker: true,

      };

      var fakeProcess = {

        send: function(data) {
          registeredProcessEvents.push(data);
        },

      };

      // Load appender and fake modules in it
      var appenderModule = sandbox.require('../lib/appenders/clustered', {
        requires: {
          'cluster': fakeCluster,
        },
        globals: {
          'process': fakeProcess,
        }
      });

      var workerAppender = appenderModule.appender();

      // Actual test - log message using masterAppender
      workerAppender(new LoggingEvent('wovs', 'Info', ['workerAppender test']));
      workerAppender(new LoggingEvent('wovs', 'Info', [new Error('Error test')]));

      var returnValue = {
        registeredProcessEvents: registeredProcessEvents,
      };

      return returnValue;

    },

    "worker appender should call process.send" : function(topic) {
      assert.equal(topic.registeredProcessEvents[0].type, '::log-message');
      assert.equal(JSON.parse(topic.registeredProcessEvents[0].event).data[0], "workerAppender test");
    },

    "worker should serialize an Error correctly" : function(topic) {
      assert.equal(topic.registeredProcessEvents[1].type, '::log-message');
      assert(JSON.parse(topic.registeredProcessEvents[1].event).data[0].stack);
      var actual = JSON.parse(topic.registeredProcessEvents[1].event).data[0].stack;
      var expectedRegex = /^Error: Error test/;
      assert(actual.match(expectedRegex), "Expected: \n\n " + actual + "\n\n to match " + expectedRegex);
    }

  }

}).exportTo(module);
@@ -86,6 +86,21 @@ vows.describe('log4js configure').addBatch({
  assert.isFunction(log4js.appenderMakers['some/other/external']);
}
},
'when appender object loaded via loadAppender': {
  topic: function() {
    var testAppender = makeTestAppender(),
    log4js = sandbox.require('../lib/log4js');

    log4js.loadAppender('some/other/external', testAppender);
    return log4js;
  },
  'should load appender with provided object': function(log4js) {
    assert.ok(log4js.appenders['some/other/external']);
  },
  'should add appender configure function to appenderMakers': function(log4js) {
    assert.isFunction(log4js.appenderMakers['some/other/external']);
  }
},
'when configuration file loaded via LOG4JS_CONFIG environment variable': {
  topic: function() {
    process.env.LOG4JS_CONFIG = 'some/path/to/mylog4js.json';
@@ -4,7 +4,8 @@ var vows = require('vows')
, path = require('path')
, fs = require('fs')
, sandbox = require('sandboxed-module')
, log4js = require('../lib/log4js');
, log4js = require('../lib/log4js')
, EOL = require('os').EOL || '\n';

function removeFile(filename) {
  return function() {
@@ -134,7 +135,10 @@ vows.describe('../lib/appenders/dateFile').addBatch({
teardown: removeFile('date-file-test.log'),

'should load appender configuration from a json file': function(err, contents) {
  assert.include(contents, 'this should be written to the file' + require('os').EOL);
  if (err) {
    throw err;
  }
  assert.include(contents, 'this should be written to the file' + EOL);
  assert.equal(contents.indexOf('this should not be written to the file'), -1);
}
},
@@ -161,7 +165,7 @@ vows.describe('../lib/appenders/dateFile').addBatch({
, thisTime = format.asString(options.appenders[0].pattern, new Date());
fs.writeFileSync(
  path.join(__dirname, 'date-file-test' + thisTime),
  "this is existing data" + require('os').EOL,
  "this is existing data" + EOL,
  'utf8'
);
log4js.clearAppenders();
@@ -39,6 +39,13 @@ vows.describe('date_format').addBatch({
  dateFormat.asString(dateFormat.ABSOLUTETIME_FORMAT, date),
  '14:31:30.005'
  );
},
'should provide a custom format': function(date) {
  date.getTimezoneOffset = function() { return 120; };
  assert.equal(
    dateFormat.asString("O.SSS.ss.mm.hh.dd.MM.yy", date),
    '-0200.005.30.31.14.11.01.10'
  );
}
}
}).export(module);
@@ -222,7 +222,7 @@ vows.describe('log4js fileAppender').addBatch({
, logger;
//this config file defines one file appender (to ./tmp-tests.log)
//and sets the log level for "tests" to WARN
log4js.configure('test/log4js.json');
log4js.configure('./test/log4js.json');
logger = log4js.getLogger('tests');
logger.info('this should not be written to the file');
logger.warn('this should be written to the file');
@@ -7,7 +7,7 @@ function test(args, pattern, value) {
var layout = args[0]
, event = args[1]
, tokens = args[2];

assert.equal(layout(pattern, tokens)(event), value);
}

@@ -16,7 +16,7 @@ vows.describe('log4js layouts').addBatch({
topic: function() {
  return require('../lib/layouts').colouredLayout;
},

'should apply level colour codes to output': function(layout) {
  var output = layout({
    data: ["nonsense"],
@@ -40,7 +40,7 @@ vows.describe('log4js layouts').addBatch({
  assert.equal(output, '\x1B[31m[2010-12-05 14:18:30.045] [ERROR] cheese - \x1B[39mthing 2');
}
},

'messagePassThroughLayout': {
topic: function() {
  return require('../lib/layouts').messagePassThroughLayout;
@@ -58,49 +58,49 @@ vows.describe('log4js layouts').addBatch({
},
'should support the console.log format for the message' : function(layout) {
  assert.equal(layout({
    data: ["thing %d", 1, "cheese"],
    startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
    categoryName: "cheese",
    level : {
      colour: "green",
      toString: function() { return "ERROR"; }
    }
  }), "thing 1 cheese");
},
'should output the first item even if it is not a string': function(layout) {
  assert.equal(layout({
    data: [ { thing: 1} ],
    startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
    categoryName: "cheese",
    level: {
      colour: "green",
      toString: function() { return "ERROR"; }
    }
  }), "{ thing: 1 }");
},
'should print the stacks of a passed error objects': function(layout) {
  assert.isArray(layout({
    data: [ new Error() ],
    startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
    categoryName: "cheese",
    level: {
      colour: "green",
      toString: function() { return "ERROR"; }
    }
  }).match(/Error\s+at Object\..*\s+\((.*)test[\\\/]layouts-test\.js\:\d+\:\d+\)\s+at runTest/)
  , 'regexp did not return a match');
},
'with passed augmented errors': {
  topic: function(layout){
    var e = new Error("My Unique Error Message");
    e.augmented = "My Unique attribute value";
    e.augObj = { at1: "at2" };
    return layout({
      data: [ e ],
      startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
      categoryName: "cheese",
      level: {
        colour: "green",
        toString: function() { return "ERROR"; }
      }
    });
@@ -118,10 +118,10 @@ vows.describe('log4js layouts').addBatch({
    assert.isArray(m);
  }
}

},

'basicLayout': {
topic: function() {
  var layout = require('../lib/layouts').basicLayout,
@@ -143,17 +143,17 @@ vows.describe('log4js layouts').addBatch({
  var layout = args[0], event = args[1], output, lines,
  error = new Error("Some made-up error"),
  stack = error.stack.split(/\n/);

  event.data = ['this is a test', error];
  output = layout(event);
  lines = output.split(/\n/);

  assert.equal(lines.length - 1, stack.length);
  assert.equal(
    lines[0],
    "[2010-12-05 14:18:30.045] [DEBUG] tests - this is a test [Error: Some made-up error]"
  );

  for (var i = 1; i < stack.length; i++) {
    assert.equal(lines[i+2], stack[i+1]);
  }
@@ -166,13 +166,13 @@ vows.describe('log4js layouts').addBatch({
  }];
  output = layout(event);
  assert.equal(
    output,
    "[2010-12-05 14:18:30.045] [DEBUG] tests - this is a test " +
    "{ name: 'Cheese', message: 'Gorgonzola smells.' }"
  );
}
},

'patternLayout': {
topic: function() {
  var event = {
@@ -188,9 +188,12 @@ vows.describe('log4js layouts').addBatch({
  testFunction: function() { return 'testFunctionToken'; },
  fnThatUsesLogEvent: function(logEvent) { return logEvent.level.toString(); }
  };

  //override getTimezoneOffset
  event.startTime.getTimezoneOffset = function() { return 0; };
  return [layout, event, tokens];
},

'should default to "time logLevel loggerName - message"': function(args) {
  test(args, null, "14:18:30 DEBUG multiple.levels.of.tests - this is a test\n");
},
@@ -209,6 +212,9 @@ vows.describe('log4js layouts').addBatch({
'%n should output a new line': function(args) {
  test(args, '%n', '\n');
},
'%h should output hostname' : function(args) {
  test(args, '%h', require('os').hostname().toString());
},
'%c should handle category names like java-style package names': function(args) {
  test(args, '%c{1}', 'tests');
  test(args, '%c{2}', 'of.tests');
@@ -221,9 +227,11 @@ vows.describe('log4js layouts').addBatch({
  test(args, '%d', '2010-12-05 14:18:30.045');
},
'%d should allow for format specification': function(args) {
  test(args, '%d{ISO8601_WITH_TZ_OFFSET}', '2010-12-05T14:18:30-0000');
  test(args, '%d{ISO8601}', '2010-12-05 14:18:30.045');
  test(args, '%d{ABSOLUTE}', '14:18:30.045');
  test(args, '%d{DATE}', '05 12 2010 14:18:30.045');
  test(args, '%d{yy MM dd hh mm ss}', '10 12 05 14 18 30');
  test(args, '%d{yyyy MM dd}', '2010 12 05');
  test(args, '%d{yyyy MM dd hh mm ss SSS}', '2010 12 05 14 18 30 045');
},
@@ -2,7 +2,8 @@
var vows = require('vows')
, assert = require('assert')
, levels = require('../lib/levels')
, Logger = require('../lib/logger').Logger;
, loggerModule = require('../lib/logger')
, Logger = loggerModule.Logger;

vows.describe('../lib/logger').addBatch({
'constructor with no parameters': {
@@ -53,5 +54,28 @@ vows.describe('../lib/logger').addBatch({
  assert.isTrue(logger.isErrorEnabled());
  assert.isTrue(logger.isFatalEnabled());
}
},

'should emit log events': {
  topic: function() {
    var events = [],
    logger = new Logger();
    logger.addListener('log', function (logEvent) { events.push(logEvent); });
    logger.debug('Event 1');
    loggerModule.disableAllLogWrites();
    logger.debug('Event 2');
    loggerModule.enableAllLogWrites();
    logger.debug('Event 3');
    return events;
  },

  'when log writes are enabled': function(events) {
    assert.equal(events[0].data[0], 'Event 1');
  },

  'but not when log writes are disabled': function(events) {
    assert.equal(events.length, 2);
    assert.equal(events[1].data[0], 'Event 3');
  }
}
}).exportTo(module);
@@ -75,13 +75,65 @@ vows.describe('log4js').addBatch({
assert.equal(events[1].level.toString(), 'WARN');
},

'should include the error if passed in': function (events) {
'should include the error if passed in': function(events) {
  assert.instanceOf(events[2].data[1], Error);
  assert.equal(events[2].data[1].message, 'Pants are on fire!');
}

}
},

'when shutdown is called': {
  topic: function() {
    var events = {
      appenderShutdownCalled: false,
      shutdownCallbackCalled: false
    },
    log4js = sandbox.require(
      '../lib/log4js',
      {
        requires: {
          './appenders/file':
          {
            name: "file",
            appender: function() {},
            configure: function(configuration) {
              return function() {};
            },
            shutdown: function(cb) {
              events.appenderShutdownCalled = true;
              cb();
            }
          }
        }
      }
    ),
    shutdownCallback = function() {
      events.shutdownCallbackCalled = true;
    },
    config = { appenders:
      [ { "type" : "file",
          "filename" : "cheesy-wotsits.log",
          "maxLogSize" : 1024,
          "backups" : 3
        }
      ]
    };

    log4js.configure(config);
    log4js.shutdown(shutdownCallback);
    // Re-enable log writing so other tests that use logger are not
    // affected.
    require('../lib/logger').enableAllLogWrites();
    return events;
  },

  'should invoke appender shutdowns': function(events) {
    assert.ok(events.appenderShutdownCalled);
  },

  'should call callback': function(events) {
    assert.ok(events.shutdownCallbackCalled);
  }
},

'invalid configuration': {
test/logglyAppender-test.js (Normal file, 82 lines)
@@ -0,0 +1,82 @@
"use strict";
var vows = require('vows')
, assert = require('assert')
, log4js = require('../lib/log4js')
, sandbox = require('sandboxed-module')
;

function setupLogging(category, options) {
  var msgs = [];

  var fakeLoggly = {
    createClient: function (options) {
      return {
        config: options,
        log: function (msg, tags) {
          msgs.push({
            msg: msg,
            tags: tags
          });
        }
      };
    }
  };

  var fakeLayouts = {
    layout: function(type, config) {
      this.type = type;
      this.config = config;
      return log4js.layouts.messagePassThroughLayout;
    },
    basicLayout: log4js.layouts.basicLayout,
    messagePassThroughLayout: log4js.layouts.messagePassThroughLayout
  };

  var fakeConsole = {
    errors: [],
    error: function(msg, value) {
      this.errors.push({ msg: msg, value: value });
    }
  };

  var logglyModule = sandbox.require('../lib/appenders/loggly', {
    requires: {
      'loggly': fakeLoggly,
      '../layouts': fakeLayouts
    },
    globals: {
      console: fakeConsole
    }
  });

  log4js.addAppender(logglyModule.configure(options), category);

  return {
    logger: log4js.getLogger(category),
    loggly: fakeLoggly,
    layouts: fakeLayouts,
    console: fakeConsole,
    results: msgs
  };
}

log4js.clearAppenders();
vows.describe('log4js logglyAppender').addBatch({
  'minimal config': {
    topic: function() {
      var setup = setupLogging('loggly', {
        token: 'your-really-long-input-token',
        subdomain: 'your-subdomain',
        tags: ['loggly-tag1', 'loggly-tag2', 'loggly-tagn']
      });

      setup.logger.log('trace', 'Log event #1');
      return setup;
    },
    'there should be one message only': function (topic) {
      //console.log('topic', topic);
      assert.equal(topic.results.length, 1);
    }
  }

}).export(module);
@@ -75,6 +75,7 @@ vows.describe('Multiprocess Appender').addBatch({
appender('after error, before connect');
fakeNet.cbs.connect();
appender('after error, after connect');
appender(new Error('Error test'));

return fakeNet;
},
@@ -98,6 +99,13 @@ vows.describe('Multiprocess Appender').addBatch({
assert.equal(net.data[6], JSON.stringify('after error, after connect'));
assert.equal(net.data[7], '__LOG4JS__');
assert.equal(net.createConnectionCalled, 2);
},
'should serialize an Error correctly': function(net) {
  assert(JSON.parse(net.data[8]).stack, "Expected:\n\n" + net.data[8] + "\n\n to have a 'stack' property");
  var actual = JSON.parse(net.data[8]).stack;
  var expectedRegex = /^Error: Error test/;
  assert(actual.match(expectedRegex), "Expected: \n\n " + actual + "\n\n to match " + expectedRegex);
}
},
'worker with timeout': {
test/with-categoryFilter.json (Normal file, 23 lines)
@@ -0,0 +1,23 @@
{
  "appenders": [
    {
      "type": "categoryFilter",
      "exclude": "web",
      "appender": {
        "type": "file",
        "filename": "test/categoryFilter-noweb.log",
        "layout": {
          "type": "messagePassThrough"
        }
      }
    },
    {
      "category": "web",
      "type": "file",
      "filename": "test/categoryFilter-web.log",
      "layout": {
        "type": "messagePassThrough"
      }
    }
  ]
}
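This JSON file is the configuration exercised by the `configure` batch in test/categoryFilter-test.js; loading it directly looks like this sketch.

```js
// Sketch: load the categoryFilter configuration shown above.
var log4js = require('log4js');
log4js.configure('test/with-categoryFilter.json');

log4js.getLogger('app').info('goes to categoryFilter-noweb.log');
log4js.getLogger('web').info('goes to categoryFilter-web.log');
```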