Compare commits

...

87 Commits

Author SHA1 Message Date
Gareth Jones
6aacb0da0b Merge pull request #195 from jengler/flush-on-exit
Flush on exit
2014-04-09 07:35:25 +10:00
John Engler
6e3da6f44b Added error throwing when error loading test file.
This will hopefully give us better visibility into our Travis CI
build failures.
2014-04-08 12:40:27 -07:00
John Engler
3b5eb28115 Update dateFile EOL usage to be consistent with appender.
From the looks of the Travis CI failure, this could be the issue
causing failures. Not sure as I can't reproduce locally. However,
it is still an inconsistency and worth fixing.
2014-04-08 10:47:18 -07:00
John Engler
633ed3cddb Support for disabling log writes on shutdown.
Updated logger.js to support disabling all log writes.
Updated log4js.js shutdown function to disable log writes.
Added tests.
Update gitignore to ignore rolling date stream's test output.
2014-04-07 19:06:29 -07:00
John Engler
8ca092cdb9 Removed callback to write, as it is not needed. 2014-04-05 16:14:56 -07:00
John Engler
3ec9811b5e Update log4js module to expose a shutdown function.
loadAppender will check for a shutdown function exposed by
a loaded appender. If present, it will be cached so that the
shutdown function can execute it.

The intent here is that a Node application would not invoke
process.exit until after the log4js shutdown callback returns.
2014-04-05 15:12:45 -07:00
Gareth Jones
2a38f460dc tried adding process.nexttick - didn't help 2013-08-05 07:56:02 +10:00
Gareth Jones
9f77734f74 test case for flush on exit 2013-08-05 07:55:07 +10:00
Gareth Jones
ce8b6b06b9 trying out a shutdown function 2013-08-05 07:21:12 +10:00
Gareth Jones
1e17f88ded 0.6.7 2013-08-02 11:38:34 +10:00
Gareth Jones
d25e1abd48 Merge pull request #142 from crisply/master
Allows use of Console Appender when using with node-webkit
2013-07-14 18:32:26 -07:00
Lex
dde2e69948 Getting console appender to work with node-webkit 2013-07-10 05:07:28 -07:00
Gareth Jones
351a912a86 simplified the reload config code a little, moved the tests into their own file, improved coverage 2013-07-09 09:24:11 +10:00
Gareth Jones
c5fd75dac3 removed check on undefined configState.filename - should not happen, and is covered by the statSync anyway 2013-07-09 08:01:41 +10:00
Gareth Jones
4dd5989d27 Merge branch 'master' of https://github.com/nomiddlename/log4js-node
Conflicts:
	test/gelfAppender-test.js
2013-07-08 15:24:29 +10:00
Gareth Jones
46721465a1 Merge pull request #140 from karlvlam/master
Add custom field support to GELF appender
2013-07-07 16:17:23 -07:00
Gareth Jones
76ff7aa5fa improved coverage of date format 2013-07-08 08:51:42 +10:00
Gareth Jones
be5fa838be improved coverage of hookio appender 2013-07-08 08:46:11 +10:00
Gareth Jones
a86bed975c improved coverage of lib/log4js.js 2013-07-08 08:18:48 +10:00
Karl Lam
baaebef2ed GELF appender - test case covers custom fields, remove unused
console.log
2013-07-05 15:28:10 +08:00
Karl Lam
837d007de3 GELF appender can add customFields to config for every message 2013-07-05 11:23:59 +08:00
Karl Lam
be754f0c0e GELF appender can add custom fields 2013-07-05 10:54:31 +08:00
Gareth Jones
946b216a79 improved coverage of rolling file stream 2013-07-05 08:36:42 +10:00
Gareth Jones
508dbdadf8 improved coverage of gelf appender 2013-07-05 08:04:16 +10:00
Gareth Jones
2e7f6e5a66 improved coverage of logger 2013-07-01 08:24:29 +10:00
Gareth Jones
cbadb5fa19 improved coverage of multiprocess appender 2013-07-01 08:24:06 +10:00
Gareth Jones
c258470cda improved coverage of file appenders 2013-06-28 08:44:54 +10:00
Gareth Jones
2b070e5470 Fixed a problem when tests run in node 0.8 2013-06-28 07:55:25 +10:00
Gareth Jones
4cd546e8b3 improved coverage of baserollingfilestream 2013-06-27 08:46:18 +10:00
Gareth Jones
0e5da1d361 moved debug fn out to own module, added tests 2013-06-24 08:51:10 +10:00
Gareth Jones
fc7f686f65 improved coverage for console appender 2013-06-18 08:47:32 +10:00
Gareth Jones
4a8f0580de improved coverage for connect-logger 2013-06-18 08:47:18 +10:00
Gareth Jones
f50fab2b86 improved coverage for connect logger 2013-06-17 16:01:22 +10:00
Gareth Jones
f1c0767ca3 improved coverage 2013-06-17 16:01:03 +10:00
Gareth Jones
652888944b improved coverage for date_format 2013-06-17 16:00:42 +10:00
Gareth Jones
efc4e36317 improved coverage for layouts 2013-06-14 08:13:16 +10:00
Gareth Jones
d2f30b473f added test to improve levels coverage 2013-06-14 07:28:55 +10:00
Gareth Jones
fa179ecba2 added a delay to dateFile test, to let the filesystem catch up 2013-06-06 08:00:34 +10:00
Gareth Jones
dd25d30228 rolled back my clever map+join, because it broke the tests 2013-06-06 07:53:22 +10:00
Gareth Jones
11fe5bde5f increased test coverage for smtp appender 2013-06-05 18:30:11 +10:00
Gareth Jones
41ddf5eea7 merged util.format branch (fixes a lint error and simplifies the code) 2013-06-05 08:52:07 +10:00
Gareth Jones
81fa9c3568 removed unnecessary argument to createNoLogCondition 2013-06-05 08:38:39 +10:00
Gareth Jones
7ca517b5ed simplified createNoLogCondition 2013-06-05 08:37:27 +10:00
Gareth Jones
6368de1094 refactored pattern layout 2013-06-05 08:02:10 +10:00
Gareth Jones
94dbd22c71 reduced complex function to smaller ones 2013-06-04 08:37:36 +10:00
Gareth Jones
0a2a6c0769 don't create functions in a loop 2013-06-04 08:32:35 +10:00
Gareth Jones
5d6f00eda4 fixed all lint errors except ones which require refactoring of code 2013-06-04 08:17:36 +10:00
Gareth Jones
f998d7e81a more linting 2013-05-30 08:45:15 +10:00
Gareth Jones
46ae1a586d more linting 2013-05-30 08:26:26 +10:00
Gareth Jones
516320c79a more linting 2013-05-30 08:26:03 +10:00
Gareth Jones
40ec9e98e4 more linting 2013-05-30 08:00:04 +10:00
Gareth Jones
cc2e94cf11 more linting 2013-05-30 07:58:09 +10:00
Gareth Jones
2de838bc76 more linting 2013-05-30 07:56:28 +10:00
Gareth Jones
87dc7cf5aa more linting 2013-05-30 07:54:42 +10:00
Gareth Jones
913c748ee0 more linting 2013-05-29 08:42:09 +10:00
Gareth Jones
def0e8e371 more linting 2013-05-29 08:35:40 +10:00
Gareth Jones
20f80ff775 more linting 2013-05-29 08:29:30 +10:00
Gareth Jones
f24db59523 more linting 2013-05-29 08:28:35 +10:00
Gareth Jones
07869b915f more linting 2013-05-27 08:17:32 +10:00
Gareth Jones
2cd27e4293 more linting 2013-05-27 08:15:57 +10:00
Gareth Jones
3d11cbc0ad more linting 2013-05-27 08:14:51 +10:00
Gareth Jones
e5dba219d1 more linting 2013-05-27 08:11:24 +10:00
Gareth Jones
9853e13429 more linting 2013-05-27 08:01:00 +10:00
Gareth Jones
4fd138f87d more linting 2013-05-27 07:48:29 +10:00
Gareth Jones
1ad4977aec more linting 2013-05-27 07:44:59 +10:00
Gareth Jones
7cb7e6df72 more linting 2013-05-27 07:41:16 +10:00
Gareth Jones
2192a094b6 more linting 2013-05-26 17:21:39 +10:00
Gareth Jones
6a9441d261 more linting 2013-05-26 17:15:10 +10:00
Gareth Jones
50b676dec5 more linting 2013-05-26 16:51:46 +10:00
Gareth Jones
8b3c036245 more linting 2013-05-26 16:41:31 +10:00
Gareth Jones
b356dec318 Getting my lint on (via bob) 2013-05-25 14:00:06 +10:00
Gareth Jones
8383dfc4f4 0.6.6 2013-05-25 13:10:46 +10:00
Gareth Jones
4e8fb26099 Missed out the smtp test 2013-05-25 13:08:43 +10:00
Gareth Jones
8492519e3b Fixing issue #137 2013-05-25 13:04:48 +10:00
Gareth Jones
fdc9d253c9 0.6.5 2013-05-16 16:57:25 +10:00
Gareth Jones
18e21ca473 Merge branch 'master' of https://github.com/nomiddlename/log4js-node 2013-05-16 16:55:47 +10:00
Gareth Jones
ab8c7ed89d Merge pull request #136 from issacg/dontalwaysrename-bug
Dontalwaysrename bug
2013-05-15 23:52:57 -07:00
Gareth Jones
aa4f7c071b Merge pull request #135 from jmav/master
auto level detection from @jmav
2013-05-15 23:52:27 -07:00
Issac Goldstand
dc632f4705 Fixes bug introduced in github issue #132 where file rolling needs to be handled differently for alwaysIncludePattern streams 2013-05-11 23:01:28 +03:00
Jure Mav
ac6284add1 Added automatic level detection to connect-logger, depends on http status response.
Update of connect logger example code, compatible with express 3.x
2013-05-11 16:17:23 +02:00
Issac Goldstand
2da01cc611 Fixes bug introduced in github issue #132 where renaming a file to itself can cause an unhandled error 2013-05-09 13:09:59 +03:00
Gareth Jones
ad8229145e Merge pull request #133 from issacg/baseFileRollingStream-bug
Fixes bug in detecting empty options (see issue #132 on github)
2013-05-08 02:24:02 -07:00
Issac Goldstand
8c12c948d9 Fixes bug in detecting empty options (see issue #132 on github) 2013-05-08 12:05:32 +03:00
Gareth Jones
af6ae7af98 new version for alwaysIncludePattern 2013-05-05 14:01:40 +10:00
Gareth Jones
f272e3fd0a Merge branch 'master' into util.format 2013-02-25 16:43:03 +11:00
Gareth Jones
c9a890b37b added some test output files to gitignore 2013-02-12 07:23:18 +11:00
Gareth Jones
0dbc4921a3 Changed layouts to use util.format instead of my own implementation 2013-01-11 15:35:00 +11:00
50 changed files with 5698 additions and 3640 deletions

12
.bob.json Normal file
View File

@@ -0,0 +1,12 @@
{
"build": "clean lint coverage test",
"lint": {
"type": "jshint"
},
"coverage": {
"type": "vows"
},
"test": {
"type": "vows"
}
}

4
.gitignore vendored
View File

@@ -2,4 +2,6 @@
*.log?? *.log??
build build
node_modules node_modules
.bob/
test/streams/test-rolling-file-stream*
test/streams/test-rolling-stream-with-existing-files*

15
.jshintrc Normal file
View File

@@ -0,0 +1,15 @@
{
"node": true,
"laxcomma": true,
"indent": 2,
"globalstrict": true,
"maxparams": 5,
"maxdepth": 3,
"maxstatements": 20,
"maxcomplexity": 5,
"maxlen": 100,
"globals": {
"describe": true,
"it": true
}
}

View File

@@ -1,14 +1,46 @@
var log4js = require('./lib/log4js'); //The connect/express logger was added to log4js by danbell. This allows connect/express servers to log using log4js.
log4js.addAppender(log4js.fileAppender('cheese.log'), 'cheese'); //https://github.com/nomiddlename/log4js-node/wiki/Connect-Logger
var logger = log4js.getLogger('cheese'); // load modules
logger.setLevel('INFO'); var log4js = require('log4js');
var express = require("express");
var app = express();
var app = require('express').createServer(); //config
log4js.configure({
appenders: [
{ type: 'console' },
{ type: 'file', filename: 'logs/log4jsconnect.log', category: 'log4jslog' }
]
});
//define logger
var logger = log4js.getLogger('log4jslog');
// set at which time msg is logged print like: only on error & above
// logger.setLevel('ERROR');
//express app
app.configure(function() { app.configure(function() {
app.use(log4js.connectLogger(logger, { level: log4js.levels.INFO })); app.use(express.favicon(''));
// app.use(log4js.connectLogger(logger, { level: log4js.levels.INFO }));
// app.use(log4js.connectLogger(logger, { level: 'auto', format: ':method :url :status' }));
//### AUTO LEVEL DETECTION
//http responses 3xx, level = WARN
//http responses 4xx & 5xx, level = ERROR
//else.level = INFO
app.use(log4js.connectLogger(logger, { level: 'auto' }));
}); });
app.get('*', function(req,res) {
res.send('hello world\n <a href="/cheese">cheese</a>\n'); //route
app.get('/', function(req,res) {
res.send('hello world');
}); });
//start app
app.listen(5000); app.listen(5000);
console.log('server runing at localhost:5000');
console.log('Simulation of normal response: goto localhost:5000');
console.log('Simulation of error response: goto localhost:5000/xxx');

27
examples/flush-on-exit.js Normal file
View File

@@ -0,0 +1,27 @@
/**
* run this, then "ab -c 10 -n 100 localhost:4444/" to test (in
* another shell)
*/
var log4js = require('../lib/log4js');
log4js.configure({
appenders: [
{ type: 'file', filename: 'cheese.log', category: 'cheese' },
{ type: 'console'}
]
});
var logger = log4js.getLogger('cheese');
logger.setLevel('INFO');
var http=require('http');
var server = http.createServer(function(request, response){
response.writeHead(200, {'Content-Type': 'text/plain'});
var rd = Math.random() * 50;
logger.info("hello " + rd);
response.write('hello ');
if (Math.floor(rd) == 30){
log4js.shutdown(function() { process.exit(1); });
}
response.end();
}).listen(4444);

43
examples/smtp-appender.js Normal file
View File

@@ -0,0 +1,43 @@
//Note that smtp appender needs nodemailer to work.
//If you haven't got nodemailer installed, you'll get cryptic
//"cannot find module" errors when using the smtp appender
var log4js = require('../lib/log4js')
, log
, logmailer
, i = 0;
log4js.configure({
"appenders": [
{
type: "console",
category: "test"
},
{
"type": "smtp",
"recipients": "logfilerecipient@logging.com",
"sendInterval": 5,
"transport": "SMTP",
"SMTP": {
"host": "smtp.gmail.com",
"secureConnection": true,
"port": 465,
"auth": {
"user": "someone@gmail",
"pass": "********************"
},
"debug": true
},
"category": "mailer"
}
]
});
log = log4js.getLogger("test");
logmailer = log4js.getLogger("mailer");
function doTheLogging(x) {
log.info("Logging something %d", x);
logmailer.info("Logging something %d", x);
}
for ( ; i < 500; i++) {
doTheLogging(i);
}

View File

@@ -1,5 +1,6 @@
var layouts = require('../layouts'), "use strict";
consoleLog = console.log; var layouts = require('../layouts')
, consoleLog = console.log.bind(console);
function consoleAppender (layout) { function consoleAppender (layout) {
layout = layout || layouts.colouredLayout; layout = layout || layouts.colouredLayout;

View File

@@ -1,9 +1,10 @@
var streams = require('../streams'), "use strict";
layouts = require('../layouts'), var streams = require('../streams')
path = require('path'), , layouts = require('../layouts')
os = require('os'), , path = require('path')
eol = os.EOL || '\n', , os = require('os')
openFiles = []; , eol = os.EOL || '\n'
, openFiles = [];
//close open files on process exit. //close open files on process exit.
process.on('exit', function() { process.on('exit', function() {
@@ -22,7 +23,11 @@ process.on('exit', function() {
function appender(filename, pattern, alwaysIncludePattern, layout) { function appender(filename, pattern, alwaysIncludePattern, layout) {
layout = layout || layouts.basicLayout; layout = layout || layouts.basicLayout;
var logFile = new streams.DateRollingFileStream(filename, pattern, { alwaysIncludePattern: alwaysIncludePattern }); var logFile = new streams.DateRollingFileStream(
filename,
pattern,
{ alwaysIncludePattern: alwaysIncludePattern }
);
openFiles.push(logFile); openFiles.push(logFile);
return function(logEvent) { return function(logEvent) {

View File

@@ -1,10 +1,12 @@
"use strict";
var layouts = require('../layouts') var layouts = require('../layouts')
, path = require('path') , async = require('async')
, fs = require('fs') , path = require('path')
, streams = require('../streams') , fs = require('fs')
, os = require('os') , streams = require('../streams')
, eol = os.EOL || '\n' , os = require('os')
, openFiles = []; , eol = os.EOL || '\n'
, openFiles = [];
//close open files on process exit. //close open files on process exit.
process.on('exit', function() { process.on('exit', function() {
@@ -17,9 +19,12 @@ process.on('exit', function() {
* File Appender writing the logs to a text file. Supports rolling of logs by size. * File Appender writing the logs to a text file. Supports rolling of logs by size.
* *
* @param file file log messages will be written to * @param file file log messages will be written to
* @param layout a function that takes a logevent and returns a string (defaults to basicLayout). * @param layout a function that takes a logevent and returns a string
* @param logSize - the maximum size (in bytes) for a log file, if not provided then logs won't be rotated. * (defaults to basicLayout).
* @param numBackups - the number of log files to keep after logSize has been reached (default 5) * @param logSize - the maximum size (in bytes) for a log file,
* if not provided then logs won't be rotated.
* @param numBackups - the number of log files to keep after logSize
* has been reached (default 5)
*/ */
function fileAppender (file, layout, logSize, numBackups) { function fileAppender (file, layout, logSize, numBackups) {
var bytesWritten = 0; var bytesWritten = 0;
@@ -38,7 +43,12 @@ function fileAppender (file, layout, logSize, numBackups) {
numFiles numFiles
); );
} else { } else {
stream = fs.createWriteStream(file, { encoding: "utf8", mode: 0644, flags: 'a' }); stream = fs.createWriteStream(
file,
{ encoding: "utf8",
mode: parseInt('0644', 8),
flags: 'a' }
);
} }
stream.on("error", function (err) { stream.on("error", function (err) {
console.error("log4js.fileAppender - Writing to file %s, error happened ", file, err); console.error("log4js.fileAppender - Writing to file %s, error happened ", file, err);
@@ -69,5 +79,16 @@ function configure(config, options) {
return fileAppender(config.filename, layout, config.maxLogSize, config.backups); return fileAppender(config.filename, layout, config.maxLogSize, config.backups);
} }
function shutdown(cb) {
async.forEach(openFiles, function(file, done) {
if (!file.write(eol, "utf-8")) {
file.once('drain', function() {
file.end(done);
});
}
}, cb);
}
exports.appender = fileAppender; exports.appender = fileAppender;
exports.configure = configure; exports.configure = configure;
exports.shutdown = shutdown;

View File

@@ -1,8 +1,10 @@
"use strict";
var zlib = require('zlib'); var zlib = require('zlib');
var layouts = require('../layouts'); var layouts = require('../layouts');
var levels = require('../levels'); var levels = require('../levels');
var dgram = require('dgram'); var dgram = require('dgram');
var util = require('util'); var util = require('util');
var debug = require('../debug')('GELF Appender');
var LOG_EMERG=0; // system is unusable var LOG_EMERG=0; // system is unusable
var LOG_ALERT=1; // action must be taken immediately var LOG_ALERT=1; // action must be taken immediately
@@ -33,6 +35,15 @@ levelMapping[levels.FATAL] = LOG_CRIT;
* @param facility - facility to log to (default:nodejs-server) * @param facility - facility to log to (default:nodejs-server)
*/ */
function gelfAppender (layout, host, port, hostname, facility) { function gelfAppender (layout, host, port, hostname, facility) {
var config, customFields;
if (typeof(host) === 'object') {
config = host;
host = config.host;
port = config.port;
hostname = config.hostname;
facility = config.facility;
customFields = config.customFields;
}
host = host || 'localhost'; host = host || 'localhost';
port = port || 12201; port = port || 12201;
@@ -40,14 +51,51 @@ function gelfAppender (layout, host, port, hostname, facility) {
facility = facility || 'nodejs-server'; facility = facility || 'nodejs-server';
layout = layout || layouts.messagePassThroughLayout; layout = layout || layouts.messagePassThroughLayout;
var defaultCustomFields = customFields || {};
var client = dgram.createSocket("udp4"); var client = dgram.createSocket("udp4");
process.on('exit', function() { process.on('exit', function() {
if (client) client.close(); if (client) client.close();
}); });
/**
* Add custom fields (start with underscore )
* - if the first object passed to the logger contains 'GELF' field,
* copy the underscore fields to the message
* @param loggingEvent
* @param msg
*/
function addCustomFields(loggingEvent, msg){
/* append defaultCustomFields firsts */
Object.keys(defaultCustomFields).forEach(function(key) {
// skip _id field for graylog2, skip keys not starts with UNDERSCORE
if (key.match(/^_/) && key !== "_id") {
msg[key] = defaultCustomFields[key];
}
});
/* append custom fields per message */
var data = loggingEvent.data;
if (!Array.isArray(data) || data.length === 0) return;
var firstData = data[0];
if (!firstData.GELF) return; // identify with GELF field defined
Object.keys(firstData).forEach(function(key) {
// skip _id field for graylog2, skip keys not starts with UNDERSCORE
if (key.match(/^_/) || key !== "_id") {
msg[key] = firstData[key];
}
});
/* the custom field object should be removed, so it will not be logged by the later appenders */
loggingEvent.data.shift();
}
function preparePacket(loggingEvent) { function preparePacket(loggingEvent) {
var msg = {}; var msg = {};
addCustomFields(loggingEvent, msg);
msg.full_message = layout(loggingEvent); msg.full_message = layout(loggingEvent);
msg.short_message = msg.full_message; msg.short_message = msg.full_message;
@@ -72,7 +120,7 @@ function gelfAppender (layout, host, port, hostname, facility) {
console.error(err.stack); console.error(err.stack);
} else { } else {
if (packet.length > 8192) { if (packet.length > 8192) {
util.debug("Message packet length (" + packet.length + ") is larger than 8k. Not sending"); debug("Message packet length (" + packet.length + ") is larger than 8k. Not sending");
} else { } else {
sendPacket(packet); sendPacket(packet);
} }
@@ -86,7 +134,7 @@ function configure(config) {
if (config.layout) { if (config.layout) {
layout = layouts.layout(config.layout.type, config.layout); layout = layouts.layout(config.layout.type, config.layout);
} }
return gelfAppender(layout, config.host, config.port, config.hostname, config.facility); return gelfAppender(layout, config);
} }
exports.appender = gelfAppender; exports.appender = gelfAppender;

View File

@@ -1,7 +1,8 @@
var log4js = require('../log4js'); "use strict";
var layouts = require('../layouts'); var log4js = require('../log4js')
var Hook = require('hook.io').Hook; , layouts = require('../layouts')
var util = require('util'); , Hook = require('hook.io').Hook
, util = require('util');
var Logger = function createLogger(options) { var Logger = function createLogger(options) {
var self = this; var self = this;
@@ -13,7 +14,7 @@ var Logger = function createLogger(options) {
actualAppender(loggingEvent); actualAppender(loggingEvent);
}); });
}); });
} };
util.inherits(Logger, Hook); util.inherits(Logger, Hook);
function deserializeLoggingEvent(loggingEvent) { function deserializeLoggingEvent(loggingEvent) {
@@ -42,7 +43,7 @@ function getBufferedHook(hook, eventName) {
hook.on('hook::ready', function emptyBuffer() { hook.on('hook::ready', function emptyBuffer() {
hookBuffer.forEach(function logBufferItem(loggingEvent) { hookBuffer.forEach(function logBufferItem(loggingEvent) {
hook.emit(eventName, loggingEvent); hook.emit(eventName, loggingEvent);
}) });
hookReady = true; hookReady = true;
}); });
@@ -52,7 +53,7 @@ function getBufferedHook(hook, eventName) {
} else { } else {
hookBuffer.push(loggingEvent); hookBuffer.push(loggingEvent);
} }
} };
} }
function createAppender(hookioOptions) { function createAppender(hookioOptions) {

View File

@@ -1,5 +1,6 @@
var levels = require('../levels'); "use strict";
var log4js = require('../log4js'); var levels = require('../levels')
, log4js = require('../log4js');
function logLevelFilter (levelString, appender) { function logLevelFilter (levelString, appender) {
var level = levels.toLevel(levelString); var level = levels.toLevel(levelString);
@@ -7,7 +8,7 @@ function logLevelFilter (levelString, appender) {
if (logEvent.level.isGreaterThanOrEqualTo(level)) { if (logEvent.level.isGreaterThanOrEqualTo(level)) {
appender(logEvent); appender(logEvent);
} }
} };
} }
function configure(config) { function configure(config) {

View File

@@ -1,6 +1,7 @@
var log4js = require('../log4js'), "use strict";
net = require('net'), var log4js = require('../log4js')
END_MSG = '__LOG4JS__'; , net = require('net')
, END_MSG = '__LOG4JS__';
/** /**
* Creates a server, listening on config.loggerPort, config.loggerHost. * Creates a server, listening on config.loggerPort, config.loggerHost.

View File

@@ -1,14 +1,17 @@
var layouts = require("../layouts"), "use strict";
mailer = require("nodemailer"), var layouts = require("../layouts")
os = require('os'); , mailer = require("nodemailer")
, os = require('os');
/** /**
* SMTP Appender. Sends logging events using SMTP protocol. * SMTP Appender. Sends logging events using SMTP protocol.
* It can either send an email on each event or group several logging events gathered during specified interval. * It can either send an email on each event or group several
* logging events gathered during specified interval.
* *
* @param config appender configuration data * @param config appender configuration data
* config.sendInterval time between log emails (in seconds), if 0
* then every event sends an email
* @param layout a function that takes a logevent and returns a string (defaults to basicLayout). * @param layout a function that takes a logevent and returns a string (defaults to basicLayout).
* all events are buffered and sent in one email during this time; if 0 than every event sends an email
*/ */
function smtpAppender(config, layout) { function smtpAppender(config, layout) {
layout = layout || layouts.basicLayout; layout = layout || layouts.basicLayout;
@@ -17,12 +20,11 @@ function smtpAppender(config, layout) {
var logEventBuffer = []; var logEventBuffer = [];
var sendTimer; var sendTimer;
var transport = mailer.createTransport(config.transport, config[config.transport]);
function sendBuffer() { function sendBuffer() {
if (logEventBuffer.length == 0) if (logEventBuffer.length > 0) {
return;
var transport = mailer.createTransport(config.transport, config[config.transport]);
var firstEvent = logEventBuffer[0]; var firstEvent = logEventBuffer[0];
var body = ""; var body = "";
while (logEventBuffer.length > 0) { while (logEventBuffer.length > 0) {
@@ -33,31 +35,36 @@ function smtpAppender(config, layout) {
to: config.recipients, to: config.recipients,
subject: config.subject || subjectLayout(firstEvent), subject: config.subject || subjectLayout(firstEvent),
text: body, text: body,
headers: {"Hostname": os.hostname()} headers: { "Hostname": os.hostname() }
}; };
if (config.sender) if (config.sender) {
msg.from = config.sender; msg.from = config.sender;
}
transport.sendMail(msg, function(error, success) { transport.sendMail(msg, function(error, success) {
if (error) { if (error) {
console.error("log4js.smtpAppender - Error happened ", error); console.error("log4js.smtpAppender - Error happened", error);
} }
transport.close();
}); });
}
} }
function scheduleSend() { function scheduleSend() {
if (!sendTimer) if (!sendTimer) {
sendTimer = setTimeout(function() { sendTimer = setTimeout(function() {
sendTimer = null; sendTimer = null;
sendBuffer(); sendBuffer();
}, sendInterval); }, sendInterval);
}
} }
return function(loggingEvent) { return function(loggingEvent) {
logEventBuffer.push(loggingEvent); logEventBuffer.push(loggingEvent);
if (sendInterval > 0) if (sendInterval > 0) {
scheduleSend(); scheduleSend();
else } else {
sendBuffer(); sendBuffer();
}
}; };
} }

View File

@@ -1,11 +1,16 @@
"use strict";
var levels = require("./levels"); var levels = require("./levels");
var DEFAULT_FORMAT = ':remote-addr - -' +
' ":method :url HTTP/:http-version"' +
' :status :content-length ":referrer"' +
' ":user-agent"';
/** /**
* Log requests with the given `options` or a `format` string. * Log requests with the given `options` or a `format` string.
* *
* Options: * Options:
* *
* - `format` Format string, see below for tokens * - `format` Format string, see below for tokens
* - `level` A log4js levels instance. * - `level` A log4js levels instance. Supports also 'auto'
* *
* Tokens: * Tokens:
* *
@@ -37,20 +42,18 @@ function getLogger(logger4js, options) {
var thislogger = logger4js var thislogger = logger4js
, level = levels.toLevel(options.level, levels.INFO) , level = levels.toLevel(options.level, levels.INFO)
, fmt = options.format || ':remote-addr - - ":method :url HTTP/:http-version" :status :content-length ":referrer" ":user-agent"' , fmt = options.format || DEFAULT_FORMAT
, nolog = options.nolog ? createNoLogCondition(options.nolog) : null; , nolog = options.nolog ? createNoLogCondition(options.nolog) : null;
return function (req, res, next) { return function (req, res, next) {
// mount safety // mount safety
if (req._logging) return next(); if (req._logging) return next();
// nologs // nologs
if (nolog && nolog.test(req.originalUrl)) return next(); if (nolog && nolog.test(req.originalUrl)) return next();
if (thislogger.isLevelEnabled(level) || options.level === 'auto') {
if (thislogger.isLevelEnabled(level)) { var start = new Date()
var start = +new Date
, statusCode , statusCode
, writeHead = res.writeHead , writeHead = res.writeHead
, end = res.end , end = res.end
@@ -65,19 +68,30 @@ function getLogger(logger4js, options) {
res.writeHead(code, headers); res.writeHead(code, headers);
res.__statusCode = statusCode = code; res.__statusCode = statusCode = code;
res.__headers = headers || {}; res.__headers = headers || {};
//status code response level handling
if(options.level === 'auto'){
level = levels.INFO;
if(code >= 300) level = levels.WARN;
if(code >= 400) level = levels.ERROR;
} else {
level = levels.toLevel(options.level, levels.INFO);
}
}; };
// proxy end to output a line to the provided logger. // proxy end to output a line to the provided logger.
res.end = function(chunk, encoding) { res.end = function(chunk, encoding) {
res.end = end; res.end = end;
res.end(chunk, encoding); res.end(chunk, encoding);
res.responseTime = +new Date - start; res.responseTime = new Date() - start;
if ('function' == typeof fmt) { if (thislogger.isLevelEnabled(level)) {
if (typeof fmt === 'function') {
var line = fmt(req, res, function(str){ return format(str, req, res); }); var line = fmt(req, res, function(str){ return format(str, req, res); });
if (line) thislogger.log(level, line); if (line) thislogger.log(level, line);
} else { } else {
thislogger.log(level, format(fmt, req, res)); thislogger.log(level, format(fmt, req, res));
} }
}
}; };
} }
@@ -103,15 +117,24 @@ function format(str, req, res) {
.replace(':status', res.__statusCode || res.statusCode) .replace(':status', res.__statusCode || res.statusCode)
.replace(':response-time', res.responseTime) .replace(':response-time', res.responseTime)
.replace(':date', new Date().toUTCString()) .replace(':date', new Date().toUTCString())
.replace(':referrer', req.headers['referer'] || req.headers['referrer'] || '') .replace(':referrer', req.headers.referer || req.headers.referrer || '')
.replace(':http-version', req.httpVersionMajor + '.' + req.httpVersionMinor) .replace(':http-version', req.httpVersionMajor + '.' + req.httpVersionMinor)
.replace(':remote-addr', req.socket && (req.socket.remoteAddress || (req.socket.socket && req.socket.socket.remoteAddress))) .replace(
':remote-addr',
req.socket &&
(req.socket.remoteAddress || (req.socket.socket && req.socket.socket.remoteAddress))
)
.replace(':user-agent', req.headers['user-agent'] || '') .replace(':user-agent', req.headers['user-agent'] || '')
.replace(':content-length', (res._headers && res._headers['content-length']) || (res.__headers && res.__headers['Content-Length']) || '-') .replace(
':content-length',
(res._headers && res._headers['content-length']) ||
(res.__headers && res.__headers['Content-Length']) ||
'-'
)
.replace(/:req\[([^\]]+)\]/g, function(_, field){ return req.headers[field.toLowerCase()]; }) .replace(/:req\[([^\]]+)\]/g, function(_, field){ return req.headers[field.toLowerCase()]; })
.replace(/:res\[([^\]]+)\]/g, function(_, field){ .replace(/:res\[([^\]]+)\]/g, function(_, field){
return res._headers return res._headers ?
? (res._headers[field.toLowerCase()] || res.__headers[field]) (res._headers[field.toLowerCase()] || res.__headers[field])
: (res.__headers && res.__headers[field]); : (res.__headers && res.__headers[field]);
}); });
} }
@@ -122,17 +145,17 @@ function format(str, req, res) {
* @param {String} nolog * @param {String} nolog
* @return {RegExp} * @return {RegExp}
* @api private * @api private
*/ *
/**
* syntax * syntax
* 1. String * 1. String
* 1.1 "\\.gif" * 1.1 "\\.gif"
* NOT LOGGING http://example.com/hoge.gif and http://example.com/hoge.gif?fuga * NOT LOGGING http://example.com/hoge.gif and http://example.com/hoge.gif?fuga
* LOGGING http://example.com/hoge.agif * LOGGING http://example.com/hoge.agif
* 1.2 in "\\.gif|\\.jpg$" * 1.2 in "\\.gif|\\.jpg$"
* NOT LOGGING http://example.com/hoge.gif and http://example.com/hoge.gif?fuga and http://example.com/hoge.jpg?fuga * NOT LOGGING http://example.com/hoge.gif and
* LOGGING http://example.com/hoge.agif, http://example.com/hoge.ajpg and http://example.com/hoge.jpg?hoge * http://example.com/hoge.gif?fuga and http://example.com/hoge.jpg?fuga
* LOGGING http://example.com/hoge.agif,
* http://example.com/hoge.ajpg and http://example.com/hoge.jpg?hoge
* 1.3 in "\\.(gif|jpe?g|png)$" * 1.3 in "\\.(gif|jpe?g|png)$"
* NOT LOGGING http://example.com/hoge.gif and http://example.com/hoge.jpeg * NOT LOGGING http://example.com/hoge.gif and http://example.com/hoge.jpeg
* LOGGING http://example.com/hoge.gif?uid=2 and http://example.com/hoge.jpg?pid=3 * LOGGING http://example.com/hoge.gif?uid=2 and http://example.com/hoge.jpg?pid=3
@@ -143,26 +166,29 @@ function format(str, req, res) {
* 3.1 ["\\.jpg$", "\\.png", "\\.gif"] * 3.1 ["\\.jpg$", "\\.png", "\\.gif"]
* SAME AS "\\.jpg|\\.png|\\.gif" * SAME AS "\\.jpg|\\.png|\\.gif"
*/ */
function createNoLogCondition(nolog, type) { function createNoLogCondition(nolog) {
if(!nolog) return null; var regexp = null;
type = type || '';
if(nolog instanceof RegExp){ if (nolog) {
if(type === 'string') if (nolog instanceof RegExp) {
return nolog.source; regexp = nolog;
return nolog;
} else if(typeof nolog === 'string'){
if(type === 'string')
return nolog;
try{
return new RegExp(nolog);
} catch (ex) {
return null;
} }
} else if(nolog instanceof Array){
var regexps = nolog.map(function(o){ return createNoLogCondition(o, 'string')}); if (typeof nolog === 'string') {
return new RegExp(regexps.join('|')); regexp = new RegExp(nolog);
} }
if (Array.isArray(nolog)) {
var regexpsAsStrings = nolog.map(
function convertToStrings(o) {
return o.source ? o.source : o;
}
);
regexp = new RegExp(regexpsAsStrings.join('|'));
}
}
return regexp;
} }
exports.connectLogger = getLogger; exports.connectLogger = getLogger;

View File

@@ -1,8 +1,40 @@
"use strict";
exports.ISO8601_FORMAT = "yyyy-MM-dd hh:mm:ss.SSS"; exports.ISO8601_FORMAT = "yyyy-MM-dd hh:mm:ss.SSS";
exports.ISO8601_WITH_TZ_OFFSET_FORMAT = "yyyy-MM-ddThh:mm:ssO"; exports.ISO8601_WITH_TZ_OFFSET_FORMAT = "yyyy-MM-ddThh:mm:ssO";
exports.DATETIME_FORMAT = "dd MM yyyy hh:mm:ss.SSS"; exports.DATETIME_FORMAT = "dd MM yyyy hh:mm:ss.SSS";
exports.ABSOLUTETIME_FORMAT = "hh:mm:ss.SSS"; exports.ABSOLUTETIME_FORMAT = "hh:mm:ss.SSS";
function padWithZeros(vNumber, width) {
var numAsString = vNumber + "";
while (numAsString.length < width) {
numAsString = "0" + numAsString;
}
return numAsString;
}
function addZero(vNumber) {
return padWithZeros(vNumber, 2);
}
/**
* Formats the TimeOffest
* Thanks to http://www.svendtofte.com/code/date_format/
* @private
*/
function offset(date) {
// Difference to Greenwich time (GMT) in hours
var os = Math.abs(date.getTimezoneOffset());
var h = String(Math.floor(os/60));
var m = String(os%60);
if (h.length == 1) {
h = "0" + h;
}
if (m.length == 1) {
m = "0" + m;
}
return date.getTimezoneOffset() < 0 ? "+"+h+m : "-"+h+m;
}
exports.asString = function(/*format,*/ date) { exports.asString = function(/*format,*/ date) {
var format = exports.ISO8601_FORMAT; var format = exports.ISO8601_FORMAT;
if (typeof(date) === "string") { if (typeof(date) === "string") {
@@ -31,30 +63,4 @@ exports.asString = function(/*format,*/ date) {
.replace(/O/g, vTimeZone); .replace(/O/g, vTimeZone);
return formatted; return formatted;
function padWithZeros(vNumber, width) {
var numAsString = vNumber + "";
while (numAsString.length < width) {
numAsString = "0" + numAsString;
}
return numAsString;
}
function addZero(vNumber) {
return padWithZeros(vNumber, 2);
}
/**
* Formats the TimeOffest
* Thanks to http://www.svendtofte.com/code/date_format/
* @private
*/
function offset(date) {
// Difference to Greenwich time (GMT) in hours
var os = Math.abs(date.getTimezoneOffset());
var h = String(Math.floor(os/60));
var m = String(os%60);
h.length == 1? h = "0"+h:1;
m.length == 1? m = "0"+m:1;
return date.getTimezoneOffset() < 0 ? "+"+h+m : "-"+h+m;
}
}; };

15
lib/debug.js Normal file
View File

@@ -0,0 +1,15 @@
"use strict";
module.exports = function(label) {
var debug;
if (process.env.NODE_DEBUG && /\blog4js\b/.test(process.env.NODE_DEBUG)) {
debug = function(message) {
console.error('LOG4JS: (%s) %s', label, message);
};
} else {
debug = function() { };
}
return debug;
};

View File

@@ -1,61 +1,42 @@
"use strict";
var dateFormat = require('./date_format') var dateFormat = require('./date_format')
, os = require('os') , os = require('os')
, eol = os.EOL || '\n' , eol = os.EOL || '\n'
, util = require('util') , util = require('util')
, replacementRegExp = /%[sdj]/g , replacementRegExp = /%[sdj]/g
, layoutMakers = { , layoutMakers = {
"messagePassThrough": function() { return messagePassThroughLayout; } "messagePassThrough": function() { return messagePassThroughLayout; },
, "basic": function() { return basicLayout; } "basic": function() { return basicLayout; },
, "colored": function() { return colouredLayout; } "colored": function() { return colouredLayout; },
, "coloured": function() { return colouredLayout; } "coloured": function() { return colouredLayout; },
, "pattern": function (config) { "pattern": function (config) {
var pattern = config.pattern || undefined; return patternLayout(config && config.pattern, config && config.tokens);
var tokens = config.tokens || undefined;
return patternLayout(pattern, tokens);
} }
}
, colours = {
ALL: "grey",
TRACE: "blue",
DEBUG: "cyan",
INFO: "green",
WARN: "yellow",
ERROR: "red",
FATAL: "magenta",
OFF: "grey"
};
function wrapErrorsWithInspect(items) {
return items.map(function(item) {
if ((item instanceof Error) && item.stack) {
return { inspect: function() { return util.format(item) + '\n' + item.stack; } };
} else {
return item;
} }
, colours = { });
ALL: "grey" }
, TRACE: "blue"
, DEBUG: "cyan"
, INFO: "green"
, WARN: "yellow"
, ERROR: "red"
, FATAL: "magenta"
, OFF: "grey"
};
function formatLogData(logData) { function formatLogData(logData) {
var output = "" var data = Array.isArray(logData) ? logData : Array.prototype.slice.call(arguments);
, data = Array.isArray(logData) ? logData.slice() : Array.prototype.slice.call(arguments) return util.format.apply(util, wrapErrorsWithInspect(data));
, format = data.shift();
if (typeof format === "string") {
output = format.replace(replacementRegExp, function(match) {
switch (match) {
case "%s": return new String(data.shift());
case "%d": return new Number(data.shift());
case "%j": return JSON.stringify(data.shift());
default:
return match;
};
});
} else {
//put it back, it's not a format string
data.unshift(format);
}
data.forEach(function (item) {
if (output) {
output += ' ';
}
output += util.inspect(item);
if (item && item.stack) {
output += "\n" + item.stack;
}
});
return output;
} }
var styles = { var styles = {
@@ -78,10 +59,10 @@ var styles = {
}; };
function colorizeStart(style) { function colorizeStart(style) {
return style ? '\033[' + styles[style][0] + 'm' : ''; return style ? '\x1B[' + styles[style][0] + 'm' : '';
} }
function colorizeEnd(style) { function colorizeEnd(style) {
return style ? '\033[' + styles[style][1] + 'm' : ''; return style ? '\x1B[' + styles[style][1] + 'm' : '';
} }
/** /**
* Taken from masylum's fork (https://github.com/masylum/log4js-node) * Taken from masylum's fork (https://github.com/masylum/log4js-node)
@@ -121,7 +102,10 @@ function basicLayout (loggingEvent) {
* same as basicLayout, but with colours. * same as basicLayout, but with colours.
*/ */
function colouredLayout (loggingEvent) { function colouredLayout (loggingEvent) {
return timestampLevelAndCategory(loggingEvent, colours[loggingEvent.level.toString()]) + formatLogData(loggingEvent.data); return timestampLevelAndCategory(
loggingEvent,
colours[loggingEvent.level.toString()]
) + formatLogData(loggingEvent.data);
} }
function messagePassThroughLayout (loggingEvent) { function messagePassThroughLayout (loggingEvent) {
@@ -163,6 +147,121 @@ function patternLayout (pattern, tokens) {
pattern = pattern || TTCC_CONVERSION_PATTERN; pattern = pattern || TTCC_CONVERSION_PATTERN;
function categoryName(loggingEvent, specifier) {
var loggerName = loggingEvent.categoryName;
if (specifier) {
var precision = parseInt(specifier, 10);
var loggerNameBits = loggerName.split(".");
if (precision < loggerNameBits.length) {
loggerName = loggerNameBits.slice(loggerNameBits.length - precision).join(".");
}
}
return loggerName;
}
function formatAsDate(loggingEvent, specifier) {
var format = dateFormat.ISO8601_FORMAT;
if (specifier) {
format = specifier;
// Pick up special cases
if (format == "ISO8601") {
format = dateFormat.ISO8601_FORMAT;
} else if (format == "ABSOLUTE") {
format = dateFormat.ABSOLUTETIME_FORMAT;
} else if (format == "DATE") {
format = dateFormat.DATETIME_FORMAT;
}
}
// Format the date
return dateFormat.asString(format, loggingEvent.startTime);
}
function formatMessage(loggingEvent) {
return formatLogData(loggingEvent.data);
}
function endOfLine() {
return eol;
}
function logLevel(loggingEvent) {
return loggingEvent.level.toString();
}
function startTime(loggingEvent) {
return "" + loggingEvent.startTime.toLocaleTimeString();
}
function startColour(loggingEvent) {
return colorizeStart(colours[loggingEvent.level.toString()]);
}
function endColour(loggingEvent) {
return colorizeEnd(colours[loggingEvent.level.toString()]);
}
function percent() {
return '%';
}
function userDefined(loggingEvent, specifier) {
if (typeof(tokens[specifier]) !== 'undefined') {
if (typeof(tokens[specifier]) === 'function') {
return tokens[specifier](loggingEvent);
} else {
return tokens[specifier];
}
}
return null;
}
var replacers = {
'c': categoryName,
'd': formatAsDate,
'm': formatMessage,
'n': endOfLine,
'p': logLevel,
'r': startTime,
'[': startColour,
']': endColour,
'%': percent,
'x': userDefined
};
function replaceToken(conversionCharacter, loggingEvent, specifier) {
return replacers[conversionCharacter](loggingEvent, specifier);
}
function truncate(truncation, toTruncate) {
var len;
if (truncation) {
len = parseInt(truncation.substr(1), 10);
return toTruncate.substring(0, len);
}
return toTruncate;
}
function pad(padding, toPad) {
var len;
if (padding) {
if (padding.charAt(0) == "-") {
len = parseInt(padding.substr(1), 10);
// Right pad with spaces
while (toPad.length < len) {
toPad += " ";
}
} else {
len = parseInt(padding, 10);
// Left pad with spaces
while (toPad.length < len) {
toPad = " " + toPad;
}
}
}
return toPad;
}
return function(loggingEvent) { return function(loggingEvent) {
var formattedString = ""; var formattedString = "";
var result; var result;
@@ -182,100 +281,14 @@ function patternLayout (pattern, tokens) {
} else { } else {
// Create a raw replacement string based on the conversion // Create a raw replacement string based on the conversion
// character and specifier // character and specifier
var replacement = ""; var replacement =
switch(conversionCharacter) { replaceToken(conversionCharacter, loggingEvent, specifier) ||
case "c": matchedString;
var loggerName = loggingEvent.categoryName;
if (specifier) {
var precision = parseInt(specifier, 10);
var loggerNameBits = loggingEvent.categoryName.split(".");
if (precision >= loggerNameBits.length) {
replacement = loggerName;
} else {
replacement = loggerNameBits.slice(loggerNameBits.length - precision).join(".");
}
} else {
replacement = loggerName;
}
break;
case "d":
var format = dateFormat.ISO8601_FORMAT;
if (specifier) {
format = specifier;
// Pick up special cases
if (format == "ISO8601") {
format = dateFormat.ISO8601_FORMAT;
} else if (format == "ABSOLUTE") {
format = dateFormat.ABSOLUTETIME_FORMAT;
} else if (format == "DATE") {
format = dateFormat.DATETIME_FORMAT;
}
}
// Format the date
replacement = dateFormat.asString(format, loggingEvent.startTime);
break;
case "m":
replacement = formatLogData(loggingEvent.data);
break;
case "n":
replacement = eol;
break;
case "p":
replacement = loggingEvent.level.toString();
break;
case "r":
replacement = "" + loggingEvent.startTime.toLocaleTimeString();
break;
case "[":
replacement = colorizeStart(colours[loggingEvent.level.toString()]);
break;
case "]":
replacement = colorizeEnd(colours[loggingEvent.level.toString()]);
break;
case "%":
replacement = "%";
break;
case "x":
if(typeof(tokens[specifier]) !== 'undefined') {
if(typeof(tokens[specifier]) === 'function') {
replacement = tokens[specifier]();
} else {
replacement = tokens[specifier];
}
} else {
replacement = matchedString;
}
break;
default:
replacement = matchedString;
break;
}
// Format the replacement according to any padding or // Format the replacement according to any padding or
// truncation specified // truncation specified
replacement = truncate(truncation, replacement);
var len; replacement = pad(padding, replacement);
// First, truncation
if (truncation) {
len = parseInt(truncation.substr(1), 10);
replacement = replacement.substring(0, len);
}
// Next, padding
if (padding) {
if (padding.charAt(0) == "-") {
len = parseInt(padding.substr(1), 10);
// Right pad with spaces
while (replacement.length < len) {
replacement += " ";
}
} else {
len = parseInt(padding, 10);
// Left pad with spaces
while (replacement.length < len) {
replacement = " " + replacement;
}
}
}
formattedString += replacement; formattedString += replacement;
} }
searchString = searchString.substr(result.index + result[0].length); searchString = searchString.substr(result.index + result[0].length);
@@ -283,15 +296,15 @@ function patternLayout (pattern, tokens) {
return formattedString; return formattedString;
}; };
}; }
module.exports = { module.exports = {
basicLayout: basicLayout basicLayout: basicLayout,
, messagePassThroughLayout: messagePassThroughLayout messagePassThroughLayout: messagePassThroughLayout,
, patternLayout: patternLayout patternLayout: patternLayout,
, colouredLayout: colouredLayout colouredLayout: colouredLayout,
, coloredLayout: colouredLayout coloredLayout: colouredLayout,
, layout: function(name, config) { layout: function(name, config) {
return layoutMakers[name] && layoutMakers[name](config); return layoutMakers[name] && layoutMakers[name](config);
} }
}; };

View File

@@ -1,3 +1,5 @@
"use strict";
function Level(level, levelStr) { function Level(level, levelStr) {
this.level = level; this.level = level;
this.levelStr = levelStr; this.levelStr = levelStr;
@@ -26,8 +28,7 @@ function toLevel(sArg, defaultLevel) {
} }
return toLevel(sArg.toString()); return toLevel(sArg.toString());
}
};
Level.prototype.toString = function() { Level.prototype.toString = function() {
return this.levelStr; return this.levelStr;
@@ -52,16 +53,16 @@ Level.prototype.isEqualTo = function(otherLevel) {
otherLevel = toLevel(otherLevel); otherLevel = toLevel(otherLevel);
} }
return this.level === otherLevel.level; return this.level === otherLevel.level;
} };
module.exports = { module.exports = {
ALL: new Level(Number.MIN_VALUE, "ALL") ALL: new Level(Number.MIN_VALUE, "ALL"),
, TRACE: new Level(5000, "TRACE") TRACE: new Level(5000, "TRACE"),
, DEBUG: new Level(10000, "DEBUG") DEBUG: new Level(10000, "DEBUG"),
, INFO: new Level(20000, "INFO") INFO: new Level(20000, "INFO"),
, WARN: new Level(30000, "WARN") WARN: new Level(30000, "WARN"),
, ERROR: new Level(40000, "ERROR") ERROR: new Level(40000, "ERROR"),
, FATAL: new Level(50000, "FATAL") FATAL: new Level(50000, "FATAL"),
, OFF: new Level(Number.MAX_VALUE, "OFF") OFF: new Level(Number.MAX_VALUE, "OFF"),
, toLevel: toLevel toLevel: toLevel
}; };

View File

@@ -1,3 +1,4 @@
"use strict";
/* /*
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License. * you may not use this file except in compliance with the License.
@@ -12,8 +13,6 @@
* limitations under the License. * limitations under the License.
*/ */
/*jsl:option explicit*/
/** /**
* @fileoverview log4js is a library to log in JavaScript in similar manner * @fileoverview log4js is a library to log in JavaScript in similar manner
* than in log4j for Java. The API should be nearly the same. * than in log4j for Java. The API should be nearly the same.
@@ -45,17 +44,20 @@
* Website: http://log4js.berlios.de * Website: http://log4js.berlios.de
*/ */
var events = require('events') var events = require('events')
, async = require('async')
, fs = require('fs') , fs = require('fs')
, path = require('path') , path = require('path')
, util = require('util') , util = require('util')
, layouts = require('./layouts') , layouts = require('./layouts')
, levels = require('./levels') , levels = require('./levels')
, LoggingEvent = require('./logger').LoggingEvent , loggerModule = require('./logger')
, Logger = require('./logger').Logger , LoggingEvent = loggerModule.LoggingEvent
, Logger = loggerModule.Logger
, ALL_CATEGORIES = '[all]' , ALL_CATEGORIES = '[all]'
, appenders = {} , appenders = {}
, loggers = {} , loggers = {}
, appenderMakers = {} , appenderMakers = {}
, appenderShutdowns = {}
, defaultConfig = { , defaultConfig = {
appenders: [ appenders: [
{ type: "console" } { type: "console" }
@@ -72,7 +74,7 @@ var events = require('events')
function getLogger (categoryName) { function getLogger (categoryName) {
// Use default logger if categoryName is not specified or invalid // Use default logger if categoryName is not specified or invalid
if (!(typeof categoryName == "string")) { if (typeof categoryName !== "string") {
categoryName = Logger.DEFAULT_CATEGORY; categoryName = Logger.DEFAULT_CATEGORY;
} }
@@ -103,7 +105,7 @@ function getLogger (categoryName) {
function addAppender () { function addAppender () {
var args = Array.prototype.slice.call(arguments); var args = Array.prototype.slice.call(arguments);
var appender = args.shift(); var appender = args.shift();
if (args.length == 0 || args[0] === undefined) { if (args.length === 0 || args[0] === undefined) {
args = [ ALL_CATEGORIES ]; args = [ ALL_CATEGORIES ];
} }
//argument may already be an array //argument may already be an array
@@ -112,21 +114,29 @@ function addAppender () {
} }
args.forEach(function(category) { args.forEach(function(category) {
if (!appenders[category]) { addAppenderToCategory(appender, category);
appenders[category] = [];
}
appenders[category].push(appender);
if (category === ALL_CATEGORIES) { if (category === ALL_CATEGORIES) {
addAppenderToAllLoggers(appender);
} else if (loggers[category]) {
loggers[category].addListener("log", appender);
}
});
}
function addAppenderToAllLoggers(appender) {
for (var logger in loggers) { for (var logger in loggers) {
if (loggers.hasOwnProperty(logger)) { if (loggers.hasOwnProperty(logger)) {
loggers[logger].addListener("log", appender); loggers[logger].addListener("log", appender);
} }
} }
} else if (loggers[category]) { }
loggers[category].addListener("log", appender);
function addAppenderToCategory(appender, category) {
if (!appenders[category]) {
appenders[category] = [];
} }
}); appenders[category].push(appender);
} }
function clearAppenders () { function clearAppenders () {
@@ -145,11 +155,11 @@ function configureAppenders(appenderList, options) {
loadAppender(appenderConfig.type); loadAppender(appenderConfig.type);
var appender; var appender;
appenderConfig.makers = appenderMakers; appenderConfig.makers = appenderMakers;
try {
appender = appenderMakers[appenderConfig.type](appenderConfig, options); appender = appenderMakers[appenderConfig.type](appenderConfig, options);
if (appender) {
addAppender(appender, appenderConfig.category); addAppender(appender, appenderConfig.category);
} else { } catch(e) {
throw new Error("log4js configuration problem for "+util.inspect(appenderConfig)); throw new Error("log4js configuration problem for " + util.inspect(appenderConfig), e);
} }
}); });
} }
@@ -181,10 +191,7 @@ function getDefaultLogger () {
var configState = {}; var configState = {};
function loadConfigurationFile(filename) { function loadConfigurationFile(filename) {
if (filename && (!configState.lastFilename || filename !== configState.lastFilename || if (filename) {
!configState.lastMTime || fs.statSync(filename).mtime !== configState.lastMTime)) {
configState.lastFilename = filename;
configState.lastMTime = fs.statSync(filename).mtime;
return JSON.parse(fs.readFileSync(filename, "utf8")); return JSON.parse(fs.readFileSync(filename, "utf8"));
} }
return undefined; return undefined;
@@ -202,31 +209,32 @@ function configureOnceOff(config, options) {
restoreConsole(); restoreConsole();
} }
} catch (e) { } catch (e) {
throw new Error("Problem reading log4js config " + util.inspect(config) + ". Error was \"" + e.message + "\" ("+e.stack+")"); throw new Error(
"Problem reading log4js config " + util.inspect(config) +
". Error was \"" + e.message + "\" (" + e.stack + ")"
);
} }
} }
} }
function reloadConfiguration() { function reloadConfiguration() {
var filename = configState.filename, var mtime = getMTime(configState.filename);
mtime; if (!mtime) return;
if (!filename) {
// can't find anything to reload if (configState.lastMTime && (mtime.getTime() > configState.lastMTime.getTime())) {
return; configureOnceOff(loadConfigurationFile(configState.filename));
} }
configState.lastMTime = mtime;
}
function getMTime(filename) {
var mtime;
try { try {
mtime = fs.statSync(filename).mtime; mtime = fs.statSync(configState.filename).mtime;
} catch (e) { } catch (e) {
getLogger('log4js').warn('Failed to load configuration file ' + filename); getLogger('log4js').warn('Failed to load configuration file ' + filename);
return;
}
if (configState.lastFilename && configState.lastFilename === filename) {
if (mtime.getTime() > configState.lastMTime.getTime()) {
configureOnceOff(loadConfigurationFile(filename));
}
} else {
configureOnceOff(loadConfigurationFile(filename));
} }
return mtime;
} }
function initReloadConfiguration(filename, options) { function initReloadConfiguration(filename, options) {
@@ -235,6 +243,7 @@ function initReloadConfiguration(filename, options) {
delete configState.timerId; delete configState.timerId;
} }
configState.filename = filename; configState.filename = filename;
configState.lastMTime = getMTime(filename);
configState.timerId = setInterval(reloadConfiguration, options.reloadSecs*1000); configState.timerId = setInterval(reloadConfiguration, options.reloadSecs*1000);
} }
@@ -250,7 +259,9 @@ function configure(configurationFileOrObject, options) {
config = loadConfigurationFile(config) || defaultConfig; config = loadConfigurationFile(config) || defaultConfig;
} else { } else {
if (options.reloadSecs) { if (options.reloadSecs) {
getLogger('log4js').warn('Ignoring configuration reload parameter for "object" configuration.'); getLogger('log4js').warn(
'Ignoring configuration reload parameter for "object" configuration.'
);
} }
} }
configureOnceOff(config, options); configureOnceOff(config, options);
@@ -268,7 +279,7 @@ function replaceConsole(logger) {
function replaceWith(fn) { function replaceWith(fn) {
return function() { return function() {
fn.apply(logger, arguments); fn.apply(logger, arguments);
} };
} }
logger = logger || getLogger("console"); logger = logger || getLogger("console");
['log','debug','info','warn','error'].forEach(function (item) { ['log','debug','info','warn','error'].forEach(function (item) {
@@ -290,9 +301,42 @@ function loadAppender(appender) {
appenderModule = require(appender); appenderModule = require(appender);
} }
module.exports.appenders[appender] = appenderModule.appender.bind(appenderModule); module.exports.appenders[appender] = appenderModule.appender.bind(appenderModule);
if (appenderModule.shutdown) {
appenderShutdowns[appender] = appenderModule.shutdown.bind(appenderModule);
}
appenderMakers[appender] = appenderModule.configure.bind(appenderModule); appenderMakers[appender] = appenderModule.configure.bind(appenderModule);
} }
/**
* Shutdown all log appenders. This will first disable all writing to appenders
* and then call the shutdown function each appender.
*
* @params {Function} cb - The callback to be invoked once all appenders have
* shutdown. If an error occurs, the callback will be given the error object
* as the first argument.
* @returns {void}
*/
function shutdown(cb) {
// First, disable all writing to appenders. This prevents appenders from
// not being able to be drained because of run-away log writes.
loggerModule.disableAllLogWrites();
// Next, get all the shutdown functions for appenders as an array.
var shutdownFunctions = Object.keys(appenderShutdowns).reduce(
function(accum, category) {
return accum.concat(appenderShutdowns[category]);
}, []);
// Call each of the shutdown functions.
async.forEach(
shutdownFunctions,
function(shutdownFn, done) {
shutdownFn(done);
},
cb
);
}
module.exports = { module.exports = {
getLogger: getLogger, getLogger: getLogger,
getDefaultLogger: getDefaultLogger, getDefaultLogger: getDefaultLogger,
@@ -301,6 +345,7 @@ module.exports = {
loadAppender: loadAppender, loadAppender: loadAppender,
clearAppenders: clearAppenders, clearAppenders: clearAppenders,
configure: configure, configure: configure,
shutdown: shutdown,
replaceConsole: replaceConsole, replaceConsole: replaceConsole,
restoreConsole: restoreConsole, restoreConsole: restoreConsole,

View File

@@ -1,7 +1,10 @@
var levels = require('./levels'), "use strict";
util = require('util'), var levels = require('./levels')
events = require('events'), , util = require('util')
DEFAULT_CATEGORY = '[default]'; , events = require('events')
, DEFAULT_CATEGORY = '[default]';
var logWritesEnabled = true;
/** /**
* Models a logging event. * Models a logging event.
@@ -30,12 +33,13 @@ function LoggingEvent (categoryName, level, data, logger) {
function Logger (name, level) { function Logger (name, level) {
this.category = name || DEFAULT_CATEGORY; this.category = name || DEFAULT_CATEGORY;
if (! this.level) { if (level) {
this.__proto__.level = levels.TRACE; this.setLevel(level);
} }
} }
util.inherits(Logger, events.EventEmitter); util.inherits(Logger, events.EventEmitter);
Logger.DEFAULT_CATEGORY = DEFAULT_CATEGORY; Logger.DEFAULT_CATEGORY = DEFAULT_CATEGORY;
Logger.prototype.level = levels.TRACE;
Logger.prototype.setLevel = function(level) { Logger.prototype.setLevel = function(level) {
this.level = levels.toLevel(level, this.level || levels.TRACE); this.level = levels.toLevel(level, this.level || levels.TRACE);
@@ -64,7 +68,7 @@ Logger.prototype.isLevelEnabled = function(otherLevel) {
}; };
Logger.prototype[levelString.toLowerCase()] = function () { Logger.prototype[levelString.toLowerCase()] = function () {
if (this.isLevelEnabled(level)) { if (logWritesEnabled && this.isLevelEnabled(level)) {
var args = Array.prototype.slice.call(arguments); var args = Array.prototype.slice.call(arguments);
args.unshift(level); args.unshift(level);
Logger.prototype.log.apply(this, args); Logger.prototype.log.apply(this, args);
@@ -73,6 +77,23 @@ Logger.prototype.isLevelEnabled = function(otherLevel) {
} }
); );
/**
* Disable all log writes.
* @returns {void}
*/
function disableAllLogWrites() {
logWritesEnabled = false;
}
/**
* Enable log writes.
* @returns {void}
*/
function enableAllLogWrites() {
logWritesEnabled = true;
}
exports.LoggingEvent = LoggingEvent; exports.LoggingEvent = LoggingEvent;
exports.Logger = Logger; exports.Logger = Logger;
exports.disableAllLogWrites = disableAllLogWrites;
exports.enableAllLogWrites = enableAllLogWrites;

View File

@@ -1,7 +1,9 @@
var fs = require('fs'), "use strict";
stream, var fs = require('fs')
util = require('util'), , stream
semver = require('semver'); , debug = require('../debug')('BaseRollingFileStream')
, util = require('util')
, semver = require('semver');
if (semver.satisfies(process.version, '>=0.10.0')) { if (semver.satisfies(process.version, '>=0.10.0')) {
stream = require('stream'); stream = require('stream');
@@ -9,19 +11,12 @@ if (semver.satisfies(process.version, '>=0.10.0')) {
stream = require('readable-stream'); stream = require('readable-stream');
} }
var debug;
if (process.env.NODE_DEBUG && /\blog4js\b/.test(process.env.NODE_DEBUG)) {
debug = function(message) { console.error('LOG4JS: (BaseRollingFileStream) %s', message); };
} else {
debug = function() { };
}
module.exports = BaseRollingFileStream; module.exports = BaseRollingFileStream;
function BaseRollingFileStream(filename, options) { function BaseRollingFileStream(filename, options) {
debug("In BaseRollingFileStream"); debug("In BaseRollingFileStream");
this.filename = filename; this.filename = filename;
this.options = options || { encoding: 'utf8', mode: 0644, flags: 'a' }; this.options = options || { encoding: 'utf8', mode: parseInt('0644', 8), flags: 'a' };
this.currentSize = 0; this.currentSize = 0;
function currentFileSize(file) { function currentFileSize(file) {

View File

@@ -1,18 +1,13 @@
var BaseRollingFileStream = require('./BaseRollingFileStream'), "use strict";
format = require('../date_format'), var BaseRollingFileStream = require('./BaseRollingFileStream')
async = require('async'), , debug = require('../debug')('DateRollingFileStream')
fs = require('fs'), , format = require('../date_format')
util = require('util'); , async = require('async')
, fs = require('fs')
, util = require('util');
module.exports = DateRollingFileStream; module.exports = DateRollingFileStream;
var debug;
if (process.env.NODE_DEBUG && /\blog4js\b/.test(process.env.NODE_DEBUG)) {
debug = function(message) { console.error('LOG4JS: (DateRollingFileStream) %s', message); };
} else {
debug = function() { };
}
function DateRollingFileStream(filename, pattern, options, now) { function DateRollingFileStream(filename, pattern, options, now) {
debug("Now is " + now); debug("Now is " + now);
if (pattern && typeof(pattern) === 'object') { if (pattern && typeof(pattern) === 'object') {
@@ -24,13 +19,15 @@ function DateRollingFileStream(filename, pattern, options, now) {
this.now = now || Date.now; this.now = now || Date.now;
this.lastTimeWeWroteSomething = format.asString(this.pattern, new Date(this.now())); this.lastTimeWeWroteSomething = format.asString(this.pattern, new Date(this.now()));
this.baseFilename = filename; this.baseFilename = filename;
this.alwaysIncludePattern = false;
if (options) { if (options) {
if (options.alwaysIncludePattern) { if (options.alwaysIncludePattern) {
filename = filename + this.lastTimeWeWroteSomething; this.alwaysIncludePattern = true;
filename = this.baseFilename + this.lastTimeWeWroteSomething;
} }
delete options.alwaysIncludePattern; delete options.alwaysIncludePattern;
if (options === {}) { if (Object.keys(options).length === 0) {
options = null; options = null;
} }
} }
@@ -44,7 +41,8 @@ DateRollingFileStream.prototype.shouldRoll = function() {
var lastTime = this.lastTimeWeWroteSomething, var lastTime = this.lastTimeWeWroteSomething,
thisTime = format.asString(this.pattern, new Date(this.now())); thisTime = format.asString(this.pattern, new Date(this.now()));
debug("DateRollingFileStream.shouldRoll with now = " + this.now() + ", thisTime = " + thisTime + ", lastTime = " + lastTime); debug("DateRollingFileStream.shouldRoll with now = " +
this.now() + ", thisTime = " + thisTime + ", lastTime = " + lastTime);
this.lastTimeWeWroteSomething = thisTime; this.lastTimeWeWroteSomething = thisTime;
this.previousTime = lastTime; this.previousTime = lastTime;
@@ -53,17 +51,25 @@ DateRollingFileStream.prototype.shouldRoll = function() {
}; };
DateRollingFileStream.prototype.roll = function(filename, callback) { DateRollingFileStream.prototype.roll = function(filename, callback) {
var that = this, var that = this;
newFilename = this.baseFilename + this.previousTime;
debug("Starting roll"); debug("Starting roll");
if (this.alwaysIncludePattern) {
this.filename = this.baseFilename + this.lastTimeWeWroteSomething;
async.series([
this.closeTheStream.bind(this),
this.openTheStream.bind(this)
], callback);
} else {
var newFilename = this.baseFilename + this.previousTime;
async.series([ async.series([
this.closeTheStream.bind(this), this.closeTheStream.bind(this),
deleteAnyExistingFile, deleteAnyExistingFile,
renameTheCurrentFile, renameTheCurrentFile,
this.openTheStream.bind(this) this.openTheStream.bind(this)
], callback); ], callback);
}
function deleteAnyExistingFile(cb) { function deleteAnyExistingFile(cb) {
//on windows, you can get a EEXIST error if you rename a file to an existing file //on windows, you can get a EEXIST error if you rename a file to an existing file

View File

@@ -1,15 +1,10 @@
var BaseRollingFileStream = require('./BaseRollingFileStream'), "use strict";
util = require('util'), var BaseRollingFileStream = require('./BaseRollingFileStream')
path = require('path'), , debug = require('../debug')('RollingFileStream')
fs = require('fs'), , util = require('util')
async = require('async'); , path = require('path')
, fs = require('fs')
var debug; , async = require('async');
if (process.env.NODE_DEBUG && /\blog4js\b/.test(process.env.NODE_DEBUG)) {
debug = function(message) { console.error('LOG4JS: (RollingFileStream) %s', message); };
} else {
debug = function() { };
}
module.exports = RollingFileStream; module.exports = RollingFileStream;

View File

@@ -1,6 +1,6 @@
{ {
"name": "log4js", "name": "log4js",
"version": "0.6.3", "version": "0.6.7",
"description": "Port of Log4js to work with node.", "description": "Port of Log4js to work with node.",
"keywords": [ "keywords": [
"logging", "logging",

View File

@@ -1,6 +1,7 @@
var assert = require('assert'), "use strict";
vows = require('vows'), var assert = require('assert')
sandbox = require('sandboxed-module'); , vows = require('vows')
, sandbox = require('sandboxed-module');
function makeTestAppender() { function makeTestAppender() {
return { return {
@@ -12,7 +13,7 @@ function makeTestAppender() {
}, },
appender: function() { appender: function() {
var self = this; var self = this;
return function(logEvt) { self.logEvt = logEvt; } return function(logEvt) { self.logEvt = logEvt; };
} }
}; };
} }
@@ -114,12 +115,14 @@ vows.describe('log4js configure').addBatch({
} }
} }
}, },
log4js = sandbox.require('../lib/log4js', log4js = sandbox.require(
'../lib/log4js',
{ {
requires: { requires: {
'fs': fakeFS, 'fs': fakeFS,
} }
}); }
);
delete process.env.LOG4JS_CONFIG; delete process.env.LOG4JS_CONFIG;
return fileRead; return fileRead;
}, },

View File

@@ -1,7 +1,10 @@
"use strict";
// This test shows unexpected behaviour for log4js.configure() in log4js-node@0.4.3 and earlier: // This test shows unexpected behaviour for log4js.configure() in log4js-node@0.4.3 and earlier:
// 1) log4js.configure(), log4js.configure(null), log4js.configure({}), log4js.configure(<some object with no levels prop>) // 1) log4js.configure(), log4js.configure(null),
// log4js.configure({}), log4js.configure(<some object with no levels prop>)
// all set all loggers levels to trace, even if they were previously set to something else. // all set all loggers levels to trace, even if they were previously set to something else.
// 2) log4js.configure({levels:{}}), log4js.configure({levels: {foo: bar}}) leaves previously set logger levels intact. // 2) log4js.configure({levels:{}}), log4js.configure({levels: {foo:
// bar}}) leaves previously set logger levels intact.
// //
// Basic set up // Basic set up
@@ -28,7 +31,7 @@ var configs = {
'has empty levels': {levels: {}}, 'has empty levels': {levels: {}},
'has random levels': {levels: {foo: 'bar'}}, 'has random levels': {levels: {foo: 'bar'}},
'has some valid levels': {levels: {A: 'INFO'}} 'has some valid levels': {levels: {A: 'INFO'}}
} };
// Set up the basic vows batches for this test // Set up the basic vows batches for this test
var batches = []; var batches = [];
@@ -60,13 +63,85 @@ function getTopLevelContext(nop, configToTest, name) {
} }
return log4js; return log4js;
} }
} };
}; }
showProgress('Populating batch object...'); showProgress('Populating batch object...');
// Populating the batches programmatically, function checkForMismatch(topic) {
// as there are (configs.length x strLevels.length x strLevels.length) = 324 possible test combinations var er = topic.log4js.levels.toLevel(topic.baseLevel)
.isLessThanOrEqualTo(topic.log4js.levels.toLevel(topic.comparisonLevel));
assert.equal(
er,
topic.expectedResult,
'Mismatch: for setLevel(' + topic.baseLevel +
') was expecting a comparison with ' + topic.comparisonLevel +
' to be ' + topic.expectedResult
);
}
function checkExpectedResult(topic) {
var result = topic.log4js
.getLogger(getLoggerName(topic.baseLevel))
.isLevelEnabled(topic.log4js.levels.toLevel(topic.comparisonLevel));
assert.equal(
result,
topic.expectedResult,
'Failed: ' + getLoggerName(topic.baseLevel) +
'.isLevelEnabled( ' + topic.comparisonLevel + ' ) returned ' + result
);
}
function setupBaseLevelAndCompareToOtherLevels(baseLevel) {
var baseLevelSubContext = 'and checking the logger whose level was set to '+baseLevel ;
var subContext = { topic: baseLevel };
batch[context][baseLevelSubContext] = subContext;
// each logging level has strLevels sub-contexts,
// to exhaustively test all the combinations of
// setLevel(baseLevel) and isLevelEnabled(comparisonLevel) per config
strLevels.forEach(compareToOtherLevels(subContext));
}
function compareToOtherLevels(subContext) {
var baseLevel = subContext.topic;
return function (comparisonLevel) {
var comparisonLevelSubContext = 'with isLevelEnabled('+comparisonLevel+')';
// calculate this independently of log4js, but we'll add a vow
// later on to check that we're not mismatched with log4js
var expectedResult = strLevels.indexOf(baseLevel) <= strLevels.indexOf(comparisonLevel);
// the topic simply gathers all the parameters for the vow
// into an object, to simplify the vow's work.
subContext[comparisonLevelSubContext] = {
topic: function(baseLevel, log4js) {
return {
comparisonLevel: comparisonLevel,
baseLevel: baseLevel,
log4js: log4js,
expectedResult: expectedResult
};
}
};
var vow = 'should return '+expectedResult;
subContext[comparisonLevelSubContext][vow] = checkExpectedResult;
// the extra vow to check the comparison between baseLevel and
// comparisonLevel we performed earlier matches log4js'
// comparison too
var subSubContext = subContext[comparisonLevelSubContext];
subSubContext['finally checking for comparison mismatch with log4js'] = checkForMismatch;
};
}
// Populating the batches programmatically, as there are
// (configs.length x strLevels.length x strLevels.length) = 324
// possible test combinations
for (var cfg in configs) { for (var cfg in configs) {
var configToTest = configs[cfg]; var configToTest = configs[cfg];
var nop = configToTest === 'nop'; var nop = configToTest === 'nop';
@@ -84,43 +159,15 @@ for (var cfg in configs) {
batch[context]= getTopLevelContext(nop, configToTest, context); batch[context]= getTopLevelContext(nop, configToTest, context);
batches.push(batch); batches.push(batch);
// each top-level context has strLevels sub-contexts, one per logger which has set to a specific level in the top-level context's topic // each top-level context has strLevels sub-contexts, one per logger
strLevels.forEach(function (baseLevel) { // which has set to a specific level in the top-level context's topic
var baseLevelSubContext = 'and checking the logger whose level was set to '+baseLevel ; strLevels.forEach(setupBaseLevelAndCompareToOtherLevels);
batch[context][baseLevelSubContext] = {topic: baseLevel}; }
// each logging level has strLevels sub-contexts,
// to exhaustively test all the combinations of setLevel(baseLevel) and isLevelEnabled(comparisonLevel) per config
strLevels.forEach(function (comparisonLevel) {
var comparisonLevelSubContext = 'with isLevelEnabled('+comparisonLevel+')';
// calculate this independently of log4js, but we'll add a vow later on to check that we're not mismatched with log4js
var expectedResult = strLevels.indexOf(baseLevel) <= strLevels.indexOf(comparisonLevel);
// the topic simply gathers all the parameters for the vow into an object, to simplify the vow's work.
batch[context][baseLevelSubContext][comparisonLevelSubContext] = {topic: function(baseLevel, log4js){
return {comparisonLevel: comparisonLevel, baseLevel: baseLevel, log4js: log4js, expectedResult: expectedResult};
}};
var vow = 'should return '+expectedResult;
batch[context][baseLevelSubContext][comparisonLevelSubContext][vow] = function(topic){
var result = topic.log4js.getLogger(getLoggerName(topic.baseLevel)).isLevelEnabled(topic.log4js.levels.toLevel(topic.comparisonLevel));
assert.equal(result, topic.expectedResult, 'Failed: '+getLoggerName(topic.baseLevel)+'.isLevelEnabled( '+topic.comparisonLevel+' ) returned '+result);
};
// the extra vow to check the comparison between baseLevel and comparisonLevel we performed earlier matches log4js' comparison too
batch[context][baseLevelSubContext][comparisonLevelSubContext]['finally checking for comparison mismatch with log4js'] = function(topic){
var er = topic.log4js.levels.toLevel(topic.baseLevel).isLessThanOrEqualTo(topic.log4js.levels.toLevel(topic.comparisonLevel));
assert.equal(er, topic.expectedResult, 'Mismatch: for setLevel('+topic.baseLevel+') was expecting a comparison with '+topic.comparisonLevel+' to be '+topic.expectedResult);
};
});
});
};
showProgress('Running tests'); showProgress('Running tests');
var v = vows.describe('log4js.configure(), with or without a "levels" property'); var v = vows.describe('log4js.configure(), with or without a "levels" property');
batches.forEach(function(batch) {v=v.addBatch(batch)}); batches.forEach(function(batch) {v=v.addBatch(batch);});
v.export(module); v.export(module);

View File

@@ -1,3 +1,5 @@
/* jshint maxparams:7 */
"use strict";
var vows = require('vows') var vows = require('vows')
, assert = require('assert') , assert = require('assert')
, levels = require('../lib/levels'); , levels = require('../lib/levels');
@@ -19,25 +21,37 @@ function MockLogger() {
} }
function MockRequest(remoteAddr, method, originalUrl) { function MockRequest(remoteAddr, method, originalUrl, headers) {
this.socket = { remoteAddress: remoteAddr }; this.socket = { remoteAddress: remoteAddr };
this.originalUrl = originalUrl; this.originalUrl = originalUrl;
this.method = method; this.method = method;
this.httpVersionMajor = '5'; this.httpVersionMajor = '5';
this.httpVersionMinor = '0'; this.httpVersionMinor = '0';
this.headers = {} this.headers = headers || {};
var self = this;
Object.keys(this.headers).forEach(function(key) {
self.headers[key.toLowerCase()] = self.headers[key];
});
}
function MockResponse() {
this.end = function(chunk, encoding) {
};
this.writeHead = function(code, headers) {
};
} }
function MockResponse(statusCode) { function request(cl, method, url, code, reqHeaders, resHeaders) {
var req = new MockRequest('my.remote.addr', method, url, reqHeaders);
this.statusCode = statusCode; var res = new MockResponse();
cl(req, res, function() {});
this.end = function(chunk, encoding) { res.writeHead(code, resHeaders);
res.end('chunk','encoding');
}
} }
vows.describe('log4js connect logger').addBatch({ vows.describe('log4js connect logger').addBatch({
@@ -67,10 +81,7 @@ vows.describe('log4js connect logger').addBatch({
topic: function(clm) { topic: function(clm) {
var ml = new MockLogger(); var ml = new MockLogger();
var cl = clm.connectLogger(ml); var cl = clm.connectLogger(ml);
var req = new MockRequest('my.remote.addr', 'GET', 'http://url'); request(cl, 'GET', 'http://url', 200);
var res = new MockResponse(200);
cl(req, res, function() { });
res.end('chunk', 'encoding');
return ml.messages; return ml.messages;
}, },
@@ -90,10 +101,7 @@ vows.describe('log4js connect logger').addBatch({
var ml = new MockLogger(); var ml = new MockLogger();
ml.level = levels.FATAL; ml.level = levels.FATAL;
var cl = clm.connectLogger(ml); var cl = clm.connectLogger(ml);
var req = new MockRequest('my.remote.addr', 'GET', 'http://url'); request(cl, 'GET', 'http://url', 200);
var res = new MockResponse(200);
cl(req, res, function() { });
res.end('chunk', 'encoding');
return ml.messages; return ml.messages;
}, },
@@ -108,10 +116,7 @@ vows.describe('log4js connect logger').addBatch({
var ml = new MockLogger(); var ml = new MockLogger();
ml.level = levels.INFO; ml.level = levels.INFO;
var cl = clm.connectLogger(ml, { level: levels.INFO, format: ':method :url' } ); var cl = clm.connectLogger(ml, { level: levels.INFO, format: ':method :url' } );
var req = new MockRequest('my.remote.addr', 'GET', 'http://url'); request(cl, 'GET', 'http://url', 200);
var res = new MockResponse(200);
cl(req, res, function() { });
res.end('chunk', 'encoding');
return ml.messages; return ml.messages;
}, },
@@ -121,8 +126,101 @@ vows.describe('log4js connect logger').addBatch({
assert.ok(levels.INFO.isEqualTo(messages[0].level)); assert.ok(levels.INFO.isEqualTo(messages[0].level));
assert.equal(messages[0].message, 'GET http://url'); assert.equal(messages[0].message, 'GET http://url');
} }
},
'logger with options as string': {
topic: function(clm) {
var ml = new MockLogger();
ml.level = levels.INFO;
var cl = clm.connectLogger(ml, ':method :url');
request(cl, 'POST', 'http://meh', 200);
return ml.messages;
},
'should use the passed in format': function(messages) {
assert.equal(messages[0].message, 'POST http://meh');
}
},
'auto log levels': {
topic: function(clm) {
var ml = new MockLogger();
ml.level = levels.INFO;
var cl = clm.connectLogger(ml, { level: 'auto', format: ':method :url' });
request(cl, 'GET', 'http://meh', 200);
request(cl, 'GET', 'http://meh', 201);
request(cl, 'GET', 'http://meh', 302);
request(cl, 'GET', 'http://meh', 404);
request(cl, 'GET', 'http://meh', 500);
return ml.messages;
},
'should use INFO for 2xx': function(messages) {
assert.ok(levels.INFO.isEqualTo(messages[0].level));
assert.ok(levels.INFO.isEqualTo(messages[1].level));
},
'should use WARN for 3xx': function(messages) {
assert.ok(levels.WARN.isEqualTo(messages[2].level));
},
'should use ERROR for 4xx': function(messages) {
assert.ok(levels.ERROR.isEqualTo(messages[3].level));
},
'should use ERROR for 5xx': function(messages) {
assert.ok(levels.ERROR.isEqualTo(messages[4].level));
}
},
'format using a function': {
topic: function(clm) {
var ml = new MockLogger();
ml.level = levels.INFO;
var cl = clm.connectLogger(ml, function(req, res, formatFn) { return "I was called"; });
request(cl, 'GET', 'http://blah', 200);
return ml.messages;
},
'should call the format function': function(messages) {
assert.equal(messages[0].message, 'I was called');
}
},
'format that includes request headers': {
topic: function(clm) {
var ml = new MockLogger();
ml.level = levels.INFO;
var cl = clm.connectLogger(ml, ':req[Content-Type]');
request(
cl,
'GET', 'http://blah', 200,
{ 'Content-Type': 'application/json' }
);
return ml.messages;
},
'should output the request header': function(messages) {
assert.equal(messages[0].message, 'application/json');
}
},
'format that includes response headers': {
topic: function(clm) {
var ml = new MockLogger();
ml.level = levels.INFO;
var cl = clm.connectLogger(ml, ':res[Content-Type]');
request(
cl,
'GET', 'http://blah', 200,
null,
{ 'Content-Type': 'application/cheese' }
);
return ml.messages;
},
'should output the response header': function(messages) {
assert.equal(messages[0].message, 'application/cheese');
}
} }
} }
}).export(module); }).export(module);

View File

@@ -0,0 +1,33 @@
"use strict";
var assert = require('assert')
, vows = require('vows')
, layouts = require('../lib/layouts')
, sandbox = require('sandboxed-module');
vows.describe('../lib/appenders/console').addBatch({
'appender': {
topic: function() {
var messages = []
, fakeConsole = {
log: function(msg) { messages.push(msg); }
}
, appenderModule = sandbox.require(
'../lib/appenders/console',
{
globals: {
'console': fakeConsole
}
}
)
, appender = appenderModule.appender(layouts.messagePassThroughLayout);
appender({ data: ["blah"] });
return messages;
},
'should output to console': function(messages) {
assert.equal(messages[0], 'blah');
}
}
}).exportTo(module);

View File

@@ -1,8 +1,11 @@
var vows = require('vows'), "use strict";
assert = require('assert'), var vows = require('vows')
path = require('path'), , assert = require('assert')
fs = require('fs'), , path = require('path')
log4js = require('../lib/log4js'); , fs = require('fs')
, sandbox = require('sandboxed-module')
, log4js = require('../lib/log4js')
, EOL = require('os').EOL || '\n';
function removeFile(filename) { function removeFile(filename) {
return function() { return function() {
@@ -40,6 +43,46 @@ vows.describe('../lib/appenders/dateFile').addBatch({
'should only add one `exit` listener': function (initialCount) { 'should only add one `exit` listener': function (initialCount) {
assert.equal(process.listeners('exit').length, initialCount + 1); assert.equal(process.listeners('exit').length, initialCount + 1);
},
},
'exit listener': {
topic: function() {
var exitListener
, openedFiles = []
, dateFileAppender = sandbox.require(
'../lib/appenders/dateFile',
{
globals: {
process: {
on: function(evt, listener) {
exitListener = listener;
}
}
},
requires: {
'../streams': {
DateRollingFileStream: function(filename) {
openedFiles.push(filename);
this.end = function() {
openedFiles.shift();
};
}
}
}
}
);
for (var i=0; i < 5; i += 1) {
dateFileAppender.appender('test' + i);
}
assert.isNotEmpty(openedFiles);
exitListener();
return openedFiles;
},
'should close all open files': function(openedFiles) {
assert.isEmpty(openedFiles);
} }
}, },
@@ -66,7 +109,10 @@ vows.describe('../lib/appenders/dateFile').addBatch({
}, },
'should use the basic layout': function(contents) { 'should use the basic layout': function(contents) {
assert.match(contents, /\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}\] \[INFO\] default-settings - /); assert.match(
contents,
/\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}\] \[INFO\] default-settings - /
);
} }
} }
@@ -89,13 +135,17 @@ vows.describe('../lib/appenders/dateFile').addBatch({
teardown: removeFile('date-file-test.log'), teardown: removeFile('date-file-test.log'),
'should load appender configuration from a json file': function(err, contents) { 'should load appender configuration from a json file': function(err, contents) {
assert.include(contents, 'this should be written to the file' + require('os').EOL); if (err) {
throw err;
}
assert.include(contents, 'this should be written to the file' + EOL);
assert.equal(contents.indexOf('this should not be written to the file'), -1); assert.equal(contents.indexOf('this should not be written to the file'), -1);
} }
}, },
'with options.alwaysIncludePattern': { 'with options.alwaysIncludePattern': {
topic: function() { topic: function() {
var log4js = require('../lib/log4js') var self = this
, log4js = require('../lib/log4js')
, format = require('../lib/date_format') , format = require('../lib/date_format')
, logger , logger
, options = { , options = {
@@ -113,15 +163,58 @@ vows.describe('../lib/appenders/dateFile').addBatch({
] ]
} }
, thisTime = format.asString(options.appenders[0].pattern, new Date()); , thisTime = format.asString(options.appenders[0].pattern, new Date());
fs.writeFileSync(
path.join(__dirname, 'date-file-test' + thisTime),
"this is existing data" + EOL,
'utf8'
);
log4js.clearAppenders(); log4js.clearAppenders();
log4js.configure(options); log4js.configure(options);
logger = log4js.getLogger('tests'); logger = log4js.getLogger('tests');
logger.warn('this should be written to the file with the appended date'); logger.warn('this should be written to the file with the appended date');
this.teardown = removeFile('date-file-test' + thisTime); this.teardown = removeFile('date-file-test' + thisTime);
fs.readFile(path.join(__dirname, 'date-file-test' + thisTime), 'utf8', this.callback); //wait for filesystem to catch up
setTimeout(function() {
fs.readFile(path.join(__dirname, 'date-file-test' + thisTime), 'utf8', self.callback);
}, 100);
}, },
'should create file with the correct pattern': function(contents) { 'should create file with the correct pattern': function(contents) {
assert.include(contents, 'this should be written to the file with the appended date'); assert.include(contents, 'this should be written to the file with the appended date');
},
'should not overwrite the file on open (bug found in issue #132)': function(contents) {
assert.include(contents, 'this is existing data');
}
},
'with cwd option': {
topic: function() {
var fileOpened,
appender = sandbox.require(
'../lib/appenders/dateFile',
{ requires:
{ '../streams':
{ DateRollingFileStream:
function(file) {
fileOpened = file;
return {
on: function() {},
end: function() {}
};
}
}
}
}
);
appender.configure(
{
filename: "whatever.log",
maxLogSize: 10
},
{ cwd: '/absolute/path/to' }
);
return fileOpened;
},
'should prepend options.cwd to config.filename': function(fileOpened) {
assert.equal(fileOpened, "/absolute/path/to/whatever.log");
} }
} }

View File

@@ -1,3 +1,4 @@
"use strict";
var vows = require('vows') var vows = require('vows')
, assert = require('assert') , assert = require('assert')
, dateFormat = require('../lib/date_format'); , dateFormat = require('../lib/date_format');
@@ -18,6 +19,26 @@ vows.describe('date_format').addBatch({
dateFormat.asString(date), dateFormat.asString(date),
'2010-01-11 14:31:30.005' '2010-01-11 14:31:30.005'
); );
},
'should provide a ISO8601 with timezone offset format': function(date) {
date.getTimezoneOffset = function() { return -660; };
assert.equal(
dateFormat.asString(dateFormat.ISO8601_WITH_TZ_OFFSET_FORMAT, date),
"2010-01-11T14:31:30+1100"
);
date.getTimezoneOffset = function() { return 120; };
assert.equal(
dateFormat.asString(dateFormat.ISO8601_WITH_TZ_OFFSET_FORMAT, date),
"2010-01-11T14:31:30-0200"
);
},
'should provide a just-the-time format': function(date) {
assert.equal(
dateFormat.asString(dateFormat.ABSOLUTETIME_FORMAT, date),
'14:31:30.005'
);
} }
} }
}).export(module); }).export(module);

72
test/debug-test.js Normal file
View File

@@ -0,0 +1,72 @@
"use strict";
var vows = require('vows')
, assert = require('assert')
, sandbox = require('sandboxed-module')
, fakeConsole = {
error: function(format, label, message) {
this.logged = [ format, label, message ];
}
}
, globals = function(debugValue) {
return {
process: {
env: {
'NODE_DEBUG': debugValue
}
},
console: fakeConsole
};
};
vows.describe('../lib/debug').addBatch({
'when NODE_DEBUG is set to log4js': {
topic: function() {
var debug = sandbox.require(
'../lib/debug',
{ 'globals': globals('log4js') }
);
fakeConsole.logged = [];
debug('cheese')('biscuits');
return fakeConsole.logged;
},
'it should log to console.error': function(logged) {
assert.equal(logged[0], 'LOG4JS: (%s) %s');
assert.equal(logged[1], 'cheese');
assert.equal(logged[2], 'biscuits');
}
},
'when NODE_DEBUG is set to not log4js': {
topic: function() {
var debug = sandbox.require(
'../lib/debug',
{ globals: globals('other_module') }
);
fakeConsole.logged = [];
debug('cheese')('biscuits');
return fakeConsole.logged;
},
'it should not log to console.error': function(logged) {
assert.equal(logged.length, 0);
}
},
'when NODE_DEBUG is not set': {
topic: function() {
var debug = sandbox.require(
'../lib/debug',
{ globals: globals(null) }
);
fakeConsole.logged = [];
debug('cheese')('biscuits');
return fakeConsole.logged;
},
'it should not log to console.error': function(logged) {
assert.equal(logged.length, 0);
}
}
}).exportTo(module);

View File

@@ -1,6 +1,8 @@
"use strict";
var vows = require('vows') var vows = require('vows')
, fs = require('fs') , fs = require('fs')
, path = require('path') , path = require('path')
, sandbox = require('sandboxed-module')
, log4js = require('../lib/log4js') , log4js = require('../lib/log4js')
, assert = require('assert'); , assert = require('assert');
@@ -29,18 +31,59 @@ vows.describe('log4js fileAppender').addBatch({
return listenersCount; return listenersCount;
}, },
'does not adds more than one `exit` listeners': function (initialCount) { 'does not add more than one `exit` listeners': function (initialCount) {
assert.ok(process.listeners('exit').length <= initialCount + 1); assert.ok(process.listeners('exit').length <= initialCount + 1);
} }
}, },
'exit listener': {
topic: function() {
var exitListener
, openedFiles = []
, fileAppender = sandbox.require(
'../lib/appenders/file',
{
globals: {
process: {
on: function(evt, listener) {
exitListener = listener;
}
}
},
requires: {
'../streams': {
RollingFileStream: function(filename) {
openedFiles.push(filename);
this.end = function() {
openedFiles.shift();
};
this.on = function() {};
}
}
}
}
);
for (var i=0; i < 5; i += 1) {
fileAppender.appender('test' + i, null, 100);
}
assert.isNotEmpty(openedFiles);
exitListener();
return openedFiles;
},
'should close all open files': function(openedFiles) {
assert.isEmpty(openedFiles);
}
},
'with default fileAppender settings': { 'with default fileAppender settings': {
topic: function() { topic: function() {
var that = this var that = this
, testFile = path.join(__dirname, '/fa-default-test.log') , testFile = path.join(__dirname, '/fa-default-test.log')
, logger = log4js.getLogger('default-settings'); , logger = log4js.getLogger('default-settings');
remove(testFile); remove(testFile);
//log4js.configure({ appenders:[ { type: "file", filename: testFile, category: 'default-settings' } ] });
log4js.clearAppenders(); log4js.clearAppenders();
log4js.addAppender(require('../lib/appenders/file').appender(testFile), 'default-settings'); log4js.addAppender(require('../lib/appenders/file').appender(testFile), 'default-settings');
@@ -54,7 +97,10 @@ vows.describe('log4js fileAppender').addBatch({
assert.include(fileContents, "This should be in the file.\n"); assert.include(fileContents, "This should be in the file.\n");
}, },
'log messages should be in the basic layout format': function(err, fileContents) { 'log messages should be in the basic layout format': function(err, fileContents) {
assert.match(fileContents, /\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}\] \[INFO\] default-settings - /); assert.match(
fileContents,
/\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}\] \[INFO\] default-settings - /
);
} }
}, },
'with a max file size and no backups': { 'with a max file size and no backups': {
@@ -66,7 +112,10 @@ vows.describe('log4js fileAppender').addBatch({
remove(testFile + '.1'); remove(testFile + '.1');
//log file of 100 bytes maximum, no backups //log file of 100 bytes maximum, no backups
log4js.clearAppenders(); log4js.clearAppenders();
log4js.addAppender(require('../lib/appenders/file').appender(testFile, log4js.layouts.basicLayout, 100, 0), 'max-file-size'); log4js.addAppender(
require('../lib/appenders/file').appender(testFile, log4js.layouts.basicLayout, 100, 0),
'max-file-size'
);
logger.info("This is the first log message."); logger.info("This is the first log message.");
logger.info("This is an intermediate log message."); logger.info("This is an intermediate log message.");
logger.info("This is the second log message."); logger.info("This is the second log message.");
@@ -85,7 +134,9 @@ vows.describe('log4js fileAppender').addBatch({
}, },
'starting with the test file name should be two': function(err, files) { 'starting with the test file name should be two': function(err, files) {
//there will always be one backup if you've specified a max log size //there will always be one backup if you've specified a max log size
var logFiles = files.filter(function(file) { return file.indexOf('fa-maxFileSize-test.log') > -1; }); var logFiles = files.filter(
function(file) { return file.indexOf('fa-maxFileSize-test.log') > -1; }
);
assert.equal(logFiles.length, 2); assert.equal(logFiles.length, 2);
} }
} }
@@ -100,7 +151,10 @@ vows.describe('log4js fileAppender').addBatch({
//log file of 50 bytes maximum, 2 backups //log file of 50 bytes maximum, 2 backups
log4js.clearAppenders(); log4js.clearAppenders();
log4js.addAppender(require('../lib/appenders/file').appender(testFile, log4js.layouts.basicLayout, 50, 2), 'max-file-size-backups'); log4js.addAppender(
require('../lib/appenders/file').appender(testFile, log4js.layouts.basicLayout, 50, 2),
'max-file-size-backups'
);
logger.info("This is the first log message."); logger.info("This is the first log message.");
logger.info("This is the second log message."); logger.info("This is the second log message.");
logger.info("This is the third log message."); logger.info("This is the third log message.");
@@ -119,14 +173,20 @@ vows.describe('log4js fileAppender').addBatch({
}, },
'the log files': { 'the log files': {
topic: function(files) { topic: function(files) {
var logFiles = files.filter(function(file) { return file.indexOf('fa-maxFileSize-with-backups-test.log') > -1; }); var logFiles = files.filter(
function(file) { return file.indexOf('fa-maxFileSize-with-backups-test.log') > -1; }
);
return logFiles; return logFiles;
}, },
'should be 3': function (files) { 'should be 3': function (files) {
assert.equal(files.length, 3); assert.equal(files.length, 3);
}, },
'should be named in sequence': function (files) { 'should be named in sequence': function (files) {
assert.deepEqual(files, ['fa-maxFileSize-with-backups-test.log', 'fa-maxFileSize-with-backups-test.log.1', 'fa-maxFileSize-with-backups-test.log.2']); assert.deepEqual(files, [
'fa-maxFileSize-with-backups-test.log',
'fa-maxFileSize-with-backups-test.log.1',
'fa-maxFileSize-with-backups-test.log.2'
]);
}, },
'and the contents of the first file': { 'and the contents of the first file': {
topic: function(logFiles) { topic: function(logFiles) {
@@ -175,5 +235,46 @@ vows.describe('log4js fileAppender').addBatch({
} }
} }
} }
}).addBatch({
'when underlying stream errors': {
topic: function() {
var consoleArgs
, errorHandler
, fileAppender = sandbox.require(
'../lib/appenders/file',
{
globals: {
console: {
error: function() {
consoleArgs = Array.prototype.slice.call(arguments);
}
}
},
requires: {
'../streams': {
RollingFileStream: function(filename) {
this.end = function() {};
this.on = function(evt, cb) {
if (evt === 'error') {
errorHandler = cb;
}
};
}
}
}
}
);
fileAppender.appender('test1.log', null, 100);
errorHandler({ error: 'aargh' });
return consoleArgs;
},
'should log the error to console.error': function(consoleArgs) {
assert.isNotEmpty(consoleArgs);
assert.equal(consoleArgs[0], 'log4js.fileAppender - Writing to file %s, error happened ');
assert.equal(consoleArgs[1], 'test1.log');
assert.equal(consoleArgs[2].error, 'aargh');
}
}
}).export(module); }).export(module);

View File

@@ -1,13 +1,17 @@
"use strict";
var vows = require('vows') var vows = require('vows')
, assert = require('assert') , assert = require('assert')
, sandbox = require('sandboxed-module') , sandbox = require('sandboxed-module')
, log4js = require('../lib/log4js') , log4js = require('../lib/log4js')
, realLayouts = require('../lib/layouts')
, setupLogging = function(options, category, compressedLength) { , setupLogging = function(options, category, compressedLength) {
var fakeDgram = { var fakeDgram = {
sent: false, sent: false,
socket: { socket: {
packetLength: 0, packetLength: 0,
closed: false,
close: function() { close: function() {
this.closed = true;
}, },
send: function(pkt, offset, pktLength, port, host) { send: function(pkt, offset, pktLength, port, host) {
fakeDgram.sent = true; fakeDgram.sent = true;
@@ -26,6 +30,11 @@ var vows = require('vows')
, fakeZlib = { , fakeZlib = {
gzip: function(objectToCompress, callback) { gzip: function(objectToCompress, callback) {
fakeZlib.uncompressed = objectToCompress; fakeZlib.uncompressed = objectToCompress;
if (this.shouldError) {
callback({ stack: "oh noes" });
return;
}
if (compressedLength) { if (compressedLength) {
callback(null, { length: compressedLength }); callback(null, { length: compressedLength });
} else { } else {
@@ -33,10 +42,35 @@ var vows = require('vows')
} }
} }
} }
, exitHandler
, fakeConsole = {
error: function(message) {
this.message = message;
}
}
, fakeLayouts = {
layout: function(type, options) {
this.type = type;
this.options = options;
return realLayouts.messagePassThroughLayout;
},
messagePassThroughLayout: realLayouts.messagePassThroughLayout
}
, appender = sandbox.require('../lib/appenders/gelf', { , appender = sandbox.require('../lib/appenders/gelf', {
requires: { requires: {
dgram: fakeDgram, dgram: fakeDgram,
zlib: fakeZlib zlib: fakeZlib,
'../layouts': fakeLayouts
},
globals: {
process: {
on: function(evt, handler) {
if (evt === 'exit') {
exitHandler = handler;
}
}
},
console: fakeConsole
} }
}); });
@@ -45,12 +79,13 @@ var vows = require('vows')
return { return {
dgram: fakeDgram, dgram: fakeDgram,
compress: fakeZlib, compress: fakeZlib,
exitHandler: exitHandler,
console: fakeConsole,
layouts: fakeLayouts,
logger: log4js.getLogger(category || "gelf-test") logger: log4js.getLogger(category || "gelf-test")
}; };
}; };
//log4js.configure({ doNotReplaceConsole: true });
vows.describe('log4js gelfAppender').addBatch({ vows.describe('log4js gelfAppender').addBatch({
'with default gelfAppender settings': { 'with default gelfAppender settings': {
@@ -134,5 +169,91 @@ vows.describe('log4js gelfAppender').addBatch({
assert.equal(message.facility, 'nonsense'); assert.equal(message.facility, 'nonsense');
} }
} }
},
'on process.exit': {
topic: function() {
var setup = setupLogging();
setup.exitHandler();
return setup;
},
'should close open sockets': function(setup) {
assert.isTrue(setup.dgram.socket.closed);
} }
},
'on zlib error': {
topic: function() {
var setup = setupLogging();
setup.compress.shouldError = true;
setup.logger.info('whatever');
return setup;
},
'should output to console.error': function(setup) {
assert.equal(setup.console.message, 'oh noes');
}
},
'with layout in configuration': {
topic: function() {
var setup = setupLogging({
layout: {
type: 'madeuplayout',
earlgrey: 'yes, please'
}
});
return setup;
},
'should pass options to layout': function(setup) {
assert.equal(setup.layouts.type, 'madeuplayout');
assert.equal(setup.layouts.options.earlgrey, 'yes, please');
}
},
'with custom fields options': {
topic: function() {
var setup = setupLogging({
host: 'somewhere',
port: 12345,
hostname: 'cheese',
facility: 'nonsense',
customFields: {
_every1: 'Hello every one',
_every2: 'Hello every two'
}
});
var myFields = {
GELF: true,
_every2: 'Overwritten!',
_myField: 'This is my field!'
};
setup.logger.debug(myFields, "Just testing.");
return setup;
},
'the dgram packet': {
topic: function(setup) {
return setup.dgram;
},
'should pick up the options': function(dgram) {
assert.equal(dgram.socket.host, 'somewhere');
assert.equal(dgram.socket.port, 12345);
}
},
'the uncompressed packet': {
topic: function(setup) {
var message = JSON.parse(setup.compress.uncompressed);
return message;
},
'should pick up the options': function(message) {
assert.equal(message.host, 'cheese');
assert.equal(message.facility, 'nonsense');
assert.equal(message._every1, 'Hello every one'); // the default value
assert.equal(message._every2, 'Overwritten!'); // the overwritten value
assert.equal(message._myField, 'This is my field!'); // the value for this message only
assert.equal(message.short_message, 'Just testing.'); // skip the field object
assert.equal(message.full_message, 'Just testing.'); // should be as same as short_message
}
}
}
}).export(module); }).export(module);

View File

@@ -1,5 +1,6 @@
var vows = require('vows'), "use strict";
assert = require('assert'); var vows = require('vows')
, assert = require('assert');
vows.describe('log4js global loglevel').addBatch({ vows.describe('log4js global loglevel').addBatch({
'global loglevel' : { 'global loglevel' : {

View File

@@ -1,9 +1,11 @@
var vows = require('vows'); "use strict";
var assert = require('assert'); var vows = require('vows')
var sandbox = require('sandboxed-module'); , assert = require('assert')
, sandbox = require('sandboxed-module');
function fancyResultingHookioAppender(opts) { function fancyResultingHookioAppender(hookNotReady) {
var result = { ons: {}, emissions: {}, logged: [], configs: [] }; var emitHook = !hookNotReady
, result = { ons: {}, emissions: {}, logged: [], configs: [] };
var fakeLog4Js = { var fakeLog4Js = {
appenderMakers: {} appenderMakers: {}
@@ -13,7 +15,7 @@ function fancyResultingHookioAppender(opts) {
result.actualLoggerConfig = config; result.actualLoggerConfig = config;
return function log(logEvent) { return function log(logEvent) {
result.logged.push(logEvent); result.logged.push(logEvent);
} };
}; };
}; };
@@ -23,7 +25,7 @@ function fancyResultingHookioAppender(opts) {
}; };
fakeHookIo.Hook.prototype.on = function (eventName, functionToExec) { fakeHookIo.Hook.prototype.on = function (eventName, functionToExec) {
result.ons[eventName] = { functionToExec: functionToExec }; result.ons[eventName] = { functionToExec: functionToExec };
if (eventName === 'hook::ready') { if (emitHook && eventName === 'hook::ready') {
functionToExec(); functionToExec();
} }
}; };
@@ -32,7 +34,8 @@ function fancyResultingHookioAppender(opts) {
result.emissions[eventName].push({data: data}); result.emissions[eventName].push({data: data});
var on = '*::' + eventName; var on = '*::' + eventName;
if (eventName !== 'hook::ready' && result.ons[on]) { if (eventName !== 'hook::ready' && result.ons[on]) {
result.ons[on].callingCount = result.ons[on].callingCount ? result.ons[on].callingCount += 1 : 1; result.ons[on].callingCount =
result.ons[on].callingCount ? result.ons[on].callingCount += 1 : 1;
result.ons[on].functionToExec(data); result.ons[on].functionToExec(data);
} }
}; };
@@ -52,9 +55,28 @@ vows.describe('log4js hookioAppender').addBatch({
'master': { 'master': {
topic: function() { topic: function() {
var fancy = fancyResultingHookioAppender(); var fancy = fancyResultingHookioAppender();
var logger = fancy.theModule.configure({ name: 'ohno', mode: 'master', 'hook-port': 5001, appender: { type: 'file' } }); var logger = fancy.theModule.configure(
logger({ level: { levelStr: 'INFO' }, data: "ALRIGHTY THEN", startTime: '2011-10-27T03:53:16.031Z' }); {
logger({ level: { levelStr: 'DEBUG' }, data: "OH WOW", startTime: '2011-10-27T04:53:16.031Z'}); name: 'ohno',
mode: 'master',
'hook-port': 5001,
appender: { type: 'file' }
}
);
logger(
{
level: { levelStr: 'INFO' },
data: "ALRIGHTY THEN",
startTime: '2011-10-27T03:53:16.031Z'
}
);
logger(
{
level: { levelStr: 'DEBUG' },
data: "OH WOW",
startTime: '2011-10-27T04:53:16.031Z'
}
);
return fancy.theResult; return fancy.theResult;
}, },
@@ -84,9 +106,21 @@ vows.describe('log4js hookioAppender').addBatch({
'should emit logging events to the master': { 'should emit logging events to the master': {
topic: function() { topic: function() {
var fancy = fancyResultingHookioAppender(); var fancy = fancyResultingHookioAppender();
var logger = fancy.theModule.configure({ name: 'ohno', mode: 'worker', appender: { type: 'file' } }); var logger = fancy.theModule.configure({
logger({ level: { levelStr: 'INFO' }, data: "ALRIGHTY THEN", startTime: '2011-10-27T03:53:16.031Z' }); name: 'ohno',
logger({ level: { levelStr: 'DEBUG' }, data: "OH WOW", startTime: '2011-10-27T04:53:16.031Z'}); mode: 'worker',
appender: { type: 'file' }
});
logger({
level: { levelStr: 'INFO' },
data: "ALRIGHTY THEN",
startTime: '2011-10-27T03:53:16.031Z'
});
logger({
level: { levelStr: 'DEBUG' },
data: "OH WOW",
startTime: '2011-10-27T04:53:16.031Z'
});
return fancy.theResult; return fancy.theResult;
}, },
@@ -97,5 +131,46 @@ vows.describe('log4js hookioAppender').addBatch({
assert.isUndefined(result.ons['*::ohno::log']); assert.isUndefined(result.ons['*::ohno::log']);
} }
} }
},
'when hook not ready': {
topic: function() {
var fancy = fancyResultingHookioAppender(true)
, logger = fancy.theModule.configure({
name: 'ohno',
mode: 'worker'
});
logger({
level: { levelStr: 'INFO' },
data: "something",
startTime: '2011-10-27T03:45:12.031Z'
});
return fancy;
},
'should buffer the log events': function(fancy) {
assert.isUndefined(fancy.theResult.emissions['ohno::log']);
},
},
'when hook ready': {
topic: function() {
var fancy = fancyResultingHookioAppender(true)
, logger = fancy.theModule.configure({
name: 'ohno',
mode: 'worker'
});
logger({
level: { levelStr: 'INFO' },
data: "something",
startTime: '2011-10-27T03:45:12.031Z'
});
fancy.theResult.ons['hook::ready'].functionToExec();
return fancy;
},
'should emit the buffered events': function(fancy) {
assert.equal(fancy.theResult.emissions['ohno::log'].length, 1);
} }
}
}).exportTo(module); }).exportTo(module);

View File

@@ -1,5 +1,6 @@
var vows = require('vows'), "use strict";
assert = require('assert'); var vows = require('vows')
, assert = require('assert');
//used for patternLayout tests. //used for patternLayout tests.
function test(args, pattern, value) { function test(args, pattern, value) {
@@ -25,9 +26,8 @@ vows.describe('log4js layouts').addBatch({
toString: function() { return "ERROR"; } toString: function() { return "ERROR"; }
} }
}); });
assert.equal(output, '\033[31m[2010-12-05 14:18:30.045] [ERROR] cheese - \033[39mnonsense'); assert.equal(output, '\x1B[31m[2010-12-05 14:18:30.045] [ERROR] cheese - \x1B[39mnonsense');
}, },
'should support the console.log format for the message': function(layout) { 'should support the console.log format for the message': function(layout) {
var output = layout({ var output = layout({
data: ["thing %d", 2], data: ["thing %d", 2],
@@ -37,7 +37,7 @@ vows.describe('log4js layouts').addBatch({
toString: function() { return "ERROR"; } toString: function() { return "ERROR"; }
} }
}); });
assert.equal(output, '\033[31m[2010-12-05 14:18:30.045] [ERROR] cheese - \033[39mthing 2'); assert.equal(output, '\x1B[31m[2010-12-05 14:18:30.045] [ERROR] cheese - \x1B[39mthing 2');
} }
}, },
@@ -58,51 +58,50 @@ vows.describe('log4js layouts').addBatch({
}, },
'should support the console.log format for the message' : function(layout) { 'should support the console.log format for the message' : function(layout) {
assert.equal(layout({ assert.equal(layout({
data: ["thing %d", 1, "cheese"] data: ["thing %d", 1, "cheese"],
, startTime: new Date(2010, 11, 5, 14, 18, 30, 45) startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
, categoryName: "cheese" categoryName: "cheese",
, level : { level : {
colour: "green" colour: "green",
, toString: function() { return "ERROR"; } toString: function() { return "ERROR"; }
} }
}), "thing 1 'cheese'"); }), "thing 1 cheese");
}, },
'should output the first item even if it is not a string': function(layout) { 'should output the first item even if it is not a string': function(layout) {
assert.equal(layout({ assert.equal(layout({
data: [ { thing: 1} ] data: [ { thing: 1} ],
, startTime: new Date(2010, 11, 5, 14, 18, 30, 45) startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
, categoryName: "cheese" categoryName: "cheese",
, level: { level: {
colour: "green" colour: "green",
, toString: function() { return "ERROR"; } toString: function() { return "ERROR"; }
} }
}), "{ thing: 1 }"); }), "{ thing: 1 }");
}, },
'should print the stacks of a passed error objects': function(layout) { 'should print the stacks of a passed error objects': function(layout) {
assert.isArray(layout({ assert.isArray(layout({
data: [ new Error() ] data: [ new Error() ],
, startTime: new Date(2010, 11, 5, 14, 18, 30, 45) startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
, categoryName: "cheese" categoryName: "cheese",
, level: { level: {
colour: "green" colour: "green",
, toString: function() { return "ERROR"; } toString: function() { return "ERROR"; }
} }
}).match(/Error\s+at Object\..*\s+\((.*)test[\\\/]layouts-test\.js\:\d+\:\d+\)\s+at runTest/) }).match(/Error\s+at Object\..*\s+\((.*)test[\\\/]layouts-test\.js\:\d+\:\d+\)\s+at runTest/)
, 'regexp did not return a match'); , 'regexp did not return a match');
}, },
'with passed augmented errors': 'with passed augmented errors': {
{ topic: topic: function(layout){
function(layout){
var e = new Error("My Unique Error Message"); var e = new Error("My Unique Error Message");
e.augmented = "My Unique attribute value" e.augmented = "My Unique attribute value";
e.augObj = { at1: "at2" } e.augObj = { at1: "at2" };
return layout({ return layout({
data: [ e ] data: [ e ],
, startTime: new Date(2010, 11, 5, 14, 18, 30, 45) startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
, categoryName: "cheese" categoryName: "cheese",
, level: { level: {
colour: "green" colour: "green",
, toString: function() { return "ERROR"; } toString: function() { return "ERROR"; }
} }
}); });
}, },
@@ -150,7 +149,10 @@ vows.describe('log4js layouts').addBatch({
lines = output.split(/\n/); lines = output.split(/\n/);
assert.equal(lines.length - 1, stack.length); assert.equal(lines.length - 1, stack.length);
assert.equal(lines[0], "[2010-12-05 14:18:30.045] [DEBUG] tests - this is a test [Error: Some made-up error]"); assert.equal(
lines[0],
"[2010-12-05 14:18:30.045] [DEBUG] tests - this is a test [Error: Some made-up error]"
);
for (var i = 1; i < stack.length; i++) { for (var i = 1; i < stack.length; i++) {
assert.equal(lines[i+2], stack[i+1]); assert.equal(lines[i+2], stack[i+1]);
@@ -163,7 +165,11 @@ vows.describe('log4js layouts').addBatch({
message: 'Gorgonzola smells.' message: 'Gorgonzola smells.'
}]; }];
output = layout(event); output = layout(event);
assert.equal(output, "[2010-12-05 14:18:30.045] [DEBUG] tests - this is a test { name: 'Cheese', message: 'Gorgonzola smells.' }"); assert.equal(
output,
"[2010-12-05 14:18:30.045] [DEBUG] tests - this is a test " +
"{ name: 'Cheese', message: 'Gorgonzola smells.' }"
);
} }
}, },
@@ -179,7 +185,8 @@ vows.describe('log4js layouts').addBatch({
}, layout = require('../lib/layouts').patternLayout }, layout = require('../lib/layouts').patternLayout
, tokens = { , tokens = {
testString: 'testStringToken', testString: 'testStringToken',
testFunction: function() { return 'testFunctionToken'; } testFunction: function() { return 'testFunctionToken'; },
fnThatUsesLogEvent: function(logEvent) { return logEvent.level.toString(); }
}; };
return [layout, event, tokens]; return [layout, event, tokens];
}, },
@@ -226,6 +233,9 @@ vows.describe('log4js layouts').addBatch({
'should output anything not preceded by % as literal': function(args) { 'should output anything not preceded by % as literal': function(args) {
test(args, 'blah blah blah', 'blah blah blah'); test(args, 'blah blah blah', 'blah blah blah');
}, },
'should output the original string if no replacer matches the token': function(args) {
test(args, '%a{3}', 'a{3}');
},
'should handle complicated patterns': function(args) { 'should handle complicated patterns': function(args) {
test(args, test(args,
'%m%n %c{2} at %d{ABSOLUTE} cheese %p%n', '%m%n %c{2} at %d{ABSOLUTE} cheese %p%n',
@@ -250,7 +260,7 @@ vows.describe('log4js layouts').addBatch({
test(args, '%-10p', 'DEBUG '); test(args, '%-10p', 'DEBUG ');
}, },
'%[%r%] should output colored time': function(args) { '%[%r%] should output colored time': function(args) {
test(args, '%[%r%]', '\033[36m14:18:30\033[39m'); test(args, '%[%r%]', '\x1B[36m14:18:30\x1B[39m');
}, },
'%x{testString} should output the string stored in tokens': function(args) { '%x{testString} should output the string stored in tokens': function(args) {
test(args, '%x{testString}', 'testStringToken'); test(args, '%x{testString}', 'testStringToken');
@@ -261,8 +271,21 @@ vows.describe('log4js layouts').addBatch({
'%x{doesNotExist} should output the string stored in tokens': function(args) { '%x{doesNotExist} should output the string stored in tokens': function(args) {
test(args, '%x{doesNotExist}', '%x{doesNotExist}'); test(args, '%x{doesNotExist}', '%x{doesNotExist}');
}, },
'%x{fnThatUsesLogEvent} should be able to use the logEvent': function(args) {
test(args, '%x{fnThatUsesLogEvent}', 'DEBUG');
},
'%x should output the string stored in tokens': function(args) { '%x should output the string stored in tokens': function(args) {
test(args, '%x', '%x'); test(args, '%x', '%x');
}, },
},
'layout makers': {
topic: require('../lib/layouts'),
'should have a maker for each layout': function(layouts) {
assert.ok(layouts.layout("messagePassThrough"));
assert.ok(layouts.layout("basic"));
assert.ok(layouts.layout("colored"));
assert.ok(layouts.layout("coloured"));
assert.ok(layouts.layout("pattern"));
}
} }
}).export(module); }).export(module);

View File

@@ -1,3 +1,4 @@
"use strict";
var vows = require('vows') var vows = require('vows')
, assert = require('assert') , assert = require('assert')
, levels = require('../lib/levels'); , levels = require('../lib/levels');
@@ -47,117 +48,304 @@ vows.describe('levels').addBatch({
'ALL': { 'ALL': {
topic: levels.ALL, topic: levels.ALL,
'should be less than the other levels': function(all) { 'should be less than the other levels': function(all) {
assertThat(all).isLessThanOrEqualTo([levels.ALL, levels.TRACE, levels.DEBUG, levels.INFO, levels.WARN, levels.ERROR, levels.FATAL, levels.OFF]); assertThat(all).isLessThanOrEqualTo(
[
levels.ALL,
levels.TRACE,
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.OFF
]
);
}, },
'should be greater than no levels': function(all) { 'should be greater than no levels': function(all) {
assertThat(all).isNotGreaterThanOrEqualTo([levels.TRACE, levels.DEBUG, levels.INFO, levels.WARN, levels.ERROR, levels.FATAL, levels.OFF]); assertThat(all).isNotGreaterThanOrEqualTo(
[
levels.TRACE,
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.OFF
]
);
}, },
'should only be equal to ALL': function(all) { 'should only be equal to ALL': function(all) {
assertThat(all).isEqualTo([levels.toLevel("ALL")]); assertThat(all).isEqualTo([levels.toLevel("ALL")]);
assertThat(all).isNotEqualTo([levels.TRACE, levels.DEBUG, levels.INFO, levels.WARN, levels.ERROR, levels.FATAL, levels.OFF]); assertThat(all).isNotEqualTo(
[
levels.TRACE,
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.OFF
]
);
} }
}, },
'TRACE': { 'TRACE': {
topic: levels.TRACE, topic: levels.TRACE,
'should be less than DEBUG': function(trace) { 'should be less than DEBUG': function(trace) {
assertThat(trace).isLessThanOrEqualTo([levels.DEBUG, levels.INFO, levels.WARN, levels.ERROR, levels.FATAL, levels.OFF]); assertThat(trace).isLessThanOrEqualTo(
[
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.OFF
]
);
assertThat(trace).isNotLessThanOrEqualTo([levels.ALL]); assertThat(trace).isNotLessThanOrEqualTo([levels.ALL]);
}, },
'should be greater than ALL': function(trace) { 'should be greater than ALL': function(trace) {
assertThat(trace).isGreaterThanOrEqualTo([levels.ALL, levels.TRACE]); assertThat(trace).isGreaterThanOrEqualTo([levels.ALL, levels.TRACE]);
assertThat(trace).isNotGreaterThanOrEqualTo([levels.DEBUG, levels.INFO, levels.WARN, levels.ERROR, levels.FATAL, levels.OFF]); assertThat(trace).isNotGreaterThanOrEqualTo(
[
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.OFF
]
);
}, },
'should only be equal to TRACE': function(trace) { 'should only be equal to TRACE': function(trace) {
assertThat(trace).isEqualTo([levels.toLevel("TRACE")]); assertThat(trace).isEqualTo([levels.toLevel("TRACE")]);
assertThat(trace).isNotEqualTo([levels.ALL, levels.DEBUG, levels.INFO, levels.WARN, levels.ERROR, levels.FATAL, levels.OFF]); assertThat(trace).isNotEqualTo(
[
levels.ALL,
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.OFF
]
);
} }
}, },
'DEBUG': { 'DEBUG': {
topic: levels.DEBUG, topic: levels.DEBUG,
'should be less than INFO': function(debug) { 'should be less than INFO': function(debug) {
assertThat(debug).isLessThanOrEqualTo([levels.INFO, levels.WARN, levels.ERROR, levels.FATAL, levels.OFF]); assertThat(debug).isLessThanOrEqualTo(
[
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.OFF
]
);
assertThat(debug).isNotLessThanOrEqualTo([levels.ALL, levels.TRACE]); assertThat(debug).isNotLessThanOrEqualTo([levels.ALL, levels.TRACE]);
}, },
'should be greater than TRACE': function(debug) { 'should be greater than TRACE': function(debug) {
assertThat(debug).isGreaterThanOrEqualTo([levels.ALL, levels.TRACE]); assertThat(debug).isGreaterThanOrEqualTo([levels.ALL, levels.TRACE]);
assertThat(debug).isNotGreaterThanOrEqualTo([levels.INFO, levels.WARN, levels.ERROR, levels.FATAL, levels.OFF]); assertThat(debug).isNotGreaterThanOrEqualTo(
[
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.OFF
]
);
}, },
'should only be equal to DEBUG': function(trace) { 'should only be equal to DEBUG': function(trace) {
assertThat(trace).isEqualTo([levels.toLevel("DEBUG")]); assertThat(trace).isEqualTo([levels.toLevel("DEBUG")]);
assertThat(trace).isNotEqualTo([levels.ALL, levels.TRACE, levels.INFO, levels.WARN, levels.ERROR, levels.FATAL, levels.OFF]); assertThat(trace).isNotEqualTo(
[
levels.ALL,
levels.TRACE,
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.OFF
]
);
} }
}, },
'INFO': { 'INFO': {
topic: levels.INFO, topic: levels.INFO,
'should be less than WARN': function(info) { 'should be less than WARN': function(info) {
assertThat(info).isLessThanOrEqualTo([levels.WARN, levels.ERROR, levels.FATAL, levels.OFF]); assertThat(info).isLessThanOrEqualTo([
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.OFF
]);
assertThat(info).isNotLessThanOrEqualTo([levels.ALL, levels.TRACE, levels.DEBUG]); assertThat(info).isNotLessThanOrEqualTo([levels.ALL, levels.TRACE, levels.DEBUG]);
}, },
'should be greater than DEBUG': function(info) { 'should be greater than DEBUG': function(info) {
assertThat(info).isGreaterThanOrEqualTo([levels.ALL, levels.TRACE, levels.DEBUG]); assertThat(info).isGreaterThanOrEqualTo([levels.ALL, levels.TRACE, levels.DEBUG]);
assertThat(info).isNotGreaterThanOrEqualTo([levels.WARN, levels.ERROR, levels.FATAL, levels.OFF]); assertThat(info).isNotGreaterThanOrEqualTo([
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.OFF
]);
}, },
'should only be equal to INFO': function(trace) { 'should only be equal to INFO': function(trace) {
assertThat(trace).isEqualTo([levels.toLevel("INFO")]); assertThat(trace).isEqualTo([levels.toLevel("INFO")]);
assertThat(trace).isNotEqualTo([levels.ALL, levels.TRACE, levels.DEBUG, levels.WARN, levels.ERROR, levels.FATAL, levels.OFF]); assertThat(trace).isNotEqualTo([
levels.ALL,
levels.TRACE,
levels.DEBUG,
levels.WARN,
levels.ERROR,
levels.FATAL,
levels.OFF
]);
} }
}, },
'WARN': { 'WARN': {
topic: levels.WARN, topic: levels.WARN,
'should be less than ERROR': function(warn) { 'should be less than ERROR': function(warn) {
assertThat(warn).isLessThanOrEqualTo([levels.ERROR, levels.FATAL, levels.OFF]); assertThat(warn).isLessThanOrEqualTo([levels.ERROR, levels.FATAL, levels.OFF]);
assertThat(warn).isNotLessThanOrEqualTo([levels.ALL, levels.TRACE, levels.DEBUG, levels.INFO]); assertThat(warn).isNotLessThanOrEqualTo([
levels.ALL,
levels.TRACE,
levels.DEBUG,
levels.INFO
]);
}, },
'should be greater than INFO': function(warn) { 'should be greater than INFO': function(warn) {
assertThat(warn).isGreaterThanOrEqualTo([levels.ALL, levels.TRACE, levels.DEBUG, levels.INFO]); assertThat(warn).isGreaterThanOrEqualTo([
levels.ALL,
levels.TRACE,
levels.DEBUG,
levels.INFO
]);
assertThat(warn).isNotGreaterThanOrEqualTo([levels.ERROR, levels.FATAL, levels.OFF]); assertThat(warn).isNotGreaterThanOrEqualTo([levels.ERROR, levels.FATAL, levels.OFF]);
}, },
'should only be equal to WARN': function(trace) { 'should only be equal to WARN': function(trace) {
assertThat(trace).isEqualTo([levels.toLevel("WARN")]); assertThat(trace).isEqualTo([levels.toLevel("WARN")]);
assertThat(trace).isNotEqualTo([levels.ALL, levels.TRACE, levels.DEBUG, levels.INFO, levels.ERROR, levels.FATAL, levels.OFF]); assertThat(trace).isNotEqualTo([
levels.ALL,
levels.TRACE,
levels.DEBUG,
levels.INFO,
levels.ERROR,
levels.FATAL,
levels.OFF
]);
} }
}, },
'ERROR': { 'ERROR': {
topic: levels.ERROR, topic: levels.ERROR,
'should be less than FATAL': function(error) { 'should be less than FATAL': function(error) {
assertThat(error).isLessThanOrEqualTo([levels.FATAL, levels.OFF]); assertThat(error).isLessThanOrEqualTo([levels.FATAL, levels.OFF]);
assertThat(error).isNotLessThanOrEqualTo([levels.ALL, levels.TRACE, levels.DEBUG, levels.INFO, levels.WARN]); assertThat(error).isNotLessThanOrEqualTo([
levels.ALL,
levels.TRACE,
levels.DEBUG,
levels.INFO,
levels.WARN
]);
}, },
'should be greater than WARN': function(error) { 'should be greater than WARN': function(error) {
assertThat(error).isGreaterThanOrEqualTo([levels.ALL, levels.TRACE, levels.DEBUG, levels.INFO, levels.WARN]); assertThat(error).isGreaterThanOrEqualTo([
levels.ALL,
levels.TRACE,
levels.DEBUG,
levels.INFO,
levels.WARN
]);
assertThat(error).isNotGreaterThanOrEqualTo([levels.FATAL, levels.OFF]); assertThat(error).isNotGreaterThanOrEqualTo([levels.FATAL, levels.OFF]);
}, },
'should only be equal to ERROR': function(trace) { 'should only be equal to ERROR': function(trace) {
assertThat(trace).isEqualTo([levels.toLevel("ERROR")]); assertThat(trace).isEqualTo([levels.toLevel("ERROR")]);
assertThat(trace).isNotEqualTo([levels.ALL, levels.TRACE, levels.DEBUG, levels.INFO, levels.WARN, levels.FATAL, levels.OFF]); assertThat(trace).isNotEqualTo([
levels.ALL,
levels.TRACE,
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.FATAL,
levels.OFF
]);
} }
}, },
'FATAL': { 'FATAL': {
topic: levels.FATAL, topic: levels.FATAL,
'should be less than OFF': function(fatal) { 'should be less than OFF': function(fatal) {
assertThat(fatal).isLessThanOrEqualTo([levels.OFF]); assertThat(fatal).isLessThanOrEqualTo([levels.OFF]);
assertThat(fatal).isNotLessThanOrEqualTo([levels.ALL, levels.TRACE, levels.DEBUG, levels.INFO, levels.WARN, levels.ERROR]); assertThat(fatal).isNotLessThanOrEqualTo([
levels.ALL,
levels.TRACE,
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.ERROR
]);
}, },
'should be greater than ERROR': function(fatal) { 'should be greater than ERROR': function(fatal) {
assertThat(fatal).isGreaterThanOrEqualTo([levels.ALL, levels.TRACE, levels.DEBUG, levels.INFO, levels.WARN, levels.ERROR]); assertThat(fatal).isGreaterThanOrEqualTo([
levels.ALL,
levels.TRACE,
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.ERROR
]);
assertThat(fatal).isNotGreaterThanOrEqualTo([levels.OFF]); assertThat(fatal).isNotGreaterThanOrEqualTo([levels.OFF]);
}, },
'should only be equal to FATAL': function(fatal) { 'should only be equal to FATAL': function(fatal) {
assertThat(fatal).isEqualTo([levels.toLevel("FATAL")]); assertThat(fatal).isEqualTo([levels.toLevel("FATAL")]);
assertThat(fatal).isNotEqualTo([levels.ALL, levels.TRACE, levels.DEBUG, levels.INFO, levels.WARN, levels.ERROR, levels.OFF]); assertThat(fatal).isNotEqualTo([
levels.ALL,
levels.TRACE,
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.ERROR,
levels.OFF
]);
} }
}, },
'OFF': { 'OFF': {
topic: levels.OFF, topic: levels.OFF,
'should not be less than anything': function(off) { 'should not be less than anything': function(off) {
assertThat(off).isNotLessThanOrEqualTo([levels.ALL, levels.TRACE, levels.DEBUG, levels.INFO, levels.WARN, levels.ERROR, levels.FATAL]); assertThat(off).isNotLessThanOrEqualTo([
levels.ALL,
levels.TRACE,
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL
]);
}, },
'should be greater than everything': function(off) { 'should be greater than everything': function(off) {
assertThat(off).isGreaterThanOrEqualTo([levels.ALL, levels.TRACE, levels.DEBUG, levels.INFO, levels.WARN, levels.ERROR, levels.FATAL]); assertThat(off).isGreaterThanOrEqualTo([
levels.ALL,
levels.TRACE,
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL
]);
}, },
'should only be equal to OFF': function(off) { 'should only be equal to OFF': function(off) {
assertThat(off).isEqualTo([levels.toLevel("OFF")]); assertThat(off).isEqualTo([levels.toLevel("OFF")]);
assertThat(off).isNotEqualTo([levels.ALL, levels.TRACE, levels.DEBUG, levels.INFO, levels.WARN, levels.ERROR, levels.FATAL]); assertThat(off).isNotEqualTo([
levels.ALL,
levels.TRACE,
levels.DEBUG,
levels.INFO,
levels.WARN,
levels.ERROR,
levels.FATAL
]);
} }
} }
}, },
@@ -175,6 +363,12 @@ vows.describe('levels').addBatch({
assertThat(info).isLessThanOrEqualTo(['warn', 'ERROR', 'Fatal', 'off']); assertThat(info).isLessThanOrEqualTo(['warn', 'ERROR', 'Fatal', 'off']);
} }
}, },
'isEqualTo': {
topic: levels.INFO,
'should handle string arguments': function(info) {
assertThat(info).isEqualTo(["info", "INFO", "iNfO"]);
}
},
'toLevel': { 'toLevel': {
'with lowercase argument': { 'with lowercase argument': {
topic: levels.toLevel("debug"), topic: levels.toLevel("debug"),

View File

@@ -1,3 +1,4 @@
"use strict";
var vows = require('vows') var vows = require('vows')
, assert = require('assert') , assert = require('assert')
, sandbox = require('sandboxed-module'); , sandbox = require('sandboxed-module');
@@ -10,8 +11,7 @@ vows.describe('log4js-abspath').addBatch({
'../lib/log4js', '../lib/log4js',
{ requires: { requires:
{ './appenders/fake': { './appenders/fake':
{ { name: "fake",
name: "fake",
appender: function() {}, appender: function() {},
configure: function(configuration, options) { configure: function(configuration, options) {
appenderOptions = options; appenderOptions = options;
@@ -47,8 +47,8 @@ vows.describe('log4js-abspath').addBatch({
'../lib/appenders/file', '../lib/appenders/file',
{ requires: { requires:
{ '../streams': { '../streams':
{ { RollingFileStream:
RollingFileStream: function(file) { function(file) {
fileOpened = file; fileOpened = file;
return { return {
on: function() {}, on: function() {},
@@ -59,7 +59,13 @@ vows.describe('log4js-abspath').addBatch({
} }
} }
); );
fileAppender.configure({ filename: "whatever.log", maxLogSize: 10 }, { cwd: '/absolute/path/to' }); fileAppender.configure(
{
filename: "whatever.log",
maxLogSize: 10
},
{ cwd: '/absolute/path/to' }
);
return fileOpened; return fileOpened;
}, },
'should prepend options.cwd to config.filename': function(fileOpened) { 'should prepend options.cwd to config.filename': function(fileOpened) {

View File

@@ -1,3 +1,4 @@
"use strict";
var vows = require('vows') var vows = require('vows')
, fs = require('fs') , fs = require('fs')
, assert = require('assert'); , assert = require('assert');
@@ -15,7 +16,15 @@ vows.describe('log4js logLevelFilter').addBatch({
topic: function() { topic: function() {
var log4js = require('../lib/log4js'), logEvents = [], logger; var log4js = require('../lib/log4js'), logEvents = [], logger;
log4js.clearAppenders(); log4js.clearAppenders();
log4js.addAppender(require('../lib/appenders/logLevelFilter').appender('ERROR', function(evt) { logEvents.push(evt); }), "logLevelTest"); log4js.addAppender(
require('../lib/appenders/logLevelFilter')
.appender(
'ERROR',
function(evt) { logEvents.push(evt); }
),
"logLevelTest"
);
logger = log4js.getLogger("logLevelTest"); logger = log4js.getLogger("logLevelTest");
logger.debug('this should not trigger an event'); logger.debug('this should not trigger an event');
logger.warn('neither should this'); logger.warn('neither should this');

81
test/logger-test.js Normal file
View File

@@ -0,0 +1,81 @@
"use strict";
var vows = require('vows')
, assert = require('assert')
, levels = require('../lib/levels')
, loggerModule = require('../lib/logger')
, Logger = loggerModule.Logger;
vows.describe('../lib/logger').addBatch({
'constructor with no parameters': {
topic: new Logger(),
'should use default category': function(logger) {
assert.equal(logger.category, Logger.DEFAULT_CATEGORY);
},
'should use TRACE log level': function(logger) {
assert.equal(logger.level, levels.TRACE);
}
},
'constructor with category': {
topic: new Logger('cheese'),
'should use category': function(logger) {
assert.equal(logger.category, 'cheese');
},
'should use TRACE log level': function(logger) {
assert.equal(logger.level, levels.TRACE);
}
},
'constructor with category and level': {
topic: new Logger('cheese', 'debug'),
'should use category': function(logger) {
assert.equal(logger.category, 'cheese');
},
'should use level': function(logger) {
assert.equal(logger.level, levels.DEBUG);
}
},
'isLevelEnabled': {
topic: new Logger('cheese', 'info'),
'should provide a level enabled function for all levels': function(logger) {
assert.isFunction(logger.isTraceEnabled);
assert.isFunction(logger.isDebugEnabled);
assert.isFunction(logger.isInfoEnabled);
assert.isFunction(logger.isWarnEnabled);
assert.isFunction(logger.isErrorEnabled);
assert.isFunction(logger.isFatalEnabled);
},
'should return the right values': function(logger) {
assert.isFalse(logger.isTraceEnabled());
assert.isFalse(logger.isDebugEnabled());
assert.isTrue(logger.isInfoEnabled());
assert.isTrue(logger.isWarnEnabled());
assert.isTrue(logger.isErrorEnabled());
assert.isTrue(logger.isFatalEnabled());
}
},
'should emit log events': {
topic: function() {
var events = [],
logger = new Logger();
logger.addListener('log', function (logEvent) { events.push(logEvent); });
logger.debug('Event 1');
loggerModule.disableAllLogWrites();
logger.debug('Event 2');
loggerModule.enableAllLogWrites();
logger.debug('Event 3');
return events;
},
'when log writes are enabled': function(events) {
assert.equal(events[0].data[0], 'Event 1');
},
'but not when log writes are disabled': function(events) {
assert.equal(events.length, 2);
assert.equal(events[1].data[0], 'Event 3');
}
}
}).exportTo(module);

View File

@@ -1,3 +1,4 @@
"use strict";
var vows = require('vows') var vows = require('vows')
, assert = require('assert') , assert = require('assert')
, sandbox = require('sandboxed-module'); , sandbox = require('sandboxed-module');
@@ -14,8 +15,8 @@ function setupConsoleTest() {
}); });
log4js = sandbox.require( log4js = sandbox.require(
'../lib/log4js' '../lib/log4js',
, { {
globals: { globals: {
console: fakeConsole console: fakeConsole
} }
@@ -74,13 +75,65 @@ vows.describe('log4js').addBatch({
assert.equal(events[1].level.toString(), 'WARN'); assert.equal(events[1].level.toString(), 'WARN');
}, },
'should include the error if passed in': function (events) { 'should include the error if passed in': function(events) {
assert.instanceOf(events[2].data[1], Error); assert.instanceOf(events[2].data[1], Error);
assert.equal(events[2].data[1].message, 'Pants are on fire!'); assert.equal(events[2].data[1].message, 'Pants are on fire!');
} }
}
}, },
'when shutdown is called': {
topic: function() {
// Flags flipped by the fake appender's shutdown and by our own callback,
// so the vows below can verify that both were invoked.
var events = {
appenderShutdownCalled: false,
shutdownCallbackCalled: false
},
log4js = sandbox.require(
'../lib/log4js',
{
requires: {
'./appenders/file':
{
name: "file",
appender: function() {},
configure: function(configuration) {
return function() {};
},
// Fake appender shutdown: record the call, then invoke the
// completion callback synchronously so the topic can return.
shutdown: function(cb) {
events.appenderShutdownCalled = true;
cb();
}
}
}
}
),
shutdownCallback = function() {
events.shutdownCallbackCalled = true;
},
config = { appenders:
[ { "type" : "file",
"filename" : "cheesy-wotsits.log",
"maxLogSize" : 1024,
"backups" : 3
}
]
};
log4js.configure(config);
log4js.shutdown(shutdownCallback);
// Re-enable log writing so other tests that use logger are not
// affected.
require('../lib/logger').enableAllLogWrites();
return events;
},
'should invoke appender shutdowns': function(events) {
assert.ok(events.appenderShutdownCalled);
},
'should call callback': function(events) {
assert.ok(events.shutdownCallbackCalled);
}
}
}, },
'invalid configuration': { 'invalid configuration': {
@@ -93,26 +146,25 @@ vows.describe('log4js').addBatch({
'configuration when passed as object': { 'configuration when passed as object': {
topic: function() { topic: function() {
var appenderConfig var appenderConfig,
, log4js = sandbox.require( log4js = sandbox.require(
'../lib/log4js' '../lib/log4js',
, { requires:
{ './appenders/file':
{ {
name: "file" requires: {
, appender: function() {} './appenders/file':
, configure: function(configuration) { {
name: "file",
appender: function() {},
configure: function(configuration) {
appenderConfig = configuration; appenderConfig = configuration;
return function() {}; return function() {};
} }
} }
} }
} }
) ),
, config = { config = { appenders:
"appenders": [ [ { "type" : "file",
{
"type" : "file",
"filename" : "cheesy-wotsits.log", "filename" : "cheesy-wotsits.log",
"maxLogSize" : 1024, "maxLogSize" : 1024,
"backups" : 3 "backups" : 3
@@ -127,19 +179,56 @@ vows.describe('log4js').addBatch({
} }
}, },
'configuration that causes an error': {
  topic: function() {
    // Sandbox log4js with a file appender whose configure() always throws,
    // to verify that log4js.configure() wraps appender errors in a
    // meaningful message.
    var log4js = sandbox.require(
      '../lib/log4js',
      {
        requires: {
          './appenders/file':
          {
            name: "file",
            appender: function() {},
            configure: function(configuration) {
              throw new Error("oh noes");
            }
          }
        }
      }
    ),
    config = { appenders:
      [ { "type" : "file",
          "filename" : "cheesy-wotsits.log",
          "maxLogSize" : 1024,
          "backups" : 3
        }
      ]
    };
    try {
      log4js.configure(config);
    } catch (e) {
      return e;
    }
    // If configure() swallowed the appender error, fail the vow with a
    // clear message instead of a TypeError on an undefined topic.
    return new Error('log4js.configure should have thrown');
  },
  'should wrap error in a meaningful message': function(e) {
    assert.ok(e.message.indexOf('log4js configuration problem for') > -1);
  }
},
'configuration when passed as filename': { 'configuration when passed as filename': {
topic: function() { topic: function() {
var appenderConfig var appenderConfig,
, configFilename configFilename,
, log4js = sandbox.require( log4js = sandbox.require(
'../lib/log4js' '../lib/log4js',
, { requires: { requires:
{ 'fs': { 'fs':
{ { statSync:
statSync: function() { function() {
return { mtime: Date.now() }; return { mtime: Date.now() };
}, },
readFileSync: function(filename) { readFileSync:
function(filename) {
configFilename = filename; configFilename = filename;
return JSON.stringify({ return JSON.stringify({
appenders: [ appenders: [
@@ -149,15 +238,15 @@ vows.describe('log4js').addBatch({
] ]
}); });
}, },
readdirSync: function() { readdirSync:
function() {
return ['file']; return ['file'];
} }
} },
, './appenders/file': './appenders/file':
{ { name: "file",
name: "file" appender: function() {},
, appender: function() {} configure: function(configuration) {
, configure: function(configuration) {
appenderConfig = configuration; appenderConfig = configuration;
return function() {}; return function() {};
} }
@@ -178,22 +267,22 @@ vows.describe('log4js').addBatch({
'with no appenders defined' : { 'with no appenders defined' : {
topic: function() { topic: function() {
var logger var logger,
, that = this that = this,
, fakeConsoleAppender = { fakeConsoleAppender = {
name: "console" name: "console",
, appender: function() { appender: function() {
return function(evt) { return function(evt) {
that.callback(null, evt); that.callback(null, evt);
} };
} },
, configure: function() { configure: function() {
return fakeConsoleAppender.appender(); return fakeConsoleAppender.appender();
} }
} },
, log4js = sandbox.require( log4js = sandbox.require(
'../lib/log4js' '../lib/log4js',
, { {
requires: { requires: {
'./appenders/console': fakeConsoleAppender './appenders/console': fakeConsoleAppender
} }
@@ -215,15 +304,22 @@ vows.describe('log4js').addBatch({
}, },
'without a category': { 'without a category': {
'should register the function as a listener for all loggers': function (log4js) { 'should register the function as a listener for all loggers': function (log4js) {
var appenderEvent, appender = function(evt) { appenderEvent = evt; }, logger = log4js.getLogger("tests"); var appenderEvent,
appender = function(evt) { appenderEvent = evt; },
logger = log4js.getLogger("tests");
log4js.addAppender(appender); log4js.addAppender(appender);
logger.debug("This is a test"); logger.debug("This is a test");
assert.equal(appenderEvent.data[0], "This is a test"); assert.equal(appenderEvent.data[0], "This is a test");
assert.equal(appenderEvent.categoryName, "tests"); assert.equal(appenderEvent.categoryName, "tests");
assert.equal(appenderEvent.level.toString(), "DEBUG"); assert.equal(appenderEvent.level.toString(), "DEBUG");
}, },
'should also register as an appender for loggers if an appender for that category is defined': function (log4js) { 'if an appender for a category is defined': {
var otherEvent, appenderEvent, cheeseLogger; 'should register for that category': function (log4js) {
var otherEvent,
appenderEvent,
cheeseLogger;
log4js.addAppender(function (evt) { appenderEvent = evt; }); log4js.addAppender(function (evt) { appenderEvent = evt; });
log4js.addAppender(function (evt) { otherEvent = evt; }, 'cheese'); log4js.addAppender(function (evt) { otherEvent = evt; }, 'cheese');
@@ -239,11 +335,15 @@ vows.describe('log4js').addBatch({
assert.isUndefined(otherEvent); assert.isUndefined(otherEvent);
assert.equal(appenderEvent.data[0], "this should not be propagated to otherEvent"); assert.equal(appenderEvent.data[0], "this should not be propagated to otherEvent");
} }
}
}, },
'with a category': { 'with a category': {
'should only register the function as a listener for that category': function(log4js) { 'should only register the function as a listener for that category': function(log4js) {
var appenderEvent, appender = function(evt) { appenderEvent = evt; }, logger = log4js.getLogger("tests"); var appenderEvent,
appender = function(evt) { appenderEvent = evt; },
logger = log4js.getLogger("tests");
log4js.addAppender(appender, 'tests'); log4js.addAppender(appender, 'tests');
logger.debug('this is a category test'); logger.debug('this is a category test');
assert.equal(appenderEvent.data[0], 'this is a category test'); assert.equal(appenderEvent.data[0], 'this is a category test');
@@ -256,7 +356,10 @@ vows.describe('log4js').addBatch({
'with multiple categories': { 'with multiple categories': {
'should register the function as a listener for all the categories': function(log4js) { 'should register the function as a listener for all the categories': function(log4js) {
var appenderEvent, appender = function(evt) { appenderEvent = evt; }, logger = log4js.getLogger('tests'); var appenderEvent,
appender = function(evt) { appenderEvent = evt; },
logger = log4js.getLogger('tests');
log4js.addAppender(appender, 'tests', 'biscuits'); log4js.addAppender(appender, 'tests', 'biscuits');
logger.debug('this is a test'); logger.debug('this is a test');
@@ -273,7 +376,9 @@ vows.describe('log4js').addBatch({
assert.isUndefined(appenderEvent); assert.isUndefined(appenderEvent);
}, },
'should register the function when the list of categories is an array': function(log4js) { 'should register the function when the list of categories is an array': function(log4js) {
var appenderEvent, appender = function(evt) { appenderEvent = evt; }; var appenderEvent,
appender = function(evt) { appenderEvent = evt; };
log4js.addAppender(appender, ['tests', 'pants']); log4js.addAppender(appender, ['tests', 'pants']);
log4js.getLogger('tests').debug('this is a test'); log4js.getLogger('tests').debug('this is a test');
@@ -296,13 +401,13 @@ vows.describe('log4js').addBatch({
topic: function() { topic: function() {
var appenderEvents = [], var appenderEvents = [],
fakeConsole = { fakeConsole = {
'name': 'console' 'name': 'console',
, 'appender': function () { 'appender': function () {
return function(evt) { return function(evt) {
appenderEvents.push(evt); appenderEvents.push(evt);
} };
} },
, 'configure': function (config) { 'configure': function (config) {
return fakeConsole.appender(); return fakeConsole.appender();
} }
}, },
@@ -388,8 +493,11 @@ vows.describe('log4js').addBatch({
assert.instanceOf(err, Error); assert.instanceOf(err, Error);
assert.equal(err.message, "this should not be called."); assert.equal(err.message, "this should not be called.");
} }
}
}, },
'configuration': { 'console configuration': {
topic: setupConsoleTest,
'when disabled': {
topic: function(test) { topic: function(test) {
test.log4js.replaceConsole(); test.log4js.replaceConsole();
test.log4js.configure({ replaceConsole: false }); test.log4js.configure({ replaceConsole: false });
@@ -403,6 +511,25 @@ vows.describe('log4js').addBatch({
assert.instanceOf(err, Error); assert.instanceOf(err, Error);
assert.equal(err.message, 'this should not be called.'); assert.equal(err.message, 'this should not be called.');
} }
},
'when enabled': {
topic: function(test) {
test.log4js.restoreConsole();
test.log4js.configure({ replaceConsole: true });
//log4js.configure clears all appenders
test.log4js.addAppender(function(evt) {
test.logEvents.push(evt);
});
test.fakeConsole.debug("Some debug");
return test.logEvents;
},
'should allow for turning on console replacement': function (logEvents) {
assert.equal(logEvents.length, 1);
assert.equal(logEvents[0].level.toString(), "DEBUG");
assert.equal(logEvents[0].data[0], "Some debug");
}
} }
}, },
'configuration persistence' : { 'configuration persistence' : {
@@ -423,156 +550,15 @@ vows.describe('log4js').addBatch({
assert.equal(logEvent.data[0], "This should go to the appender defined in firstLog4js"); assert.equal(logEvent.data[0], "This should go to the appender defined in firstLog4js");
} }
}, },
'configuration reload with configuration changing' : {
'getDefaultLogger': {
topic: function() { topic: function() {
var pathsChecked = [], return require('../lib/log4js').getDefaultLogger();
logEvents = [],
logger,
modulePath = 'path/to/log4js.json',
fakeFS = {
lastMtime: Date.now(),
config: { appenders: [ { type: 'console', layout: { type: 'messagePassThrough' } } ],
levels: { 'a-test' : 'INFO' } },
readdirSync: function(dir) {
return require('fs').readdirSync(dir);
}, },
readFileSync: function (file, encoding) { 'should return a logger': function(logger) {
assert.equal(file, modulePath); assert.ok(logger.info);
assert.equal(encoding, 'utf8'); assert.ok(logger.debug);
return JSON.stringify(fakeFS.config); assert.ok(logger.error);
},
statSync: function (path) {
pathsChecked.push(path);
if (path === modulePath) {
fakeFS.lastMtime += 1;
return { mtime: new Date(fakeFS.lastMtime) };
} else {
throw new Error("no such file");
}
}
},
fakeConsole = {
'name': 'console',
'appender': function () {
return function(evt) { logEvents.push(evt); };
},
'configure': function (config) {
return fakeConsole.appender();
}
},
setIntervalCallback,
fakeSetInterval = function(cb, timeout) {
setIntervalCallback = cb;
},
log4js = sandbox.require(
'../lib/log4js',
{
requires: {
'fs': fakeFS,
'./appenders/console': fakeConsole
},
globals: {
'console': fakeConsole,
'setInterval' : fakeSetInterval,
}
}
);
log4js.configure('path/to/log4js.json', { reloadSecs: 30 });
logger = log4js.getLogger('a-test');
logger.info("info1");
logger.debug("debug2 - should be ignored");
fakeFS.config.levels['a-test'] = "DEBUG";
setIntervalCallback();
logger.info("info3");
logger.debug("debug4");
return logEvents;
},
'should configure log4js from first log4js.json found': function(logEvents) {
assert.equal(logEvents[0].data[0], 'info1');
assert.equal(logEvents[1].data[0], 'info3');
assert.equal(logEvents[2].data[0], 'debug4');
assert.equal(logEvents.length, 3);
}
},
'configuration reload with configuration staying the same' : {
topic: function() {
var pathsChecked = [],
fileRead = 0,
logEvents = [],
logger,
modulePath = require('path').normalize(__dirname + '/../lib/log4js.json'),
mtime = new Date(),
fakeFS = {
config: { appenders: [ { type: 'console', layout: { type: 'messagePassThrough' } } ],
levels: { 'a-test' : 'INFO' } },
readdirSync: function(dir) {
return require('fs').readdirSync(dir);
},
readFileSync: function (file, encoding) {
fileRead += 1;
assert.isString(file);
assert.equal(file, modulePath);
assert.equal(encoding, 'utf8');
return JSON.stringify(fakeFS.config);
},
statSync: function (path) {
pathsChecked.push(path);
if (path === modulePath) {
return { mtime: mtime };
} else {
throw new Error("no such file");
}
}
},
fakeConsole = {
'name': 'console',
'appender': function () {
return function(evt) { logEvents.push(evt); };
},
'configure': function (config) {
return fakeConsole.appender();
}
},
setIntervalCallback,
fakeSetInterval = function(cb, timeout) {
setIntervalCallback = cb;
},
log4js = sandbox.require(
'../lib/log4js',
{
requires: {
'fs': fakeFS,
'./appenders/console': fakeConsole
},
globals: {
'console': fakeConsole,
'setInterval' : fakeSetInterval,
}
}
);
log4js.configure(modulePath, { reloadSecs: 3 });
logger = log4js.getLogger('a-test');
logger.info("info1");
logger.debug("debug2 - should be ignored");
setIntervalCallback();
logger.info("info3");
logger.debug("debug4");
return [ pathsChecked, logEvents, modulePath, fileRead ];
},
'should only read the configuration file once': function(args) {
var fileRead = args[3];
assert.equal(fileRead, 1);
},
'should configure log4js from first log4js.json found': function(args) {
var logEvents = args[1];
assert.equal(logEvents.length, 2);
assert.equal(logEvents[0].data[0], 'info1');
assert.equal(logEvents[1].data[0], 'info3');
} }
} }
}).export(module); }).export(module);

View File

@@ -1,6 +1,8 @@
var vows = require('vows'), "use strict";
sandbox = require('sandboxed-module'), var vows = require('vows')
assert = require('assert'); , sandbox = require('sandboxed-module')
, assert = require('assert')
;
function makeFakeNet() { function makeFakeNet() {
return { return {
@@ -67,11 +69,11 @@ vows.describe('Multiprocess Appender').addBatch({
//don't need a proper log event for the worker tests //don't need a proper log event for the worker tests
appender('before connect'); appender('before connect');
fakeNet.cbs['connect'](); fakeNet.cbs.connect();
appender('after connect'); appender('after connect');
fakeNet.cbs['close'](true); fakeNet.cbs.close(true);
appender('after error, before connect'); appender('after error, before connect');
fakeNet.cbs['connect'](); fakeNet.cbs.connect();
appender('after error, after connect'); appender('after error, after connect');
return fakeNet; return fakeNet;
@@ -112,13 +114,13 @@ vows.describe('Multiprocess Appender').addBatch({
//don't need a proper log event for the worker tests //don't need a proper log event for the worker tests
appender('before connect'); appender('before connect');
fakeNet.cbs['connect'](); fakeNet.cbs.connect();
appender('after connect'); appender('after connect');
fakeNet.cbs['timeout'](); fakeNet.cbs.timeout();
appender('after timeout, before close'); appender('after timeout, before close');
fakeNet.cbs['close'](); fakeNet.cbs.close();
appender('after close, before connect'); appender('after close, before connect');
fakeNet.cbs['connect'](); fakeNet.cbs.connect();
appender('after close, after connect'); appender('after close, after connect');
return fakeNet; return fakeNet;
@@ -181,14 +183,27 @@ vows.describe('Multiprocess Appender').addBatch({
}, },
'when a client connects': { 'when a client connects': {
topic: function(net) { topic: function(net) {
var logString = JSON.stringify({ level: { level: 10000, levelStr: 'DEBUG' }, data: ['some debug']}) + '__LOG4JS__'; var logString = JSON.stringify(
{ level: { level: 10000, levelStr: 'DEBUG' }
, data: ['some debug']}
) + '__LOG4JS__';
net.cbs['data'](JSON.stringify({ level: { level: 40000, levelStr: 'ERROR' }, data: ['an error message'] }) + '__LOG4JS__'); net.cbs.data(
net.cbs['data'](logString.substring(0, 10)); JSON.stringify(
net.cbs['data'](logString.substring(10)); { level: { level: 40000, levelStr: 'ERROR' }
net.cbs['data'](logString + logString + logString); , data: ['an error message'] }
net.cbs['end'](JSON.stringify({ level: { level: 50000, levelStr: 'FATAL' }, data: ["that's all folks"] }) + '__LOG4JS__'); ) + '__LOG4JS__'
net.cbs['data']('bad message__LOG4JS__'); );
net.cbs.data(logString.substring(0, 10));
net.cbs.data(logString.substring(10));
net.cbs.data(logString + logString + logString);
net.cbs.end(
JSON.stringify(
{ level: { level: 50000, levelStr: 'FATAL' }
, data: ["that's all folks"] }
) + '__LOG4JS__'
);
net.cbs.data('bad message__LOG4JS__');
return net; return net;
}, },
'should parse log messages into log events and send to appender': function(net) { 'should parse log messages into log events and send to appender': function(net) {
@@ -238,4 +253,51 @@ vows.describe('Multiprocess Appender').addBatch({
assert.equal(net.host, 'localhost'); assert.equal(net.host, 'localhost');
} }
} }
}).addBatch({
'configure': {
topic: function() {
var results = {}
, fakeNet = makeFakeNet()
, appender = sandbox.require(
'../lib/appenders/multiprocess',
{
requires: {
'net': fakeNet,
'../log4js': {
loadAppender: function(app) {
results.appenderLoaded = app;
},
appenderMakers: {
'madeupappender': function(config, options) {
results.config = config;
results.options = options;
}
}
}
}
}
).configure(
{
mode: 'master',
appender: {
type: 'madeupappender',
cheese: 'gouda'
}
},
{ crackers: 'jacobs' }
);
return results;
},
'should load underlying appender for master': function(results) {
assert.equal(results.appenderLoaded, 'madeupappender');
},
'should pass config to underlying appender': function(results) {
assert.equal(results.config.cheese, 'gouda');
},
'should pass options to underlying appender': function(results) {
assert.equal(results.options.crackers, 'jacobs');
}
}
}).exportTo(module); }).exportTo(module);

View File

@@ -1,3 +1,4 @@
"use strict";
var vows = require('vows') var vows = require('vows')
, assert = require('assert') , assert = require('assert')
, levels = require('../lib/levels'); , levels = require('../lib/levels');
@@ -63,8 +64,8 @@ vows.describe('log4js connect logger').addBatch({
d.cl(req, res, function() { }); d.cl(req, res, function() { });
res.end('chunk', 'encoding'); res.end('chunk', 'encoding');
return d.ml.messages; return d.ml.messages;
} },
, 'check message': function(messages){ 'check message': function(messages){
assert.isArray(messages); assert.isArray(messages);
assert.equal(messages.length, 1); assert.equal(messages.length, 1);
assert.ok(levels.INFO.isEqualTo(messages[0].level)); assert.ok(levels.INFO.isEqualTo(messages[0].level));
@@ -83,8 +84,8 @@ vows.describe('log4js connect logger').addBatch({
d.cl(req, res, function() { }); d.cl(req, res, function() { });
res.end('chunk', 'encoding'); res.end('chunk', 'encoding');
return d.ml.messages; return d.ml.messages;
} },
, 'check message': function(messages) { 'check message': function(messages) {
assert.isArray(messages); assert.isArray(messages);
assert.equal(messages.length, 0); assert.equal(messages.length, 0);
} }
@@ -105,8 +106,8 @@ vows.describe('log4js connect logger').addBatch({
d.cl(req, res, function() { }); d.cl(req, res, function() { });
res.end('chunk', 'encoding'); res.end('chunk', 'encoding');
return d.ml.messages; return d.ml.messages;
} },
, 'check message': function(messages){ 'check message': function(messages){
assert.isArray(messages); assert.isArray(messages);
assert.equal(messages.length, 1); assert.equal(messages.length, 1);
assert.ok(levels.INFO.isEqualTo(messages[0].level)); assert.ok(levels.INFO.isEqualTo(messages[0].level));
@@ -125,8 +126,8 @@ vows.describe('log4js connect logger').addBatch({
d.cl(req, res, function() { }); d.cl(req, res, function() { });
res.end('chunk', 'encoding'); res.end('chunk', 'encoding');
return d.ml.messages; return d.ml.messages;
} },
, 'check message': function(messages) { 'check message': function(messages) {
assert.isArray(messages); assert.isArray(messages);
assert.equal(messages.length, 0); assert.equal(messages.length, 0);
} }
@@ -138,8 +139,8 @@ vows.describe('log4js connect logger').addBatch({
d.cl(req, res, function() { }); d.cl(req, res, function() { });
res.end('chunk', 'encoding'); res.end('chunk', 'encoding');
return d.ml.messages; return d.ml.messages;
} },
, 'check message': function(messages) { 'check message': function(messages) {
assert.isArray(messages); assert.isArray(messages);
assert.equal(messages.length, 0); assert.equal(messages.length, 0);
} }
@@ -159,8 +160,8 @@ vows.describe('log4js connect logger').addBatch({
d.cl(req, res, function() { }); d.cl(req, res, function() { });
res.end('chunk', 'encoding'); res.end('chunk', 'encoding');
return d.ml.messages; return d.ml.messages;
} },
, 'check message': function(messages){ 'check message': function(messages){
assert.isArray(messages); assert.isArray(messages);
assert.equal(messages.length, 1); assert.equal(messages.length, 1);
assert.ok(levels.INFO.isEqualTo(messages[0].level)); assert.ok(levels.INFO.isEqualTo(messages[0].level));
@@ -179,8 +180,8 @@ vows.describe('log4js connect logger').addBatch({
d.cl(req, res, function() { }); d.cl(req, res, function() { });
res.end('chunk', 'encoding'); res.end('chunk', 'encoding');
return d.ml.messages; return d.ml.messages;
} },
, 'check message': function(messages) { 'check message': function(messages) {
assert.isArray(messages); assert.isArray(messages);
assert.equal(messages.length, 0); assert.equal(messages.length, 0);
} }
@@ -193,8 +194,8 @@ vows.describe('log4js connect logger').addBatch({
d.cl(req, res, function() { }); d.cl(req, res, function() { });
res.end('chunk', 'encoding'); res.end('chunk', 'encoding');
return d.ml.messages; return d.ml.messages;
} },
, 'check message': function(messages) { 'check message': function(messages) {
assert.isArray(messages); assert.isArray(messages);
assert.equal(messages.length, 0); assert.equal(messages.length, 0);
} }
@@ -214,8 +215,8 @@ vows.describe('log4js connect logger').addBatch({
d.cl(req, res, function() { }); d.cl(req, res, function() { });
res.end('chunk', 'encoding'); res.end('chunk', 'encoding');
return d.ml.messages; return d.ml.messages;
} },
, 'check message': function(messages){ 'check message': function(messages){
assert.isArray(messages); assert.isArray(messages);
assert.equal(messages.length, 1); assert.equal(messages.length, 1);
assert.ok(levels.INFO.isEqualTo(messages[0].level)); assert.ok(levels.INFO.isEqualTo(messages[0].level));
@@ -234,8 +235,8 @@ vows.describe('log4js connect logger').addBatch({
d.cl(req, res, function() { }); d.cl(req, res, function() { });
res.end('chunk', 'encoding'); res.end('chunk', 'encoding');
return d.ml.messages; return d.ml.messages;
} },
, 'check message': function(messages) { 'check message': function(messages) {
assert.isArray(messages); assert.isArray(messages);
assert.equal(messages.length, 0); assert.equal(messages.length, 0);
} }
@@ -248,12 +249,12 @@ vows.describe('log4js connect logger').addBatch({
d.cl(req, res, function() { }); d.cl(req, res, function() { });
res.end('chunk', 'encoding'); res.end('chunk', 'encoding');
return d.ml.messages; return d.ml.messages;
} },
, 'check message': function(messages) { 'check message': function(messages) {
assert.isArray(messages); assert.isArray(messages);
assert.equal(messages.length, 0); assert.equal(messages.length, 0);
} }
}, }
} }
} }

View File

@@ -0,0 +1,340 @@
"use strict";
var vows = require('vows')
, assert = require('assert')
, sandbox = require('sandboxed-module');
// Build a sandboxed log4js whose global console throws on any use, plus an
// in-memory appender that collects events.
// Returns { log4js, logEvents, fakeConsole }.
function setupConsoleTest() {
  var fakeConsole = {};
  var logEvents = [];

  // Every console method must blow up if log4js ever calls through to it.
  ['trace', 'debug', 'log', 'info', 'warn', 'error'].forEach(function(fn) {
    fakeConsole[fn] = function() {
      throw new Error("this should not be called.");
    };
  });

  var log4js = sandbox.require(
    '../lib/log4js',
    { globals: { console: fakeConsole } }
  );
  log4js.clearAppenders();
  log4js.addAppender(function(evt) {
    logEvents.push(evt);
  });

  return { log4js: log4js, logEvents: logEvents, fakeConsole: fakeConsole };
}
vows.describe('reload configuration').addBatch({
'with config file changing' : {
topic: function() {
// Fake fs whose statSync reports a newer mtime on every check, so the
// reload timer always sees the config file as modified and re-reads it.
var pathsChecked = [],
logEvents = [],
logger,
modulePath = 'path/to/log4js.json',
fakeFS = {
lastMtime: Date.now(),
config: {
appenders: [
{ type: 'console', layout: { type: 'messagePassThrough' } }
],
levels: { 'a-test' : 'INFO' }
},
readFileSync: function (file, encoding) {
assert.equal(file, modulePath);
assert.equal(encoding, 'utf8');
return JSON.stringify(fakeFS.config);
},
statSync: function (path) {
pathsChecked.push(path);
if (path === modulePath) {
// Bump mtime so the watcher treats the file as changed.
fakeFS.lastMtime += 1;
return { mtime: new Date(fakeFS.lastMtime) };
} else {
throw new Error("no such file");
}
}
},
fakeConsole = {
'name': 'console',
'appender': function () {
return function(evt) { logEvents.push(evt); };
},
'configure': function (config) {
return fakeConsole.appender();
}
},
setIntervalCallback,
// Capture the reload callback instead of scheduling a real timer.
fakeSetInterval = function(cb, timeout) {
setIntervalCallback = cb;
},
log4js = sandbox.require(
'../lib/log4js',
{
requires: {
'fs': fakeFS,
'./appenders/console': fakeConsole
},
globals: {
'console': fakeConsole,
'setInterval' : fakeSetInterval,
}
}
);
log4js.configure('path/to/log4js.json', { reloadSecs: 30 });
logger = log4js.getLogger('a-test');
logger.info("info1");
logger.debug("debug2 - should be ignored");
// Raise the level in the fake config, then fire the reload manually;
// debug messages should start getting through afterwards.
fakeFS.config.levels['a-test'] = "DEBUG";
setIntervalCallback();
logger.info("info3");
logger.debug("debug4");
return logEvents;
},
'should configure log4js from first log4js.json found': function(logEvents) {
assert.equal(logEvents[0].data[0], 'info1');
assert.equal(logEvents[1].data[0], 'info3');
assert.equal(logEvents[2].data[0], 'debug4');
assert.equal(logEvents.length, 3);
}
},
'with config file staying the same' : {
topic: function() {
// Fake fs with a constant mtime: the reload timer should see the file
// as unchanged and never re-read it after the initial load.
var pathsChecked = [],
fileRead = 0,
logEvents = [],
logger,
modulePath = require('path').normalize(__dirname + '/../lib/log4js.json'),
mtime = new Date(),
fakeFS = {
config: {
appenders: [
{ type: 'console', layout: { type: 'messagePassThrough' } }
],
levels: { 'a-test' : 'INFO' }
},
readFileSync: function (file, encoding) {
// Count reads so the vow can assert the file was read only once.
fileRead += 1;
assert.isString(file);
assert.equal(file, modulePath);
assert.equal(encoding, 'utf8');
return JSON.stringify(fakeFS.config);
},
statSync: function (path) {
pathsChecked.push(path);
if (path === modulePath) {
return { mtime: mtime };
} else {
throw new Error("no such file");
}
}
},
fakeConsole = {
'name': 'console',
'appender': function () {
return function(evt) { logEvents.push(evt); };
},
'configure': function (config) {
return fakeConsole.appender();
}
},
setIntervalCallback,
// Capture the reload callback instead of scheduling a real timer.
fakeSetInterval = function(cb, timeout) {
setIntervalCallback = cb;
},
log4js = sandbox.require(
'../lib/log4js',
{
requires: {
'fs': fakeFS,
'./appenders/console': fakeConsole
},
globals: {
'console': fakeConsole,
'setInterval' : fakeSetInterval,
}
}
);
log4js.configure(modulePath, { reloadSecs: 3 });
logger = log4js.getLogger('a-test');
logger.info("info1");
logger.debug("debug2 - should be ignored");
setIntervalCallback();
logger.info("info3");
logger.debug("debug4");
return [ pathsChecked, logEvents, modulePath, fileRead ];
},
'should only read the configuration file once': function(args) {
var fileRead = args[3];
assert.equal(fileRead, 1);
},
'should configure log4js from first log4js.json found': function(args) {
var logEvents = args[1];
assert.equal(logEvents.length, 2);
assert.equal(logEvents[0].data[0], 'info1');
assert.equal(logEvents[1].data[0], 'info3');
}
},
'when config file is removed': {
topic: function() {
// statSync succeeds once (for the initial load), then replaces itself
// with a version that throws — simulating deletion of the config file
// between reload checks.
var pathsChecked = [],
fileRead = 0,
logEvents = [],
logger,
modulePath = require('path').normalize(__dirname + '/../lib/log4js.json'),
mtime = new Date(),
fakeFS = {
config: {
appenders: [
{ type: 'console', layout: { type: 'messagePassThrough' } }
],
levels: { 'a-test' : 'INFO' }
},
readFileSync: function (file, encoding) {
fileRead += 1;
assert.isString(file);
assert.equal(file, modulePath);
assert.equal(encoding, 'utf8');
return JSON.stringify(fakeFS.config);
},
statSync: function (path) {
// First call works; all later calls act as if the file is gone.
this.statSync = function() {
throw new Error("no such file");
};
return { mtime: new Date() };
}
},
fakeConsole = {
'name': 'console',
'appender': function () {
return function(evt) { logEvents.push(evt); };
},
'configure': function (config) {
return fakeConsole.appender();
}
},
setIntervalCallback,
// Capture the reload callback instead of scheduling a real timer.
fakeSetInterval = function(cb, timeout) {
setIntervalCallback = cb;
},
log4js = sandbox.require(
'../lib/log4js',
{
requires: {
'fs': fakeFS,
'./appenders/console': fakeConsole
},
globals: {
'console': fakeConsole,
'setInterval' : fakeSetInterval,
}
}
);
log4js.configure(modulePath, { reloadSecs: 3 });
logger = log4js.getLogger('a-test');
logger.info("info1");
logger.debug("debug2 - should be ignored");
setIntervalCallback();
logger.info("info3");
logger.debug("debug4");
return [ pathsChecked, logEvents, modulePath, fileRead ];
},
'should only read the configuration file once': function(args) {
var fileRead = args[3];
assert.equal(fileRead, 1);
},
'should not clear configuration when config file not found': function(args) {
// Expect: info1, a WARN about the missing file, then info3 — the
// previously-loaded configuration stays in effect.
var logEvents = args[1];
assert.equal(logEvents.length, 3);
assert.equal(logEvents[0].data[0], 'info1');
assert.equal(logEvents[1].level.toString(), 'WARN');
assert.include(logEvents[1].data[0], 'Failed to load configuration file');
assert.equal(logEvents[2].data[0], 'info3');
}
},
'when passed an object': {
topic: function() {
var test = setupConsoleTest();
test.log4js.configure({}, { reloadSecs: 30 });
return test.logEvents;
},
'should log a warning': function(events) {
assert.equal(events[0].level.toString(), 'WARN');
assert.equal(
events[0].data[0],
'Ignoring configuration reload parameter for "object" configuration.'
);
}
},
'when called twice with reload options': {
topic: function() {
// Configuring twice with reload enabled should clear the first reload
// timer before installing the second one, to avoid leaking intervals.
var modulePath = require('path').normalize(__dirname + '/../lib/log4js.json'),
fakeFS = {
readFileSync: function (file, encoding) {
return JSON.stringify({});
},
statSync: function (path) {
return { mtime: new Date() };
}
},
fakeConsole = {
'name': 'console',
'appender': function () {
return function(evt) { };
},
'configure': function (config) {
return fakeConsole.appender();
}
},
setIntervalCallback,
intervalCleared = false,
clearedId,
// Return a known interval id so we can check it is the one cleared.
fakeSetInterval = function(cb, timeout) {
setIntervalCallback = cb;
return 1234;
},
log4js = sandbox.require(
'../lib/log4js',
{
requires: {
'fs': fakeFS,
'./appenders/console': fakeConsole
},
globals: {
'console': fakeConsole,
'setInterval' : fakeSetInterval,
'clearInterval': function(interval) {
intervalCleared = true;
clearedId = interval;
}
}
}
);
log4js.configure(modulePath, { reloadSecs: 3 });
log4js.configure(modulePath, { reloadSecs: 15 });
return { cleared: intervalCleared, id: clearedId };
},
'should clear the previous interval': function(result) {
assert.isTrue(result.cleared);
assert.equal(result.id, 1234);
}
}
}).exportTo(module);

View File

@@ -1,5 +1,9 @@
// This test shows an asymmetry between setLevel and isLevelEnabled (in log4js-node@0.4.3 and earlier): "use strict";
// 1) setLevel("foo") works, but setLevel(log4js.levels.foo) silently does not (sets the level to TRACE). /* jshint loopfunc: true */
// This test shows an asymmetry between setLevel and isLevelEnabled
// (in log4js-node@0.4.3 and earlier):
// 1) setLevel("foo") works, but setLevel(log4js.levels.foo) silently
// does not (sets the level to TRACE).
// 2) isLevelEnabled("foo") works as does isLevelEnabled(log4js.levels.foo). // 2) isLevelEnabled("foo") works as does isLevelEnabled(log4js.levels.foo).
// //
@@ -19,7 +23,8 @@ var strLevels= ['Trace','Debug','Info','Warn','Error','Fatal'];
var log4jsLevels =[]; var log4jsLevels =[];
// populate an array with the log4js.levels that match the strLevels. // populate an array with the log4js.levels that match the strLevels.
// Would be nice if we could iterate over log4js.levels instead, but log4js.levels.toLevel prevents that for now. // Would be nice if we could iterate over log4js.levels instead,
// but log4js.levels.toLevel prevents that for now.
strLevels.forEach(function(l) { strLevels.forEach(function(l) {
log4jsLevels.push(log4js.levels.toLevel(l)); log4jsLevels.push(log4js.levels.toLevel(l));
}); });
@@ -29,18 +34,19 @@ strLevels.forEach(function(l) {
var levelTypes = { var levelTypes = {
'string': strLevels, 'string': strLevels,
'log4js.levels.level': log4jsLevels, 'log4js.levels.level': log4jsLevels,
} };
// Set up the basic vows batch for this test // Set up the basic vows batch for this test
var batch = { var batch = {
setLevel: { setLevel: {
} }
} };
showProgress('Populating batch object...'); showProgress('Populating batch object...');
// Populating the batch object programmatically, // Populating the batch object programmatically,
// as I don't have the patience to manually populate it with the (strLevels.length x levelTypes.length) ^ 2 = 144 possible test combinations // as I don't have the patience to manually populate it with
// the (strLevels.length x levelTypes.length) ^ 2 = 144 possible test combinations
for (var type in levelTypes) { for (var type in levelTypes) {
var context = 'is called with a '+type; var context = 'is called with a '+type;
var levelsToTest = levelTypes[type]; var levelsToTest = levelTypes[type];
@@ -58,15 +64,30 @@ for (var type in levelTypes) {
var t = type; var t = type;
var ct = comparisonType; var ct = comparisonType;
var expectedResult = log4jsLevel.isLessThanOrEqualTo(comparisonLevel); var expectedResult = log4jsLevel.isLessThanOrEqualTo(comparisonLevel);
var vow = 'isLevelEnabled('+comparisonLevel+') called with a '+comparisonType+' should return '+expectedResult; var vow = 'isLevelEnabled(' + comparisonLevel +
') called with a ' + comparisonType +
' should return ' + expectedResult;
showProgress('Setting up the vows vow for '+vow); showProgress('Setting up the vows vow for '+vow);
batch.setLevel[context][subContext][vow] = function(levelToSet) { batch.setLevel[context][subContext][vow] = function(levelToSet) {
logger.setLevel(levelToSet); logger.setLevel(levelToSet);
showProgress('*** Checking setLevel( '+level+' ) of type '+t+', and isLevelEnabled( '+comparisonLevel+' ) of type '+ct+'. Expecting: '+expectedResult); showProgress(
assert.equal(logger.isLevelEnabled(comparisonLevel), expectedResult, 'Failed: calling setLevel( '+level+' ) with type '+type+', isLevelEnabled( '+comparisonLevel+' ) of type '+comparisonType+' did not return '+expectedResult); '*** Checking setLevel( ' + level +
' ) of type ' + t +
', and isLevelEnabled( ' + comparisonLevel +
' ) of type ' + ct + '. Expecting: ' + expectedResult
);
assert.equal(
logger.isLevelEnabled(comparisonLevel),
expectedResult,
'Failed: calling setLevel( ' + level +
' ) with type ' + type +
', isLevelEnabled( ' + comparisonLevel +
' ) of type ' + comparisonType +
' did not return ' + expectedResult
);
}; };
}) });
} }
}); });

View File

@@ -1,7 +1,9 @@
var vows = require('vows'), "use strict";
assert = require('assert'), var vows = require('vows')
log4js = require('../lib/log4js'), , assert = require('assert')
sandbox = require('sandboxed-module'); , log4js = require('../lib/log4js')
, sandbox = require('sandboxed-module')
;
function setupLogging(category, options) { function setupLogging(category, options) {
var msgs = []; var msgs = [];
@@ -13,14 +15,36 @@ function setupLogging(category, options) {
sendMail: function (msg, callback) { sendMail: function (msg, callback) {
msgs.push(msg); msgs.push(msg);
callback(null, true); callback(null, true);
},
close: function() {}
};
} }
}; };
var fakeLayouts = {
layout: function(type, config) {
this.type = type;
this.config = config;
return log4js.layouts.messagePassThroughLayout;
},
basicLayout: log4js.layouts.basicLayout,
messagePassThroughLayout: log4js.layouts.messagePassThroughLayout
};
var fakeConsole = {
errors: [],
error: function(msg, value) {
this.errors.push({ msg: msg, value: value });
} }
}; };
var smtpModule = sandbox.require('../lib/appenders/smtp', { var smtpModule = sandbox.require('../lib/appenders/smtp', {
requires: { requires: {
'nodemailer': fakeMailer 'nodemailer': fakeMailer,
'../layouts': fakeLayouts
},
globals: {
console: fakeConsole
} }
}); });
@@ -29,6 +53,8 @@ function setupLogging(category, options) {
return { return {
logger: log4js.getLogger(category), logger: log4js.getLogger(category),
mailer: fakeMailer, mailer: fakeMailer,
layouts: fakeLayouts,
console: fakeConsole,
results: msgs results: msgs
}; };
} }
@@ -90,6 +116,19 @@ vows.describe('log4js smtpAppender').addBatch({
checkMessages(result, 'sender@domain.com', 'This is subject'); checkMessages(result, 'sender@domain.com', 'This is subject');
} }
}, },
'config with layout': {
topic: function() {
var setup = setupLogging('config with layout', {
layout: {
type: "tester"
}
});
return setup;
},
'should configure layout': function(result) {
assert.equal(result.layouts.type, 'tester');
}
},
'separate email for each event': { 'separate email for each event': {
topic: function() { topic: function() {
var self = this; var self = this;
@@ -157,11 +196,37 @@ vows.describe('log4js smtpAppender').addBatch({
assert.equal(result.results[0].to, 'recipient@domain.com'); assert.equal(result.results[0].to, 'recipient@domain.com');
assert.equal(result.results[0].subject, 'Log event #1'); assert.equal(result.results[0].subject, 'Log event #1');
assert.equal(result.results[0].text.match(new RegExp('.+Log event #[1-2]$', 'gm')).length, 2); assert.equal(result.results[0].text.match(new RegExp('.+Log event #[1-2]$', 'gm')).length, 2);
assert.equal(result.results[1].to, 'recipient@domain.com'); assert.equal(result.results[1].to, 'recipient@domain.com');
assert.equal(result.results[1].subject, 'Log event #3'); assert.equal(result.results[1].subject, 'Log event #3');
assert.ok(new RegExp('.+Log event #3\n$').test(result.results[1].text)); assert.ok(new RegExp('.+Log event #3\n$').test(result.results[1].text));
} }
},
'error when sending email': {
topic: function() {
var setup = setupLogging('error when sending email', {
recipients: 'recipient@domain.com',
sendInterval: 0,
transport: 'SMTP',
SMTP: { port: 25, auth: { user: 'user@domain.com' } }
});
setup.mailer.createTransport = function() {
return {
sendMail: function(msg, cb) {
cb({ message: "oh noes" });
},
close: function() { }
};
};
setup.logger.info("This will break");
return setup.console;
},
'should be logged to console': function(cons) {
assert.equal(cons.errors.length, 1);
assert.equal(cons.errors[0].msg, "log4js.smtpAppender - Error happened");
assert.equal(cons.errors[0].value.message, 'oh noes');
}
} }
}).export(module); }).export(module);

View File

@@ -0,0 +1,93 @@
"use strict";
var vows = require('vows')
, assert = require('assert')
, fs = require('fs')
, sandbox = require('sandboxed-module');
vows.describe('../../lib/streams/BaseRollingFileStream').addBatch({
'when node version < 0.10.0': {
topic: function() {
var streamLib = sandbox.load(
'../../lib/streams/BaseRollingFileStream',
{
globals: {
process: {
version: '0.8.11'
}
},
requires: {
'readable-stream': {
Writable: function() {}
}
}
}
);
return streamLib.required;
},
'it should use readable-stream to maintain compatibility': function(required) {
assert.ok(required['readable-stream']);
assert.ok(!required.stream);
}
},
'when node version > 0.10.0': {
topic: function() {
var streamLib = sandbox.load(
'../../lib/streams/BaseRollingFileStream',
{
globals: {
process: {
version: '0.10.1'
}
},
requires: {
'stream': {
Writable: function() {}
}
}
}
);
return streamLib.required;
},
'it should use the core stream module': function(required) {
assert.ok(required.stream);
assert.ok(!required['readable-stream']);
}
},
'when no filename is passed': {
topic: require('../../lib/streams/BaseRollingFileStream'),
'it should throw an error': function(BaseRollingFileStream) {
try {
new BaseRollingFileStream();
assert.fail('should not get here');
} catch (e) {
assert.ok(e);
}
}
},
'default behaviour': {
topic: function() {
var BaseRollingFileStream = require('../../lib/streams/BaseRollingFileStream')
, stream = new BaseRollingFileStream('basetest.log');
return stream;
},
teardown: function() {
try {
fs.unlink('basetest.log');
} catch (e) {
console.error("could not remove basetest.log", e);
}
},
'it should not want to roll': function(stream) {
assert.isFalse(stream.shouldRoll());
},
'it should not roll': function(stream) {
var cbCalled = false;
//just calls the callback straight away, no async calls
stream.roll('basetest.log', function() { cbCalled = true; });
assert.isTrue(cbCalled);
}
}
}).exportTo(module);

View File

@@ -1,3 +1,4 @@
"use strict";
var vows = require('vows') var vows = require('vows')
, assert = require('assert') , assert = require('assert')
, fs = require('fs') , fs = require('fs')
@@ -11,7 +12,7 @@ if (semver.satisfies(process.version, '>=0.10.0')) {
} else { } else {
streams = require('readable-stream'); streams = require('readable-stream');
} }
DateRollingFileStream = require('../../lib/streams').DateRollingFileStream DateRollingFileStream = require('../../lib/streams').DateRollingFileStream;
function cleanUp(filename) { function cleanUp(filename) {
return function() { return function() {
@@ -25,7 +26,10 @@ function now() {
vows.describe('DateRollingFileStream').addBatch({ vows.describe('DateRollingFileStream').addBatch({
'arguments': { 'arguments': {
topic: new DateRollingFileStream(__dirname + '/test-date-rolling-file-stream-1', 'yyyy-mm-dd.hh'), topic: new DateRollingFileStream(
__dirname + '/test-date-rolling-file-stream-1',
'yyyy-mm-dd.hh'
),
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-1'), teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-1'),
'should take a filename and a pattern and return a WritableStream': function(stream) { 'should take a filename and a pattern and return a WritableStream': function(stream) {
@@ -51,20 +55,27 @@ vows.describe('DateRollingFileStream').addBatch({
}, },
'with stream arguments': { 'with stream arguments': {
topic: new DateRollingFileStream(__dirname + '/test-date-rolling-file-stream-3', 'yyyy-MM-dd', { mode: 0666 }), topic: new DateRollingFileStream(
__dirname + '/test-date-rolling-file-stream-3',
'yyyy-MM-dd',
{ mode: parseInt('0666', 8) }
),
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-3'), teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-3'),
'should pass them to the underlying stream': function(stream) { 'should pass them to the underlying stream': function(stream) {
assert.equal(stream.theStream.mode, 0666); assert.equal(stream.theStream.mode, parseInt('0666', 8));
} }
}, },
'with stream arguments but no pattern': { 'with stream arguments but no pattern': {
topic: new DateRollingFileStream(__dirname + '/test-date-rolling-file-stream-4', { mode: 0666 }), topic: new DateRollingFileStream(
__dirname + '/test-date-rolling-file-stream-4',
{ mode: parseInt('0666', 8) }
),
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-4'), teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-4'),
'should pass them to the underlying stream': function(stream) { 'should pass them to the underlying stream': function(stream) {
assert.equal(stream.theStream.mode, 0666); assert.equal(stream.theStream.mode, parseInt('0666', 8));
}, },
'should use default pattern': function(stream) { 'should use default pattern': function(stream) {
assert.equal(stream.pattern, '.yyyy-MM-dd'); assert.equal(stream.pattern, '.yyyy-MM-dd');
@@ -74,7 +85,11 @@ vows.describe('DateRollingFileStream').addBatch({
'with a pattern of .yyyy-MM-dd': { 'with a pattern of .yyyy-MM-dd': {
topic: function() { topic: function() {
var that = this, var that = this,
stream = new DateRollingFileStream(__dirname + '/test-date-rolling-file-stream-5', '.yyyy-MM-dd', null, now); stream = new DateRollingFileStream(
__dirname + '/test-date-rolling-file-stream-5', '.yyyy-MM-dd',
null,
now
);
stream.write("First message\n", 'utf8', function() { stream.write("First message\n", 'utf8', function() {
that.callback(null, stream); that.callback(null, stream);
}); });
@@ -103,7 +118,14 @@ vows.describe('DateRollingFileStream').addBatch({
fs.readdir(__dirname, this.callback); fs.readdir(__dirname, this.callback);
}, },
'should be two': function(files) { 'should be two': function(files) {
assert.equal(files.filter(function(file) { return file.indexOf('test-date-rolling-file-stream-5') > -1; }).length, 2); assert.equal(
files.filter(
function(file) {
return file.indexOf('test-date-rolling-file-stream-5') > -1;
}
).length,
2
);
} }
}, },
@@ -125,6 +147,81 @@ vows.describe('DateRollingFileStream').addBatch({
} }
} }
} }
},
'with alwaysIncludePattern': {
topic: function() {
var that = this,
testTime = new Date(2012, 8, 12, 0, 10, 12),
stream = new DateRollingFileStream(
__dirname + '/test-date-rolling-file-stream-pattern',
'.yyyy-MM-dd',
{alwaysIncludePattern: true},
now
);
stream.write("First message\n", 'utf8', function() {
that.callback(null, stream);
});
},
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-pattern.2012-09-12'),
'should create a file with the pattern set': {
topic: function(stream) {
fs.readFile(__dirname + '/test-date-rolling-file-stream-pattern.2012-09-12', this.callback);
},
'file should contain first message': function(result) {
assert.equal(result.toString(), "First message\n");
}
},
'when the day changes': {
topic: function(stream) {
testTime = new Date(2012, 8, 13, 0, 10, 12);
stream.write("Second message\n", 'utf8', this.callback);
},
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-pattern.2012-09-13'),
'the number of files': {
topic: function() {
fs.readdir(__dirname, this.callback);
},
'should be two': function(files) {
assert.equal(
files.filter(
function(file) {
return file.indexOf('test-date-rolling-file-stream-pattern') > -1;
}
).length,
2
);
}
},
'the file with the later date': {
topic: function() {
fs.readFile(
__dirname + '/test-date-rolling-file-stream-pattern.2012-09-13',
this.callback
);
},
'should contain the second message': function(contents) {
assert.equal(contents.toString(), "Second message\n");
}
},
'the file with the date': {
topic: function() {
fs.readFile(
__dirname + '/test-date-rolling-file-stream-pattern.2012-09-12',
this.callback
);
},
'should contain the first message': function(contents) {
assert.equal(contents.toString(), "First message\n");
}
}
}
} }
}).exportTo(module); }).exportTo(module);

View File

@@ -1,3 +1,4 @@
"use strict";
var vows = require('vows') var vows = require('vows')
, async = require('async') , async = require('async')
, assert = require('assert') , assert = require('assert')
@@ -22,13 +23,17 @@ function remove(filename) {
} }
} }
function create(filename) {
fs.writeFileSync(filename, "test file");
}
vows.describe('RollingFileStream').addBatch({ vows.describe('RollingFileStream').addBatch({
'arguments': { 'arguments': {
topic: function() { topic: function() {
remove(__dirname + "/test-rolling-file-stream"); remove(__dirname + "/test-rolling-file-stream");
return new RollingFileStream("test-rolling-file-stream", 1024, 5); return new RollingFileStream("test-rolling-file-stream", 1024, 5);
}, },
'should take a filename, file size in bytes, number of backups as arguments and return a Writable': function(stream) { 'should take a filename, file size (bytes), no. backups, return Writable': function(stream) {
assert.instanceOf(stream, streams.Writable); assert.instanceOf(stream, streams.Writable);
assert.equal(stream.filename, "test-rolling-file-stream"); assert.equal(stream.filename, "test-rolling-file-stream");
assert.equal(stream.size, 1024); assert.equal(stream.size, 1024);
@@ -44,10 +49,15 @@ vows.describe('RollingFileStream').addBatch({
'with stream arguments': { 'with stream arguments': {
topic: function() { topic: function() {
remove(__dirname + '/test-rolling-file-stream'); remove(__dirname + '/test-rolling-file-stream');
return new RollingFileStream('test-rolling-file-stream', 1024, 5, { mode: 0666 }); return new RollingFileStream(
'test-rolling-file-stream',
1024,
5,
{ mode: parseInt('0666', 8) }
);
}, },
'should pass them to the underlying stream': function(stream) { 'should pass them to the underlying stream': function(stream) {
assert.equal(stream.theStream.mode, 0666); assert.equal(stream.theStream.mode, parseInt('0666', 8));
} }
}, },
'without size': { 'without size': {
@@ -74,7 +84,11 @@ vows.describe('RollingFileStream').addBatch({
'writing less than the file size': { 'writing less than the file size': {
topic: function() { topic: function() {
remove(__dirname + "/test-rolling-file-stream-write-less"); remove(__dirname + "/test-rolling-file-stream-write-less");
var that = this, stream = new RollingFileStream(__dirname + "/test-rolling-file-stream-write-less", 100); var that = this
, stream = new RollingFileStream(
__dirname + "/test-rolling-file-stream-write-less",
100
);
stream.write("cheese", "utf8", function() { stream.write("cheese", "utf8", function() {
stream.end(); stream.end();
fs.readFile(__dirname + "/test-rolling-file-stream-write-less", "utf8", that.callback); fs.readFile(__dirname + "/test-rolling-file-stream-write-less", "utf8", that.callback);
@@ -88,7 +102,14 @@ vows.describe('RollingFileStream').addBatch({
fs.readdir(__dirname, this.callback); fs.readdir(__dirname, this.callback);
}, },
'should be one': function(files) { 'should be one': function(files) {
assert.equal(files.filter(function(file) { return file.indexOf('test-rolling-file-stream-write-less') > -1; }).length, 1); assert.equal(
files.filter(
function(file) {
return file.indexOf('test-rolling-file-stream-write-less') > -1;
}
).length,
1
);
} }
} }
}, },
@@ -96,13 +117,21 @@ vows.describe('RollingFileStream').addBatch({
topic: function() { topic: function() {
remove(__dirname + "/test-rolling-file-stream-write-more"); remove(__dirname + "/test-rolling-file-stream-write-more");
remove(__dirname + "/test-rolling-file-stream-write-more.1"); remove(__dirname + "/test-rolling-file-stream-write-more.1");
var that = this, stream = new RollingFileStream(__dirname + "/test-rolling-file-stream-write-more", 45); var that = this
async.forEach([0, 1, 2, 3, 4, 5, 6], function(i, cb) { , stream = new RollingFileStream(
__dirname + "/test-rolling-file-stream-write-more",
45
);
async.forEach(
[0, 1, 2, 3, 4, 5, 6],
function(i, cb) {
stream.write(i +".cheese\n", "utf8", cb); stream.write(i +".cheese\n", "utf8", cb);
}, function() { },
function() {
stream.end(); stream.end();
that.callback(); that.callback();
}); }
);
}, },
'the number of files': { 'the number of files': {
topic: function() { topic: function() {
@@ -110,7 +139,9 @@ vows.describe('RollingFileStream').addBatch({
}, },
'should be two': function(files) { 'should be two': function(files) {
assert.equal(files.filter( assert.equal(files.filter(
function(file) { return file.indexOf('test-rolling-file-stream-write-more') > -1; } function(file) {
return file.indexOf('test-rolling-file-stream-write-more') > -1;
}
).length, 2); ).length, 2);
} }
}, },
@@ -130,5 +161,50 @@ vows.describe('RollingFileStream').addBatch({
assert.equal(contents, '0.cheese\n1.cheese\n2.cheese\n3.cheese\n4.cheese\n'); assert.equal(contents, '0.cheese\n1.cheese\n2.cheese\n3.cheese\n4.cheese\n');
} }
} }
},
'when many files already exist': {
topic: function() {
remove(__dirname + '/test-rolling-stream-with-existing-files.11');
remove(__dirname + '/test-rolling-stream-with-existing-files.20');
remove(__dirname + '/test-rolling-stream-with-existing-files.-1');
remove(__dirname + '/test-rolling-stream-with-existing-files.1.1');
remove(__dirname + '/test-rolling-stream-with-existing-files.1');
create(__dirname + '/test-rolling-stream-with-existing-files.11');
create(__dirname + '/test-rolling-stream-with-existing-files.20');
create(__dirname + '/test-rolling-stream-with-existing-files.-1');
create(__dirname + '/test-rolling-stream-with-existing-files.1.1');
create(__dirname + '/test-rolling-stream-with-existing-files.1');
var that = this
, stream = new RollingFileStream(
__dirname + "/test-rolling-stream-with-existing-files",
45,
5
);
async.forEach(
[0, 1, 2, 3, 4, 5, 6],
function(i, cb) {
stream.write(i +".cheese\n", "utf8", cb);
},
function() {
stream.end();
that.callback();
}
);
},
'the files': {
topic: function() {
fs.readdir(__dirname, this.callback);
},
'should be rolled': function(files) {
assert.include(files, 'test-rolling-stream-with-existing-files');
assert.include(files, 'test-rolling-stream-with-existing-files.1');
assert.include(files, 'test-rolling-stream-with-existing-files.2');
assert.include(files, 'test-rolling-stream-with-existing-files.11');
assert.include(files, 'test-rolling-stream-with-existing-files.20');
}
}
} }
}).exportTo(module); }).exportTo(module);