Compare commits

114 Commits

| Author | SHA1 | Date |
|---|---|---|
| | ad7e844d68 | |
| | bef2075c60 | |
| | a046523804 | |
| | 0ed1a137d6 | |
| | 33a92b5dd6 | |
| | 0901794b35 | |
| | 05d5265554 | |
| | 9a29d6222e | |
| | 38a89dcf3d | |
| | 754ac2c5ac | |
| | ccc4976206 | |
| | 6e7348f8d8 | |
| | 61078e88ef | |
| | 613a077a61 | |
| | 68d1c8fa07 | |
| | 216937637d | |
| | ff5b8d2939 | |
| | 6a20efb965 | |
| | 872bc791c7 | |
| | 2c7b56853b | |
| | c8157cef5c | |
| | 352653dcbe | |
| | cff6928761 | |
| | 1fb8962b83 | |
| | d276bbc2f8 | |
| | e78f4e33ce | |
| | 53367785b4 | |
| | cff20b99e3 | |
| | 0a422e5749 | |
| | 37b94cf195 | |
| | 0c04c6807c | |
| | b4ca201a91 | |
| | 2ab6f5fa24 | |
| | 9bad070b8a | |
| | 5aaa9fcd50 | |
| | b7e77b11ad | |
| | 615b534b56 | |
| | 788de0cac3 | |
| | 4d484ad752 | |
| | 449893fd24 | |
| | 5bdeaf68d7 | |
| | a5b09b3ead | |
| | 05c4c59c20 | |
| | b4a5227fc0 | |
| | b152618dbc | |
| | a999d8fc00 | |
| | 78de73a274 | |
| | 4cf1d1cfa4 | |
| | e5d0b3348f | |
| | f10a6e164e | |
| | cea3dc97d1 | |
| | a3a0c55322 | |
| | 51d48165fd | |
| | 7d50c45801 | |
| | 40c5f5ee70 | |
| | 1d769fdf33 | |
| | bc665b875e | |
| | 154c0dc299 | |
| | 050fae5230 | |
| | 342286e062 | |
| | 537f1058b9 | |
| | 283a403a11 | |
| | ae8aaa5376 | |
| | a95117c0d3 | |
| | 097390bc89 | |
| | 0a0119300b | |
| | fde66f92f5 | |
| | 516659f733 | |
| | 5aabebbdb7 | |
| | 8b376eb46e | |
| | ced570413c | |
| | b2827076da | |
| | 07e920cc1b | |
| | 89f3659825 | |
| | 23a2758a6d | |
| | 25aa075fad | |
| | d099a9fc3f | |
| | 7bc460e8e0 | |
| | 681decf51f | |
| | b93691b82a | |
| | f82ecf8f2a | |
| | 3b77a42706 | |
| | b5bc9c8322 | |
| | c7d3ac4fe1 | |
| | 0aca64623e | |
| | ff68e46858 | |
| | f9768eb56e | |
| | 75e5584060 | |
| | b78fd77015 | |
| | 2a06048114 | |
| | 9a34d9edfd | |
| | 12e71bda4e | |
| | 53a481d4da | |
| | 8d7b5513fb | |
| | d13b2fb3b4 | |
| | 4f7d73bc97 | |
| | 163db0e5fd | |
| | 71f9eef6fe | |
| | 623bc1859f | |
| | b72182c0cf | |
| | ef9fe3a4b1 | |
| | 3b241095cb | |
| | 545681287f | |
| | 80474c6881 | |
| | 7aa076c278 | |
| | e6b69ff7f2 | |
| | 69e64932b1 | |
| | 4b32456db7 | |
| | ec21ec63f0 | |
| | a9a698cf09 | |
| | 925c280c68 | |
| | d0b4563ba0 | |
| | aac8ca0eb0 | |
| | 0968c6709f | |
.gitignore (vendored, new file, 5 additions)
@@ -0,0 +1,5 @@
*.log
*.log??
build
node_modules

.npmignore (new file, 2 additions)
@@ -0,0 +1,2 @@
*.log
*.log??
.travis.yml (new file, 4 additions)
@@ -0,0 +1,4 @@
language: node_js
node_js:
  - 0.6
  - 0.7
README.md (128 changed lines)

@@ -1,18 +1,27 @@
# log4js-node
# log4js-node [](http://travis-ci.org/nomiddlename/log4js-node)

This is a conversion of the [log4js](http://log4js.berlios.de/index.html)
framework to work with [node](http://nodejs.org). I've mainly stripped out the browser-specific code
and tidied up some of the javascript. It includes a basic file logger, with log rolling based on file size, and also replaces node's console.log functions.

NOTE: in v0.2.x require('log4js') returned a function, and you needed to call that function in your code before you could use it. This was to make testing easier. v0.3.x make use of [felixge's sandbox-module](https://github.com/felixge/node-sandboxed-module), so we don't need to return a function.
This is a conversion of the [log4js](http://log4js.berlios.de/index.html)
framework to work with [node](http://nodejs.org). I've mainly stripped out the browser-specific code and tidied up some of the javascript.

Out of the box it supports the following features:

* coloured console logging
* replacement of node's console.log functions (optional)
* file appender, with log rolling based on file size
* SMTP appender
* GELF appender
* hook.io appender
* multiprocess appender (useful when you've got worker processes)
* a logger for connect/express servers
* configurable log message layout/patterns
* different log levels for different log categories (make some parts of your app log as DEBUG, others only ERRORS, etc.)

## installation

    npm install log4js

## tests

Tests now use [vows](http://vowsjs.org), run with `vows test/*.js`.

## usage

@@ -28,66 +37,93 @@ By default, log4js outputs to stdout with the coloured layout (thanks to [masylu

See example.js:

    var log4js = require('log4js'); //note the need to call the function
    log4js.addAppender(log4js.consoleAppender());
    log4js.addAppender(log4js.fileAppender('logs/cheese.log'), 'cheese');

    var log4js = require('log4js');
    log4js.loadAppender('console');
    log4js.loadAppender('file');
    log4js.addAppender(log4js.appenders.console());
    log4js.addAppender(log4js.appenders.file('logs/cheese.log'), 'cheese');

    var logger = log4js.getLogger('cheese');
    logger.setLevel('ERROR');

    logger.trace('Entering cheese testing');
    logger.debug('Got cheese.');
    logger.info('Cheese is Gouda.');
    logger.warn('Cheese is quite smelly.');
    logger.error('Cheese is too ripe!');
    logger.fatal('Cheese was breeding ground for listeria.');

Output:

    [2010-01-17 11:43:37.987] [ERROR] cheese - Cheese is too ripe!
    [2010-01-17 11:43:37.990] [FATAL] cheese - Cheese was breeding ground for listeria.

The first 5 lines of the code above could also be written as:

    var log4js = require('log4js');
    log4js.configure({
      appenders: [
        { type: 'console' },
        { type: 'file', filename: 'logs/cheese.log', category: 'cheese' }
      ]
    });

## configuration

You can either configure the appenders and log levels manually (as above), or provide a
configuration file (`log4js.configure('path/to/file.json')`) explicitly, or just let log4js look for a file called `log4js.json` (it looks in the current directory first, then the require paths, and finally looks for the default config included in the same directory as the `log4js.js` file).
An example file can be found in `test/log4js.json`. An example config file with log rolling is in `test/with-log-rolling.json`
You can also pass an object to the configure function, which has the same properties as the json versions.
You can configure the appenders and log levels manually (as above), or provide a
configuration file (`log4js.configure('path/to/file.json')`), or a configuration object.
An example file can be found in `test/log4js.json`. An example config file with log rolling is in `test/with-log-rolling.json`.
By default, the configuration file is checked for changes every 60 seconds, and if changed, reloaded. This allows changes to logging levels to occur without restarting the application.

## connect/express logger
To turn off configuration file change checking, configure with:

A connect/express logger has been added to log4js, by [danbell](https://github.com/danbell). This allows connect/express servers to log using log4js. See example-connect-logger.js.

    var log4js = require('log4js');
    log4js.configure('my_log4js_configuration.json', {});

    var log4js = require('./lib/log4js');
    log4js.addAppender(log4js.consoleAppender());
    log4js.addAppender(log4js.fileAppender('cheese.log'), 'cheese');

    var logger = log4js.getLogger('cheese');

To specify a different period:

    log4js.configure('file.json', { reloadSecs: 300 });

For FileAppender you can also pass the path to the log directory as an option where all your log files would be stored.

    log4js.configure('my_log4js_configuration.json', { cwd: '/absolute/path/to/log/dir' });

If you have already defined an absolute path for one of the FileAppenders in the configuration file, you could add a "absolute": true to the particular FileAppender to override the cwd option passed. Here is an example configuration file:

#### my_log4js_configuration.json ####

    {
      "appenders": [
        {
          "type": "file",
          "filename": "relative/path/to/log_file.log",
          "maxLogSize": 20480,
          "backups": 3,
          "pollInterval": 15,
          "category": "relative-logger"
        },
        {
          "type": "file",
          "absolute": true,
          "filename": "/absolute/path/to/log_file.log",
          "maxLogSize": 20480,
          "backups": 10,
          "pollInterval": 15,
          "category": "absolute-logger"
        }
      ]
    }

    logger.setLevel('INFO');

    var app = require('express').createServer();
    app.configure(function() {
      app.use(log4js.connectLogger(logger, { level: log4js.levels.INFO }));
    });
    app.get('/', function(req,res) {
      res.send('hello world');
    });
    app.listen(5000);

Documentation for most of the core appenders can be found on the [wiki](wiki/Appenders), otherwise take a look at the tests and the examples.

The options object that is passed to log4js.connectLogger supports a format string the same as the connect/express logger. For example:

## Documentation
See the [wiki](wiki). Improve the [wiki](wiki), please.

    app.configure(function() {
      app.use(log4js.connectLogger(logger, { level: log4js.levels.INFO, format: ':method :url' }));
    });

## author (of this node version)

Gareth Jones (csausdev - gareth.jones@sensis.com.au)

## Contributing
Contributions welcome, but take a look at the [rules](wiki/Contributing) first.

## License

The original log4js was distributed under the Apache 2.0 License, and so is this. I've tried to
keep the original copyright and author credits in place, except in sections that I have rewritten
extensively.
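The features list above mentions per-category log levels, and the new `configureLevels` function later in this diff reads a `levels` map from the configuration object. A minimal sketch of combining the two (the category name and filename here are placeholders, not taken from the diff):

    var log4js = require('log4js');
    log4js.configure({
      appenders: [
        { type: 'console' },
        { type: 'file', filename: 'logs/cheese.log', category: 'cheese' }
      ],
      levels: {
        cheese: 'ERROR'   // only errors and above for the 'cheese' category
      }
    });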
@@ -8,7 +8,7 @@ var app = require('express').createServer();
app.configure(function() {
app.use(log4js.connectLogger(logger, { level: log4js.levels.INFO }));
});
app.get('/', function(req,res) {
res.send('hello world');
app.get('*', function(req,res) {
res.send('hello world\n <a href="/cheese">cheese</a>\n');
});
app.listen(5000);
lib/appenders/console.js (new file, 20 additions)
@@ -0,0 +1,20 @@
var layouts = require('../layouts'),
    consoleLog = console.log;

function consoleAppender (layout) {
  layout = layout || layouts.colouredLayout;
  return function(loggingEvent) {
    consoleLog(layout(loggingEvent));
  };
}

function configure(config) {
  var layout;
  if (config.layout) {
    layout = layouts.layout(config.layout.type, config.layout);
  }
  return consoleAppender(layout);
}

exports.appender = consoleAppender;
exports.configure = configure;
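For reference, a minimal sketch of wiring this appender up in the two styles the README shows, relying only on calls that appear in this diff (the layout is left at its coloured default):

    var log4js = require('log4js');

    // declarative: matches the defaultConfig in lib/log4js.js
    log4js.configure({ appenders: [ { type: 'console' } ] });

    // programmatic: loadAppender registers log4js.appenders.console
    log4js.loadAppender('console');
    log4js.addAppender(log4js.appenders.console());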
lib/appenders/file.js (new file, 67 additions)
@@ -0,0 +1,67 @@
var layouts = require('../layouts')
, path = require('path')
, fs = require('fs')
, streams = require('../streams');

/**
 * File Appender writing the logs to a text file. Supports rolling of logs by size.
 *
 * @param file file log messages will be written to
 * @param layout a function that takes a logevent and returns a string (defaults to basicLayout).
 * @param logSize - the maximum size (in bytes) for a log file, if not provided then logs won't be rotated.
 * @param numBackups - the number of log files to keep after logSize has been reached (default 5)
 */
function fileAppender (file, layout, logSize, numBackups) {
  var bytesWritten = 0;
  file = path.normalize(file);
  layout = layout || layouts.basicLayout;
  numBackups = numBackups === undefined ? 5 : numBackups;
  //there has to be at least one backup if logSize has been specified
  numBackups = numBackups === 0 ? 1 : numBackups;

  function openTheStream(file, fileSize, numFiles) {
    var stream;
    if (fileSize) {
      stream = new streams.BufferedWriteStream(
        new streams.RollingFileStream(
          file,
          fileSize,
          numFiles
        )
      );
    } else {
      stream = new streams.BufferedWriteStream(fs.createWriteStream(file, { encoding: "utf8", mode: 0644, flags: 'a' }));
    }
    stream.on("error", function (err) {
      console.error("log4js.fileAppender - Writing to file %s, error happened ", file, err);
    });
    return stream;
  }

  var logFile = openTheStream(file, logSize, numBackups);

  //close the file on process exit.
  process.on('exit', function() {
    logFile.end();
  });

  return function(loggingEvent) {
    logFile.write(layout(loggingEvent)+'\n', "utf8");
  };
}

function configure(config, options) {
  var layout;
  if (config.layout) {
    layout = layouts.layout(config.layout.type, config.layout);
  }

  if (options && options.cwd && !config.absolute) {
    config.filename = path.join(options.cwd, config.filename);
  }

  return fileAppender(config.filename, layout, config.maxLogSize, config.backups);
}

exports.appender = fileAppender;
exports.configure = configure;
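A sketch of a configuration entry that this `configure` function accepts; `filename`, `maxLogSize`, `backups` and `absolute` come from the code above, while the paths, sizes and the `cwd` value are placeholders:

    log4js.configure({
      appenders: [
        { type: 'file',
          filename: 'logs/app.log',  // joined onto options.cwd unless absolute: true
          maxLogSize: 1048576,       // bytes; omit to disable log rolling
          backups: 3,                // rolled files to keep (default 5)
          category: 'app' }
      ]
    }, { cwd: '/var/log/myapp' });   // optional options object, as described in the README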
lib/appenders/gelf.js (new file, 93 additions)
@@ -0,0 +1,93 @@
var zlib = require('zlib');
var layouts = require('../layouts');
var levels = require('../levels');
var dgram = require('dgram');
var util = require('util');

var LOG_EMERG=0;    // system is unusable
var LOG_ALERT=1;    // action must be taken immediately
var LOG_CRIT=2;     // critical conditions
var LOG_ERR=3;      // error conditions
var LOG_ERROR=3;    // because people WILL typo
var LOG_WARNING=4;  // warning conditions
var LOG_NOTICE=5;   // normal, but significant, condition
var LOG_INFO=6;     // informational message
var LOG_DEBUG=7;    // debug-level message

var levelMapping = {};
levelMapping[levels.ALL] = LOG_DEBUG;
levelMapping[levels.TRACE] = LOG_DEBUG;
levelMapping[levels.DEBUG] = LOG_DEBUG;
levelMapping[levels.INFO] = LOG_INFO;
levelMapping[levels.WARN] = LOG_WARNING;
levelMapping[levels.ERROR] = LOG_ERR;
levelMapping[levels.FATAL] = LOG_CRIT;

/**
 * GELF appender that supports sending UDP packets to a GELF compatible server such as Graylog
 *
 * @param layout a function that takes a logevent and returns a string (defaults to none).
 * @param host - host to which to send logs (default:localhost)
 * @param port - port at which to send logs to (default:12201)
 * @param hostname - hostname of the current host (default:os hostname)
 * @param facility - facility to log to (default:nodejs-server)
 */
function gelfAppender (layout, host, port, hostname, facility) {

  host = host || 'localhost';
  port = port || 12201;
  hostname = hostname || require('os').hostname();
  facility = facility || 'nodejs-server';
  layout = layout || layouts.messagePassThroughLayout;

  var client = dgram.createSocket("udp4");

  process.on('exit', function() {
    if (client) client.close();
  });

  function preparePacket(loggingEvent) {
    var msg = {};
    msg.full_message = layout(loggingEvent);
    msg.short_message = msg.full_message;

    msg.version="1.0";
    msg.timestamp = msg.timestamp || new Date().getTime() / 1000 >> 0;
    msg.host = hostname;
    msg.level = levelMapping[loggingEvent.level || levels.DEBUG];
    msg.facility = facility;
    return msg;
  }

  function sendPacket(packet) {
    try {
      client.send(packet, 0, packet.length, port, host);
    } catch(e) {}
  }

  return function(loggingEvent) {
    var message = preparePacket(loggingEvent);
    zlib.gzip(new Buffer(JSON.stringify(message)), function(err, packet) {
      if (err) {
        console.error(err.stack);
      } else {
        if (packet.length > 8192) {
          util.debug("Message packet length (" + packet.length + ") is larger than 8k. Not sending");
        } else {
          sendPacket(packet);
        }
      }
    });
  };
}

function configure(config) {
  var layout;
  if (config.layout) {
    layout = layouts.layout(config.layout.type, config.layout);
  }
  return gelfAppender(layout, config.host, config.port, config.hostname, config.facility);
}

exports.appender = gelfAppender;
exports.configure = configure;
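A sketch of a matching configuration entry; the keys mirror the parameters read by `configure` above, and the host and facility values are invented:

    log4js.configure({
      appenders: [
        { type: 'gelf',
          host: 'graylog.example.com',  // defaults to 'localhost'
          port: 12201,                  // default GELF UDP port
          facility: 'my-node-app' }     // defaults to 'nodejs-server'
      ]
    });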
lib/appenders/hookio.js (new file, 75 additions)
@@ -0,0 +1,75 @@
var log4js = require('../log4js');
var layouts = require('../layouts');
var Hook = require('hook.io').Hook;
var util = require('util');

var Logger = function createLogger(options) {
  var self = this;
  var actualAppender = options.actualAppender;
  Hook.call(self, options);
  self.on('hook::ready', function hookReady() {
    self.on('*::' + options.name + '::log', function log(loggingEvent) {
      deserializeLoggingEvent(loggingEvent);
      actualAppender(loggingEvent);
    });
  });
}
util.inherits(Logger, Hook);

function deserializeLoggingEvent(loggingEvent) {
  loggingEvent.startTime = new Date(loggingEvent.startTime);
  loggingEvent.level.toString = function levelToString() {
    return loggingEvent.level.levelStr;
  };
}

function initHook(hookioOptions) {
  var loggerHook;
  if (hookioOptions.mode === 'master') {
    // Start the master hook, handling the actual logging
    loggerHook = new Logger(hookioOptions);
  } else {
    // Start a worker, just emitting events for a master
    loggerHook = new Hook(hookioOptions);
  }
  loggerHook.start();
  return loggerHook;
}

function getBufferedHook(hook, eventName) {
  var hookBuffer = [];
  var hookReady = false;
  hook.on('hook::ready', function emptyBuffer() {
    hookBuffer.forEach(function logBufferItem(loggingEvent) {
      hook.emit(eventName, loggingEvent);
    })
    hookReady = true;
  });

  return function log(loggingEvent) {
    if (hookReady) {
      hook.emit(eventName, loggingEvent);
    } else {
      hookBuffer.push(loggingEvent);
    }
  }
}

function createAppender(hookioOptions) {
  var loggerHook = initHook(hookioOptions);
  var loggerEvent = hookioOptions.name + '::log';
  return getBufferedHook(loggerHook, loggerEvent);
}

function configure(config) {
  var actualAppender;
  if (config.appender && config.mode === 'master') {
    log4js.loadAppender(config.appender.type);
    actualAppender = log4js.appenderMakers[config.appender.type](config.appender);
    config.actualAppender = actualAppender;
  }
  return createAppender(config);
}

exports.appender = createAppender;
exports.configure = configure;
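A sketch of how this appender might be configured in a master and in its workers, inferred from the `configure` function above: only the master wraps a real appender, and `name` becomes part of the hook.io event name. All values are placeholders:

    // master process: owns the wrapped appender
    log4js.configure({
      appenders: [
        { type: 'hookio', name: 'myapp', mode: 'master',
          appender: { type: 'file', filename: 'logs/all.log' } }
      ]
    });

    // worker processes: only emit events to the master hook
    log4js.configure({
      appenders: [
        { type: 'hookio', name: 'myapp', mode: 'worker' }
      ]
    });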
lib/appenders/logLevelFilter.js (new file, 20 additions)
@@ -0,0 +1,20 @@
var levels = require('../levels');
var log4js = require('../log4js');

function logLevelFilter (levelString, appender) {
  var level = levels.toLevel(levelString);
  return function(logEvent) {
    if (logEvent.level.isGreaterThanOrEqualTo(level)) {
      appender(logEvent);
    }
  }
}

function configure(config) {
  log4js.loadAppender(config.appender.type);
  var appender = log4js.appenderMakers[config.appender.type](config.appender);
  return logLevelFilter(config.level, appender);
}

exports.appender = logLevelFilter;
exports.configure = configure;
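A sketch of how this filter is declared in configuration: it wraps another appender definition and only forwards events at or above `level` (the filename is a placeholder):

    log4js.configure({
      appenders: [
        { type: 'logLevelFilter', level: 'ERROR',
          appender: { type: 'file', filename: 'logs/errors.log' } }
      ]
    });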
lib/appenders/multiprocess.js (new file, 76 additions)
@@ -0,0 +1,76 @@
var log4js = require('../log4js');
var layouts = require('../layouts');
var net = require('net');
var util = require('util');

var LogServer = function createLogServer(config) {
  var actualAppender = config.actualAppender;
  var server = net.createServer(function serverCreated(clientSocket) {
    clientSocket.on('connect', function clientConnected() {
      var logMessage = '';
      clientSocket.on('data', function chunkReceived(chunk) {
        logMessage += chunk;
      });
      clientSocket.on('end', function gotChunks() {
        try {
          var loggingEvent = JSON.parse(logMessage);
          deserializeLoggingEvent(loggingEvent);
          actualAppender(loggingEvent);
        } catch (e) {
          // JSON.parse failed, just log the contents probably a naughty.
          actualAppender(createLoggingEvent('ERROR', 'Unable to parse log: ' + logMessage));
        }
      });
    });
  });
  server.listen(config.loggerPort || 5000, config.loggerHost || 'localhost');
}

function createLoggingEvent(level, message) {
  return {
    startTime: new Date(),
    categoryName: 'log4js',
    level: { toString: function () {
      return level;
    }},
    data: [ message ]
  };
}

function deserializeLoggingEvent(loggingEvent) {
  loggingEvent.startTime = new Date(loggingEvent.startTime);
  loggingEvent.level.toString = function levelToString() {
    return loggingEvent.level.levelStr;
  };
}

function workerAppender(config) {
  return function log(loggingEvent) {
    var socket = net.createConnection(config.loggerPort || 5000, config.loggerHost || 'localhost');
    socket.on('connect', function socketConnected() {
      socket.end(JSON.stringify(loggingEvent), 'utf8');
    });
  };
}

function createAppender(config) {
  if (config.mode === 'master') {
    var server = new LogServer(config);
    return config.actualAppender;
  } else {
    return workerAppender(config);
  }
}

function configure(config) {
  var actualAppender;
  if (config.appender && config.mode === 'master') {
    log4js.loadAppender(config.appender.type);
    actualAppender = log4js.appenderMakers[config.appender.type](config.appender);
    config.actualAppender = actualAppender;
  }
  return createAppender(config);
}

exports.appender = createAppender;
exports.configure = configure;
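A sketch of the two sides of this appender, based on the `configure` and `createAppender` functions above: the master listens on `loggerPort`/`loggerHost` and writes through a wrapped appender, while workers serialise each event over TCP. Port and filename values are placeholders:

    // master process: receives events and writes them
    log4js.configure({
      appenders: [
        { type: 'multiprocess', mode: 'master',
          loggerPort: 5001, loggerHost: 'localhost',
          appender: { type: 'file', filename: 'logs/workers.log' } }
      ]
    });

    // worker processes: forward events to the master
    log4js.configure({
      appenders: [
        { type: 'multiprocess', mode: 'worker',
          loggerPort: 5001, loggerHost: 'localhost' }
      ]
    });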
lib/appenders/smtp.js (new file, 75 additions)
@@ -0,0 +1,75 @@
var layouts = require("../layouts"),
    mailer = require("nodemailer");

/**
 * SMTP Appender. Sends logging events using SMTP protocol.
 * It can either send an email on each event or group several logging events gathered during specified interval.
 *
 * @param recipients comma separated list of email recipients
 * @param sender sender of all emails (defaults to SMTP user)
 * @param subject subject of all email messages (defaults to first event's message)
 * @param layout a function that takes a logevent and returns a string (defaults to basicLayout).
 * @param smtpConfig SMTP configuration for 'nodemailer'
 * @param sendInterval the time in seconds between sending attempts (defaults to 0);
 *   all events are buffered and sent in one email during this time; if 0 than every event sends an email
 */
function smtpAppender(recipients, sender, subject, layout, smtpConfig, sendInterval) {
  sender = sender || smtpConfig.user;
  layout = layout || layouts.basicLayout;
  subjectLayout = layouts.messagePassThroughLayout;
  mailer.SMTP = smtpConfig;
  sendInterval = sendInterval*1000 || 0;

  var logEventBuffer = [];
  var sendTimer;

  function sendBuffer() {
    if (logEventBuffer.length == 0)
      return;

    var firstEvent = logEventBuffer[0];
    var body = "";
    while (logEventBuffer.length > 0) {
      body += layout(logEventBuffer.shift()) + "\n";
    }

    var msg = {
      sender: sender,
      to: recipients,
      subject: subject || subjectLayout(firstEvent),
      body: body
    };
    mailer.send_mail(msg, function(error, success) {
      if (error) {
        console.error("log4js.smtpAppender - Error happened ", error);
      }
    });
  }

  function scheduleSend() {
    if (!sendTimer)
      sendTimer = setTimeout(function() {
        sendTimer = null;
        sendBuffer();
      }, sendInterval);
  }

  return function(loggingEvent) {
    logEventBuffer.push(loggingEvent);
    if (sendInterval > 0)
      scheduleSend();
    else
      sendBuffer();
  };
}

function configure(config) {
  var layout;
  if (config.layout) {
    layout = layouts.layout(config.layout.type, config.layout);
  }
  return smtpAppender(config.recipients, config.sender, config.subject, layout, config.smtp, config.sendInterval);
}

exports.appender = smtpAppender;
exports.configure = configure;
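A sketch of a configuration entry for this appender. The top-level keys mirror the parameters read by `configure` above; the contents of `smtp` are handed straight to nodemailer, so the fields shown there are assumptions about that library rather than anything in this diff:

    log4js.configure({
      appenders: [
        { type: 'smtp',
          recipients: 'ops@example.com',
          sender: 'logger@example.com',
          subject: 'Application log',
          sendInterval: 60,   // buffer events for 60s per email; 0 sends one email per event
          smtp: { host: 'smtp.example.com', port: 25 } }   // assumed nodemailer settings
      ]
    });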
@@ -37,13 +37,17 @@ function getLogger(logger4js, options) {

var thislogger = logger4js
, level = levels.toLevel(options.level, levels.INFO)
, fmt = options.format || ':remote-addr - - ":method :url HTTP/:http-version" :status :content-length ":req[referer]" ":user-agent"';
, fmt = options.format || ':remote-addr - - ":method :url HTTP/:http-version" :status :content-length ":referrer" ":user-agent"'
, nolog = options.nolog ? createNoLogCondition(options.nolog) : null;

return function (req, res, next) {

// mount safety
if (req._logging) return next();

// nologs
if (nolog && nolog.test(req.originalUrl)) return next();

if (thislogger.isLevelEnabled(level)) {

var start = +new Date

@@ -75,9 +79,10 @@ function getLogger(logger4js, options) {
thislogger.log(level, format(fmt, req, res));
}
};

next();
}

//ensure next gets always called
next();
};
}

@@ -111,4 +116,53 @@ function format(str, req, res) {
});
}

exports.connectLogger = getLogger;
/**
 * Return RegExp Object about nolog
 *
 * @param {String} nolog
 * @return {RegExp}
 * @api private
 */

/**
 * syntax
 *  1. String
 *   1.1 "\\.gif"
 *     NOT LOGGING http://example.com/hoge.gif and http://example.com/hoge.gif?fuga
 *     LOGGING http://example.com/hoge.agif
 *   1.2 in "\\.gif|\\.jpg$"
 *     NOT LOGGING http://example.com/hoge.gif and http://example.com/hoge.gif?fuga and http://example.com/hoge.jpg?fuga
 *     LOGGING http://example.com/hoge.agif, http://example.com/hoge.ajpg and http://example.com/hoge.jpg?hoge
 *   1.3 in "\\.(gif|jpe?g|png)$"
 *     NOT LOGGING http://example.com/hoge.gif and http://example.com/hoge.jpeg
 *     LOGGING http://example.com/hoge.gif?uid=2 and http://example.com/hoge.jpg?pid=3
 *  2. RegExp
 *   2.1 in /\.(gif|jpe?g|png)$/
 *     SAME AS 1.3
 *  3. Array
 *   3.1 ["\\.jpg$", "\\.png", "\\.gif"]
 *     SAME AS "\\.jpg|\\.png|\\.gif"
 */
function createNoLogCondition(nolog, type) {
  if(!nolog) return null;
  type = type || '';

  if(nolog instanceof RegExp){
    if(type === 'string')
      return nolog.source;
    return nolog;
  } else if(typeof nolog === 'string'){
    if(type === 'string')
      return nolog;
    try{
      return new RegExp(nolog);
    } catch (ex) {
      return null;
    }
  } else if(nolog instanceof Array){
    var regexps = nolog.map(function(o){ return createNoLogCondition(o, 'string')});
    return new RegExp(regexps.join('|'));
  }
}

exports.connectLogger = getLogger;
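The hunks above appear to come from the connect-logger module. Putting its new `nolog` option together with the `format` string already documented in the README gives a sketch like this (the regex is one of the patterns from the comment block above; `app` and `logger` are assumed to exist):

    app.use(log4js.connectLogger(logger, {
      level: log4js.levels.INFO,
      format: ':method :url',
      nolog: '\\.(gif|jpe?g|png)$'   // skip access-log entries for static images
    }));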
@@ -38,13 +38,16 @@ function formatLogData(logData) {
return match;
};
});
if (data.length > 0) {
output += '\n';
}
} else {
//put it back, it's not a format string
data.unshift(format);
}

data.forEach(function (item) {
if (item.stack) {
if (output) {
output += ' ';
}
if (item && item.stack) {
output += item.stack;
} else {
output += util.inspect(item);
@@ -5,24 +5,28 @@ function Level(level, levelStr) {

/**
 * converts given String to corresponding Level
 * @param {String} sArg String value of Level
 * @param {String} sArg String value of Level OR Log4js.Level
 * @param {Log4js.Level} defaultLevel default Level, if no String representation
 * @return Level object
 * @type Log4js.Level
 */
function toLevel(sArg, defaultLevel) {

if (sArg === null) {
return defaultLevel;
}
if (!sArg) {
return defaultLevel;
}

if (typeof sArg == "string") {
var s = sArg.toUpperCase();
if (module.exports[s]) {
return module.exports[s];
} else {
return defaultLevel;
}
}

return toLevel(sArg.toString());

if (typeof sArg == "string") {
var s = sArg.toUpperCase();
if (module.exports[s]) {
return module.exports[s];
}
}
return defaultLevel;
};

Level.prototype.toString = function() {

@@ -31,18 +35,25 @@ Level.prototype.toString = function() {

Level.prototype.isLessThanOrEqualTo = function(otherLevel) {
if (typeof otherLevel === "string") {
otherLevel = Level.toLevel(otherLevel);
otherLevel = toLevel(otherLevel);
}
return this.level <= otherLevel.level;
};

Level.prototype.isGreaterThanOrEqualTo = function(otherLevel) {
if (typeof otherLevel === "string") {
otherLevel = Level.toLevel(otherLevel);
otherLevel = toLevel(otherLevel);
}
return this.level >= otherLevel.level;
};

Level.prototype.isEqualTo = function(otherLevel) {
if (typeof otherLevel == "string") {
otherLevel = toLevel(otherLevel);
}
return this.level === otherLevel.level;
}

module.exports = {
ALL: new Level(Number.MIN_VALUE, "ALL")
, TRACE: new Level(5000, "TRACE")
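These hunks appear to come from the levels module. The reworked `toLevel` and the comparison helpers accept either a Level object or a string, so usage looks roughly like this:

    var levels = require('log4js').levels;

    levels.toLevel('info', levels.TRACE);            // -> levels.INFO; unknown strings fall back to TRACE
    levels.ERROR.isGreaterThanOrEqualTo('WARN');     // true
    levels.DEBUG.isLessThanOrEqualTo(levels.INFO);   // true
    levels.INFO.isEqualTo('INFO');                   // true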
lib/log4js.js (439 changed lines)

@@ -18,8 +18,6 @@
 * @fileoverview log4js is a library to log in JavaScript in similar manner
 * than in log4j for Java. The API should be nearly the same.
 *
 * This file contains all log4js code and is the only file required for logging.
 *
 * <h3>Example:</h3>
 * <pre>
 *  var logging = require('log4js');

@@ -37,6 +35,8 @@
 *  log.trace("trace me" );
 * </pre>
 *
 * NOTE: the authors below are the original browser-based log4js authors
 * don't try to contact them about bugs in this version :)
 * @version 1.0
 * @author Stephan Strittmatter - http://jroller.com/page/stritti
 * @author Seth Chisamore - http://www.chisamore.com

@@ -47,32 +47,20 @@
var events = require('events')
, fs = require('fs')
, path = require('path')
, sys = require('sys')
, util = require('util')
, layouts = require('./layouts')
, levels = require('./levels')
, DEFAULT_CATEGORY = '[default]'
, LoggingEvent = require('./logger').LoggingEvent
, Logger = require('./logger').Logger
, ALL_CATEGORIES = '[all]'
, appenders = {}
, loggers = {}
, appenderMakers = {
"file": function(config, fileAppender) {
var layout;
if (config.layout) {
layout = layouts.layout(config.layout.type, config.layout);
}
return fileAppender(config.filename, layout, config.maxLogSize, config.backups, config.pollInterval);
},
"console": function(config, fileAppender, consoleAppender) {
var layout;
if (config.layout) {
layout = layouts.layout(config.layout.type, config.layout);
}
return consoleAppender(layout);
},
"logLevelFilter": function(config, fileAppender, consoleAppender) {
var appender = appenderMakers[config.appender.type](config.appender, fileAppender, consoleAppender);
return logLevelFilter(config.level, appender);
}
, appenderMakers = {}
, defaultConfig = {
appenders: [
{ type: "console" }
],
replaceConsole: false
};

/**

@@ -85,25 +73,25 @@ function getLogger (categoryName) {

// Use default logger if categoryName is not specified or invalid
if (!(typeof categoryName == "string")) {
categoryName = DEFAULT_CATEGORY;
categoryName = Logger.DEFAULT_CATEGORY;
}

var appenderList;
if (!loggers[categoryName]) {
// Create the logger for this name if it doesn't already exist
loggers[categoryName] = new Logger(categoryName);
if (appenders[categoryName]) {
appenderList = appenders[categoryName];
appenderList.forEach(function(appender) {
loggers[categoryName].addListener("log", appender);
});
}
if (appenders[ALL_CATEGORIES]) {
appenderList = appenders[ALL_CATEGORIES];
appenderList.forEach(function(appender) {
loggers[categoryName].addListener("log", appender);
});
}
// Create the logger for this name if it doesn't already exist
loggers[categoryName] = new Logger(categoryName);
if (appenders[categoryName]) {
appenderList = appenders[categoryName];
appenderList.forEach(function(appender) {
loggers[categoryName].addListener("log", appender);
});
}
if (appenders[ALL_CATEGORIES]) {
appenderList = appenders[ALL_CATEGORIES];
appenderList.forEach(function(appender) {
loggers[categoryName].addListener("log", appender);
});
}
}

return loggers[categoryName];

@@ -116,132 +104,67 @@ function addAppender () {
var args = Array.prototype.slice.call(arguments);
var appender = args.shift();
if (args.length == 0 || args[0] === undefined) {
args = [ ALL_CATEGORIES ];
args = [ ALL_CATEGORIES ];
}
//argument may already be an array
if (Array.isArray(args[0])) {
args = args[0];
args = args[0];
}

args.forEach(function(category) {
if (!appenders[category]) {
appenders[category] = [];
}
appenders[category].push(appender);
if (!appenders[category]) {
appenders[category] = [];
}
appenders[category].push(appender);

if (category === ALL_CATEGORIES) {
for (var logger in loggers) {
if (loggers.hasOwnProperty(logger)) {
loggers[logger].addListener("log", appender);
}
}
} else if (loggers[category]) {
loggers[category].addListener("log", appender);
}
if (category === ALL_CATEGORIES) {
for (var logger in loggers) {
if (loggers.hasOwnProperty(logger)) {
loggers[logger].addListener("log", appender);
}
}
} else if (loggers[category]) {
loggers[category].addListener("log", appender);
}
});
}

function clearAppenders () {
appenders = {};
for (var logger in loggers) {
if (loggers.hasOwnProperty(logger)) {
loggers[logger].removeAllListeners("log");
}
if (loggers.hasOwnProperty(logger)) {
loggers[logger].removeAllListeners("log");
}
}
}

function configureAppenders(appenderList, fileAppender, consoleAppender) {
function configureAppenders(appenderList, options) {
clearAppenders();
if (appenderList) {
appenderList.forEach(function(appenderConfig) {
var appender = appenderMakers[appenderConfig.type](appenderConfig, fileAppender, consoleAppender);
if (appender) {
addAppender(appender, appenderConfig.category);
} else {
throw new Error("log4js configuration problem for "+sys.inspect(appenderConfig));
}
});
} else {
addAppender(consoleAppender);
appenderList.forEach(function(appenderConfig) {
loadAppender(appenderConfig.type);
var appender;
appenderConfig.makers = appenderMakers;
appender = appenderMakers[appenderConfig.type](appenderConfig, options);
if (appender) {
addAppender(appender, appenderConfig.category);
} else {
throw new Error("log4js configuration problem for "+util.inspect(appenderConfig));
}
});
}
}

function configureLevels(levels) {
if (levels) {
for (var category in levels) {
if (levels.hasOwnProperty(category)) {
getLogger(category).setLevel(levels[category]);
}
}
for (var category in levels) {
if (levels.hasOwnProperty(category)) {
getLogger(category).setLevel(levels[category]);
}
}
}
}

/**
 * Models a logging event.
 * @constructor
 * @param {String} categoryName name of category
 * @param {Log4js.Level} level level of message
 * @param {Array} data objects to log
 * @param {Log4js.Logger} logger the associated logger
 * @author Seth Chisamore
 */
function LoggingEvent (categoryName, level, data, logger) {
this.startTime = new Date();
this.categoryName = categoryName;
this.data = data;
this.level = level;
this.logger = logger;
}

/**
 * Logger to log messages.
 * use {@see Log4js#getLogger(String)} to get an instance.
 * @constructor
 * @param name name of category to log to
 * @author Stephan Strittmatter
 */
function Logger (name, level) {
this.category = name || DEFAULT_CATEGORY;

if (! this.level) {
this.__proto__.level = levels.TRACE;
}
}
sys.inherits(Logger, events.EventEmitter);

Logger.prototype.setLevel = function(level) {
this.level = levels.toLevel(level, levels.TRACE);
};

Logger.prototype.removeLevel = function() {
delete this.level;
};

Logger.prototype.log = function(logLevel, args) {
var data = Array.prototype.slice.call(args)
, loggingEvent = new LoggingEvent(this.category, logLevel, data, this);
this.emit("log", loggingEvent);
};

Logger.prototype.isLevelEnabled = function(otherLevel) {
return this.level.isLessThanOrEqualTo(otherLevel);
};

['Trace','Debug','Info','Warn','Error','Fatal'].forEach(
function(levelString) {
var level = levels.toLevel(levelString);
Logger.prototype['is'+levelString+'Enabled'] = function() {
return this.isLevelEnabled(level);
};

Logger.prototype[levelString.toLowerCase()] = function () {
if (this.isLevelEnabled(level)) {
this.log(level, arguments);
}
};
}
);

function setGlobalLogLevel(level) {
Logger.prototype.level = levels.toLevel(level, levels.TRACE);
}

@@ -252,170 +175,144 @@ function setGlobalLogLevel(level) {
 * @static
 */
function getDefaultLogger () {
return getLogger(DEFAULT_CATEGORY);
return getLogger(Logger.DEFAULT_CATEGORY);
}

function logLevelFilter (levelString, appender) {
var level = levels.toLevel(levelString);
return function(logEvent) {
if (logEvent.level.isGreaterThanOrEqualTo(level)) {
appender(logEvent);
}
}
}
var configState = {};

function consoleAppender (layout) {
layout = layout || layouts.colouredLayout;
return function(loggingEvent) {
console._preLog4js_log(layout(loggingEvent));
};
}

/**
 * File Appender writing the logs to a text file. Supports rolling of logs by size.
 *
 * @param file file log messages will be written to
 * @param layout a function that takes a logevent and returns a string (defaults to basicLayout).
 * @param logSize - the maximum size (in bytes) for a log file, if not provided then logs won't be rotated.
 * @param numBackups - the number of log files to keep after logSize has been reached (default 5)
 * @param filePollInterval - the time in seconds between file size checks (default 30s)
 */
function fileAppender (file, layout, logSize, numBackups, filePollInterval) {
layout = layout || layouts.basicLayout;
var logFile = fs.createWriteStream(file, { flags: 'a', mode: 0644, encoding: 'utf8' });

if (logSize > 0) {
setupLogRolling(logFile, file, logSize, numBackups || 5, (filePollInterval * 1000) || 30000);
}

//close the file on process exit.
process.on('exit', function() {
logFile.end();
logFile.destroySoon();
});

return function(loggingEvent) {
logFile.write(layout(loggingEvent)+'\n');
};
}

function setupLogRolling (logFile, filename, logSize, numBackups, filePollInterval) {
fs.watchFile(
filename,
{
persistent: false,
interval: filePollInterval
},
function (curr, prev) {
if (curr.size >= logSize) {
rollThatLog(logFile, filename, numBackups);
}
}
);
}

function rollThatLog (logFile, filename, numBackups) {
//first close the current one.
logFile.end();
logFile.destroySoon();
//roll the backups (rename file.n-1 to file.n, where n <= numBackups)
for (var i=numBackups; i > 0; i--) {
if (i > 1) {
if (fileExists(filename + '.' + (i-1))) {
fs.renameSync(filename+'.'+(i-1), filename+'.'+i);
}
} else {
fs.renameSync(filename, filename+'.1');
}
}
//open it up again
logFile = fs.createWriteStream(filename, { flags: 'a', mode: 0644, encoding: "utf8" });
}

function fileExists (filename) {
try {
fs.statSync(filename);
return true;
} catch (e) {
return false;
}
}

function configure (configurationFileOrObject) {
var config = configurationFileOrObject;
if (typeof(config) === "string") {
config = JSON.parse(fs.readFileSync(config, "utf8"));
}
if (config) {
try {
configureAppenders(config.appenders, fileAppender, consoleAppender);
configureLevels(config.levels);
} catch (e) {
throw new Error("Problem reading log4js config " + sys.inspect(config) + ". Error was \"" + e.message + "\" ("+e.stack+")");
}
}
}

function findConfiguration() {
//add current directory onto the list of configPaths
var paths = ['.'].concat(require.paths);
//add this module's directory to the end of the list, so that we pick up the default config
paths.push(__dirname);
var pathsWithConfig = paths.filter( function (pathToCheck) {
try {
fs.statSync(path.join(pathToCheck, "log4js.json"));
return true;
} catch (e) {
return false;
}
});
if (pathsWithConfig.length > 0) {
return path.join(pathsWithConfig[0], 'log4js.json');
function loadConfigurationFile(filename) {
if (filename && (!configState.lastFilename || filename !== configState.lastFilename ||
!configState.lastMTime || fs.statSync(filename).mtime !== configState.lastMTime)) {
configState.lastFilename = filename;
configState.lastMTime = fs.statSync(filename).mtime;
return JSON.parse(fs.readFileSync(filename, "utf8"));
}
return undefined;
}

function configureOnceOff(config, options) {
if (config) {
try {
configureAppenders(config.appenders, options);
configureLevels(config.levels);

if (config.replaceConsole) {
replaceConsole();
} else {
restoreConsole();
}
} catch (e) {
throw new Error("Problem reading log4js config " + util.inspect(config) + ". Error was \"" + e.message + "\" ("+e.stack+")");
}
}
}

function reloadConfiguration() {
var filename = configState.filename,
mtime;
if (!filename) {
// can't find anything to reload
return;
}
try {
mtime = fs.statSync(filename).mtime;
} catch (e) {
getLogger('log4js').warn('Failed to load configuration file ' + filename);
return;
}
if (configState.lastFilename && configState.lastFilename === filename) {
if (mtime.getTime() > configState.lastMTime.getTime()) {
configureOnceOff(loadConfigurationFile(filename));
}
} else {
configureOnceOff(loadConfigurationFile(filename));
}
}

function initReloadConfiguration(filename, options) {
if (configState.timerId) {
clearInterval(configState.timerId);
delete configState.timerId;
}
configState.filename = filename;
configState.timerId = setInterval(reloadConfiguration, options.reloadSecs*1000);
}

function configure(configurationFileOrObject, options) {
var config = configurationFileOrObject;
options = options || {};

if (config === undefined || config === null || typeof(config) === 'string') {
if (options.reloadSecs) {
initReloadConfiguration(config, options);
}
config = loadConfigurationFile(config) || defaultConfig;
} else {
if (options.reloadSecs) {
getLogger('log4js').warn('Ignoring configuration reload parameter for "object" configuration.');
}
}
configureOnceOff(config, options);
}

var originalConsoleFunctions = {
log: console.log,
debug: console.debug,
info: console.info,
warn: console.warn,
error: console.error
};

function replaceConsole(logger) {
function replaceWith(fn) {
return function() {
fn.apply(logger, arguments);
}
}

logger = logger || getLogger("console");
['log','debug','info','warn','error'].forEach(function (item) {
console['_preLog4js_'+item] = console[item];
console[item] = replaceWith(item === 'log' ? logger.info : logger[item]);
});

}

//set ourselves up if we can find a default log4js.json
configure(findConfiguration());
//replace console.log, etc with log4js versions
replaceConsole(getLogger("console"));
function restoreConsole() {
['log', 'debug', 'info', 'warn', 'error'].forEach(function (item) {
console[item] = originalConsoleFunctions[item];
});
}

function loadAppender(appender) {
var appenderModule;
try {
appenderModule = require('./appenders/' + appender);
} catch (e) {
appenderModule = require(appender);
}
module.exports.appenders[appender] = appenderModule.appender.bind(appenderModule);
appenderMakers[appender] = appenderModule.configure.bind(appenderModule);
}

module.exports = {
getLogger: getLogger,
getDefaultLogger: getDefaultLogger,

addAppender: addAppender,
loadAppender: loadAppender,
clearAppenders: clearAppenders,
configure: configure,

replaceConsole: replaceConsole,
restoreConsole: restoreConsole,

levels: levels,
setGlobalLogLevel: setGlobalLogLevel,

consoleAppender: consoleAppender,
fileAppender: fileAppender,
logLevelFilter: logLevelFilter,

layouts: layouts,
connectLogger: require('./connect-logger').connectLogger(this)
appenders: {},
appenderMakers: appenderMakers,
connectLogger: require('./connect-logger').connectLogger
};

//keep the old-style layouts
['basicLayout','messagePassThroughLayout','colouredLayout','coloredLayout'].forEach(function(item) {
module.exports[item] = layouts[item];
});
//set ourselves up
configure();
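The new `loadAppender` tries `lib/appenders/<name>` first and then falls back to a plain `require(name)`, so a third-party appender only needs to export `appender` and `configure`. A minimal hypothetical module (the module name and everything in it are invented for illustration):

    // my-stdout-appender.js -- hypothetical external appender
    function stdoutAppender(layout) {
      return function (loggingEvent) {
        process.stdout.write(layout(loggingEvent) + '\n');
      };
    }

    function configure(config) {
      // a real module would normally build a layout from config.layout here
      return stdoutAppender(function (evt) { return evt.data.join(' '); });
    }

    exports.appender = stdoutAppender;
    exports.configure = configure;

Provided the module can be resolved by `require('my-stdout-appender')`, it can then be referenced from configuration as `{ type: 'my-stdout-appender' }` or loaded explicitly with `log4js.loadAppender('my-stdout-appender')`.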
lib/logger.js (new file, 78 additions)
@@ -0,0 +1,78 @@
var levels = require('./levels'),
    util = require('util'),
    events = require('events'),
    DEFAULT_CATEGORY = '[default]';

/**
 * Models a logging event.
 * @constructor
 * @param {String} categoryName name of category
 * @param {Log4js.Level} level level of message
 * @param {Array} data objects to log
 * @param {Log4js.Logger} logger the associated logger
 * @author Seth Chisamore
 */
function LoggingEvent (categoryName, level, data, logger) {
  this.startTime = new Date();
  this.categoryName = categoryName;
  this.data = data;
  this.level = level;
  this.logger = logger;
}

/**
 * Logger to log messages.
 * use {@see Log4js#getLogger(String)} to get an instance.
 * @constructor
 * @param name name of category to log to
 * @author Stephan Strittmatter
 */
function Logger (name, level) {
  this.category = name || DEFAULT_CATEGORY;

  if (! this.level) {
    this.__proto__.level = levels.TRACE;
  }
}
util.inherits(Logger, events.EventEmitter);
Logger.DEFAULT_CATEGORY = DEFAULT_CATEGORY;

Logger.prototype.setLevel = function(level) {
  this.level = levels.toLevel(level, this.level || levels.TRACE);
};

Logger.prototype.removeLevel = function() {
  delete this.level;
};

Logger.prototype.log = function() {
  var args = Array.prototype.slice.call(arguments)
  , logLevel = args.shift()
  , loggingEvent = new LoggingEvent(this.category, logLevel, args, this);
  this.emit("log", loggingEvent);
};

Logger.prototype.isLevelEnabled = function(otherLevel) {
  return this.level.isLessThanOrEqualTo(otherLevel);
};

['Trace','Debug','Info','Warn','Error','Fatal'].forEach(
  function(levelString) {
    var level = levels.toLevel(levelString);
    Logger.prototype['is'+levelString+'Enabled'] = function() {
      return this.isLevelEnabled(level);
    };

    Logger.prototype[levelString.toLowerCase()] = function () {
      if (this.isLevelEnabled(level)) {
        var args = Array.prototype.slice.call(arguments);
        args.unshift(level);
        Logger.prototype.log.apply(this, args);
      }
    };
  }
);

exports.LoggingEvent = LoggingEvent;
exports.Logger = Logger;
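Since `Logger` is an `EventEmitter` that emits one `"log"` event (a `LoggingEvent`) per call, which is exactly how `addAppender` attaches appenders, a bare-bones listener looks like the sketch below. The require path assumes you are at the repository root; normally you would go through `log4js.getLogger` instead of constructing a Logger directly:

    var Logger = require('./lib/logger').Logger;

    var log = new Logger('demo');
    log.addListener('log', function (event) {
      // event: { startTime, categoryName, level, data, logger }
      console.log('[%s] %s - %s', event.level, event.categoryName, event.data.join(' '));
    });
    log.setLevel('INFO');
    log.info('hello', 'world');   // emitted
    log.debug('not emitted');     // below INFO, filtered out by isLevelEnabled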
268
lib/streams.js
Normal file
268
lib/streams.js
Normal file
@@ -0,0 +1,268 @@
|
||||
var util = require('util'),
|
||||
fs = require('fs'),
|
||||
path = require('path'),
|
||||
events = require('events'),
|
||||
async = require('async');
|
||||
|
||||
function debug(message) {
|
||||
// util.debug(message);
|
||||
// console.log(message);
|
||||
}
|
||||
|
||||
function BufferedWriteStream(stream) {
|
||||
var that = this;
|
||||
this.stream = stream;
|
||||
this.buffer = [];
|
||||
this.canWrite = false;
|
||||
this.bytes = 0;
|
||||
|
||||
this.stream.on("open", function() {
|
||||
that.canWrite = true;
|
||||
that.flushBuffer();
|
||||
});
|
||||
|
||||
this.stream.on("error", function (err) {
|
||||
that.emit("error", err);
|
||||
});
|
||||
|
||||
this.stream.on("drain", function() {
|
||||
that.canWrite = true;
|
||||
that.flushBuffer();
|
||||
});
|
||||
}
|
||||
|
||||
util.inherits(BufferedWriteStream, events.EventEmitter);
|
||||
|
||||
Object.defineProperty(
|
||||
BufferedWriteStream.prototype,
|
||||
"fd",
|
||||
{
|
||||
get: function() { return this.stream.fd; },
|
||||
set: function(newFd) {
|
||||
this.stream.fd = newFd;
|
||||
this.bytes = 0;
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
Object.defineProperty(
|
||||
BufferedWriteStream.prototype,
|
||||
"bytesWritten",
|
||||
{
|
||||
get: function() { return this.bytes; }
|
||||
}
|
||||
);
|
||||
|
||||
BufferedWriteStream.prototype.write = function(data, encoding) {
|
||||
this.buffer.push({ data: data, encoding: encoding });
|
||||
this.flushBuffer();
|
||||
};
|
||||
|
||||
BufferedWriteStream.prototype.end = function(data, encoding) {
|
||||
if (data) {
|
||||
this.buffer.push({ data: data, encoding: encoding });
|
||||
}
|
||||
this.flushBufferEvenIfCannotWrite();
|
||||
};
|
||||
|
||||
BufferedWriteStream.prototype.writeToStream = function(toWrite) {
|
||||
this.bytes += toWrite.data.length;
|
||||
this.canWrite = this.stream.write(toWrite.data, toWrite.encoding);
|
||||
};
|
||||
|
||||
BufferedWriteStream.prototype.flushBufferEvenIfCannotWrite = function() {
|
||||
while (this.buffer.length > 0) {
|
||||
this.writeToStream(this.buffer.shift());
|
||||
}
|
||||
};
|
||||
|
||||
BufferedWriteStream.prototype.flushBuffer = function() {
|
||||
while (this.buffer.length > 0 && this.canWrite) {
|
||||
this.writeToStream(this.buffer.shift());
|
||||
}
|
||||
};
|
||||
|
||||
function BaseRollingFileStream(filename, options) {
  this.filename = filename;
  this.options = options || { encoding: 'utf8', mode: 0644, flags: 'a' };
  this.rolling = false;
  this.writesWhileRolling = [];
  this.currentSize = 0;

  function currentFileSize(file) {
    var fileSize = 0;
    try {
      fileSize = fs.statSync(file).size;
    } catch (e) {
      // file does not exist
    }
    return fileSize;
  }

  function throwErrorIfArgumentsAreNotValid() {
    if (!filename) {
      throw new Error("You must specify a filename");
    }
  }

  throwErrorIfArgumentsAreNotValid();

  BaseRollingFileStream.super_.call(this, this.filename, this.options);
  this.currentSize = currentFileSize(this.filename);
}
util.inherits(BaseRollingFileStream, fs.FileWriteStream);

BaseRollingFileStream.prototype.initRolling = function() {
  var that = this;

  function emptyRollingQueue() {
    debug("emptying the rolling queue");
    var toWrite;
    while ((toWrite = that.writesWhileRolling.shift())) {
      BaseRollingFileStream.super_.prototype.write.call(that, toWrite.data, toWrite.encoding);
      that.currentSize += toWrite.data.length;
      if (that.shouldRoll()) {
        that.flush();
        return true;
      }
    }
    that.flush();
    return false;
  }

  this.rolling = true;
  this.roll(this.filename, function() {
    that.currentSize = 0;
    that.rolling = emptyRollingQueue();
    if (that.rolling) {
      process.nextTick(function() { that.initRolling(); });
    }
  });
};

BaseRollingFileStream.prototype.write = function(data, encoding) {
  if (this.rolling) {
    this.writesWhileRolling.push({ data: data, encoding: encoding });
    return false;
  } else {
    var canWrite = BaseRollingFileStream.super_.prototype.write.call(this, data, encoding);
    this.currentSize += data.length;
    debug('current size = ' + this.currentSize);
    if (this.shouldRoll()) {
      this.initRolling();
    }
    return canWrite;
  }
};

BaseRollingFileStream.prototype.shouldRoll = function() {
  return false; // default behaviour is never to roll
};

BaseRollingFileStream.prototype.roll = function(filename, callback) {
  callback(); // default behaviour is not to do anything
};

function RollingFileStream (filename, size, backups, options) {
  this.size = size;
  this.backups = backups || 1;

  function throwErrorIfArgumentsAreNotValid() {
    if (!filename || !size || size <= 0) {
      throw new Error("You must specify a filename and file size");
    }
  }

  throwErrorIfArgumentsAreNotValid();

  RollingFileStream.super_.call(this, filename, options);
}
util.inherits(RollingFileStream, BaseRollingFileStream);

RollingFileStream.prototype.shouldRoll = function() {
  return this.currentSize >= this.size;
};

RollingFileStream.prototype.roll = function(filename, callback) {
  var that = this,
      nameMatcher = new RegExp('^' + path.basename(filename));

  function justTheseFiles (item) {
    return nameMatcher.test(item);
  }

  function index(filename_) {
    return parseInt(filename_.substring((path.basename(filename) + '.').length), 10) || 0;
  }

  function byIndex(a, b) {
    if (index(a) > index(b)) {
      return 1;
    } else if (index(a) < index(b) ) {
      return -1;
    } else {
      return 0;
    }
  }

  function increaseFileIndex (fileToRename, cb) {
    var idx = index(fileToRename);
    debug('Index of ' + fileToRename + ' is ' + idx);
    if (idx < that.backups) {
      //on windows, you can get a EEXIST error if you rename a file to an existing file
      //so, we'll try to delete the file we're renaming to first
      fs.unlink(filename + '.' + (idx+1), function (err) {
        //ignore err: if we could not delete, it's most likely that it doesn't exist
        debug('Renaming ' + fileToRename + ' -> ' + filename + '.' + (idx+1));
        fs.rename(path.join(path.dirname(filename), fileToRename), filename + '.' + (idx + 1), cb);
      });
    } else {
      cb();
    }
  }

  function renameTheFiles(cb) {
    //roll the backups (rename file.n to file.n+1, where n <= numBackups)
    debug("Renaming the old files");
    fs.readdir(path.dirname(filename), function (err, files) {
      async.forEachSeries(
        files.filter(justTheseFiles).sort(byIndex).reverse(),
        increaseFileIndex,
        cb
      );
    });
  }

  function openANewFile(cb) {
    debug("Opening a new file");
    fs.open(
      filename,
      that.options.flags,
      that.options.mode,
      function (err, fd) {
        debug("opened new file");
        var oldLogFileFD = that.fd;
        that.fd = fd;
        that.writable = true;
        fs.close(oldLogFileFD, cb);
      }
    );
  }

  debug("Starting roll");
  debug("Queueing up data until we've finished rolling");
  debug("Flushing underlying stream");
  this.flush();

  async.series([
    renameTheFiles,
    openANewFile
  ], callback);

};


exports.BaseRollingFileStream = BaseRollingFileStream;
exports.RollingFileStream = RollingFileStream;
exports.BufferedWriteStream = BufferedWriteStream;
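A short sketch of the rolling stream's constructor as defined above (filename, maximum size in bytes, number of backups); the path and numbers are made up, and like the code above it relies on the Node 0.6-era fs.FileWriteStream:

var RollingFileStream = require('./lib/streams').RollingFileStream;

// Roll once example.log reaches 1024 bytes, keeping at most two backups
// (example.log.1 and example.log.2).
var stream = new RollingFileStream('/tmp/example.log', 1024, 2);
stream.write('a log line\n', 'utf8');
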
27 log-rolling.js Normal file
@@ -0,0 +1,27 @@
var log4js = require('./lib/log4js')
, log
, i = 0;
log4js.configure({
  "appenders": [
    {
      type: "console"
    , category: "console"
    },
    {
      "type": "file",
      "filename": "tmp-test.log",
      "maxLogSize": 1024,
      "backups": 3,
      "category": "test"
    }
  ]
});
log = log4js.getLogger("test");

function doTheLogging(x) {
  log.info("Logging something %d", x);
}

for ( ; i < 5000; i++) {
  doTheLogging(i);
}
26 package.json
@@ -1,27 +1,37 @@
{
  "name": "log4js",
  "version": "0.3.0",
  "version": "0.5.0",
  "description": "Port of Log4js to work with node.",
  "keywords": [
    "logging",
    "log",
    "log4j",
    "node"
    "node"
  ],
  "main": "./lib/log4js",
  "author": "Gareth Jones <gareth.jones@sensis.com.au>",
  "bugs": {
    "web": "http://github.com/csausdev/log4js-node/issues"
  "repository": {
    "type": "git",
    "url": "https://github.com/nomiddlename/log4js-node.git"
  },
  "engines": [ "node >=0.1.100" ],
  "bugs": {
    "url": "http://github.com/nomiddlename/log4js-node/issues"
  },
  "engines": [ "node >=0.6" ],
  "scripts": {
    "test": "vows test/*.js"
    "test": "vows test/*.js"
  },
  "directories": {
    "test": "test",
    "lib": "lib"
  },
  "dependencies": {
    "async": "0.1.15"
  },
  "devDependencies": {
    "vows": ">=0.5.2"
  }
    "vows": "0.6.2",
    "sandboxed-module": "0.1.3",
    "hook.io": "0.8.10",
    "underscore": "1.2.1"
  }
}

130 test/bufferedStream.js Normal file
@@ -0,0 +1,130 @@
|
||||
var vows = require('vows')
|
||||
, assert = require('assert')
|
||||
, events = require('events')
|
||||
, BufferedWriteStream = require('../lib/streams').BufferedWriteStream;
|
||||
|
||||
function FakeStream() {
|
||||
this.writes = [];
|
||||
this.canWrite = false;
|
||||
this.callbacks = {};
|
||||
}
|
||||
|
||||
FakeStream.prototype.on = function(event, callback) {
|
||||
this.callbacks[event] = callback;
|
||||
}
|
||||
|
||||
FakeStream.prototype.write = function(data, encoding) {
|
||||
assert.equal("utf8", encoding);
|
||||
this.writes.push(data);
|
||||
return this.canWrite;
|
||||
}
|
||||
|
||||
FakeStream.prototype.emit = function(event, payload) {
|
||||
this.callbacks[event](payload);
|
||||
}
|
||||
|
||||
FakeStream.prototype.block = function() {
|
||||
this.canWrite = false;
|
||||
}
|
||||
|
||||
FakeStream.prototype.unblock = function() {
|
||||
this.canWrite = true;
|
||||
this.emit("drain");
|
||||
}
|
||||
|
||||
vows.describe('BufferedWriteStream').addBatch({
|
||||
'stream': {
|
||||
topic: new BufferedWriteStream(new FakeStream()),
|
||||
'should take a stream as an argument and return a stream': function(stream) {
|
||||
assert.instanceOf(stream, events.EventEmitter);
|
||||
}
|
||||
},
|
||||
'before stream is open': {
|
||||
topic: function() {
|
||||
var fakeStream = new FakeStream(),
|
||||
stream = new BufferedWriteStream(fakeStream);
|
||||
stream.write("Some data", "utf8");
|
||||
stream.write("Some more data", "utf8");
|
||||
return fakeStream.writes;
|
||||
},
|
||||
'should buffer writes': function(writes) {
|
||||
assert.equal(writes.length, 0);
|
||||
}
|
||||
},
|
||||
'when stream is open': {
|
||||
topic: function() {
|
||||
var fakeStream = new FakeStream(),
|
||||
stream = new BufferedWriteStream(fakeStream);
|
||||
stream.write("Some data", "utf8");
|
||||
fakeStream.canWrite = true;
|
||||
fakeStream.emit("open");
|
||||
stream.write("Some more data", "utf8");
|
||||
return fakeStream.writes;
|
||||
},
|
||||
'should write data to stream from before stream was open': function (writes) {
|
||||
assert.equal(writes[0], "Some data");
|
||||
},
|
||||
'should write data to stream from after stream was open': function (writes) {
|
||||
assert.equal(writes[1], "Some more data");
|
||||
}
|
||||
},
|
||||
'when stream is blocked': {
|
||||
topic: function() {
|
||||
var fakeStream = new FakeStream(),
|
||||
stream = new BufferedWriteStream(fakeStream);
|
||||
fakeStream.emit("open");
|
||||
fakeStream.block();
|
||||
stream.write("will not know it is blocked until first write", "utf8");
|
||||
stream.write("so this one will be buffered, but not the previous one", "utf8");
|
||||
return fakeStream.writes;
|
||||
},
|
||||
'should buffer writes': function (writes) {
|
||||
assert.equal(writes.length, 1);
|
||||
assert.equal(writes[0], "will not know it is blocked until first write");
|
||||
}
|
||||
},
|
||||
'when stream is unblocked': {
|
||||
topic: function() {
|
||||
var fakeStream = new FakeStream(),
|
||||
stream = new BufferedWriteStream(fakeStream);
|
||||
fakeStream.emit("open");
|
||||
fakeStream.block();
|
||||
stream.write("will not know it is blocked until first write", "utf8");
|
||||
stream.write("so this one will be buffered, but not the previous one", "utf8");
|
||||
fakeStream.unblock();
|
||||
return fakeStream.writes;
|
||||
},
|
||||
'should send buffered data': function (writes) {
|
||||
assert.equal(writes.length, 2);
|
||||
assert.equal(writes[1], "so this one will be buffered, but not the previous one");
|
||||
}
|
||||
},
|
||||
'when stream is closed': {
|
||||
topic: function() {
|
||||
var fakeStream = new FakeStream(),
|
||||
stream = new BufferedWriteStream(fakeStream);
|
||||
fakeStream.emit("open");
|
||||
fakeStream.block();
|
||||
stream.write("first write to notice stream is blocked", "utf8");
|
||||
stream.write("data while blocked", "utf8");
|
||||
stream.end();
|
||||
return fakeStream.writes;
|
||||
},
|
||||
'should send any buffered writes to the stream': function (writes) {
|
||||
assert.equal(writes.length, 2);
|
||||
assert.equal(writes[1], "data while blocked");
|
||||
}
|
||||
},
|
||||
'when stream errors': {
|
||||
topic: function() {
|
||||
var fakeStream = new FakeStream(),
|
||||
stream = new BufferedWriteStream(fakeStream);
|
||||
stream.on("error", this.callback);
|
||||
fakeStream.emit("error", "oh noes!");
|
||||
},
|
||||
'should emit error': function(err, value) {
|
||||
assert.equal(err, "oh noes!");
|
||||
}
|
||||
}
|
||||
|
||||
}).exportTo(module);
|
||||
89 test/configuration.js Normal file
@@ -0,0 +1,89 @@
|
||||
var assert = require('assert'),
|
||||
vows = require('vows'),
|
||||
sandbox = require('sandboxed-module');
|
||||
|
||||
function makeTestAppender() {
|
||||
return {
|
||||
configure: function(config, options) {
|
||||
this.configureCalled = true;
|
||||
this.config = config;
|
||||
this.options = options;
|
||||
return this.appender();
|
||||
},
|
||||
appender: function() {
|
||||
var self = this;
|
||||
return function(logEvt) { self.logEvt = logEvt; }
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
vows.describe('log4js configure').addBatch({
|
||||
'appenders': {
|
||||
'when specified by type': {
|
||||
topic: function() {
|
||||
var testAppender = makeTestAppender(),
|
||||
log4js = sandbox.require(
|
||||
'../lib/log4js',
|
||||
{
|
||||
requires: {
|
||||
'./appenders/cheese': testAppender
|
||||
}
|
||||
}
|
||||
);
|
||||
log4js.configure(
|
||||
{
|
||||
appenders: [
|
||||
{ type: "cheese", flavour: "gouda" }
|
||||
]
|
||||
},
|
||||
{ pants: "yes" }
|
||||
);
|
||||
return testAppender;
|
||||
},
|
||||
'should load appender': function(testAppender) {
|
||||
assert.ok(testAppender.configureCalled);
|
||||
},
|
||||
'should pass config to appender': function(testAppender) {
|
||||
assert.equal(testAppender.config.flavour, 'gouda');
|
||||
},
|
||||
'should pass log4js options to appender': function(testAppender) {
|
||||
assert.equal(testAppender.options.pants, 'yes');
|
||||
}
|
||||
},
|
||||
'when core appender loaded via loadAppender': {
|
||||
topic: function() {
|
||||
var testAppender = makeTestAppender(),
|
||||
log4js = sandbox.require(
|
||||
'../lib/log4js',
|
||||
{ requires: { './appenders/cheese': testAppender } }
|
||||
);
|
||||
|
||||
log4js.loadAppender('cheese');
|
||||
return log4js;
|
||||
},
|
||||
'should load appender from ../lib/appenders': function(log4js) {
|
||||
assert.ok(log4js.appenders.cheese);
|
||||
},
|
||||
'should add appender configure function to appenderMakers' : function(log4js) {
|
||||
assert.isFunction(log4js.appenderMakers.cheese);
|
||||
}
|
||||
},
|
||||
'when appender in node_modules loaded via loadAppender': {
|
||||
topic: function() {
|
||||
var testAppender = makeTestAppender(),
|
||||
log4js = sandbox.require(
|
||||
'../lib/log4js',
|
||||
{ requires: { 'some/other/external': testAppender } }
|
||||
);
|
||||
log4js.loadAppender('some/other/external');
|
||||
return log4js;
|
||||
},
|
||||
'should load appender via require': function(log4js) {
|
||||
assert.ok(log4js.appenders['some/other/external']);
|
||||
},
|
||||
'should add appender configure function to appenderMakers': function(log4js) {
|
||||
assert.isFunction(log4js.appenderMakers['some/other/external']);
|
||||
}
|
||||
}
|
||||
}
|
||||
}).exportTo(module);
|
||||
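The configuration test above exercises log4js.loadAppender, which expects an appender module to expose configure(config, options) returning a logging function. A hypothetical third-party appender following that shape might look like this (module name, prefix option, and output format are invented for illustration):

// some/other/external.js -- hypothetical appender module, not part of the diff
exports.name = 'external';

exports.appender = function (prefix) {
  // the returned function receives each logging event
  return function (loggingEvent) {
    console.log(prefix, loggingEvent.level.toString(), loggingEvent.data.join(' '));
  };
};

// log4js calls configure(config, options) and wires up the returned function
exports.configure = function (config, options) {
  return exports.appender(config.prefix || '[external]');
};
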
155 test/fileAppender.js Normal file
@@ -0,0 +1,155 @@
|
||||
var vows = require('vows')
|
||||
, fs = require('fs')
|
||||
, path = require('path')
|
||||
, log4js = require('../lib/log4js')
|
||||
, assert = require('assert');
|
||||
|
||||
log4js.clearAppenders();
|
||||
|
||||
function remove(filename) {
|
||||
try {
|
||||
fs.unlinkSync(filename);
|
||||
} catch (e) {
|
||||
//doesn't really matter if it failed
|
||||
}
|
||||
}
|
||||
|
||||
vows.describe('log4js fileAppender').addBatch({
|
||||
|
||||
'with default fileAppender settings': {
|
||||
topic: function() {
|
||||
var that = this
|
||||
, testFile = path.join(__dirname, '/fa-default-test.log')
|
||||
, logger = log4js.getLogger('default-settings');
|
||||
remove(testFile);
|
||||
//log4js.configure({ appenders:[ { type: "file", filename: testFile, category: 'default-settings' } ] });
|
||||
log4js.clearAppenders();
|
||||
log4js.addAppender(require('../lib/appenders/file').appender(testFile), 'default-settings');
|
||||
|
||||
logger.info("This should be in the file.");
|
||||
|
||||
setTimeout(function() {
|
||||
fs.readFile(testFile, "utf8", that.callback);
|
||||
}, 100);
|
||||
},
|
||||
'should write log messages to the file': function(err, fileContents) {
|
||||
assert.include(fileContents, "This should be in the file.\n");
|
||||
},
|
||||
'log messages should be in the basic layout format': function(err, fileContents) {
|
||||
assert.match(fileContents, /\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}\] \[INFO\] default-settings - /);
|
||||
}
|
||||
},
|
||||
'with a max file size and no backups': {
|
||||
topic: function() {
|
||||
var testFile = path.join(__dirname, '/fa-maxFileSize-test.log')
|
||||
, logger = log4js.getLogger('max-file-size')
|
||||
, that = this;
|
||||
remove(testFile);
|
||||
remove(testFile + '.1');
|
||||
//log file of 100 bytes maximum, no backups
|
||||
log4js.clearAppenders();
|
||||
log4js.addAppender(require('../lib/appenders/file').appender(testFile, log4js.layouts.basicLayout, 100, 0), 'max-file-size');
|
||||
logger.info("This is the first log message.");
|
||||
logger.info("This is an intermediate log message.");
|
||||
logger.info("This is the second log message.");
|
||||
//wait for the file system to catch up
|
||||
setTimeout(function() {
|
||||
fs.readFile(testFile, "utf8", that.callback);
|
||||
}, 100);
|
||||
},
|
||||
'log file should only contain the second message': function(err, fileContents) {
|
||||
assert.include(fileContents, "This is the second log message.\n");
|
||||
assert.equal(fileContents.indexOf("This is the first log message."), -1);
|
||||
},
|
||||
'the number of files': {
|
||||
topic: function() {
|
||||
fs.readdir(__dirname, this.callback);
|
||||
},
|
||||
'starting with the test file name should be two': function(err, files) {
|
||||
//there will always be one backup if you've specified a max log size
|
||||
var logFiles = files.filter(function(file) { return file.indexOf('fa-maxFileSize-test.log') > -1; });
|
||||
assert.equal(logFiles.length, 2);
|
||||
}
|
||||
}
|
||||
},
|
||||
'with a max file size and 2 backups': {
|
||||
topic: function() {
|
||||
var testFile = path.join(__dirname, '/fa-maxFileSize-with-backups-test.log')
|
||||
, logger = log4js.getLogger('max-file-size-backups');
|
||||
remove(testFile);
|
||||
remove(testFile+'.1');
|
||||
remove(testFile+'.2');
|
||||
|
||||
//log file of 50 bytes maximum, 2 backups
|
||||
log4js.clearAppenders();
|
||||
log4js.addAppender(require('../lib/appenders/file').appender(testFile, log4js.layouts.basicLayout, 50, 2), 'max-file-size-backups');
|
||||
logger.info("This is the first log message.");
|
||||
logger.info("This is the second log message.");
|
||||
logger.info("This is the third log message.");
|
||||
logger.info("This is the fourth log message.");
|
||||
var that = this;
|
||||
//give the system a chance to open the stream
|
||||
setTimeout(function() {
|
||||
fs.readdir(__dirname, that.callback);
|
||||
}, 200);
|
||||
},
|
||||
'the log files': {
|
||||
topic: function(files) {
|
||||
var logFiles = files.filter(function(file) { return file.indexOf('fa-maxFileSize-with-backups-test.log') > -1; });
|
||||
return logFiles;
|
||||
},
|
||||
'should be 3': function (files) {
|
||||
assert.equal(files.length, 3);
|
||||
},
|
||||
'should be named in sequence': function (files) {
|
||||
assert.deepEqual(files.sort(), ['fa-maxFileSize-with-backups-test.log', 'fa-maxFileSize-with-backups-test.log.1', 'fa-maxFileSize-with-backups-test.log.2']);
|
||||
},
|
||||
'and the contents of the first file': {
|
||||
topic: function(logFiles) {
|
||||
fs.readFile(path.join(__dirname, logFiles[0]), "utf8", this.callback);
|
||||
},
|
||||
'should be empty because the last log message triggers rolling': function(contents) {
|
||||
assert.isEmpty(contents);
|
||||
}
|
||||
},
|
||||
'and the contents of the second file': {
|
||||
topic: function(logFiles) {
|
||||
fs.readFile(path.join(__dirname, logFiles[1]), "utf8", this.callback);
|
||||
},
|
||||
'should be the last log message': function(contents) {
|
||||
assert.include(contents, 'This is the fourth log message.');
|
||||
}
|
||||
},
|
||||
'and the contents of the third file': {
|
||||
topic: function(logFiles) {
|
||||
fs.readFile(path.join(__dirname, logFiles[2]), "utf8", this.callback);
|
||||
},
|
||||
'should be the third log message': function(contents) {
|
||||
assert.include(contents, 'This is the third log message.');
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}).addBatch({
|
||||
'configure' : {
|
||||
'with fileAppender': {
|
||||
topic: function() {
|
||||
var log4js = require('../lib/log4js')
|
||||
, logger;
|
||||
//this config file defines one file appender (to ./tmp-tests.log)
|
||||
//and sets the log level for "tests" to WARN
|
||||
log4js.configure('test/log4js.json');
|
||||
logger = log4js.getLogger('tests');
|
||||
logger.info('this should not be written to the file');
|
||||
logger.warn('this should be written to the file');
|
||||
|
||||
fs.readFile('tmp-tests.log', 'utf8', this.callback);
|
||||
},
|
||||
'should load appender configuration from a json file': function(err, contents) {
|
||||
assert.include(contents, 'this should be written to the file\n');
|
||||
assert.equal(contents.indexOf('this should not be written to the file'), -1);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}).export(module);
|
||||
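As the tests above show, the file appender can also be attached programmatically; a condensed sketch (the log file name is illustrative, and the require paths assume the repo root, as in log-rolling.js):

var log4js = require('./lib/log4js');

log4js.clearAppenders();
// appender(filename, layout, maxLogSize in bytes, number of backups)
log4js.addAppender(
  require('./lib/appenders/file').appender('app.log', log4js.layouts.basicLayout, 1024, 3),
  'app'
);

var logger = log4js.getLogger('app');
logger.info('rolls to app.log.1 .. app.log.3 once app.log passes 1kB');
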
138 test/gelfAppender.js Normal file
@@ -0,0 +1,138 @@
|
||||
var vows = require('vows')
|
||||
, assert = require('assert')
|
||||
, sandbox = require('sandboxed-module')
|
||||
, log4js = require('../lib/log4js')
|
||||
, setupLogging = function(options, category, compressedLength) {
|
||||
var fakeDgram = {
|
||||
sent: false,
|
||||
socket: {
|
||||
packetLength: 0,
|
||||
close: function() {
|
||||
},
|
||||
send: function(pkt, offset, pktLength, port, host) {
|
||||
fakeDgram.sent = true;
|
||||
this.packet = pkt;
|
||||
this.offset = offset;
|
||||
this.packetLength = pktLength;
|
||||
this.port = port;
|
||||
this.host = host;
|
||||
}
|
||||
},
|
||||
createSocket: function(type) {
|
||||
this.type = type;
|
||||
return this.socket;
|
||||
}
|
||||
}
|
||||
, fakeZlib = {
|
||||
gzip: function(objectToCompress, callback) {
|
||||
fakeZlib.uncompressed = objectToCompress;
|
||||
if (compressedLength) {
|
||||
callback(null, { length: compressedLength });
|
||||
} else {
|
||||
callback(null, "I've been compressed");
|
||||
}
|
||||
}
|
||||
}
|
||||
, appender = sandbox.require('../lib/appenders/gelf', {
|
||||
requires: {
|
||||
dgram: fakeDgram,
|
||||
zlib: fakeZlib
|
||||
}
|
||||
});
|
||||
|
||||
log4js.clearAppenders();
|
||||
log4js.addAppender(appender.configure(options || {}), category || "gelf-test");
|
||||
return {
|
||||
dgram: fakeDgram,
|
||||
compress: fakeZlib,
|
||||
logger: log4js.getLogger(category || "gelf-test")
|
||||
};
|
||||
};
|
||||
|
||||
//log4js.configure({ doNotReplaceConsole: true });
|
||||
|
||||
vows.describe('log4js gelfAppender').addBatch({
|
||||
|
||||
'with default gelfAppender settings': {
|
||||
topic: function() {
|
||||
var setup = setupLogging();
|
||||
setup.logger.info("This is a test");
|
||||
return setup;
|
||||
},
|
||||
'the dgram packet': {
|
||||
topic: function(setup) {
|
||||
return setup.dgram;
|
||||
},
|
||||
'should be sent via udp to the localhost gelf server': function(dgram) {
|
||||
assert.equal(dgram.type, "udp4");
|
||||
assert.equal(dgram.socket.host, "localhost");
|
||||
assert.equal(dgram.socket.port, 12201);
|
||||
assert.equal(dgram.socket.offset, 0);
|
||||
assert.ok(dgram.socket.packetLength > 0, "Received blank message");
|
||||
},
|
||||
'should be compressed': function(dgram) {
|
||||
assert.equal(dgram.socket.packet, "I've been compressed");
|
||||
}
|
||||
},
|
||||
'the uncompressed log message': {
|
||||
topic: function(setup) {
|
||||
var message = JSON.parse(setup.compress.uncompressed);
|
||||
return message;
|
||||
},
|
||||
'should be in the gelf format': function(message) {
|
||||
assert.equal(message.version, '1.0');
|
||||
assert.equal(message.host, require('os').hostname());
|
||||
assert.equal(message.level, 6); //INFO
|
||||
assert.equal(message.facility, 'nodejs-server');
|
||||
assert.equal(message.full_message, message.short_message);
|
||||
assert.equal(message.full_message, 'This is a test');
|
||||
}
|
||||
}
|
||||
},
|
||||
'with a message longer than 8k': {
|
||||
topic: function() {
|
||||
var setup = setupLogging(undefined, undefined, 10240);
|
||||
setup.logger.info("Blah.");
|
||||
return setup;
|
||||
},
|
||||
'the dgram packet': {
|
||||
topic: function(setup) {
|
||||
return setup.dgram;
|
||||
},
|
||||
'should not be sent': function(dgram) {
|
||||
assert.equal(dgram.sent, false);
|
||||
}
|
||||
}
|
||||
},
|
||||
'with non-default options': {
|
||||
topic: function() {
|
||||
var setup = setupLogging({
|
||||
host: 'somewhere',
|
||||
port: 12345,
|
||||
hostname: 'cheese',
|
||||
facility: 'nonsense'
|
||||
});
|
||||
setup.logger.debug("Just testing.");
|
||||
return setup;
|
||||
},
|
||||
'the dgram packet': {
|
||||
topic: function(setup) {
|
||||
return setup.dgram;
|
||||
},
|
||||
'should pick up the options': function(dgram) {
|
||||
assert.equal(dgram.socket.host, 'somewhere');
|
||||
assert.equal(dgram.socket.port, 12345);
|
||||
}
|
||||
},
|
||||
'the uncompressed packet': {
|
||||
topic: function(setup) {
|
||||
var message = JSON.parse(setup.compress.uncompressed);
|
||||
return message;
|
||||
},
|
||||
'should pick up the options': function(message) {
|
||||
assert.equal(message.host, 'cheese');
|
||||
assert.equal(message.facility, 'nonsense');
|
||||
}
|
||||
}
|
||||
}
|
||||
}).export(module);
|
||||
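The GELF tests above configure the appender module directly with host, port, hostname and facility options. Assuming the appender is wired up by type in the same way as the other appenders (an assumption, not shown in this diff), the equivalent configuration might look like this, with placeholder values; the test defaults are localhost:12201 and facility "nodejs-server":

var log4js = require('./lib/log4js');

log4js.configure({
  appenders: [
    { type: 'gelf', host: 'graylog.example.com', port: 12201,
      hostname: 'app-server-1', facility: 'my-app', category: 'gelf-test' }
  ]
});

log4js.getLogger('gelf-test').info('sent as a gzipped GELF packet over UDP');
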
101 test/hookioAppender.js Normal file
@@ -0,0 +1,101 @@
|
||||
var vows = require('vows');
|
||||
var assert = require('assert');
|
||||
var sandbox = require('sandboxed-module');
|
||||
|
||||
function fancyResultingHookioAppender(opts) {
|
||||
var result = { ons: {}, emissions: {}, logged: [], configs: [] };
|
||||
|
||||
var fakeLog4Js = {
|
||||
appenderMakers: {}
|
||||
};
|
||||
fakeLog4Js.loadAppender = function (appender) {
|
||||
fakeLog4Js.appenderMakers[appender] = function (config) {
|
||||
result.actualLoggerConfig = config;
|
||||
return function log(logEvent) {
|
||||
result.logged.push(logEvent);
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
var fakeHookIo = { Hook: function(config) { result.configs.push(config); } };
|
||||
fakeHookIo.Hook.prototype.start = function () {
|
||||
result.startCalled = true;
|
||||
};
|
||||
fakeHookIo.Hook.prototype.on = function (eventName, functionToExec) {
|
||||
result.ons[eventName] = { functionToExec: functionToExec };
|
||||
if (eventName === 'hook::ready') {
|
||||
functionToExec();
|
||||
}
|
||||
};
|
||||
fakeHookIo.Hook.prototype.emit = function (eventName, data) {
|
||||
result.emissions[eventName] = result.emissions[eventName] || [];
|
||||
result.emissions[eventName].push({data: data});
|
||||
var on = '*::' + eventName;
|
||||
if (eventName !== 'hook::ready' && result.ons[on]) {
|
||||
result.ons[on].callingCount = result.ons[on].callingCount ? result.ons[on].callingCount += 1 : 1;
|
||||
result.ons[on].functionToExec(data);
|
||||
}
|
||||
};
|
||||
|
||||
return { theResult: result,
|
||||
theModule: sandbox.require('../lib/appenders/hookio', {
|
||||
requires: {
|
||||
'../log4js': fakeLog4Js,
|
||||
'hook.io': fakeHookIo
|
||||
}
|
||||
})
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
vows.describe('log4js hookioAppender').addBatch({
|
||||
'master': {
|
||||
topic: function() {
|
||||
var fancy = fancyResultingHookioAppender();
|
||||
var logger = fancy.theModule.configure({ name: 'ohno', mode: 'master', 'hook-port': 5001, appender: { type: 'file' } });
|
||||
logger({ level: { levelStr: 'INFO' }, data: "ALRIGHTY THEN", startTime: '2011-10-27T03:53:16.031Z' });
|
||||
logger({ level: { levelStr: 'DEBUG' }, data: "OH WOW", startTime: '2011-10-27T04:53:16.031Z'});
|
||||
return fancy.theResult;
|
||||
},
|
||||
|
||||
'should write to the actual appender': function (result) {
|
||||
assert.isTrue(result.startCalled);
|
||||
assert.equal(result.configs.length, 1);
|
||||
assert.equal(result.configs[0]['hook-port'], 5001);
|
||||
assert.equal(result.logged.length, 2);
|
||||
assert.equal(result.emissions['ohno::log'].length, 2);
|
||||
assert.equal(result.ons['*::ohno::log'].callingCount, 2);
|
||||
},
|
||||
|
||||
'data written should be formatted correctly': function (result) {
|
||||
assert.equal(result.logged[0].level.toString(), 'INFO');
|
||||
assert.equal(result.logged[0].data, 'ALRIGHTY THEN');
|
||||
assert.isTrue(typeof(result.logged[0].startTime) === 'object');
|
||||
assert.equal(result.logged[1].level.toString(), 'DEBUG');
|
||||
assert.equal(result.logged[1].data, 'OH WOW');
|
||||
assert.isTrue(typeof(result.logged[1].startTime) === 'object');
|
||||
},
|
||||
|
||||
'the actual logger should get the right config': function (result) {
|
||||
assert.equal(result.actualLoggerConfig.type, 'file');
|
||||
}
|
||||
},
|
||||
'worker': {
|
||||
'should emit logging events to the master': {
|
||||
topic: function() {
|
||||
var fancy = fancyResultingHookioAppender();
|
||||
var logger = fancy.theModule.configure({ name: 'ohno', mode: 'worker', appender: { type: 'file' } });
|
||||
logger({ level: { levelStr: 'INFO' }, data: "ALRIGHTY THEN", startTime: '2011-10-27T03:53:16.031Z' });
|
||||
logger({ level: { levelStr: 'DEBUG' }, data: "OH WOW", startTime: '2011-10-27T04:53:16.031Z'});
|
||||
return fancy.theResult;
|
||||
},
|
||||
|
||||
'should not write to the actual appender': function (result) {
|
||||
assert.isTrue(result.startCalled);
|
||||
assert.equal(result.logged.length, 0);
|
||||
assert.equal(result.emissions['ohno::log'].length, 2);
|
||||
assert.isUndefined(result.ons['*::ohno::log']);
|
||||
}
|
||||
}
|
||||
}
|
||||
}).exportTo(module);
|
||||
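Based on the options exercised in the hook.io tests above (name, mode, hook-port, and a wrapped appender), a master-process configuration might look like the sketch below; the type-based wiring and the filename are assumptions for illustration. In the master the wrapped appender does the writing; workers only emit "<name>::log" events to the hook:

var log4js = require('./lib/log4js');

log4js.configure({
  appenders: [
    { type: 'hookio', name: 'ohno', mode: 'master', 'hook-port': 5001,
      appender: { type: 'file', filename: 'hooked.log' } }
  ]
});
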
@@ -57,15 +57,27 @@ vows.describe('log4js layouts').addBatch({
|
||||
},
|
||||
'should support the console.log format for the message' : function(layout) {
|
||||
assert.equal(layout({
|
||||
data: ["thing %d", 1]
|
||||
data: ["thing %d", 1, "cheese"]
|
||||
, startTime: new Date(2010, 11, 5, 14, 18, 30, 45)
|
||||
, categoryName: "cheese"
|
||||
, level : {
|
||||
colour: "green"
|
||||
, toString: function() { return "ERROR"; }
|
||||
}
|
||||
}), "thing 1");
|
||||
}
|
||||
}), "thing 1 'cheese'");
|
||||
},
|
||||
'should output the first item even if it is not a string': function(layout) {
|
||||
assert.equal(layout({
|
||||
data: [ { thing: 1} ]
|
||||
, startTime: new Date(2010, 11, 5, 14, 18, 30, 45)
|
||||
, categoryName: "cheese"
|
||||
, level: {
|
||||
colour: "green"
|
||||
, toString: function() { return "ERROR"; }
|
||||
}
|
||||
}), "{ thing: 1 }");
|
||||
}
|
||||
|
||||
},
|
||||
|
||||
'basicLayout': {
|
||||
@@ -94,11 +106,10 @@ vows.describe('log4js layouts').addBatch({
|
||||
output = layout(event);
|
||||
lines = output.split(/\n/);
|
||||
|
||||
assert.length(lines, stack.length+1);
|
||||
assert.equal(lines[0], "[2010-12-05 14:18:30.045] [DEBUG] tests - this is a test");
|
||||
assert.equal(lines[1], "Error: Some made-up error");
|
||||
assert.equal(lines.length, stack.length);
|
||||
assert.equal(lines[0], "[2010-12-05 14:18:30.045] [DEBUG] tests - this is a test Error: Some made-up error");
|
||||
for (var i = 1; i < stack.length; i++) {
|
||||
assert.equal(lines[i+1], stack[i]);
|
||||
assert.equal(lines[i+1], stack[i+1]);
|
||||
}
|
||||
},
|
||||
'should output any extra data in the log event as util.inspect strings': function(args) {
|
||||
@@ -108,11 +119,7 @@ vows.describe('log4js layouts').addBatch({
|
||||
message: 'Gorgonzola smells.'
|
||||
}];
|
||||
output = layout(event);
|
||||
lines = output.split(/\n/);
|
||||
|
||||
assert.length(lines, 2);
|
||||
assert.equal(lines[0], "[2010-12-05 14:18:30.045] [DEBUG] tests - this is a test");
|
||||
assert.equal(lines[1], "{ name: 'Cheese', message: 'Gorgonzola smells.' }");
|
||||
assert.equal(output, "[2010-12-05 14:18:30.045] [DEBUG] tests - this is a test { name: 'Cheese', message: 'Gorgonzola smells.' }");
|
||||
}
|
||||
},
|
||||
|
||||
|
||||
210 test/levels.js Normal file
@@ -0,0 +1,210 @@
|
||||
var vows = require('vows')
|
||||
, assert = require('assert')
|
||||
, levels = require('../lib/levels');
|
||||
|
||||
function assertThat(level) {
|
||||
function assertForEach(assertion, test, otherLevels) {
|
||||
otherLevels.forEach(function(other) {
|
||||
assertion.call(assert, test.call(level, other));
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
isLessThanOrEqualTo: function(levels) {
|
||||
assertForEach(assert.isTrue, level.isLessThanOrEqualTo, levels);
|
||||
},
|
||||
isNotLessThanOrEqualTo: function(levels) {
|
||||
assertForEach(assert.isFalse, level.isLessThanOrEqualTo, levels);
|
||||
},
|
||||
isGreaterThanOrEqualTo: function(levels) {
|
||||
assertForEach(assert.isTrue, level.isGreaterThanOrEqualTo, levels);
|
||||
},
|
||||
isNotGreaterThanOrEqualTo: function(levels) {
|
||||
assertForEach(assert.isFalse, level.isGreaterThanOrEqualTo, levels);
|
||||
},
|
||||
isEqualTo: function(levels) {
|
||||
assertForEach(assert.isTrue, level.isEqualTo, levels);
|
||||
},
|
||||
isNotEqualTo: function(levels) {
|
||||
assertForEach(assert.isFalse, level.isEqualTo, levels);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
vows.describe('levels').addBatch({
|
||||
'values': {
|
||||
topic: levels,
|
||||
'should define some levels': function(levels) {
|
||||
assert.isNotNull(levels.ALL);
|
||||
assert.isNotNull(levels.TRACE);
|
||||
assert.isNotNull(levels.DEBUG);
|
||||
assert.isNotNull(levels.INFO);
|
||||
assert.isNotNull(levels.WARN);
|
||||
assert.isNotNull(levels.ERROR);
|
||||
assert.isNotNull(levels.FATAL);
|
||||
assert.isNotNull(levels.OFF);
|
||||
},
|
||||
'ALL': {
|
||||
topic: levels.ALL,
|
||||
'should be less than the other levels': function(all) {
|
||||
assertThat(all).isLessThanOrEqualTo([levels.ALL, levels.TRACE, levels.DEBUG, levels.INFO, levels.WARN, levels.ERROR, levels.FATAL, levels.OFF]);
|
||||
},
|
||||
'should be greater than no levels': function(all) {
|
||||
assertThat(all).isNotGreaterThanOrEqualTo([levels.TRACE, levels.DEBUG, levels.INFO, levels.WARN, levels.ERROR, levels.FATAL, levels.OFF]);
|
||||
},
|
||||
'should only be equal to ALL': function(all) {
|
||||
assertThat(all).isEqualTo([levels.toLevel("ALL")]);
|
||||
assertThat(all).isNotEqualTo([levels.TRACE, levels.DEBUG, levels.INFO, levels.WARN, levels.ERROR, levels.FATAL, levels.OFF]);
|
||||
}
|
||||
},
|
||||
'TRACE': {
|
||||
topic: levels.TRACE,
|
||||
'should be less than DEBUG': function(trace) {
|
||||
assertThat(trace).isLessThanOrEqualTo([levels.DEBUG, levels.INFO, levels.WARN, levels.ERROR, levels.FATAL, levels.OFF]);
|
||||
assertThat(trace).isNotLessThanOrEqualTo([levels.ALL]);
|
||||
},
|
||||
'should be greater than ALL': function(trace) {
|
||||
assertThat(trace).isGreaterThanOrEqualTo([levels.ALL, levels.TRACE]);
|
||||
assertThat(trace).isNotGreaterThanOrEqualTo([levels.DEBUG, levels.INFO, levels.WARN, levels.ERROR, levels.FATAL, levels.OFF]);
|
||||
},
|
||||
'should only be equal to TRACE': function(trace) {
|
||||
assertThat(trace).isEqualTo([levels.toLevel("TRACE")]);
|
||||
assertThat(trace).isNotEqualTo([levels.ALL, levels.DEBUG, levels.INFO, levels.WARN, levels.ERROR, levels.FATAL, levels.OFF]);
|
||||
}
|
||||
},
|
||||
'DEBUG': {
|
||||
topic: levels.DEBUG,
|
||||
'should be less than INFO': function(debug) {
|
||||
assertThat(debug).isLessThanOrEqualTo([levels.INFO, levels.WARN, levels.ERROR, levels.FATAL, levels.OFF]);
|
||||
assertThat(debug).isNotLessThanOrEqualTo([levels.ALL, levels.TRACE]);
|
||||
},
|
||||
'should be greater than TRACE': function(debug) {
|
||||
assertThat(debug).isGreaterThanOrEqualTo([levels.ALL, levels.TRACE]);
|
||||
assertThat(debug).isNotGreaterThanOrEqualTo([levels.INFO, levels.WARN, levels.ERROR, levels.FATAL, levels.OFF]);
|
||||
},
|
||||
'should only be equal to DEBUG': function(trace) {
|
||||
assertThat(trace).isEqualTo([levels.toLevel("DEBUG")]);
|
||||
assertThat(trace).isNotEqualTo([levels.ALL, levels.TRACE, levels.INFO, levels.WARN, levels.ERROR, levels.FATAL, levels.OFF]);
|
||||
}
|
||||
},
|
||||
'INFO': {
|
||||
topic: levels.INFO,
|
||||
'should be less than WARN': function(info) {
|
||||
assertThat(info).isLessThanOrEqualTo([levels.WARN, levels.ERROR, levels.FATAL, levels.OFF]);
|
||||
assertThat(info).isNotLessThanOrEqualTo([levels.ALL, levels.TRACE, levels.DEBUG]);
|
||||
},
|
||||
'should be greater than DEBUG': function(info) {
|
||||
assertThat(info).isGreaterThanOrEqualTo([levels.ALL, levels.TRACE, levels.DEBUG]);
|
||||
assertThat(info).isNotGreaterThanOrEqualTo([levels.WARN, levels.ERROR, levels.FATAL, levels.OFF]);
|
||||
},
|
||||
'should only be equal to INFO': function(trace) {
|
||||
assertThat(trace).isEqualTo([levels.toLevel("INFO")]);
|
||||
assertThat(trace).isNotEqualTo([levels.ALL, levels.TRACE, levels.DEBUG, levels.WARN, levels.ERROR, levels.FATAL, levels.OFF]);
|
||||
}
|
||||
},
|
||||
'WARN': {
|
||||
topic: levels.WARN,
|
||||
'should be less than ERROR': function(warn) {
|
||||
assertThat(warn).isLessThanOrEqualTo([levels.ERROR, levels.FATAL, levels.OFF]);
|
||||
assertThat(warn).isNotLessThanOrEqualTo([levels.ALL, levels.TRACE, levels.DEBUG, levels.INFO]);
|
||||
},
|
||||
'should be greater than INFO': function(warn) {
|
||||
assertThat(warn).isGreaterThanOrEqualTo([levels.ALL, levels.TRACE, levels.DEBUG, levels.INFO]);
|
||||
assertThat(warn).isNotGreaterThanOrEqualTo([levels.ERROR, levels.FATAL, levels.OFF]);
|
||||
},
|
||||
'should only be equal to WARN': function(trace) {
|
||||
assertThat(trace).isEqualTo([levels.toLevel("WARN")]);
|
||||
assertThat(trace).isNotEqualTo([levels.ALL, levels.TRACE, levels.DEBUG, levels.INFO, levels.ERROR, levels.FATAL, levels.OFF]);
|
||||
}
|
||||
},
|
||||
'ERROR': {
|
||||
topic: levels.ERROR,
|
||||
'should be less than FATAL': function(error) {
|
||||
assertThat(error).isLessThanOrEqualTo([levels.FATAL, levels.OFF]);
|
||||
assertThat(error).isNotLessThanOrEqualTo([levels.ALL, levels.TRACE, levels.DEBUG, levels.INFO, levels.WARN]);
|
||||
},
|
||||
'should be greater than WARN': function(error) {
|
||||
assertThat(error).isGreaterThanOrEqualTo([levels.ALL, levels.TRACE, levels.DEBUG, levels.INFO, levels.WARN]);
|
||||
assertThat(error).isNotGreaterThanOrEqualTo([levels.FATAL, levels.OFF]);
|
||||
},
|
||||
'should only be equal to ERROR': function(trace) {
|
||||
assertThat(trace).isEqualTo([levels.toLevel("ERROR")]);
|
||||
assertThat(trace).isNotEqualTo([levels.ALL, levels.TRACE, levels.DEBUG, levels.INFO, levels.WARN, levels.FATAL, levels.OFF]);
|
||||
}
|
||||
},
|
||||
'FATAL': {
|
||||
topic: levels.FATAL,
|
||||
'should be less than OFF': function(fatal) {
|
||||
assertThat(fatal).isLessThanOrEqualTo([levels.OFF]);
|
||||
assertThat(fatal).isNotLessThanOrEqualTo([levels.ALL, levels.TRACE, levels.DEBUG, levels.INFO, levels.WARN, levels.ERROR]);
|
||||
},
|
||||
'should be greater than ERROR': function(fatal) {
|
||||
assertThat(fatal).isGreaterThanOrEqualTo([levels.ALL, levels.TRACE, levels.DEBUG, levels.INFO, levels.WARN, levels.ERROR]);
|
||||
assertThat(fatal).isNotGreaterThanOrEqualTo([levels.OFF]);
|
||||
},
|
||||
'should only be equal to FATAL': function(fatal) {
|
||||
assertThat(fatal).isEqualTo([levels.toLevel("FATAL")]);
|
||||
assertThat(fatal).isNotEqualTo([levels.ALL, levels.TRACE, levels.DEBUG, levels.INFO, levels.WARN, levels.ERROR, levels.OFF]);
|
||||
}
|
||||
},
|
||||
'OFF': {
|
||||
topic: levels.OFF,
|
||||
'should not be less than anything': function(off) {
|
||||
assertThat(off).isNotLessThanOrEqualTo([levels.ALL, levels.TRACE, levels.DEBUG, levels.INFO, levels.WARN, levels.ERROR, levels.FATAL]);
|
||||
},
|
||||
'should be greater than everything': function(off) {
|
||||
assertThat(off).isGreaterThanOrEqualTo([levels.ALL, levels.TRACE, levels.DEBUG, levels.INFO, levels.WARN, levels.ERROR, levels.FATAL]);
|
||||
},
|
||||
'should only be equal to OFF': function(off) {
|
||||
assertThat(off).isEqualTo([levels.toLevel("OFF")]);
|
||||
assertThat(off).isNotEqualTo([levels.ALL, levels.TRACE, levels.DEBUG, levels.INFO, levels.WARN, levels.ERROR, levels.FATAL]);
|
||||
}
|
||||
}
|
||||
},
|
||||
'isGreaterThanOrEqualTo': {
|
||||
topic: levels.INFO,
|
||||
'should handle string arguments': function(info) {
|
||||
assertThat(info).isGreaterThanOrEqualTo(["all", "trace", "debug"]);
|
||||
assertThat(info).isNotGreaterThanOrEqualTo(['warn', 'ERROR', 'Fatal', 'off']);
|
||||
}
|
||||
},
|
||||
'isLessThanOrEqualTo': {
|
||||
topic: levels.INFO,
|
||||
'should handle string arguments': function(info) {
|
||||
assertThat(info).isNotLessThanOrEqualTo(["all", "trace", "debug"]);
|
||||
assertThat(info).isLessThanOrEqualTo(['warn', 'ERROR', 'Fatal', 'off']);
|
||||
}
|
||||
},
|
||||
'toLevel': {
|
||||
'with lowercase argument': {
|
||||
topic: levels.toLevel("debug"),
|
||||
'should take the string and return the corresponding level': function(level) {
|
||||
assert.equal(level, levels.DEBUG);
|
||||
}
|
||||
},
|
||||
'with uppercase argument': {
|
||||
topic: levels.toLevel("DEBUG"),
|
||||
'should take the string and return the corresponding level': function(level) {
|
||||
assert.equal(level, levels.DEBUG);
|
||||
}
|
||||
},
|
||||
'with varying case': {
|
||||
topic: levels.toLevel("DeBuG"),
|
||||
'should take the string and return the corresponding level': function(level) {
|
||||
assert.equal(level, levels.DEBUG);
|
||||
}
|
||||
},
|
||||
'with unrecognised argument': {
|
||||
topic: levels.toLevel("cheese"),
|
||||
'should return undefined': function(level) {
|
||||
assert.isUndefined(level);
|
||||
}
|
||||
},
|
||||
'with unrecognised argument and default value': {
|
||||
topic: levels.toLevel("cheese", levels.DEBUG),
|
||||
'should return default value': function(level) {
|
||||
assert.equal(level, levels.DEBUG);
|
||||
}
|
||||
}
|
||||
}
|
||||
}).export(module);
|
||||
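A small sketch of the levels API the tests above exercise (string arguments are case-insensitive, and toLevel takes an optional default; the require path assumes the repo root):

var levels = require('./lib/levels');

var threshold = levels.toLevel('warn', levels.INFO);          // WARN
console.log(levels.ERROR.isGreaterThanOrEqualTo(threshold));  // true
console.log(levels.DEBUG.isLessThanOrEqualTo('info'));        // true
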
69 test/logLevelFilter.js Normal file
@@ -0,0 +1,69 @@
|
||||
var vows = require('vows')
|
||||
, fs = require('fs')
|
||||
, assert = require('assert');
|
||||
|
||||
function remove(filename) {
|
||||
try {
|
||||
fs.unlinkSync(filename);
|
||||
} catch (e) {
|
||||
//doesn't really matter if it failed
|
||||
}
|
||||
}
|
||||
|
||||
vows.describe('log4js logLevelFilter').addBatch({
|
||||
'appender': {
|
||||
topic: function() {
|
||||
var log4js = require('../lib/log4js'), logEvents = [], logger;
|
||||
log4js.clearAppenders();
|
||||
log4js.addAppender(require('../lib/appenders/logLevelFilter').appender('ERROR', function(evt) { logEvents.push(evt); }), "logLevelTest");
|
||||
logger = log4js.getLogger("logLevelTest");
|
||||
logger.debug('this should not trigger an event');
|
||||
logger.warn('neither should this');
|
||||
logger.error('this should, though');
|
||||
logger.fatal('so should this');
|
||||
return logEvents;
|
||||
},
|
||||
'should only pass log events greater than or equal to its own level' : function(logEvents) {
|
||||
assert.equal(logEvents.length, 2);
|
||||
assert.equal(logEvents[0].data[0], 'this should, though');
|
||||
assert.equal(logEvents[1].data[0], 'so should this');
|
||||
}
|
||||
},
|
||||
|
||||
'configure': {
|
||||
topic: function() {
|
||||
var log4js = require('../lib/log4js')
|
||||
, logger;
|
||||
|
||||
remove(__dirname + '/logLevelFilter.log');
|
||||
remove(__dirname + '/logLevelFilter-warnings.log');
|
||||
|
||||
log4js.configure('test/with-logLevelFilter.json');
|
||||
logger = log4js.getLogger("tests");
|
||||
logger.info('main');
|
||||
logger.error('both');
|
||||
logger.warn('both');
|
||||
logger.debug('main');
|
||||
//wait for the file system to catch up
|
||||
setTimeout(this.callback, 100);
|
||||
},
|
||||
'tmp-tests.log': {
|
||||
topic: function() {
|
||||
fs.readFile(__dirname + '/logLevelFilter.log', 'utf8', this.callback);
|
||||
},
|
||||
'should contain all log messages': function(contents) {
|
||||
var messages = contents.trim().split('\n');
|
||||
assert.deepEqual(messages, ['main','both','both','main']);
|
||||
}
|
||||
},
|
||||
'tmp-tests-warnings.log': {
|
||||
topic: function() {
|
||||
fs.readFile(__dirname + '/logLevelFilter-warnings.log','utf8',this.callback);
|
||||
},
|
||||
'should contain only error and warning log messages': function(contents) {
|
||||
var messages = contents.trim().split('\n');
|
||||
assert.deepEqual(messages, ['both','both']);
|
||||
}
|
||||
}
|
||||
}
|
||||
}).export(module);
|
||||
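The filter appender above wraps another appender and only forwards events at or above its level; a condensed sketch of the programmatic form used in the first batch (file name illustrative, paths relative to the repo root):

var log4js = require('./lib/log4js');

log4js.clearAppenders();
var fileAppender = require('./lib/appenders/file').appender('errors-only.log');
log4js.addAppender(
  require('./lib/appenders/logLevelFilter').appender('ERROR', fileAppender),
  'app'
);

var logger = log4js.getLogger('app');
logger.warn('dropped by the filter');
logger.error('written to errors-only.log');
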
784 test/logging.js
@@ -2,324 +2,209 @@ var vows = require('vows')
|
||||
, assert = require('assert')
|
||||
, sandbox = require('sandboxed-module');
|
||||
|
||||
function setupConsoleTest() {
|
||||
var fakeConsole = {}
|
||||
, logEvents = []
|
||||
, log4js;
|
||||
|
||||
['trace','debug','log','info','warn','error'].forEach(function(fn) {
|
||||
fakeConsole[fn] = function() {
|
||||
throw new Error("this should not be called.");
|
||||
};
|
||||
});
|
||||
|
||||
log4js = sandbox.require(
|
||||
'../lib/log4js'
|
||||
, {
|
||||
globals: {
|
||||
console: fakeConsole
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
log4js.clearAppenders();
|
||||
log4js.addAppender(function(evt) {
|
||||
logEvents.push(evt);
|
||||
});
|
||||
|
||||
return { log4js: log4js, logEvents: logEvents, fakeConsole: fakeConsole };
|
||||
}
|
||||
|
||||
vows.describe('log4js').addBatch({
|
||||
'getLogger': {
|
||||
topic: function() {
|
||||
var log4js = require('../lib/log4js');
|
||||
log4js.clearAppenders();
|
||||
var logger = log4js.getLogger('tests');
|
||||
logger.setLevel("DEBUG");
|
||||
return logger;
|
||||
},
|
||||
topic: function() {
|
||||
var log4js = require('../lib/log4js');
|
||||
log4js.clearAppenders();
|
||||
var logger = log4js.getLogger('tests');
|
||||
logger.setLevel("DEBUG");
|
||||
return logger;
|
||||
},
|
||||
|
||||
'should take a category and return a logger': function(logger) {
|
||||
assert.equal(logger.category, 'tests');
|
||||
assert.equal(logger.level.toString(), "DEBUG");
|
||||
assert.isFunction(logger.debug);
|
||||
assert.isFunction(logger.info);
|
||||
assert.isFunction(logger.warn);
|
||||
assert.isFunction(logger.error);
|
||||
assert.isFunction(logger.fatal);
|
||||
},
|
||||
'should take a category and return a logger': function(logger) {
|
||||
assert.equal(logger.category, 'tests');
|
||||
assert.equal(logger.level.toString(), "DEBUG");
|
||||
assert.isFunction(logger.debug);
|
||||
assert.isFunction(logger.info);
|
||||
assert.isFunction(logger.warn);
|
||||
assert.isFunction(logger.error);
|
||||
assert.isFunction(logger.fatal);
|
||||
},
|
||||
|
||||
'log events' : {
|
||||
topic: function(logger) {
|
||||
var events = [];
|
||||
logger.addListener("log", function (logEvent) { events.push(logEvent); });
|
||||
logger.debug("Debug event");
|
||||
logger.trace("Trace event 1");
|
||||
logger.trace("Trace event 2");
|
||||
logger.warn("Warning event");
|
||||
'log events' : {
|
||||
topic: function(logger) {
|
||||
var events = [];
|
||||
logger.addListener("log", function (logEvent) { events.push(logEvent); });
|
||||
logger.debug("Debug event");
|
||||
logger.trace("Trace event 1");
|
||||
logger.trace("Trace event 2");
|
||||
logger.warn("Warning event");
|
||||
logger.error("Aargh!", new Error("Pants are on fire!"));
|
||||
logger.error("Simulated CouchDB problem", { err: 127, cause: "incendiary underwear" });
|
||||
return events;
|
||||
},
|
||||
return events;
|
||||
},
|
||||
|
||||
'should emit log events': function(events) {
|
||||
assert.equal(events[0].level.toString(), 'DEBUG');
|
||||
assert.equal(events[0].data[0], 'Debug event');
|
||||
assert.instanceOf(events[0].startTime, Date);
|
||||
},
|
||||
'should emit log events': function(events) {
|
||||
assert.equal(events[0].level.toString(), 'DEBUG');
|
||||
assert.equal(events[0].data[0], 'Debug event');
|
||||
assert.instanceOf(events[0].startTime, Date);
|
||||
},
|
||||
|
||||
'should not emit events of a lower level': function(events) {
|
||||
assert.length(events, 4);
|
||||
assert.equal(events[1].level.toString(), 'WARN');
|
||||
},
|
||||
'should not emit events of a lower level': function(events) {
|
||||
assert.equal(events.length, 4);
|
||||
assert.equal(events[1].level.toString(), 'WARN');
|
||||
},
|
||||
|
||||
'should include the error if passed in': function (events) {
|
||||
assert.instanceOf(events[2].data[1], Error);
|
||||
assert.equal(events[2].data[1].message, 'Pants are on fire!');
|
||||
}
|
||||
|
||||
},
|
||||
},
|
||||
|
||||
},
|
||||
|
||||
'fileAppender': {
|
||||
topic: function() {
|
||||
var appender
|
||||
, logmessages = []
|
||||
, thing = "thing"
|
||||
, fakeFS = {
|
||||
createWriteStream: function() {
|
||||
assert.equal(arguments[0], './tmp-tests.log');
|
||||
assert.isObject(arguments[1]);
|
||||
assert.equal(arguments[1].flags, 'a');
|
||||
assert.equal(arguments[1].mode, 0644);
|
||||
assert.equal(arguments[1].encoding, 'utf8');
|
||||
return {
|
||||
write: function(message) {
|
||||
logmessages.push(message);
|
||||
}
|
||||
, end: function() {}
|
||||
, destroySoon: function() {}
|
||||
};
|
||||
},
|
||||
watchFile: function() {
|
||||
throw new Error("watchFile should not be called if logSize is not defined");
|
||||
}
|
||||
},
|
||||
log4js = sandbox.require(
|
||||
'../lib/log4js',
|
||||
{
|
||||
requires: {
|
||||
'fs': fakeFS
|
||||
}
|
||||
}
|
||||
);
|
||||
log4js.clearAppenders();
|
||||
|
||||
appender = log4js.fileAppender('./tmp-tests.log', log4js.layouts.messagePassThroughLayout);
|
||||
log4js.addAppender(appender, 'file-test');
|
||||
|
||||
var logger = log4js.getLogger('file-test');
|
||||
logger.debug("this is a test");
|
||||
|
||||
return logmessages;
|
||||
},
|
||||
'should write log messages to file': function(logmessages) {
|
||||
assert.length(logmessages, 1);
|
||||
assert.equal(logmessages, "this is a test\n");
|
||||
}
|
||||
'invalid configuration': {
|
||||
'should throw an exception': function() {
|
||||
assert.throws(function() {
|
||||
require('log4js').configure({ "type": "invalid" });
|
||||
});
|
||||
}
|
||||
},
|
||||
|
||||
'fileAppender - with rolling based on size and number of files to keep': {
|
||||
'configuration when passed as object': {
|
||||
topic: function() {
|
||||
var watchCb,
|
||||
filesOpened = [],
|
||||
filesEnded = [],
|
||||
filesDestroyedSoon = [],
|
||||
filesRenamed = [],
|
||||
newFilenames = [],
|
||||
existingFiles = ['tests.log'],
|
||||
log4js = sandbox.require(
|
||||
'../lib/log4js'
|
||||
, {
|
||||
requires: {
|
||||
'fs': {
|
||||
watchFile: function(file, options, callback) {
|
||||
assert.equal(file, 'tests.log');
|
||||
assert.equal(options.persistent, false);
|
||||
assert.equal(options.interval, 30000);
|
||||
assert.isFunction(callback);
|
||||
watchCb = callback;
|
||||
},
|
||||
createWriteStream: function(file) {
|
||||
assert.equal(file, 'tests.log');
|
||||
filesOpened.push(file);
|
||||
return {
|
||||
end: function() {
|
||||
filesEnded.push(file);
|
||||
},
|
||||
destroySoon: function() {
|
||||
filesDestroyedSoon.push(file);
|
||||
}
|
||||
};
|
||||
},
|
||||
statSync: function(file) {
|
||||
if (existingFiles.indexOf(file) < 0) {
|
||||
throw new Error("this file doesn't exist");
|
||||
} else {
|
||||
return true;
|
||||
}
|
||||
},
|
||||
renameSync: function(oldFile, newFile) {
|
||||
filesRenamed.push(oldFile);
|
||||
existingFiles.push(newFile);
|
||||
}
|
||||
var appenderConfig
|
||||
, log4js = sandbox.require(
|
||||
'../lib/log4js'
|
||||
, { requires:
|
||||
{ './appenders/file':
|
||||
{
|
||||
name: "file"
|
||||
, appender: function() {}
|
||||
, configure: function(configuration) {
|
||||
appenderConfig = configuration;
|
||||
return function() {};
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
);
|
||||
var appender = log4js.fileAppender('tests.log', log4js.messagePassThroughLayout, 1024, 2, 30);
|
||||
return [watchCb, filesOpened, filesEnded, filesDestroyedSoon, filesRenamed, existingFiles];
|
||||
)
|
||||
, config = {
|
||||
"appenders": [
|
||||
{
|
||||
"type" : "file",
|
||||
"filename" : "cheesy-wotsits.log",
|
||||
"maxLogSize" : 1024,
|
||||
"backups" : 3,
|
||||
"pollInterval" : 15
|
||||
}
|
||||
]
|
||||
};
|
||||
log4js.configure(config);
|
||||
return appenderConfig;
|
||||
},
|
||||
|
||||
'should close current log file, rename all old ones, open new one on rollover': function(args) {
|
||||
var watchCb = args[0]
|
||||
, filesOpened = args[1]
|
||||
, filesEnded = args[2]
|
||||
, filesDestroyedSoon = args[3]
|
||||
, filesRenamed = args[4]
|
||||
, existingFiles = args[5];
|
||||
assert.isFunction(watchCb);
|
||||
//tell the watchCb that the file is below the threshold
|
||||
watchCb({ size: 891 }, { size: 0 });
|
||||
//filesOpened should still be the first one.
|
||||
assert.length(filesOpened, 1);
|
||||
//tell the watchCb that the file is now over the threshold
|
||||
watchCb({ size: 1053 }, { size: 891 });
|
||||
//it should have closed the first log file.
|
||||
assert.length(filesEnded, 1);
|
||||
assert.length(filesDestroyedSoon, 1);
|
||||
//it should have renamed the previous log file
|
||||
assert.length(filesRenamed, 1);
|
||||
//and we should have two files now
|
||||
assert.length(existingFiles, 2);
|
||||
assert.deepEqual(existingFiles, ['tests.log', 'tests.log.1']);
|
||||
//and opened a new log file.
|
||||
assert.length(filesOpened, 2);
|
||||
|
||||
//now tell the watchCb that we've flipped over the threshold again
|
||||
watchCb({ size: 1025 }, { size: 123 });
|
||||
//it should have closed the old file
|
||||
assert.length(filesEnded, 2);
|
||||
assert.length(filesDestroyedSoon, 2);
|
||||
//it should have renamed both the old log file, and the previous '.1' file
|
||||
assert.length(filesRenamed, 3);
|
||||
assert.deepEqual(filesRenamed, ['tests.log', 'tests.log.1', 'tests.log' ]);
|
||||
//it should have renamed 2 more file
|
||||
assert.length(existingFiles, 4);
|
||||
assert.deepEqual(existingFiles, ['tests.log', 'tests.log.1', 'tests.log.2', 'tests.log.1']);
|
||||
//and opened a new log file
|
||||
assert.length(filesOpened, 3);
|
||||
|
||||
//tell the watchCb we've flipped again.
|
||||
watchCb({ size: 1024 }, { size: 234 });
|
||||
//close the old one again.
|
||||
assert.length(filesEnded, 3);
|
||||
assert.length(filesDestroyedSoon, 3);
|
||||
//it should have renamed the old log file and the 2 backups, with the last one being overwritten.
|
||||
assert.length(filesRenamed, 5);
|
||||
assert.deepEqual(filesRenamed, ['tests.log', 'tests.log.1', 'tests.log', 'tests.log.1', 'tests.log' ]);
|
||||
//it should have renamed 2 more files
|
||||
assert.length(existingFiles, 6);
|
||||
assert.deepEqual(existingFiles, ['tests.log', 'tests.log.1', 'tests.log.2', 'tests.log.1', 'tests.log.2', 'tests.log.1']);
|
||||
//and opened a new log file
|
||||
assert.length(filesOpened, 4);
|
||||
}
|
||||
'should be passed to appender config': function(configuration) {
|
||||
assert.equal(configuration.filename, 'cheesy-wotsits.log');
|
||||
}
|
||||
},
|
||||
|
||||
'configure' : {
|
||||
topic: function() {
|
||||
var messages = {}, fakeFS = {
|
||||
createWriteStream: function(file) {
|
||||
return {
|
||||
write: function(message) {
|
||||
if (!messages.hasOwnProperty(file)) {
|
||||
messages[file] = [];
|
||||
}
|
||||
messages[file].push(message);
|
||||
}
|
||||
, end: function() {}
|
||||
, destroySoon: function() {}
|
||||
};
|
||||
},
|
||||
readFileSync: function(file, encoding) {
|
||||
return require('fs').readFileSync(file, encoding);
|
||||
},
|
||||
watchFile: function(file) {
|
||||
messages.watchedFile = file;
|
||||
}
|
||||
},
|
||||
log4js = sandbox.require(
|
||||
'configuration when passed as filename': {
|
||||
topic: function() {
|
||||
var appenderConfig
|
||||
, configFilename
|
||||
, log4js = sandbox.require(
|
||||
'../lib/log4js'
|
||||
, {
|
||||
requires: {
|
||||
'fs': fakeFS
|
||||
, { requires:
|
||||
{ 'fs':
|
||||
{
|
||||
statSync: function() {
|
||||
return { mtime: Date.now() };
|
||||
},
|
||||
readFileSync: function(filename) {
|
||||
configFilename = filename;
|
||||
return JSON.stringify({
|
||||
appenders: [
|
||||
{ type: "file"
|
||||
, filename: "whatever.log"
|
||||
}
|
||||
]
|
||||
});
|
||||
},
|
||||
readdirSync: function() {
|
||||
return ['file'];
|
||||
}
|
||||
}
|
||||
, './appenders/file':
|
||||
{
|
||||
name: "file"
|
||||
, appender: function() {}
|
||||
, configure: function(configuration) {
|
||||
appenderConfig = configuration;
|
||||
return function() {};
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
);
|
||||
return [ log4js, messages ];
|
||||
},
|
||||
'should load appender configuration from a json file': function(args) {
|
||||
var log4js = args[0], messages = args[1];
|
||||
delete messages['tmp-tests.log'];
|
||||
log4js.clearAppenders();
|
||||
//this config file defines one file appender (to ./tmp-tests.log)
|
||||
//and sets the log level for "tests" to WARN
|
||||
log4js.configure('test/log4js.json');
|
||||
var logger = log4js.getLogger("tests");
|
||||
logger.info('this should not be written to the file');
|
||||
logger.warn('this should be written to the file');
|
||||
assert.length(messages['tmp-tests.log'], 1);
|
||||
assert.equal(messages['tmp-tests.log'][0], 'this should be written to the file\n');
|
||||
},
|
||||
'should handle logLevelFilter configuration': function(args) {
|
||||
var log4js = args[0], messages = args[1];
|
||||
delete messages['tmp-tests.log'];
|
||||
delete messages['tmp-tests-warnings.log'];
|
||||
log4js.clearAppenders();
|
||||
log4js.configure('test/with-logLevelFilter.json');
|
||||
var logger = log4js.getLogger("tests");
|
||||
logger.info('main');
|
||||
logger.error('both');
|
||||
logger.warn('both');
|
||||
logger.debug('main');
|
||||
|
||||
assert.length(messages['tmp-tests.log'], 4);
|
||||
assert.length(messages['tmp-tests-warnings.log'], 2);
|
||||
assert.deepEqual(messages['tmp-tests.log'], ['main\n','both\n','both\n','main\n']);
|
||||
assert.deepEqual(messages['tmp-tests-warnings.log'], ['both\n','both\n']);
|
||||
},
|
||||
'should handle fileAppender with log rolling' : function(args) {
|
||||
var log4js = args[0], messages = args[1];
|
||||
delete messages['tmp-test.log'];
|
||||
log4js.configure('test/with-log-rolling.json');
|
||||
assert.equal(messages.watchedFile, 'tmp-test.log');
|
||||
}
|
||||
);
|
||||
log4js.configure("/path/to/cheese.json");
|
||||
return [ configFilename, appenderConfig ];
|
||||
},
|
||||
'should handle an object or a file name': function(args) {
|
||||
var log4js = args[0],
|
||||
messages = args[1],
|
||||
config = {
|
||||
"appenders": [
|
||||
{
|
||||
"type" : "file",
|
||||
"filename" : "cheesy-wotsits.log",
|
||||
"maxLogSize" : 1024,
|
||||
"backups" : 3,
|
||||
"pollInterval" : 15
|
||||
}
|
||||
]
|
||||
};
|
||||
delete messages['cheesy-wotsits.log'];
|
||||
log4js.configure(config);
|
||||
assert.equal(messages.watchedFile, 'cheesy-wotsits.log');
|
||||
}
|
||||
'should read the config from a file': function(args) {
|
||||
assert.equal(args[0], '/path/to/cheese.json');
|
||||
},
|
||||
'should pass config to appender': function(args) {
|
||||
assert.equal(args[1].filename, "whatever.log");
|
||||
}
|
||||
},
|
||||
|
||||
'with no appenders defined' : {
|
||||
topic: function() {
|
||||
var logger
|
||||
, message
|
||||
, that = this
|
||||
, fakeConsoleAppender = {
|
||||
name: "console"
|
||||
, appender: function() {
|
||||
return function(evt) {
|
||||
that.callback(null, evt);
|
||||
}
|
||||
}
|
||||
, configure: function() {
|
||||
return fakeConsoleAppender.appender();
|
||||
}
|
||||
}
|
||||
, log4js = sandbox.require(
|
||||
'../lib/log4js'
|
||||
, {
|
||||
globals: {
|
||||
console: {
|
||||
log: function(msg) {
|
||||
message = msg;
|
||||
}
|
||||
}
|
||||
requires: {
|
||||
'./appenders/console': fakeConsoleAppender
|
||||
}
|
||||
}
|
||||
}
|
||||
);
|
||||
logger = log4js.getLogger("some-logger");
|
||||
logger = log4js.getLogger("some-logger");
|
||||
logger.debug("This is a test");
|
||||
return message;
|
||||
},
|
||||
'should default to the console appender': function(message) {
|
||||
assert.isTrue(/This is a test$/.test(message));
|
||||
'should default to the console appender': function(evt) {
|
||||
assert.equal(evt.data[0], "This is a test");
|
||||
}
|
||||
},
|
||||
|
||||
@@ -409,150 +294,287 @@ vows.describe('log4js').addBatch({
|
||||
},
|
||||
|
||||
'default setup': {
|
||||
topic: function() {
|
||||
var appenderEvents = [],
|
||||
fakeConsole = {
|
||||
'name': 'console'
|
||||
, 'appender': function () {
|
||||
return function(evt) {
|
||||
appenderEvents.push(evt);
|
||||
}
|
||||
}
|
||||
, 'configure': function (config) {
|
||||
return fakeConsole.appender();
|
||||
}
|
||||
},
|
||||
globalConsole = {
|
||||
log: function() { }
|
||||
},
|
||||
log4js = sandbox.require(
|
||||
'../lib/log4js',
|
||||
{
|
||||
requires: {
|
||||
'./appenders/console': fakeConsole
|
||||
},
|
||||
globals: {
|
||||
console: globalConsole
|
||||
}
|
||||
}
|
||||
),
|
||||
logger = log4js.getLogger('a-test');
|
||||
|
||||
logger.debug("this is a test");
|
||||
globalConsole.log("this should not be logged");
|
||||
|
||||
return appenderEvents;
|
||||
},
|
||||
|
||||
'should configure a console appender': function(appenderEvents) {
|
||||
assert.equal(appenderEvents[0].data[0], 'this is a test');
|
||||
},
|
||||
|
||||
'should not replace console.log with log4js version': function(appenderEvents) {
|
||||
assert.equal(appenderEvents.length, 1);
|
||||
}
|
||||
},
|
||||
|
||||
'console' : {
|
||||
topic: setupConsoleTest,
|
||||
|
||||
'when replaceConsole called': {
|
||||
topic: function(test) {
|
||||
test.log4js.replaceConsole();
|
||||
|
||||
test.fakeConsole.log("Some debug message someone put in a module");
|
||||
test.fakeConsole.debug("Some debug");
|
||||
test.fakeConsole.error("An error");
|
||||
test.fakeConsole.info("some info");
|
||||
test.fakeConsole.warn("a warning");
|
||||
|
||||
test.fakeConsole.log("cheese (%s) and biscuits (%s)", "gouda", "garibaldis");
|
||||
test.fakeConsole.log({ lumpy: "tapioca" });
|
||||
test.fakeConsole.log("count %d", 123);
|
||||
test.fakeConsole.log("stringify %j", { lumpy: "tapioca" });
|
||||
|
||||
return test.logEvents;
|
||||
},
|
||||
|
||||
'should replace console.log methods with log4js ones': function(logEvents) {
|
||||
assert.equal(logEvents.length, 9);
|
||||
assert.equal(logEvents[0].data[0], "Some debug message someone put in a module");
|
||||
assert.equal(logEvents[0].level.toString(), "INFO");
|
||||
assert.equal(logEvents[1].data[0], "Some debug");
|
||||
assert.equal(logEvents[1].level.toString(), "DEBUG");
|
||||
assert.equal(logEvents[2].data[0], "An error");
|
||||
assert.equal(logEvents[2].level.toString(), "ERROR");
|
||||
assert.equal(logEvents[3].data[0], "some info");
|
||||
assert.equal(logEvents[3].level.toString(), "INFO");
|
||||
assert.equal(logEvents[4].data[0], "a warning");
|
||||
assert.equal(logEvents[4].level.toString(), "WARN");
|
||||
assert.equal(logEvents[5].data[0], "cheese (%s) and biscuits (%s)");
|
||||
assert.equal(logEvents[5].data[1], "gouda");
|
||||
assert.equal(logEvents[5].data[2], "garibaldis");
|
||||
}
|
||||
},
|
||||
'when turned off': {
|
||||
topic: function(test) {
|
||||
test.log4js.restoreConsole();
|
||||
try {
|
||||
test.fakeConsole.log("This should cause the error described in the setup");
|
||||
} catch (e) {
|
||||
return e;
|
||||
}
|
||||
},
|
||||
'should call the original console methods': function (err) {
|
||||
assert.instanceOf(err, Error);
|
||||
assert.equal(err.message, "this should not be called.");
|
||||
}
|
||||
},
|
||||
'configuration': {
|
||||
topic: function(test) {
|
||||
test.log4js.replaceConsole();
|
||||
test.log4js.configure({ replaceConsole: false });
|
||||
try {
|
||||
test.fakeConsole.log("This should cause the error described in the setup");
|
||||
} catch (e) {
|
||||
return e;
|
||||
}
|
||||
},
|
||||
'should allow for turning off console replacement': function (err) {
|
||||
assert.instanceOf(err, Error);
|
||||
assert.equal(err.message, 'this should not be called.');
|
||||
}
|
||||
}
|
||||
},
|
||||
'configuration persistence' : {
|
||||
topic: function() {
|
||||
var logEvent,
|
||||
firstLog4js = require('../lib/log4js'),
|
||||
secondLog4js;
|
||||
|
||||
firstLog4js.clearAppenders();
|
||||
firstLog4js.addAppender(function(evt) { logEvent = evt; });
|
||||
|
||||
secondLog4js = require('../lib/log4js');
|
||||
secondLog4js.getLogger().info("This should go to the appender defined in firstLog4js");
|
||||
|
||||
return logEvent;
|
||||
},
|
||||
'should maintain appenders between requires': function (logEvent) {
|
||||
assert.equal(logEvent.data[0], "This should go to the appender defined in firstLog4js");
|
||||
}
|
||||
},
|
||||
'configuration reload with configuration changing' : {
|
||||
topic: function() {
|
||||
var pathsChecked = [],
|
||||
message,
|
||||
logEvents = [],
|
||||
logger,
|
||||
modulePath = require('path').normalize(__dirname + '/../lib/log4js.json'),
|
||||
modulePath = 'path/to/log4js.json',
|
||||
fakeFS = {
|
||||
lastMtime: Date.now(),
|
||||
config: { appenders: [ { type: 'console', layout: { type: 'messagePassThrough' } } ],
|
||||
levels: { 'a-test' : 'INFO' } },
|
||||
readdirSync: function(dir) {
|
||||
return require('fs').readdirSync(dir);
|
||||
},
|
||||
readFileSync: function (file, encoding) {
|
||||
assert.equal(file, modulePath);
|
||||
assert.equal(encoding, 'utf8');
|
||||
return '{ "appenders" : [ { "type": "console", "layout": { "type": "messagePassThrough" }} ] }';
|
||||
return JSON.stringify(fakeFS.config);
|
||||
},
|
||||
statSync: function (path) {
|
||||
pathsChecked.push(path);
|
||||
if (path === modulePath) {
|
||||
return true;
|
||||
fakeFS.lastMtime += 1;
|
||||
return { mtime: new Date(fakeFS.lastMtime) };
|
||||
} else {
|
||||
throw new Error("no such file");
|
||||
}
|
||||
}
|
||||
},
|
||||
fakeConsole = {
|
||||
log : function (msg) { message = msg; },
|
||||
info: this.log,
|
||||
warn: this.log,
|
||||
debug: this.log,
|
||||
error: this.log
|
||||
'name': 'console',
|
||||
'appender': function () {
|
||||
return function(evt) { logEvents.push(evt); };
|
||||
},
|
||||
'configure': function (config) {
|
||||
return fakeConsole.appender();
|
||||
}
|
||||
},
|
||||
log4js = sandbox.require(
|
||||
setIntervalCallback,
|
||||
fakeSetInterval = function(cb, timeout) {
|
||||
setIntervalCallback = cb;
|
||||
},
|
||||
log4js = sandbox.require(
|
||||
'../lib/log4js',
|
||||
{
|
||||
requires: {
|
||||
'fs': fakeFS
|
||||
'fs': fakeFS,
|
||||
'./appenders/console': fakeConsole
|
||||
},
|
||||
globals: {
|
||||
'console': fakeConsole
|
||||
'console': fakeConsole,
|
||||
'setInterval' : fakeSetInterval,
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
logger = log4js.getLogger('a-test');
|
||||
logger.debug("this is a test");
|
||||
log4js.configure('path/to/log4js.json', { reloadSecs: 30 });
|
||||
logger = log4js.getLogger('a-test');
|
||||
logger.info("info1");
|
||||
logger.debug("debug2 - should be ignored");
|
||||
fakeFS.config.levels['a-test'] = "DEBUG";
|
||||
setIntervalCallback();
|
||||
logger.info("info3");
|
||||
logger.debug("debug4");
|
||||
|
||||
return [ pathsChecked, message, modulePath ];
|
||||
},
|
||||
|
||||
'should check current directory, require paths, and finally the module dir for log4js.json': function(args) {
|
||||
var pathsChecked = args[0];
|
||||
expectedPaths = ['log4js.json'].concat(
|
||||
require.paths.map(function(item) {
|
||||
return item + '/log4js.json';
|
||||
}),
|
||||
args[2]
|
||||
);
|
||||
assert.deepEqual(pathsChecked, expectedPaths);
|
||||
},
|
||||
|
||||
'should configure log4js from first log4js.json found': function(args) {
|
||||
var message = args[1];
|
||||
assert.equal(message, 'this is a test');
|
||||
}
|
||||
},
|
||||
|
||||
'logLevelFilter': {
|
||||
topic: function() {
|
||||
var log4js = require('../lib/log4js'), logEvents = [], logger;
|
||||
log4js.clearAppenders();
|
||||
log4js.addAppender(log4js.logLevelFilter('ERROR', function(evt) { logEvents.push(evt); }), "logLevelTest");
|
||||
logger = log4js.getLogger("logLevelTest");
|
||||
logger.debug('this should not trigger an event');
|
||||
logger.warn('neither should this');
|
||||
logger.error('this should, though');
|
||||
logger.fatal('so should this');
|
||||
return logEvents;
|
||||
},
|
||||
'should only pass log events greater than or equal to its own level' : function(logEvents) {
|
||||
assert.length(logEvents, 2);
|
||||
assert.equal(logEvents[0].data[0], 'this should, though');
|
||||
assert.equal(logEvents[1].data[0], 'so should this');
|
||||
'should configure log4js from first log4js.json found': function(logEvents) {
|
||||
assert.equal(logEvents[0].data[0], 'info1');
|
||||
assert.equal(logEvents[1].data[0], 'info3');
|
||||
assert.equal(logEvents[2].data[0], 'debug4');
|
||||
assert.equal(logEvents.length, 3);
|
||||
}
|
||||
},
|
||||
|
||||
'console' : {
|
||||
'configuration reload with configuration staying the same' : {
|
||||
topic: function() {
|
||||
var fakeConsole = {}
|
||||
, logEvents = []
|
||||
, log4js;
|
||||
|
||||
['trace','debug','log','info','warn','error'].forEach(function(fn) {
|
||||
fakeConsole[fn] = function() {
|
||||
throw new Error("this should not be called.");
|
||||
};
|
||||
});
|
||||
|
||||
var pathsChecked = [],
|
||||
fileRead = 0,
|
||||
logEvents = [],
|
||||
logger,
|
||||
modulePath = require('path').normalize(__dirname + '/../lib/log4js.json'),
|
||||
mtime = new Date(),
|
||||
fakeFS = {
|
||||
config: { appenders: [ { type: 'console', layout: { type: 'messagePassThrough' } } ],
|
||||
levels: { 'a-test' : 'INFO' } },
|
||||
readdirSync: function(dir) {
|
||||
return require('fs').readdirSync(dir);
|
||||
},
|
||||
readFileSync: function (file, encoding) {
|
||||
fileRead += 1;
|
||||
assert.isString(file);
|
||||
assert.equal(file, modulePath);
|
||||
assert.equal(encoding, 'utf8');
|
||||
return JSON.stringify(fakeFS.config);
|
||||
},
|
||||
statSync: function (path) {
|
||||
pathsChecked.push(path);
|
||||
if (path === modulePath) {
|
||||
return { mtime: mtime };
|
||||
} else {
|
||||
throw new Error("no such file");
|
||||
}
|
||||
}
|
||||
},
|
||||
fakeConsole = {
|
||||
'name': 'console',
|
||||
'appender': function () {
|
||||
return function(evt) { logEvents.push(evt); };
|
||||
},
|
||||
'configure': function (config) {
|
||||
return fakeConsole.appender();
|
||||
}
|
||||
},
|
||||
setIntervalCallback,
|
||||
fakeSetInterval = function(cb, timeout) {
|
||||
setIntervalCallback = cb;
|
||||
},
|
||||
log4js = sandbox.require(
|
||||
'../lib/log4js'
|
||||
, {
|
||||
globals: {
|
||||
console: fakeConsole
|
||||
}
|
||||
}
|
||||
'../lib/log4js',
|
||||
{
|
||||
requires: {
|
||||
'fs': fakeFS,
|
||||
'./appenders/console': fakeConsole
|
||||
},
|
||||
globals: {
|
||||
'console': fakeConsole,
|
||||
'setInterval' : fakeSetInterval,
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
log4js.clearAppenders();
|
||||
log4js.addAppender(function(evt) {
|
||||
logEvents.push(evt);
|
||||
});
|
||||
log4js.configure(modulePath, { reloadSecs: 3 });
|
||||
logger = log4js.getLogger('a-test');
|
||||
logger.info("info1");
|
||||
logger.debug("debug2 - should be ignored");
|
||||
setIntervalCallback();
|
||||
logger.info("info3");
|
||||
logger.debug("debug4");
|
||||
|
||||
fakeConsole.log("Some debug message someone put in a module");
|
||||
fakeConsole.debug("Some debug");
|
||||
fakeConsole.error("An error");
|
||||
fakeConsole.info("some info");
|
||||
fakeConsole.warn("a warning");
|
||||
|
||||
fakeConsole.log("cheese (%s) and biscuits (%s)", "gouda", "garibaldis");
|
||||
fakeConsole.log({ lumpy: "tapioca" });
|
||||
fakeConsole.log("count %d", 123);
|
||||
fakeConsole.log("stringify %j", { lumpy: "tapioca" });
|
||||
|
||||
return logEvents;
|
||||
return [ pathsChecked, logEvents, modulePath, fileRead ];
|
||||
},
|
||||
'should replace console.log methods with log4js ones': function(logEvents) {
|
||||
assert.equal(logEvents[0].data[0], "Some debug message someone put in a module");
|
||||
assert.equal(logEvents[0].level.toString(), "INFO");
|
||||
assert.equal(logEvents[1].data[0], "Some debug");
|
||||
assert.equal(logEvents[1].level.toString(), "DEBUG");
|
||||
assert.equal(logEvents[2].data[0], "An error");
|
||||
assert.equal(logEvents[2].level.toString(), "ERROR");
|
||||
assert.equal(logEvents[3].data[0], "some info");
|
||||
assert.equal(logEvents[3].level.toString(), "INFO");
|
||||
assert.equal(logEvents[4].data[0], "a warning");
|
||||
assert.equal(logEvents[4].level.toString(), "WARN");
|
||||
'should only read the configuration file once': function(args) {
|
||||
var fileRead = args[3];
|
||||
assert.equal(fileRead, 1);
|
||||
},
|
||||
'should configure log4js from first log4js.json found': function(args) {
|
||||
var logEvents = args[1];
|
||||
assert.equal(logEvents.length, 2);
|
||||
assert.equal(logEvents[0].data[0], 'info1');
|
||||
assert.equal(logEvents[1].data[0], 'info3');
|
||||
}
|
||||
},
|
||||
'configuration persistence' : {
|
||||
'should maintain appenders between requires': function () {
|
||||
var logEvent, firstLog4js = require('../lib/log4js'), secondLog4js;
|
||||
firstLog4js.clearAppenders();
|
||||
firstLog4js.addAppender(function(evt) { logEvent = evt; });
|
||||
|
||||
secondLog4js = require('../lib/log4js');
|
||||
secondLog4js.getLogger().info("This should go to the appender defined in firstLog4js");
|
||||
|
||||
assert.equal(logEvent.data[0], "This should go to the appender defined in firstLog4js");
|
||||
}
|
||||
}
|
||||
|
||||
}).export(module);
|
||||
|
||||
173
test/multiprocessAppender.js
Normal file
@@ -0,0 +1,173 @@
|
||||
var vows = require('vows');
|
||||
var assert = require('assert');
|
||||
var sandbox = require('sandboxed-module');
|
||||
var _ = require('underscore');
|
||||
|
||||
function fancyResultingMultiprocessAppender(opts) {
|
||||
var result = { clientOns: {}, serverOns: {}, logged: [], ended: [] };
|
||||
|
||||
var fakeSocket = {
|
||||
on: function (event, fn) {
|
||||
result.clientOns[event] = fn;
|
||||
if (event === 'connect') {
|
||||
fn();
|
||||
}
|
||||
},
|
||||
end: function (data, encoding) {
|
||||
result.ended.push({ data: data, encoding: encoding });
|
||||
}
|
||||
}
|
||||
|
||||
var fakeServerSocket = {
|
||||
on: function (event, fn) {
|
||||
result.serverOns[event] = fn;
|
||||
if (event === 'connect') {
|
||||
fn();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var fakeServer = {
|
||||
listen: function (port, host) {
|
||||
result.listenPort = port;
|
||||
result.listenHost = host;
|
||||
}
|
||||
}
|
||||
|
||||
var fakeNet = {
|
||||
createServer: function (fn) {
|
||||
fn(fakeServerSocket);
|
||||
return fakeServer;
|
||||
},
|
||||
createConnection: function (port, host) {
|
||||
result.connectPort = port;
|
||||
result.connectHost = host;
|
||||
return fakeSocket;
|
||||
}
|
||||
}
|
||||
|
||||
var fakeLog4Js = {
|
||||
appenderMakers: {}
|
||||
};
|
||||
fakeLog4Js.loadAppender = function (appender) {
|
||||
fakeLog4Js.appenderMakers[appender] = function (config) {
|
||||
result.actualLoggerConfig = config;
|
||||
return function log(logEvent) {
|
||||
result.logged.push(logEvent);
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
return { theResult: result,
|
||||
theModule: sandbox.require('../lib/appenders/multiprocess', {
|
||||
requires: {
|
||||
'../log4js': fakeLog4Js,
|
||||
'net': fakeNet
|
||||
}
|
||||
})
|
||||
};
|
||||
}
|
||||
|
||||
function logMessages(result, logs, raw) {
|
||||
logs.forEach(function log(item) {
|
||||
var logItem = { startTime: "Wed, 02 Nov 2011 21:46:39 GMT", level: { levelStr: 'DEBUG' }, data: [ item ] };
|
||||
result.serverOns.data(JSON.stringify(logItem));
|
||||
result.serverOns.end();
|
||||
result.serverOns.connect();
|
||||
});
|
||||
if (raw) {
|
||||
raw.forEach(function log(rawItem) {
|
||||
result.serverOns.data(rawItem);
|
||||
result.serverOns.end();
|
||||
result.serverOns.connect();
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
vows.describe('log4js multiprocessAppender').addBatch({
|
||||
'master': {
|
||||
topic: function() {
|
||||
var fancy = fancyResultingMultiprocessAppender();
|
||||
var logger = fancy.theModule.configure({ mode: 'master', 'loggerPort': 5001, 'loggerHost': 'abba', appender: { type: 'file' } });
|
||||
logMessages(fancy.theResult, [ 'ALRIGHTY THEN', 'OH WOW' ]);
|
||||
return fancy.theResult;
|
||||
},
|
||||
|
||||
'should write to the actual appender': function (result) {
|
||||
assert.equal(result.listenPort, 5001);
|
||||
assert.equal(result.listenHost, 'abba');
|
||||
assert.equal(result.logged.length, 2);
|
||||
assert.equal(result.logged[0].data[0], 'ALRIGHTY THEN');
|
||||
assert.equal(result.logged[1].data[0], 'OH WOW');
|
||||
},
|
||||
|
||||
'data written should be formatted correctly': function (result) {
|
||||
assert.equal(result.logged[0].level.toString(), 'DEBUG');
|
||||
assert.equal(result.logged[0].data, 'ALRIGHTY THEN');
|
||||
assert.isTrue(typeof(result.logged[0].startTime) === 'object');
|
||||
assert.equal(result.logged[1].level.toString(), 'DEBUG');
|
||||
assert.equal(result.logged[1].data, 'OH WOW');
|
||||
assert.isTrue(typeof(result.logged[1].startTime) === 'object');
|
||||
},
|
||||
|
||||
'the actual logger should get the right config': function (result) {
|
||||
assert.equal(result.actualLoggerConfig.type, 'file');
|
||||
},
|
||||
|
||||
'client should not be called': function (result) {
|
||||
assert.equal(_.keys(result.clientOns).length, 0);
|
||||
}
|
||||
},
|
||||
'master with bad request': {
|
||||
topic: function() {
|
||||
var fancy = fancyResultingMultiprocessAppender();
|
||||
var logger = fancy.theModule.configure({ mode: 'master', 'loggerPort': 5001, 'loggerHost': 'abba', appender: { type: 'file' } });
|
||||
logMessages(fancy.theResult, [], [ 'ALRIGHTY THEN', 'OH WOW' ]);
|
||||
return fancy.theResult;
|
||||
},
|
||||
|
||||
'should write to the actual appender': function (result) {
|
||||
assert.equal(result.listenPort, 5001);
|
||||
assert.equal(result.listenHost, 'abba');
|
||||
assert.equal(result.logged.length, 2);
|
||||
assert.equal(result.logged[0].data[0], 'Unable to parse log: ALRIGHTY THEN');
|
||||
assert.equal(result.logged[1].data[0], 'Unable to parse log: OH WOW');
|
||||
},
|
||||
|
||||
'data written should be formatted correctly': function (result) {
|
||||
assert.equal(result.logged[0].level.toString(), 'ERROR');
|
||||
assert.equal(result.logged[0].data, 'Unable to parse log: ALRIGHTY THEN');
|
||||
assert.isTrue(typeof(result.logged[0].startTime) === 'object');
|
||||
assert.equal(result.logged[1].level.toString(), 'ERROR');
|
||||
assert.equal(result.logged[1].data, 'Unable to parse log: OH WOW');
|
||||
assert.isTrue(typeof(result.logged[1].startTime) === 'object');
|
||||
}
|
||||
},
|
||||
'worker': {
|
||||
'should emit logging events to the master': {
|
||||
topic: function() {
|
||||
var fancy = fancyResultingMultiprocessAppender();
|
||||
var logger = fancy.theModule.configure({ loggerHost: 'baba', loggerPort: 1232, name: 'ohno', mode: 'worker', appender: { type: 'file' } });
|
||||
logger({ level: { levelStr: 'INFO' }, data: "ALRIGHTY THEN", startTime: '2011-10-27T03:53:16.031Z' });
|
||||
logger({ level: { levelStr: 'DEBUG' }, data: "OH WOW", startTime: '2011-10-27T04:53:16.031Z'});
|
||||
return fancy.theResult;
|
||||
},
|
||||
|
||||
'client configuration should be correct': function (result) {
|
||||
assert.equal(result.connectHost, 'baba');
|
||||
assert.equal(result.connectPort, 1232);
|
||||
},
|
||||
|
||||
'should not write to the actual appender': function (result) {
|
||||
assert.equal(result.logged.length, 0);
|
||||
assert.equal(result.ended.length, 2);
|
||||
assert.equal(result.ended[0].data, JSON.stringify({ level: { levelStr: 'INFO' }, data: "ALRIGHTY THEN", startTime: '2011-10-27T03:53:16.031Z' }));
|
||||
assert.equal(result.ended[0].encoding, 'utf8');
|
||||
assert.equal(result.ended[1].data, JSON.stringify({ level: { levelStr: 'DEBUG' }, data: "OH WOW", startTime: '2011-10-27T04:53:16.031Z'}));
|
||||
assert.equal(result.ended[1].encoding, 'utf8');
|
||||
assert.equal(_.keys(result.serverOns).length, 0);
|
||||
}
|
||||
}
|
||||
}
|
||||
}).exportTo(module);
|
||||
126
test/rollingFileStream.js
Normal file
@@ -0,0 +1,126 @@
|
||||
var vows = require('vows')
|
||||
, assert = require('assert')
|
||||
, events = require('events')
|
||||
, fs = require('fs')
|
||||
, RollingFileStream = require('../lib/streams').RollingFileStream;
|
||||
|
||||
function remove(filename) {
|
||||
try {
|
||||
fs.unlinkSync(filename);
|
||||
} catch (e) {
|
||||
//doesn't really matter if it failed
|
||||
}
|
||||
}
|
||||
|
||||
vows.describe('RollingFileStream').addBatch({
|
||||
'arguments': {
|
||||
topic: function() {
|
||||
remove(__dirname + "/test-rolling-file-stream");
|
||||
return new RollingFileStream("test-rolling-file-stream", 1024, 5);
|
||||
},
|
||||
'should take a filename, file size in bytes, number of backups as arguments and return a FileWriteStream': function(stream) {
|
||||
assert.instanceOf(stream, fs.FileWriteStream);
|
||||
assert.equal(stream.filename, "test-rolling-file-stream");
|
||||
assert.equal(stream.size, 1024);
|
||||
assert.equal(stream.backups, 5);
|
||||
},
|
||||
'with default settings for the underlying stream': function(stream) {
|
||||
assert.equal(stream.mode, 420);
|
||||
assert.equal(stream.flags, 'a');
|
||||
assert.equal(stream.encoding, 'utf8');
|
||||
}
|
||||
},
|
||||
'with stream arguments': {
|
||||
topic: function() {
|
||||
remove(__dirname + '/test-rolling-file-stream');
|
||||
return new RollingFileStream('test-rolling-file-stream', 1024, 5, { mode: 0666 });
|
||||
},
|
||||
'should pass them to the underlying stream': function(stream) {
|
||||
assert.equal(stream.mode, 0666);
|
||||
}
|
||||
},
|
||||
'without size': {
|
||||
topic: function() {
|
||||
try {
|
||||
new RollingFileStream(__dirname + "/test-rolling-file-stream");
|
||||
} catch (e) {
|
||||
return e;
|
||||
}
|
||||
},
|
||||
'should throw an error': function(err) {
|
||||
assert.instanceOf(err, Error);
|
||||
}
|
||||
},
|
||||
'without number of backups': {
|
||||
topic: function() {
|
||||
remove('test-rolling-file-stream');
|
||||
return new RollingFileStream(__dirname + "/test-rolling-file-stream", 1024);
|
||||
},
|
||||
'should default to 1 backup': function(stream) {
|
||||
assert.equal(stream.backups, 1);
|
||||
}
|
||||
},
|
||||
'writing less than the file size': {
|
||||
topic: function() {
|
||||
remove(__dirname + "/test-rolling-file-stream-write-less");
|
||||
var that = this, stream = new RollingFileStream(__dirname + "/test-rolling-file-stream-write-less", 100);
|
||||
stream.on("open", function() { that.callback(null, stream); });
|
||||
},
|
||||
'(when open)': {
|
||||
topic: function(stream) {
|
||||
stream.write("cheese", "utf8");
|
||||
stream.end();
|
||||
fs.readFile(__dirname + "/test-rolling-file-stream-write-less", "utf8", this.callback);
|
||||
},
|
||||
'should write to the file': function(contents) {
|
||||
assert.equal(contents, "cheese");
|
||||
},
|
||||
'the number of files': {
|
||||
topic: function() {
|
||||
fs.readdir(__dirname, this.callback);
|
||||
},
|
||||
'should be one': function(files) {
|
||||
assert.equal(files.filter(function(file) { return file.indexOf('test-rolling-file-stream-write-less') > -1; }).length, 1);
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
'writing more than the file size': {
|
||||
topic: function() {
|
||||
remove(__dirname + "/test-rolling-file-stream-write-more");
|
||||
remove(__dirname + "/test-rolling-file-stream-write-more.1");
|
||||
var that = this, stream = new RollingFileStream(__dirname + "/test-rolling-file-stream-write-more", 45);
|
||||
stream.on("open", function() {
|
||||
for (var i=0; i < 7; i++) {
|
||||
stream.write(i +".cheese\n", "utf8");
|
||||
}
|
||||
//wait for the file system to catch up with us
|
||||
setTimeout(that.callback, 100);
|
||||
});
|
||||
},
|
||||
'the number of files': {
|
||||
topic: function() {
|
||||
fs.readdir(__dirname, this.callback);
|
||||
},
|
||||
'should be two': function(files) {
|
||||
assert.equal(files.filter(function(file) { return file.indexOf('test-rolling-file-stream-write-more') > -1; }).length, 2);
|
||||
}
|
||||
},
|
||||
'the first file': {
|
||||
topic: function() {
|
||||
fs.readFile(__dirname + "/test-rolling-file-stream-write-more", "utf8", this.callback);
|
||||
},
|
||||
'should contain the last two log messages': function(contents) {
|
||||
assert.equal(contents, '5.cheese\n6.cheese\n');
|
||||
}
|
||||
},
|
||||
'the second file': {
|
||||
topic: function() {
|
||||
fs.readFile(__dirname + '/test-rolling-file-stream-write-more.1', "utf8", this.callback);
|
||||
},
|
||||
'should contain the first five log messages': function(contents) {
|
||||
assert.equal(contents, '0.cheese\n1.cheese\n2.cheese\n3.cheese\n4.cheese\n');
|
||||
}
|
||||
}
|
||||
}
|
||||
}).exportTo(module);
|
||||
162
test/smtpAppender.js
Normal file
@@ -0,0 +1,162 @@
|
||||
var vows = require('vows'),
|
||||
assert = require('assert'),
|
||||
log4js = require('../lib/log4js'),
|
||||
sandbox = require('sandboxed-module');
|
||||
|
||||
function setupLogging(category, options) {
|
||||
var msgs = [];
|
||||
|
||||
var fakeMailer = {
|
||||
send_mail: function (msg, callback) {
|
||||
msgs.push(msg);
|
||||
callback(null, true);
|
||||
}
|
||||
};
|
||||
|
||||
var smtpModule = sandbox.require('../lib/appenders/smtp', {
|
||||
requires: {
|
||||
'nodemailer': fakeMailer
|
||||
}
|
||||
});
|
||||
|
||||
log4js.addAppender(smtpModule.configure(options), category);
|
||||
|
||||
return {
|
||||
logger: log4js.getLogger(category),
|
||||
mailer: fakeMailer,
|
||||
results: msgs
|
||||
};
|
||||
}
|
||||
|
||||
function checkMessages (result, sender, subject) {
|
||||
for (var i = 0; i < result.results.length; ++i) {
|
||||
assert.equal(result.results[i].sender, sender ? sender : result.mailer.SMTP.user);
|
||||
assert.equal(result.results[i].to, 'recipient@domain.com');
|
||||
assert.equal(result.results[i].subject, subject ? subject : 'Log event #' + (i+1));
|
||||
assert.ok(new RegExp('.+Log event #' + (i+1) + '\n$').test(result.results[i].body));
|
||||
}
|
||||
}
|
||||
|
||||
log4js.clearAppenders();
|
||||
vows.describe('log4js smtpAppender').addBatch({
|
||||
'minimal config': {
|
||||
topic: function() {
|
||||
var setup = setupLogging('minimal config', {
|
||||
recipients: 'recipient@domain.com',
|
||||
smtp: {
|
||||
port: 25,
|
||||
user: 'user@domain.com'
|
||||
}
|
||||
});
|
||||
setup.logger.info('Log event #1');
|
||||
return setup;
|
||||
},
|
||||
'mailer should be configured properly': function (result) {
|
||||
assert.ok(result.mailer.SMTP);
|
||||
assert.equal(result.mailer.SMTP.port, 25);
|
||||
assert.equal(result.mailer.SMTP.user, 'user@domain.com');
|
||||
},
|
||||
'there should be one message only': function (result) {
|
||||
assert.equal(result.results.length, 1);
|
||||
},
|
||||
'message should contain proper data': function (result) {
|
||||
checkMessages(result);
|
||||
}
|
||||
},
|
||||
'fancy config': {
|
||||
topic: function() {
|
||||
var setup = setupLogging('fancy config', {
|
||||
recipients: 'recipient@domain.com',
|
||||
sender: 'sender@domain.com',
|
||||
subject: 'This is subject',
|
||||
smtp: {
|
||||
port: 25,
|
||||
user: 'user@domain.com'
|
||||
}
|
||||
});
|
||||
setup.logger.info('Log event #1');
|
||||
return setup;
|
||||
},
|
||||
'mailer should be configured properly': function (result) {
|
||||
assert.ok(result.mailer.SMTP);
|
||||
assert.equal(result.mailer.SMTP.port, 25);
|
||||
assert.equal(result.mailer.SMTP.user, 'user@domain.com');
|
||||
},
|
||||
'there should be one message only': function (result) {
|
||||
assert.equal(result.results.length, 1);
|
||||
},
|
||||
'message should contain proper data': function (result) {
|
||||
checkMessages(result, 'sender@domain.com', 'This is subject');
|
||||
}
|
||||
},
|
||||
'separate email for each event': {
|
||||
topic: function() {
|
||||
var self = this;
|
||||
var setup = setupLogging('separate email for each event', {
|
||||
recipients: 'recipient@domain.com',
|
||||
smtp: {
|
||||
port: 25,
|
||||
user: 'user@domain.com'
|
||||
}
|
||||
});
|
||||
setTimeout(function () {
|
||||
setup.logger.info('Log event #1');
|
||||
}, 0);
|
||||
setTimeout(function () {
|
||||
setup.logger.info('Log event #2');
|
||||
}, 500);
|
||||
setTimeout(function () {
|
||||
setup.logger.info('Log event #3');
|
||||
}, 1050);
|
||||
setTimeout(function () {
|
||||
self.callback(null, setup);
|
||||
}, 2100);
|
||||
},
|
||||
'there should be three messages': function (result) {
|
||||
assert.equal(result.results.length, 3);
|
||||
},
|
||||
'messages should contain proper data': function (result) {
|
||||
checkMessages(result);
|
||||
}
|
||||
},
|
||||
'multiple events in one email': {
|
||||
topic: function() {
|
||||
var self = this;
|
||||
var setup = setupLogging('multiple events in one email', {
|
||||
recipients: 'recipient@domain.com',
|
||||
sendInterval: 1,
|
||||
smtp: {
|
||||
port: 25,
|
||||
user: 'user@domain.com'
|
||||
}
|
||||
});
|
||||
setTimeout(function () {
|
||||
setup.logger.info('Log event #1');
|
||||
}, 0);
|
||||
setTimeout(function () {
|
||||
setup.logger.info('Log event #2');
|
||||
}, 500);
|
||||
setTimeout(function () {
|
||||
setup.logger.info('Log event #3');
|
||||
}, 1050);
|
||||
setTimeout(function () {
|
||||
self.callback(null, setup);
|
||||
}, 2100);
|
||||
},
|
||||
'there should be two messages': function (result) {
|
||||
assert.equal(result.results.length, 2);
|
||||
},
|
||||
'messages should contain proper data': function (result) {
|
||||
assert.equal(result.results[0].sender, result.mailer.SMTP.user);
|
||||
assert.equal(result.results[0].to, 'recipient@domain.com');
|
||||
assert.equal(result.results[0].subject, 'Log event #1');
|
||||
assert.equal(result.results[0].body.match(new RegExp('.+Log event #[1-2]$', 'gm')).length, 2);
|
||||
|
||||
assert.equal(result.results[1].sender, result.mailer.SMTP.user);
|
||||
assert.equal(result.results[1].to, 'recipient@domain.com');
|
||||
assert.equal(result.results[1].subject, 'Log event #3');
|
||||
assert.ok(new RegExp('.+Log event #3\n$').test(result.results[1].body));
|
||||
}
|
||||
}
|
||||
|
||||
}).export(module);
|
||||
126
test/test-configureNoLevels.js
Normal file
@@ -0,0 +1,126 @@
|
||||
// This test shows unexpected behaviour for log4js.configure() in log4js-node@0.4.3 and earlier:
|
||||
// 1) log4js.configure(), log4js.configure(null), log4js.configure({}), log4js.configure(<some object with no levels prop>)
|
||||
// all set all loggers' levels to trace, even if they were previously set to something else.
|
||||
// 2) log4js.configure({levels:{}}), log4js.configure({levels: {foo: bar}}) leaves previously set logger levels intact.
|
||||
//
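// Illustrative sketch of the behaviours listed above (hedged -- not part of the test run,
// and the 'foo' logger name is made up):
//   var log4js = require('../lib/log4js');
//   log4js.getLogger('foo').setLevel('ERROR');
//   log4js.configure({});                                      // case 1: no "levels" property
//   log4js.getLogger('foo').isLevelEnabled(toLevel('Trace'));  // => true, level was reset to TRACE
//   // whereas log4js.configure({ levels: {} }) (case 2) leaves 'foo' at ERROR.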
|
||||
|
||||
// Basic set up
|
||||
var vows = require('vows');
|
||||
var assert = require('assert');
|
||||
var toLevel = require('../lib/levels').toLevel;
|
||||
|
||||
// uncomment one or other of the following to see progress (or not) while running the tests
|
||||
// var showProgress = console.log;
|
||||
var showProgress = function() {};
|
||||
|
||||
|
||||
// Define the array of levels as string to iterate over.
|
||||
var strLevels= ['Trace','Debug','Info','Warn','Error','Fatal'];
|
||||
|
||||
// setup the configurations we want to test
|
||||
var configs = {
|
||||
'nop': 'nop', // special case where the iterating vows generator will not call log4js.configure
|
||||
'is undefined': undefined,
|
||||
'is null': null,
|
||||
'is empty': {},
|
||||
'has no levels': {foo: 'bar'},
|
||||
'has null levels': {levels: null},
|
||||
'has empty levels': {levels: {}},
|
||||
'has random levels': {levels: {foo: 'bar'}},
|
||||
'has some valid levels': {levels: {A: 'INFO'}}
|
||||
}
|
||||
|
||||
// Set up the basic vows batches for this test
|
||||
var batches = [];
|
||||
|
||||
|
||||
function getLoggerName(level) {
|
||||
return level+'-logger';
|
||||
}
|
||||
|
||||
// the common vows top-level context, whether log4js.configure is called or not
|
||||
// just making sure that the code is common,
|
||||
// so that there are no spurious errors in the tests themselves.
|
||||
function getTopLevelContext(nop, configToTest, name) {
|
||||
return {
|
||||
topic: function() {
|
||||
var log4js = require('../lib/log4js');
|
||||
// create loggers for each level,
|
||||
// keeping the level in the logger's name for traceability
|
||||
strLevels.forEach(function(l) {
|
||||
log4js.getLogger(getLoggerName(l)).setLevel(l);
|
||||
});
|
||||
|
||||
if (!nop) {
|
||||
showProgress('** Configuring log4js with', configToTest);
|
||||
log4js.configure(configToTest);
|
||||
}
|
||||
else {
|
||||
showProgress('** Not configuring log4js');
|
||||
}
|
||||
return log4js;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
showProgress('Populating batch object...');
|
||||
|
||||
// Populating the batches programmatically,
|
||||
// as there are (configs.length x strLevels.length x strLevels.length) = 324 possible test combinations
|
||||
for (var cfg in configs) {
|
||||
var configToTest = configs[cfg];
|
||||
var nop = configToTest === 'nop';
|
||||
var context;
|
||||
if (nop) {
|
||||
context = 'Setting up loggers with initial levels, then NOT setting a configuration,';
|
||||
}
|
||||
else {
|
||||
context = 'Setting up loggers with initial levels, then setting a configuration which '+cfg+',';
|
||||
}
|
||||
|
||||
showProgress('Setting up the vows batch and context for '+context);
|
||||
// each config to be tested has its own vows batch with a single top-level context
|
||||
var batch={};
|
||||
batch[context]= getTopLevelContext(nop, configToTest, context);
|
||||
batches.push(batch);
|
||||
|
||||
// each top-level context has strLevels sub-contexts, one per logger which has been set to a specific level in the top-level context's topic
|
||||
strLevels.forEach(function (baseLevel) {
|
||||
var baseLevelSubContext = 'and checking the logger whose level was set to '+baseLevel ;
|
||||
batch[context][baseLevelSubContext] = {topic: baseLevel};
|
||||
|
||||
// each logging level has strLevels sub-contexts,
|
||||
// to exhaustively test all the combinations of setLevel(baseLevel) and isLevelEnabled(comparisonLevel) per config
|
||||
strLevels.forEach(function (comparisonLevel) {
|
||||
var comparisonLevelSubContext = 'with isLevelEnabled('+comparisonLevel+')';
|
||||
|
||||
// calculate this independently of log4js, but we'll add a vow later on to check that we're not mismatched with log4js
|
||||
var expectedResult = strLevels.indexOf(baseLevel) <= strLevels.indexOf(comparisonLevel);
|
||||
|
||||
// the topic simply gathers all the parameters for the vow into an object, to simplify the vow's work.
|
||||
batch[context][baseLevelSubContext][comparisonLevelSubContext] = {topic: function(baseLevel, log4js){
|
||||
return {comparisonLevel: comparisonLevel, baseLevel: baseLevel, log4js: log4js, expectedResult: expectedResult};
|
||||
}};
|
||||
|
||||
var vow = 'should return '+expectedResult;
|
||||
batch[context][baseLevelSubContext][comparisonLevelSubContext][vow] = function(topic){
|
||||
var result = topic.log4js.getLogger(getLoggerName(topic.baseLevel)).isLevelEnabled(topic.log4js.levels.toLevel(topic.comparisonLevel));
|
||||
assert.equal(result, topic.expectedResult, 'Failed: '+getLoggerName(topic.baseLevel)+'.isLevelEnabled( '+topic.comparisonLevel+' ) returned '+result);
|
||||
};
|
||||
|
||||
// the extra vow to check the comparison between baseLevel and comparisonLevel we performed earlier matches log4js' comparison too
|
||||
batch[context][baseLevelSubContext][comparisonLevelSubContext]['finally checking for comparison mismatch with log4js'] = function(topic){
|
||||
var er = topic.log4js.levels.toLevel(topic.baseLevel).isLessThanOrEqualTo(topic.log4js.levels.toLevel(topic.comparisonLevel));
|
||||
assert.equal(er, topic.expectedResult, 'Mismatch: for setLevel('+topic.baseLevel+') was expecting a comparison with '+topic.comparisonLevel+' to be '+topic.expectedResult);
|
||||
};
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
showProgress('Running tests');
|
||||
var v = vows.describe('log4js.configure(), with or without a "levels" property');
|
||||
|
||||
batches.forEach(function(batch) {v=v.addBatch(batch)});
|
||||
|
||||
v.export(module);
|
||||
|
||||
@@ -76,8 +76,8 @@ vows.describe('log4js connect logger').addBatch({
|
||||
|
||||
'check message': function(messages) {
|
||||
assert.isArray(messages);
|
||||
assert.length(messages, 1);
|
||||
assert.equal(messages[0].level, levels.INFO);
|
||||
assert.equal(messages.length, 1);
|
||||
assert.ok(levels.INFO.isEqualTo(messages[0].level));
|
||||
assert.include(messages[0].message, 'GET');
|
||||
assert.include(messages[0].message, 'http://url');
|
||||
assert.include(messages[0].message, 'my.remote.addr');
|
||||
@@ -117,8 +117,8 @@ vows.describe('log4js connect logger').addBatch({
|
||||
|
||||
'check message': function(messages) {
|
||||
assert.isArray(messages);
|
||||
assert.length(messages, 1);
|
||||
assert.equal(messages[0].level, levels.INFO);
|
||||
assert.equal(messages.length, 1);
|
||||
assert.ok(levels.INFO.isEqualTo(messages[0].level));
|
||||
assert.equal(messages[0].message, 'GET http://url');
|
||||
}
|
||||
}
|
||||
|
||||
68
test/test-log-abspath.js
Normal file
@@ -0,0 +1,68 @@
|
||||
var vows = require('vows')
|
||||
, assert = require('assert')
|
||||
, sandbox = require('sandboxed-module');
|
||||
|
||||
vows.describe('log4js-abspath').addBatch({
|
||||
'options': {
|
||||
topic: function() {
|
||||
var appenderOptions,
|
||||
log4js = sandbox.require(
|
||||
'../lib/log4js',
|
||||
{ requires:
|
||||
{ './appenders/fake':
|
||||
{
|
||||
name: "fake",
|
||||
appender: function() {},
|
||||
configure: function(configuration, options) {
|
||||
appenderOptions = options;
|
||||
return function() {};
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
),
|
||||
config = {
|
||||
"appenders": [
|
||||
{
|
||||
"type" : "fake",
|
||||
"filename" : "cheesy-wotsits.log"
|
||||
}
|
||||
]
|
||||
};
|
||||
|
||||
log4js.configure(config, {
|
||||
cwd: '/absolute/path/to'
|
||||
});
|
||||
return appenderOptions;
|
||||
},
|
||||
'should be passed to appenders during configuration': function(options) {
|
||||
assert.equal(options.cwd, '/absolute/path/to');
|
||||
}
|
||||
},
|
||||
|
||||
'file appender': {
|
||||
topic: function() {
|
||||
var fileOpened,
|
||||
fileAppender = sandbox.require(
|
||||
'../lib/appenders/file',
|
||||
{ requires:
|
||||
{ '../streams':
|
||||
{
|
||||
RollingFileStream: function(file) {
|
||||
fileOpened = file;
|
||||
},
|
||||
BufferedWriteStream: function(other) {
|
||||
return { on: function() { }, end: function() {} }
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
);
|
||||
fileAppender.configure({ filename: "whatever.log", maxLogSize: 10 }, { cwd: '/absolute/path/to' });
|
||||
return fileOpened;
|
||||
},
|
||||
'should prepend options.cwd to config.filename': function(fileOpened) {
|
||||
assert.equal(fileOpened, "/absolute/path/to/whatever.log");
|
||||
}
|
||||
},
|
||||
}).export(module);
|
||||
260
test/test-nolog.js
Normal file
@@ -0,0 +1,260 @@
|
||||
var vows = require('vows')
|
||||
, assert = require('assert')
|
||||
, levels = require('../lib/levels');
|
||||
|
||||
function MockLogger() {
|
||||
|
||||
var that = this;
|
||||
this.messages = [];
|
||||
|
||||
this.log = function(level, message, exception) {
|
||||
that.messages.push({ level: level, message: message });
|
||||
};
|
||||
|
||||
this.isLevelEnabled = function(level) {
|
||||
return level.isGreaterThanOrEqualTo(that.level);
|
||||
};
|
||||
|
||||
this.level = levels.TRACE;
|
||||
|
||||
}
|
||||
|
||||
function MockRequest(remoteAddr, method, originalUrl) {
|
||||
|
||||
this.socket = { remoteAddress: remoteAddr };
|
||||
this.originalUrl = originalUrl;
|
||||
this.method = method;
|
||||
this.httpVersionMajor = '5';
|
||||
this.httpVersionMinor = '0';
|
||||
this.headers = {};
|
||||
}
|
||||
|
||||
function MockResponse(statusCode) {
|
||||
|
||||
this.statusCode = statusCode;
|
||||
|
||||
this.end = function(chunk, encoding) {
|
||||
|
||||
};
|
||||
}
|
||||
|
||||
vows.describe('log4js connect logger').addBatch({
|
||||
'getConnectLoggerModule': {
|
||||
topic: function() {
|
||||
var clm = require('../lib/connect-logger');
|
||||
return clm;
|
||||
},
|
||||
|
||||
'should return a "connect logger" factory' : function(clm) {
|
||||
assert.isObject(clm);
|
||||
},
|
||||
|
||||
'nolog String' : {
|
||||
topic: function(clm) {
|
||||
var ml = new MockLogger();
|
||||
var cl = clm.connectLogger(ml, { nolog: "\\.gif" });
|
||||
return {cl: cl, ml: ml};
|
||||
},
|
||||
|
||||
'check unmatch url request': {
|
||||
topic: function(d){
|
||||
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.png'); // not gif
|
||||
var res = new MockResponse(200);
|
||||
d.cl(req, res, function() { });
|
||||
res.end('chunk', 'encoding');
|
||||
return d.ml.messages;
|
||||
}
|
||||
, 'check message': function(messages){
|
||||
assert.isArray(messages);
|
||||
assert.equal(messages.length, 1);
|
||||
assert.ok(levels.INFO.isEqualTo(messages[0].level));
|
||||
assert.include(messages[0].message, 'GET');
|
||||
assert.include(messages[0].message, 'http://url');
|
||||
assert.include(messages[0].message, 'my.remote.addr');
|
||||
assert.include(messages[0].message, '200');
|
||||
messages.pop();
|
||||
}
|
||||
},
|
||||
|
||||
'check match url request': {
|
||||
topic: function(d) {
|
||||
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.gif'); // gif
|
||||
var res = new MockResponse(200);
|
||||
d.cl(req, res, function() { });
|
||||
res.end('chunk', 'encoding');
|
||||
return d.ml.messages;
|
||||
}
|
||||
, 'check message': function(messages) {
|
||||
assert.isArray(messages);
|
||||
assert.equal(messages.length, 0);
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
'nolog Strings' : {
|
||||
topic: function(clm) {
|
||||
var ml = new MockLogger();
|
||||
var cl = clm.connectLogger(ml, {nolog: "\\.gif|\\.jpe?g"});
|
||||
return {cl: cl, ml: ml};
|
||||
},
|
||||
|
||||
'check unmatch url request (png)': {
|
||||
topic: function(d){
|
||||
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.png'); // not gif
|
||||
var res = new MockResponse(200);
|
||||
d.cl(req, res, function() { });
|
||||
res.end('chunk', 'encoding');
|
||||
return d.ml.messages;
|
||||
}
|
||||
, 'check message': function(messages){
|
||||
assert.isArray(messages);
|
||||
assert.equal(messages.length, 1);
|
||||
assert.ok(levels.INFO.isEqualTo(messages[0].level));
|
||||
assert.include(messages[0].message, 'GET');
|
||||
assert.include(messages[0].message, 'http://url');
|
||||
assert.include(messages[0].message, 'my.remote.addr');
|
||||
assert.include(messages[0].message, '200');
|
||||
messages.pop();
|
||||
}
|
||||
},
|
||||
|
||||
'check match url request (gif)': {
|
||||
topic: function(d) {
|
||||
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.gif'); // gif
|
||||
var res = new MockResponse(200);
|
||||
d.cl(req, res, function() { });
|
||||
res.end('chunk', 'encoding');
|
||||
return d.ml.messages;
|
||||
}
|
||||
, 'check message': function(messages) {
|
||||
assert.isArray(messages);
|
||||
assert.equal(messages.length, 0);
|
||||
}
|
||||
},
|
||||
'check match url request (jpeg)': {
|
||||
topic: function(d) {
|
||||
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.jpeg'); // jpeg
|
||||
var res = new MockResponse(200);
|
||||
d.cl(req, res, function() { });
|
||||
res.end('chunk', 'encoding');
|
||||
return d.ml.messages;
|
||||
}
|
||||
, 'check message': function(messages) {
|
||||
assert.isArray(messages);
|
||||
assert.equal(messages.length, 0);
|
||||
}
|
||||
}
|
||||
},
|
||||
'nolog Array<String>' : {
|
||||
topic: function(clm) {
|
||||
var ml = new MockLogger();
|
||||
var cl = clm.connectLogger(ml, {nolog: ["\\.gif", "\\.jpe?g"]});
|
||||
return {cl: cl, ml: ml};
|
||||
},
|
||||
|
||||
'check unmatch url request (png)': {
|
||||
topic: function(d){
|
||||
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.png'); // not gif
|
||||
var res = new MockResponse(200);
|
||||
d.cl(req, res, function() { });
|
||||
res.end('chunk', 'encoding');
|
||||
return d.ml.messages;
|
||||
}
|
||||
, 'check message': function(messages){
|
||||
assert.isArray(messages);
|
||||
assert.equal(messages.length, 1);
|
||||
assert.ok(levels.INFO.isEqualTo(messages[0].level));
|
||||
assert.include(messages[0].message, 'GET');
|
||||
assert.include(messages[0].message, 'http://url');
|
||||
assert.include(messages[0].message, 'my.remote.addr');
|
||||
assert.include(messages[0].message, '200');
|
||||
messages.pop();
|
||||
}
|
||||
},
|
||||
|
||||
'check match url request (gif)': {
|
||||
topic: function(d) {
|
||||
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.gif'); // gif
|
||||
var res = new MockResponse(200);
|
||||
d.cl(req, res, function() { });
|
||||
res.end('chunk', 'encoding');
|
||||
return d.ml.messages;
|
||||
}
|
||||
, 'check message': function(messages) {
|
||||
assert.isArray(messages);
|
||||
assert.equal(messages.length, 0);
|
||||
}
|
||||
},
|
||||
|
||||
'check match url request (jpeg)': {
|
||||
topic: function(d) {
|
||||
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.jpeg'); // jpeg
|
||||
var res = new MockResponse(200);
|
||||
d.cl(req, res, function() { });
|
||||
res.end('chunk', 'encoding');
|
||||
return d.ml.messages;
|
||||
}
|
||||
, 'check message': function(messages) {
|
||||
assert.isArray(messages);
|
||||
assert.equal(messages.length, 0);
|
||||
}
|
||||
},
|
||||
},
|
||||
'nolog RegExp' : {
|
||||
topic: function(clm) {
|
||||
var ml = new MockLogger();
|
||||
var cl = clm.connectLogger(ml, {nolog: /\.gif|\.jpe?g/});
|
||||
return {cl: cl, ml: ml};
|
||||
},
|
||||
|
||||
'check unmatch url request (png)': {
|
||||
topic: function(d){
|
||||
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.png'); // not gif
|
||||
var res = new MockResponse(200);
|
||||
d.cl(req, res, function() { });
|
||||
res.end('chunk', 'encoding');
|
||||
return d.ml.messages;
|
||||
}
|
||||
, 'check message': function(messages){
|
||||
assert.isArray(messages);
|
||||
assert.equal(messages.length, 1);
|
||||
assert.ok(levels.INFO.isEqualTo(messages[0].level));
|
||||
assert.include(messages[0].message, 'GET');
|
||||
assert.include(messages[0].message, 'http://url');
|
||||
assert.include(messages[0].message, 'my.remote.addr');
|
||||
assert.include(messages[0].message, '200');
|
||||
messages.pop();
|
||||
}
|
||||
},
|
||||
|
||||
'check match url request (gif)': {
|
||||
topic: function(d) {
|
||||
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.gif'); // gif
|
||||
var res = new MockResponse(200);
|
||||
d.cl(req, res, function() { });
|
||||
res.end('chunk', 'encoding');
|
||||
return d.ml.messages;
|
||||
}
|
||||
, 'check message': function(messages) {
|
||||
assert.isArray(messages);
|
||||
assert.equal(messages.length, 0);
|
||||
}
|
||||
},
|
||||
|
||||
'check match url request (jpeg)': {
|
||||
topic: function(d) {
|
||||
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.jpeg'); // jpeg
|
||||
var res = new MockResponse(200);
|
||||
d.cl(req, res, function() { });
|
||||
res.end('chunk', 'encoding');
|
||||
return d.ml.messages;
|
||||
}
|
||||
, 'check message': function(messages) {
|
||||
assert.isArray(messages);
|
||||
assert.equal(messages.length, 0);
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
}).export(module);
|
||||
79
test/test-setLevel-asymmetry.js
Normal file
@@ -0,0 +1,79 @@
|
||||
// This test shows an asymmetry between setLevel and isLevelEnabled (in log4js-node@0.4.3 and earlier):
|
||||
// 1) setLevel("foo") works, but setLevel(log4js.levels.foo) silently does not (sets the level to TRACE).
|
||||
// 2) isLevelEnabled("foo") works as does isLevelEnabled(log4js.levels.foo).
|
||||
//
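// A minimal sketch of the asymmetry (illustrative only, assuming the 0.4.3 behaviour described above):
//   logger.setLevel('Warn');                               // works: logger level becomes WARN
//   logger.setLevel(log4js.levels.toLevel('Warn'));        // silently sets the level to TRACE instead
//   logger.isLevelEnabled('Warn');                         // fine with a string...
//   logger.isLevelEnabled(log4js.levels.toLevel('Warn'));  // ...and fine with a level object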
|
||||
|
||||
// Basic set up
|
||||
var vows = require('vows');
|
||||
var assert = require('assert');
|
||||
var log4js = require('../lib/log4js');
|
||||
var logger = log4js.getLogger('test-setLevel-asymmetry');
|
||||
|
||||
// uncomment one or other of the following to see progress (or not) while running the tests
|
||||
// var showProgress = console.log;
|
||||
var showProgress = function() {};
|
||||
|
||||
|
||||
// Define the array of levels as string to iterate over.
|
||||
var strLevels= ['Trace','Debug','Info','Warn','Error','Fatal'];
|
||||
|
||||
var log4jsLevels =[];
|
||||
// populate an array with the log4js.levels that match the strLevels.
|
||||
// Would be nice if we could iterate over log4js.levels instead, but log4js.levels.toLevel prevents that for now.
|
||||
strLevels.forEach(function(l) {
|
||||
log4jsLevels.push(log4js.levels.toLevel(l));
|
||||
});
|
||||
|
||||
|
||||
// We are going to iterate over this object's properties to define an exhaustive list of vows.
|
||||
var levelTypes = {
|
||||
'string': strLevels,
|
||||
'log4js.levels.level': log4jsLevels,
|
||||
}
|
||||
|
||||
// Set up the basic vows batch for this test
|
||||
var batch = {
|
||||
setLevel: {
|
||||
}
|
||||
}
|
||||
|
||||
showProgress('Populating batch object...');
|
||||
|
||||
// Populating the batch object programmatically,
|
||||
// as I don't have the patience to manually populate it with the (strLevels.length x levelTypes.length) ^ 2 = 144 possible test combinations
|
||||
for (var type in levelTypes) {
|
||||
var context = 'is called with a '+type;
|
||||
var levelsToTest = levelTypes[type];
|
||||
showProgress('Setting up the vows context for '+context);
|
||||
|
||||
batch.setLevel[context]= {};
|
||||
levelsToTest.forEach( function(level) {
|
||||
var subContext = 'of '+level;
|
||||
var log4jsLevel=log4js.levels.toLevel(level.toString());
|
||||
|
||||
showProgress('Setting up the vows sub-context for '+subContext);
|
||||
batch.setLevel[context][subContext] = {topic: level};
|
||||
for (var comparisonType in levelTypes) {
|
||||
levelTypes[comparisonType].forEach(function(comparisonLevel) {
|
||||
var t = type;
|
||||
var ct = comparisonType;
|
||||
var expectedResult = log4jsLevel.isLessThanOrEqualTo(comparisonLevel);
|
||||
var vow = 'isLevelEnabled('+comparisonLevel+') called with a '+comparisonType+' should return '+expectedResult;
|
||||
showProgress('Setting up the vows vow for '+vow);
|
||||
|
||||
batch.setLevel[context][subContext][vow] = function(levelToSet) {
|
||||
logger.setLevel(levelToSet);
|
||||
showProgress('*** Checking setLevel( '+level+' ) of type '+t+', and isLevelEnabled( '+comparisonLevel+' ) of type '+ct+'. Expecting: '+expectedResult);
|
||||
assert.equal(logger.isLevelEnabled(comparisonLevel), expectedResult, 'Failed: calling setLevel( '+level+' ) with type '+type+', isLevelEnabled( '+comparisonLevel+' ) of type '+comparisonType+' did not return '+expectedResult);
|
||||
};
|
||||
})
|
||||
}
|
||||
});
|
||||
|
||||
}
|
||||
|
||||
showProgress('Running tests...');
|
||||
|
||||
vows.describe('log4js setLevel asymmetry fix').addBatch(batch).export(module);
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@
|
||||
"level": "WARN",
|
||||
"appender": {
|
||||
"type": "file",
|
||||
"filename": "tmp-tests-warnings.log",
|
||||
"filename": "test/logLevelFilter-warnings.log",
|
||||
"layout": {
|
||||
"type": "messagePassThrough"
|
||||
}
|
||||
@@ -15,7 +15,7 @@
|
||||
{
|
||||
"category": "tests",
|
||||
"type": "file",
|
||||
"filename": "tmp-tests.log",
|
||||
"filename": "test/logLevelFilter.log",
|
||||
"layout": {
|
||||
"type": "messagePassThrough"
|
||||
}
|
||||