Merged Gareth's latest changes in

This commit is contained in:
Daniel Bell
2011-07-26 11:11:27 +10:00
12 changed files with 620 additions and 408 deletions

20
lib/appenders/console.js Normal file
View File

@@ -0,0 +1,20 @@
var layouts = require('../layouts');

/**
 * Creates an appender that writes formatted log events to the console,
 * via the original console.log captured before log4js replaced it
 * (console._preLog4js_log).
 *
 * @param layout optional formatting function taking a logging event and
 *        returning a string; defaults to layouts.colouredLayout.
 * @return an appender function accepting a logging event.
 */
function consoleAppender (layout) {
  var format = layout || layouts.colouredLayout;
  return function (loggingEvent) {
    console._preLog4js_log(format(loggingEvent));
  };
}

/**
 * Builds a console appender from a configuration object.
 * @param config may carry a layout definition: { layout: { type: ... } }.
 * @return a configured console appender function.
 */
function configure (config) {
  var chosenLayout;
  if (config.layout) {
    chosenLayout = layouts.layout(config.layout.type, config.layout);
  }
  return consoleAppender(chosenLayout);
}

exports.name = "console";
exports.appender = consoleAppender;
exports.configure = configure;

124
lib/appenders/file.js Normal file
View File

@@ -0,0 +1,124 @@
var layouts = require('../layouts')
, fs = require('fs');

/**
 * File Appender writing the logs to a text file. Supports rolling of logs by size.
 *
 * @param file file log messages will be written to
 * @param layout a function that takes a logevent and returns a string (defaults to basicLayout).
 * @param logSize - the maximum size (in bytes) for a log file, if not provided then logs won't be rotated.
 * @param numBackups - the number of log files to keep after logSize has been reached (default 5)
 * @param filePollInterval - the time in seconds between file size checks (default 30s)
 */
function fileAppender (file, layout, logSize, numBackups, filePollInterval) {
  layout = layout || layouts.basicLayout;
  // explicit undefined check so callers can pass 0 to mean "no backups"
  numBackups = numBackups === undefined ? 5 : numBackups;
  // seconds -> milliseconds; 0/undefined/NaN falls back to 30000ms
  filePollInterval = filePollInterval * 1000 || 30000;

  // Polls the log file's size (non-persistent watch, so it won't keep the
  // process alive) and triggers a roll once it reaches logSize.
  function setupLogRolling () {
    fs.watchFile(
      file,
      {
        persistent: false,
        interval: filePollInterval
      },
      function (curr, prev) {
        if (curr.size >= logSize) {
          rollThatLog();
        }
      }
    );
  }

  // Rotates backups and starts a fresh log file, or truncates in place
  // when no backups are kept.
  function rollThatLog () {
    if (numBackups > 0) {
      //roll the backups (rename file.n-1 to file.n, where n <= numBackups)
      for (var i=numBackups; i > 0; i--) {
        if (i > 1) {
          if (fileExists(file + '.' + (i-1))) {
            fs.renameSync(file+'.'+(i-1), file+'.'+i);
          }
        } else {
          fs.renameSync(file, file+'.1');
        }
      }
      //let's make a new file
      // swap the stream's underlying fd to the freshly-opened file so the
      // existing write stream keeps working, then close the old descriptor
      // asynchronously (no callback: best-effort cleanup)
      var newLogFileFD = fs.openSync(file, 'a', 0644)
      , oldLogFileFD = logFile.fd;
      logFile.fd = newLogFileFD;
      fs.close(oldLogFileFD);
    } else {
      // NOTE(review): truncates the file to logSize bytes rather than to 0 —
      // presumably intentional (cap the file at its maximum size); confirm
      fs.truncate(logFile.fd, logSize);
    }
  }

  // Synchronous existence check via stat; any stat error is treated as
  // "does not exist".
  function fileExists (filename) {
    try {
      fs.statSync(filename);
      return true;
    } catch (e) {
      return false;
    }
  }

  // Opens the append-mode write stream and wires flow control: events that
  // were buffered while the stream was opening or back-pressured are flushed
  // on "open" and on "drain".
  function openTheStream() {
    var stream = fs.createWriteStream(file, { flags: 'a', mode: 0644, encoding: 'utf8' });
    stream.on("open", function() {
      canWrite = true;
      while (logEventBuffer.length > 0 && canWrite) {
        canWrite = writeToLog(logEventBuffer.shift());
      }
    });
    stream.on("error", function (err) {
      console.error("log4js.fileAppender - Error happened ", err);
    });
    stream.on("drain", function() {
      canWrite = true;
      while (logEventBuffer.length > 0 && canWrite) {
        canWrite = writeToLog(logEventBuffer.shift());
      }
    });
    return stream;
  }

  // events queued while the stream is not yet writable
  var logEventBuffer = []
  , canWrite = false
  , logFile = openTheStream();

  if (logSize > 0) {
    setupLogRolling();
  }

  //close the file on process exit.
  process.on('exit', function() {
    logFile.end();
    logFile.destroy();
  });

  // Writes one formatted event; returns false when the stream's buffer is
  // full and we must wait for "drain".
  function writeToLog(loggingEvent) {
    return logFile.write(layout(loggingEvent)+'\n', "utf8");
  }

  return function(loggingEvent) {
    //because the log stream is opened asynchronously, we don't want to write
    //until it is ready.
    if (canWrite) {
      canWrite = writeToLog(loggingEvent);
    } else {
      logEventBuffer.push(loggingEvent);
    }
  };
}

// Builds a file appender from a configuration object (e.g. from log4js.json).
function configure(config) {
  var layout;
  if (config.layout) {
    layout = layouts.layout(config.layout.type, config.layout);
  }
  return fileAppender(config.filename, layout, config.maxLogSize, config.backups, config.pollInterval);
}

exports.name = "file";
exports.appender = fileAppender;
exports.configure = configure;

View File

@@ -0,0 +1,19 @@
var levels = require('../levels');

/**
 * Wraps another appender so that only events at or above the given level
 * are forwarded to it.
 *
 * @param levelString name of the threshold level (e.g. "ERROR").
 * @param appender the appender to delegate matching events to.
 * @return an appender function that applies the level filter.
 */
function logLevelFilter (levelString, appender) {
  var threshold = levels.toLevel(levelString);
  return function (logEvent) {
    if (logEvent.level.isGreaterThanOrEqualTo(threshold)) {
      appender(logEvent);
    }
  };
}

/**
 * Builds a filtered appender from configuration. The wrapped appender is
 * constructed through the maker registry supplied on config.makers.
 */
function configure (config) {
  var wrapped = config.makers[config.appender.type](config.appender);
  return logLevelFilter(config.level, wrapped);
}

exports.name = "logLevelFilter";
exports.appender = logLevelFilter;
exports.configure = configure;

View File

@@ -38,9 +38,9 @@ function formatLogData(logData) {
return match;
};
});
if (data.length > 0) {
output += '\n';
}
} else {
//put it back, it's not a format string
data.unshift(format);
}
data.forEach(function (item) {

View File

@@ -18,8 +18,6 @@
* @fileoverview log4js is a library to log in JavaScript in similar manner
* than in log4j for Java. The API should be nearly the same.
*
* This file contains all log4js code and is the only file required for logging.
*
* <h3>Example:</h3>
* <pre>
* var logging = require('log4js');
@@ -37,6 +35,8 @@
* log.trace("trace me" );
* </pre>
*
* NOTE: the authors below are the original browser-based log4js authors
* don't try to contact them about bugs in this version :)
* @version 1.0
* @author Stephan Strittmatter - http://jroller.com/page/stritti
* @author Seth Chisamore - http://www.chisamore.com
@@ -50,30 +50,12 @@ var events = require('events')
, sys = require('sys')
, layouts = require('./layouts')
, levels = require('./levels')
, consoleAppender = require('./appenders/console').appender
, DEFAULT_CATEGORY = '[default]'
, ALL_CATEGORIES = '[all]'
, appenders = {}
, loggers = {}
, appenderMakers = {
"file": function(config, fileAppender) {
var layout;
if (config.layout) {
layout = layouts.layout(config.layout.type, config.layout);
}
return fileAppender(config.filename, layout, config.maxLogSize, config.backups, config.pollInterval);
},
"console": function(config, fileAppender, consoleAppender) {
var layout;
if (config.layout) {
layout = layouts.layout(config.layout.type, config.layout);
}
return consoleAppender(layout);
},
"logLevelFilter": function(config, fileAppender, consoleAppender) {
var appender = appenderMakers[config.appender.type](config.appender, fileAppender, consoleAppender);
return logLevelFilter(config.level, appender);
}
};
, appenderMakers = {};
/**
* Get a logger instance. Instance is cached on categoryName level.
@@ -150,11 +132,13 @@ function clearAppenders () {
}
}
function configureAppenders(appenderList, fileAppender, consoleAppender) {
function configureAppenders(appenderList) {
clearAppenders();
if (appenderList) {
appenderList.forEach(function(appenderConfig) {
var appender = appenderMakers[appenderConfig.type](appenderConfig, fileAppender, consoleAppender);
var appender;
appenderConfig.makers = appenderMakers;
appender = appenderMakers[appenderConfig.type](appenderConfig);
if (appender) {
addAppender(appender, appenderConfig.category);
} else {
@@ -162,7 +146,7 @@ function configureAppenders(appenderList, fileAppender, consoleAppender) {
}
});
} else {
addAppender(consoleAppender);
addAppender(consoleAppender());
}
}
@@ -262,91 +246,6 @@ function getDefaultLogger () {
return getLogger(DEFAULT_CATEGORY);
}
function logLevelFilter (levelString, appender) {
var level = levels.toLevel(levelString);
return function(logEvent) {
if (logEvent.level.isGreaterThanOrEqualTo(level)) {
appender(logEvent);
}
}
}
function consoleAppender (layout) {
layout = layout || layouts.colouredLayout;
return function(loggingEvent) {
console._preLog4js_log(layout(loggingEvent));
};
}
/**
* File Appender writing the logs to a text file. Supports rolling of logs by size.
*
* @param file file log messages will be written to
* @param layout a function that takes a logevent and returns a string (defaults to basicLayout).
* @param logSize - the maximum size (in bytes) for a log file, if not provided then logs won't be rotated.
* @param numBackups - the number of log files to keep after logSize has been reached (default 5)
* @param filePollInterval - the time in seconds between file size checks (default 30s)
*/
function fileAppender (file, layout, logSize, numBackups, filePollInterval) {
layout = layout || layouts.basicLayout;
var logFile = fs.createWriteStream(file, { flags: 'a', mode: 0644, encoding: 'utf8' });
if (logSize > 0) {
setupLogRolling(logFile, file, logSize, numBackups || 5, (filePollInterval * 1000) || 30000);
}
//close the file on process exit.
process.on('exit', function() {
logFile.end();
});
return function(loggingEvent) {
logFile.write(layout(loggingEvent)+'\n');
};
}
function setupLogRolling (logFile, filename, logSize, numBackups, filePollInterval) {
fs.watchFile(
filename,
{
persistent: false,
interval: filePollInterval
},
function (curr, prev) {
if (curr.size >= logSize) {
rollThatLog(logFile, filename, numBackups);
}
}
);
}
function rollThatLog (logFile, filename, numBackups) {
//first close the current one.
logFile.end();
//roll the backups (rename file.n-1 to file.n, where n <= numBackups)
for (var i=numBackups; i > 0; i--) {
if (i > 1) {
if (fileExists(filename + '.' + (i-1))) {
fs.renameSync(filename+'.'+(i-1), filename+'.'+i);
}
} else {
fs.renameSync(filename, filename+'.1');
}
}
//open it up again
logFile = fs.createWriteStream(filename, { flags: 'a', mode: 0644, encoding: "utf8" });
}
function fileExists (filename) {
try {
fs.statSync(filename);
return true;
} catch (e) {
return false;
}
}
function findConfiguration() {
//add current directory onto the list of configPaths
var paths = ['.'].concat(require.paths);
@@ -377,7 +276,7 @@ function loadConfigurationFile(filename) {
function configureOnceOff(config) {
if (config) {
try {
configureAppenders(config.appenders, fileAppender, consoleAppender);
configureAppenders(config.appenders);
configureLevels(config.levels);
} catch (e) {
throw new Error("Problem reading log4js config " + sys.inspect(config) + ". Error was \"" + e.message + "\" ("+e.stack+")");
@@ -447,16 +346,21 @@ function replaceConsole(logger) {
}
['log','debug','info','warn','error'].forEach(function (item) {
console['_preLog4js_'+item] = console[item];
if (console['_preLog4js_log'] === undefined) {
console['_preLog4js_'+item] = console[item];
}
console[item] = replaceWith(item === 'log' ? logger.info : logger[item]);
});
}
//set ourselves up if we can find a default log4js.json
configure(findConfiguration());
//replace console.log, etc with log4js versions
replaceConsole(getLogger("console"));
function loadAppenders() {
var appenderList = fs.readdirSync(__dirname + '/appenders');
appenderList.forEach(function(file) {
var appenderModule = require('./appenders/' + file);
module.exports.appenders[appenderModule.name] = appenderModule.appender;
appenderMakers[appenderModule.name] = appenderModule.configure;
});
}
module.exports = {
getLogger: getLogger,
@@ -469,16 +373,23 @@ module.exports = {
levels: levels,
setGlobalLogLevel: setGlobalLogLevel,
consoleAppender: consoleAppender,
fileAppender: fileAppender,
logLevelFilter: logLevelFilter,
layouts: layouts,
appenders: {},
connectLogger: require('./connect-logger').connectLogger
};
loadAppenders();
//set ourselves up if we can find a default log4js.json
configure(findConfiguration());
//replace console.log, etc with log4js versions
replaceConsole(getLogger("console"));
//keep the old-style layouts
['basicLayout','messagePassThroughLayout','colouredLayout','coloredLayout'].forEach(function(item) {
module.exports[item] = layouts[item];
});
//and the old-style appenders
module.exports.consoleAppender = module.exports.appenders.console;
module.exports.fileAppender = module.exports.appenders.file;
module.exports.logLevelFilter = module.exports.appenders.logLevelFilter;

28
log-rolling.js Normal file
View File

@@ -0,0 +1,28 @@
// Demo script: exercises log rolling by pumping 100000 messages through a
// size-limited file appender (see tmp-test.log and its numbered backups).
var log4js = require('./lib/log4js');

log4js.configure({
  "appenders": [
    {
      "type": "file",
      "filename": "tmp-test.log",
      "maxLogSize": 1024,
      "backups": 3,
      "pollInterval": 0.1,
      "category": "test"
    },
    {
      type: "console"
    , category: "console"
    }
  ]
});

var log = log4js.getLogger("test");

// log one numbered message to the "test" category
function doTheLogging(x) {
  log.info("Logging something %d", x);
}

for (var i = 0; i < 100000; i++) {
  doTheLogging(i);
}

View File

@@ -1,6 +1,6 @@
{
"name": "log4js",
"version": "0.3.4",
"version": "0.3.5",
"description": "Port of Log4js to work with node.",
"keywords": [
"logging",

183
test/fileAppender.js Normal file
View File

@@ -0,0 +1,183 @@
var vows = require('vows')
, fs = require('fs')
, log4js = require('../lib/log4js')
, assert = require('assert');

// start from a clean slate so appenders registered by other test files
// don't receive our log events
log4js.clearAppenders();

// best-effort delete of a fixture file left over from a previous run
function remove(filename) {
  try {
    fs.unlinkSync(filename);
  } catch (e) {
    //doesn't really matter if it failed
  }
}

vows.describe('log4js fileAppender').addBatch({
  'with default fileAppender settings': {
    topic: function() {
      var testFile = __dirname + '/fa-default-test.log'
      , logger = log4js.getLogger('default-settings');
      remove(testFile);
      log4js.addAppender(log4js.fileAppender(testFile), 'default-settings');
      logger.info("This should be in the file.");
      // read back asynchronously; vows passes (err, contents) to the checks
      fs.readFile(testFile, "utf8", this.callback);
    },
    'should write log messages to the file': function(err, fileContents) {
      assert.include(fileContents, "This should be in the file.\n");
    },
    'log messages should be in the basic layout format': function(err, fileContents) {
      assert.match(fileContents, /\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}\] \[INFO\] default-settings - /);
    }
  },
  'with a max file size and no backups': {
    topic: function() {
      var testFile = __dirname + '/fa-maxFileSize-test.log'
      , logger = log4js.getLogger('max-file-size');
      remove(testFile);
      //log file of 50 bytes maximum, no backups, check every 10ms for changes
      log4js.addAppender(log4js.fileAppender(testFile, log4js.layouts.basicLayout, 50, 0, 0.01), 'max-file-size');
      logger.info("This is the first log message.");
      //we have to wait before writing the second one, because node is too fast for the file system.
      var that = this;
      setTimeout(function() {
        logger.info("This is the second log message.");
        fs.readFile(testFile, "utf8", that.callback);
      }, 500);
    },
    'log file should only contain the second message': function(err, fileContents) {
      assert.include(fileContents, "This is the second log message.\n");
      assert.equal(fileContents.indexOf("This is the first log message."), -1);
    },
    'the number of files': {
      topic: function() {
        fs.readdir(__dirname, this.callback);
      },
      'starting with the test file name should be one': function(err, files) {
        // with no backups configured, rolling must not create .1/.2 files
        var logFiles = files.filter(function(file) { return file.indexOf('fa-maxFileSize-test.log') > -1; });
        assert.length(logFiles, 1);
      }
    }
  },
  'with a max file size and 2 backups': {
    topic: function() {
      var testFile = __dirname + '/fa-maxFileSize-with-backups-test.log'
      , logger = log4js.getLogger('max-file-size-backups');
      remove(testFile);
      remove(testFile+'.1');
      remove(testFile+'.2');
      //log file of 50 bytes maximum, 2 backups, check every 10ms for changes
      log4js.addAppender(log4js.fileAppender(testFile, log4js.layouts.basicLayout, 50, 2, 0.01), 'max-file-size-backups');
      logger.info("This is the first log message.");
      //we have to wait before writing the second one, because node is too fast for the file system.
      // staggered timeouts give the 10ms size poll a chance to roll between
      // messages; only the last one triggers the directory listing
      var that = this;
      setTimeout(function() {
        logger.info("This is the second log message.");
      }, 200);
      setTimeout(function() {
        logger.info("This is the third log message.");
      }, 400);
      setTimeout(function() {
        logger.info("This is the fourth log message.");
        fs.readdir(__dirname, that.callback);
      }, 600);
    },
    'the log files': {
      topic: function(files) {
        var logFiles = files.filter(function(file) { return file.indexOf('fa-maxFileSize-with-backups-test.log') > -1; });
        return logFiles;
      },
      'should be 3': function (files) {
        assert.length(files, 3);
      },
      'should be named in sequence': function (files) {
        assert.deepEqual(files, ['fa-maxFileSize-with-backups-test.log', 'fa-maxFileSize-with-backups-test.log.1', 'fa-maxFileSize-with-backups-test.log.2']);
      },
      'and the contents of the first file': {
        topic: function(logFiles) {
          fs.readFile(logFiles[0], "utf8", this.callback);
        },
        'should be the last log message': function(err, contents) {
          assert.include(contents, 'This is the fourth log message.');
        }
      },
      'and the contents of the second file': {
        topic: function(logFiles) {
          fs.readFile(logFiles[1], "utf8", this.callback);
        },
        'should be the third log message': function(err, contents) {
          assert.include(contents, 'This is the third log message.');
        }
      },
      'and the contents of the third file': {
        topic: function(logFiles) {
          fs.readFile(logFiles[2], "utf8", this.callback);
        },
        'should be the second log message': function(err, contents) {
          assert.include(contents, 'This is the second log message.');
        }
      }
    }
  }
}).addBatch({
  'configure' : {
    'with fileAppender': {
      topic: function() {
        var log4js = require('../lib/log4js')
        , logger;
        //this config file defines one file appender (to ./tmp-tests.log)
        //and sets the log level for "tests" to WARN
        log4js.configure('test/log4js.json');
        logger = log4js.getLogger('tests');
        logger.info('this should not be written to the file');
        logger.warn('this should be written to the file');
        fs.readFile('tmp-tests.log', 'utf8', this.callback);
      },
      'should load appender configuration from a json file': function(err, contents) {
        assert.include(contents, 'this should be written to the file\n');
        assert.equal(contents.indexOf('this should not be written to the file'), -1);
      },
      'and log rolling': {
        topic: function() {
          // sandbox the file appender with a fake fs so we can observe the
          // watchFile call without touching the real file system
          var sandbox = require('sandboxed-module')
          , that = this
          , fileAppender = sandbox.require(
            '../lib/appenders/file.js'
            , { requires:
              { 'fs':
                {
                  watchFile: function(filename, options, cb) {
                    that.callback(null, filename);
                  }
                , createWriteStream: function() {
                    return {
                      on: function() {}
                    , end: function() {}
                    , destroy: function() {}
                    };
                  }
                }
              }
            }
          );
          fileAppender.configure({
            filename: "cheese.log"
          , maxLogSize: 100
          , backups: 5
          , pollInterval: 30
          });
        },
        'should watch the log file': function(watchedFile) {
          assert.equal(watchedFile, 'cheese.log');
        }
      }
    }
  }
}).export(module);

View File

@@ -65,7 +65,19 @@ vows.describe('log4js layouts').addBatch({
, toString: function() { return "ERROR"; }
}
}), "thing 1");
}
},
'should output the first item even if it is not a string': function(layout) {
assert.equal(layout({
data: [ { thing: 1} ]
, startTime: new Date(2010, 11, 5, 14, 18, 30, 45)
, categoryName: "cheese"
, level: {
colour: "green"
, toString: function() { return "ERROR"; }
}
}), "{ thing: 1 }");
}
},
'basicLayout': {
@@ -90,29 +102,24 @@ vows.describe('log4js layouts').addBatch({
error = new Error("Some made-up error"),
stack = error.stack.split(/\n/);
event.data = ['this is a test', error];
event.data = ['this is a test ', error];
output = layout(event);
lines = output.split(/\n/);
assert.length(lines, stack.length+1);
assert.equal(lines[0], "[2010-12-05 14:18:30.045] [DEBUG] tests - this is a test");
assert.equal(lines[1], "Error: Some made-up error");
assert.length(lines, stack.length);
assert.equal(lines[0], "[2010-12-05 14:18:30.045] [DEBUG] tests - this is a test Error: Some made-up error");
for (var i = 1; i < stack.length; i++) {
assert.equal(lines[i+1], stack[i]);
assert.equal(lines[i+1], stack[i+1]);
}
},
'should output any extra data in the log event as util.inspect strings': function(args) {
var layout = args[0], event = args[1], output, lines;
event.data = ['this is a test', {
event.data = ['this is a test ', {
name: 'Cheese',
message: 'Gorgonzola smells.'
}];
output = layout(event);
lines = output.split(/\n/);
assert.length(lines, 2);
assert.equal(lines[0], "[2010-12-05 14:18:30.045] [DEBUG] tests - this is a test");
assert.equal(lines[1], "{ name: 'Cheese', message: 'Gorgonzola smells.' }");
assert.equal(output, "[2010-12-05 14:18:30.045] [DEBUG] tests - this is a test { name: 'Cheese', message: 'Gorgonzola smells.' }");
}
},

69
test/logLevelFilter.js Normal file
View File

@@ -0,0 +1,69 @@
var vows = require('vows')
, fs = require('fs')
, assert = require('assert');

// best-effort delete of a fixture file left over from a previous run
function remove(filename) {
  try {
    fs.unlinkSync(filename);
  } catch (e) {
    //doesn't really matter if it failed
  }
}

vows.describe('log4js logLevelFilter').addBatch({
  'appender': {
    topic: function() {
      var log4js = require('../lib/log4js'), logEvents = [], logger;
      log4js.clearAppenders();
      // capture events that make it through an ERROR-level filter
      log4js.addAppender(log4js.logLevelFilter('ERROR', function(evt) { logEvents.push(evt); }), "logLevelTest");
      logger = log4js.getLogger("logLevelTest");
      logger.debug('this should not trigger an event');
      logger.warn('neither should this');
      logger.error('this should, though');
      logger.fatal('so should this');
      return logEvents;
    },
    'should only pass log events greater than or equal to its own level' : function(logEvents) {
      assert.length(logEvents, 2);
      assert.equal(logEvents[0].data[0], 'this should, though');
      assert.equal(logEvents[1].data[0], 'so should this');
    }
  },
  'configure': {
    topic: function() {
      var log4js = require('../lib/log4js')
      , logger;
      remove(__dirname + '/logLevelFilter.log');
      remove(__dirname + '/logLevelFilter-warnings.log');
      // config defines a plain file appender plus a WARN-filtered one
      log4js.configure('test/with-logLevelFilter.json');
      logger = log4js.getLogger("tests");
      logger.info('main');
      logger.error('both');
      logger.warn('both');
      logger.debug('main');
      return logger;
    },
    // context names fixed to match the actual log files written by the
    // configuration (previously the stale names tmp-tests*.log)
    'logLevelFilter.log': {
      topic: function() {
        fs.readFile(__dirname + '/logLevelFilter.log', 'utf8', this.callback);
      },
      'should contain all log messages': function(contents) {
        var messages = contents.trim().split('\n');
        assert.deepEqual(messages, ['main','both','both','main']);
      }
    },
    'logLevelFilter-warnings.log': {
      topic: function() {
        fs.readFile(__dirname + '/logLevelFilter-warnings.log','utf8',this.callback);
      },
      'should contain only error and warning log messages': function(contents) {
        var messages = contents.trim().split('\n');
        assert.deepEqual(messages, ['both','both']);
      }
    }
  }
}).export(module);

View File

@@ -55,225 +55,33 @@ vows.describe('log4js').addBatch({
},
'fileAppender': {
topic: function() {
var appender
, logmessages = []
, thing = "thing"
, fakeFS = {
createWriteStream: function() {
assert.equal(arguments[0], './tmp-tests.log');
assert.isObject(arguments[1]);
assert.equal(arguments[1].flags, 'a');
assert.equal(arguments[1].mode, 0644);
assert.equal(arguments[1].encoding, 'utf8');
return {
write: function(message) {
logmessages.push(message);
}
, end: function() {}
, destroySoon: function() {}
};
},
watchFile: function() {
throw new Error("watchFile should not be called if logSize is not defined");
}
},
log4js = sandbox.require(
'../lib/log4js',
{
requires: {
'fs': fakeFS
}
}
);
log4js.clearAppenders();
appender = log4js.fileAppender('./tmp-tests.log', log4js.layouts.messagePassThroughLayout);
log4js.addAppender(appender, 'file-test');
var logger = log4js.getLogger('file-test');
logger.debug("this is a test");
return logmessages;
},
'should write log messages to file': function(logmessages) {
assert.length(logmessages, 1);
assert.equal(logmessages, "this is a test\n");
'invalid configuration': {
'should throw an exception': function() {
assert.throws(function() {
log4js.configure({ "type": "invalid" });
});
}
},
'fileAppender - with rolling based on size and number of files to keep': {
'configuration when passed as object': {
topic: function() {
var watchCb,
filesOpened = [],
filesEnded = [],
filesRenamed = [],
newFilenames = [],
existingFiles = ['tests.log'],
log4js = sandbox.require(
'../lib/log4js'
, {
requires: {
'fs': {
watchFile: function(file, options, callback) {
assert.equal(file, 'tests.log');
assert.equal(options.persistent, false);
assert.equal(options.interval, 30000);
assert.isFunction(callback);
watchCb = callback;
},
createWriteStream: function(file) {
assert.equal(file, 'tests.log');
filesOpened.push(file);
return {
end: function() {
filesEnded.push(file);
}
};
},
statSync: function(file) {
if (existingFiles.indexOf(file) < 0) {
throw new Error("this file doesn't exist");
} else {
return true;
}
},
renameSync: function(oldFile, newFile) {
filesRenamed.push(oldFile);
existingFiles.push(newFile);
}
var appenderConfig
, log4js = sandbox.require(
'../lib/log4js'
, { requires:
{ './appenders/file.js':
{
name: "file"
, appender: function() {}
, configure: function(configuration) {
appenderConfig = configuration;
return function() {};
}
}
}
);
var appender = log4js.fileAppender('tests.log', log4js.messagePassThroughLayout, 1024, 2, 30);
return [watchCb, filesOpened, filesEnded, filesRenamed, existingFiles];
},
'should close current log file, rename all old ones, open new one on rollover': function(args) {
var watchCb = args[0]
, filesOpened = args[1]
, filesEnded = args[2]
, filesRenamed = args[3]
, existingFiles = args[4];
assert.isFunction(watchCb);
//tell the watchCb that the file is below the threshold
watchCb({ size: 891 }, { size: 0 });
//filesOpened should still be the first one.
assert.length(filesOpened, 1);
//tell the watchCb that the file is now over the threshold
watchCb({ size: 1053 }, { size: 891 });
//it should have closed the first log file.
assert.length(filesEnded, 1);
//it should have renamed the previous log file
assert.length(filesRenamed, 1);
//and we should have two files now
assert.length(existingFiles, 2);
assert.deepEqual(existingFiles, ['tests.log', 'tests.log.1']);
//and opened a new log file.
assert.length(filesOpened, 2);
//now tell the watchCb that we've flipped over the threshold again
watchCb({ size: 1025 }, { size: 123 });
//it should have closed the old file
assert.length(filesEnded, 2);
//it should have renamed both the old log file, and the previous '.1' file
assert.length(filesRenamed, 3);
assert.deepEqual(filesRenamed, ['tests.log', 'tests.log.1', 'tests.log' ]);
//it should have renamed 2 more file
assert.length(existingFiles, 4);
assert.deepEqual(existingFiles, ['tests.log', 'tests.log.1', 'tests.log.2', 'tests.log.1']);
//and opened a new log file
assert.length(filesOpened, 3);
//tell the watchCb we've flipped again.
watchCb({ size: 1024 }, { size: 234 });
//close the old one again.
assert.length(filesEnded, 3);
//it should have renamed the old log file and the 2 backups, with the last one being overwritten.
assert.length(filesRenamed, 5);
assert.deepEqual(filesRenamed, ['tests.log', 'tests.log.1', 'tests.log', 'tests.log.1', 'tests.log' ]);
//it should have renamed 2 more files
assert.length(existingFiles, 6);
assert.deepEqual(existingFiles, ['tests.log', 'tests.log.1', 'tests.log.2', 'tests.log.1', 'tests.log.2', 'tests.log.1']);
//and opened a new log file
assert.length(filesOpened, 4);
}
},
'configure' : {
topic: function() {
var messages = {}, fakeFS = {
createWriteStream: function(file) {
return {
write: function(message) {
if (!messages.hasOwnProperty(file)) {
messages[file] = [];
}
messages[file].push(message);
}
, end: function() {}
, destroySoon: function() {}
};
},
readFileSync: function(file, encoding) {
return require('fs').readFileSync(file, encoding);
},
watchFile: function(file) {
messages.watchedFile = file;
}
},
log4js = sandbox.require(
'../lib/log4js'
, {
requires: {
'fs': fakeFS
}
}
);
return [ log4js, messages ];
},
'should load appender configuration from a json file': function(args) {
var log4js = args[0], messages = args[1];
delete messages['tmp-tests.log'];
log4js.clearAppenders();
//this config file defines one file appender (to ./tmp-tests.log)
//and sets the log level for "tests" to WARN
log4js.configure('test/log4js.json');
var logger = log4js.getLogger("tests");
logger.info('this should not be written to the file');
logger.warn('this should be written to the file');
assert.length(messages['tmp-tests.log'], 1);
assert.equal(messages['tmp-tests.log'][0], 'this should be written to the file\n');
},
'should handle logLevelFilter configuration': function(args) {
var log4js = args[0], messages = args[1];
delete messages['tmp-tests.log'];
delete messages['tmp-tests-warnings.log'];
log4js.clearAppenders();
log4js.configure('test/with-logLevelFilter.json');
var logger = log4js.getLogger("tests");
logger.info('main');
logger.error('both');
logger.warn('both');
logger.debug('main');
assert.length(messages['tmp-tests.log'], 4);
assert.length(messages['tmp-tests-warnings.log'], 2);
assert.deepEqual(messages['tmp-tests.log'], ['main\n','both\n','both\n','main\n']);
assert.deepEqual(messages['tmp-tests-warnings.log'], ['both\n','both\n']);
},
'should handle fileAppender with log rolling' : function(args) {
var log4js = args[0], messages = args[1];
delete messages['tmp-test.log'];
log4js.configure('test/with-log-rolling.json');
assert.equal(messages.watchedFile, 'tmp-test.log');
},
'should handle an object or a file name': function(args) {
var log4js = args[0],
messages = args[1],
config = {
)
, config = {
"appenders": [
{
"type" : "file",
@@ -284,34 +92,88 @@ vows.describe('log4js').addBatch({
}
]
};
delete messages['cheesy-wotsits.log'];
log4js.configure(config);
assert.equal(messages.watchedFile, 'cheesy-wotsits.log');
return appenderConfig;
},
'should be passed to appender config': function(configuration) {
assert.equal(configuration.filename, 'cheesy-wotsits.log');
}
},
'configuration when passed as filename': {
topic: function() {
var appenderConfig
, configFilename
, log4js = sandbox.require(
'../lib/log4js'
, { requires:
{ 'fs':
{
readFileSync: function(filename) {
configFilename = filename;
return JSON.stringify({
appenders: [
{ type: "file"
, filename: "whatever.log"
}
]
});
},
readdirSync: function() {
return ['file.js'];
}
}
, './appenders/file.js':
{
name: "file"
, appender: function() {}
, configure: function(configuration) {
appenderConfig = configuration;
return function() {};
}
}
}
}
);
log4js.configure("/path/to/cheese.json");
return [ configFilename, appenderConfig ];
},
'should read the config from a file': function(args) {
assert.equal(args[0], '/path/to/cheese.json');
},
'should pass config to appender': function(args) {
assert.equal(args[1].filename, "whatever.log");
}
},
'with no appenders defined' : {
topic: function() {
var logger
, message
, that = this
, fakeConsoleAppender = {
name: "console"
, appender: function() {
return function(evt) {
that.callback(null, evt);
}
}
, configure: function() {
return fakeConsoleAppender.appender();
}
}
, log4js = sandbox.require(
'../lib/log4js'
, {
globals: {
console: {
log: function(msg) {
message = msg;
}
}
requires: {
'./appenders/console.js': fakeConsoleAppender
}
}
}
);
logger = log4js.getLogger("some-logger");
logger.debug("This is a test");
return message;
},
'should default to the console appender': function(message) {
assert.isTrue(/This is a test$/.test(message));
'should default to the console appender': function(evt) {
assert.equal(evt.data[0], "This is a test");
}
},
@@ -403,10 +265,13 @@ vows.describe('log4js').addBatch({
'default setup': {
topic: function() {
var pathsChecked = [],
message,
appenderEvent,
logger,
modulePath = require('path').normalize(__dirname + '/../lib/log4js.json'),
fakeFS = {
readdirSync: function(dir) {
return require('fs').readdirSync(dir);
},
readFileSync: function (file, encoding) {
assert.equal(file, modulePath);
assert.equal(encoding, 'utf8');
@@ -422,28 +287,27 @@ vows.describe('log4js').addBatch({
}
},
fakeConsole = {
log : function (msg) { message = msg; },
info: this.log,
warn: this.log,
debug: this.log,
error: this.log
'name': 'console'
, 'appender': function () {
return function(evt) { appenderEvent = evt; }
}
, 'configure': function (config) {
return fakeConsole.appender();
}
},
log4js = sandbox.require(
'../lib/log4js',
{
requires: {
'fs': fakeFS
},
globals: {
'console': fakeConsole
, './appenders/console.js': fakeConsole
}
}
);
logger = log4js.getLogger('a-test');
logger.debug("this is a test");
return [ pathsChecked, message, modulePath ];
return [ pathsChecked, appenderEvent, modulePath ];
},
'should check current directory, require paths, and finally the module dir for log4js.json': function(args) {
@@ -458,27 +322,8 @@ vows.describe('log4js').addBatch({
},
'should configure log4js from first log4js.json found': function(args) {
var message = args[1];
assert.equal(message, 'this is a test');
}
},
'logLevelFilter': {
topic: function() {
var log4js = require('../lib/log4js'), logEvents = [], logger;
log4js.clearAppenders();
log4js.addAppender(log4js.logLevelFilter('ERROR', function(evt) { logEvents.push(evt); }), "logLevelTest");
logger = log4js.getLogger("logLevelTest");
logger.debug('this should not trigger an event');
logger.warn('neither should this');
logger.error('this should, though');
logger.fatal('so should this');
return logEvents;
},
'should only pass log events greater than or equal to its own level' : function(logEvents) {
assert.length(logEvents, 2);
assert.equal(logEvents[0].data[0], 'this should, though');
assert.equal(logEvents[1].data[0], 'so should this');
var appenderEvent = args[1];
assert.equal(appenderEvent.data[0], 'this is a test');
}
},
@@ -549,12 +394,15 @@ vows.describe('log4js').addBatch({
'configuration reload' : {
topic: function() {
var pathsChecked = [],
messages = [],
logEvents = [],
logger,
modulePath = require('path').normalize(__dirname + '/../lib/log4js.json'),
fakeFS = {
config: { appenders: [ { type: 'console', layout: { type: 'messagePassThrough' } } ],
levels: { 'a-test' : 'INFO' } },
readdirSync: function(dir) {
return require('fs').readdirSync(dir);
},
readFileSync: function (file, encoding) {
assert.equal(file, modulePath);
assert.equal(encoding, 'utf8');
@@ -570,11 +418,13 @@ vows.describe('log4js').addBatch({
}
},
fakeConsole = {
log : function (msg) { messages.push(msg); },
info: this.log,
warn: this.log,
debug: this.log,
error: this.log
'name': 'console',
'appender': function () {
return function(evt) { logEvents.push(evt); };
},
'configure': function (config) {
return fakeConsole.appender();
}
},
setIntervalCallback,
fakeSetInterval = function(cb, timeout) {
@@ -584,7 +434,8 @@ vows.describe('log4js').addBatch({
'../lib/log4js',
{
requires: {
'fs': fakeFS
'fs': fakeFS,
'./appenders/console.js': fakeConsole
},
globals: {
'console': fakeConsole,
@@ -601,14 +452,14 @@ vows.describe('log4js').addBatch({
logger.info("info3");
logger.debug("debug4");
return [ pathsChecked, messages, modulePath ];
return [ pathsChecked, logEvents, modulePath ];
},
'should configure log4js from first log4js.json found': function(args) {
var messages = args[1];
assert.length(messages, 3);
assert.equal(messages[0], 'info1');
assert.equal(messages[1], 'info3');
assert.equal(messages[2], 'debug4');
var logEvents = args[1];
assert.length(logEvents, 3);
assert.equal(logEvents[0].data[0], 'info1');
assert.equal(logEvents[1].data[0], 'info3');
assert.equal(logEvents[2].data[0], 'debug4');
}
}

View File

@@ -6,7 +6,7 @@
"level": "WARN",
"appender": {
"type": "file",
"filename": "tmp-tests-warnings.log",
"filename": "test/logLevelFilter-warnings.log",
"layout": {
"type": "messagePassThrough"
}
@@ -15,7 +15,7 @@
{
"category": "tests",
"type": "file",
"filename": "tmp-tests.log",
"filename": "test/logLevelFilter.log",
"layout": {
"type": "messagePassThrough"
}