Compare commits

...

59 Commits

Author SHA1 Message Date
Gareth Jones
a703f2dc12 0.6.15 2014-07-03 08:16:07 +10:00
Gareth Jones
b2edbb1146 Merge pull request #175 from devotis/master
Loggly appender should not make use of any layout
2014-06-28 08:22:20 +10:00
Gareth Jones
99e7c0981d Merge pull request #189 from jengler/add-support-for-prerequired-appender-styles
Add support for prerequired appender styles
2014-06-28 08:20:12 +10:00
Gareth Jones
06bab894af Merge pull request #203 from idalv/master
Clustered appender should consider the categories.
2014-06-28 08:17:16 +10:00
Gareth Jones
101739ebef Merge pull request #209 from Icehunter/patch-1
Update for "write after end" uncaught error.
2014-06-28 08:02:27 +10:00
Ryan Wilson
f8ffccffd5 epic day with a forgotten console 2014-06-20 13:23:57 -07:00
Ryan Wilson
7d50b4aeff removed check for writeable
caused broken unit tests
2014-06-20 13:23:09 -07:00
Ryan Wilson
b12200fabc Update for "write after end" uncaught error. 2014-06-20 13:16:23 -07:00
Vladimir Mitev
f8b6cc7c39 Revert "Update connect-logger.js to work correctly with express"
This reverts commit d18fb466fb.
2014-05-26 15:56:23 +03:00
idalv
6314e4a344 Merge pull request #1 from idalv/automation
Update connect-logger.js to work correctly with express
2014-05-26 15:46:07 +03:00
mishless
d18fb466fb Update connect-logger.js to work correctly with express
When used with express, levels are wrong since send() does not call writeHead but sets responseCode on the response.
2014-05-23 13:58:52 +03:00
Vladimir Mitev
e638ff7271 Unit test.
Fixed the old unit tests.
Added validation for new functionality.

Signed-off-by: Vladimir Mitev <idalv@users.noreply.github.com>
2014-05-20 14:42:29 +03:00
Vladimir Mitev
2daf29b400 Clustered appender should consider the categories.
It turns out that whenever the clustered appender is used, the log event is passed to all actual appenders; each actual appender's category is ignored.

Signed-off-by: Vladimir Mitev <idalv@users.noreply.github.com>
2014-05-20 13:45:46 +03:00
Gareth Jones
ca5272aacc 0.6.14 2014-04-22 10:06:04 +10:00
Gareth Jones
614127bb10 added shutdown to datefile 2014-04-22 10:05:37 +10:00
Gareth Jones
a549df44b4 Merge pull request #199 from lulurun/fix_file_appender
fix shutdown method: make sure to callback when write completed immediately
2014-04-22 09:58:03 +10:00
Gareth Jones
5e0982f0b1 Merge pull request #200 from lulurun/datefile_appender_fix
file won't get rolled if the process is restarted daily
2014-04-22 09:29:15 +10:00
Xiaolu Liu
f5a76d9073 file won't get rolled if the process is restarted daily 2014-04-24 00:07:11 +09:00
Xiaolu Liu
29d941f0a6 fix shutdown method: make sure to callback when write completed immediately 2014-04-21 16:27:51 +09:00
Gareth Jones
0c2baa9690 0.6.13 2014-04-09 07:44:23 +10:00
Gareth Jones
9b538ee8ed fixed timezone flakiness 2014-04-09 07:43:40 +10:00
Gareth Jones
e4d5228f2b Merge branch 'flush-on-exit' 2014-04-09 07:37:17 +10:00
Gareth Jones
6aacb0da0b Merge pull request #195 from jengler/flush-on-exit
Flush on exit
2014-04-09 07:35:25 +10:00
John Engler
6e3da6f44b Added error throwing when a test file fails to load.
This will hopefully give us better visibility into our Travis CI
build failures.
2014-04-08 12:40:27 -07:00
John Engler
3b5eb28115 Update dateFile EOL usage to be consistent with appender.
From the looks of the Travis CI failure, this could be the issue
causing failures. Not sure as I can't reproduce locally. However,
it is still an inconsistency and worth fixing.
2014-04-08 10:47:18 -07:00
John Engler
633ed3cddb Support for disabling log writes on shutdown.
Updated logger.js to support disabling all log writes.
Updated log4js.js shutdown function to disable log writes.
Added tests.
Update gitignore to ignore rolling date stream's test output.
2014-04-07 19:06:29 -07:00
John Engler
8ca092cdb9 Removed callback to write, as it is not needed. 2014-04-05 16:14:56 -07:00
John Engler
3ec9811b5e Update log4js module to expose a shutdown function.
loadAppender will check for a shutdown function exposed by
a loaded appender. If present, it will be cached so that the
shutdown function can execute it.

The intent here is that a Node application would not invoke
process.exit until after the log4js shutdown callback returns.
2014-04-05 15:12:45 -07:00
Gareth Jones
c852fceaf4 Update README.md 2014-04-01 11:09:01 +11:00
John Engler
c569919160 Simplified loadAppender logic. 2014-03-11 02:19:00 -07:00
John Engler
28f7c87a0e Allow adding of appenders as objects
Previously, appenders could only be added by specifying the filepath
to the appender. This required the appender's path to be specified
either relative to the log4js installation, relative to a NODE_PATH
token, or absolute. This creates a coupling between the log4js
configurer and the log4js installation location, or a coupling between
the log4js configurer and the global NODE_PATH. This commit removes
the coupling by allowing the loading of appenders to be done relative
to the log4js configurer.
2014-03-11 02:07:58 -07:00
Gareth Jones
492919b940 0.6.12 2014-03-05 13:17:15 +11:00
Gareth Jones
470baa6c09 Merge pull request #187 from jci-fox/addHasLogger
adding ability to check if a logger exists
2014-03-05 13:16:51 +11:00
Gareth Jones
cd2ee14bde 0.6.11 2014-03-05 09:25:03 +11:00
Gareth Jones
c09c11b147 Merge branch 'master' of https://github.com/nomiddlename/log4js-node 2014-03-05 09:24:09 +11:00
Gareth Jones
b74a514369 Merge pull request #186 from jci-fox/issue184_dynamicloglevels
Adding level checks on dynamic logging
2014-03-05 09:17:16 +11:00
jci-fox
fd05d90c2f adding ability to check if a logger exists
This makes it possible to avoid accidentally adding a non-configured logger.
2014-03-04 09:45:56 -06:00
jci-fox
73344ba79f fixing unit test
logger.log requires two params; since the first is the level and levels are now filtered, the log call must provide a level that will actually result in log messages.
2014-03-04 09:27:04 -06:00
jci-fox
22c156582f Adding level checks on dynamic logging
Using levels.toLevel and this.isLevelEnabled prior to emitting the event prevents the appenders from being notified if the log level provided is below the logger's level.
2014-03-04 09:08:27 -06:00
Gareth Jones
72bfb5d980 0.6.10 2014-02-11 08:51:21 +11:00
Gareth Jones
83ad0babf3 changed my email address 2014-02-11 08:50:36 +11:00
Christiaan Westerbeek
ae1a55fed9 Stop making use of any layout
Stop making use of any layout by default, because layouts are intended to
format a line for human reading. Loggly indexes the values (of all
properties of objects) and makes them available for querying.
2014-01-21 12:11:32 +01:00
Gareth Jones
94034e1226 Merge pull request #172 from devotis/master
Fork with Loggly appender here
2014-01-16 14:00:16 -08:00
Christiaan Westerbeek
9b4c7d1574 Fixes the error with test/logglyAppender-test
Refs #172

I will add more relevant tests later
2014-01-16 16:39:45 +01:00
Christiaan Westerbeek
770f2da627 Cleanup 2014-01-10 21:59:39 +01:00
Christiaan Westerbeek
eb51aa99be First working version
tried examples/loggly-appender.js [OK]
2014-01-10 21:51:08 +01:00
Christiaan Westerbeek
5286c50375 Added the basic files for Loggly appender
appender, example, test
not tested yet!
2014-01-10 21:18:16 +01:00
Christiaan Westerbeek
bb644a1632 Update README.md
Will try to add an appender for Loggly in this fork. I will keep it to this feature alone, as outlined in the rules.
2014-01-10 20:47:48 +01:00
Gareth Jones
a6efbf6273 Merge pull request #168 from macedigital/connectlogger-ip-fix
fix 'remote-addr' property in connect-logger
2013-12-30 12:40:40 -08:00
Matthias Adler
2118d8f7b3 fix 'remote-addr' property in connect-logger 2013-12-21 19:42:44 +01:00
Gareth Jones
d2f044a451 0.6.9 2013-09-30 08:48:15 +10:00
Gareth Jones
d0661322aa Merge pull request #158 from emilecantin/master
Added logic to serialize Error objects correctly
2013-09-29 15:36:40 -07:00
Emile Cantin
8b8844694f Fixed unit tests, now with regexes. 2013-09-27 09:45:10 -04:00
Emile Cantin
abdba8e56f Added logic to serialize Error objects correctly
This should fix #97.
2013-09-26 14:55:20 -04:00
Gareth Jones
093f693232 Merge pull request #157 from karlvlam/gelf-timefix
GELF time precision should be millisecond level
2013-09-17 14:04:49 -07:00
Karl Lam
b9bba00d8c GELF time precision should be millisecond level 2013-09-16 18:31:23 +08:00
Gareth Jones
2a38f460dc tried adding process.nexttick - didn't help 2013-08-05 07:56:02 +10:00
Gareth Jones
9f77734f74 test case for flush on exit 2013-08-05 07:55:07 +10:00
Gareth Jones
ce8b6b06b9 trying out a shutdown function 2013-08-05 07:21:12 +10:00
24 changed files with 554 additions and 66 deletions

.gitignore

@@ -4,3 +4,4 @@ build
node_modules
.bob/
test/streams/test-rolling-file-stream*
test/streams/test-rolling-stream-with-existing-files*


@@ -1,7 +1,7 @@
# log4js-node [![Build Status](https://secure.travis-ci.org/nomiddlename/log4js-node.png?branch=master)](http://travis-ci.org/nomiddlename/log4js-node)
This is a conversion of the [log4js](http://log4js.berlios.de/index.html)
This is a conversion of the [log4js](https://github.com/stritti/log4js)
framework to work with [node](http://nodejs.org). I've mainly stripped out the browser-specific code and tidied up some of the javascript.
Out of the box it supports the following features:
@@ -12,6 +12,7 @@ Out of the box it supports the following features:
* SMTP appender
* GELF appender
* hook.io appender
* Loggly appender
* multiprocess appender (useful when you've got worker processes)
* a logger for connect/express servers
* configurable log message layout/patterns

examples/flush-on-exit.js

@@ -0,0 +1,27 @@
/**
* run this, then "ab -c 10 -n 100 localhost:4444/" to test (in
* another shell)
*/
var log4js = require('../lib/log4js');
log4js.configure({
appenders: [
{ type: 'file', filename: 'cheese.log', category: 'cheese' },
{ type: 'console'}
]
});
var logger = log4js.getLogger('cheese');
logger.setLevel('INFO');
var http=require('http');
var server = http.createServer(function(request, response){
response.writeHead(200, {'Content-Type': 'text/plain'});
var rd = Math.random() * 50;
logger.info("hello " + rd);
response.write('hello ');
if (Math.floor(rd) == 30){
log4js.shutdown(function() { process.exit(1); });
}
response.end();
}).listen(4444);


@@ -0,0 +1,24 @@
//Note that loggly appender needs node-loggly to work.
//If you haven't got node-loggly installed, you'll get cryptic
//"cannot find module" errors when using the loggly appender
var log4js = require('../lib/log4js');
log4js.configure({
"appenders": [
{
type: "console",
category: "test"
},
{
"type" : "loggly",
"token" : "12345678901234567890",
"subdomain": "your-subdomain",
"tags" : ["test"],
"category" : "loggly"
}
]
});
var logger = log4js.getLogger("loggly");
logger.info("Test log message");
//logger.debug("Test log message");


@@ -7,6 +7,14 @@ var log4js = require('../log4js');
* Takes a loggingEvent object, returns string representation of it.
*/
function serializeLoggingEvent(loggingEvent) {
// JSON.stringify(new Error('test')) returns {}, which is not really useful for us.
// The following allows us to serialize errors correctly.
for (var i = 0; i < loggingEvent.data.length; i++) {
var item = loggingEvent.data[i];
if (item && item.stack && JSON.stringify(item) === '{}') { // Validate that we really are in this case
loggingEvent.data[i] = {stack : item.stack};
}
}
return JSON.stringify(loggingEvent);
}
@@ -63,7 +71,10 @@ function createAppender(config) {
if (config.actualAppenders) {
var size = config.actualAppenders.length;
for(var i = 0; i < size; i++) {
config.actualAppenders[i](loggingEvent);
if (!config.appenders[i].category || config.appenders[i].category === loggingEvent.categoryName) {
// Relying on the index is not a good practice but otherwise the change would have been bigger.
config.actualAppenders[i](loggingEvent);
}
}
}
}
@@ -115,4 +126,4 @@ function configure(config, options) {
}
exports.appender = createAppender;
exports.configure = configure;
exports.configure = configure;
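
Below is a minimal configuration sketch, not part of the diff, illustrating what the new category check in the clustered appender changes; the filenames and categories are invented, and the nested-appender config shape is inferred from the clustered appender test further down in this comparison. An actual appender that declares a category now only receives events logged to that category, while appenders without a category still receive everything.

var log4js = require('log4js');
log4js.configure({
  appenders: [
    { type: 'clustered',
      appenders: [
        { type: 'console' },                                          // no category: receives all events
        { type: 'file', filename: 'cheese.log', category: 'cheese' }, // only 'cheese' events
        { type: 'file', filename: 'pants.log', category: 'pants' }    // only 'pants' events
      ]
    }
  ]
});
log4js.getLogger('cheese').info('reaches the console and cheese.log, but not pants.log');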


@@ -1,6 +1,7 @@
"use strict";
var streams = require('../streams')
, layouts = require('../layouts')
, async = require('async')
, path = require('path')
, os = require('os')
, eol = os.EOL || '\n'
@@ -24,12 +25,12 @@ function appender(filename, pattern, alwaysIncludePattern, layout) {
layout = layout || layouts.basicLayout;
var logFile = new streams.DateRollingFileStream(
filename,
pattern,
filename,
pattern,
{ alwaysIncludePattern: alwaysIncludePattern }
);
openFiles.push(logFile);
return function(logEvent) {
logFile.write(layout(logEvent) + eol, "utf8");
};
@@ -38,15 +39,15 @@ function appender(filename, pattern, alwaysIncludePattern, layout) {
function configure(config, options) {
var layout;
if (config.layout) {
layout = layouts.layout(config.layout.type, config.layout);
}
if (!config.alwaysIncludePattern) {
config.alwaysIncludePattern = false;
}
if (options && options.cwd && !config.absolute) {
config.filename = path.join(options.cwd, config.filename);
}
@@ -54,5 +55,18 @@ function configure(config, options) {
return appender(config.filename, config.pattern, config.alwaysIncludePattern, layout);
}
function shutdown(cb) {
async.forEach(openFiles, function(file, done) {
if (!file.write(eol, "utf-8")) {
file.once('drain', function() {
file.end(done);
});
} else {
file.end(done);
}
}, cb);
}
exports.appender = appender;
exports.configure = configure;
exports.shutdown = shutdown;


@@ -1,5 +1,6 @@
"use strict";
var layouts = require('../layouts')
, async = require('async')
, path = require('path')
, fs = require('fs')
, streams = require('../streams')
@@ -78,5 +79,18 @@ function configure(config, options) {
return fileAppender(config.filename, layout, config.maxLogSize, config.backups);
}
function shutdown(cb) {
async.forEach(openFiles, function(file, done) {
if (!file.write(eol, "utf-8")) {
file.once('drain', function() {
file.end(done);
});
} else {
file.end(done);
}
}, cb);
}
exports.appender = fileAppender;
exports.configure = configure;
exports.shutdown = shutdown;


@@ -100,7 +100,7 @@ function gelfAppender (layout, host, port, hostname, facility) {
msg.short_message = msg.full_message;
msg.version="1.0";
msg.timestamp = msg.timestamp || new Date().getTime() / 1000 >> 0;
msg.timestamp = msg.timestamp || new Date().getTime() / 1000; // log should use millisecond
msg.host = hostname;
msg.level = levelMapping[loggingEvent.level || levels.DEBUG];
msg.facility = facility;
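
As a standalone illustration of the one-line change above (the numbers are an invented example, not taken from the diff), dropping the ">> 0" truncation keeps the GELF timestamp's millisecond precision:

var nowMs = 1379324456789;             // an example epoch time in milliseconds
var oldTimestamp = nowMs / 1000 >> 0;  // 1379324456      (truncated to whole seconds)
var newTimestamp = nowMs / 1000;       // 1379324456.789  (keeps millisecond precision)
console.log(oldTimestamp, newTimestamp);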

lib/appenders/loggly.js

@@ -0,0 +1,84 @@
'use strict';
var layouts = require('../layouts')
, loggly = require('loggly')
, os = require('os');
/**
* Loggly Appender. Sends logging events to Loggly using node-loggly
*
* @param config object with loggly configuration data
* {
* token: 'your-really-long-input-token',
* subdomain: 'your-subdomain',
* tags: ['loggly-tag1', 'loggly-tag2', .., 'loggly-tagn']
* }
* @param layout a function that takes a logevent and returns a string (defaults to objectLayout).
*/
function logglyAppender(config, layout) {
var client = loggly.createClient(config);
var packageMessage = function (loggingEvent) {
var BaseItem = function(level, msg) {
this.level = level || loggingEvent.level.toString();
this.category = loggingEvent.categoryName;
this.hostname = os.hostname().toString();
if (typeof msg !== 'undefined')
this.msg = msg;
};
var packageItem = function (item) {
if (item instanceof Error)
return new BaseItem('ERROR', item.message);
if (['string', 'number', 'boolean'].indexOf(typeof item) > -1 )
return new BaseItem(undefined, item);
var obj = new BaseItem();
if (Array.isArray(item))
return item.unshift(obj); //add base object as first item
if (item && Object.prototype.toString.call(item) === '[object Object]') {
for (var key in item) {
if (item.hasOwnProperty(key)) {
obj[key] = item[key]; //don't do packageItem on nested items, because level, category and hostname are needed on top level items only.
}
}
}
return obj;
};
if (loggingEvent.data.length === 1) {
return packageItem(loggingEvent.data[0]);
}
//length >1
var msg = loggingEvent.data;
for (var i = 0, l = msg.length; i < l; i++) {
msg[i] = packageItem(msg[i]);
}
return msg;
};
return function(loggingEvent) {
var a = layout ? layout(loggingEvent) : packageMessage(loggingEvent);
//console.log('log now', a);
client.log(a, config.tags, function(err, result) {
if (err) {
throw err;
}
});
};
}
function configure(config) {
var layout;
if (config.layout) {
layout = layouts.layout(config.layout.type, config.layout);
}
return logglyAppender(config, layout);
}
exports.name = 'loggly';
exports.appender = logglyAppender;
exports.configure = configure;
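
A rough, hand-annotated illustration of what packageMessage above produces for a logger in the 'loggly' category; the hostname and the logged values are invented, and the shapes follow the code above. Primitives become a base item with level, category, hostname and msg; plain objects are merged into the base item; multiple arguments are sent as an array of packaged items.

var log4js = require('log4js');
// assumes the loggly appender has been configured for the 'loggly' category,
// as in the examples/loggly-appender.js file added in this comparison
var logger = log4js.getLogger('loggly');

logger.info('disk almost full');
// -> { level: 'INFO', category: 'loggly', hostname: 'myhost', msg: 'disk almost full' }

logger.warn({ disk: '/dev/sda1', usedPercent: 97 });
// -> { level: 'WARN', category: 'loggly', hostname: 'myhost', disk: '/dev/sda1', usedPercent: 97 }

logger.error('request failed', new Error('boom'));
// -> [ { level: 'ERROR', category: 'loggly', hostname: 'myhost', msg: 'request failed' },
//      { level: 'ERROR', category: 'loggly', hostname: 'myhost', msg: 'boom' } ]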


@@ -94,6 +94,11 @@ function workerAppender(config) {
}
function write(loggingEvent) {
// JSON.stringify(new Error('test')) returns {}, which is not really useful for us.
// The following allows us to serialize errors correctly.
if (loggingEvent && loggingEvent.stack && JSON.stringify(loggingEvent) === '{}') { // Validate that we really are in this case
loggingEvent = {stack : loggingEvent.stack};
}
socket.write(JSON.stringify(loggingEvent), 'utf8');
socket.write(END_MSG, 'utf8');
}
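
For context, a standalone illustration (not from the diff) of why the stack check above is needed before events are written to the socket:

var err = new Error('Error test');
console.log(JSON.stringify(err));                  // '{}' - Error has no enumerable own properties
console.log(JSON.stringify({ stack: err.stack })); // '{"stack":"Error: Error test\n    at ..."}'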


@@ -120,10 +120,10 @@ function format(str, req, res) {
.replace(':referrer', req.headers.referer || req.headers.referrer || '')
.replace(':http-version', req.httpVersionMajor + '.' + req.httpVersionMinor)
.replace(
':remote-addr',
':remote-addr', req.ip || req._remoteAddress || (
req.socket &&
(req.socket.remoteAddress || (req.socket.socket && req.socket.socket.remoteAddress))
)
))
.replace(':user-agent', req.headers['user-agent'] || '')
.replace(
':content-length',
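
A usage sketch of the ':remote-addr' token in an Express application; the app itself is invented, while connectLogger and its format option are existing log4js APIs. With the change above, the token is resolved from req.ip / req._remoteAddress, which Express populates, before falling back to the raw socket addresses:

var express = require('express');
var log4js = require('log4js');

var app = express();
app.use(log4js.connectLogger(log4js.getLogger('http'), {
  level: 'auto',
  format: ':remote-addr - :method :url :status'
}));
app.get('/', function(req, res) { res.send('ok'); });
app.listen(3000);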


@@ -44,17 +44,20 @@
* Website: http://log4js.berlios.de
*/
var events = require('events')
, async = require('async')
, fs = require('fs')
, path = require('path')
, util = require('util')
, layouts = require('./layouts')
, levels = require('./levels')
, LoggingEvent = require('./logger').LoggingEvent
, Logger = require('./logger').Logger
, loggerModule = require('./logger')
, LoggingEvent = loggerModule.LoggingEvent
, Logger = loggerModule.Logger
, ALL_CATEGORIES = '[all]'
, appenders = {}
, loggers = {}
, appenderMakers = {}
, appenderShutdowns = {}
, defaultConfig = {
appenders: [
{ type: "console" }
@@ -62,6 +65,11 @@ var events = require('events')
replaceConsole: false
};
function hasLogger(logger) {
return loggers.hasOwnProperty(logger);
}
/**
* Get a logger instance. Instance is cached on categoryName level.
* @param {String} categoryName name of category to log to.
@@ -76,7 +84,7 @@ function getLogger (categoryName) {
}
var appenderList;
if (!loggers[categoryName]) {
if (!hasLogger(categoryName)) {
// Create the logger for this name if it doesn't already exist
loggers[categoryName] = new Logger(categoryName);
if (appenders[categoryName]) {
@@ -115,7 +123,7 @@ function addAppender () {
if (category === ALL_CATEGORIES) {
addAppenderToAllLoggers(appender);
} else if (loggers[category]) {
} else if (hasLogger(category)) {
loggers[category].addListener("log", appender);
}
});
@@ -123,7 +131,7 @@ function addAppender () {
function addAppenderToAllLoggers(appender) {
for (var logger in loggers) {
if (loggers.hasOwnProperty(logger)) {
if (hasLogger(logger)) {
loggers[logger].addListener("log", appender);
}
}
@@ -139,7 +147,7 @@ function addAppenderToCategory(appender, category) {
function clearAppenders () {
appenders = {};
for (var logger in loggers) {
if (loggers.hasOwnProperty(logger)) {
if (hasLogger(logger)) {
loggers[logger].removeAllListeners("log");
}
}
@@ -290,25 +298,89 @@ function restoreConsole() {
});
}
function loadAppender(appender) {
/**
* Load an appenderModule based on the provided appender filepath. Will first
* check if the appender path is a subpath of the log4js "lib/appenders" directory.
* If not, it will attempt to load the the appender as complete path.
*
* @param {string} appender The filepath for the appender.
* @returns {Object|null} The required appender or null if appender could not be loaded.
* @private
*/
function requireAppender(appender) {
var appenderModule;
try {
appenderModule = require('./appenders/' + appender);
} catch (e) {
appenderModule = require(appender);
}
return appenderModule;
}
/**
* Load an appender. Provided the appender path to be loaded. If appenderModule is defined,
* it will be used in place of requiring the appender module.
*
* @param {string} appender The path to the appender module.
* @param {Object|void} [appenderModule] The pre-required appender module. When provided,
* instead of requiring the appender by its path, this object will be used.
* @returns {void}
* @private
*/
function loadAppender(appender, appenderModule) {
appenderModule = appenderModule || requireAppender(appender);
if (!appenderModule) {
throw new Error("Invalid log4js appender: " + util.inspect(appender));
}
module.exports.appenders[appender] = appenderModule.appender.bind(appenderModule);
if (appenderModule.shutdown) {
appenderShutdowns[appender] = appenderModule.shutdown.bind(appenderModule);
}
appenderMakers[appender] = appenderModule.configure.bind(appenderModule);
}
/**
* Shutdown all log appenders. This will first disable all writing to appenders
* and then call the shutdown function each appender.
*
* @params {Function} cb - The callback to be invoked once all appenders have
* shutdown. If an error occurs, the callback will be given the error object
* as the first argument.
* @returns {void}
*/
function shutdown(cb) {
// First, disable all writing to appenders. This prevents appenders from
// not being able to be drained because of run-away log writes.
loggerModule.disableAllLogWrites();
// Next, get all the shutdown functions for appenders as an array.
var shutdownFunctions = Object.keys(appenderShutdowns).reduce(
function(accum, category) {
return accum.concat(appenderShutdowns[category]);
}, []);
// Call each of the shutdown functions.
async.forEach(
shutdownFunctions,
function(shutdownFn, done) {
shutdownFn(done);
},
cb
);
}
module.exports = {
getLogger: getLogger,
getDefaultLogger: getDefaultLogger,
hasLogger: hasLogger,
addAppender: addAppender,
loadAppender: loadAppender,
clearAppenders: clearAppenders,
configure: configure,
shutdown: shutdown,
replaceConsole: replaceConsole,
restoreConsole: restoreConsole,
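
A short usage sketch of the three additions to the exported API above: hasLogger, loadAppender with a pre-required appender object, and shutdown. Here './my-appender' is a hypothetical module exporting appender(), configure() and, optionally, shutdown():

var log4js = require('log4js');

// Register the appender object directly, so its path no longer has to be
// resolvable relative to the log4js installation or NODE_PATH.
log4js.loadAppender('my-appender', require('./my-appender'));

console.log(log4js.hasLogger('cheese'));   // false until the logger has been created
var logger = log4js.getLogger('cheese');
console.log(log4js.hasLogger('cheese'));   // true

// Disable further log writes, let every appender flush and close, then exit.
log4js.shutdown(function(err) {
  if (err) { console.error(err); }
  process.exit(0);
});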


@@ -4,6 +4,8 @@ var levels = require('./levels')
, events = require('events')
, DEFAULT_CATEGORY = '[default]';
var logWritesEnabled = true;
/**
* Models a logging event.
* @constructor
@@ -49,9 +51,12 @@ Logger.prototype.removeLevel = function() {
Logger.prototype.log = function() {
var args = Array.prototype.slice.call(arguments)
, logLevel = args.shift()
, loggingEvent = new LoggingEvent(this.category, logLevel, args, this);
this.emit("log", loggingEvent);
, logLevel = levels.toLevel(args.shift())
, loggingEvent;
if (this.isLevelEnabled(logLevel)) {
loggingEvent = new LoggingEvent(this.category, logLevel, args, this);
this.emit("log", loggingEvent);
}
};
Logger.prototype.isLevelEnabled = function(otherLevel) {
@@ -66,7 +71,7 @@ Logger.prototype.isLevelEnabled = function(otherLevel) {
};
Logger.prototype[levelString.toLowerCase()] = function () {
if (this.isLevelEnabled(level)) {
if (logWritesEnabled && this.isLevelEnabled(level)) {
var args = Array.prototype.slice.call(arguments);
args.unshift(level);
Logger.prototype.log.apply(this, args);
@@ -75,6 +80,23 @@ Logger.prototype.isLevelEnabled = function(otherLevel) {
}
);
/**
* Disable all log writes.
* @returns {void}
*/
function disableAllLogWrites() {
logWritesEnabled = false;
}
/**
* Enable log writes.
* @returns {void}
*/
function enableAllLogWrites() {
logWritesEnabled = true;
}
exports.LoggingEvent = LoggingEvent;
exports.Logger = Logger;
exports.disableAllLogWrites = disableAllLogWrites;
exports.enableAllLogWrites = enableAllLogWrites;
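
A sketch of the new module-level switches above. Applications normally reach this behaviour through log4js.shutdown(); requiring lib/logger directly, as the tests further down do, is shown here only for illustration:

var loggerModule = require('log4js/lib/logger');
var logger = new loggerModule.Logger('example');
logger.addListener('log', function(event) { console.log('emitted:', event.data[0]); });

logger.info('written');            // emitted
loggerModule.disableAllLogWrites();
logger.info('dropped');            // not emitted while writes are disabled
loggerModule.enableAllLogWrites();
logger.info('written again');      // emitted again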


@@ -48,7 +48,13 @@ BaseRollingFileStream.prototype._write = function(chunk, encoding, callback) {
function writeTheChunk() {
debug("writing the chunk to the underlying stream");
that.currentSize += chunk.length;
that.theStream.write(chunk, encoding, callback);
try {
that.theStream.write(chunk, encoding, callback);
}
catch (err){
debug(err);
callback();
}
}
debug("in _write");


@@ -17,7 +17,14 @@ function DateRollingFileStream(filename, pattern, options, now) {
}
this.pattern = pattern || '.yyyy-MM-dd';
this.now = now || Date.now;
this.lastTimeWeWroteSomething = format.asString(this.pattern, new Date(this.now()));
if (fs.existsSync(filename)) {
var stat = fs.statSync(filename);
this.lastTimeWeWroteSomething = format.asString(this.pattern, stat.mtime);
} else {
this.lastTimeWeWroteSomething = format.asString(this.pattern, new Date(this.now()));
}
this.baseFilename = filename;
this.alwaysIncludePattern = false;


@@ -1,6 +1,6 @@
{
"name": "log4js",
"version": "0.6.8",
"version": "0.6.15",
"description": "Port of Log4js to work with node.",
"keywords": [
"logging",
@@ -9,7 +9,7 @@
"node"
],
"main": "./lib/log4js",
"author": "Gareth Jones <gareth.jones@sensis.com.au>",
"author": "Gareth Jones <gareth.nomiddlename@gmail.com>",
"repository": {
"type": "git",
"url": "https://github.com/nomiddlename/log4js-node.git"


@@ -37,7 +37,8 @@ vows.describe('log4js cluster appender').addBatch({
});
var masterAppender = appenderModule.appender({
actualAppenders: [ fakeActualAppender ]
actualAppenders: [fakeActualAppender, fakeActualAppender, fakeActualAppender],
appenders: [{}, {category: "test"}, {category: "wovs"}]
});
// Actual test - log message using masterAppender
@@ -56,7 +57,9 @@ vows.describe('log4js cluster appender').addBatch({
},
"should log using actual appender": function(topic) {
assert.equal(topic.loggingEvents.length, 2)
assert.equal(topic.loggingEvents[0].data[0], 'masterAppender test');
assert.equal(topic.loggingEvents[1].data[0], 'masterAppender test');
},
},
@@ -97,6 +100,7 @@ vows.describe('log4js cluster appender').addBatch({
// Actual test - log message using masterAppender
workerAppender(new LoggingEvent('wovs', 'Info', ['workerAppender test']));
workerAppender(new LoggingEvent('wovs', 'Info', [new Error('Error test')]));
var returnValue = {
registeredProcessEvents: registeredProcessEvents,
@@ -109,6 +113,14 @@ vows.describe('log4js cluster appender').addBatch({
"worker appender should call process.send" : function(topic) {
assert.equal(topic.registeredProcessEvents[0].type, '::log-message');
assert.equal(JSON.parse(topic.registeredProcessEvents[0].event).data[0], "workerAppender test");
},
"worker should serialize an Error correctly" : function(topic) {
assert.equal(topic.registeredProcessEvents[1].type, '::log-message');
assert(JSON.parse(topic.registeredProcessEvents[1].event).data[0].stack);
var actual = JSON.parse(topic.registeredProcessEvents[1].event).data[0].stack;
var expectedRegex = /^Error: Error test/;
assert(actual.match(expectedRegex), "Expected: \n\n " + actual + "\n\n to match " + expectedRegex);
}
}


@@ -86,6 +86,21 @@ vows.describe('log4js configure').addBatch({
assert.isFunction(log4js.appenderMakers['some/other/external']);
}
},
'when appender object loaded via loadAppender': {
topic: function() {
var testAppender = makeTestAppender(),
log4js = sandbox.require('../lib/log4js');
log4js.loadAppender('some/other/external', testAppender);
return log4js;
},
'should load appender with provided object': function(log4js) {
assert.ok(log4js.appenders['some/other/external']);
},
'should add appender configure function to appenderMakers': function(log4js) {
assert.isFunction(log4js.appenderMakers['some/other/external']);
}
},
'when configuration file loaded via LOG4JS_CONFIG environment variable': {
topic: function() {
process.env.LOG4JS_CONFIG = 'some/path/to/mylog4js.json';


@@ -4,7 +4,8 @@ var vows = require('vows')
, path = require('path')
, fs = require('fs')
, sandbox = require('sandboxed-module')
, log4js = require('../lib/log4js');
, log4js = require('../lib/log4js')
, EOL = require('os').EOL || '\n';
function removeFile(filename) {
return function() {
@@ -134,7 +135,10 @@ vows.describe('../lib/appenders/dateFile').addBatch({
teardown: removeFile('date-file-test.log'),
'should load appender configuration from a json file': function(err, contents) {
assert.include(contents, 'this should be written to the file' + require('os').EOL);
if (err) {
throw err;
}
assert.include(contents, 'this should be written to the file' + EOL);
assert.equal(contents.indexOf('this should not be written to the file'), -1);
}
},
@@ -161,7 +165,7 @@ vows.describe('../lib/appenders/dateFile').addBatch({
, thisTime = format.asString(options.appenders[0].pattern, new Date());
fs.writeFileSync(
path.join(__dirname, 'date-file-test' + thisTime),
"this is existing data" + require('os').EOL,
"this is existing data" + EOL,
'utf8'
);
log4js.clearAppenders();


@@ -7,7 +7,7 @@ function test(args, pattern, value) {
var layout = args[0]
, event = args[1]
, tokens = args[2];
assert.equal(layout(pattern, tokens)(event), value);
}
@@ -16,7 +16,7 @@ vows.describe('log4js layouts').addBatch({
topic: function() {
return require('../lib/layouts').colouredLayout;
},
'should apply level colour codes to output': function(layout) {
var output = layout({
data: ["nonsense"],
@@ -40,7 +40,7 @@ vows.describe('log4js layouts').addBatch({
assert.equal(output, '\x1B[31m[2010-12-05 14:18:30.045] [ERROR] cheese - \x1B[39mthing 2');
}
},
'messagePassThroughLayout': {
topic: function() {
return require('../lib/layouts').messagePassThroughLayout;
@@ -58,49 +58,49 @@ vows.describe('log4js layouts').addBatch({
},
'should support the console.log format for the message' : function(layout) {
assert.equal(layout({
data: ["thing %d", 1, "cheese"],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: "cheese",
data: ["thing %d", 1, "cheese"],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: "cheese",
level : {
colour: "green",
colour: "green",
toString: function() { return "ERROR"; }
}
}), "thing 1 cheese");
},
'should output the first item even if it is not a string': function(layout) {
assert.equal(layout({
data: [ { thing: 1} ],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: "cheese",
data: [ { thing: 1} ],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: "cheese",
level: {
colour: "green",
colour: "green",
toString: function() { return "ERROR"; }
}
}), "{ thing: 1 }");
},
'should print the stacks of a passed error objects': function(layout) {
assert.isArray(layout({
data: [ new Error() ],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: "cheese",
data: [ new Error() ],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: "cheese",
level: {
colour: "green",
colour: "green",
toString: function() { return "ERROR"; }
}
}).match(/Error\s+at Object\..*\s+\((.*)test[\\\/]layouts-test\.js\:\d+\:\d+\)\s+at runTest/)
, 'regexp did not return a match');
},
'with passed augmented errors': {
'with passed augmented errors': {
topic: function(layout){
var e = new Error("My Unique Error Message");
e.augmented = "My Unique attribute value";
e.augObj = { at1: "at2" };
return layout({
data: [ e ],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: "cheese",
data: [ e ],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: "cheese",
level: {
colour: "green",
colour: "green",
toString: function() { return "ERROR"; }
}
});
@@ -118,10 +118,10 @@ vows.describe('log4js layouts').addBatch({
assert.isArray(m);
}
}
},
'basicLayout': {
topic: function() {
var layout = require('../lib/layouts').basicLayout,
@@ -143,17 +143,17 @@ vows.describe('log4js layouts').addBatch({
var layout = args[0], event = args[1], output, lines,
error = new Error("Some made-up error"),
stack = error.stack.split(/\n/);
event.data = ['this is a test', error];
output = layout(event);
lines = output.split(/\n/);
assert.equal(lines.length - 1, stack.length);
assert.equal(
lines[0],
lines[0],
"[2010-12-05 14:18:30.045] [DEBUG] tests - this is a test [Error: Some made-up error]"
);
for (var i = 1; i < stack.length; i++) {
assert.equal(lines[i+2], stack[i+1]);
}
@@ -166,13 +166,13 @@ vows.describe('log4js layouts').addBatch({
}];
output = layout(event);
assert.equal(
output,
"[2010-12-05 14:18:30.045] [DEBUG] tests - this is a test " +
output,
"[2010-12-05 14:18:30.045] [DEBUG] tests - this is a test " +
"{ name: 'Cheese', message: 'Gorgonzola smells.' }"
);
}
},
'patternLayout': {
topic: function() {
var event = {
@@ -188,9 +188,12 @@ vows.describe('log4js layouts').addBatch({
testFunction: function() { return 'testFunctionToken'; },
fnThatUsesLogEvent: function(logEvent) { return logEvent.level.toString(); }
};
//override getTimezoneOffset
event.startTime.getTimezoneOffset = function() { return 0; };
return [layout, event, tokens];
},
'should default to "time logLevel loggerName - message"': function(args) {
test(args, null, "14:18:30 DEBUG multiple.levels.of.tests - this is a test\n");
},


@@ -2,7 +2,8 @@
var vows = require('vows')
, assert = require('assert')
, levels = require('../lib/levels')
, Logger = require('../lib/logger').Logger;
, loggerModule = require('../lib/logger')
, Logger = loggerModule.Logger;
vows.describe('../lib/logger').addBatch({
'constructor with no parameters': {
@@ -53,5 +54,28 @@ vows.describe('../lib/logger').addBatch({
assert.isTrue(logger.isErrorEnabled());
assert.isTrue(logger.isFatalEnabled());
}
},
'should emit log events': {
topic: function() {
var events = [],
logger = new Logger();
logger.addListener('log', function (logEvent) { events.push(logEvent); });
logger.debug('Event 1');
loggerModule.disableAllLogWrites();
logger.debug('Event 2');
loggerModule.enableAllLogWrites();
logger.debug('Event 3');
return events;
},
'when log writes are enabled': function(events) {
assert.equal(events[0].data[0], 'Event 1');
},
'but not when log writes are disabled': function(events) {
assert.equal(events.length, 2);
assert.equal(events[1].data[0], 'Event 3');
}
}
}).exportTo(module);


@@ -75,13 +75,65 @@ vows.describe('log4js').addBatch({
assert.equal(events[1].level.toString(), 'WARN');
},
'should include the error if passed in': function (events) {
'should include the error if passed in': function(events) {
assert.instanceOf(events[2].data[1], Error);
assert.equal(events[2].data[1].message, 'Pants are on fire!');
}
}
},
'when shutdown is called': {
topic: function() {
var events = {
appenderShutdownCalled: false,
shutdownCallbackCalled: false
},
log4js = sandbox.require(
'../lib/log4js',
{
requires: {
'./appenders/file':
{
name: "file",
appender: function() {},
configure: function(configuration) {
return function() {};
},
shutdown: function(cb) {
events.appenderShutdownCalled = true;
cb();
}
}
}
}
),
shutdownCallback = function() {
events.shutdownCallbackCalled = true;
},
config = { appenders:
[ { "type" : "file",
"filename" : "cheesy-wotsits.log",
"maxLogSize" : 1024,
"backups" : 3
}
]
};
log4js.configure(config);
log4js.shutdown(shutdownCallback);
// Re-enable log writing so other tests that use logger are not
// affected.
require('../lib/logger').enableAllLogWrites();
return events;
},
'should invoke appender shutdowns': function(events) {
assert.ok(events.appenderShutdownCalled);
},
'should call callback': function(events) {
assert.ok(events.shutdownCallbackCalled);
}
},
'invalid configuration': {


@@ -0,0 +1,82 @@
"use strict";
var vows = require('vows')
, assert = require('assert')
, log4js = require('../lib/log4js')
, sandbox = require('sandboxed-module')
;
function setupLogging(category, options) {
var msgs = [];
var fakeLoggly = {
createClient: function (options) {
return {
config: options,
log: function (msg, tags) {
msgs.push({
msg: msg,
tags: tags
});
}
};
}
};
var fakeLayouts = {
layout: function(type, config) {
this.type = type;
this.config = config;
return log4js.layouts.messagePassThroughLayout;
},
basicLayout: log4js.layouts.basicLayout,
messagePassThroughLayout: log4js.layouts.messagePassThroughLayout
};
var fakeConsole = {
errors: [],
error: function(msg, value) {
this.errors.push({ msg: msg, value: value });
}
};
var logglyModule = sandbox.require('../lib/appenders/loggly', {
requires: {
'loggly': fakeLoggly,
'../layouts': fakeLayouts
},
globals: {
console: fakeConsole
}
});
log4js.addAppender(logglyModule.configure(options), category);
return {
logger: log4js.getLogger(category),
loggly: fakeLoggly,
layouts: fakeLayouts,
console: fakeConsole,
results: msgs
};
}
log4js.clearAppenders();
vows.describe('log4js logglyAppender').addBatch({
'minimal config': {
topic: function() {
var setup = setupLogging('loggly', {
token: 'your-really-long-input-token',
subdomain: 'your-subdomain',
tags: ['loggly-tag1', 'loggly-tag2', 'loggly-tagn']
});
setup.logger.log('trace', 'Log event #1');
return setup;
},
'there should be one message only': function (topic) {
//console.log('topic', topic);
assert.equal(topic.results.length, 1);
}
}
}).export(module);


@@ -75,6 +75,7 @@ vows.describe('Multiprocess Appender').addBatch({
appender('after error, before connect');
fakeNet.cbs.connect();
appender('after error, after connect');
appender(new Error('Error test'));
return fakeNet;
},
@@ -98,6 +99,13 @@ vows.describe('Multiprocess Appender').addBatch({
assert.equal(net.data[6], JSON.stringify('after error, after connect'));
assert.equal(net.data[7], '__LOG4JS__');
assert.equal(net.createConnectionCalled, 2);
},
'should serialize an Error correctly': function(net) {
assert(JSON.parse(net.data[8]).stack, "Expected:\n\n" + net.data[8] + "\n\n to have a 'stack' property");
var actual = JSON.parse(net.data[8]).stack;
var expectedRegex = /^Error: Error test/;
assert(actual.match(expectedRegex), "Expected: \n\n " + actual + "\n\n to match " + expectedRegex);
}
},
'worker with timeout': {