Compare commits
32 Commits
v0.5.2
...
release-0.
| SHA1 |
|---|
| 67b19aeaf3 |
| 50eefcc701 |
| 8e53c6213e |
| a15a628311 |
| b75e3660f4 |
| 22da6226e5 |
| a3bdac8e14 |
| af428c5669 |
| 5c75ba9468 |
| bec0d05847 |
| e4bf405f20 |
| 95568f352b |
| 6da6f3c90e |
| 7f57d14e70 |
| f478793da3 |
| ec2f8fec3b |
| 0167c84ea5 |
| 3e1a27e522 |
| 8b42e46071 |
| 4a7a90ed53 |
| a9307fd6da |
| 4739c65c68 |
| 892181f88f |
| bdfa7f9a9b |
| ad63b801f7 |
| 2bfad6362a |
| 2b889fe776 |
| 9ac61e37f4 |
| 185f343e68 |
| be1272cd7c |
| cbc1dd32f9 |
| a6fb26efb1 |
README.md (145)
@@ -21,10 +21,10 @@ NOTE: from log4js 0.5 onwards you'll need to explicitly enable replacement of no
```javascript
{
  appenders: [
    { type: "console" }
  ],
  replaceConsole: true
}
```
@@ -36,95 +36,98 @@ npm install log4js
## usage

Minimalist version:

```javascript
var log4js = require('log4js');
var logger = log4js.getLogger();
logger.debug("Some debug messages");
```

By default, log4js outputs to stdout with the coloured layout (thanks to [masylum](http://github.com/masylum)), so for the above you would see:

```bash
[2010-01-17 11:43:37.987] [DEBUG] [default] - Some debug messages
```

See example.js for a full example, but here's a snippet (also in fromreadme.js):

```javascript
var log4js = require('log4js');
//console log is loaded by default, so you won't normally need to do this
//log4js.loadAppender('console');
log4js.loadAppender('file');
//log4js.addAppender(log4js.appenders.console());
log4js.addAppender(log4js.appenders.file('logs/cheese.log'), 'cheese');

var logger = log4js.getLogger('cheese');
logger.setLevel('ERROR');

logger.trace('Entering cheese testing');
logger.debug('Got cheese.');
logger.info('Cheese is Gouda.');
logger.warn('Cheese is quite smelly.');
logger.error('Cheese is too ripe!');
logger.fatal('Cheese was breeding ground for listeria.');
```

Output:

```bash
[2010-01-17 11:43:37.987] [ERROR] cheese - Cheese is too ripe!
[2010-01-17 11:43:37.990] [FATAL] cheese - Cheese was breeding ground for listeria.
```

The first 5 lines of the code above could also be written as:

```javascript
var log4js = require('log4js');
log4js.configure({
  appenders: [
    { type: 'console' },
    { type: 'file', filename: 'logs/cheese.log', category: 'cheese' }
  ]
});
```

## configuration

You can configure the appenders and log levels manually (as above), or provide a
configuration file (`log4js.configure('path/to/file.json')`), or a configuration object. The
configuration file location may also be specified via the environment variable
LOG4JS_CONFIG (`export LOG4JS_CONFIG=path/to/file.json`).

An example file can be found in `test/log4js.json`. An example config file with log rolling is in `test/with-log-rolling.json`.

By default, the configuration file is checked for changes every 60 seconds, and if changed, reloaded. This allows changes to logging levels to occur without restarting the application.

To turn off configuration file change checking, configure with:

```javascript
var log4js = require('log4js');
log4js.configure('my_log4js_configuration.json', {});
```

To specify a different period:

```javascript
log4js.configure('file.json', { reloadSecs: 300 });
```

For FileAppender you can also pass the path to the log directory as an option; all your log files will then be stored under that directory.

```javascript
log4js.configure('my_log4js_configuration.json', { cwd: '/absolute/path/to/log/dir' });
```

If you have already defined an absolute path for one of the FileAppenders in the configuration file, you can add `"absolute": true` to that particular FileAppender to override the cwd option passed in. Here is an example configuration file:

#### my_log4js_configuration.json ####
```json
{
  "appenders": [
    {
      "type": "file",
      "filename": "relative/path/to/log_file.log",
      "maxLogSize": 20480,
      "backups": 3,
      "category": "relative-logger"
    },
    {
      "type": "file",
      "absolute": true,
      "filename": "/absolute/path/to/log_file.log",
      "maxLogSize": 20480,
      "backups": 10,
      "category": "absolute-logger"
    }
  ]
}
```

Documentation for most of the core appenders can be found on the [wiki](log4js-node/wiki/Appenders), otherwise take a look at the tests and the examples.

## Documentation
examples/patternLayout-tokens.js (21, new file)
@@ -0,0 +1,21 @@
var log4js = require('./lib/log4js');

var config = {
  "appenders": [
    {
      "type": "console",
      "layout": {
        "type": "pattern",
        "pattern": "%[%r (%x{pid}) %p %c -%] %m%n",
        "tokens": {
          "pid" : function() { return process.pid; }
        }
      }
    }
  ]
};

log4js.configure(config, {});

var logger = log4js.getLogger("app");
logger.info("Test log message");
lib/appenders/dateFile.js (49, new file)
@@ -0,0 +1,49 @@
var streams = require('../streams'),
    layouts = require('../layouts'),
    path = require('path'),
    os = require('os'),
    eol = os.EOL || '\n',
    openFiles = [];

//close open files on process exit.
process.on('exit', function() {
  openFiles.forEach(function (file) {
    file.end();
  });
});

/**
 * File appender that rolls files according to a date pattern.
 * @filename base filename.
 * @pattern the format that will be added to the end of filename when rolling,
 *          also used to check when to roll files - defaults to '.yyyy-MM-dd'
 * @layout layout function for log messages - defaults to basicLayout
 */
function appender(filename, pattern, layout) {
  layout = layout || layouts.basicLayout;

  var logFile = new streams.BufferedWriteStream(new streams.DateRollingFileStream(filename, pattern));
  openFiles.push(logFile);

  return function(logEvent) {
    logFile.write(layout(logEvent) + eol, "utf8");
  };

}

function configure(config, options) {
  var layout;

  if (config.layout) {
    layout = layouts.layout(config.layout.type, config.layout);
  }

  if (options && options.cwd && !config.absolute) {
    config.filename = path.join(options.cwd, config.filename);
  }

  return appender(config.filename, config.pattern, layout);
}

exports.appender = appender;
exports.configure = configure;
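A minimal usage sketch, assuming the appender is reachable at `log4js/lib/appenders/dateFile` once the package is installed (the filename and category here are illustrative):

```javascript
var log4js = require('log4js');
var dateFileAppender = require('log4js/lib/appenders/dateFile');

// Roll whenever the formatted date changes; '.yyyy-MM-dd' is the default pattern.
log4js.addAppender(dateFileAppender.appender('logs/cheese.log', '.yyyy-MM-dd'), 'cheese');
log4js.getLogger('cheese').info('this ends up in logs/cheese.log');
```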
lib/appenders/file.js

@@ -1,12 +1,17 @@
 var layouts = require('../layouts')
 , path = require('path')
 , fs = require('fs')
 , streams = require('../streams')
 , os = require('os')
-, eol = os.EOL || '\n';
-
-var openFiles = [];
-var listenerAtttached = false;
+, eol = os.EOL || '\n'
+, openFiles = [];
+
+//close open files on process exit.
+process.on('exit', function() {
+  openFiles.forEach(function (file) {
+    file.end();
+  });
+});
 
 /**
  * File Appender writing the logs to a text file. Supports rolling of logs by size.
@@ -48,16 +53,6 @@ function fileAppender (file, layout, logSize, numBackups) {
   // push file to the stack of open handlers
   openFiles.push(logFile);
 
-  //close the file on process exit.
-  if (!listenerAtttached) {
-    listenerAtttached = true;
-    process.on('exit', function() {
-      openFiles.forEach(function (file) {
-        file.end();
-      });
-    });
-  }
-
   return function(loggingEvent) {
     logFile.write(layout(loggingEvent) + eol, "utf8");
   };
lib/appenders/multiprocess.js

@@ -16,20 +16,15 @@ function logServer(config) {
 function deserializeLoggingEvent(clientSocket, msg) {
   var loggingEvent;
   try {
     loggingEvent = JSON.parse(msg);
     loggingEvent.startTime = new Date(loggingEvent.startTime);
-    loggingEvent.level.toString = function levelToString() {
-      return loggingEvent.level.levelStr;
-    };
+    loggingEvent.level = log4js.levels.toLevel(loggingEvent.level.levelStr);
   } catch (e) {
     // JSON.parse failed, just log the contents probably a naughty.
     loggingEvent = {
       startTime: new Date(),
       categoryName: 'log4js',
-      level: { toString: function () {
-        return 'ERROR';
-      }
-      },
+      level: log4js.levels.ERROR,
       data: [ 'Unable to parse log:', msg ]
     };
   }
@@ -120,11 +115,11 @@ function createAppender(config) {
   }
 }
 
-function configure(config) {
+function configure(config, options) {
   var actualAppender;
   if (config.appender && config.mode === 'master') {
     log4js.loadAppender(config.appender.type);
-    actualAppender = log4js.appenderMakers[config.appender.type](config.appender);
+    actualAppender = log4js.appenderMakers[config.appender.type](config.appender, options);
     config.actualAppender = actualAppender;
   }
   return createAppender(config);
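For reference, a sketch of the wire format the log server now expects from clients, based on the updated multiprocess test later in this changeset (the field values here are illustrative):

```javascript
// One logging event per message, JSON-serialised and terminated by '__LOG4JS__'.
// The level travels as an object carrying levelStr; the server rebuilds a real
// Level with log4js.levels.toLevel(loggingEvent.level.levelStr).
var msg = JSON.stringify({
  startTime: new Date(),
  categoryName: 'cheese',
  level: { level: 10000, levelStr: 'DEBUG' },
  data: ['some debug']
}) + '__LOG4JS__';
```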
lib/layouts.js

@@ -10,8 +10,9 @@ var dateFormat = require('./date_format')
   , "coloured": function() { return colouredLayout; }
   , "pattern": function (config) {
       var pattern = config.pattern || undefined;
-      return patternLayout(pattern);
+      var tokens = config.tokens || undefined;
+      return patternLayout(pattern, tokens);
     }
 }
 , colours = {
     ALL: "grey"
@@ -24,7 +25,6 @@ var dateFormat = require('./date_format')
   , OFF: "grey"
 };
 
-
 function formatLogData(logData) {
   var output = ""
     , data = Array.isArray(logData) ? logData.slice() : Array.prototype.slice.call(arguments)
@@ -58,30 +58,36 @@ function formatLogData(logData) {
   return output;
 }
 
+var styles = {
+  //styles
+  'bold' : [1, 22],
+  'italic' : [3, 23],
+  'underline' : [4, 24],
+  'inverse' : [7, 27],
+  //grayscale
+  'white' : [37, 39],
+  'grey' : [90, 39],
+  'black' : [90, 39],
+  //colors
+  'blue' : [34, 39],
+  'cyan' : [36, 39],
+  'green' : [32, 39],
+  'magenta' : [35, 39],
+  'red' : [31, 39],
+  'yellow' : [33, 39]
+};
+
+function colorizeStart(style) {
+  return style ? '\033[' + styles[style][0] + 'm' : '';
+}
+function colorizeEnd(style) {
+  return style ? '\033[' + styles[style][1] + 'm' : '';
+}
 /**
  * Taken from masylum's fork (https://github.com/masylum/log4js-node)
  */
 function colorize (str, style) {
-  var styles = {
-    //styles
-    'bold' : [1, 22],
-    'italic' : [3, 23],
-    'underline' : [4, 24],
-    'inverse' : [7, 27],
-    //grayscale
-    'white' : [37, 39],
-    'grey' : [90, 39],
-    'black' : [90, 39],
-    //colors
-    'blue' : [34, 39],
-    'cyan' : [36, 39],
-    'green' : [32, 39],
-    'magenta' : [35, 39],
-    'red' : [31, 39],
-    'yellow' : [33, 39]
-  };
-  return style ? '\033[' + styles[style][0] + 'm' + str +
-    '\033[' + styles[style][1] + 'm' : str;
+  return colorizeStart(style) + str + colorizeEnd(style);
 }
 
 function timestampLevelAndCategory(loggingEvent, colour) {
@@ -134,12 +140,26 @@ function messagePassThroughLayout (loggingEvent) {
  * - %d date in various formats
  * - %% %
  * - %n newline
- * Takes a pattern string and returns a layout function.
+ * - %x{<tokenname>} add dynamic tokens to your log. Tokens are specified in the tokens parameter
+ * You can use %[ and %] to define a colored block.
+ *
+ * Tokens are specified as simple key:value objects.
+ * The key represents the token name whereas the value can be a string or function
+ * which is called to extract the value to put in the log message. If token is not
+ * found, it doesn't replace the field.
+ *
+ * A sample token would be: { "pid" : function() { return process.pid; } }
+ *
+ * Takes a pattern string, array of tokens and returns a layout function.
  * @param {String} Log format pattern String
+ * @param {object} map object of different tokens
  * @return {Function}
  * @author Stephan Strittmatter
+ * @author Jan Schmidle
  */
-function patternLayout (pattern) {
+function patternLayout (pattern, tokens) {
   var TTCC_CONVERSION_PATTERN = "%r %p %c - %m%n";
-  var regex = /%(-?[0-9]+)?(\.?[0-9]+)?([cdmnpr%])(\{([^\}]+)\})?|([^%]+)/;
+  var regex = /%(-?[0-9]+)?(\.?[0-9]+)?([\[\]cdmnprx%])(\{([^\}]+)\})?|([^%]+)/;
 
   pattern = pattern || TTCC_CONVERSION_PATTERN;
 
@@ -206,9 +226,26 @@ function patternLayout (pattern) {
       case "r":
         replacement = "" + loggingEvent.startTime.toLocaleTimeString();
         break;
+      case "[":
+        replacement = colorizeStart(colours[loggingEvent.level.toString()]);
+        break;
+      case "]":
+        replacement = colorizeEnd(colours[loggingEvent.level.toString()]);
+        break;
       case "%":
         replacement = "%";
         break;
+      case "x":
+        if(typeof(tokens[specifier]) !== 'undefined') {
+          if(typeof(tokens[specifier]) === 'function') {
+            replacement = tokens[specifier]();
+          } else {
+            replacement = tokens[specifier];
+          }
+        } else {
+          replacement = matchedString;
+        }
+        break;
       default:
         replacement = matchedString;
         break;
@@ -248,7 +285,6 @@ function patternLayout (pattern) {
 
   };
 
-
 module.exports = {
   basicLayout: basicLayout
   , messagePassThroughLayout: messagePassThroughLayout
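A small sketch of calling the pattern layout with custom tokens directly, mirroring the shapes used in the layouts test below (the require path and event fields are assumptions for illustration):

```javascript
var layouts = require('log4js/lib/layouts');

var layout = layouts.patternLayout('%p (%x{pid}) - %m', {
  pid: function() { return process.pid; } // a token value may be a string or a function
});

var output = layout({
  data: ['hello'],
  startTime: new Date(),
  categoryName: 'default',
  level: { toString: function() { return 'INFO'; } }
});
// output is something like: "INFO (12345) - hello"
```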
lib/log4js.js

@@ -240,8 +240,9 @@ function initReloadConfiguration(filename, options) {
 
 function configure(configurationFileOrObject, options) {
   var config = configurationFileOrObject;
+  config = config || process.env.LOG4JS_CONFIG;
   options = options || {};
 
   if (config === undefined || config === null || typeof(config) === 'string') {
     if (options.reloadSecs) {
       initReloadConfiguration(config, options);
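With this change the configuration path can come entirely from the environment; a sketch (paths illustrative):

```javascript
// Run as: LOG4JS_CONFIG=/etc/my-app/log4js.json node app.js
var log4js = require('log4js');
log4js.configure(); // no argument: falls back to process.env.LOG4JS_CONFIG
```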
lib/streams.js (268)
@@ -1,268 +0,0 @@
var util = require('util'),
    fs = require('fs'),
    path = require('path'),
    events = require('events'),
    async = require('async');

function debug(message) {
//  util.debug(message);
//  console.log(message);
}

function BufferedWriteStream(stream) {
  var that = this;
  this.stream = stream;
  this.buffer = [];
  this.canWrite = false;
  this.bytes = 0;

  this.stream.on("open", function() {
    that.canWrite = true;
    that.flushBuffer();
  });

  this.stream.on("error", function (err) {
    that.emit("error", err);
  });

  this.stream.on("drain", function() {
    that.canWrite = true;
    that.flushBuffer();
  });
}

util.inherits(BufferedWriteStream, events.EventEmitter);

Object.defineProperty(
  BufferedWriteStream.prototype,
  "fd",
  {
    get: function() { return this.stream.fd; },
    set: function(newFd) {
      this.stream.fd = newFd;
      this.bytes = 0;
    }
  }
);

Object.defineProperty(
  BufferedWriteStream.prototype,
  "bytesWritten",
  {
    get: function() { return this.bytes; }
  }
);

BufferedWriteStream.prototype.write = function(data, encoding) {
  this.buffer.push({ data: data, encoding: encoding });
  this.flushBuffer();
};

BufferedWriteStream.prototype.end = function(data, encoding) {
  if (data) {
    this.buffer.push({ data: data, encoding: encoding });
  }
  this.flushBufferEvenIfCannotWrite();
};

BufferedWriteStream.prototype.writeToStream = function(toWrite) {
  this.bytes += toWrite.data.length;
  this.canWrite = this.stream.write(toWrite.data, toWrite.encoding);
};

BufferedWriteStream.prototype.flushBufferEvenIfCannotWrite = function() {
  while (this.buffer.length > 0) {
    this.writeToStream(this.buffer.shift());
  }
};

BufferedWriteStream.prototype.flushBuffer = function() {
  while (this.buffer.length > 0 && this.canWrite) {
    this.writeToStream(this.buffer.shift());
  }
};

function BaseRollingFileStream(filename, options) {
  this.filename = filename;
  this.options = options || { encoding: 'utf8', mode: 0644, flags: 'a' };
  this.rolling = false;
  this.writesWhileRolling = [];
  this.currentSize = 0;

  function currentFileSize(file) {
    var fileSize = 0;
    try {
      fileSize = fs.statSync(file).size;
    } catch (e) {
      // file does not exist
    }
    return fileSize;
  }

  function throwErrorIfArgumentsAreNotValid() {
    if (!filename) {
      throw new Error("You must specify a filename");
    }
  }

  throwErrorIfArgumentsAreNotValid();

  BaseRollingFileStream.super_.call(this, this.filename, this.options);
  this.currentSize = currentFileSize(this.filename);
}
util.inherits(BaseRollingFileStream, fs.FileWriteStream);

BaseRollingFileStream.prototype.initRolling = function() {
  var that = this;

  function emptyRollingQueue() {
    debug("emptying the rolling queue");
    var toWrite;
    while ((toWrite = that.writesWhileRolling.shift())) {
      BaseRollingFileStream.super_.prototype.write.call(that, toWrite.data, toWrite.encoding);
      that.currentSize += toWrite.data.length;
      if (that.shouldRoll()) {
        that.flush();
        return true;
      }
    }
    that.flush();
    return false;
  }

  this.rolling = true;
  this.roll(this.filename, function() {
    that.currentSize = 0;
    that.rolling = emptyRollingQueue();
    if (that.rolling) {
      process.nextTick(function() { that.initRolling(); });
    }
  });
};

BaseRollingFileStream.prototype.write = function(data, encoding) {
  if (this.rolling) {
    this.writesWhileRolling.push({ data: data, encoding: encoding });
    return false;
  } else {
    var canWrite = BaseRollingFileStream.super_.prototype.write.call(this, data, encoding);
    this.currentSize += data.length;
    debug('current size = ' + this.currentSize);
    if (this.shouldRoll()) {
      this.initRolling();
    }
    return canWrite;
  }
};

BaseRollingFileStream.prototype.shouldRoll = function() {
  return false; // default behaviour is never to roll
};

BaseRollingFileStream.prototype.roll = function(filename, callback) {
  callback(); // default behaviour is not to do anything
};


function RollingFileStream (filename, size, backups, options) {
  this.size = size;
  this.backups = backups || 1;

  function throwErrorIfArgumentsAreNotValid() {
    if (!filename || !size || size <= 0) {
      throw new Error("You must specify a filename and file size");
    }
  }

  throwErrorIfArgumentsAreNotValid();

  RollingFileStream.super_.call(this, filename, options);
}
util.inherits(RollingFileStream, BaseRollingFileStream);

RollingFileStream.prototype.shouldRoll = function() {
  return this.currentSize >= this.size;
};

RollingFileStream.prototype.roll = function(filename, callback) {
  var that = this,
      nameMatcher = new RegExp('^' + path.basename(filename));

  function justTheseFiles (item) {
    return nameMatcher.test(item);
  }

  function index(filename_) {
    return parseInt(filename_.substring((path.basename(filename) + '.').length), 10) || 0;
  }

  function byIndex(a, b) {
    if (index(a) > index(b)) {
      return 1;
    } else if (index(a) < index(b) ) {
      return -1;
    } else {
      return 0;
    }
  }

  function increaseFileIndex (fileToRename, cb) {
    var idx = index(fileToRename);
    debug('Index of ' + fileToRename + ' is ' + idx);
    if (idx < that.backups) {
      //on windows, you can get a EEXIST error if you rename a file to an existing file
      //so, we'll try to delete the file we're renaming to first
      fs.unlink(filename + '.' + (idx+1), function (err) {
        //ignore err: if we could not delete, it's most likely that it doesn't exist
        debug('Renaming ' + fileToRename + ' -> ' + filename + '.' + (idx+1));
        fs.rename(path.join(path.dirname(filename), fileToRename), filename + '.' + (idx + 1), cb);
      });
    } else {
      cb();
    }
  }

  function renameTheFiles(cb) {
    //roll the backups (rename file.n to file.n+1, where n <= numBackups)
    debug("Renaming the old files");
    fs.readdir(path.dirname(filename), function (err, files) {
      async.forEachSeries(
        files.filter(justTheseFiles).sort(byIndex).reverse(),
        increaseFileIndex,
        cb
      );
    });
  }

  function openANewFile(cb) {
    debug("Opening a new file");
    fs.open(
      filename,
      that.options.flags,
      that.options.mode,
      function (err, fd) {
        debug("opened new file");
        var oldLogFileFD = that.fd;
        that.fd = fd;
        that.writable = true;
        fs.close(oldLogFileFD, cb);
      }
    );
  }

  debug("Starting roll");
  debug("Queueing up data until we've finished rolling");
  debug("Flushing underlying stream");
  this.flush();

  async.series([
    renameTheFiles,
    openANewFile
  ], callback);

};


exports.BaseRollingFileStream = BaseRollingFileStream;
exports.RollingFileStream = RollingFileStream;
exports.BufferedWriteStream = BufferedWriteStream;
lib/streams/BaseRollingFileStream.js (99, new file)
@@ -0,0 +1,99 @@
var fs = require('fs'),
    util = require('util');

function debug(message) {
//  console.log(message);
}

module.exports = BaseRollingFileStream;

function BaseRollingFileStream(filename, options) {

  debug("In BaseRollingFileStream");
  this.filename = filename;
  this.options = options || { encoding: 'utf8', mode: 0644, flags: 'a' };
  this.rolling = false;
  this.writesWhileRolling = [];
  this.currentSize = 0;
  this.rollBeforeWrite = false;

  function currentFileSize(file) {
    var fileSize = 0;
    try {
      fileSize = fs.statSync(file).size;
    } catch (e) {
      // file does not exist
    }
    return fileSize;
  }

  function throwErrorIfArgumentsAreNotValid() {
    if (!filename) {
      throw new Error("You must specify a filename");
    }
  }

  throwErrorIfArgumentsAreNotValid();
  debug("Calling BaseRollingFileStream.super");
  BaseRollingFileStream.super_.call(this, this.filename, this.options);
  this.currentSize = currentFileSize(this.filename);
}
util.inherits(BaseRollingFileStream, fs.FileWriteStream);

BaseRollingFileStream.prototype.initRolling = function() {
  var that = this;

  function emptyRollingQueue() {
    debug("emptying the rolling queue");
    var toWrite;
    while ((toWrite = that.writesWhileRolling.shift())) {
      BaseRollingFileStream.super_.prototype.write.call(that, toWrite.data, toWrite.encoding);
      that.currentSize += toWrite.data.length;
      if (that.shouldRoll()) {
        that.flush();
        return true;
      }
    }
    that.flush();
    return false;
  }

  this.rolling = true;
  this.roll(this.filename, function() {
    that.currentSize = 0;
    that.rolling = emptyRollingQueue();
    if (that.rolling) {
      process.nextTick(function() { that.initRolling(); });
    }
  });
};

BaseRollingFileStream.prototype.write = function(data, encoding) {
  var canWrite = false;
  if (this.rolling) {
    this.writesWhileRolling.push({ data: data, encoding: encoding });
  } else {
    if (this.rollBeforeWrite && this.shouldRoll()) {
      this.writesWhileRolling.push({ data: data, encoding: encoding });
      this.initRolling();
    } else {
      canWrite = BaseRollingFileStream.super_.prototype.write.call(this, data, encoding);
      this.currentSize += data.length;
      debug('current size = ' + this.currentSize);

      if (!this.rollBeforeWrite && this.shouldRoll()) {
        this.initRolling();
      }
    }
  }
  return canWrite;
};

BaseRollingFileStream.prototype.shouldRoll = function() {
  return false; // default behaviour is never to roll
};

BaseRollingFileStream.prototype.roll = function(filename, callback) {
  callback(); // default behaviour is not to do anything
};
lib/streams/BufferedWriteStream.js (78, new file)
@@ -0,0 +1,78 @@
var events = require('events'),
    Dequeue = require('dequeue'),
    util = require('util');

module.exports = BufferedWriteStream;

function BufferedWriteStream(stream) {
  var that = this;
  this.stream = stream;
  this.buffer = new Dequeue();
  this.canWrite = false;
  this.bytes = 0;

  this.stream.on("open", function() {
    that.canWrite = true;
    that.flushBuffer();
  });

  this.stream.on("error", function (err) {
    that.emit("error", err);
  });

  this.stream.on("drain", function() {
    that.canWrite = true;
    that.flushBuffer();
  });
}

util.inherits(BufferedWriteStream, events.EventEmitter);

Object.defineProperty(
  BufferedWriteStream.prototype,
  "fd",
  {
    get: function() { return this.stream.fd; },
    set: function(newFd) {
      this.stream.fd = newFd;
      this.bytes = 0;
    }
  }
);

Object.defineProperty(
  BufferedWriteStream.prototype,
  "bytesWritten",
  {
    get: function() { return this.bytes; }
  }
);

BufferedWriteStream.prototype.write = function(data, encoding) {
  this.buffer.push({ data: data, encoding: encoding });
  this.flushBuffer();
};

BufferedWriteStream.prototype.end = function(data, encoding) {
  if (data) {
    this.buffer.push({ data: data, encoding: encoding });
  }
  this.flushBufferEvenIfCannotWrite();
};

BufferedWriteStream.prototype.writeToStream = function(toWrite) {
  this.bytes += toWrite.data.length;
  this.canWrite = this.stream.write(toWrite.data, toWrite.encoding);
};

BufferedWriteStream.prototype.flushBufferEvenIfCannotWrite = function() {
  while (this.buffer.length > 0) {
    this.writeToStream(this.buffer.shift());
  }
};

BufferedWriteStream.prototype.flushBuffer = function() {
  while (this.buffer.length > 0 && this.canWrite) {
    this.writeToStream(this.buffer.shift());
  }
};
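A sketch of how these stream classes compose, mirroring what the dateFile appender above does (the require path and filename are illustrative):

```javascript
var streams = require('log4js/lib/streams');

// Buffer writes until the underlying date-rolling file stream has opened,
// then flush them through in order.
var logFile = new streams.BufferedWriteStream(
  new streams.DateRollingFileStream('logs/app.log', '.yyyy-MM-dd'));
logFile.write('a line of log output\n', 'utf8');
```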
lib/streams/DateRollingFileStream.js (89, new file)
@@ -0,0 +1,89 @@
var BaseRollingFileStream = require('./BaseRollingFileStream'),
    format = require('../date_format'),
    async = require('async'),
    fs = require('fs'),
    util = require('util');

module.exports = DateRollingFileStream;

function debug(message) {
//  console.log(message);
}

function DateRollingFileStream(filename, pattern, options, now) {
  debug("Now is " + now);
  if (pattern && typeof(pattern) === 'object') {
    now = options;
    options = pattern;
    pattern = null;
  }
  this.pattern = pattern || '.yyyy-MM-dd';
  this.now = now || Date.now;
  this.lastTimeWeWroteSomething = format.asString(this.pattern, new Date(this.now()));
  debug("this.now is " + this.now + ", now is " + now);

  DateRollingFileStream.super_.call(this, filename, options);
  this.rollBeforeWrite = true;
}

util.inherits(DateRollingFileStream, BaseRollingFileStream);

DateRollingFileStream.prototype.shouldRoll = function() {
  var lastTime = this.lastTimeWeWroteSomething,
      thisTime = format.asString(this.pattern, new Date(this.now()));

  debug("DateRollingFileStream.shouldRoll with now = " + this.now() + ", thisTime = " + thisTime + ", lastTime = " + lastTime);

  this.lastTimeWeWroteSomething = thisTime;
  this.previousTime = lastTime;

  return thisTime !== lastTime;
};

DateRollingFileStream.prototype.roll = function(filename, callback) {
  var that = this,
      newFilename = filename + this.previousTime;

  debug("Starting roll");
  debug("Queueing up data until we've finished rolling");
  debug("Flushing underlying stream");
  this.flush();

  async.series([
    deleteAnyExistingFile,
    renameTheCurrentFile,
    openANewFile
  ], callback);

  function deleteAnyExistingFile(cb) {
    //on windows, you can get a EEXIST error if you rename a file to an existing file
    //so, we'll try to delete the file we're renaming to first
    fs.unlink(newFilename, function (err) {
      //ignore err: if we could not delete, it's most likely that it doesn't exist
      cb();
    });
  }

  function renameTheCurrentFile(cb) {
    debug("Renaming the " + filename + " -> " + newFilename);
    fs.rename(filename, newFilename, cb);
  }

  function openANewFile(cb) {
    debug("Opening a new file");
    fs.open(
      filename,
      that.options.flags,
      that.options.mode,
      function (err, fd) {
        debug("opened new file");
        var oldLogFileFD = that.fd;
        that.fd = fd;
        that.writable = true;
        fs.close(oldLogFileFD, cb);
      }
    );
  }

};
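The optional `now` argument makes the clock injectable, which is how the test below forces a roll without waiting a day; a minimal sketch (filenames and dates illustrative):

```javascript
var DateRollingFileStream = require('log4js/lib/streams').DateRollingFileStream;

var fakeTime = new Date(2012, 8, 12).getTime();
var stream = new DateRollingFileStream('app.log', '.yyyy-MM-dd', null,
  function() { return fakeTime; }); // injected clock

stream.write('first\n', 'utf8');
fakeTime = new Date(2012, 8, 13).getTime(); // the next write sees a new date,
stream.write('second\n', 'utf8');           // so app.log is rolled to app.log.2012-09-12
```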
lib/streams/RollingFileStream.js (110, new file)
@@ -0,0 +1,110 @@
var BaseRollingFileStream = require('./BaseRollingFileStream'),
    util = require('util'),
    path = require('path'),
    fs = require('fs'),
    async = require('async');

function debug(message) {
//  util.debug(message);
//  console.log(message);
}

module.exports = RollingFileStream;

function RollingFileStream (filename, size, backups, options) {
  this.size = size;
  this.backups = backups || 1;

  function throwErrorIfArgumentsAreNotValid() {
    if (!filename || !size || size <= 0) {
      throw new Error("You must specify a filename and file size");
    }
  }

  throwErrorIfArgumentsAreNotValid();

  RollingFileStream.super_.call(this, filename, options);
}
util.inherits(RollingFileStream, BaseRollingFileStream);

RollingFileStream.prototype.shouldRoll = function() {
  return this.currentSize >= this.size;
};

RollingFileStream.prototype.roll = function(filename, callback) {
  var that = this,
      nameMatcher = new RegExp('^' + path.basename(filename));

  function justTheseFiles (item) {
    return nameMatcher.test(item);
  }

  function index(filename_) {
    return parseInt(filename_.substring((path.basename(filename) + '.').length), 10) || 0;
  }

  function byIndex(a, b) {
    if (index(a) > index(b)) {
      return 1;
    } else if (index(a) < index(b) ) {
      return -1;
    } else {
      return 0;
    }
  }

  function increaseFileIndex (fileToRename, cb) {
    var idx = index(fileToRename);
    debug('Index of ' + fileToRename + ' is ' + idx);
    if (idx < that.backups) {
      //on windows, you can get a EEXIST error if you rename a file to an existing file
      //so, we'll try to delete the file we're renaming to first
      fs.unlink(filename + '.' + (idx+1), function (err) {
        //ignore err: if we could not delete, it's most likely that it doesn't exist
        debug('Renaming ' + fileToRename + ' -> ' + filename + '.' + (idx+1));
        fs.rename(path.join(path.dirname(filename), fileToRename), filename + '.' + (idx + 1), cb);
      });
    } else {
      cb();
    }
  }

  function renameTheFiles(cb) {
    //roll the backups (rename file.n to file.n+1, where n <= numBackups)
    debug("Renaming the old files");
    fs.readdir(path.dirname(filename), function (err, files) {
      async.forEachSeries(
        files.filter(justTheseFiles).sort(byIndex).reverse(),
        increaseFileIndex,
        cb
      );
    });
  }

  function openANewFile(cb) {
    debug("Opening a new file");
    fs.open(
      filename,
      that.options.flags,
      that.options.mode,
      function (err, fd) {
        debug("opened new file");
        var oldLogFileFD = that.fd;
        that.fd = fd;
        that.writable = true;
        fs.close(oldLogFileFD, cb);
      }
    );
  }

  debug("Starting roll");
  debug("Queueing up data until we've finished rolling");
  debug("Flushing underlying stream");
  this.flush();

  async.series([
    renameTheFiles,
    openANewFile
  ], callback);

};
lib/streams/index.js (3, new file)
@@ -0,0 +1,3 @@
exports.BufferedWriteStream = require('./BufferedWriteStream');
exports.RollingFileStream = require('./RollingFileStream');
exports.DateRollingFileStream = require('./DateRollingFileStream');
package.json

@@ -1,6 +1,6 @@
 {
   "name": "log4js",
-  "version": "0.5.2",
+  "version": "0.5.8",
   "description": "Port of Log4js to work with node.",
   "keywords": [
     "logging",
@@ -17,7 +17,9 @@
   "bugs": {
     "url": "http://github.com/nomiddlename/log4js-node/issues"
   },
-  "engines": [ "node >=0.6" ],
+  "engines": {
+    "node": "~0.6||~0.8"
+  },
   "scripts": {
     "test": "vows"
   },
@@ -26,7 +28,8 @@
     "lib": "lib"
   },
   "dependencies": {
-    "async": "0.1.15"
+    "async": "0.1.15",
+    "dequeue": "1.0.3"
   },
   "devDependencies": {
     "vows": "0.6.2",
@@ -84,6 +84,48 @@ vows.describe('log4js configure').addBatch({
     'should add appender configure function to appenderMakers': function(log4js) {
       assert.isFunction(log4js.appenderMakers['some/other/external']);
-    }
+    },
+    'when configuration file loaded via LOG4JS_CONFIG environment variable': {
+      topic: function() {
+        process.env.LOG4JS_CONFIG = 'some/path/to/mylog4js.json';
+        var fileRead = 0,
+            modulePath = 'some/path/to/mylog4js.json',
+            pathsChecked = [],
+            mtime = new Date(),
+            fakeFS = {
+              config: { appenders: [ { type: 'console', layout: { type: 'messagePassThrough' } } ],
+                        levels: { 'a-test' : 'INFO' } },
+              readdirSync: function(dir) {
+                return require('fs').readdirSync(dir);
+              },
+              readFileSync: function (file, encoding) {
+                fileRead += 1;
+                assert.isString(file);
+                assert.equal(file, modulePath);
+                assert.equal(encoding, 'utf8');
+                return JSON.stringify(fakeFS.config);
+              },
+              statSync: function (path) {
+                pathsChecked.push(path);
+                if (path === modulePath) {
+                  return { mtime: mtime };
+                } else {
+                  throw new Error("no such file");
+                }
+              }
+            },
+            log4js = sandbox.require('../lib/log4js',
+              {
+                requires: {
+                  'fs': fakeFS,
+                }
+              });
+        delete process.env.LOG4JS_CONFIG;
+        return fileRead;
+      },
+      'should load the specified local configuration file' : function(fileRead) {
+        assert.equal(fileRead, 1);
+      }
+    }
   }
 }
 }).exportTo(module);
test/dateFileAppender-test.js (98, new file)
@@ -0,0 +1,98 @@
var vows = require('vows'),
    assert = require('assert'),
    path = require('path'),
    fs = require('fs'),
    log4js = require('../lib/log4js');

function removeFile(filename) {
  return function() {
    fs.unlink(path.join(__dirname, filename), function(err) {
      if (err) {
        console.log("Could not delete ", filename, err);
      }
    });
  };
}

vows.describe('../lib/appenders/dateFile').addBatch({
  'appender': {
    'adding multiple dateFileAppenders': {
      topic: function () {
        var listenersCount = process.listeners('exit').length,
            dateFileAppender = require('../lib/appenders/dateFile'),
            count = 5,
            logfile;

        while (count--) {
          logfile = path.join(__dirname, 'datefa-default-test' + count + '.log');
          log4js.addAppender(dateFileAppender.appender(logfile));
        }

        return listenersCount;
      },
      teardown: function() {
        removeFile('datefa-default-test0.log')();
        removeFile('datefa-default-test1.log')();
        removeFile('datefa-default-test2.log')();
        removeFile('datefa-default-test3.log')();
        removeFile('datefa-default-test4.log')();
      },

      'should only add one `exit` listener': function (initialCount) {
        assert.equal(process.listeners('exit').length, initialCount + 1);
      }
    },

    'with default settings': {
      topic: function() {
        var that = this,
            testFile = path.join(__dirname, 'date-appender-default.log'),
            appender = require('../lib/appenders/dateFile').appender(testFile),
            logger = log4js.getLogger('default-settings');
        log4js.clearAppenders();
        log4js.addAppender(appender, 'default-settings');

        logger.info("This should be in the file.");

        setTimeout(function() {
          fs.readFile(testFile, "utf8", that.callback);
        }, 100);

      },
      teardown: removeFile('date-appender-default.log'),

      'should write to the file': function(contents) {
        assert.include(contents, 'This should be in the file');
      },

      'should use the basic layout': function(contents) {
        assert.match(contents, /\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}\] \[INFO\] default-settings - /);
      }
    }

  }
}).addBatch({
  'configure': {
    'with dateFileAppender': {
      topic: function() {
        var log4js = require('../lib/log4js')
        , logger;
        //this config file defines one file appender (to ./date-file-test.log)
        //and sets the log level for "tests" to WARN
        log4js.configure('test/with-dateFile.json');
        logger = log4js.getLogger('tests');
        logger.info('this should not be written to the file');
        logger.warn('this should be written to the file');

        fs.readFile(path.join(__dirname, 'date-file-test.log'), 'utf8', this.callback);
      },
      teardown: removeFile('date-file-test.log'),

      'should load appender configuration from a json file': function(err, contents) {
        assert.include(contents, 'this should be written to the file\n');
        assert.equal(contents.indexOf('this should not be written to the file'), -1);
      }
    }

  }
}).exportTo(module);
@@ -4,9 +4,10 @@ assert = require('assert');
 //used for patternLayout tests.
 function test(args, pattern, value) {
   var layout = args[0]
-    , event = args[1];
+    , event = args[1]
+    , tokens = args[2];
 
-  assert.equal(layout(pattern)(event), value);
+  assert.equal(layout(pattern, tokens)(event), value);
 }
 
 vows.describe('log4js layouts').addBatch({
@@ -175,8 +176,12 @@ vows.describe('log4js layouts').addBatch({
         level: {
           toString: function() { return "DEBUG"; }
         }
-      }, layout = require('../lib/layouts').patternLayout;
-      return [layout, event];
+      }, layout = require('../lib/layouts').patternLayout
+      , tokens = {
+          testString: 'testStringToken',
+          testFunction: function() { return 'testFunctionToken'; }
+        };
+      return [layout, event, tokens];
     },
 
     'should default to "time logLevel loggerName - message"': function(args) {
@@ -243,9 +248,21 @@ vows.describe('log4js layouts').addBatch({
       test(args, '%-6p', 'DEBUG ');
       test(args, '%-8p', 'DEBUG   ');
       test(args, '%-10p', 'DEBUG     ');
-    }
+    },
+    '%[%r%] should output colored time': function(args) {
+      test(args, '%[%r%]', '\033[36m14:18:30\033[39m');
+    },
+    '%x{testString} should output the string stored in tokens': function(args) {
+      test(args, '%x{testString}', 'testStringToken');
+    },
+    '%x{testFunction} should output the result of the function stored in tokens': function(args) {
+      test(args, '%x{testFunction}', 'testFunctionToken');
+    },
+    '%x{doesNotExist} should output the string stored in tokens': function(args) {
+      test(args, '%x{doesNotExist}', '%x{doesNotExist}');
+    },
+    '%x should output the string stored in tokens': function(args) {
+      test(args, '%x', '%x');
+    },
   }
 
 }).export(module);
@@ -115,8 +115,7 @@ vows.describe('log4js').addBatch({
       "type" : "file",
       "filename" : "cheesy-wotsits.log",
       "maxLogSize" : 1024,
-      "backups" : 3,
-      "pollInterval" : 15
+      "backups" : 3
     }
   ]
 };
@@ -181,14 +181,14 @@ vows.describe('Multiprocess Appender').addBatch({
     },
     'when a client connects': {
       topic: function(net) {
-        var logString = JSON.stringify({ level: 'DEBUG', data: ['some debug']}) + '__LOG4JS__';
+        var logString = JSON.stringify({ level: { level: 10000, levelStr: 'DEBUG' }, data: ['some debug']}) + '__LOG4JS__';
 
         net.cbs['connect']();
-        net.cbs['data'](JSON.stringify({ level: 'ERROR', data: ['an error message'] }) + '__LOG4JS__');
+        net.cbs['data'](JSON.stringify({ level: { level: 40000, levelStr: 'ERROR' }, data: ['an error message'] }) + '__LOG4JS__');
         net.cbs['data'](logString.substring(0, 10));
         net.cbs['data'](logString.substring(10));
         net.cbs['data'](logString + logString + logString);
-        net.cbs['end'](JSON.stringify({ level: 'FATAL', data: ["that's all folks"] }) + '__LOG4JS__');
+        net.cbs['end'](JSON.stringify({ level: { level: 50000, levelStr: 'FATAL' }, data: ["that's all folks"] }) + '__LOG4JS__');
         net.cbs['data']('bad message__LOG4JS__');
         return net;
       },
test/streams/DateRollingFileStream-test.js (123, new file)
@@ -0,0 +1,123 @@
var vows = require('vows'),
    assert = require('assert'),
    fs = require('fs'),
    DateRollingFileStream = require('../../lib/streams').DateRollingFileStream,
    testTime = new Date(2012, 8, 12, 10, 37, 11);

function cleanUp(filename) {
  return function() {
    fs.unlink(filename);
  };
}

function now() {
  return testTime.getTime();
}

vows.describe('DateRollingFileStream').addBatch({
  'arguments': {
    topic: new DateRollingFileStream(__dirname + '/test-date-rolling-file-stream-1', 'yyyy-mm-dd.hh'),
    teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-1'),

    'should take a filename and a pattern and return a FileWriteStream': function(stream) {
      assert.equal(stream.filename, __dirname + '/test-date-rolling-file-stream-1');
      assert.equal(stream.pattern, 'yyyy-mm-dd.hh');
      assert.instanceOf(stream, fs.FileWriteStream);
    },
    'with default settings for the underlying stream': function(stream) {
      assert.equal(stream.mode, 420);
      assert.equal(stream.flags, 'a');
      assert.equal(stream.encoding, 'utf8');
    }
  },

  'default arguments': {
    topic: new DateRollingFileStream(__dirname + '/test-date-rolling-file-stream-2'),
    teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-2'),

    'pattern should be .yyyy-MM-dd': function(stream) {
      assert.equal(stream.pattern, '.yyyy-MM-dd');
    }
  },

  'with stream arguments': {
    topic: new DateRollingFileStream(__dirname + '/test-date-rolling-file-stream-3', 'yyyy-MM-dd', { mode: 0666 }),
    teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-3'),

    'should pass them to the underlying stream': function(stream) {
      assert.equal(stream.mode, 0666);
    }
  },

  'with stream arguments but no pattern': {
    topic: new DateRollingFileStream(__dirname + '/test-date-rolling-file-stream-4', { mode: 0666 }),
    teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-4'),

    'should pass them to the underlying stream': function(stream) {
      assert.equal(stream.mode, 0666);
    },
    'should use default pattern': function(stream) {
      assert.equal(stream.pattern, '.yyyy-MM-dd');
    }
  },

  'with a pattern of .yyyy-MM-dd': {
    topic: function() {
      var that = this,
          stream = new DateRollingFileStream(__dirname + '/test-date-rolling-file-stream-5', '.yyyy-MM-dd', null, now);
      stream.on("open", function() {
        stream.write("First message\n");
        //wait for the file system to catch up with us
        that.callback(null, stream);
      });
    },
    teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-5'),

    'should create a file with the base name': {
      topic: function(stream) {
        fs.readFile(__dirname + '/test-date-rolling-file-stream-5', this.callback);
      },
      'file should contain first message': function(result) {
        assert.equal(result.toString(), "First message\n");
      }
    },

    'when the day changes': {
      topic: function(stream) {
        testTime = new Date(2012, 8, 13, 0, 10, 12);
        stream.write("Second message\n");
        setTimeout(this.callback, 100);
      },
      teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-5.2012-09-12'),


      'the number of files': {
        topic: function() {
          fs.readdir(__dirname, this.callback);
        },
        'should be two': function(files) {
          assert.equal(files.filter(function(file) { return file.indexOf('test-date-rolling-file-stream-5') > -1; }).length, 2);
        }
      },

      'the file without a date': {
        topic: function() {
          fs.readFile(__dirname + '/test-date-rolling-file-stream-5', this.callback);
        },
        'should contain the second message': function(contents) {
          assert.equal(contents.toString(), "Second message\n");
        }
      },

      'the file with the date': {
        topic: function() {
          fs.readFile(__dirname + '/test-date-rolling-file-stream-5.2012-09-12', this.callback);
        },
        'should contain the first message': function(contents) {
          assert.equal(contents.toString(), "First message\n");
        }
      }
    }
  }

}).exportTo(module);
@@ -1,7 +1,7 @@
 var vows = require('vows')
 , assert = require('assert')
 , events = require('events')
-, BufferedWriteStream = require('../lib/streams').BufferedWriteStream;
+, BufferedWriteStream = require('../../lib/streams').BufferedWriteStream;
 
 function FakeStream() {
   this.writes = [];
@@ -127,4 +127,4 @@ vows.describe('BufferedWriteStream').addBatch({
   }
 }
 
-}).exportTo(module);
+}).exportTo(module);
@@ -2,7 +2,7 @@ var vows = require('vows')
 , assert = require('assert')
 , events = require('events')
 , fs = require('fs')
-, RollingFileStream = require('../lib/streams').RollingFileStream;
+, RollingFileStream = require('../../lib/streams').RollingFileStream;
 
 function remove(filename) {
   try {
@@ -19,11 +19,11 @@ vows.describe('RollingFileStream').addBatch({
       return new RollingFileStream("test-rolling-file-stream", 1024, 5);
     },
     'should take a filename, file size in bytes, number of backups as arguments and return a FileWriteStream': function(stream) {
-        assert.instanceOf(stream, fs.FileWriteStream);
-        assert.equal(stream.filename, "test-rolling-file-stream");
-        assert.equal(stream.size, 1024);
-        assert.equal(stream.backups, 5);
-    },
+      assert.instanceOf(stream, fs.FileWriteStream);
+      assert.equal(stream.filename, "test-rolling-file-stream");
+      assert.equal(stream.size, 1024);
+      assert.equal(stream.backups, 5);
+    },
     'with default settings for the underlying stream': function(stream) {
       assert.equal(stream.mode, 420);
       assert.equal(stream.flags, 'a');
test/with-dateFile.json (17, new file)
@@ -0,0 +1,17 @@
{
  "appenders": [
    {
      "category": "tests",
      "type": "dateFile",
      "filename": "test/date-file-test.log",
      "pattern": "-from-MM-dd",
      "layout": {
        "type": "messagePassThrough"
      }
    }
  ],

  "levels": {
    "tests": "WARN"
  }
}
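Given the rolling scheme above (rolled name = filename + formatted previous date), this pattern would leave files along these lines once the date changes (dates illustrative):

```javascript
// test/date-file-test.log              <- current log file
// test/date-file-test.log-from-09-12   <- rolled copy from the previous day
```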
test/with-log-rolling.json

@@ -4,8 +4,7 @@
       "type": "file",
       "filename": "tmp-test.log",
       "maxLogSize": 1024,
-      "backups": 3,
-      "pollInterval": 15
+      "backups": 3
     }
   ]
 }