Compare commits
81 Commits
v0.5.0
...
isaacg-alw
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
936ad4da8e | ||
|
|
097ae3d7f1 | ||
|
|
04de4ed8d3 | ||
|
|
29b02921b6 | ||
|
|
48ed5d1222 | ||
|
|
7844b0d2e4 | ||
|
|
8b49ba9f3d | ||
|
|
ed7462885f | ||
|
|
36c5175a55 | ||
|
|
22160f90b3 | ||
|
|
73437ecb40 | ||
|
|
107e33c0d1 | ||
|
|
6352632fb2 | ||
|
|
0544342e9f | ||
|
|
1d1153d32f | ||
|
|
e58cf201ca | ||
|
|
83271e47fc | ||
|
|
f3271a3997 | ||
|
|
4b7cf589a2 | ||
|
|
c8f401c47d | ||
|
|
ecbf41bc83 | ||
|
|
65e490cbd2 | ||
|
|
5e242c9dc9 | ||
|
|
50eefcc701 | ||
|
|
8e53c6213e | ||
|
|
a15a628311 | ||
|
|
b75e3660f4 | ||
|
|
22da6226e5 | ||
|
|
a3bdac8e14 | ||
|
|
af428c5669 | ||
|
|
5c75ba9468 | ||
|
|
bec0d05847 | ||
|
|
e4bf405f20 | ||
|
|
95568f352b | ||
|
|
6da6f3c90e | ||
|
|
7f57d14e70 | ||
|
|
f478793da3 | ||
|
|
ec2f8fec3b | ||
|
|
0167c84ea5 | ||
|
|
3e1a27e522 | ||
|
|
8b42e46071 | ||
|
|
4a7a90ed53 | ||
|
|
a9307fd6da | ||
|
|
4739c65c68 | ||
|
|
892181f88f | ||
|
|
bdfa7f9a9b | ||
|
|
ad63b801f7 | ||
|
|
2bfad6362a | ||
|
|
2b889fe776 | ||
|
|
9ac61e37f4 | ||
|
|
185f343e68 | ||
|
|
be1272cd7c | ||
|
|
cbc1dd32f9 | ||
|
|
a6fb26efb1 | ||
|
|
012b0d5ed7 | ||
|
|
de72005e7e | ||
|
|
c6a0e58409 | ||
|
|
f832a2ba79 | ||
|
|
3f10b68c30 | ||
|
|
54c311842c | ||
|
|
f948b5f5cd | ||
|
|
54e420eb58 | ||
|
|
40ba24a55d | ||
|
|
e3a20a1746 | ||
|
|
7a02f39921 | ||
|
|
b6ba3bce00 | ||
|
|
638ce187bb | ||
|
|
3cbae96a97 | ||
|
|
a33e48cb07 | ||
|
|
df491c0b14 | ||
|
|
6ff1a2499f | ||
|
|
ce2d7df8df | ||
|
|
1b12265800 | ||
|
|
32e9045334 | ||
|
|
1aed671137 | ||
|
|
68b47dd51c | ||
|
|
8f9b4444f6 | ||
|
|
e49f7107fb | ||
|
|
077302c772 | ||
|
|
6f0dfa0c5f | ||
|
|
82a6bee331 |
@@ -1,4 +1,5 @@
|
|||||||
language: node_js
|
language: node_js
|
||||||
node_js:
|
node_js:
|
||||||
- 0.6
|
- "0.10"
|
||||||
- 0.7
|
- "0.8"
|
||||||
|
|
||||||
|
|||||||
154
README.md
154
README.md
@@ -17,6 +17,16 @@ Out of the box it supports the following features:
|
|||||||
* configurable log message layout/patterns
|
* configurable log message layout/patterns
|
||||||
* different log levels for different log categories (make some parts of your app log as DEBUG, others only ERRORS, etc.)
|
* different log levels for different log categories (make some parts of your app log as DEBUG, others only ERRORS, etc.)
|
||||||
|
|
||||||
|
NOTE: from log4js 0.5 onwards you'll need to explicitly enable replacement of node's console.log functions. Do this either by calling `log4js.replaceConsole()` or configuring with an object or json file like this:
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
{
|
||||||
|
appenders: [
|
||||||
|
{ type: "console" }
|
||||||
|
],
|
||||||
|
replaceConsole: true
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
## installation
|
## installation
|
||||||
|
|
||||||
@@ -26,101 +36,105 @@ npm install log4js
|
|||||||
## usage
|
## usage
|
||||||
|
|
||||||
Minimalist version:
|
Minimalist version:
|
||||||
|
```javascript
|
||||||
var log4js = require('log4js');
|
var log4js = require('log4js');
|
||||||
var logger = log4js.getLogger();
|
var logger = log4js.getLogger();
|
||||||
logger.debug("Some debug messages");
|
logger.debug("Some debug messages");
|
||||||
|
```
|
||||||
By default, log4js outputs to stdout with the coloured layout (thanks to [masylum](http://github.com/masylum)), so for the above you would see:
|
By default, log4js outputs to stdout with the coloured layout (thanks to [masylum](http://github.com/masylum)), so for the above you would see:
|
||||||
|
```bash
|
||||||
|
[2010-01-17 11:43:37.987] [DEBUG] [default] - Some debug messages
|
||||||
|
```
|
||||||
|
See example.js for a full example, but here's a snippet (also in fromreadme.js):
|
||||||
|
```javascript
|
||||||
|
var log4js = require('log4js');
|
||||||
|
//console log is loaded by default, so you won't normally need to do this
|
||||||
|
//log4js.loadAppender('console');
|
||||||
|
log4js.loadAppender('file');
|
||||||
|
//log4js.addAppender(log4js.appenders.console());
|
||||||
|
log4js.addAppender(log4js.appenders.file('logs/cheese.log'), 'cheese');
|
||||||
|
|
||||||
[2010-01-17 11:43:37.987] [DEBUG] [default] - Some debug messages
|
var logger = log4js.getLogger('cheese');
|
||||||
|
logger.setLevel('ERROR');
|
||||||
See example.js:
|
|
||||||
|
|
||||||
var log4js = require('log4js');
|
|
||||||
log4js.loadAppender('console');
|
|
||||||
log4js.loadAppender('file');
|
|
||||||
log4js.addAppender(log4js.appenders.console());
|
|
||||||
log4js.addAppender(log4js.appenders.file('logs/cheese.log'), 'cheese');
|
|
||||||
|
|
||||||
var logger = log4js.getLogger('cheese');
|
|
||||||
logger.setLevel('ERROR');
|
|
||||||
|
|
||||||
logger.trace('Entering cheese testing');
|
|
||||||
logger.debug('Got cheese.');
|
|
||||||
logger.info('Cheese is Gouda.');
|
|
||||||
logger.warn('Cheese is quite smelly.');
|
|
||||||
logger.error('Cheese is too ripe!');
|
|
||||||
logger.fatal('Cheese was breeding ground for listeria.');
|
|
||||||
|
|
||||||
|
logger.trace('Entering cheese testing');
|
||||||
|
logger.debug('Got cheese.');
|
||||||
|
logger.info('Cheese is Gouda.');
|
||||||
|
logger.warn('Cheese is quite smelly.');
|
||||||
|
logger.error('Cheese is too ripe!');
|
||||||
|
logger.fatal('Cheese was breeding ground for listeria.');
|
||||||
|
```
|
||||||
Output:
|
Output:
|
||||||
|
```bash
|
||||||
[2010-01-17 11:43:37.987] [ERROR] cheese - Cheese is too ripe!
|
[2010-01-17 11:43:37.987] [ERROR] cheese - Cheese is too ripe!
|
||||||
[2010-01-17 11:43:37.990] [FATAL] cheese - Cheese was breeding ground for listeria.
|
[2010-01-17 11:43:37.990] [FATAL] cheese - Cheese was breeding ground for listeria.
|
||||||
|
```
|
||||||
The first 5 lines of the code above could also be written as:
|
The first 5 lines of the code above could also be written as:
|
||||||
|
```javascript
|
||||||
var log4js = require('log4js');
|
var log4js = require('log4js');
|
||||||
log4js.configure({
|
log4js.configure({
|
||||||
appenders: [
|
appenders: [
|
||||||
{ type: 'console' },
|
{ type: 'console' },
|
||||||
{ type: 'file', filename: 'logs/cheese.log', category: 'cheese' }
|
{ type: 'file', filename: 'logs/cheese.log', category: 'cheese' }
|
||||||
]
|
]
|
||||||
});
|
});
|
||||||
|
```
|
||||||
|
|
||||||
## configuration
|
## configuration
|
||||||
|
|
||||||
You can configure the appenders and log levels manually (as above), or provide a
|
You can configure the appenders and log levels manually (as above), or provide a
|
||||||
configuration file (`log4js.configure('path/to/file.json')`), or a configuration object.
|
configuration file (`log4js.configure('path/to/file.json')`), or a configuration object. The
|
||||||
|
configuration file location may also be specified via the environment variable
|
||||||
|
LOG4JS_CONFIG (`export LOG4JS_CONFIG=path/to/file.json`).
|
||||||
An example file can be found in `test/log4js.json`. An example config file with log rolling is in `test/with-log-rolling.json`.
|
An example file can be found in `test/log4js.json`. An example config file with log rolling is in `test/with-log-rolling.json`.
|
||||||
By default, the configuration file is checked for changes every 60 seconds, and if changed, reloaded. This allows changes to logging levels to occur without restarting the application.
|
By default, the configuration file is checked for changes every 60 seconds, and if changed, reloaded. This allows changes to logging levels to occur without restarting the application.
|
||||||
|
|
||||||
To turn off configuration file change checking, configure with:
|
To turn off configuration file change checking, configure with:
|
||||||
|
|
||||||
var log4js = require('log4js');
|
```javascript
|
||||||
log4js.configure('my_log4js_configuration.json', {});
|
var log4js = require('log4js');
|
||||||
|
log4js.configure('my_log4js_configuration.json', {});
|
||||||
|
```
|
||||||
To specify a different period:
|
To specify a different period:
|
||||||
|
|
||||||
log4js.configure('file.json', { reloadSecs: 300 });
|
```javascript
|
||||||
|
log4js.configure('file.json', { reloadSecs: 300 });
|
||||||
|
```
|
||||||
For FileAppender you can also pass the path to the log directory as an option where all your log files would be stored.
|
For FileAppender you can also pass the path to the log directory as an option where all your log files would be stored.
|
||||||
|
|
||||||
log4js.configure('my_log4js_configuration.json', { cwd: '/absolute/path/to/log/dir' });
|
```javascript
|
||||||
|
log4js.configure('my_log4js_configuration.json', { cwd: '/absolute/path/to/log/dir' });
|
||||||
|
```
|
||||||
If you have already defined an absolute path for one of the FileAppenders in the configuration file, you could add a "absolute": true to the particular FileAppender to override the cwd option passed. Here is an example configuration file:
|
If you have already defined an absolute path for one of the FileAppenders in the configuration file, you could add a "absolute": true to the particular FileAppender to override the cwd option passed. Here is an example configuration file:
|
||||||
|
```json
|
||||||
#### my_log4js_configuration.json ####
|
#### my_log4js_configuration.json ####
|
||||||
|
{
|
||||||
|
"appenders": [
|
||||||
{
|
{
|
||||||
"appenders": [
|
"type": "file",
|
||||||
{
|
"filename": "relative/path/to/log_file.log",
|
||||||
"type": "file",
|
"maxLogSize": 20480,
|
||||||
"filename": "relative/path/to/log_file.log",
|
"backups": 3,
|
||||||
"maxLogSize": 20480,
|
"category": "relative-logger"
|
||||||
"backups": 3,
|
},
|
||||||
"pollInterval": 15,
|
{
|
||||||
"category": "relative-logger"
|
"type": "file",
|
||||||
},
|
"absolute": true,
|
||||||
{
|
"filename": "/absolute/path/to/log_file.log",
|
||||||
"type": "file",
|
"maxLogSize": 20480,
|
||||||
"absolute": true,
|
"backups": 10,
|
||||||
"filename": "/absolute/path/to/log_file.log",
|
"category": "absolute-logger"
|
||||||
"maxLogSize": 20480,
|
|
||||||
"backups": 10,
|
|
||||||
"pollInterval": 15,
|
|
||||||
"category": "absolute-logger"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
}
|
||||||
|
]
|
||||||
Documentation for most of the core appenders can be found on the [wiki](wiki/Appenders), otherwise take a look at the tests and the examples.
|
}
|
||||||
|
```
|
||||||
|
Documentation for most of the core appenders can be found on the [wiki](https://github.com/nomiddlename/log4js-node/wiki/Appenders), otherwise take a look at the tests and the examples.
|
||||||
|
|
||||||
## Documentation
|
## Documentation
|
||||||
See the [wiki](wiki). Improve the [wiki](wiki), please.
|
See the [wiki](https://github.com/nomiddlename/log4js-node/wiki). Improve the [wiki](https://github.com/nomiddlename/log4js-node/wiki), please.
|
||||||
|
|
||||||
## Contributing
|
## Contributing
|
||||||
Contributions welcome, but take a look at the [rules](wiki/Contributing) first.
|
Contributions welcome, but take a look at the [rules](https://github.com/nomiddlename/log4js-node/wiki/Contributing) first.
|
||||||
|
|
||||||
## License
|
## License
|
||||||
|
|
||||||
|
|||||||
22
example.js
22
example.js
@@ -1,22 +0,0 @@
|
|||||||
var log4js = require('./lib/log4js');
|
|
||||||
//log the cheese logger messages to a file, and the console ones as well.
|
|
||||||
log4js.addAppender(log4js.fileAppender('cheese.log'), 'cheese', 'console');
|
|
||||||
|
|
||||||
var logger = log4js.getLogger('cheese');
|
|
||||||
//only errors and above get logged.
|
|
||||||
logger.setLevel('ERROR');
|
|
||||||
|
|
||||||
//console logging methds have been replaced with log4js ones.
|
|
||||||
console.error("AAArgh! Something went wrong", { some: "otherObject", useful_for: "debug purposes" });
|
|
||||||
|
|
||||||
//these will not appear (logging level beneath error)
|
|
||||||
logger.trace('Entering cheese testing');
|
|
||||||
logger.debug('Got cheese.');
|
|
||||||
logger.info('Cheese is Gouda.');
|
|
||||||
logger.warn('Cheese is quite smelly.');
|
|
||||||
//these end up on the console and in cheese.log
|
|
||||||
logger.error('Cheese %s is too ripe!', "gouda");
|
|
||||||
logger.fatal('Cheese was breeding ground for listeria.');
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
45
examples/example-socket.js
Normal file
45
examples/example-socket.js
Normal file
@@ -0,0 +1,45 @@
|
|||||||
|
var log4js = require('./lib/log4js')
|
||||||
|
, cluster = require('cluster')
|
||||||
|
, numCPUs = require('os').cpus().length
|
||||||
|
, i = 0;
|
||||||
|
|
||||||
|
if (cluster.isMaster) {
|
||||||
|
log4js.configure({
|
||||||
|
appenders: [
|
||||||
|
{
|
||||||
|
type: "multiprocess",
|
||||||
|
mode: "master",
|
||||||
|
appender: {
|
||||||
|
type: "console"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
});
|
||||||
|
|
||||||
|
console.info("Master creating %d workers", numCPUs);
|
||||||
|
for (i=0; i < numCPUs; i++) {
|
||||||
|
cluster.fork();
|
||||||
|
}
|
||||||
|
|
||||||
|
cluster.on('death', function(worker) {
|
||||||
|
console.info("Worker %d died.", worker.pid);
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
log4js.configure({
|
||||||
|
appenders: [
|
||||||
|
{
|
||||||
|
type: "multiprocess",
|
||||||
|
mode: "worker"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
});
|
||||||
|
var logger = log4js.getLogger('example-socket');
|
||||||
|
|
||||||
|
console.info("Worker %d started.", process.pid);
|
||||||
|
for (i=0; i < 1000; i++) {
|
||||||
|
logger.info("Worker %d - logging something %d", process.pid, i);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
58
examples/example.js
Normal file
58
examples/example.js
Normal file
@@ -0,0 +1,58 @@
|
|||||||
|
var log4js = require('../lib/log4js');
|
||||||
|
//log the cheese logger messages to a file, and the console ones as well.
|
||||||
|
log4js.configure({
|
||||||
|
appenders: [
|
||||||
|
{
|
||||||
|
type: "file",
|
||||||
|
filename: "cheese.log",
|
||||||
|
category: [ 'cheese','console' ]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
type: "console"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
replaceConsole: true
|
||||||
|
});
|
||||||
|
|
||||||
|
//to add an appender programmatically, and without clearing other appenders
|
||||||
|
//loadAppender is only necessary if you haven't already configured an appender of this type
|
||||||
|
log4js.loadAppender('file');
|
||||||
|
log4js.addAppender(log4js.appenders.file('pants.log'), 'pants');
|
||||||
|
//a custom logger outside of the log4js/lib/appenders directory can be accessed like so
|
||||||
|
//log4js.loadAppender('what/you/would/put/in/require');
|
||||||
|
//log4js.addAppender(log4js.appenders['what/you/would/put/in/require'](args));
|
||||||
|
//or through configure as:
|
||||||
|
//log4js.configure({
|
||||||
|
// appenders: [ { type: 'what/you/would/put/in/require', otherArgs: 'blah' } ]
|
||||||
|
//});
|
||||||
|
|
||||||
|
var logger = log4js.getLogger('cheese');
|
||||||
|
//only errors and above get logged.
|
||||||
|
//you can also set this log level in the config object
|
||||||
|
//via the levels field.
|
||||||
|
logger.setLevel('ERROR');
|
||||||
|
|
||||||
|
//console logging methods have been replaced with log4js ones.
|
||||||
|
//so this will get coloured output on console, and appear in cheese.log
|
||||||
|
console.error("AAArgh! Something went wrong", { some: "otherObject", useful_for: "debug purposes" });
|
||||||
|
|
||||||
|
//these will not appear (logging level beneath error)
|
||||||
|
logger.trace('Entering cheese testing');
|
||||||
|
logger.debug('Got cheese.');
|
||||||
|
logger.info('Cheese is Gouda.');
|
||||||
|
logger.warn('Cheese is quite smelly.');
|
||||||
|
//these end up on the console and in cheese.log
|
||||||
|
logger.error('Cheese %s is too ripe!', "gouda");
|
||||||
|
logger.fatal('Cheese was breeding ground for listeria.');
|
||||||
|
|
||||||
|
//these don't end up in cheese.log, but will appear on the console
|
||||||
|
var anotherLogger = log4js.getLogger('another');
|
||||||
|
anotherLogger.debug("Just checking");
|
||||||
|
|
||||||
|
//one for pants.log
|
||||||
|
//will also go to console, since that's configured for all categories
|
||||||
|
var pantsLog = log4js.getLogger('pants');
|
||||||
|
pantsLog.debug("Something for pants");
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
19
examples/fromreadme.js
Normal file
19
examples/fromreadme.js
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
//remember to change the require to just 'log4js' if you've npm install'ed it
|
||||||
|
var log4js = require('./lib/log4js');
|
||||||
|
//by default the console appender is loaded
|
||||||
|
//log4js.loadAppender('console');
|
||||||
|
//you'd only need to add the console appender if you
|
||||||
|
//had previously called log4js.clearAppenders();
|
||||||
|
//log4js.addAppender(log4js.appenders.console());
|
||||||
|
log4js.loadAppender('file');
|
||||||
|
log4js.addAppender(log4js.appenders.file('cheese.log'), 'cheese');
|
||||||
|
|
||||||
|
var logger = log4js.getLogger('cheese');
|
||||||
|
logger.setLevel('ERROR');
|
||||||
|
|
||||||
|
logger.trace('Entering cheese testing');
|
||||||
|
logger.debug('Got cheese.');
|
||||||
|
logger.info('Cheese is Gouda.');
|
||||||
|
logger.warn('Cheese is quite smelly.');
|
||||||
|
logger.error('Cheese is too ripe!');
|
||||||
|
logger.fatal('Cheese was breeding ground for listeria.');
|
||||||
@@ -1,4 +1,4 @@
|
|||||||
var log4js = require('./lib/log4js')
|
var log4js = require('../lib/log4js')
|
||||||
, log
|
, log
|
||||||
, i = 0;
|
, i = 0;
|
||||||
log4js.configure({
|
log4js.configure({
|
||||||
21
examples/patternLayout-tokens.js
Normal file
21
examples/patternLayout-tokens.js
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
var log4js = require('./lib/log4js');
|
||||||
|
|
||||||
|
var config = {
|
||||||
|
"appenders": [
|
||||||
|
{
|
||||||
|
"type": "console",
|
||||||
|
"layout": {
|
||||||
|
"type": "pattern",
|
||||||
|
"pattern": "%[%r (%x{pid}) %p %c -%] %m%n",
|
||||||
|
"tokens": {
|
||||||
|
"pid" : function() { return process.pid; }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
};
|
||||||
|
|
||||||
|
log4js.configure(config, {});
|
||||||
|
|
||||||
|
var logger = log4js.getLogger("app");
|
||||||
|
logger.info("Test log message");
|
||||||
53
lib/appenders/dateFile.js
Normal file
53
lib/appenders/dateFile.js
Normal file
@@ -0,0 +1,53 @@
|
|||||||
|
var streams = require('../streams'),
|
||||||
|
layouts = require('../layouts'),
|
||||||
|
path = require('path'),
|
||||||
|
os = require('os'),
|
||||||
|
eol = os.EOL || '\n',
|
||||||
|
openFiles = [];
|
||||||
|
|
||||||
|
//close open files on process exit.
|
||||||
|
process.on('exit', function() {
|
||||||
|
openFiles.forEach(function (file) {
|
||||||
|
file.end();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
/**
|
||||||
|
* File appender that rolls files according to a date pattern.
|
||||||
|
* @filename base filename.
|
||||||
|
* @pattern the format that will be added to the end of filename when rolling,
|
||||||
|
* also used to check when to roll files - defaults to '.yyyy-MM-dd'
|
||||||
|
* @layout layout function for log messages - defaults to basicLayout
|
||||||
|
*/
|
||||||
|
function appender(filename, pattern, alwaysIncludePattern, layout) {
|
||||||
|
layout = layout || layouts.basicLayout;
|
||||||
|
|
||||||
|
var logFile = new streams.DateRollingFileStream(filename, pattern, { alwaysIncludePattern: alwaysIncludePattern });
|
||||||
|
openFiles.push(logFile);
|
||||||
|
|
||||||
|
return function(logEvent) {
|
||||||
|
logFile.write(layout(logEvent) + eol, "utf8");
|
||||||
|
};
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
function configure(config, options) {
|
||||||
|
var layout;
|
||||||
|
|
||||||
|
if (config.layout) {
|
||||||
|
layout = layouts.layout(config.layout.type, config.layout);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!config.alwaysIncludePattern) {
|
||||||
|
config.alwaysIncludePattern = false;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (options && options.cwd && !config.absolute) {
|
||||||
|
config.filename = path.join(options.cwd, config.filename);
|
||||||
|
}
|
||||||
|
|
||||||
|
return appender(config.filename, config.pattern, config.alwaysIncludePattern, layout);
|
||||||
|
}
|
||||||
|
|
||||||
|
exports.appender = appender;
|
||||||
|
exports.configure = configure;
|
||||||
@@ -1,7 +1,17 @@
|
|||||||
var layouts = require('../layouts')
|
var layouts = require('../layouts')
|
||||||
, path = require('path')
|
, path = require('path')
|
||||||
, fs = require('fs')
|
, fs = require('fs')
|
||||||
, streams = require('../streams');
|
, streams = require('../streams')
|
||||||
|
, os = require('os')
|
||||||
|
, eol = os.EOL || '\n'
|
||||||
|
, openFiles = [];
|
||||||
|
|
||||||
|
//close open files on process exit.
|
||||||
|
process.on('exit', function() {
|
||||||
|
openFiles.forEach(function (file) {
|
||||||
|
file.end();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* File Appender writing the logs to a text file. Supports rolling of logs by size.
|
* File Appender writing the logs to a text file. Supports rolling of logs by size.
|
||||||
@@ -12,55 +22,51 @@ var layouts = require('../layouts')
|
|||||||
* @param numBackups - the number of log files to keep after logSize has been reached (default 5)
|
* @param numBackups - the number of log files to keep after logSize has been reached (default 5)
|
||||||
*/
|
*/
|
||||||
function fileAppender (file, layout, logSize, numBackups) {
|
function fileAppender (file, layout, logSize, numBackups) {
|
||||||
var bytesWritten = 0;
|
var bytesWritten = 0;
|
||||||
file = path.normalize(file);
|
file = path.normalize(file);
|
||||||
layout = layout || layouts.basicLayout;
|
layout = layout || layouts.basicLayout;
|
||||||
numBackups = numBackups === undefined ? 5 : numBackups;
|
numBackups = numBackups === undefined ? 5 : numBackups;
|
||||||
//there has to be at least one backup if logSize has been specified
|
//there has to be at least one backup if logSize has been specified
|
||||||
numBackups = numBackups === 0 ? 1 : numBackups;
|
numBackups = numBackups === 0 ? 1 : numBackups;
|
||||||
|
|
||||||
function openTheStream(file, fileSize, numFiles) {
|
function openTheStream(file, fileSize, numFiles) {
|
||||||
var stream;
|
var stream;
|
||||||
if (fileSize) {
|
if (fileSize) {
|
||||||
stream = new streams.BufferedWriteStream(
|
stream = new streams.RollingFileStream(
|
||||||
new streams.RollingFileStream(
|
file,
|
||||||
file,
|
fileSize,
|
||||||
fileSize,
|
numFiles
|
||||||
numFiles
|
);
|
||||||
)
|
} else {
|
||||||
);
|
stream = fs.createWriteStream(file, { encoding: "utf8", mode: 0644, flags: 'a' });
|
||||||
} else {
|
|
||||||
stream = new streams.BufferedWriteStream(fs.createWriteStream(file, { encoding: "utf8", mode: 0644, flags: 'a' }));
|
|
||||||
}
|
|
||||||
stream.on("error", function (err) {
|
|
||||||
console.error("log4js.fileAppender - Writing to file %s, error happened ", file, err);
|
|
||||||
});
|
|
||||||
return stream;
|
|
||||||
}
|
}
|
||||||
|
stream.on("error", function (err) {
|
||||||
var logFile = openTheStream(file, logSize, numBackups);
|
console.error("log4js.fileAppender - Writing to file %s, error happened ", file, err);
|
||||||
|
|
||||||
//close the file on process exit.
|
|
||||||
process.on('exit', function() {
|
|
||||||
logFile.end();
|
|
||||||
});
|
});
|
||||||
|
return stream;
|
||||||
|
}
|
||||||
|
|
||||||
return function(loggingEvent) {
|
var logFile = openTheStream(file, logSize, numBackups);
|
||||||
logFile.write(layout(loggingEvent)+'\n', "utf8");
|
|
||||||
};
|
// push file to the stack of open handlers
|
||||||
|
openFiles.push(logFile);
|
||||||
|
|
||||||
|
return function(loggingEvent) {
|
||||||
|
logFile.write(layout(loggingEvent) + eol, "utf8");
|
||||||
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
function configure(config, options) {
|
function configure(config, options) {
|
||||||
var layout;
|
var layout;
|
||||||
if (config.layout) {
|
if (config.layout) {
|
||||||
layout = layouts.layout(config.layout.type, config.layout);
|
layout = layouts.layout(config.layout.type, config.layout);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (options && options.cwd && !config.absolute) {
|
if (options && options.cwd && !config.absolute) {
|
||||||
config.filename = path.join(options.cwd, config.filename);
|
config.filename = path.join(options.cwd, config.filename);
|
||||||
}
|
}
|
||||||
|
|
||||||
return fileAppender(config.filename, layout, config.maxLogSize, config.backups);
|
return fileAppender(config.filename, layout, config.maxLogSize, config.backups);
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.appender = fileAppender;
|
exports.appender = fileAppender;
|
||||||
|
|||||||
@@ -1,75 +1,127 @@
|
|||||||
var log4js = require('../log4js');
|
var log4js = require('../log4js'),
|
||||||
var layouts = require('../layouts');
|
net = require('net'),
|
||||||
var net = require('net');
|
END_MSG = '__LOG4JS__';
|
||||||
var util = require('util');
|
|
||||||
|
|
||||||
var LogServer = function createLogServer(config) {
|
/**
|
||||||
var actualAppender = config.actualAppender;
|
* Creates a server, listening on config.loggerPort, config.loggerHost.
|
||||||
var server = net.createServer(function serverCreated(clientSocket) {
|
* Output goes to config.actualAppender (config.appender is used to
|
||||||
clientSocket.on('connect', function clientConnected() {
|
* set up that appender).
|
||||||
var logMessage = '';
|
*/
|
||||||
clientSocket.on('data', function chunkReceived(chunk) {
|
function logServer(config) {
|
||||||
logMessage += chunk;
|
|
||||||
});
|
/**
|
||||||
clientSocket.on('end', function gotChunks() {
|
* Takes a utf-8 string, returns an object with
|
||||||
|
* the correct log properties.
|
||||||
|
*/
|
||||||
|
function deserializeLoggingEvent(clientSocket, msg) {
|
||||||
|
var loggingEvent;
|
||||||
try {
|
try {
|
||||||
var loggingEvent = JSON.parse(logMessage);
|
loggingEvent = JSON.parse(msg);
|
||||||
deserializeLoggingEvent(loggingEvent);
|
loggingEvent.startTime = new Date(loggingEvent.startTime);
|
||||||
actualAppender(loggingEvent);
|
loggingEvent.level = log4js.levels.toLevel(loggingEvent.level.levelStr);
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
// JSON.parse failed, just log the contents probably a naughty.
|
// JSON.parse failed, just log the contents probably a naughty.
|
||||||
actualAppender(createLoggingEvent('ERROR', 'Unable to parse log: ' + logMessage));
|
loggingEvent = {
|
||||||
|
startTime: new Date(),
|
||||||
|
categoryName: 'log4js',
|
||||||
|
level: log4js.levels.ERROR,
|
||||||
|
data: [ 'Unable to parse log:', msg ]
|
||||||
|
};
|
||||||
}
|
}
|
||||||
});
|
|
||||||
});
|
|
||||||
});
|
|
||||||
server.listen(config.loggerPort || 5000, config.loggerHost || 'localhost');
|
|
||||||
}
|
|
||||||
|
|
||||||
function createLoggingEvent(level, message) {
|
loggingEvent.remoteAddress = clientSocket.remoteAddress;
|
||||||
return {
|
loggingEvent.remotePort = clientSocket.remotePort;
|
||||||
startTime: new Date(),
|
|
||||||
categoryName: 'log4js',
|
|
||||||
level: { toString: function () {
|
|
||||||
return level;
|
|
||||||
}},
|
|
||||||
data: [ message ]
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
function deserializeLoggingEvent(loggingEvent) {
|
return loggingEvent;
|
||||||
loggingEvent.startTime = new Date(loggingEvent.startTime);
|
}
|
||||||
loggingEvent.level.toString = function levelToString() {
|
|
||||||
return loggingEvent.level.levelStr;
|
var actualAppender = config.actualAppender,
|
||||||
};
|
server = net.createServer(function serverCreated(clientSocket) {
|
||||||
|
clientSocket.setEncoding('utf8');
|
||||||
|
var logMessage = '';
|
||||||
|
|
||||||
|
function logTheMessage(msg) {
|
||||||
|
if (logMessage.length > 0) {
|
||||||
|
actualAppender(deserializeLoggingEvent(clientSocket, msg));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function chunkReceived(chunk) {
|
||||||
|
var event;
|
||||||
|
logMessage += chunk || '';
|
||||||
|
if (logMessage.indexOf(END_MSG) > -1) {
|
||||||
|
event = logMessage.substring(0, logMessage.indexOf(END_MSG));
|
||||||
|
logTheMessage(event);
|
||||||
|
logMessage = logMessage.substring(event.length + END_MSG.length) || '';
|
||||||
|
//check for more, maybe it was a big chunk
|
||||||
|
chunkReceived();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
clientSocket.on('data', chunkReceived);
|
||||||
|
clientSocket.on('end', chunkReceived);
|
||||||
|
});
|
||||||
|
|
||||||
|
server.listen(config.loggerPort || 5000, config.loggerHost || 'localhost');
|
||||||
|
|
||||||
|
return actualAppender;
|
||||||
}
|
}
|
||||||
|
|
||||||
function workerAppender(config) {
|
function workerAppender(config) {
|
||||||
return function log(loggingEvent) {
|
var canWrite = false,
|
||||||
var socket = net.createConnection(config.loggerPort || 5000, config.loggerHost || 'localhost');
|
buffer = [],
|
||||||
socket.on('connect', function socketConnected() {
|
socket;
|
||||||
socket.end(JSON.stringify(loggingEvent), 'utf8');
|
|
||||||
});
|
createSocket();
|
||||||
};
|
|
||||||
|
function createSocket() {
|
||||||
|
socket = net.createConnection(config.loggerPort || 5000, config.loggerHost || 'localhost');
|
||||||
|
socket.on('connect', function() {
|
||||||
|
emptyBuffer();
|
||||||
|
canWrite = true;
|
||||||
|
});
|
||||||
|
socket.on('timeout', socket.end.bind(socket));
|
||||||
|
//don't bother listening for 'error', 'close' gets called after that anyway
|
||||||
|
socket.on('close', createSocket);
|
||||||
|
}
|
||||||
|
|
||||||
|
function emptyBuffer() {
|
||||||
|
var evt;
|
||||||
|
while ((evt = buffer.shift())) {
|
||||||
|
write(evt);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function write(loggingEvent) {
|
||||||
|
socket.write(JSON.stringify(loggingEvent), 'utf8');
|
||||||
|
socket.write(END_MSG, 'utf8');
|
||||||
|
}
|
||||||
|
|
||||||
|
return function log(loggingEvent) {
|
||||||
|
if (canWrite) {
|
||||||
|
write(loggingEvent);
|
||||||
|
} else {
|
||||||
|
buffer.push(loggingEvent);
|
||||||
|
}
|
||||||
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
function createAppender(config) {
|
function createAppender(config) {
|
||||||
if (config.mode === 'master') {
|
if (config.mode === 'master') {
|
||||||
var server = new LogServer(config);
|
return logServer(config);
|
||||||
return config.actualAppender;
|
} else {
|
||||||
} else {
|
return workerAppender(config);
|
||||||
return workerAppender(config);
|
}
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
function configure(config) {
|
function configure(config, options) {
|
||||||
var actualAppender;
|
var actualAppender;
|
||||||
if (config.appender && config.mode === 'master') {
|
if (config.appender && config.mode === 'master') {
|
||||||
log4js.loadAppender(config.appender.type);
|
log4js.loadAppender(config.appender.type);
|
||||||
actualAppender = log4js.appenderMakers[config.appender.type](config.appender);
|
actualAppender = log4js.appenderMakers[config.appender.type](config.appender, options);
|
||||||
config.actualAppender = actualAppender;
|
config.actualAppender = actualAppender;
|
||||||
}
|
}
|
||||||
return createAppender(config);
|
return createAppender(config);
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.appender = createAppender;
|
exports.appender = createAppender;
|
||||||
|
|||||||
@@ -1,27 +1,23 @@
|
|||||||
var layouts = require("../layouts"),
|
var layouts = require("../layouts"),
|
||||||
mailer = require("nodemailer");
|
mailer = require("nodemailer"),
|
||||||
|
os = require('os');
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* SMTP Appender. Sends logging events using SMTP protocol.
|
* SMTP Appender. Sends logging events using SMTP protocol.
|
||||||
* It can either send an email on each event or group several logging events gathered during specified interval.
|
* It can either send an email on each event or group several logging events gathered during specified interval.
|
||||||
*
|
*
|
||||||
* @param recipients comma separated list of email recipients
|
* @param config appender configuration data
|
||||||
* @param sender sender of all emails (defaults to SMTP user)
|
|
||||||
* @param subject subject of all email messages (defaults to first event's message)
|
|
||||||
* @param layout a function that takes a logevent and returns a string (defaults to basicLayout).
|
* @param layout a function that takes a logevent and returns a string (defaults to basicLayout).
|
||||||
* @param smtpConfig SMTP configuration for 'nodemailer'
|
|
||||||
* @param sendInterval the time in seconds between sending attempts (defaults to 0);
|
|
||||||
* all events are buffered and sent in one email during this time; if 0 than every event sends an email
|
* all events are buffered and sent in one email during this time; if 0 than every event sends an email
|
||||||
*/
|
*/
|
||||||
function smtpAppender(recipients, sender, subject, layout, smtpConfig, sendInterval) {
|
function smtpAppender(config, layout) {
|
||||||
sender = sender || smtpConfig.user;
|
|
||||||
layout = layout || layouts.basicLayout;
|
layout = layout || layouts.basicLayout;
|
||||||
subjectLayout = layouts.messagePassThroughLayout;
|
var subjectLayout = layouts.messagePassThroughLayout;
|
||||||
mailer.SMTP = smtpConfig;
|
var sendInterval = config.sendInterval*1000 || 0;
|
||||||
sendInterval = sendInterval*1000 || 0;
|
|
||||||
|
|
||||||
var logEventBuffer = [];
|
var logEventBuffer = [];
|
||||||
var sendTimer;
|
var sendTimer;
|
||||||
|
var transport = mailer.createTransport(config.transport, config[config.transport]);
|
||||||
|
|
||||||
function sendBuffer() {
|
function sendBuffer() {
|
||||||
if (logEventBuffer.length == 0)
|
if (logEventBuffer.length == 0)
|
||||||
@@ -34,12 +30,14 @@ function smtpAppender(recipients, sender, subject, layout, smtpConfig, sendInter
|
|||||||
}
|
}
|
||||||
|
|
||||||
var msg = {
|
var msg = {
|
||||||
sender: sender,
|
to: config.recipients,
|
||||||
to: recipients,
|
subject: config.subject || subjectLayout(firstEvent),
|
||||||
subject: subject || subjectLayout(firstEvent),
|
text: body,
|
||||||
body: body
|
headers: {"Hostname": os.hostname()}
|
||||||
};
|
};
|
||||||
mailer.send_mail(msg, function(error, success) {
|
if (config.sender)
|
||||||
|
msg.from = config.sender;
|
||||||
|
transport.sendMail(msg, function(error, success) {
|
||||||
if (error) {
|
if (error) {
|
||||||
console.error("log4js.smtpAppender - Error happened ", error);
|
console.error("log4js.smtpAppender - Error happened ", error);
|
||||||
}
|
}
|
||||||
@@ -68,8 +66,10 @@ function configure(config) {
|
|||||||
if (config.layout) {
|
if (config.layout) {
|
||||||
layout = layouts.layout(config.layout.type, config.layout);
|
layout = layouts.layout(config.layout.type, config.layout);
|
||||||
}
|
}
|
||||||
return smtpAppender(config.recipients, config.sender, config.subject, layout, config.smtp, config.sendInterval);
|
return smtpAppender(config, layout);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
exports.name = "smtp";
|
||||||
exports.appender = smtpAppender;
|
exports.appender = smtpAppender;
|
||||||
exports.configure = configure;
|
exports.configure = configure;
|
||||||
|
|
||||||
|
|||||||
143
lib/layouts.js
143
lib/layouts.js
@@ -1,32 +1,34 @@
|
|||||||
var dateFormat = require('./date_format')
|
var dateFormat = require('./date_format')
|
||||||
, util = require('util')
|
, os = require('os')
|
||||||
, replacementRegExp = /%[sdj]/g
|
, eol = os.EOL || '\n'
|
||||||
, layoutMakers = {
|
, util = require('util')
|
||||||
"messagePassThrough": function() { return messagePassThroughLayout; }
|
, replacementRegExp = /%[sdj]/g
|
||||||
, "basic": function() { return basicLayout; }
|
, layoutMakers = {
|
||||||
, "colored": function() { return colouredLayout; }
|
"messagePassThrough": function() { return messagePassThroughLayout; }
|
||||||
, "coloured": function() { return colouredLayout; }
|
, "basic": function() { return basicLayout; }
|
||||||
, "pattern": function (config) {
|
, "colored": function() { return colouredLayout; }
|
||||||
var pattern = config.pattern || undefined;
|
, "coloured": function() { return colouredLayout; }
|
||||||
return patternLayout(pattern);
|
, "pattern": function (config) {
|
||||||
}
|
var pattern = config.pattern || undefined;
|
||||||
}
|
var tokens = config.tokens || undefined;
|
||||||
, colours = {
|
return patternLayout(pattern, tokens);
|
||||||
ALL: "grey"
|
}
|
||||||
, TRACE: "blue"
|
}
|
||||||
, DEBUG: "cyan"
|
, colours = {
|
||||||
, INFO: "green"
|
ALL: "grey"
|
||||||
, WARN: "yellow"
|
, TRACE: "blue"
|
||||||
, ERROR: "red"
|
, DEBUG: "cyan"
|
||||||
, FATAL: "magenta"
|
, INFO: "green"
|
||||||
, OFF: "grey"
|
, WARN: "yellow"
|
||||||
};
|
, ERROR: "red"
|
||||||
|
, FATAL: "magenta"
|
||||||
|
, OFF: "grey"
|
||||||
|
};
|
||||||
|
|
||||||
function formatLogData(logData) {
|
function formatLogData(logData) {
|
||||||
var output = ""
|
var output = ""
|
||||||
, data = Array.isArray(logData) ? logData.slice() : Array.prototype.slice.call(arguments)
|
, data = Array.isArray(logData) ? logData.slice() : Array.prototype.slice.call(arguments)
|
||||||
, format = data.shift();
|
, format = data.shift();
|
||||||
|
|
||||||
if (typeof format === "string") {
|
if (typeof format === "string") {
|
||||||
output = format.replace(replacementRegExp, function(match) {
|
output = format.replace(replacementRegExp, function(match) {
|
||||||
@@ -47,40 +49,45 @@ function formatLogData(logData) {
|
|||||||
if (output) {
|
if (output) {
|
||||||
output += ' ';
|
output += ' ';
|
||||||
}
|
}
|
||||||
|
output += util.inspect(item);
|
||||||
if (item && item.stack) {
|
if (item && item.stack) {
|
||||||
output += item.stack;
|
output += "\n" + item.stack;
|
||||||
} else {
|
|
||||||
output += util.inspect(item);
|
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
return output;
|
return output;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
var styles = {
|
||||||
|
//styles
|
||||||
|
'bold' : [1, 22],
|
||||||
|
'italic' : [3, 23],
|
||||||
|
'underline' : [4, 24],
|
||||||
|
'inverse' : [7, 27],
|
||||||
|
//grayscale
|
||||||
|
'white' : [37, 39],
|
||||||
|
'grey' : [90, 39],
|
||||||
|
'black' : [90, 39],
|
||||||
|
//colors
|
||||||
|
'blue' : [34, 39],
|
||||||
|
'cyan' : [36, 39],
|
||||||
|
'green' : [32, 39],
|
||||||
|
'magenta' : [35, 39],
|
||||||
|
'red' : [31, 39],
|
||||||
|
'yellow' : [33, 39]
|
||||||
|
};
|
||||||
|
|
||||||
|
function colorizeStart(style) {
|
||||||
|
return style ? '\033[' + styles[style][0] + 'm' : '';
|
||||||
|
}
|
||||||
|
function colorizeEnd(style) {
|
||||||
|
return style ? '\033[' + styles[style][1] + 'm' : '';
|
||||||
|
}
|
||||||
/**
|
/**
|
||||||
* Taken from masylum's fork (https://github.com/masylum/log4js-node)
|
* Taken from masylum's fork (https://github.com/masylum/log4js-node)
|
||||||
*/
|
*/
|
||||||
function colorize (str, style) {
|
function colorize (str, style) {
|
||||||
var styles = {
|
return colorizeStart(style) + str + colorizeEnd(style);
|
||||||
//styles
|
|
||||||
'bold' : [1, 22],
|
|
||||||
'italic' : [3, 23],
|
|
||||||
'underline' : [4, 24],
|
|
||||||
'inverse' : [7, 27],
|
|
||||||
//grayscale
|
|
||||||
'white' : [37, 39],
|
|
||||||
'grey' : [90, 39],
|
|
||||||
'black' : [90, 39],
|
|
||||||
//colors
|
|
||||||
'blue' : [34, 39],
|
|
||||||
'cyan' : [36, 39],
|
|
||||||
'green' : [32, 39],
|
|
||||||
'magenta' : [35, 39],
|
|
||||||
'red' : [31, 39],
|
|
||||||
'yellow' : [33, 39]
|
|
||||||
};
|
|
||||||
return style ? '\033[' + styles[style][0] + 'm' + str +
|
|
||||||
'\033[' + styles[style][1] + 'm' : str;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
function timestampLevelAndCategory(loggingEvent, colour) {
|
function timestampLevelAndCategory(loggingEvent, colour) {
|
||||||
@@ -133,12 +140,26 @@ function messagePassThroughLayout (loggingEvent) {
|
|||||||
* - %d date in various formats
|
* - %d date in various formats
|
||||||
* - %% %
|
* - %% %
|
||||||
* - %n newline
|
* - %n newline
|
||||||
* Takes a pattern string and returns a layout function.
|
* - %x{<tokenname>} add dynamic tokens to your log. Tokens are specified in the tokens parameter
|
||||||
|
* You can use %[ and %] to define a colored block.
|
||||||
|
*
|
||||||
|
* Tokens are specified as simple key:value objects.
|
||||||
|
* The key represents the token name whereas the value can be a string or function
|
||||||
|
* which is called to extract the value to put in the log message. If token is not
|
||||||
|
* found, it doesn't replace the field.
|
||||||
|
*
|
||||||
|
* A sample token would be: { "pid" : function() { return process.pid; } }
|
||||||
|
*
|
||||||
|
* Takes a pattern string, array of tokens and returns a layout function.
|
||||||
|
* @param {String} Log format pattern String
|
||||||
|
* @param {object} map object of different tokens
|
||||||
|
* @return {Function}
|
||||||
* @author Stephan Strittmatter
|
* @author Stephan Strittmatter
|
||||||
|
* @author Jan Schmidle
|
||||||
*/
|
*/
|
||||||
function patternLayout (pattern) {
|
function patternLayout (pattern, tokens) {
|
||||||
var TTCC_CONVERSION_PATTERN = "%r %p %c - %m%n";
|
var TTCC_CONVERSION_PATTERN = "%r %p %c - %m%n";
|
||||||
var regex = /%(-?[0-9]+)?(\.?[0-9]+)?([cdmnpr%])(\{([^\}]+)\})?|([^%]+)/;
|
var regex = /%(-?[0-9]+)?(\.?[0-9]+)?([\[\]cdmnprx%])(\{([^\}]+)\})?|([^%]+)/;
|
||||||
|
|
||||||
pattern = pattern || TTCC_CONVERSION_PATTERN;
|
pattern = pattern || TTCC_CONVERSION_PATTERN;
|
||||||
|
|
||||||
@@ -197,7 +218,7 @@ function patternLayout (pattern) {
|
|||||||
replacement = formatLogData(loggingEvent.data);
|
replacement = formatLogData(loggingEvent.data);
|
||||||
break;
|
break;
|
||||||
case "n":
|
case "n":
|
||||||
replacement = "\n";
|
replacement = eol;
|
||||||
break;
|
break;
|
||||||
case "p":
|
case "p":
|
||||||
replacement = loggingEvent.level.toString();
|
replacement = loggingEvent.level.toString();
|
||||||
@@ -205,9 +226,26 @@ function patternLayout (pattern) {
|
|||||||
case "r":
|
case "r":
|
||||||
replacement = "" + loggingEvent.startTime.toLocaleTimeString();
|
replacement = "" + loggingEvent.startTime.toLocaleTimeString();
|
||||||
break;
|
break;
|
||||||
|
case "[":
|
||||||
|
replacement = colorizeStart(colours[loggingEvent.level.toString()]);
|
||||||
|
break;
|
||||||
|
case "]":
|
||||||
|
replacement = colorizeEnd(colours[loggingEvent.level.toString()]);
|
||||||
|
break;
|
||||||
case "%":
|
case "%":
|
||||||
replacement = "%";
|
replacement = "%";
|
||||||
break;
|
break;
|
||||||
|
case "x":
|
||||||
|
if(typeof(tokens[specifier]) !== 'undefined') {
|
||||||
|
if(typeof(tokens[specifier]) === 'function') {
|
||||||
|
replacement = tokens[specifier]();
|
||||||
|
} else {
|
||||||
|
replacement = tokens[specifier];
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
replacement = matchedString;
|
||||||
|
}
|
||||||
|
break;
|
||||||
default:
|
default:
|
||||||
replacement = matchedString;
|
replacement = matchedString;
|
||||||
break;
|
break;
|
||||||
@@ -247,7 +285,6 @@ function patternLayout (pattern) {
|
|||||||
|
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
||||||
module.exports = {
|
module.exports = {
|
||||||
basicLayout: basicLayout
|
basicLayout: basicLayout
|
||||||
, messagePassThroughLayout: messagePassThroughLayout
|
, messagePassThroughLayout: messagePassThroughLayout
|
||||||
|
|||||||
@@ -240,6 +240,7 @@ function initReloadConfiguration(filename, options) {
|
|||||||
|
|
||||||
function configure(configurationFileOrObject, options) {
|
function configure(configurationFileOrObject, options) {
|
||||||
var config = configurationFileOrObject;
|
var config = configurationFileOrObject;
|
||||||
|
config = config || process.env.LOG4JS_CONFIG;
|
||||||
options = options || {};
|
options = options || {};
|
||||||
|
|
||||||
if (config === undefined || config === null || typeof(config) === 'string') {
|
if (config === undefined || config === null || typeof(config) === 'string') {
|
||||||
|
|||||||
268
lib/streams.js
268
lib/streams.js
@@ -1,268 +0,0 @@
|
|||||||
var util = require('util'),
|
|
||||||
fs = require('fs'),
|
|
||||||
path = require('path'),
|
|
||||||
events = require('events'),
|
|
||||||
async = require('async');
|
|
||||||
|
|
||||||
function debug(message) {
|
|
||||||
// util.debug(message);
|
|
||||||
// console.log(message);
|
|
||||||
}
|
|
||||||
|
|
||||||
function BufferedWriteStream(stream) {
|
|
||||||
var that = this;
|
|
||||||
this.stream = stream;
|
|
||||||
this.buffer = [];
|
|
||||||
this.canWrite = false;
|
|
||||||
this.bytes = 0;
|
|
||||||
|
|
||||||
this.stream.on("open", function() {
|
|
||||||
that.canWrite = true;
|
|
||||||
that.flushBuffer();
|
|
||||||
});
|
|
||||||
|
|
||||||
this.stream.on("error", function (err) {
|
|
||||||
that.emit("error", err);
|
|
||||||
});
|
|
||||||
|
|
||||||
this.stream.on("drain", function() {
|
|
||||||
that.canWrite = true;
|
|
||||||
that.flushBuffer();
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
util.inherits(BufferedWriteStream, events.EventEmitter);
|
|
||||||
|
|
||||||
Object.defineProperty(
|
|
||||||
BufferedWriteStream.prototype,
|
|
||||||
"fd",
|
|
||||||
{
|
|
||||||
get: function() { return this.stream.fd; },
|
|
||||||
set: function(newFd) {
|
|
||||||
this.stream.fd = newFd;
|
|
||||||
this.bytes = 0;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
);
|
|
||||||
|
|
||||||
Object.defineProperty(
|
|
||||||
BufferedWriteStream.prototype,
|
|
||||||
"bytesWritten",
|
|
||||||
{
|
|
||||||
get: function() { return this.bytes; }
|
|
||||||
}
|
|
||||||
);
|
|
||||||
|
|
||||||
BufferedWriteStream.prototype.write = function(data, encoding) {
|
|
||||||
this.buffer.push({ data: data, encoding: encoding });
|
|
||||||
this.flushBuffer();
|
|
||||||
};
|
|
||||||
|
|
||||||
BufferedWriteStream.prototype.end = function(data, encoding) {
|
|
||||||
if (data) {
|
|
||||||
this.buffer.push({ data: data, encoding: encoding });
|
|
||||||
}
|
|
||||||
this.flushBufferEvenIfCannotWrite();
|
|
||||||
};
|
|
||||||
|
|
||||||
BufferedWriteStream.prototype.writeToStream = function(toWrite) {
|
|
||||||
this.bytes += toWrite.data.length;
|
|
||||||
this.canWrite = this.stream.write(toWrite.data, toWrite.encoding);
|
|
||||||
};
|
|
||||||
|
|
||||||
BufferedWriteStream.prototype.flushBufferEvenIfCannotWrite = function() {
|
|
||||||
while (this.buffer.length > 0) {
|
|
||||||
this.writeToStream(this.buffer.shift());
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
BufferedWriteStream.prototype.flushBuffer = function() {
|
|
||||||
while (this.buffer.length > 0 && this.canWrite) {
|
|
||||||
this.writeToStream(this.buffer.shift());
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
function BaseRollingFileStream(filename, options) {
|
|
||||||
this.filename = filename;
|
|
||||||
this.options = options || { encoding: 'utf8', mode: 0644, flags: 'a' };
|
|
||||||
this.rolling = false;
|
|
||||||
this.writesWhileRolling = [];
|
|
||||||
this.currentSize = 0;
|
|
||||||
|
|
||||||
function currentFileSize(file) {
|
|
||||||
var fileSize = 0;
|
|
||||||
try {
|
|
||||||
fileSize = fs.statSync(file).size;
|
|
||||||
} catch (e) {
|
|
||||||
// file does not exist
|
|
||||||
}
|
|
||||||
return fileSize;
|
|
||||||
}
|
|
||||||
|
|
||||||
function throwErrorIfArgumentsAreNotValid() {
|
|
||||||
if (!filename) {
|
|
||||||
throw new Error("You must specify a filename");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
throwErrorIfArgumentsAreNotValid();
|
|
||||||
|
|
||||||
BaseRollingFileStream.super_.call(this, this.filename, this.options);
|
|
||||||
this.currentSize = currentFileSize(this.filename);
|
|
||||||
}
|
|
||||||
util.inherits(BaseRollingFileStream, fs.FileWriteStream);
|
|
||||||
|
|
||||||
BaseRollingFileStream.prototype.initRolling = function() {
|
|
||||||
var that = this;
|
|
||||||
|
|
||||||
function emptyRollingQueue() {
|
|
||||||
debug("emptying the rolling queue");
|
|
||||||
var toWrite;
|
|
||||||
while ((toWrite = that.writesWhileRolling.shift())) {
|
|
||||||
BaseRollingFileStream.super_.prototype.write.call(that, toWrite.data, toWrite.encoding);
|
|
||||||
that.currentSize += toWrite.data.length;
|
|
||||||
if (that.shouldRoll()) {
|
|
||||||
that.flush();
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
that.flush();
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
this.rolling = true;
|
|
||||||
this.roll(this.filename, function() {
|
|
||||||
that.currentSize = 0;
|
|
||||||
that.rolling = emptyRollingQueue();
|
|
||||||
if (that.rolling) {
|
|
||||||
process.nextTick(function() { that.initRolling(); });
|
|
||||||
}
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
BaseRollingFileStream.prototype.write = function(data, encoding) {
|
|
||||||
if (this.rolling) {
|
|
||||||
this.writesWhileRolling.push({ data: data, encoding: encoding });
|
|
||||||
return false;
|
|
||||||
} else {
|
|
||||||
var canWrite = BaseRollingFileStream.super_.prototype.write.call(this, data, encoding);
|
|
||||||
this.currentSize += data.length;
|
|
||||||
debug('current size = ' + this.currentSize);
|
|
||||||
if (this.shouldRoll()) {
|
|
||||||
this.initRolling();
|
|
||||||
}
|
|
||||||
return canWrite;
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
BaseRollingFileStream.prototype.shouldRoll = function() {
|
|
||||||
return false; // default behaviour is never to roll
|
|
||||||
};
|
|
||||||
|
|
||||||
BaseRollingFileStream.prototype.roll = function(filename, callback) {
|
|
||||||
callback(); // default behaviour is not to do anything
|
|
||||||
};
|
|
||||||
|
|
||||||
|
|
||||||
function RollingFileStream (filename, size, backups, options) {
|
|
||||||
this.size = size;
|
|
||||||
this.backups = backups || 1;
|
|
||||||
|
|
||||||
function throwErrorIfArgumentsAreNotValid() {
|
|
||||||
if (!filename || !size || size <= 0) {
|
|
||||||
throw new Error("You must specify a filename and file size");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
throwErrorIfArgumentsAreNotValid();
|
|
||||||
|
|
||||||
RollingFileStream.super_.call(this, filename, options);
|
|
||||||
}
|
|
||||||
util.inherits(RollingFileStream, BaseRollingFileStream);
|
|
||||||
|
|
||||||
RollingFileStream.prototype.shouldRoll = function() {
|
|
||||||
return this.currentSize >= this.size;
|
|
||||||
};
|
|
||||||
|
|
||||||
RollingFileStream.prototype.roll = function(filename, callback) {
|
|
||||||
var that = this,
|
|
||||||
nameMatcher = new RegExp('^' + path.basename(filename));
|
|
||||||
|
|
||||||
function justTheseFiles (item) {
|
|
||||||
return nameMatcher.test(item);
|
|
||||||
}
|
|
||||||
|
|
||||||
function index(filename_) {
|
|
||||||
return parseInt(filename_.substring((path.basename(filename) + '.').length), 10) || 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
function byIndex(a, b) {
|
|
||||||
if (index(a) > index(b)) {
|
|
||||||
return 1;
|
|
||||||
} else if (index(a) < index(b) ) {
|
|
||||||
return -1;
|
|
||||||
} else {
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function increaseFileIndex (fileToRename, cb) {
|
|
||||||
var idx = index(fileToRename);
|
|
||||||
debug('Index of ' + fileToRename + ' is ' + idx);
|
|
||||||
if (idx < that.backups) {
|
|
||||||
//on windows, you can get a EEXIST error if you rename a file to an existing file
|
|
||||||
//so, we'll try to delete the file we're renaming to first
|
|
||||||
fs.unlink(filename + '.' + (idx+1), function (err) {
|
|
||||||
//ignore err: if we could not delete, it's most likely that it doesn't exist
|
|
||||||
debug('Renaming ' + fileToRename + ' -> ' + filename + '.' + (idx+1));
|
|
||||||
fs.rename(path.join(path.dirname(filename), fileToRename), filename + '.' + (idx + 1), cb);
|
|
||||||
});
|
|
||||||
} else {
|
|
||||||
cb();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function renameTheFiles(cb) {
|
|
||||||
//roll the backups (rename file.n to file.n+1, where n <= numBackups)
|
|
||||||
debug("Renaming the old files");
|
|
||||||
fs.readdir(path.dirname(filename), function (err, files) {
|
|
||||||
async.forEachSeries(
|
|
||||||
files.filter(justTheseFiles).sort(byIndex).reverse(),
|
|
||||||
increaseFileIndex,
|
|
||||||
cb
|
|
||||||
);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
function openANewFile(cb) {
|
|
||||||
debug("Opening a new file");
|
|
||||||
fs.open(
|
|
||||||
filename,
|
|
||||||
that.options.flags,
|
|
||||||
that.options.mode,
|
|
||||||
function (err, fd) {
|
|
||||||
debug("opened new file");
|
|
||||||
var oldLogFileFD = that.fd;
|
|
||||||
that.fd = fd;
|
|
||||||
that.writable = true;
|
|
||||||
fs.close(oldLogFileFD, cb);
|
|
||||||
}
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
debug("Starting roll");
|
|
||||||
debug("Queueing up data until we've finished rolling");
|
|
||||||
debug("Flushing underlying stream");
|
|
||||||
this.flush();
|
|
||||||
|
|
||||||
async.series([
|
|
||||||
renameTheFiles,
|
|
||||||
openANewFile
|
|
||||||
], callback);
|
|
||||||
|
|
||||||
};
|
|
||||||
|
|
||||||
|
|
||||||
exports.BaseRollingFileStream = BaseRollingFileStream;
|
|
||||||
exports.RollingFileStream = RollingFileStream;
|
|
||||||
exports.BufferedWriteStream = BufferedWriteStream;
|
|
||||||
89
lib/streams/BaseRollingFileStream.js
Normal file
89
lib/streams/BaseRollingFileStream.js
Normal file
@@ -0,0 +1,89 @@
|
|||||||
|
var fs = require('fs'),
|
||||||
|
stream,
|
||||||
|
util = require('util'),
|
||||||
|
semver = require('semver');
|
||||||
|
|
||||||
|
if (semver.satisfies(process.version, '>=0.10.0')) {
|
||||||
|
stream = require('stream');
|
||||||
|
} else {
|
||||||
|
stream = require('readable-stream');
|
||||||
|
}
|
||||||
|
|
||||||
|
var debug;
|
||||||
|
if (process.env.NODE_DEBUG && /\blog4js\b/.test(process.env.NODE_DEBUG)) {
|
||||||
|
debug = function(message) { console.error('LOG4JS: (BaseRollingFileStream) %s', message); };
|
||||||
|
} else {
|
||||||
|
debug = function() { };
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = BaseRollingFileStream;
|
||||||
|
|
||||||
|
function BaseRollingFileStream(filename, options) {
|
||||||
|
debug("In BaseRollingFileStream");
|
||||||
|
this.filename = filename;
|
||||||
|
this.options = options || { encoding: 'utf8', mode: 0644, flags: 'a' };
|
||||||
|
this.currentSize = 0;
|
||||||
|
|
||||||
|
function currentFileSize(file) {
|
||||||
|
var fileSize = 0;
|
||||||
|
try {
|
||||||
|
fileSize = fs.statSync(file).size;
|
||||||
|
} catch (e) {
|
||||||
|
// file does not exist
|
||||||
|
}
|
||||||
|
return fileSize;
|
||||||
|
}
|
||||||
|
|
||||||
|
function throwErrorIfArgumentsAreNotValid() {
|
||||||
|
if (!filename) {
|
||||||
|
throw new Error("You must specify a filename");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
throwErrorIfArgumentsAreNotValid();
|
||||||
|
debug("Calling BaseRollingFileStream.super");
|
||||||
|
BaseRollingFileStream.super_.call(this);
|
||||||
|
this.openTheStream();
|
||||||
|
this.currentSize = currentFileSize(this.filename);
|
||||||
|
}
|
||||||
|
util.inherits(BaseRollingFileStream, stream.Writable);
|
||||||
|
|
||||||
|
BaseRollingFileStream.prototype._write = function(chunk, encoding, callback) {
|
||||||
|
var that = this;
|
||||||
|
function writeTheChunk() {
|
||||||
|
debug("writing the chunk to the underlying stream");
|
||||||
|
that.currentSize += chunk.length;
|
||||||
|
that.theStream.write(chunk, encoding, callback);
|
||||||
|
}
|
||||||
|
|
||||||
|
debug("in _write");
|
||||||
|
|
||||||
|
if (this.shouldRoll()) {
|
||||||
|
this.currentSize = 0;
|
||||||
|
this.roll(this.filename, writeTheChunk);
|
||||||
|
} else {
|
||||||
|
writeTheChunk();
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
BaseRollingFileStream.prototype.openTheStream = function(cb) {
|
||||||
|
debug("opening the underlying stream");
|
||||||
|
this.theStream = fs.createWriteStream(this.filename, this.options);
|
||||||
|
if (cb) {
|
||||||
|
this.theStream.on("open", cb);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
BaseRollingFileStream.prototype.closeTheStream = function(cb) {
|
||||||
|
debug("closing the underlying stream");
|
||||||
|
this.theStream.end(cb);
|
||||||
|
};
|
||||||
|
|
||||||
|
BaseRollingFileStream.prototype.shouldRoll = function() {
|
||||||
|
return false; // default behaviour is never to roll
|
||||||
|
};
|
||||||
|
|
||||||
|
BaseRollingFileStream.prototype.roll = function(filename, callback) {
|
||||||
|
callback(); // default behaviour is not to do anything
|
||||||
|
};
|
||||||
|
|
||||||
82
lib/streams/DateRollingFileStream.js
Normal file
82
lib/streams/DateRollingFileStream.js
Normal file
@@ -0,0 +1,82 @@
|
|||||||
|
var BaseRollingFileStream = require('./BaseRollingFileStream'),
|
||||||
|
format = require('../date_format'),
|
||||||
|
async = require('async'),
|
||||||
|
fs = require('fs'),
|
||||||
|
util = require('util');
|
||||||
|
|
||||||
|
module.exports = DateRollingFileStream;
|
||||||
|
|
||||||
|
var debug;
|
||||||
|
if (process.env.NODE_DEBUG && /\blog4js\b/.test(process.env.NODE_DEBUG)) {
|
||||||
|
debug = function(message) { console.error('LOG4JS: (DateRollingFileStream) %s', message); };
|
||||||
|
} else {
|
||||||
|
debug = function() { };
|
||||||
|
}
|
||||||
|
|
||||||
|
function DateRollingFileStream(filename, pattern, options, now) {
|
||||||
|
debug("Now is " + now);
|
||||||
|
if (pattern && typeof(pattern) === 'object') {
|
||||||
|
now = options;
|
||||||
|
options = pattern;
|
||||||
|
pattern = null;
|
||||||
|
}
|
||||||
|
this.pattern = pattern || '.yyyy-MM-dd';
|
||||||
|
this.now = now || Date.now;
|
||||||
|
this.lastTimeWeWroteSomething = format.asString(this.pattern, new Date(this.now()));
|
||||||
|
this.baseFilename = filename;
|
||||||
|
|
||||||
|
if (options) {
|
||||||
|
if (options.alwaysIncludePattern) {
|
||||||
|
filename = filename + this.lastTimeWeWroteSomething;
|
||||||
|
}
|
||||||
|
delete options.alwaysIncludePattern;
|
||||||
|
if (options === {}) {
|
||||||
|
options = null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
debug("this.now is " + this.now + ", now is " + now);
|
||||||
|
|
||||||
|
DateRollingFileStream.super_.call(this, filename, options);
|
||||||
|
}
|
||||||
|
util.inherits(DateRollingFileStream, BaseRollingFileStream);
|
||||||
|
|
||||||
|
DateRollingFileStream.prototype.shouldRoll = function() {
|
||||||
|
var lastTime = this.lastTimeWeWroteSomething,
|
||||||
|
thisTime = format.asString(this.pattern, new Date(this.now()));
|
||||||
|
|
||||||
|
debug("DateRollingFileStream.shouldRoll with now = " + this.now() + ", thisTime = " + thisTime + ", lastTime = " + lastTime);
|
||||||
|
|
||||||
|
this.lastTimeWeWroteSomething = thisTime;
|
||||||
|
this.previousTime = lastTime;
|
||||||
|
|
||||||
|
return thisTime !== lastTime;
|
||||||
|
};
|
||||||
|
|
||||||
|
DateRollingFileStream.prototype.roll = function(filename, callback) {
|
||||||
|
var that = this,
|
||||||
|
newFilename = this.baseFilename + this.previousTime;
|
||||||
|
|
||||||
|
debug("Starting roll");
|
||||||
|
|
||||||
|
async.series([
|
||||||
|
this.closeTheStream.bind(this),
|
||||||
|
deleteAnyExistingFile,
|
||||||
|
renameTheCurrentFile,
|
||||||
|
this.openTheStream.bind(this)
|
||||||
|
], callback);
|
||||||
|
|
||||||
|
function deleteAnyExistingFile(cb) {
|
||||||
|
//on windows, you can get a EEXIST error if you rename a file to an existing file
|
||||||
|
//so, we'll try to delete the file we're renaming to first
|
||||||
|
fs.unlink(newFilename, function (err) {
|
||||||
|
//ignore err: if we could not delete, it's most likely that it doesn't exist
|
||||||
|
cb();
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
function renameTheCurrentFile(cb) {
|
||||||
|
debug("Renaming the " + filename + " -> " + newFilename);
|
||||||
|
fs.rename(filename, newFilename, cb);
|
||||||
|
}
|
||||||
|
|
||||||
|
};
|
||||||
94
lib/streams/RollingFileStream.js
Normal file
94
lib/streams/RollingFileStream.js
Normal file
@@ -0,0 +1,94 @@
|
|||||||
|
var BaseRollingFileStream = require('./BaseRollingFileStream'),
|
||||||
|
util = require('util'),
|
||||||
|
path = require('path'),
|
||||||
|
fs = require('fs'),
|
||||||
|
async = require('async');
|
||||||
|
|
||||||
|
var debug;
|
||||||
|
if (process.env.NODE_DEBUG && /\blog4js\b/.test(process.env.NODE_DEBUG)) {
|
||||||
|
debug = function(message) { console.error('LOG4JS: (RollingFileStream) %s', message); };
|
||||||
|
} else {
|
||||||
|
debug = function() { };
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = RollingFileStream;
|
||||||
|
|
||||||
|
function RollingFileStream (filename, size, backups, options) {
|
||||||
|
this.size = size;
|
||||||
|
this.backups = backups || 1;
|
||||||
|
|
||||||
|
function throwErrorIfArgumentsAreNotValid() {
|
||||||
|
if (!filename || !size || size <= 0) {
|
||||||
|
throw new Error("You must specify a filename and file size");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
throwErrorIfArgumentsAreNotValid();
|
||||||
|
|
||||||
|
RollingFileStream.super_.call(this, filename, options);
|
||||||
|
}
|
||||||
|
util.inherits(RollingFileStream, BaseRollingFileStream);
|
||||||
|
|
||||||
|
RollingFileStream.prototype.shouldRoll = function() {
|
||||||
|
debug("should roll with current size %d, and max size %d", this.currentSize, this.size);
|
||||||
|
return this.currentSize >= this.size;
|
||||||
|
};
|
||||||
|
|
||||||
|
RollingFileStream.prototype.roll = function(filename, callback) {
|
||||||
|
var that = this,
|
||||||
|
nameMatcher = new RegExp('^' + path.basename(filename));
|
||||||
|
|
||||||
|
function justTheseFiles (item) {
|
||||||
|
return nameMatcher.test(item);
|
||||||
|
}
|
||||||
|
|
||||||
|
function index(filename_) {
|
||||||
|
return parseInt(filename_.substring((path.basename(filename) + '.').length), 10) || 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
function byIndex(a, b) {
|
||||||
|
if (index(a) > index(b)) {
|
||||||
|
return 1;
|
||||||
|
} else if (index(a) < index(b) ) {
|
||||||
|
return -1;
|
||||||
|
} else {
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function increaseFileIndex (fileToRename, cb) {
|
||||||
|
var idx = index(fileToRename);
|
||||||
|
debug('Index of ' + fileToRename + ' is ' + idx);
|
||||||
|
if (idx < that.backups) {
|
||||||
|
//on windows, you can get a EEXIST error if you rename a file to an existing file
|
||||||
|
//so, we'll try to delete the file we're renaming to first
|
||||||
|
fs.unlink(filename + '.' + (idx+1), function (err) {
|
||||||
|
//ignore err: if we could not delete, it's most likely that it doesn't exist
|
||||||
|
debug('Renaming ' + fileToRename + ' -> ' + filename + '.' + (idx+1));
|
||||||
|
fs.rename(path.join(path.dirname(filename), fileToRename), filename + '.' + (idx + 1), cb);
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
cb();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function renameTheFiles(cb) {
|
||||||
|
//roll the backups (rename file.n to file.n+1, where n <= numBackups)
|
||||||
|
debug("Renaming the old files");
|
||||||
|
fs.readdir(path.dirname(filename), function (err, files) {
|
||||||
|
async.forEachSeries(
|
||||||
|
files.filter(justTheseFiles).sort(byIndex).reverse(),
|
||||||
|
increaseFileIndex,
|
||||||
|
cb
|
||||||
|
);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
debug("Rolling, rolling, rolling");
|
||||||
|
async.series([
|
||||||
|
this.closeTheStream.bind(this),
|
||||||
|
renameTheFiles,
|
||||||
|
this.openTheStream.bind(this)
|
||||||
|
], callback);
|
||||||
|
|
||||||
|
};
|
||||||
2
lib/streams/index.js
Normal file
2
lib/streams/index.js
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
exports.RollingFileStream = require('./RollingFileStream');
|
||||||
|
exports.DateRollingFileStream = require('./DateRollingFileStream');
|
||||||
75
package.json
75
package.json
@@ -1,37 +1,42 @@
|
|||||||
{
|
{
|
||||||
"name": "log4js",
|
"name": "log4js",
|
||||||
"version": "0.5.0",
|
"version": "0.6.3",
|
||||||
"description": "Port of Log4js to work with node.",
|
"description": "Port of Log4js to work with node.",
|
||||||
"keywords": [
|
"keywords": [
|
||||||
"logging",
|
"logging",
|
||||||
"log",
|
"log",
|
||||||
"log4j",
|
"log4j",
|
||||||
"node"
|
"node"
|
||||||
],
|
],
|
||||||
"main": "./lib/log4js",
|
"main": "./lib/log4js",
|
||||||
"author": "Gareth Jones <gareth.jones@sensis.com.au>",
|
"author": "Gareth Jones <gareth.jones@sensis.com.au>",
|
||||||
"repository": {
|
"repository": {
|
||||||
"type": "git",
|
"type": "git",
|
||||||
"url": "https://github.com/nomiddlename/log4js-node.git"
|
"url": "https://github.com/nomiddlename/log4js-node.git"
|
||||||
},
|
},
|
||||||
"bugs": {
|
"bugs": {
|
||||||
"url": "http://github.com/nomiddlename/log4js-node/issues"
|
"url": "http://github.com/nomiddlename/log4js-node/issues"
|
||||||
},
|
},
|
||||||
"engines": [ "node >=0.6" ],
|
"engines": {
|
||||||
"scripts": {
|
"node": ">=0.8"
|
||||||
"test": "vows test/*.js"
|
},
|
||||||
},
|
"scripts": {
|
||||||
"directories": {
|
"test": "vows"
|
||||||
"test": "test",
|
},
|
||||||
"lib": "lib"
|
"directories": {
|
||||||
},
|
"test": "test",
|
||||||
"dependencies": {
|
"lib": "lib"
|
||||||
"async": "0.1.15"
|
},
|
||||||
},
|
"dependencies": {
|
||||||
"devDependencies": {
|
"async": "0.1.15",
|
||||||
"vows": "0.6.2",
|
"dequeue": "1.0.3",
|
||||||
"sandboxed-module": "0.1.3",
|
"semver": "~1.1.4",
|
||||||
"hook.io": "0.8.10",
|
"readable-stream": "~1.0.2"
|
||||||
"underscore": "1.2.1"
|
},
|
||||||
}
|
"devDependencies": {
|
||||||
|
"vows": "0.7.0",
|
||||||
|
"sandboxed-module": "0.1.3",
|
||||||
|
"hook.io": "0.8.10",
|
||||||
|
"underscore": "1.2.1"
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,130 +0,0 @@
|
|||||||
var vows = require('vows')
|
|
||||||
, assert = require('assert')
|
|
||||||
, events = require('events')
|
|
||||||
, BufferedWriteStream = require('../lib/streams').BufferedWriteStream;
|
|
||||||
|
|
||||||
function FakeStream() {
|
|
||||||
this.writes = [];
|
|
||||||
this.canWrite = false;
|
|
||||||
this.callbacks = {};
|
|
||||||
}
|
|
||||||
|
|
||||||
FakeStream.prototype.on = function(event, callback) {
|
|
||||||
this.callbacks[event] = callback;
|
|
||||||
}
|
|
||||||
|
|
||||||
FakeStream.prototype.write = function(data, encoding) {
|
|
||||||
assert.equal("utf8", encoding);
|
|
||||||
this.writes.push(data);
|
|
||||||
return this.canWrite;
|
|
||||||
}
|
|
||||||
|
|
||||||
FakeStream.prototype.emit = function(event, payload) {
|
|
||||||
this.callbacks[event](payload);
|
|
||||||
}
|
|
||||||
|
|
||||||
FakeStream.prototype.block = function() {
|
|
||||||
this.canWrite = false;
|
|
||||||
}
|
|
||||||
|
|
||||||
FakeStream.prototype.unblock = function() {
|
|
||||||
this.canWrite = true;
|
|
||||||
this.emit("drain");
|
|
||||||
}
|
|
||||||
|
|
||||||
vows.describe('BufferedWriteStream').addBatch({
|
|
||||||
'stream': {
|
|
||||||
topic: new BufferedWriteStream(new FakeStream()),
|
|
||||||
'should take a stream as an argument and return a stream': function(stream) {
|
|
||||||
assert.instanceOf(stream, events.EventEmitter);
|
|
||||||
}
|
|
||||||
},
|
|
||||||
'before stream is open': {
|
|
||||||
topic: function() {
|
|
||||||
var fakeStream = new FakeStream(),
|
|
||||||
stream = new BufferedWriteStream(fakeStream);
|
|
||||||
stream.write("Some data", "utf8");
|
|
||||||
stream.write("Some more data", "utf8");
|
|
||||||
return fakeStream.writes;
|
|
||||||
},
|
|
||||||
'should buffer writes': function(writes) {
|
|
||||||
assert.equal(writes.length, 0);
|
|
||||||
}
|
|
||||||
},
|
|
||||||
'when stream is open': {
|
|
||||||
topic: function() {
|
|
||||||
var fakeStream = new FakeStream(),
|
|
||||||
stream = new BufferedWriteStream(fakeStream);
|
|
||||||
stream.write("Some data", "utf8");
|
|
||||||
fakeStream.canWrite = true;
|
|
||||||
fakeStream.emit("open");
|
|
||||||
stream.write("Some more data", "utf8");
|
|
||||||
return fakeStream.writes;
|
|
||||||
},
|
|
||||||
'should write data to stream from before stream was open': function (writes) {
|
|
||||||
assert.equal(writes[0], "Some data");
|
|
||||||
},
|
|
||||||
'should write data to stream from after stream was open': function (writes) {
|
|
||||||
assert.equal(writes[1], "Some more data");
|
|
||||||
}
|
|
||||||
},
|
|
||||||
'when stream is blocked': {
|
|
||||||
topic: function() {
|
|
||||||
var fakeStream = new FakeStream(),
|
|
||||||
stream = new BufferedWriteStream(fakeStream);
|
|
||||||
fakeStream.emit("open");
|
|
||||||
fakeStream.block();
|
|
||||||
stream.write("will not know it is blocked until first write", "utf8");
|
|
||||||
stream.write("so this one will be buffered, but not the previous one", "utf8");
|
|
||||||
return fakeStream.writes;
|
|
||||||
},
|
|
||||||
'should buffer writes': function (writes) {
|
|
||||||
assert.equal(writes.length, 1);
|
|
||||||
assert.equal(writes[0], "will not know it is blocked until first write");
|
|
||||||
}
|
|
||||||
},
|
|
||||||
'when stream is unblocked': {
|
|
||||||
topic: function() {
|
|
||||||
var fakeStream = new FakeStream(),
|
|
||||||
stream = new BufferedWriteStream(fakeStream);
|
|
||||||
fakeStream.emit("open");
|
|
||||||
fakeStream.block();
|
|
||||||
stream.write("will not know it is blocked until first write", "utf8");
|
|
||||||
stream.write("so this one will be buffered, but not the previous one", "utf8");
|
|
||||||
fakeStream.unblock();
|
|
||||||
return fakeStream.writes;
|
|
||||||
},
|
|
||||||
'should send buffered data': function (writes) {
|
|
||||||
assert.equal(writes.length, 2);
|
|
||||||
assert.equal(writes[1], "so this one will be buffered, but not the previous one");
|
|
||||||
}
|
|
||||||
},
|
|
||||||
'when stream is closed': {
|
|
||||||
topic: function() {
|
|
||||||
var fakeStream = new FakeStream(),
|
|
||||||
stream = new BufferedWriteStream(fakeStream);
|
|
||||||
fakeStream.emit("open");
|
|
||||||
fakeStream.block();
|
|
||||||
stream.write("first write to notice stream is blocked", "utf8");
|
|
||||||
stream.write("data while blocked", "utf8");
|
|
||||||
stream.end();
|
|
||||||
return fakeStream.writes;
|
|
||||||
},
|
|
||||||
'should send any buffered writes to the stream': function (writes) {
|
|
||||||
assert.equal(writes.length, 2);
|
|
||||||
assert.equal(writes[1], "data while blocked");
|
|
||||||
}
|
|
||||||
},
|
|
||||||
'when stream errors': {
|
|
||||||
topic: function() {
|
|
||||||
var fakeStream = new FakeStream(),
|
|
||||||
stream = new BufferedWriteStream(fakeStream);
|
|
||||||
stream.on("error", this.callback);
|
|
||||||
fakeStream.emit("error", "oh noes!");
|
|
||||||
},
|
|
||||||
'should emit error': function(err, value) {
|
|
||||||
assert.equal(err, "oh noes!");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
}).exportTo(module);
|
|
||||||
@@ -84,6 +84,48 @@ vows.describe('log4js configure').addBatch({
|
|||||||
'should add appender configure function to appenderMakers': function(log4js) {
|
'should add appender configure function to appenderMakers': function(log4js) {
|
||||||
assert.isFunction(log4js.appenderMakers['some/other/external']);
|
assert.isFunction(log4js.appenderMakers['some/other/external']);
|
||||||
}
|
}
|
||||||
|
},
|
||||||
|
'when configuration file loaded via LOG4JS_CONFIG environment variable': {
|
||||||
|
topic: function() {
|
||||||
|
process.env.LOG4JS_CONFIG = 'some/path/to/mylog4js.json';
|
||||||
|
var fileRead = 0,
|
||||||
|
modulePath = 'some/path/to/mylog4js.json',
|
||||||
|
pathsChecked = [],
|
||||||
|
mtime = new Date(),
|
||||||
|
fakeFS = {
|
||||||
|
config: { appenders: [ { type: 'console', layout: { type: 'messagePassThrough' } } ],
|
||||||
|
levels: { 'a-test' : 'INFO' } },
|
||||||
|
readdirSync: function(dir) {
|
||||||
|
return require('fs').readdirSync(dir);
|
||||||
|
},
|
||||||
|
readFileSync: function (file, encoding) {
|
||||||
|
fileRead += 1;
|
||||||
|
assert.isString(file);
|
||||||
|
assert.equal(file, modulePath);
|
||||||
|
assert.equal(encoding, 'utf8');
|
||||||
|
return JSON.stringify(fakeFS.config);
|
||||||
|
},
|
||||||
|
statSync: function (path) {
|
||||||
|
pathsChecked.push(path);
|
||||||
|
if (path === modulePath) {
|
||||||
|
return { mtime: mtime };
|
||||||
|
} else {
|
||||||
|
throw new Error("no such file");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
log4js = sandbox.require('../lib/log4js',
|
||||||
|
{
|
||||||
|
requires: {
|
||||||
|
'fs': fakeFS,
|
||||||
|
}
|
||||||
|
});
|
||||||
|
delete process.env.LOG4JS_CONFIG;
|
||||||
|
return fileRead;
|
||||||
|
},
|
||||||
|
'should load the specified local configuration file' : function(fileRead) {
|
||||||
|
assert.equal(fileRead, 1);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}).exportTo(module);
|
}).exportTo(module);
|
||||||
129
test/dateFileAppender-test.js
Normal file
129
test/dateFileAppender-test.js
Normal file
@@ -0,0 +1,129 @@
|
|||||||
|
var vows = require('vows'),
|
||||||
|
assert = require('assert'),
|
||||||
|
path = require('path'),
|
||||||
|
fs = require('fs'),
|
||||||
|
log4js = require('../lib/log4js');
|
||||||
|
|
||||||
|
function removeFile(filename) {
|
||||||
|
return function() {
|
||||||
|
fs.unlink(path.join(__dirname, filename), function(err) {
|
||||||
|
if (err) {
|
||||||
|
console.log("Could not delete ", filename, err);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
vows.describe('../lib/appenders/dateFile').addBatch({
|
||||||
|
'appender': {
|
||||||
|
'adding multiple dateFileAppenders': {
|
||||||
|
topic: function () {
|
||||||
|
var listenersCount = process.listeners('exit').length,
|
||||||
|
dateFileAppender = require('../lib/appenders/dateFile'),
|
||||||
|
count = 5,
|
||||||
|
logfile;
|
||||||
|
|
||||||
|
while (count--) {
|
||||||
|
logfile = path.join(__dirname, 'datefa-default-test' + count + '.log');
|
||||||
|
log4js.addAppender(dateFileAppender.appender(logfile));
|
||||||
|
}
|
||||||
|
|
||||||
|
return listenersCount;
|
||||||
|
},
|
||||||
|
teardown: function() {
|
||||||
|
removeFile('datefa-default-test0.log')();
|
||||||
|
removeFile('datefa-default-test1.log')();
|
||||||
|
removeFile('datefa-default-test2.log')();
|
||||||
|
removeFile('datefa-default-test3.log')();
|
||||||
|
removeFile('datefa-default-test4.log')();
|
||||||
|
},
|
||||||
|
|
||||||
|
'should only add one `exit` listener': function (initialCount) {
|
||||||
|
assert.equal(process.listeners('exit').length, initialCount + 1);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
'with default settings': {
|
||||||
|
topic: function() {
|
||||||
|
var that = this,
|
||||||
|
testFile = path.join(__dirname, 'date-appender-default.log'),
|
||||||
|
appender = require('../lib/appenders/dateFile').appender(testFile),
|
||||||
|
logger = log4js.getLogger('default-settings');
|
||||||
|
log4js.clearAppenders();
|
||||||
|
log4js.addAppender(appender, 'default-settings');
|
||||||
|
|
||||||
|
logger.info("This should be in the file.");
|
||||||
|
|
||||||
|
setTimeout(function() {
|
||||||
|
fs.readFile(testFile, "utf8", that.callback);
|
||||||
|
}, 100);
|
||||||
|
|
||||||
|
},
|
||||||
|
teardown: removeFile('date-appender-default.log'),
|
||||||
|
|
||||||
|
'should write to the file': function(contents) {
|
||||||
|
assert.include(contents, 'This should be in the file');
|
||||||
|
},
|
||||||
|
|
||||||
|
'should use the basic layout': function(contents) {
|
||||||
|
assert.match(contents, /\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}\] \[INFO\] default-settings - /);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
}).addBatch({
|
||||||
|
'configure': {
|
||||||
|
'with dateFileAppender': {
|
||||||
|
topic: function() {
|
||||||
|
var log4js = require('../lib/log4js')
|
||||||
|
, logger;
|
||||||
|
//this config file defines one file appender (to ./date-file-test.log)
|
||||||
|
//and sets the log level for "tests" to WARN
|
||||||
|
log4js.configure('test/with-dateFile.json');
|
||||||
|
logger = log4js.getLogger('tests');
|
||||||
|
logger.info('this should not be written to the file');
|
||||||
|
logger.warn('this should be written to the file');
|
||||||
|
|
||||||
|
fs.readFile(path.join(__dirname, 'date-file-test.log'), 'utf8', this.callback);
|
||||||
|
},
|
||||||
|
teardown: removeFile('date-file-test.log'),
|
||||||
|
|
||||||
|
'should load appender configuration from a json file': function(err, contents) {
|
||||||
|
assert.include(contents, 'this should be written to the file' + require('os').EOL);
|
||||||
|
assert.equal(contents.indexOf('this should not be written to the file'), -1);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
'with options.alwaysIncludePattern': {
|
||||||
|
topic: function() {
|
||||||
|
var log4js = require('../lib/log4js')
|
||||||
|
, format = require('../lib/date_format')
|
||||||
|
, logger
|
||||||
|
, options = {
|
||||||
|
"appenders": [
|
||||||
|
{
|
||||||
|
"category": "tests",
|
||||||
|
"type": "dateFile",
|
||||||
|
"filename": "test/date-file-test",
|
||||||
|
"pattern": "-from-MM-dd.log",
|
||||||
|
"alwaysIncludePattern": true,
|
||||||
|
"layout": {
|
||||||
|
"type": "messagePassThrough"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
, thisTime = format.asString(options.appenders[0].pattern, new Date());
|
||||||
|
log4js.clearAppenders();
|
||||||
|
log4js.configure(options);
|
||||||
|
logger = log4js.getLogger('tests');
|
||||||
|
logger.warn('this should be written to the file with the appended date');
|
||||||
|
this.teardown = removeFile('date-file-test' + thisTime);
|
||||||
|
fs.readFile(path.join(__dirname, 'date-file-test' + thisTime), 'utf8', this.callback);
|
||||||
|
},
|
||||||
|
'should create file with the correct pattern': function(contents) {
|
||||||
|
assert.include(contents, 'this should be written to the file with the appended date');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
}).exportTo(module);
|
||||||
@@ -15,6 +15,24 @@ function remove(filename) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
vows.describe('log4js fileAppender').addBatch({
|
vows.describe('log4js fileAppender').addBatch({
|
||||||
|
'adding multiple fileAppenders': {
|
||||||
|
topic: function () {
|
||||||
|
var listenersCount = process.listeners('exit').length
|
||||||
|
, logger = log4js.getLogger('default-settings')
|
||||||
|
, count = 5, logfile;
|
||||||
|
|
||||||
|
while (count--) {
|
||||||
|
logfile = path.join(__dirname, '/fa-default-test' + count + '.log');
|
||||||
|
log4js.addAppender(require('../lib/appenders/file').appender(logfile), 'default-settings');
|
||||||
|
}
|
||||||
|
|
||||||
|
return listenersCount;
|
||||||
|
},
|
||||||
|
|
||||||
|
'does not adds more than one `exit` listeners': function (initialCount) {
|
||||||
|
assert.ok(process.listeners('exit').length <= initialCount + 1);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
'with default fileAppender settings': {
|
'with default fileAppender settings': {
|
||||||
topic: function() {
|
topic: function() {
|
||||||
@@ -90,7 +108,13 @@ vows.describe('log4js fileAppender').addBatch({
|
|||||||
var that = this;
|
var that = this;
|
||||||
//give the system a chance to open the stream
|
//give the system a chance to open the stream
|
||||||
setTimeout(function() {
|
setTimeout(function() {
|
||||||
fs.readdir(__dirname, that.callback);
|
fs.readdir(__dirname, function(err, files) {
|
||||||
|
if (files) {
|
||||||
|
that.callback(null, files.sort());
|
||||||
|
} else {
|
||||||
|
that.callback(err, files);
|
||||||
|
}
|
||||||
|
});
|
||||||
}, 200);
|
}, 200);
|
||||||
},
|
},
|
||||||
'the log files': {
|
'the log files': {
|
||||||
@@ -102,31 +126,31 @@ vows.describe('log4js fileAppender').addBatch({
|
|||||||
assert.equal(files.length, 3);
|
assert.equal(files.length, 3);
|
||||||
},
|
},
|
||||||
'should be named in sequence': function (files) {
|
'should be named in sequence': function (files) {
|
||||||
assert.deepEqual(files.sort(), ['fa-maxFileSize-with-backups-test.log', 'fa-maxFileSize-with-backups-test.log.1', 'fa-maxFileSize-with-backups-test.log.2']);
|
assert.deepEqual(files, ['fa-maxFileSize-with-backups-test.log', 'fa-maxFileSize-with-backups-test.log.1', 'fa-maxFileSize-with-backups-test.log.2']);
|
||||||
},
|
},
|
||||||
'and the contents of the first file': {
|
'and the contents of the first file': {
|
||||||
topic: function(logFiles) {
|
topic: function(logFiles) {
|
||||||
fs.readFile(path.join(__dirname, logFiles[0]), "utf8", this.callback);
|
fs.readFile(path.join(__dirname, logFiles[0]), "utf8", this.callback);
|
||||||
},
|
|
||||||
'should be empty because the last log message triggers rolling': function(contents) {
|
|
||||||
assert.isEmpty(contents);
|
|
||||||
}
|
|
||||||
},
|
|
||||||
'and the contents of the second file': {
|
|
||||||
topic: function(logFiles) {
|
|
||||||
fs.readFile(path.join(__dirname, logFiles[1]), "utf8", this.callback);
|
|
||||||
},
|
},
|
||||||
'should be the last log message': function(contents) {
|
'should be the last log message': function(contents) {
|
||||||
assert.include(contents, 'This is the fourth log message.');
|
assert.include(contents, 'This is the fourth log message.');
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
'and the contents of the third file': {
|
'and the contents of the second file': {
|
||||||
topic: function(logFiles) {
|
topic: function(logFiles) {
|
||||||
fs.readFile(path.join(__dirname, logFiles[2]), "utf8", this.callback);
|
fs.readFile(path.join(__dirname, logFiles[1]), "utf8", this.callback);
|
||||||
},
|
},
|
||||||
'should be the third log message': function(contents) {
|
'should be the third log message': function(contents) {
|
||||||
assert.include(contents, 'This is the third log message.');
|
assert.include(contents, 'This is the third log message.');
|
||||||
}
|
}
|
||||||
|
},
|
||||||
|
'and the contents of the third file': {
|
||||||
|
topic: function(logFiles) {
|
||||||
|
fs.readFile(path.join(__dirname, logFiles[2]), "utf8", this.callback);
|
||||||
|
},
|
||||||
|
'should be the second log message': function(contents) {
|
||||||
|
assert.include(contents, 'This is the second log message.');
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -4,9 +4,10 @@ assert = require('assert');
|
|||||||
//used for patternLayout tests.
|
//used for patternLayout tests.
|
||||||
function test(args, pattern, value) {
|
function test(args, pattern, value) {
|
||||||
var layout = args[0]
|
var layout = args[0]
|
||||||
, event = args[1];
|
, event = args[1]
|
||||||
|
, tokens = args[2];
|
||||||
|
|
||||||
assert.equal(layout(pattern)(event), value);
|
assert.equal(layout(pattern, tokens)(event), value);
|
||||||
}
|
}
|
||||||
|
|
||||||
vows.describe('log4js layouts').addBatch({
|
vows.describe('log4js layouts').addBatch({
|
||||||
@@ -76,8 +77,50 @@ vows.describe('log4js layouts').addBatch({
|
|||||||
, toString: function() { return "ERROR"; }
|
, toString: function() { return "ERROR"; }
|
||||||
}
|
}
|
||||||
}), "{ thing: 1 }");
|
}), "{ thing: 1 }");
|
||||||
|
},
|
||||||
|
'should print the stacks of a passed error objects': function(layout) {
|
||||||
|
assert.isArray(layout({
|
||||||
|
data: [ new Error() ]
|
||||||
|
, startTime: new Date(2010, 11, 5, 14, 18, 30, 45)
|
||||||
|
, categoryName: "cheese"
|
||||||
|
, level: {
|
||||||
|
colour: "green"
|
||||||
|
, toString: function() { return "ERROR"; }
|
||||||
|
}
|
||||||
|
}).match(/Error\s+at Object\..*\s+\((.*)test[\\\/]layouts-test\.js\:\d+\:\d+\)\s+at runTest/)
|
||||||
|
, 'regexp did not return a match');
|
||||||
|
},
|
||||||
|
'with passed augmented errors':
|
||||||
|
{ topic:
|
||||||
|
function(layout){
|
||||||
|
var e = new Error("My Unique Error Message");
|
||||||
|
e.augmented = "My Unique attribute value"
|
||||||
|
e.augObj = { at1: "at2" }
|
||||||
|
return layout({
|
||||||
|
data: [ e ]
|
||||||
|
, startTime: new Date(2010, 11, 5, 14, 18, 30, 45)
|
||||||
|
, categoryName: "cheese"
|
||||||
|
, level: {
|
||||||
|
colour: "green"
|
||||||
|
, toString: function() { return "ERROR"; }
|
||||||
|
}
|
||||||
|
});
|
||||||
|
},
|
||||||
|
'should print error the contained error message': function(layoutOutput) {
|
||||||
|
var m = layoutOutput.match(/\{ \[Error: My Unique Error Message\]/);
|
||||||
|
assert.isArray(m);
|
||||||
|
},
|
||||||
|
'should print error augmented string attributes': function(layoutOutput) {
|
||||||
|
var m = layoutOutput.match(/augmented:\s'My Unique attribute value'/);
|
||||||
|
assert.isArray(m);
|
||||||
|
},
|
||||||
|
'should print error augmented object attributes': function(layoutOutput) {
|
||||||
|
var m = layoutOutput.match(/augObj:\s\{ at1: 'at2' \}/);
|
||||||
|
assert.isArray(m);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
},
|
},
|
||||||
|
|
||||||
'basicLayout': {
|
'basicLayout': {
|
||||||
@@ -106,10 +149,11 @@ vows.describe('log4js layouts').addBatch({
|
|||||||
output = layout(event);
|
output = layout(event);
|
||||||
lines = output.split(/\n/);
|
lines = output.split(/\n/);
|
||||||
|
|
||||||
assert.equal(lines.length, stack.length);
|
assert.equal(lines.length - 1, stack.length);
|
||||||
assert.equal(lines[0], "[2010-12-05 14:18:30.045] [DEBUG] tests - this is a test Error: Some made-up error");
|
assert.equal(lines[0], "[2010-12-05 14:18:30.045] [DEBUG] tests - this is a test [Error: Some made-up error]");
|
||||||
|
|
||||||
for (var i = 1; i < stack.length; i++) {
|
for (var i = 1; i < stack.length; i++) {
|
||||||
assert.equal(lines[i+1], stack[i+1]);
|
assert.equal(lines[i+2], stack[i+1]);
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
'should output any extra data in the log event as util.inspect strings': function(args) {
|
'should output any extra data in the log event as util.inspect strings': function(args) {
|
||||||
@@ -132,8 +176,12 @@ vows.describe('log4js layouts').addBatch({
|
|||||||
level: {
|
level: {
|
||||||
toString: function() { return "DEBUG"; }
|
toString: function() { return "DEBUG"; }
|
||||||
}
|
}
|
||||||
}, layout = require('../lib/layouts').patternLayout;
|
}, layout = require('../lib/layouts').patternLayout
|
||||||
return [layout, event];
|
, tokens = {
|
||||||
|
testString: 'testStringToken',
|
||||||
|
testFunction: function() { return 'testFunctionToken'; }
|
||||||
|
};
|
||||||
|
return [layout, event, tokens];
|
||||||
},
|
},
|
||||||
|
|
||||||
'should default to "time logLevel loggerName - message"': function(args) {
|
'should default to "time logLevel loggerName - message"': function(args) {
|
||||||
@@ -200,9 +248,21 @@ vows.describe('log4js layouts').addBatch({
|
|||||||
test(args, '%-6p', 'DEBUG ');
|
test(args, '%-6p', 'DEBUG ');
|
||||||
test(args, '%-8p', 'DEBUG ');
|
test(args, '%-8p', 'DEBUG ');
|
||||||
test(args, '%-10p', 'DEBUG ');
|
test(args, '%-10p', 'DEBUG ');
|
||||||
}
|
},
|
||||||
|
'%[%r%] should output colored time': function(args) {
|
||||||
|
test(args, '%[%r%]', '\033[36m14:18:30\033[39m');
|
||||||
|
},
|
||||||
|
'%x{testString} should output the string stored in tokens': function(args) {
|
||||||
|
test(args, '%x{testString}', 'testStringToken');
|
||||||
|
},
|
||||||
|
'%x{testFunction} should output the result of the function stored in tokens': function(args) {
|
||||||
|
test(args, '%x{testFunction}', 'testFunctionToken');
|
||||||
|
},
|
||||||
|
'%x{doesNotExist} should output the string stored in tokens': function(args) {
|
||||||
|
test(args, '%x{doesNotExist}', '%x{doesNotExist}');
|
||||||
|
},
|
||||||
|
'%x should output the string stored in tokens': function(args) {
|
||||||
|
test(args, '%x', '%x');
|
||||||
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
}).export(module);
|
}).export(module);
|
||||||
|
|
||||||
@@ -50,9 +50,10 @@ vows.describe('log4js-abspath').addBatch({
|
|||||||
{
|
{
|
||||||
RollingFileStream: function(file) {
|
RollingFileStream: function(file) {
|
||||||
fileOpened = file;
|
fileOpened = file;
|
||||||
},
|
return {
|
||||||
BufferedWriteStream: function(other) {
|
on: function() {},
|
||||||
return { on: function() { }, end: function() {} }
|
end: function() {}
|
||||||
|
};
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -115,8 +115,7 @@ vows.describe('log4js').addBatch({
|
|||||||
"type" : "file",
|
"type" : "file",
|
||||||
"filename" : "cheesy-wotsits.log",
|
"filename" : "cheesy-wotsits.log",
|
||||||
"maxLogSize" : 1024,
|
"maxLogSize" : 1024,
|
||||||
"backups" : 3,
|
"backups" : 3
|
||||||
"pollInterval" : 15
|
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
};
|
};
|
||||||
@@ -576,5 +575,4 @@ vows.describe('log4js').addBatch({
|
|||||||
assert.equal(logEvents[1].data[0], 'info3');
|
assert.equal(logEvents[1].data[0], 'info3');
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
}).export(module);
|
}).export(module);
|
||||||
241
test/multiprocess-test.js
Normal file
241
test/multiprocess-test.js
Normal file
@@ -0,0 +1,241 @@
|
|||||||
|
var vows = require('vows'),
|
||||||
|
sandbox = require('sandboxed-module'),
|
||||||
|
assert = require('assert');
|
||||||
|
|
||||||
|
function makeFakeNet() {
|
||||||
|
return {
|
||||||
|
logEvents: [],
|
||||||
|
data: [],
|
||||||
|
cbs: {},
|
||||||
|
createConnectionCalled: 0,
|
||||||
|
fakeAppender: function(logEvent) {
|
||||||
|
this.logEvents.push(logEvent);
|
||||||
|
},
|
||||||
|
createConnection: function(port, host) {
|
||||||
|
var fakeNet = this;
|
||||||
|
this.port = port;
|
||||||
|
this.host = host;
|
||||||
|
this.createConnectionCalled += 1;
|
||||||
|
return {
|
||||||
|
on: function(evt, cb) {
|
||||||
|
fakeNet.cbs[evt] = cb;
|
||||||
|
},
|
||||||
|
write: function(data, encoding) {
|
||||||
|
fakeNet.data.push(data);
|
||||||
|
fakeNet.encoding = encoding;
|
||||||
|
},
|
||||||
|
end: function() {
|
||||||
|
fakeNet.closeCalled = true;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
},
|
||||||
|
createServer: function(cb) {
|
||||||
|
var fakeNet = this;
|
||||||
|
cb({
|
||||||
|
remoteAddress: '1.2.3.4',
|
||||||
|
remotePort: '1234',
|
||||||
|
setEncoding: function(encoding) {
|
||||||
|
fakeNet.encoding = encoding;
|
||||||
|
},
|
||||||
|
on: function(event, cb) {
|
||||||
|
fakeNet.cbs[event] = cb;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
return {
|
||||||
|
listen: function(port, host) {
|
||||||
|
fakeNet.port = port;
|
||||||
|
fakeNet.host = host;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
vows.describe('Multiprocess Appender').addBatch({
|
||||||
|
'worker': {
|
||||||
|
topic: function() {
|
||||||
|
var fakeNet = makeFakeNet(),
|
||||||
|
appender = sandbox.require(
|
||||||
|
'../lib/appenders/multiprocess',
|
||||||
|
{
|
||||||
|
requires: {
|
||||||
|
'net': fakeNet
|
||||||
|
}
|
||||||
|
}
|
||||||
|
).appender({ mode: 'worker', loggerPort: 1234, loggerHost: 'pants' });
|
||||||
|
|
||||||
|
//don't need a proper log event for the worker tests
|
||||||
|
appender('before connect');
|
||||||
|
fakeNet.cbs['connect']();
|
||||||
|
appender('after connect');
|
||||||
|
fakeNet.cbs['close'](true);
|
||||||
|
appender('after error, before connect');
|
||||||
|
fakeNet.cbs['connect']();
|
||||||
|
appender('after error, after connect');
|
||||||
|
|
||||||
|
return fakeNet;
|
||||||
|
},
|
||||||
|
'should open a socket to the loggerPort and loggerHost': function(net) {
|
||||||
|
assert.equal(net.port, 1234);
|
||||||
|
assert.equal(net.host, 'pants');
|
||||||
|
},
|
||||||
|
'should buffer messages written before socket is connected': function(net) {
|
||||||
|
assert.equal(net.data[0], JSON.stringify('before connect'));
|
||||||
|
},
|
||||||
|
'should write log messages to socket as json strings with a terminator string': function(net) {
|
||||||
|
assert.equal(net.data[0], JSON.stringify('before connect'));
|
||||||
|
assert.equal(net.data[1], '__LOG4JS__');
|
||||||
|
assert.equal(net.data[2], JSON.stringify('after connect'));
|
||||||
|
assert.equal(net.data[3], '__LOG4JS__');
|
||||||
|
assert.equal(net.encoding, 'utf8');
|
||||||
|
},
|
||||||
|
'should attempt to re-open the socket on error': function(net) {
|
||||||
|
assert.equal(net.data[4], JSON.stringify('after error, before connect'));
|
||||||
|
assert.equal(net.data[5], '__LOG4JS__');
|
||||||
|
assert.equal(net.data[6], JSON.stringify('after error, after connect'));
|
||||||
|
assert.equal(net.data[7], '__LOG4JS__');
|
||||||
|
assert.equal(net.createConnectionCalled, 2);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
'worker with timeout': {
|
||||||
|
topic: function() {
|
||||||
|
var fakeNet = makeFakeNet(),
|
||||||
|
appender = sandbox.require(
|
||||||
|
'../lib/appenders/multiprocess',
|
||||||
|
{
|
||||||
|
requires: {
|
||||||
|
'net': fakeNet
|
||||||
|
}
|
||||||
|
}
|
||||||
|
).appender({ mode: 'worker' });
|
||||||
|
|
||||||
|
//don't need a proper log event for the worker tests
|
||||||
|
appender('before connect');
|
||||||
|
fakeNet.cbs['connect']();
|
||||||
|
appender('after connect');
|
||||||
|
fakeNet.cbs['timeout']();
|
||||||
|
appender('after timeout, before close');
|
||||||
|
fakeNet.cbs['close']();
|
||||||
|
appender('after close, before connect');
|
||||||
|
fakeNet.cbs['connect']();
|
||||||
|
appender('after close, after connect');
|
||||||
|
|
||||||
|
return fakeNet;
|
||||||
|
},
|
||||||
|
'should attempt to re-open the socket': function(net) {
|
||||||
|
//skipping the __LOG4JS__ separators
|
||||||
|
assert.equal(net.data[0], JSON.stringify('before connect'));
|
||||||
|
assert.equal(net.data[2], JSON.stringify('after connect'));
|
||||||
|
assert.equal(net.data[4], JSON.stringify('after timeout, before close'));
|
||||||
|
assert.equal(net.data[6], JSON.stringify('after close, before connect'));
|
||||||
|
assert.equal(net.data[8], JSON.stringify('after close, after connect'));
|
||||||
|
assert.equal(net.createConnectionCalled, 2);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
'worker defaults': {
|
||||||
|
topic: function() {
|
||||||
|
var fakeNet = makeFakeNet(),
|
||||||
|
appender = sandbox.require(
|
||||||
|
'../lib/appenders/multiprocess',
|
||||||
|
{
|
||||||
|
requires: {
|
||||||
|
'net': fakeNet
|
||||||
|
}
|
||||||
|
}
|
||||||
|
).appender({ mode: 'worker' });
|
||||||
|
|
||||||
|
return fakeNet;
|
||||||
|
},
|
||||||
|
'should open a socket to localhost:5000': function(net) {
|
||||||
|
assert.equal(net.port, 5000);
|
||||||
|
assert.equal(net.host, 'localhost');
|
||||||
|
}
|
||||||
|
},
|
||||||
|
'master': {
|
||||||
|
topic: function() {
|
||||||
|
var fakeNet = makeFakeNet(),
|
||||||
|
appender = sandbox.require(
|
||||||
|
'../lib/appenders/multiprocess',
|
||||||
|
{
|
||||||
|
requires: {
|
||||||
|
'net': fakeNet
|
||||||
|
}
|
||||||
|
}
|
||||||
|
).appender({ mode: 'master',
|
||||||
|
loggerHost: 'server',
|
||||||
|
loggerPort: 1234,
|
||||||
|
actualAppender: fakeNet.fakeAppender.bind(fakeNet)
|
||||||
|
});
|
||||||
|
|
||||||
|
appender('this should be sent to the actual appender directly');
|
||||||
|
|
||||||
|
return fakeNet;
|
||||||
|
},
|
||||||
|
'should listen for log messages on loggerPort and loggerHost': function(net) {
|
||||||
|
assert.equal(net.port, 1234);
|
||||||
|
assert.equal(net.host, 'server');
|
||||||
|
},
|
||||||
|
'should return the underlying appender': function(net) {
|
||||||
|
assert.equal(net.logEvents[0], 'this should be sent to the actual appender directly');
|
||||||
|
},
|
||||||
|
'when a client connects': {
|
||||||
|
topic: function(net) {
|
||||||
|
var logString = JSON.stringify({ level: { level: 10000, levelStr: 'DEBUG' }, data: ['some debug']}) + '__LOG4JS__';
|
||||||
|
|
||||||
|
net.cbs['data'](JSON.stringify({ level: { level: 40000, levelStr: 'ERROR' }, data: ['an error message'] }) + '__LOG4JS__');
|
||||||
|
net.cbs['data'](logString.substring(0, 10));
|
||||||
|
net.cbs['data'](logString.substring(10));
|
||||||
|
net.cbs['data'](logString + logString + logString);
|
||||||
|
net.cbs['end'](JSON.stringify({ level: { level: 50000, levelStr: 'FATAL' }, data: ["that's all folks"] }) + '__LOG4JS__');
|
||||||
|
net.cbs['data']('bad message__LOG4JS__');
|
||||||
|
return net;
|
||||||
|
},
|
||||||
|
'should parse log messages into log events and send to appender': function(net) {
|
||||||
|
assert.equal(net.logEvents[1].level.toString(), 'ERROR');
|
||||||
|
assert.equal(net.logEvents[1].data[0], 'an error message');
|
||||||
|
assert.equal(net.logEvents[1].remoteAddress, '1.2.3.4');
|
||||||
|
assert.equal(net.logEvents[1].remotePort, '1234');
|
||||||
|
},
|
||||||
|
'should parse log messages split into multiple chunks': function(net) {
|
||||||
|
assert.equal(net.logEvents[2].level.toString(), 'DEBUG');
|
||||||
|
assert.equal(net.logEvents[2].data[0], 'some debug');
|
||||||
|
assert.equal(net.logEvents[2].remoteAddress, '1.2.3.4');
|
||||||
|
assert.equal(net.logEvents[2].remotePort, '1234');
|
||||||
|
},
|
||||||
|
'should parse multiple log messages in a single chunk': function(net) {
|
||||||
|
assert.equal(net.logEvents[3].data[0], 'some debug');
|
||||||
|
assert.equal(net.logEvents[4].data[0], 'some debug');
|
||||||
|
assert.equal(net.logEvents[5].data[0], 'some debug');
|
||||||
|
},
|
||||||
|
'should handle log messages sent as part of end event': function(net) {
|
||||||
|
assert.equal(net.logEvents[6].data[0], "that's all folks");
|
||||||
|
},
|
||||||
|
'should handle unparseable log messages': function(net) {
|
||||||
|
assert.equal(net.logEvents[7].level.toString(), 'ERROR');
|
||||||
|
assert.equal(net.logEvents[7].categoryName, 'log4js');
|
||||||
|
assert.equal(net.logEvents[7].data[0], 'Unable to parse log:');
|
||||||
|
assert.equal(net.logEvents[7].data[1], 'bad message');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
'master defaults': {
|
||||||
|
topic: function() {
|
||||||
|
var fakeNet = makeFakeNet(),
|
||||||
|
appender = sandbox.require(
|
||||||
|
'../lib/appenders/multiprocess',
|
||||||
|
{
|
||||||
|
requires: {
|
||||||
|
'net': fakeNet
|
||||||
|
}
|
||||||
|
}
|
||||||
|
).appender({ mode: 'master' });
|
||||||
|
|
||||||
|
return fakeNet;
|
||||||
|
},
|
||||||
|
'should listen for log messages on localhost:5000': function(net) {
|
||||||
|
assert.equal(net.port, 5000);
|
||||||
|
assert.equal(net.host, 'localhost');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}).exportTo(module);
|
||||||
@@ -1,173 +0,0 @@
|
|||||||
var vows = require('vows');
|
|
||||||
var assert = require('assert');
|
|
||||||
var sandbox = require('sandboxed-module');
|
|
||||||
var _ = require('underscore');
|
|
||||||
|
|
||||||
function fancyResultingMultiprocessAppender(opts) {
|
|
||||||
var result = { clientOns: {}, serverOns: {}, logged: [], ended: [] };
|
|
||||||
|
|
||||||
var fakeSocket = {
|
|
||||||
on: function (event, fn) {
|
|
||||||
result.clientOns[event] = fn;
|
|
||||||
if (event === 'connect') {
|
|
||||||
fn();
|
|
||||||
}
|
|
||||||
},
|
|
||||||
end: function (data, encoding) {
|
|
||||||
result.ended.push({ data: data, encoding: encoding });
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
var fakeServerSocket = {
|
|
||||||
on: function (event, fn) {
|
|
||||||
result.serverOns[event] = fn;
|
|
||||||
if (event === 'connect') {
|
|
||||||
fn();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
var fakeServer = {
|
|
||||||
listen: function (port, host) {
|
|
||||||
result.listenPort = port;
|
|
||||||
result.listenHost = host;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
var fakeNet = {
|
|
||||||
createServer: function (fn) {
|
|
||||||
fn(fakeServerSocket);
|
|
||||||
return fakeServer;
|
|
||||||
},
|
|
||||||
createConnection: function (port, host) {
|
|
||||||
result.connectPort = port;
|
|
||||||
result.connectHost = host;
|
|
||||||
return fakeSocket;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
var fakeLog4Js = {
|
|
||||||
appenderMakers: {}
|
|
||||||
};
|
|
||||||
fakeLog4Js.loadAppender = function (appender) {
|
|
||||||
fakeLog4Js.appenderMakers[appender] = function (config) {
|
|
||||||
result.actualLoggerConfig = config;
|
|
||||||
return function log(logEvent) {
|
|
||||||
result.logged.push(logEvent);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
};
|
|
||||||
|
|
||||||
return { theResult: result,
|
|
||||||
theModule: sandbox.require('../lib/appenders/multiprocess', {
|
|
||||||
requires: {
|
|
||||||
'../log4js': fakeLog4Js,
|
|
||||||
'net': fakeNet
|
|
||||||
}
|
|
||||||
})
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
function logMessages(result, logs, raw) {
|
|
||||||
logs.forEach(function log(item) {
|
|
||||||
var logItem = { startTime: "Wed, 02 Nov 2011 21:46:39 GMT", level: { levelStr: 'DEBUG' }, data: [ item ] };
|
|
||||||
result.serverOns.data(JSON.stringify(logItem));
|
|
||||||
result.serverOns.end();
|
|
||||||
result.serverOns.connect();
|
|
||||||
});
|
|
||||||
if (raw) {
|
|
||||||
raw.forEach(function log(rawItem) {
|
|
||||||
result.serverOns.data(rawItem);
|
|
||||||
result.serverOns.end();
|
|
||||||
result.serverOns.connect();
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
vows.describe('log4js multiprocessAppender').addBatch({
|
|
||||||
'master': {
|
|
||||||
topic: function() {
|
|
||||||
var fancy = fancyResultingMultiprocessAppender();
|
|
||||||
var logger = fancy.theModule.configure({ mode: 'master', 'loggerPort': 5001, 'loggerHost': 'abba', appender: { type: 'file' } });
|
|
||||||
logMessages(fancy.theResult, [ 'ALRIGHTY THEN', 'OH WOW' ]);
|
|
||||||
return fancy.theResult;
|
|
||||||
},
|
|
||||||
|
|
||||||
'should write to the actual appender': function (result) {
|
|
||||||
assert.equal(result.listenPort, 5001);
|
|
||||||
assert.equal(result.listenHost, 'abba');
|
|
||||||
assert.equal(result.logged.length, 2);
|
|
||||||
assert.equal(result.logged[0].data[0], 'ALRIGHTY THEN');
|
|
||||||
assert.equal(result.logged[1].data[0], 'OH WOW');
|
|
||||||
},
|
|
||||||
|
|
||||||
'data written should be formatted correctly': function (result) {
|
|
||||||
assert.equal(result.logged[0].level.toString(), 'DEBUG');
|
|
||||||
assert.equal(result.logged[0].data, 'ALRIGHTY THEN');
|
|
||||||
assert.isTrue(typeof(result.logged[0].startTime) === 'object');
|
|
||||||
assert.equal(result.logged[1].level.toString(), 'DEBUG');
|
|
||||||
assert.equal(result.logged[1].data, 'OH WOW');
|
|
||||||
assert.isTrue(typeof(result.logged[1].startTime) === 'object');
|
|
||||||
},
|
|
||||||
|
|
||||||
'the actual logger should get the right config': function (result) {
|
|
||||||
assert.equal(result.actualLoggerConfig.type, 'file');
|
|
||||||
},
|
|
||||||
|
|
||||||
'client should not be called': function (result) {
|
|
||||||
assert.equal(_.keys(result.clientOns).length, 0);
|
|
||||||
}
|
|
||||||
},
|
|
||||||
'master with bad request': {
|
|
||||||
topic: function() {
|
|
||||||
var fancy = fancyResultingMultiprocessAppender();
|
|
||||||
var logger = fancy.theModule.configure({ mode: 'master', 'loggerPort': 5001, 'loggerHost': 'abba', appender: { type: 'file' } });
|
|
||||||
logMessages(fancy.theResult, [], [ 'ALRIGHTY THEN', 'OH WOW' ]);
|
|
||||||
return fancy.theResult;
|
|
||||||
},
|
|
||||||
|
|
||||||
'should write to the actual appender': function (result) {
|
|
||||||
assert.equal(result.listenPort, 5001);
|
|
||||||
assert.equal(result.listenHost, 'abba');
|
|
||||||
assert.equal(result.logged.length, 2);
|
|
||||||
assert.equal(result.logged[0].data[0], 'Unable to parse log: ALRIGHTY THEN');
|
|
||||||
assert.equal(result.logged[1].data[0], 'Unable to parse log: OH WOW');
|
|
||||||
},
|
|
||||||
|
|
||||||
'data written should be formatted correctly': function (result) {
|
|
||||||
assert.equal(result.logged[0].level.toString(), 'ERROR');
|
|
||||||
assert.equal(result.logged[0].data, 'Unable to parse log: ALRIGHTY THEN');
|
|
||||||
assert.isTrue(typeof(result.logged[0].startTime) === 'object');
|
|
||||||
assert.equal(result.logged[1].level.toString(), 'ERROR');
|
|
||||||
assert.equal(result.logged[1].data, 'Unable to parse log: OH WOW');
|
|
||||||
assert.isTrue(typeof(result.logged[1].startTime) === 'object');
|
|
||||||
}
|
|
||||||
},
|
|
||||||
'worker': {
|
|
||||||
'should emit logging events to the master': {
|
|
||||||
topic: function() {
|
|
||||||
var fancy = fancyResultingMultiprocessAppender();
|
|
||||||
var logger = fancy.theModule.configure({ loggerHost: 'baba', loggerPort: 1232, name: 'ohno', mode: 'worker', appender: { type: 'file' } });
|
|
||||||
logger({ level: { levelStr: 'INFO' }, data: "ALRIGHTY THEN", startTime: '2011-10-27T03:53:16.031Z' });
|
|
||||||
logger({ level: { levelStr: 'DEBUG' }, data: "OH WOW", startTime: '2011-10-27T04:53:16.031Z'});
|
|
||||||
return fancy.theResult;
|
|
||||||
},
|
|
||||||
|
|
||||||
'client configuration should be correct': function (result) {
|
|
||||||
assert.equal(result.connectHost, 'baba');
|
|
||||||
assert.equal(result.connectPort, 1232);
|
|
||||||
},
|
|
||||||
|
|
||||||
'should not write to the actual appender': function (result) {
|
|
||||||
assert.equal(result.logged.length, 0);
|
|
||||||
assert.equal(result.ended.length, 2);
|
|
||||||
assert.equal(result.ended[0].data, JSON.stringify({ level: { levelStr: 'INFO' }, data: "ALRIGHTY THEN", startTime: '2011-10-27T03:53:16.031Z' }));
|
|
||||||
assert.equal(result.ended[0].encoding, 'utf8');
|
|
||||||
assert.equal(result.ended[1].data, JSON.stringify({ level: { levelStr: 'DEBUG' }, data: "OH WOW", startTime: '2011-10-27T04:53:16.031Z'}));
|
|
||||||
assert.equal(result.ended[1].encoding, 'utf8');
|
|
||||||
assert.equal(_.keys(result.serverOns).length, 0);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}).exportTo(module);
|
|
||||||
@@ -1,126 +0,0 @@
|
|||||||
var vows = require('vows')
|
|
||||||
, assert = require('assert')
|
|
||||||
, events = require('events')
|
|
||||||
, fs = require('fs')
|
|
||||||
, RollingFileStream = require('../lib/streams').RollingFileStream;
|
|
||||||
|
|
||||||
function remove(filename) {
|
|
||||||
try {
|
|
||||||
fs.unlinkSync(filename);
|
|
||||||
} catch (e) {
|
|
||||||
//doesn't really matter if it failed
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
vows.describe('RollingFileStream').addBatch({
|
|
||||||
'arguments': {
|
|
||||||
topic: function() {
|
|
||||||
remove(__dirname + "/test-rolling-file-stream");
|
|
||||||
return new RollingFileStream("test-rolling-file-stream", 1024, 5);
|
|
||||||
},
|
|
||||||
'should take a filename, file size in bytes, number of backups as arguments and return a FileWriteStream': function(stream) {
|
|
||||||
assert.instanceOf(stream, fs.FileWriteStream);
|
|
||||||
assert.equal(stream.filename, "test-rolling-file-stream");
|
|
||||||
assert.equal(stream.size, 1024);
|
|
||||||
assert.equal(stream.backups, 5);
|
|
||||||
},
|
|
||||||
'with default settings for the underlying stream': function(stream) {
|
|
||||||
assert.equal(stream.mode, 420);
|
|
||||||
assert.equal(stream.flags, 'a');
|
|
||||||
assert.equal(stream.encoding, 'utf8');
|
|
||||||
}
|
|
||||||
},
|
|
||||||
'with stream arguments': {
|
|
||||||
topic: function() {
|
|
||||||
remove(__dirname + '/test-rolling-file-stream');
|
|
||||||
return new RollingFileStream('test-rolling-file-stream', 1024, 5, { mode: 0666 });
|
|
||||||
},
|
|
||||||
'should pass them to the underlying stream': function(stream) {
|
|
||||||
assert.equal(stream.mode, 0666);
|
|
||||||
}
|
|
||||||
},
|
|
||||||
'without size': {
|
|
||||||
topic: function() {
|
|
||||||
try {
|
|
||||||
new RollingFileStream(__dirname + "/test-rolling-file-stream");
|
|
||||||
} catch (e) {
|
|
||||||
return e;
|
|
||||||
}
|
|
||||||
},
|
|
||||||
'should throw an error': function(err) {
|
|
||||||
assert.instanceOf(err, Error);
|
|
||||||
}
|
|
||||||
},
|
|
||||||
'without number of backups': {
|
|
||||||
topic: function() {
|
|
||||||
remove('test-rolling-file-stream');
|
|
||||||
return new RollingFileStream(__dirname + "/test-rolling-file-stream", 1024);
|
|
||||||
},
|
|
||||||
'should default to 1 backup': function(stream) {
|
|
||||||
assert.equal(stream.backups, 1);
|
|
||||||
}
|
|
||||||
},
|
|
||||||
'writing less than the file size': {
|
|
||||||
topic: function() {
|
|
||||||
remove(__dirname + "/test-rolling-file-stream-write-less");
|
|
||||||
var that = this, stream = new RollingFileStream(__dirname + "/test-rolling-file-stream-write-less", 100);
|
|
||||||
stream.on("open", function() { that.callback(null, stream); });
|
|
||||||
},
|
|
||||||
'(when open)': {
|
|
||||||
topic: function(stream) {
|
|
||||||
stream.write("cheese", "utf8");
|
|
||||||
stream.end();
|
|
||||||
fs.readFile(__dirname + "/test-rolling-file-stream-write-less", "utf8", this.callback);
|
|
||||||
},
|
|
||||||
'should write to the file': function(contents) {
|
|
||||||
assert.equal(contents, "cheese");
|
|
||||||
},
|
|
||||||
'the number of files': {
|
|
||||||
topic: function() {
|
|
||||||
fs.readdir(__dirname, this.callback);
|
|
||||||
},
|
|
||||||
'should be one': function(files) {
|
|
||||||
assert.equal(files.filter(function(file) { return file.indexOf('test-rolling-file-stream-write-less') > -1; }).length, 1);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
'writing more than the file size': {
|
|
||||||
topic: function() {
|
|
||||||
remove(__dirname + "/test-rolling-file-stream-write-more");
|
|
||||||
remove(__dirname + "/test-rolling-file-stream-write-more.1");
|
|
||||||
var that = this, stream = new RollingFileStream(__dirname + "/test-rolling-file-stream-write-more", 45);
|
|
||||||
stream.on("open", function() {
|
|
||||||
for (var i=0; i < 7; i++) {
|
|
||||||
stream.write(i +".cheese\n", "utf8");
|
|
||||||
}
|
|
||||||
//wait for the file system to catch up with us
|
|
||||||
setTimeout(that.callback, 100);
|
|
||||||
});
|
|
||||||
},
|
|
||||||
'the number of files': {
|
|
||||||
topic: function() {
|
|
||||||
fs.readdir(__dirname, this.callback);
|
|
||||||
},
|
|
||||||
'should be two': function(files) {
|
|
||||||
assert.equal(files.filter(function(file) { return file.indexOf('test-rolling-file-stream-write-more') > -1; }).length, 2);
|
|
||||||
}
|
|
||||||
},
|
|
||||||
'the first file': {
|
|
||||||
topic: function() {
|
|
||||||
fs.readFile(__dirname + "/test-rolling-file-stream-write-more", "utf8", this.callback);
|
|
||||||
},
|
|
||||||
'should contain the last two log messages': function(contents) {
|
|
||||||
assert.equal(contents, '5.cheese\n6.cheese\n');
|
|
||||||
}
|
|
||||||
},
|
|
||||||
'the second file': {
|
|
||||||
topic: function() {
|
|
||||||
fs.readFile(__dirname + '/test-rolling-file-stream-write-more.1', "utf8", this.callback);
|
|
||||||
},
|
|
||||||
'should contain the first five log messages': function(contents) {
|
|
||||||
assert.equal(contents, '0.cheese\n1.cheese\n2.cheese\n3.cheese\n4.cheese\n');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}).exportTo(module);
|
|
||||||
168
test/smtpAppender-test.js
Normal file
168
test/smtpAppender-test.js
Normal file
@@ -0,0 +1,168 @@
|
|||||||
|
var vows = require('vows'),
|
||||||
|
assert = require('assert'),
|
||||||
|
log4js = require('../lib/log4js'),
|
||||||
|
sandbox = require('sandboxed-module');
|
||||||
|
|
||||||
|
function setupLogging(category, options) {
|
||||||
|
var msgs = [];
|
||||||
|
|
||||||
|
var fakeMailer = {
|
||||||
|
createTransport: function (name, options) {
|
||||||
|
return {
|
||||||
|
config: options,
|
||||||
|
sendMail: function (msg, callback) {
|
||||||
|
msgs.push(msg);
|
||||||
|
callback(null, true);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
var smtpModule = sandbox.require('../lib/appenders/smtp', {
|
||||||
|
requires: {
|
||||||
|
'nodemailer': fakeMailer
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
log4js.addAppender(smtpModule.configure(options), category);
|
||||||
|
|
||||||
|
return {
|
||||||
|
logger: log4js.getLogger(category),
|
||||||
|
mailer: fakeMailer,
|
||||||
|
results: msgs
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function checkMessages (result, sender, subject) {
|
||||||
|
for (var i = 0; i < result.results.length; ++i) {
|
||||||
|
assert.equal(result.results[i].from, sender);
|
||||||
|
assert.equal(result.results[i].to, 'recipient@domain.com');
|
||||||
|
assert.equal(result.results[i].subject, subject ? subject : 'Log event #' + (i+1));
|
||||||
|
assert.ok(new RegExp('.+Log event #' + (i+1) + '\n$').test(result.results[i].text));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
log4js.clearAppenders();
|
||||||
|
vows.describe('log4js smtpAppender').addBatch({
|
||||||
|
'minimal config': {
|
||||||
|
topic: function() {
|
||||||
|
var setup = setupLogging('minimal config', {
|
||||||
|
recipients: 'recipient@domain.com',
|
||||||
|
transport: "SMTP",
|
||||||
|
SMTP: {
|
||||||
|
port: 25,
|
||||||
|
auth: {
|
||||||
|
user: 'user@domain.com'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
setup.logger.info('Log event #1');
|
||||||
|
return setup;
|
||||||
|
},
|
||||||
|
'there should be one message only': function (result) {
|
||||||
|
assert.equal(result.results.length, 1);
|
||||||
|
},
|
||||||
|
'message should contain proper data': function (result) {
|
||||||
|
checkMessages(result);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
'fancy config': {
|
||||||
|
topic: function() {
|
||||||
|
var setup = setupLogging('fancy config', {
|
||||||
|
recipients: 'recipient@domain.com',
|
||||||
|
sender: 'sender@domain.com',
|
||||||
|
subject: 'This is subject',
|
||||||
|
transport: "SMTP",
|
||||||
|
SMTP: {
|
||||||
|
port: 25,
|
||||||
|
auth: {
|
||||||
|
user: 'user@domain.com'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
setup.logger.info('Log event #1');
|
||||||
|
return setup;
|
||||||
|
},
|
||||||
|
'there should be one message only': function (result) {
|
||||||
|
assert.equal(result.results.length, 1);
|
||||||
|
},
|
||||||
|
'message should contain proper data': function (result) {
|
||||||
|
checkMessages(result, 'sender@domain.com', 'This is subject');
|
||||||
|
}
|
||||||
|
},
|
||||||
|
'separate email for each event': {
|
||||||
|
topic: function() {
|
||||||
|
var self = this;
|
||||||
|
var setup = setupLogging('separate email for each event', {
|
||||||
|
recipients: 'recipient@domain.com',
|
||||||
|
transport: "SMTP",
|
||||||
|
SMTP: {
|
||||||
|
port: 25,
|
||||||
|
auth: {
|
||||||
|
user: 'user@domain.com'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
setTimeout(function () {
|
||||||
|
setup.logger.info('Log event #1');
|
||||||
|
}, 0);
|
||||||
|
setTimeout(function () {
|
||||||
|
setup.logger.info('Log event #2');
|
||||||
|
}, 500);
|
||||||
|
setTimeout(function () {
|
||||||
|
setup.logger.info('Log event #3');
|
||||||
|
}, 1050);
|
||||||
|
setTimeout(function () {
|
||||||
|
self.callback(null, setup);
|
||||||
|
}, 2100);
|
||||||
|
},
|
||||||
|
'there should be three messages': function (result) {
|
||||||
|
assert.equal(result.results.length, 3);
|
||||||
|
},
|
||||||
|
'messages should contain proper data': function (result) {
|
||||||
|
checkMessages(result);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
'multiple events in one email': {
|
||||||
|
topic: function() {
|
||||||
|
var self = this;
|
||||||
|
var setup = setupLogging('multiple events in one email', {
|
||||||
|
recipients: 'recipient@domain.com',
|
||||||
|
sendInterval: 1,
|
||||||
|
transport: "SMTP",
|
||||||
|
SMTP: {
|
||||||
|
port: 25,
|
||||||
|
auth: {
|
||||||
|
user: 'user@domain.com'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
setTimeout(function () {
|
||||||
|
setup.logger.info('Log event #1');
|
||||||
|
}, 0);
|
||||||
|
setTimeout(function () {
|
||||||
|
setup.logger.info('Log event #2');
|
||||||
|
}, 500);
|
||||||
|
setTimeout(function () {
|
||||||
|
setup.logger.info('Log event #3');
|
||||||
|
}, 1050);
|
||||||
|
setTimeout(function () {
|
||||||
|
self.callback(null, setup);
|
||||||
|
}, 2100);
|
||||||
|
},
|
||||||
|
'there should be two messages': function (result) {
|
||||||
|
assert.equal(result.results.length, 2);
|
||||||
|
},
|
||||||
|
'messages should contain proper data': function (result) {
|
||||||
|
assert.equal(result.results[0].to, 'recipient@domain.com');
|
||||||
|
assert.equal(result.results[0].subject, 'Log event #1');
|
||||||
|
assert.equal(result.results[0].text.match(new RegExp('.+Log event #[1-2]$', 'gm')).length, 2);
|
||||||
|
|
||||||
|
assert.equal(result.results[1].to, 'recipient@domain.com');
|
||||||
|
assert.equal(result.results[1].subject, 'Log event #3');
|
||||||
|
assert.ok(new RegExp('.+Log event #3\n$').test(result.results[1].text));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
}).export(module);
|
||||||
|
|
||||||
@@ -1,162 +0,0 @@
|
|||||||
var vows = require('vows'),
|
|
||||||
assert = require('assert'),
|
|
||||||
log4js = require('../lib/log4js'),
|
|
||||||
sandbox = require('sandboxed-module');
|
|
||||||
|
|
||||||
function setupLogging(category, options) {
|
|
||||||
var msgs = [];
|
|
||||||
|
|
||||||
var fakeMailer = {
|
|
||||||
send_mail: function (msg, callback) {
|
|
||||||
msgs.push(msg);
|
|
||||||
callback(null, true);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
var smtpModule = sandbox.require('../lib/appenders/smtp', {
|
|
||||||
requires: {
|
|
||||||
'nodemailer': fakeMailer
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
log4js.addAppender(smtpModule.configure(options), category);
|
|
||||||
|
|
||||||
return {
|
|
||||||
logger: log4js.getLogger(category),
|
|
||||||
mailer: fakeMailer,
|
|
||||||
results: msgs
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
function checkMessages (result, sender, subject) {
|
|
||||||
for (var i = 0; i < result.results.length; ++i) {
|
|
||||||
assert.equal(result.results[i].sender, sender ? sender : result.mailer.SMTP.user);
|
|
||||||
assert.equal(result.results[i].to, 'recipient@domain.com');
|
|
||||||
assert.equal(result.results[i].subject, subject ? subject : 'Log event #' + (i+1));
|
|
||||||
assert.ok(new RegExp('.+Log event #' + (i+1) + '\n$').test(result.results[i].body));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
log4js.clearAppenders();
|
|
||||||
vows.describe('log4js smtpAppender').addBatch({
|
|
||||||
'minimal config': {
|
|
||||||
topic: function() {
|
|
||||||
var setup = setupLogging('minimal config', {
|
|
||||||
recipients: 'recipient@domain.com',
|
|
||||||
smtp: {
|
|
||||||
port: 25,
|
|
||||||
user: 'user@domain.com'
|
|
||||||
}
|
|
||||||
});
|
|
||||||
setup.logger.info('Log event #1');
|
|
||||||
return setup;
|
|
||||||
},
|
|
||||||
'mailer should be configured properly': function (result) {
|
|
||||||
assert.ok(result.mailer.SMTP);
|
|
||||||
assert.equal(result.mailer.SMTP.port, 25);
|
|
||||||
assert.equal(result.mailer.SMTP.user, 'user@domain.com');
|
|
||||||
},
|
|
||||||
'there should be one message only': function (result) {
|
|
||||||
assert.equal(result.results.length, 1);
|
|
||||||
},
|
|
||||||
'message should contain proper data': function (result) {
|
|
||||||
checkMessages(result);
|
|
||||||
}
|
|
||||||
},
|
|
||||||
'fancy config': {
|
|
||||||
topic: function() {
|
|
||||||
var setup = setupLogging('fancy config', {
|
|
||||||
recipients: 'recipient@domain.com',
|
|
||||||
sender: 'sender@domain.com',
|
|
||||||
subject: 'This is subject',
|
|
||||||
smtp: {
|
|
||||||
port: 25,
|
|
||||||
user: 'user@domain.com'
|
|
||||||
}
|
|
||||||
});
|
|
||||||
setup.logger.info('Log event #1');
|
|
||||||
return setup;
|
|
||||||
},
|
|
||||||
'mailer should be configured properly': function (result) {
|
|
||||||
assert.ok(result.mailer.SMTP);
|
|
||||||
assert.equal(result.mailer.SMTP.port, 25);
|
|
||||||
assert.equal(result.mailer.SMTP.user, 'user@domain.com');
|
|
||||||
},
|
|
||||||
'there should be one message only': function (result) {
|
|
||||||
assert.equal(result.results.length, 1);
|
|
||||||
},
|
|
||||||
'message should contain proper data': function (result) {
|
|
||||||
checkMessages(result, 'sender@domain.com', 'This is subject');
|
|
||||||
}
|
|
||||||
},
|
|
||||||
'separate email for each event': {
|
|
||||||
topic: function() {
|
|
||||||
var self = this;
|
|
||||||
var setup = setupLogging('separate email for each event', {
|
|
||||||
recipients: 'recipient@domain.com',
|
|
||||||
smtp: {
|
|
||||||
port: 25,
|
|
||||||
user: 'user@domain.com'
|
|
||||||
}
|
|
||||||
});
|
|
||||||
setTimeout(function () {
|
|
||||||
setup.logger.info('Log event #1');
|
|
||||||
}, 0);
|
|
||||||
setTimeout(function () {
|
|
||||||
setup.logger.info('Log event #2');
|
|
||||||
}, 500);
|
|
||||||
setTimeout(function () {
|
|
||||||
setup.logger.info('Log event #3');
|
|
||||||
}, 1050);
|
|
||||||
setTimeout(function () {
|
|
||||||
self.callback(null, setup);
|
|
||||||
}, 2100);
|
|
||||||
},
|
|
||||||
'there should be three messages': function (result) {
|
|
||||||
assert.equal(result.results.length, 3);
|
|
||||||
},
|
|
||||||
'messages should contain proper data': function (result) {
|
|
||||||
checkMessages(result);
|
|
||||||
}
|
|
||||||
},
|
|
||||||
'multiple events in one email': {
|
|
||||||
topic: function() {
|
|
||||||
var self = this;
|
|
||||||
var setup = setupLogging('multiple events in one email', {
|
|
||||||
recipients: 'recipient@domain.com',
|
|
||||||
sendInterval: 1,
|
|
||||||
smtp: {
|
|
||||||
port: 25,
|
|
||||||
user: 'user@domain.com'
|
|
||||||
}
|
|
||||||
});
|
|
||||||
setTimeout(function () {
|
|
||||||
setup.logger.info('Log event #1');
|
|
||||||
}, 0);
|
|
||||||
setTimeout(function () {
|
|
||||||
setup.logger.info('Log event #2');
|
|
||||||
}, 500);
|
|
||||||
setTimeout(function () {
|
|
||||||
setup.logger.info('Log event #3');
|
|
||||||
}, 1050);
|
|
||||||
setTimeout(function () {
|
|
||||||
self.callback(null, setup);
|
|
||||||
}, 2100);
|
|
||||||
},
|
|
||||||
'there should be two messages': function (result) {
|
|
||||||
assert.equal(result.results.length, 2);
|
|
||||||
},
|
|
||||||
'messages should contain proper data': function (result) {
|
|
||||||
assert.equal(result.results[0].sender, result.mailer.SMTP.user);
|
|
||||||
assert.equal(result.results[0].to, 'recipient@domain.com');
|
|
||||||
assert.equal(result.results[0].subject, 'Log event #1');
|
|
||||||
assert.equal(result.results[0].body.match(new RegExp('.+Log event #[1-2]$', 'gm')).length, 2);
|
|
||||||
|
|
||||||
assert.equal(result.results[1].sender, result.mailer.SMTP.user);
|
|
||||||
assert.equal(result.results[1].to, 'recipient@domain.com');
|
|
||||||
assert.equal(result.results[1].subject, 'Log event #3');
|
|
||||||
assert.ok(new RegExp('.+Log event #3\n$').test(result.results[1].body));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
}).export(module);
|
|
||||||
130
test/streams/DateRollingFileStream-test.js
Normal file
130
test/streams/DateRollingFileStream-test.js
Normal file
@@ -0,0 +1,130 @@
|
|||||||
|
var vows = require('vows')
|
||||||
|
, assert = require('assert')
|
||||||
|
, fs = require('fs')
|
||||||
|
, semver = require('semver')
|
||||||
|
, streams
|
||||||
|
, DateRollingFileStream
|
||||||
|
, testTime = new Date(2012, 8, 12, 10, 37, 11);
|
||||||
|
|
||||||
|
if (semver.satisfies(process.version, '>=0.10.0')) {
|
||||||
|
streams = require('stream');
|
||||||
|
} else {
|
||||||
|
streams = require('readable-stream');
|
||||||
|
}
|
||||||
|
DateRollingFileStream = require('../../lib/streams').DateRollingFileStream
|
||||||
|
|
||||||
|
function cleanUp(filename) {
|
||||||
|
return function() {
|
||||||
|
fs.unlink(filename);
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function now() {
|
||||||
|
return testTime.getTime();
|
||||||
|
}
|
||||||
|
|
||||||
|
vows.describe('DateRollingFileStream').addBatch({
|
||||||
|
'arguments': {
|
||||||
|
topic: new DateRollingFileStream(__dirname + '/test-date-rolling-file-stream-1', 'yyyy-mm-dd.hh'),
|
||||||
|
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-1'),
|
||||||
|
|
||||||
|
'should take a filename and a pattern and return a WritableStream': function(stream) {
|
||||||
|
assert.equal(stream.filename, __dirname + '/test-date-rolling-file-stream-1');
|
||||||
|
assert.equal(stream.pattern, 'yyyy-mm-dd.hh');
|
||||||
|
assert.instanceOf(stream, streams.Writable);
|
||||||
|
},
|
||||||
|
'with default settings for the underlying stream': function(stream) {
|
||||||
|
assert.equal(stream.theStream.mode, 420);
|
||||||
|
assert.equal(stream.theStream.flags, 'a');
|
||||||
|
//encoding is not available on the underlying stream
|
||||||
|
//assert.equal(stream.encoding, 'utf8');
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
'default arguments': {
|
||||||
|
topic: new DateRollingFileStream(__dirname + '/test-date-rolling-file-stream-2'),
|
||||||
|
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-2'),
|
||||||
|
|
||||||
|
'pattern should be .yyyy-MM-dd': function(stream) {
|
||||||
|
assert.equal(stream.pattern, '.yyyy-MM-dd');
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
'with stream arguments': {
|
||||||
|
topic: new DateRollingFileStream(__dirname + '/test-date-rolling-file-stream-3', 'yyyy-MM-dd', { mode: 0666 }),
|
||||||
|
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-3'),
|
||||||
|
|
||||||
|
'should pass them to the underlying stream': function(stream) {
|
||||||
|
assert.equal(stream.theStream.mode, 0666);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
'with stream arguments but no pattern': {
|
||||||
|
topic: new DateRollingFileStream(__dirname + '/test-date-rolling-file-stream-4', { mode: 0666 }),
|
||||||
|
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-4'),
|
||||||
|
|
||||||
|
'should pass them to the underlying stream': function(stream) {
|
||||||
|
assert.equal(stream.theStream.mode, 0666);
|
||||||
|
},
|
||||||
|
'should use default pattern': function(stream) {
|
||||||
|
assert.equal(stream.pattern, '.yyyy-MM-dd');
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
'with a pattern of .yyyy-MM-dd': {
|
||||||
|
topic: function() {
|
||||||
|
var that = this,
|
||||||
|
stream = new DateRollingFileStream(__dirname + '/test-date-rolling-file-stream-5', '.yyyy-MM-dd', null, now);
|
||||||
|
stream.write("First message\n", 'utf8', function() {
|
||||||
|
that.callback(null, stream);
|
||||||
|
});
|
||||||
|
},
|
||||||
|
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-5'),
|
||||||
|
|
||||||
|
'should create a file with the base name': {
|
||||||
|
topic: function(stream) {
|
||||||
|
fs.readFile(__dirname + '/test-date-rolling-file-stream-5', this.callback);
|
||||||
|
},
|
||||||
|
'file should contain first message': function(result) {
|
||||||
|
assert.equal(result.toString(), "First message\n");
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
'when the day changes': {
|
||||||
|
topic: function(stream) {
|
||||||
|
testTime = new Date(2012, 8, 13, 0, 10, 12);
|
||||||
|
stream.write("Second message\n", 'utf8', this.callback);
|
||||||
|
},
|
||||||
|
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-5.2012-09-12'),
|
||||||
|
|
||||||
|
|
||||||
|
'the number of files': {
|
||||||
|
topic: function() {
|
||||||
|
fs.readdir(__dirname, this.callback);
|
||||||
|
},
|
||||||
|
'should be two': function(files) {
|
||||||
|
assert.equal(files.filter(function(file) { return file.indexOf('test-date-rolling-file-stream-5') > -1; }).length, 2);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
'the file without a date': {
|
||||||
|
topic: function() {
|
||||||
|
fs.readFile(__dirname + '/test-date-rolling-file-stream-5', this.callback);
|
||||||
|
},
|
||||||
|
'should contain the second message': function(contents) {
|
||||||
|
assert.equal(contents.toString(), "Second message\n");
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
'the file with the date': {
|
||||||
|
topic: function() {
|
||||||
|
fs.readFile(__dirname + '/test-date-rolling-file-stream-5.2012-09-12', this.callback);
|
||||||
|
},
|
||||||
|
'should contain the first message': function(contents) {
|
||||||
|
assert.equal(contents.toString(), "First message\n");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
}).exportTo(module);
|
||||||
134
test/streams/rollingFileStream-test.js
Normal file
134
test/streams/rollingFileStream-test.js
Normal file
@@ -0,0 +1,134 @@
|
|||||||
|
var vows = require('vows')
|
||||||
|
, async = require('async')
|
||||||
|
, assert = require('assert')
|
||||||
|
, events = require('events')
|
||||||
|
, fs = require('fs')
|
||||||
|
, semver = require('semver')
|
||||||
|
, streams
|
||||||
|
, RollingFileStream;
|
||||||
|
|
||||||
|
if (semver.satisfies(process.version, '>=0.10.0')) {
|
||||||
|
streams = require('stream');
|
||||||
|
} else {
|
||||||
|
streams = require('readable-stream');
|
||||||
|
}
|
||||||
|
RollingFileStream = require('../../lib/streams').RollingFileStream;
|
||||||
|
|
||||||
|
function remove(filename) {
|
||||||
|
try {
|
||||||
|
fs.unlinkSync(filename);
|
||||||
|
} catch (e) {
|
||||||
|
//doesn't really matter if it failed
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
vows.describe('RollingFileStream').addBatch({
|
||||||
|
'arguments': {
|
||||||
|
topic: function() {
|
||||||
|
remove(__dirname + "/test-rolling-file-stream");
|
||||||
|
return new RollingFileStream("test-rolling-file-stream", 1024, 5);
|
||||||
|
},
|
||||||
|
'should take a filename, file size in bytes, number of backups as arguments and return a Writable': function(stream) {
|
||||||
|
assert.instanceOf(stream, streams.Writable);
|
||||||
|
assert.equal(stream.filename, "test-rolling-file-stream");
|
||||||
|
assert.equal(stream.size, 1024);
|
||||||
|
assert.equal(stream.backups, 5);
|
||||||
|
},
|
||||||
|
'with default settings for the underlying stream': function(stream) {
|
||||||
|
assert.equal(stream.theStream.mode, 420);
|
||||||
|
assert.equal(stream.theStream.flags, 'a');
|
||||||
|
//encoding isn't a property on the underlying stream
|
||||||
|
//assert.equal(stream.theStream.encoding, 'utf8');
|
||||||
|
}
|
||||||
|
},
|
||||||
|
'with stream arguments': {
|
||||||
|
topic: function() {
|
||||||
|
remove(__dirname + '/test-rolling-file-stream');
|
||||||
|
return new RollingFileStream('test-rolling-file-stream', 1024, 5, { mode: 0666 });
|
||||||
|
},
|
||||||
|
'should pass them to the underlying stream': function(stream) {
|
||||||
|
assert.equal(stream.theStream.mode, 0666);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
'without size': {
|
||||||
|
topic: function() {
|
||||||
|
try {
|
||||||
|
new RollingFileStream(__dirname + "/test-rolling-file-stream");
|
||||||
|
} catch (e) {
|
||||||
|
return e;
|
||||||
|
}
|
||||||
|
},
|
||||||
|
'should throw an error': function(err) {
|
||||||
|
assert.instanceOf(err, Error);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
'without number of backups': {
|
||||||
|
topic: function() {
|
||||||
|
remove('test-rolling-file-stream');
|
||||||
|
return new RollingFileStream(__dirname + "/test-rolling-file-stream", 1024);
|
||||||
|
},
|
||||||
|
'should default to 1 backup': function(stream) {
|
||||||
|
assert.equal(stream.backups, 1);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
'writing less than the file size': {
|
||||||
|
topic: function() {
|
||||||
|
remove(__dirname + "/test-rolling-file-stream-write-less");
|
||||||
|
var that = this, stream = new RollingFileStream(__dirname + "/test-rolling-file-stream-write-less", 100);
|
||||||
|
stream.write("cheese", "utf8", function() {
|
||||||
|
stream.end();
|
||||||
|
fs.readFile(__dirname + "/test-rolling-file-stream-write-less", "utf8", that.callback);
|
||||||
|
});
|
||||||
|
},
|
||||||
|
'should write to the file': function(contents) {
|
||||||
|
assert.equal(contents, "cheese");
|
||||||
|
},
|
||||||
|
'the number of files': {
|
||||||
|
topic: function() {
|
||||||
|
fs.readdir(__dirname, this.callback);
|
||||||
|
},
|
||||||
|
'should be one': function(files) {
|
||||||
|
assert.equal(files.filter(function(file) { return file.indexOf('test-rolling-file-stream-write-less') > -1; }).length, 1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
'writing more than the file size': {
|
||||||
|
topic: function() {
|
||||||
|
remove(__dirname + "/test-rolling-file-stream-write-more");
|
||||||
|
remove(__dirname + "/test-rolling-file-stream-write-more.1");
|
||||||
|
var that = this, stream = new RollingFileStream(__dirname + "/test-rolling-file-stream-write-more", 45);
|
||||||
|
async.forEach([0, 1, 2, 3, 4, 5, 6], function(i, cb) {
|
||||||
|
stream.write(i +".cheese\n", "utf8", cb);
|
||||||
|
}, function() {
|
||||||
|
stream.end();
|
||||||
|
that.callback();
|
||||||
|
});
|
||||||
|
},
|
||||||
|
'the number of files': {
|
||||||
|
topic: function() {
|
||||||
|
fs.readdir(__dirname, this.callback);
|
||||||
|
},
|
||||||
|
'should be two': function(files) {
|
||||||
|
assert.equal(files.filter(
|
||||||
|
function(file) { return file.indexOf('test-rolling-file-stream-write-more') > -1; }
|
||||||
|
).length, 2);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
'the first file': {
|
||||||
|
topic: function() {
|
||||||
|
fs.readFile(__dirname + "/test-rolling-file-stream-write-more", "utf8", this.callback);
|
||||||
|
},
|
||||||
|
'should contain the last two log messages': function(contents) {
|
||||||
|
assert.equal(contents, '5.cheese\n6.cheese\n');
|
||||||
|
}
|
||||||
|
},
|
||||||
|
'the second file': {
|
||||||
|
topic: function() {
|
||||||
|
fs.readFile(__dirname + '/test-rolling-file-stream-write-more.1', "utf8", this.callback);
|
||||||
|
},
|
||||||
|
'should contain the first five log messages': function(contents) {
|
||||||
|
assert.equal(contents, '0.cheese\n1.cheese\n2.cheese\n3.cheese\n4.cheese\n');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}).exportTo(module);
|
||||||
17
test/with-dateFile.json
Normal file
17
test/with-dateFile.json
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
{
  "appenders": [
    {
      "category": "tests",
      "type": "dateFile",
      "filename": "test/date-file-test.log",
      "pattern": "-from-MM-dd",
      "layout": {
        "type": "messagePassThrough"
      }
    }
  ],

  "levels": {
    "tests": "WARN"
  }
}
@@ -4,8 +4,7 @@
|
|||||||
"type": "file",
|
"type": "file",
|
||||||
"filename": "tmp-test.log",
|
"filename": "tmp-test.log",
|
||||||
"maxLogSize": 1024,
|
"maxLogSize": 1024,
|
||||||
"backups": 3,
|
"backups": 3
|
||||||
"pollInterval": 15
|
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
Reference in New Issue
Block a user