Compare commits
355 Commits
Commit SHA1s in the compared range:

a703f2dc12, b2edbb1146, 99e7c0981d, 06bab894af, 101739ebef, f8ffccffd5, 7d50b4aeff, b12200fabc,
f8b6cc7c39, 6314e4a344, d18fb466fb, e638ff7271, 2daf29b400, ca5272aacc, 614127bb10, a549df44b4,
5e0982f0b1, f5a76d9073, 29d941f0a6, 0c2baa9690, 9b538ee8ed, e4d5228f2b, 6aacb0da0b, 6e3da6f44b,
3b5eb28115, 633ed3cddb, 8ca092cdb9, 3ec9811b5e, c852fceaf4, c569919160, 28f7c87a0e, 492919b940,
470baa6c09, cd2ee14bde, c09c11b147, b74a514369, fd05d90c2f, 73344ba79f, 22c156582f, 72bfb5d980,
83ad0babf3, ae1a55fed9, 94034e1226, 9b4c7d1574, 770f2da627, eb51aa99be, 5286c50375, bb644a1632,
a6efbf6273, 2118d8f7b3, d2f044a451, d0661322aa, 8b8844694f, abdba8e56f, 093f693232, b9bba00d8c,
731e217505, 3018a49bde, a5bb94a048, 7a1a895e46, 48dc22eb63, 7888381991, cd286fa25f, 6df4753822,
613474eb44, 112246dd55, 069ed31759, 9e72189574, 5a167d853a, 5755faa7bb, 1ed026a8d9, 2d177d517b,
21aebbde33, 49892f35d3, 61beac28d3, 8ad1cd67e2, c67ab855bb, 4905761f60, 2a38f460dc, 9f77734f74,
ce8b6b06b9, 1e17f88ded, d25e1abd48, dde2e69948, 351a912a86, c5fd75dac3, 4dd5989d27, 46721465a1,
76ff7aa5fa, be5fa838be, a86bed975c, baaebef2ed, 837d007de3, be754f0c0e, 946b216a79, 508dbdadf8,
2e7f6e5a66, cbadb5fa19, c258470cda, 2b070e5470, 4cd546e8b3, 0e5da1d361, fc7f686f65, 4a8f0580de,
f50fab2b86, f1c0767ca3, 652888944b, efc4e36317, d2f30b473f, fa179ecba2, dd25d30228, 11fe5bde5f,
41ddf5eea7, 81fa9c3568, 7ca517b5ed, 6368de1094, 94dbd22c71, 0a2a6c0769, 5d6f00eda4, f998d7e81a,
46ae1a586d, 516320c79a, 40ec9e98e4, cc2e94cf11, 2de838bc76, 87dc7cf5aa, 913c748ee0, def0e8e371,
20f80ff775, f24db59523, 07869b915f, 2cd27e4293, 3d11cbc0ad, e5dba219d1, 9853e13429, 4fd138f87d,
1ad4977aec, 7cb7e6df72, 2192a094b6, 6a9441d261, 50b676dec5, 8b3c036245, b356dec318, 8383dfc4f4,
4e8fb26099, 8492519e3b, fdc9d253c9, 18e21ca473, ab8c7ed89d, aa4f7c071b, dc632f4705, ac6284add1,
2da01cc611, ad8229145e, 8c12c948d9, af6ae7af98, 936ad4da8e, 097ae3d7f1, 04de4ed8d3, 29b02921b6,
48ed5d1222, 7844b0d2e4, 8b49ba9f3d, ed7462885f, 36c5175a55, 22160f90b3, 73437ecb40, 107e33c0d1,
6352632fb2, 0544342e9f, 1d1153d32f, e58cf201ca, 83271e47fc, f3271a3997, 4b7cf589a2, c8f401c47d,
ecbf41bc83, 65e490cbd2, eb21e10208, f272e3fd0a, 5e242c9dc9, 50eefcc701, 8e53c6213e, a15a628311,
b75e3660f4, 22da6226e5, c9a890b37b, a3bdac8e14, af428c5669, 5c75ba9468, bec0d05847, e4bf405f20,
95568f352b, 6da6f3c90e, 7f57d14e70, f478793da3, 0dbc4921a3, ec2f8fec3b, 0167c84ea5, 3e1a27e522,
8b42e46071, 4a7a90ed53, a9307fd6da, 4739c65c68, 892181f88f, bdfa7f9a9b, ad63b801f7, 2bfad6362a,
2b889fe776, 9ac61e37f4, 185f343e68, be1272cd7c, cbc1dd32f9, a6fb26efb1, 012b0d5ed7, de72005e7e,
c6a0e58409, f832a2ba79, 3f10b68c30, 54c311842c, f948b5f5cd, 54e420eb58, 40ba24a55d, e3a20a1746,
7a02f39921, b6ba3bce00, 638ce187bb, 3cbae96a97, a33e48cb07, df491c0b14, 6ff1a2499f, ce2d7df8df,
1b12265800, 32e9045334, 1aed671137, 68b47dd51c, 8f9b4444f6, e49f7107fb, 077302c772, 6f0dfa0c5f,
82a6bee331, ad7e844d68, bef2075c60, a046523804, 0ed1a137d6, 33a92b5dd6, 0901794b35, 05d5265554,
9a29d6222e, 38a89dcf3d, 754ac2c5ac, ccc4976206, 6e7348f8d8, 61078e88ef, 613a077a61, 68d1c8fa07,
216937637d, ff5b8d2939, 6a20efb965, 872bc791c7, 2c7b56853b, c8157cef5c, 352653dcbe, cff6928761,
1fb8962b83, d276bbc2f8, e78f4e33ce, 53367785b4, cff20b99e3, 0a422e5749, 37b94cf195, 0c04c6807c,
b4ca201a91, 2ab6f5fa24, 9bad070b8a, 5aaa9fcd50, b7e77b11ad, 615b534b56, 788de0cac3, 4d484ad752,
449893fd24, 5bdeaf68d7, a5b09b3ead, 05c4c59c20, b4a5227fc0, b152618dbc, a999d8fc00, 78de73a274,
4cf1d1cfa4, e5d0b3348f, f10a6e164e, cea3dc97d1, a3a0c55322, 51d48165fd, 7d50c45801, 40c5f5ee70,
1d769fdf33, bc665b875e, 154c0dc299, 050fae5230, 342286e062, 537f1058b9, 283a403a11, ae8aaa5376,
a95117c0d3, 097390bc89, 0a0119300b, fde66f92f5, 516659f733, 5aabebbdb7, 8b376eb46e, ced570413c,
b2827076da, 07e920cc1b, 89f3659825, 23a2758a6d, 25aa075fad, d099a9fc3f, 7bc460e8e0, 681decf51f,
b93691b82a, f82ecf8f2a, 3b77a42706, b5bc9c8322, c7d3ac4fe1, 0aca64623e, ff68e46858, f9768eb56e,
75e5584060, b78fd77015, 2a06048114, 9a34d9edfd, 12e71bda4e, 53a481d4da, 8d7b5513fb, d13b2fb3b4,
4f7d73bc97, 163db0e5fd, 71f9eef6fe, 623bc1859f, b72182c0cf, ef9fe3a4b1, 3b241095cb, 545681287f,
80474c6881, 7aa076c278, e6b69ff7f2, 69e64932b1, 4b32456db7, ec21ec63f0, a9a698cf09, 925c280c68,
d0b4563ba0, aac8ca0eb0, 0968c6709f
.bob.json (Normal file, 12 lines)
@@ -0,0 +1,12 @@
```json
{
  "build": "clean lint coverage test",
  "lint": {
    "type": "jshint"
  },
  "coverage": {
    "type": "vows"
  },
  "test": {
    "type": "vows"
  }
}
```
.gitignore (vendored, Normal file, 7 lines)
@@ -0,0 +1,7 @@
```
*.log
*.log??
build
node_modules
.bob/
test/streams/test-rolling-file-stream*
test/streams/test-rolling-stream-with-existing-files*
```
.jshintrc (Normal file, 15 lines)
@@ -0,0 +1,15 @@
```json
{
  "node": true,
  "laxcomma": true,
  "indent": 2,
  "globalstrict": true,
  "maxparams": 5,
  "maxdepth": 3,
  "maxstatements": 20,
  "maxcomplexity": 5,
  "maxlen": 100,
  "globals": {
    "describe": true,
    "it": true
  }
}
```
.npmignore (Normal file, 2 lines)
@@ -0,0 +1,2 @@
```
*.log
*.log??
```
.travis.yml (Normal file, 5 lines)
@@ -0,0 +1,5 @@
```yaml
language: node_js
node_js:
  - "0.10"
  - "0.8"
```
README.md (179 lines)
@@ -1,93 +1,144 @@
|
||||
# log4js-node
|
||||
# log4js-node [](http://travis-ci.org/nomiddlename/log4js-node)
|
||||
|
||||
This is a conversion of the [log4js](http://log4js.berlios.de/index.html)
|
||||
framework to work with [node](http://nodejs.org). I've mainly stripped out the browser-specific code
|
||||
and tidied up some of the javascript. It includes a basic file logger, with log rolling based on file size, and also replaces node's console.log functions.
|
||||
|
||||
NOTE: in v0.2.x require('log4js') returned a function, and you needed to call that function in your code before you could use it. This was to make testing easier. v0.3.x makes use of [felixge's sandbox-module](https://github.com/felixge/node-sandboxed-module), so we don't need to return a function.
|
||||
This is a conversion of the [log4js](https://github.com/stritti/log4js)
|
||||
framework to work with [node](http://nodejs.org). I've mainly stripped out the browser-specific code and tidied up some of the javascript.
|
||||
|
||||
Out of the box it supports the following features:
|
||||
|
||||
* coloured console logging
|
||||
* replacement of node's console.log functions (optional)
|
||||
* file appender, with log rolling based on file size
|
||||
* SMTP appender
|
||||
* GELF appender
|
||||
* hook.io appender
|
||||
* Loggly appender
|
||||
* multiprocess appender (useful when you've got worker processes)
|
||||
* a logger for connect/express servers
|
||||
* configurable log message layout/patterns
|
||||
* different log levels for different log categories (make some parts of your app log as DEBUG, others only ERRORS, etc.)
|
||||
|
||||
NOTE: from log4js 0.5 onwards you'll need to explicitly enable replacement of node's console.log functions. Do this either by calling `log4js.replaceConsole()` or configuring with an object or json file like this:
|
||||
|
||||
```javascript
|
||||
{
|
||||
appenders: [
|
||||
{ type: "console" }
|
||||
],
|
||||
replaceConsole: true
|
||||
}
|
||||
```
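The same switch can also be flipped programmatically; a minimal sketch, assuming log4js has been installed from npm:

```javascript
var log4js = require('log4js');

// from 0.5 onwards, node's console.log functions are only replaced when you ask for it
log4js.replaceConsole();

console.log("this now goes through the log4js console appender");
```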
|
||||
|
||||
## installation
|
||||
|
||||
npm install log4js
|
||||
|
||||
## tests
|
||||
|
||||
Tests now use [vows](http://vowsjs.org), run with `vows test/*.js`.
|
||||
|
||||
## usage
|
||||
|
||||
Minimalist version:
|
||||
|
||||
var log4js = require('log4js');
|
||||
var logger = log4js.getLogger();
|
||||
logger.debug("Some debug messages");
|
||||
|
||||
```javascript
|
||||
var log4js = require('log4js');
|
||||
var logger = log4js.getLogger();
|
||||
logger.debug("Some debug messages");
|
||||
```
|
||||
By default, log4js outputs to stdout with the coloured layout (thanks to [masylum](http://github.com/masylum)), so for the above you would see:
|
||||
```bash
|
||||
[2010-01-17 11:43:37.987] [DEBUG] [default] - Some debug messages
|
||||
```
|
||||
See example.js for a full example, but here's a snippet (also in fromreadme.js):
|
||||
```javascript
|
||||
var log4js = require('log4js');
|
||||
//console log is loaded by default, so you won't normally need to do this
|
||||
//log4js.loadAppender('console');
|
||||
log4js.loadAppender('file');
|
||||
//log4js.addAppender(log4js.appenders.console());
|
||||
log4js.addAppender(log4js.appenders.file('logs/cheese.log'), 'cheese');
|
||||
|
||||
[2010-01-17 11:43:37.987] [DEBUG] [default] - Some debug messages
|
||||
var logger = log4js.getLogger('cheese');
|
||||
logger.setLevel('ERROR');
|
||||
|
||||
See example.js:
|
||||
|
||||
var log4js = require('log4js'); //note the need to call the function
|
||||
log4js.addAppender(log4js.consoleAppender());
|
||||
log4js.addAppender(log4js.fileAppender('logs/cheese.log'), 'cheese');
|
||||
|
||||
var logger = log4js.getLogger('cheese');
|
||||
logger.setLevel('ERROR');
|
||||
|
||||
logger.trace('Entering cheese testing');
|
||||
logger.debug('Got cheese.');
|
||||
logger.info('Cheese is Gouda.');
|
||||
logger.warn('Cheese is quite smelly.');
|
||||
logger.error('Cheese is too ripe!');
|
||||
logger.fatal('Cheese was breeding ground for listeria.');
|
||||
|
||||
logger.trace('Entering cheese testing');
|
||||
logger.debug('Got cheese.');
|
||||
logger.info('Cheese is Gouda.');
|
||||
logger.warn('Cheese is quite smelly.');
|
||||
logger.error('Cheese is too ripe!');
|
||||
logger.fatal('Cheese was breeding ground for listeria.');
|
||||
```
|
||||
Output:
|
||||
```bash
|
||||
[2010-01-17 11:43:37.987] [ERROR] cheese - Cheese is too ripe!
|
||||
[2010-01-17 11:43:37.990] [FATAL] cheese - Cheese was breeding ground for listeria.
|
||||
```
|
||||
The first 5 lines of the code above could also be written as:
|
||||
```javascript
|
||||
var log4js = require('log4js');
|
||||
log4js.configure({
|
||||
appenders: [
|
||||
{ type: 'console' },
|
||||
{ type: 'file', filename: 'logs/cheese.log', category: 'cheese' }
|
||||
]
|
||||
});
|
||||
```
|
||||
|
||||
[2010-01-17 11:43:37.987] [ERROR] cheese - Cheese is too ripe!
|
||||
[2010-01-17 11:43:37.990] [FATAL] cheese - Cheese was breeding ground for listeria.
|
||||
|
||||
|
||||
## configuration
|
||||
|
||||
You can either configure the appenders and log levels manually (as above), or provide a
|
||||
configuration file (`log4js.configure('path/to/file.json')`) explicitly, or just let log4js look for a file called `log4js.json` (it looks in the current directory first, then the require paths, and finally looks for the default config included in the same directory as the `log4js.js` file).
|
||||
An example file can be found in `test/log4js.json`. An example config file with log rolling is in `test/with-log-rolling.json`
|
||||
You can also pass an object to the configure function, which has the same properties as the json versions.
|
||||
You can configure the appenders and log levels manually (as above), or provide a
|
||||
configuration file (`log4js.configure('path/to/file.json')`), or a configuration object. The
|
||||
configuration file location may also be specified via the environment variable
|
||||
LOG4JS_CONFIG (`export LOG4JS_CONFIG=path/to/file.json`).
|
||||
An example file can be found in `test/log4js.json`. An example config file with log rolling is in `test/with-log-rolling.json`.
|
||||
By default, the configuration file is checked for changes every 60 seconds, and if changed, reloaded. This allows changes to logging levels to occur without restarting the application.
|
||||
|
||||
## connect/express logger
|
||||
To turn off configuration file change checking, configure with:
|
||||
|
||||
A connect/express logger has been added to log4js, by [danbell](https://github.com/danbell). This allows connect/express servers to log using log4js. See example-connect-logger.js.
|
||||
```javascript
|
||||
var log4js = require('log4js');
|
||||
log4js.configure('my_log4js_configuration.json', {});
|
||||
```
|
||||
To specify a different period:
|
||||
|
||||
var log4js = require('./lib/log4js');
|
||||
log4js.addAppender(log4js.consoleAppender());
|
||||
log4js.addAppender(log4js.fileAppender('cheese.log'), 'cheese');
|
||||
|
||||
var logger = log4js.getLogger('cheese');
|
||||
|
||||
logger.setLevel('INFO');
|
||||
|
||||
var app = require('express').createServer();
|
||||
app.configure(function() {
|
||||
app.use(log4js.connectLogger(logger, { level: log4js.levels.INFO }));
|
||||
});
|
||||
app.get('/', function(req,res) {
|
||||
res.send('hello world');
|
||||
});
|
||||
app.listen(5000);
|
||||
```javascript
|
||||
log4js.configure('file.json', { reloadSecs: 300 });
|
||||
```
|
||||
For FileAppender you can also pass the path to the log directory as an option where all your log files would be stored.
|
||||
|
||||
The options object that is passed to log4js.connectLogger supports a format string the same as the connect/express logger. For example:
|
||||
```javascript
|
||||
log4js.configure('my_log4js_configuration.json', { cwd: '/absolute/path/to/log/dir' });
|
||||
```
|
||||
If you have already defined an absolute path for one of the FileAppenders in the configuration file, you can add `"absolute": true` to that FileAppender to override the `cwd` option passed in. Here is an example configuration file:
|
||||
```json
|
||||
#### my_log4js_configuration.json ####
|
||||
{
|
||||
"appenders": [
|
||||
{
|
||||
"type": "file",
|
||||
"filename": "relative/path/to/log_file.log",
|
||||
"maxLogSize": 20480,
|
||||
"backups": 3,
|
||||
"category": "relative-logger"
|
||||
},
|
||||
{
|
||||
"type": "file",
|
||||
"absolute": true,
|
||||
"filename": "/absolute/path/to/log_file.log",
|
||||
"maxLogSize": 20480,
|
||||
"backups": 10,
|
||||
"category": "absolute-logger"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
Documentation for most of the core appenders can be found on the [wiki](https://github.com/nomiddlename/log4js-node/wiki/Appenders), otherwise take a look at the tests and the examples.
|
||||
|
||||
app.configure(function() {
|
||||
app.use(log4js.connectLogger(logger, { level: log4js.levels.INFO, format: ':method :url' }));
|
||||
});
|
||||
## Documentation
|
||||
See the [wiki](https://github.com/nomiddlename/log4js-node/wiki). Improve the [wiki](https://github.com/nomiddlename/log4js-node/wiki), please.
|
||||
|
||||
## author (of this node version)
|
||||
|
||||
Gareth Jones (csausdev - gareth.jones@sensis.com.au)
|
||||
## Contributing
|
||||
Contributions welcome, but take a look at the [rules](https://github.com/nomiddlename/log4js-node/wiki/Contributing) first.
|
||||
|
||||
## License
|
||||
|
||||
The original log4js was distributed under the Apache 2.0 License, and so is this. I've tried to
|
||||
keep the original copyright and author credits in place, except in sections that I have rewritten
|
||||
extensively.
|
||||
|
||||
@@ -1,14 +0,0 @@
|
||||
var log4js = require('./lib/log4js');
|
||||
log4js.addAppender(log4js.fileAppender('cheese.log'), 'cheese');
|
||||
|
||||
var logger = log4js.getLogger('cheese');
|
||||
logger.setLevel('INFO');
|
||||
|
||||
var app = require('express').createServer();
|
||||
app.configure(function() {
|
||||
app.use(log4js.connectLogger(logger, { level: log4js.levels.INFO }));
|
||||
});
|
||||
app.get('/', function(req,res) {
|
||||
res.send('hello world');
|
||||
});
|
||||
app.listen(5000);
|
||||
example.js (22 lines)
@@ -1,22 +0,0 @@
|
||||
var log4js = require('./lib/log4js');
|
||||
//log the cheese logger messages to a file, and the console ones as well.
|
||||
log4js.addAppender(log4js.fileAppender('cheese.log'), 'cheese', 'console');
|
||||
|
||||
var logger = log4js.getLogger('cheese');
|
||||
//only errors and above get logged.
|
||||
logger.setLevel('ERROR');
|
||||
|
||||
//console logging methods have been replaced with log4js ones.
|
||||
console.error("AAArgh! Something went wrong", { some: "otherObject", useful_for: "debug purposes" });
|
||||
|
||||
//these will not appear (logging level beneath error)
|
||||
logger.trace('Entering cheese testing');
|
||||
logger.debug('Got cheese.');
|
||||
logger.info('Cheese is Gouda.');
|
||||
logger.warn('Cheese is quite smelly.');
|
||||
//these end up on the console and in cheese.log
|
||||
logger.error('Cheese %s is too ripe!', "gouda");
|
||||
logger.fatal('Cheese was breeding ground for listeria.');
|
||||
|
||||
|
||||
|
||||
examples/example-connect-logger.js (Normal file, 46 lines)
@@ -0,0 +1,46 @@
|
||||
//The connect/express logger was added to log4js by danbell. This allows connect/express servers to log using log4js.
|
||||
//https://github.com/nomiddlename/log4js-node/wiki/Connect-Logger
|
||||
|
||||
// load modules
|
||||
var log4js = require('log4js');
|
||||
var express = require("express");
|
||||
var app = express();
|
||||
|
||||
//config
|
||||
log4js.configure({
|
||||
appenders: [
|
||||
{ type: 'console' },
|
||||
{ type: 'file', filename: 'logs/log4jsconnect.log', category: 'log4jslog' }
|
||||
]
|
||||
});
|
||||
|
||||
//define logger
|
||||
var logger = log4js.getLogger('log4jslog');
|
||||
|
||||
// set the level at which messages are logged, e.g. only ERROR and above
|
||||
// logger.setLevel('ERROR');
|
||||
|
||||
//express app
|
||||
app.configure(function() {
|
||||
app.use(express.favicon(''));
|
||||
// app.use(log4js.connectLogger(logger, { level: log4js.levels.INFO }));
|
||||
// app.use(log4js.connectLogger(logger, { level: 'auto', format: ':method :url :status' }));
|
||||
|
||||
//### AUTO LEVEL DETECTION
|
||||
//http responses 3xx, level = WARN
|
||||
//http responses 4xx & 5xx, level = ERROR
|
||||
//all other responses, level = INFO
|
||||
app.use(log4js.connectLogger(logger, { level: 'auto' }));
|
||||
});
|
||||
|
||||
//route
|
||||
app.get('/', function(req,res) {
|
||||
res.send('hello world');
|
||||
});
|
||||
|
||||
//start app
|
||||
app.listen(5000);
|
||||
|
||||
console.log('server running at localhost:5000');
|
||||
console.log('Simulation of normal response: goto localhost:5000');
|
||||
console.log('Simulation of error response: goto localhost:5000/xxx');
|
||||
examples/example-socket.js (Normal file, 45 lines)
@@ -0,0 +1,45 @@
|
||||
var log4js = require('./lib/log4js')
|
||||
, cluster = require('cluster')
|
||||
, numCPUs = require('os').cpus().length
|
||||
, i = 0;
|
||||
|
||||
if (cluster.isMaster) {
|
||||
log4js.configure({
|
||||
appenders: [
|
||||
{
|
||||
type: "multiprocess",
|
||||
mode: "master",
|
||||
appender: {
|
||||
type: "console"
|
||||
}
|
||||
}
|
||||
]
|
||||
});
|
||||
|
||||
console.info("Master creating %d workers", numCPUs);
|
||||
for (i=0; i < numCPUs; i++) {
|
||||
cluster.fork();
|
||||
}
|
||||
|
||||
cluster.on('death', function(worker) {
|
||||
console.info("Worker %d died.", worker.pid);
|
||||
});
|
||||
} else {
|
||||
log4js.configure({
|
||||
appenders: [
|
||||
{
|
||||
type: "multiprocess",
|
||||
mode: "worker"
|
||||
}
|
||||
]
|
||||
});
|
||||
var logger = log4js.getLogger('example-socket');
|
||||
|
||||
console.info("Worker %d started.", process.pid);
|
||||
for (i=0; i < 1000; i++) {
|
||||
logger.info("Worker %d - logging something %d", process.pid, i);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
examples/example.js (Normal file, 58 lines)
@@ -0,0 +1,58 @@
|
||||
var log4js = require('../lib/log4js');
|
||||
//log the cheese logger messages to a file, and the console ones as well.
|
||||
log4js.configure({
|
||||
appenders: [
|
||||
{
|
||||
type: "file",
|
||||
filename: "cheese.log",
|
||||
category: [ 'cheese','console' ]
|
||||
},
|
||||
{
|
||||
type: "console"
|
||||
}
|
||||
],
|
||||
replaceConsole: true
|
||||
});
|
||||
|
||||
//to add an appender programmatically, and without clearing other appenders
|
||||
//loadAppender is only necessary if you haven't already configured an appender of this type
|
||||
log4js.loadAppender('file');
|
||||
log4js.addAppender(log4js.appenders.file('pants.log'), 'pants');
|
||||
//a custom logger outside of the log4js/lib/appenders directory can be accessed like so
|
||||
//log4js.loadAppender('what/you/would/put/in/require');
|
||||
//log4js.addAppender(log4js.appenders['what/you/would/put/in/require'](args));
|
||||
//or through configure as:
|
||||
//log4js.configure({
|
||||
// appenders: [ { type: 'what/you/would/put/in/require', otherArgs: 'blah' } ]
|
||||
//});
|
||||
|
||||
var logger = log4js.getLogger('cheese');
|
||||
//only errors and above get logged.
|
||||
//you can also set this log level in the config object
|
||||
//via the levels field.
|
||||
logger.setLevel('ERROR');
|
||||
|
||||
//console logging methods have been replaced with log4js ones.
|
||||
//so this will get coloured output on console, and appear in cheese.log
|
||||
console.error("AAArgh! Something went wrong", { some: "otherObject", useful_for: "debug purposes" });
|
||||
|
||||
//these will not appear (logging level beneath error)
|
||||
logger.trace('Entering cheese testing');
|
||||
logger.debug('Got cheese.');
|
||||
logger.info('Cheese is Gouda.');
|
||||
logger.warn('Cheese is quite smelly.');
|
||||
//these end up on the console and in cheese.log
|
||||
logger.error('Cheese %s is too ripe!', "gouda");
|
||||
logger.fatal('Cheese was breeding ground for listeria.');
|
||||
|
||||
//these don't end up in cheese.log, but will appear on the console
|
||||
var anotherLogger = log4js.getLogger('another');
|
||||
anotherLogger.debug("Just checking");
|
||||
|
||||
//one for pants.log
|
||||
//will also go to console, since that's configured for all categories
|
||||
var pantsLog = log4js.getLogger('pants');
|
||||
pantsLog.debug("Something for pants");
|
||||
|
||||
|
||||
|
||||
examples/flush-on-exit.js (Normal file, 27 lines)
@@ -0,0 +1,27 @@
|
||||
/**
|
||||
* run this, then "ab -c 10 -n 100 localhost:4444/" to test (in
|
||||
* another shell)
|
||||
*/
|
||||
var log4js = require('../lib/log4js');
|
||||
log4js.configure({
|
||||
appenders: [
|
||||
{ type: 'file', filename: 'cheese.log', category: 'cheese' },
|
||||
{ type: 'console'}
|
||||
]
|
||||
});
|
||||
|
||||
var logger = log4js.getLogger('cheese');
|
||||
logger.setLevel('INFO');
|
||||
|
||||
var http=require('http');
|
||||
|
||||
var server = http.createServer(function(request, response){
|
||||
response.writeHead(200, {'Content-Type': 'text/plain'});
|
||||
var rd = Math.random() * 50;
|
||||
logger.info("hello " + rd);
|
||||
response.write('hello ');
|
||||
if (Math.floor(rd) == 30){
|
||||
log4js.shutdown(function() { process.exit(1); });
|
||||
}
|
||||
response.end();
|
||||
}).listen(4444);
|
||||
examples/fromreadme.js (Normal file, 19 lines)
@@ -0,0 +1,19 @@
|
||||
//remember to change the require to just 'log4js' if you've npm install'ed it
|
||||
var log4js = require('./lib/log4js');
|
||||
//by default the console appender is loaded
|
||||
//log4js.loadAppender('console');
|
||||
//you'd only need to add the console appender if you
|
||||
//had previously called log4js.clearAppenders();
|
||||
//log4js.addAppender(log4js.appenders.console());
|
||||
log4js.loadAppender('file');
|
||||
log4js.addAppender(log4js.appenders.file('cheese.log'), 'cheese');
|
||||
|
||||
var logger = log4js.getLogger('cheese');
|
||||
logger.setLevel('ERROR');
|
||||
|
||||
logger.trace('Entering cheese testing');
|
||||
logger.debug('Got cheese.');
|
||||
logger.info('Cheese is Gouda.');
|
||||
logger.warn('Cheese is quite smelly.');
|
||||
logger.error('Cheese is too ripe!');
|
||||
logger.fatal('Cheese was breeding ground for listeria.');
|
||||
examples/log-rolling.js (Normal file, 27 lines)
@@ -0,0 +1,27 @@
|
||||
var log4js = require('../lib/log4js')
|
||||
, log
|
||||
, i = 0;
|
||||
log4js.configure({
|
||||
"appenders": [
|
||||
{
|
||||
type: "console"
|
||||
, category: "console"
|
||||
},
|
||||
{
|
||||
"type": "file",
|
||||
"filename": "tmp-test.log",
|
||||
"maxLogSize": 1024,
|
||||
"backups": 3,
|
||||
"category": "test"
|
||||
}
|
||||
]
|
||||
});
|
||||
log = log4js.getLogger("test");
|
||||
|
||||
function doTheLogging(x) {
|
||||
log.info("Logging something %d", x);
|
||||
}
|
||||
|
||||
for ( ; i < 5000; i++) {
|
||||
doTheLogging(i);
|
||||
}
|
||||
examples/loggly-appender.js (Normal file, 24 lines)
@@ -0,0 +1,24 @@
|
||||
//Note that loggly appender needs node-loggly to work.
|
||||
//If you haven't got node-loggly installed, you'll get cryptic
|
||||
//"cannot find module" errors when using the loggly appender
|
||||
var log4js = require('../lib/log4js');
|
||||
|
||||
log4js.configure({
|
||||
"appenders": [
|
||||
{
|
||||
type: "console",
|
||||
category: "test"
|
||||
},
|
||||
{
|
||||
"type" : "loggly",
|
||||
"token" : "12345678901234567890",
|
||||
"subdomain": "your-subdomain",
|
||||
"tags" : ["test"],
|
||||
"category" : "loggly"
|
||||
}
|
||||
]
|
||||
});
|
||||
|
||||
var logger = log4js.getLogger("loggly");
|
||||
logger.info("Test log message");
|
||||
//logger.debug("Test log message");
|
||||
examples/patternLayout-tokens.js (Normal file, 21 lines)
@@ -0,0 +1,21 @@
|
||||
var log4js = require('./lib/log4js');
|
||||
|
||||
var config = {
|
||||
"appenders": [
|
||||
{
|
||||
"type": "console",
|
||||
"layout": {
|
||||
"type": "pattern",
|
||||
"pattern": "%[%r (%x{pid}) %p %c -%] %m%n",
|
||||
"tokens": {
|
||||
"pid" : function() { return process.pid; }
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
};
|
||||
|
||||
log4js.configure(config, {});
|
||||
|
||||
var logger = log4js.getLogger("app");
|
||||
logger.info("Test log message");
|
||||
examples/smtp-appender.js (Normal file, 43 lines)
@@ -0,0 +1,43 @@
|
||||
//Note that smtp appender needs nodemailer to work.
|
||||
//If you haven't got nodemailer installed, you'll get cryptic
|
||||
//"cannot find module" errors when using the smtp appender
|
||||
var log4js = require('../lib/log4js')
|
||||
, log
|
||||
, logmailer
|
||||
, i = 0;
|
||||
log4js.configure({
|
||||
"appenders": [
|
||||
{
|
||||
type: "console",
|
||||
category: "test"
|
||||
},
|
||||
{
|
||||
"type": "smtp",
|
||||
"recipients": "logfilerecipient@logging.com",
|
||||
"sendInterval": 5,
|
||||
"transport": "SMTP",
|
||||
"SMTP": {
|
||||
"host": "smtp.gmail.com",
|
||||
"secureConnection": true,
|
||||
"port": 465,
|
||||
"auth": {
|
||||
"user": "someone@gmail",
|
||||
"pass": "********************"
|
||||
},
|
||||
"debug": true
|
||||
},
|
||||
"category": "mailer"
|
||||
}
|
||||
]
|
||||
});
|
||||
log = log4js.getLogger("test");
|
||||
logmailer = log4js.getLogger("mailer");
|
||||
|
||||
function doTheLogging(x) {
|
||||
log.info("Logging something %d", x);
|
||||
logmailer.info("Logging something %d", x);
|
||||
}
|
||||
|
||||
for ( ; i < 500; i++) {
|
||||
doTheLogging(i);
|
||||
}
|
||||
lib/appenders/categoryFilter.js (Normal file, 20 lines)
@@ -0,0 +1,20 @@
|
||||
"use strict";
|
||||
var log4js = require('../log4js');
|
||||
|
||||
function categoryFilter (excludes, appender) {
|
||||
if (typeof(excludes) === 'string') excludes = [excludes];
|
||||
return function(logEvent) {
|
||||
if (excludes.indexOf(logEvent.categoryName) === -1) {
|
||||
appender(logEvent);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
function configure(config) {
|
||||
log4js.loadAppender(config.appender.type);
|
||||
var appender = log4js.appenderMakers[config.appender.type](config.appender);
|
||||
return categoryFilter(config.exclude, appender);
|
||||
}
|
||||
|
||||
exports.appender = categoryFilter;
|
||||
exports.configure = configure;
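A configuration sketch for this category filter; the field names (`exclude`, `appender`) are read from `configure()` above, while the `"categoryFilter"` type string and the `"http"` category are assumptions for illustration:

```javascript
var log4js = require('log4js');

log4js.configure({
  appenders: [
    // drop events from the "http" category, pass everything else to the console
    { type: "categoryFilter", exclude: "http", appender: { type: "console" } }
  ]
});

log4js.getLogger('http').info('filtered out');
log4js.getLogger('app').info('reaches the console');
```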
|
||||
lib/appenders/clustered.js (Executable file, 129 lines)
@@ -0,0 +1,129 @@
|
||||
"use strict";
|
||||
|
||||
var cluster = require('cluster');
|
||||
var log4js = require('../log4js');
|
||||
|
||||
/**
|
||||
* Takes a loggingEvent object, returns string representation of it.
|
||||
*/
|
||||
function serializeLoggingEvent(loggingEvent) {
|
||||
// JSON.stringify(new Error('test')) returns {}, which is not really useful for us.
|
||||
// The following allows us to serialize errors correctly.
|
||||
for (var i = 0; i < loggingEvent.data.length; i++) {
|
||||
var item = loggingEvent.data[i];
|
||||
if (item && item.stack && JSON.stringify(item) === '{}') { // Validate that we really are in this case
|
||||
loggingEvent.data[i] = {stack : item.stack};
|
||||
}
|
||||
}
|
||||
return JSON.stringify(loggingEvent);
|
||||
}
|
||||
|
||||
/**
|
||||
* Takes a string, returns an object with
|
||||
* the correct log properties.
|
||||
*
|
||||
* This method has been "borrowed" from the `multiprocess` appender
|
||||
* by `nomiddlename` (https://github.com/nomiddlename/log4js-node/blob/master/lib/appenders/multiprocess.js)
|
||||
*
|
||||
* Apparently, node.js serializes everything to strings when using `process.send()`,
|
||||
* so we need smart deserialization that will recreate log date and level for further processing by log4js internals.
|
||||
*/
|
||||
function deserializeLoggingEvent(loggingEventString) {
|
||||
|
||||
var loggingEvent;
|
||||
|
||||
try {
|
||||
|
||||
loggingEvent = JSON.parse(loggingEventString);
|
||||
loggingEvent.startTime = new Date(loggingEvent.startTime);
|
||||
loggingEvent.level = log4js.levels.toLevel(loggingEvent.level.levelStr);
|
||||
|
||||
} catch (e) {
|
||||
|
||||
// JSON.parse failed, so just log the raw contents; it probably wasn't a serialised logging event.
|
||||
loggingEvent = {
|
||||
startTime: new Date(),
|
||||
categoryName: 'log4js',
|
||||
level: log4js.levels.ERROR,
|
||||
data: [ 'Unable to parse log:', loggingEventString ]
|
||||
};
|
||||
}
|
||||
return loggingEvent;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates an appender.
|
||||
*
|
||||
* If the current process is a master (`cluster.isMaster`), then this will be a "master appender".
|
||||
* Otherwise this will be a worker appender, that just sends loggingEvents to the master process.
|
||||
*
|
||||
* If you are using this method directly, make sure to provide it with `config.actualAppenders` array
|
||||
* of actual appender instances.
|
||||
*
|
||||
* Or better use `configure(config, options)`
|
||||
*/
|
||||
function createAppender(config) {
|
||||
|
||||
if (cluster.isMaster) {
|
||||
|
||||
var masterAppender = function(loggingEvent) {
|
||||
|
||||
if (config.actualAppenders) {
|
||||
var size = config.actualAppenders.length;
|
||||
for(var i = 0; i < size; i++) {
|
||||
if (!config.appenders[i].category || config.appenders[i].category === loggingEvent.categoryName) {
|
||||
// Relying on the index is not a good practice but otherwise the change would have been bigger.
|
||||
config.actualAppenders[i](loggingEvent);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Listen on new workers
|
||||
cluster.on('fork', function(worker) {
|
||||
|
||||
worker.on('message', function(message) {
|
||||
if (message.type && message.type === '::log-message') {
|
||||
// console.log("master : " + cluster.isMaster + " received message: " + JSON.stringify(message.event));
|
||||
|
||||
var loggingEvent = deserializeLoggingEvent(message.event);
|
||||
masterAppender(loggingEvent);
|
||||
}
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
return masterAppender;
|
||||
|
||||
} else {
|
||||
|
||||
return function(loggingEvent) {
|
||||
// If inside the worker process, then send the logger event to master.
|
||||
if (cluster.isWorker) {
|
||||
// console.log("worker " + cluster.worker.id + " is sending message");
|
||||
process.send({ type: '::log-message', event: serializeLoggingEvent(loggingEvent)});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function configure(config, options) {
|
||||
|
||||
if (config.appenders && cluster.isMaster) {
|
||||
|
||||
var size = config.appenders.length;
|
||||
config.actualAppenders = new Array(size);
|
||||
|
||||
for(var i = 0; i < size; i++) {
|
||||
|
||||
log4js.loadAppender(config.appenders[i].type);
|
||||
config.actualAppenders[i] = log4js.appenderMakers[config.appenders[i].type](config.appenders[i], options);
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
return createAppender(config);
|
||||
}
|
||||
|
||||
exports.appender = createAppender;
|
||||
exports.configure = configure;
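A sketch of how this might be wired up through `configure()` above; the `"clustered"` type name and the nested `appenders` list are read from that function, the rest is illustrative:

```javascript
var log4js = require('log4js');

// the same configuration is loaded in the master and in each worker:
// in the master it builds the actual appenders, in workers it only forwards events
log4js.configure({
  appenders: [
    { type: "clustered",
      appenders: [
        { type: "console" },
        { type: "file", filename: "logs/cluster.log" }
      ]
    }
  ]
});

log4js.getLogger('worker').info('this event is written out by the cluster master');
```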
|
||||
lib/appenders/console.js (Normal file, 21 lines)
@@ -0,0 +1,21 @@
|
||||
"use strict";
|
||||
var layouts = require('../layouts')
|
||||
, consoleLog = console.log.bind(console);
|
||||
|
||||
function consoleAppender (layout) {
|
||||
layout = layout || layouts.colouredLayout;
|
||||
return function(loggingEvent) {
|
||||
consoleLog(layout(loggingEvent));
|
||||
};
|
||||
}
|
||||
|
||||
function configure(config) {
|
||||
var layout;
|
||||
if (config.layout) {
|
||||
layout = layouts.layout(config.layout.type, config.layout);
|
||||
}
|
||||
return consoleAppender(layout);
|
||||
}
|
||||
|
||||
exports.appender = consoleAppender;
|
||||
exports.configure = configure;
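A small sketch of overriding the default coloured layout via the `layout` field that `configure()` above understands; the pattern string mirrors examples/patternLayout-tokens.js:

```javascript
var log4js = require('log4js');

log4js.configure({
  appenders: [
    { type: "console", layout: { type: "pattern", pattern: "%r %p %c - %m%n" } }
  ]
});

log4js.getLogger('app').info('formatted with the pattern layout');
```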
|
||||
lib/appenders/dateFile.js (Normal file, 72 lines)
@@ -0,0 +1,72 @@
|
||||
"use strict";
|
||||
var streams = require('../streams')
|
||||
, layouts = require('../layouts')
|
||||
, async = require('async')
|
||||
, path = require('path')
|
||||
, os = require('os')
|
||||
, eol = os.EOL || '\n'
|
||||
, openFiles = [];
|
||||
|
||||
//close open files on process exit.
|
||||
process.on('exit', function() {
|
||||
openFiles.forEach(function (file) {
|
||||
file.end();
|
||||
});
|
||||
});
|
||||
|
||||
/**
|
||||
* File appender that rolls files according to a date pattern.
|
||||
* @filename base filename.
|
||||
* @pattern the format that will be added to the end of filename when rolling,
|
||||
* also used to check when to roll files - defaults to '.yyyy-MM-dd'
|
||||
* @layout layout function for log messages - defaults to basicLayout
|
||||
*/
|
||||
function appender(filename, pattern, alwaysIncludePattern, layout) {
|
||||
layout = layout || layouts.basicLayout;
|
||||
|
||||
var logFile = new streams.DateRollingFileStream(
|
||||
filename,
|
||||
pattern,
|
||||
{ alwaysIncludePattern: alwaysIncludePattern }
|
||||
);
|
||||
openFiles.push(logFile);
|
||||
|
||||
return function(logEvent) {
|
||||
logFile.write(layout(logEvent) + eol, "utf8");
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
function configure(config, options) {
|
||||
var layout;
|
||||
|
||||
if (config.layout) {
|
||||
layout = layouts.layout(config.layout.type, config.layout);
|
||||
}
|
||||
|
||||
if (!config.alwaysIncludePattern) {
|
||||
config.alwaysIncludePattern = false;
|
||||
}
|
||||
|
||||
if (options && options.cwd && !config.absolute) {
|
||||
config.filename = path.join(options.cwd, config.filename);
|
||||
}
|
||||
|
||||
return appender(config.filename, config.pattern, config.alwaysIncludePattern, layout);
|
||||
}
|
||||
|
||||
function shutdown(cb) {
|
||||
async.forEach(openFiles, function(file, done) {
|
||||
if (!file.write(eol, "utf-8")) {
|
||||
file.once('drain', function() {
|
||||
file.end(done);
|
||||
});
|
||||
} else {
|
||||
file.end(done);
|
||||
}
|
||||
}, cb);
|
||||
}
|
||||
|
||||
exports.appender = appender;
|
||||
exports.configure = configure;
|
||||
exports.shutdown = shutdown;
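A configuration sketch for the date-rolling appender; the field names come from `configure()` above and the pattern shown is the documented default, while the `"dateFile"` type string is assumed from the file name:

```javascript
var log4js = require('log4js');

log4js.configure({
  appenders: [
    { type: "dateFile",
      filename: "logs/app.log",
      pattern: ".yyyy-MM-dd",        // appended to the filename when the file rolls
      alwaysIncludePattern: false,   // write to app.log and rename on roll
      category: "app" }
  ]
});

log4js.getLogger('app').info('rolls to a new file when the date pattern changes');
```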
|
||||
lib/appenders/file.js (Normal file, 96 lines)
@@ -0,0 +1,96 @@
|
||||
"use strict";
|
||||
var layouts = require('../layouts')
|
||||
, async = require('async')
|
||||
, path = require('path')
|
||||
, fs = require('fs')
|
||||
, streams = require('../streams')
|
||||
, os = require('os')
|
||||
, eol = os.EOL || '\n'
|
||||
, openFiles = [];
|
||||
|
||||
//close open files on process exit.
|
||||
process.on('exit', function() {
|
||||
openFiles.forEach(function (file) {
|
||||
file.end();
|
||||
});
|
||||
});
|
||||
|
||||
/**
|
||||
* File Appender writing the logs to a text file. Supports rolling of logs by size.
|
||||
*
|
||||
* @param file file log messages will be written to
|
||||
* @param layout a function that takes a logevent and returns a string
|
||||
* (defaults to basicLayout).
|
||||
* @param logSize - the maximum size (in bytes) for a log file,
|
||||
* if not provided then logs won't be rotated.
|
||||
* @param numBackups - the number of log files to keep after logSize
|
||||
* has been reached (default 5)
|
||||
*/
|
||||
function fileAppender (file, layout, logSize, numBackups) {
|
||||
var bytesWritten = 0;
|
||||
file = path.normalize(file);
|
||||
layout = layout || layouts.basicLayout;
|
||||
numBackups = numBackups === undefined ? 5 : numBackups;
|
||||
//there has to be at least one backup if logSize has been specified
|
||||
numBackups = numBackups === 0 ? 1 : numBackups;
|
||||
|
||||
function openTheStream(file, fileSize, numFiles) {
|
||||
var stream;
|
||||
if (fileSize) {
|
||||
stream = new streams.RollingFileStream(
|
||||
file,
|
||||
fileSize,
|
||||
numFiles
|
||||
);
|
||||
} else {
|
||||
stream = fs.createWriteStream(
|
||||
file,
|
||||
{ encoding: "utf8",
|
||||
mode: parseInt('0644', 8),
|
||||
flags: 'a' }
|
||||
);
|
||||
}
|
||||
stream.on("error", function (err) {
|
||||
console.error("log4js.fileAppender - Writing to file %s, error happened ", file, err);
|
||||
});
|
||||
return stream;
|
||||
}
|
||||
|
||||
var logFile = openTheStream(file, logSize, numBackups);
|
||||
|
||||
// push file to the stack of open handlers
|
||||
openFiles.push(logFile);
|
||||
|
||||
return function(loggingEvent) {
|
||||
logFile.write(layout(loggingEvent) + eol, "utf8");
|
||||
};
|
||||
}
|
||||
|
||||
function configure(config, options) {
|
||||
var layout;
|
||||
if (config.layout) {
|
||||
layout = layouts.layout(config.layout.type, config.layout);
|
||||
}
|
||||
|
||||
if (options && options.cwd && !config.absolute) {
|
||||
config.filename = path.join(options.cwd, config.filename);
|
||||
}
|
||||
|
||||
return fileAppender(config.filename, layout, config.maxLogSize, config.backups);
|
||||
}
|
||||
|
||||
function shutdown(cb) {
|
||||
async.forEach(openFiles, function(file, done) {
|
||||
if (!file.write(eol, "utf-8")) {
|
||||
file.once('drain', function() {
|
||||
file.end(done);
|
||||
});
|
||||
} else {
|
||||
file.end(done);
|
||||
}
|
||||
}, cb);
|
||||
}
|
||||
|
||||
exports.appender = fileAppender;
|
||||
exports.configure = configure;
|
||||
exports.shutdown = shutdown;
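For comparison with the README examples above, a minimal size-based rolling configuration; `maxLogSize` is in bytes and `backups` falls back to 5 when omitted (see `fileAppender()`):

```javascript
var log4js = require('log4js');

log4js.configure({
  appenders: [
    { type: "file", filename: "logs/app.log", maxLogSize: 1048576, backups: 3, category: "app" }
  ]
});

log4js.getLogger('app').info('written to logs/app.log, rolled after 1MB');
```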
|
||||
lib/appenders/gelf.js (Normal file, 141 lines)
@@ -0,0 +1,141 @@
|
||||
"use strict";
|
||||
var zlib = require('zlib');
|
||||
var layouts = require('../layouts');
|
||||
var levels = require('../levels');
|
||||
var dgram = require('dgram');
|
||||
var util = require('util');
|
||||
var debug = require('../debug')('GELF Appender');
|
||||
|
||||
var LOG_EMERG=0; // system is unusable
|
||||
var LOG_ALERT=1; // action must be taken immediately
|
||||
var LOG_CRIT=2; // critical conditions
|
||||
var LOG_ERR=3; // error conditions
|
||||
var LOG_ERROR=3; // because people WILL typo
|
||||
var LOG_WARNING=4; // warning conditions
|
||||
var LOG_NOTICE=5; // normal, but significant, condition
|
||||
var LOG_INFO=6; // informational message
|
||||
var LOG_DEBUG=7; // debug-level message
|
||||
|
||||
var levelMapping = {};
|
||||
levelMapping[levels.ALL] = LOG_DEBUG;
|
||||
levelMapping[levels.TRACE] = LOG_DEBUG;
|
||||
levelMapping[levels.DEBUG] = LOG_DEBUG;
|
||||
levelMapping[levels.INFO] = LOG_INFO;
|
||||
levelMapping[levels.WARN] = LOG_WARNING;
|
||||
levelMapping[levels.ERROR] = LOG_ERR;
|
||||
levelMapping[levels.FATAL] = LOG_CRIT;
|
||||
|
||||
/**
|
||||
* GELF appender that supports sending UDP packets to a GELF compatible server such as Graylog
|
||||
*
|
||||
* @param layout a function that takes a logevent and returns a string (defaults to none).
|
||||
* @param host - host to which to send logs (default:localhost)
|
||||
* @param port - port at which to send logs to (default:12201)
|
||||
* @param hostname - hostname of the current host (default:os hostname)
|
||||
* @param facility - facility to log to (default:nodejs-server)
|
||||
*/
|
||||
function gelfAppender (layout, host, port, hostname, facility) {
|
||||
var config, customFields;
|
||||
if (typeof(host) === 'object') {
|
||||
config = host;
|
||||
host = config.host;
|
||||
port = config.port;
|
||||
hostname = config.hostname;
|
||||
facility = config.facility;
|
||||
customFields = config.customFields;
|
||||
}
|
||||
|
||||
host = host || 'localhost';
|
||||
port = port || 12201;
|
||||
hostname = hostname || require('os').hostname();
|
||||
facility = facility || 'nodejs-server';
|
||||
layout = layout || layouts.messagePassThroughLayout;
|
||||
|
||||
var defaultCustomFields = customFields || {};
|
||||
|
||||
var client = dgram.createSocket("udp4");
|
||||
|
||||
process.on('exit', function() {
|
||||
if (client) client.close();
|
||||
});
|
||||
|
||||
/**
|
||||
* Add custom fields (start with underscore )
|
||||
* - if the first object passed to the logger contains 'GELF' field,
|
||||
* copy the underscore fields to the message
|
||||
* @param loggingEvent
|
||||
* @param msg
|
||||
*/
|
||||
function addCustomFields(loggingEvent, msg){
|
||||
|
||||
/* append defaultCustomFields first */
|
||||
Object.keys(defaultCustomFields).forEach(function(key) {
|
||||
// skip the _id field (reserved by graylog2) and any key that does not start with an underscore
|
||||
if (key.match(/^_/) && key !== "_id") {
|
||||
msg[key] = defaultCustomFields[key];
|
||||
}
|
||||
});
|
||||
|
||||
/* append custom fields per message */
|
||||
var data = loggingEvent.data;
|
||||
if (!Array.isArray(data) || data.length === 0) return;
|
||||
var firstData = data[0];
|
||||
|
||||
if (!firstData.GELF) return; // identify with GELF field defined
|
||||
Object.keys(firstData).forEach(function(key) {
|
||||
// skip the _id field (reserved by graylog2) and any key that does not start with an underscore
if (key.match(/^_/) && key !== "_id") {
|
||||
msg[key] = firstData[key];
|
||||
}
|
||||
});
|
||||
|
||||
/* the custom field object should be removed, so it will not be logged by the later appenders */
|
||||
loggingEvent.data.shift();
|
||||
}
|
||||
|
||||
function preparePacket(loggingEvent) {
|
||||
var msg = {};
|
||||
addCustomFields(loggingEvent, msg);
|
||||
msg.full_message = layout(loggingEvent);
|
||||
msg.short_message = msg.full_message;
|
||||
|
||||
msg.version="1.0";
|
||||
msg.timestamp = msg.timestamp || new Date().getTime() / 1000; // GELF expects seconds since epoch
|
||||
msg.host = hostname;
|
||||
msg.level = levelMapping[loggingEvent.level || levels.DEBUG];
|
||||
msg.facility = facility;
|
||||
return msg;
|
||||
}
|
||||
|
||||
function sendPacket(packet) {
|
||||
try {
|
||||
client.send(packet, 0, packet.length, port, host);
|
||||
} catch(e) {}
|
||||
}
|
||||
|
||||
return function(loggingEvent) {
|
||||
var message = preparePacket(loggingEvent);
|
||||
zlib.gzip(new Buffer(JSON.stringify(message)), function(err, packet) {
|
||||
if (err) {
|
||||
console.error(err.stack);
|
||||
} else {
|
||||
if (packet.length > 8192) {
|
||||
debug("Message packet length (" + packet.length + ") is larger than 8k. Not sending");
|
||||
} else {
|
||||
sendPacket(packet);
|
||||
}
|
||||
}
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
function configure(config) {
|
||||
var layout;
|
||||
if (config.layout) {
|
||||
layout = layouts.layout(config.layout.type, config.layout);
|
||||
}
|
||||
return gelfAppender(layout, config);
|
||||
}
|
||||
|
||||
exports.appender = gelfAppender;
|
||||
exports.configure = configure;
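A sketch of a GELF configuration together with per-message custom fields; the option names are taken from `gelfAppender()` above, while the `"gelf"` type string and the Graylog host are assumptions:

```javascript
var log4js = require('log4js');

log4js.configure({
  appenders: [
    { type: "gelf",
      host: "graylog.example.com",
      port: 12201,
      facility: "my-service",
      customFields: { "_environment": "staging" } }   // copied into every packet
  ]
});

var logger = log4js.getLogger();
// a first argument carrying a GELF marker contributes its underscore fields to this packet only
logger.info({ GELF: true, "_requestId": "abc123" }, "user logged in");
```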
|
||||
lib/appenders/hookio.js (Normal file, 76 lines)
@@ -0,0 +1,76 @@
|
||||
"use strict";
|
||||
var log4js = require('../log4js')
|
||||
, layouts = require('../layouts')
|
||||
, Hook = require('hook.io').Hook
|
||||
, util = require('util');
|
||||
|
||||
var Logger = function createLogger(options) {
|
||||
var self = this;
|
||||
var actualAppender = options.actualAppender;
|
||||
Hook.call(self, options);
|
||||
self.on('hook::ready', function hookReady() {
|
||||
self.on('*::' + options.name + '::log', function log(loggingEvent) {
|
||||
deserializeLoggingEvent(loggingEvent);
|
||||
actualAppender(loggingEvent);
|
||||
});
|
||||
});
|
||||
};
|
||||
util.inherits(Logger, Hook);
|
||||
|
||||
function deserializeLoggingEvent(loggingEvent) {
|
||||
loggingEvent.startTime = new Date(loggingEvent.startTime);
|
||||
loggingEvent.level.toString = function levelToString() {
|
||||
return loggingEvent.level.levelStr;
|
||||
};
|
||||
}
|
||||
|
||||
function initHook(hookioOptions) {
|
||||
var loggerHook;
|
||||
if (hookioOptions.mode === 'master') {
|
||||
// Start the master hook, handling the actual logging
|
||||
loggerHook = new Logger(hookioOptions);
|
||||
} else {
|
||||
// Start a worker, just emitting events for a master
|
||||
loggerHook = new Hook(hookioOptions);
|
||||
}
|
||||
loggerHook.start();
|
||||
return loggerHook;
|
||||
}
|
||||
|
||||
function getBufferedHook(hook, eventName) {
|
||||
var hookBuffer = [];
|
||||
var hookReady = false;
|
||||
hook.on('hook::ready', function emptyBuffer() {
|
||||
hookBuffer.forEach(function logBufferItem(loggingEvent) {
|
||||
hook.emit(eventName, loggingEvent);
|
||||
});
|
||||
hookReady = true;
|
||||
});
|
||||
|
||||
return function log(loggingEvent) {
|
||||
if (hookReady) {
|
||||
hook.emit(eventName, loggingEvent);
|
||||
} else {
|
||||
hookBuffer.push(loggingEvent);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
function createAppender(hookioOptions) {
|
||||
var loggerHook = initHook(hookioOptions);
|
||||
var loggerEvent = hookioOptions.name + '::log';
|
||||
return getBufferedHook(loggerHook, loggerEvent);
|
||||
}
|
||||
|
||||
function configure(config) {
|
||||
var actualAppender;
|
||||
if (config.appender && config.mode === 'master') {
|
||||
log4js.loadAppender(config.appender.type);
|
||||
actualAppender = log4js.appenderMakers[config.appender.type](config.appender);
|
||||
config.actualAppender = actualAppender;
|
||||
}
|
||||
return createAppender(config);
|
||||
}
|
||||
|
||||
exports.appender = createAppender;
|
||||
exports.configure = configure;
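A configuration sketch for the hook.io transport, with `name`, `mode`, and `appender` read from the code above; the `"hookio"` type string is assumed, and hook.io itself must be installed:

```javascript
var log4js = require('log4js');

// in the process that owns the hook.io master: receive events and write them to a file
log4js.configure({
  appenders: [
    { type: "hookio", name: "log4js-hook", mode: "master",
      appender: { type: "file", filename: "logs/all.log" } }
  ]
});

// in every other process: same hook name, but only emit events
// log4js.configure({
//   appenders: [ { type: "hookio", name: "log4js-hook", mode: "worker" } ]
// });
```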
|
||||
lib/appenders/logLevelFilter.js (Normal file, 21 lines)
@@ -0,0 +1,21 @@
|
||||
"use strict";
|
||||
var levels = require('../levels')
|
||||
, log4js = require('../log4js');
|
||||
|
||||
function logLevelFilter (levelString, appender) {
|
||||
var level = levels.toLevel(levelString);
|
||||
return function(logEvent) {
|
||||
if (logEvent.level.isGreaterThanOrEqualTo(level)) {
|
||||
appender(logEvent);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
function configure(config) {
|
||||
log4js.loadAppender(config.appender.type);
|
||||
var appender = log4js.appenderMakers[config.appender.type](config.appender);
|
||||
return logLevelFilter(config.level, appender);
|
||||
}
|
||||
|
||||
exports.appender = logLevelFilter;
|
||||
exports.configure = configure;
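A sketch of the level filter in use; `level` and `appender` are the keys `configure()` above expects, the rest is illustrative:

```javascript
var log4js = require('log4js');

log4js.configure({
  appenders: [
    { type: "console" },
    // only WARN and above reach the wrapped file appender
    { type: "logLevelFilter", level: "WARN",
      appender: { type: "file", filename: "logs/problems.log" } }
  ]
});

var logger = log4js.getLogger('app');
logger.info('console only');
logger.error('console and logs/problems.log');
```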
|
||||
lib/appenders/loggly.js (Normal file, 84 lines)
@@ -0,0 +1,84 @@
|
||||
'use strict';
|
||||
var layouts = require('../layouts')
|
||||
, loggly = require('loggly')
|
||||
, os = require('os');
|
||||
|
||||
/**
|
||||
* Loggly Appender. Sends logging events to Loggly using node-loggly
|
||||
*
|
||||
* @param config object with loggly configuration data
|
||||
* {
|
||||
* token: 'your-really-long-input-token',
|
||||
* subdomain: 'your-subdomain',
|
||||
* tags: ['loggly-tag1', 'loggly-tag2', .., 'loggly-tagn']
|
||||
* }
|
||||
* @param layout a function that takes a logevent and returns a string (defaults to objectLayout).
|
||||
*/
|
||||
function logglyAppender(config, layout) {
|
||||
var client = loggly.createClient(config);
|
||||
|
||||
var packageMessage = function (loggingEvent) {
|
||||
var BaseItem = function(level, msg) {
|
||||
this.level = level || loggingEvent.level.toString();
|
||||
this.category = loggingEvent.categoryName;
|
||||
this.hostname = os.hostname().toString();
|
||||
if (typeof msg !== 'undefined')
|
||||
this.msg = msg;
|
||||
};
|
||||
|
||||
var packageItem = function (item) {
|
||||
if (item instanceof Error)
|
||||
return new BaseItem('ERROR', item.message);
|
||||
|
||||
if (['string', 'number', 'boolean'].indexOf(typeof item) > -1 )
|
||||
return new BaseItem(undefined, item);
|
||||
|
||||
var obj = new BaseItem();
|
||||
if (Array.isArray(item)) {
  item.unshift(obj); //add base object as first item, then return the array (unshift itself returns the new length)
  return item;
}
|
||||
|
||||
if (item && Object.prototype.toString.call(item) === '[object Object]') {
|
||||
for (var key in item) {
|
||||
if (item.hasOwnProperty(key)) {
|
||||
obj[key] = item[key]; //don't do packageItem on nested items, because level, category and hostname are needed on top level items only.
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return obj;
|
||||
};
|
||||
|
||||
if (loggingEvent.data.length === 1) {
|
||||
return packageItem(loggingEvent.data[0]);
|
||||
}
|
||||
//length >1
|
||||
var msg = loggingEvent.data;
|
||||
for (var i = 0, l = msg.length; i < l; i++) {
|
||||
msg[i] = packageItem(msg[i]);
|
||||
}
|
||||
|
||||
return msg;
|
||||
};
|
||||
|
||||
return function(loggingEvent) {
|
||||
var a = layout ? layout(loggingEvent) : packageMessage(loggingEvent);
|
||||
//console.log('log now', a);
|
||||
client.log(a, config.tags, function(err, result) {
|
||||
if (err) {
|
||||
throw err;
|
||||
}
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
function configure(config) {
|
||||
var layout;
|
||||
if (config.layout) {
|
||||
layout = layouts.layout(config.layout.type, config.layout);
|
||||
}
|
||||
return logglyAppender(config, layout);
|
||||
}
|
||||
|
||||
exports.name = 'loggly';
|
||||
exports.appender = logglyAppender;
|
||||
exports.configure = configure;
|
||||
lib/appenders/multiprocess.js (Normal file, 134 lines)
@@ -0,0 +1,134 @@
|
||||
"use strict";
|
||||
var log4js = require('../log4js')
|
||||
, net = require('net')
|
||||
, END_MSG = '__LOG4JS__';
|
||||
|
||||
/**
|
||||
* Creates a server, listening on config.loggerPort, config.loggerHost.
|
||||
* Output goes to config.actualAppender (config.appender is used to
|
||||
* set up that appender).
|
||||
*/
|
||||
function logServer(config) {
|
||||
|
||||
/**
|
||||
* Takes a utf-8 string, returns an object with
|
||||
* the correct log properties.
|
||||
*/
|
||||
function deserializeLoggingEvent(clientSocket, msg) {
|
||||
var loggingEvent;
|
||||
try {
|
||||
loggingEvent = JSON.parse(msg);
|
||||
loggingEvent.startTime = new Date(loggingEvent.startTime);
|
||||
loggingEvent.level = log4js.levels.toLevel(loggingEvent.level.levelStr);
|
||||
} catch (e) {
|
||||
// JSON.parse failed, so just log the raw contents; it probably wasn't a serialised logging event.
|
||||
loggingEvent = {
|
||||
startTime: new Date(),
|
||||
categoryName: 'log4js',
|
||||
level: log4js.levels.ERROR,
|
||||
data: [ 'Unable to parse log:', msg ]
|
||||
};
|
||||
}
|
||||
|
||||
loggingEvent.remoteAddress = clientSocket.remoteAddress;
|
||||
loggingEvent.remotePort = clientSocket.remotePort;
|
||||
|
||||
return loggingEvent;
|
||||
}
|
||||
|
||||
var actualAppender = config.actualAppender,
|
||||
server = net.createServer(function serverCreated(clientSocket) {
|
||||
clientSocket.setEncoding('utf8');
|
||||
var logMessage = '';
|
||||
|
||||
function logTheMessage(msg) {
|
||||
if (logMessage.length > 0) {
|
||||
actualAppender(deserializeLoggingEvent(clientSocket, msg));
|
||||
}
|
||||
}
|
||||
|
||||
function chunkReceived(chunk) {
|
||||
var event;
|
||||
logMessage += chunk || '';
|
||||
if (logMessage.indexOf(END_MSG) > -1) {
|
||||
event = logMessage.substring(0, logMessage.indexOf(END_MSG));
|
||||
logTheMessage(event);
|
||||
logMessage = logMessage.substring(event.length + END_MSG.length) || '';
|
||||
//check for more, maybe it was a big chunk
|
||||
chunkReceived();
|
||||
}
|
||||
}
|
||||
|
||||
clientSocket.on('data', chunkReceived);
|
||||
clientSocket.on('end', chunkReceived);
|
||||
});
|
||||
|
||||
server.listen(config.loggerPort || 5000, config.loggerHost || 'localhost');
|
||||
|
||||
return actualAppender;
|
||||
}
|
||||
|
||||
function workerAppender(config) {
|
||||
var canWrite = false,
|
||||
buffer = [],
|
||||
socket;
|
||||
|
||||
createSocket();
|
||||
|
||||
function createSocket() {
|
||||
socket = net.createConnection(config.loggerPort || 5000, config.loggerHost || 'localhost');
|
||||
socket.on('connect', function() {
|
||||
emptyBuffer();
|
||||
canWrite = true;
|
||||
});
|
||||
socket.on('timeout', socket.end.bind(socket));
|
||||
//don't bother listening for 'error', 'close' gets called after that anyway
|
||||
socket.on('close', createSocket);
|
||||
}
|
||||
|
||||
function emptyBuffer() {
|
||||
var evt;
|
||||
while ((evt = buffer.shift())) {
|
||||
write(evt);
|
||||
}
|
||||
}
|
||||
|
||||
function write(loggingEvent) {
|
||||
// JSON.stringify(new Error('test')) returns {}, which is not really useful for us.
|
||||
// The following allows us to serialize errors correctly.
|
||||
if (loggingEvent && loggingEvent.stack && JSON.stringify(loggingEvent) === '{}') { // Validate that we really are in this case
|
||||
loggingEvent = {stack : loggingEvent.stack};
|
||||
}
|
||||
socket.write(JSON.stringify(loggingEvent), 'utf8');
|
||||
socket.write(END_MSG, 'utf8');
|
||||
}
|
||||
|
||||
return function log(loggingEvent) {
|
||||
if (canWrite) {
|
||||
write(loggingEvent);
|
||||
} else {
|
||||
buffer.push(loggingEvent);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
function createAppender(config) {
|
||||
if (config.mode === 'master') {
|
||||
return logServer(config);
|
||||
} else {
|
||||
return workerAppender(config);
|
||||
}
|
||||
}
|
||||
|
||||
function configure(config, options) {
|
||||
var actualAppender;
|
||||
if (config.appender && config.mode === 'master') {
|
||||
log4js.loadAppender(config.appender.type);
|
||||
actualAppender = log4js.appenderMakers[config.appender.type](config.appender, options);
|
||||
config.actualAppender = actualAppender;
|
||||
}
|
||||
return createAppender(config);
|
||||
}
|
||||
|
||||
exports.appender = createAppender;
|
||||
exports.configure = configure;
|
||||
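For context, this appender is driven entirely by config.mode. A minimal sketch of the two configurations, assuming this file is registered under the appender type "multiprocess" and using an illustrative file appender and port (neither is taken from this diff):

  // master process: runs the TCP log server and writes through the wrapped appender
  log4js.configure({
    appenders: [
      { type: "multiprocess", mode: "master",
        loggerHost: "localhost", loggerPort: 5001,
        appender: { type: "file", filename: "cluster.log" } }
    ]
  });

  // worker processes: any mode other than "master" serialises events to that host/port
  log4js.configure({
    appenders: [
      { type: "multiprocess", mode: "worker",
        loggerHost: "localhost", loggerPort: 5001 }
    ]
  });

Events are buffered in the worker until the socket connects, and both sides fall back to port 5000 on localhost when loggerPort / loggerHost are omitted.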
82  lib/appenders/smtp.js  Normal file
@@ -0,0 +1,82 @@
"use strict";
|
||||
var layouts = require("../layouts")
|
||||
, mailer = require("nodemailer")
|
||||
, os = require('os');
|
||||
|
||||
/**
|
||||
* SMTP Appender. Sends logging events using SMTP protocol.
|
||||
* It can either send an email on each event or group several
|
||||
* logging events gathered during specified interval.
|
||||
*
|
||||
* @param config appender configuration data
|
||||
* config.sendInterval time between log emails (in seconds), if 0
|
||||
* then every event sends an email
|
||||
* @param layout a function that takes a logevent and returns a string (defaults to basicLayout).
|
||||
*/
|
||||
function smtpAppender(config, layout) {
|
||||
layout = layout || layouts.basicLayout;
|
||||
var subjectLayout = layouts.messagePassThroughLayout;
|
||||
var sendInterval = config.sendInterval*1000 || 0;
|
||||
|
||||
var logEventBuffer = [];
|
||||
var sendTimer;
|
||||
|
||||
function sendBuffer() {
|
||||
if (logEventBuffer.length > 0) {
|
||||
|
||||
var transport = mailer.createTransport(config.transport, config[config.transport]);
|
||||
var firstEvent = logEventBuffer[0];
|
||||
var body = "";
|
||||
while (logEventBuffer.length > 0) {
|
||||
body += layout(logEventBuffer.shift()) + "\n";
|
||||
}
|
||||
|
||||
var msg = {
|
||||
to: config.recipients,
|
||||
subject: config.subject || subjectLayout(firstEvent),
|
||||
text: body,
|
||||
headers: { "Hostname": os.hostname() }
|
||||
};
|
||||
if (config.sender) {
|
||||
msg.from = config.sender;
|
||||
}
|
||||
transport.sendMail(msg, function(error, success) {
|
||||
if (error) {
|
||||
console.error("log4js.smtpAppender - Error happened", error);
|
||||
}
|
||||
transport.close();
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
function scheduleSend() {
|
||||
if (!sendTimer) {
|
||||
sendTimer = setTimeout(function() {
|
||||
sendTimer = null;
|
||||
sendBuffer();
|
||||
}, sendInterval);
|
||||
}
|
||||
}
|
||||
|
||||
return function(loggingEvent) {
|
||||
logEventBuffer.push(loggingEvent);
|
||||
if (sendInterval > 0) {
|
||||
scheduleSend();
|
||||
} else {
|
||||
sendBuffer();
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
function configure(config) {
|
||||
var layout;
|
||||
if (config.layout) {
|
||||
layout = layouts.layout(config.layout.type, config.layout);
|
||||
}
|
||||
return smtpAppender(config, layout);
|
||||
}
|
||||
|
||||
exports.name = "smtp";
|
||||
exports.appender = smtpAppender;
|
||||
exports.configure = configure;
|
||||
|
||||
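Based on the fields read above (config.transport, config[config.transport], recipients, subject, sender, sendInterval, layout), a configuration for this appender might look like the following sketch; the nodemailer transport name and SMTP settings are illustrative assumptions, not part of this diff:

  {
    type: "smtp",
    recipients: "ops@example.com",        // becomes msg.to
    sender: "log4js@example.com",         // optional, becomes msg.from
    subject: "Application log",           // optional, defaults to the first buffered event's message
    sendInterval: 60,                     // seconds; 0 (the default) sends one mail per event
    transport: "SMTP",                    // passed to mailer.createTransport(...)
    SMTP: { host: "smtp.example.com", port: 25 }  // looked up as config[config.transport]
  }

With sendInterval > 0, events are buffered and flushed as a single mail roughly once per interval; with 0, every event triggers sendBuffer() immediately.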
lib/connect-logger.js
@@ -1,11 +1,16 @@
|
||||
"use strict";
|
||||
var levels = require("./levels");
|
||||
var DEFAULT_FORMAT = ':remote-addr - -' +
|
||||
' ":method :url HTTP/:http-version"' +
|
||||
' :status :content-length ":referrer"' +
|
||||
' ":user-agent"';
|
||||
/**
|
||||
* Log requests with the given `options` or a `format` string.
|
||||
*
|
||||
* Options:
|
||||
*
|
||||
* - `format` Format string, see below for tokens
|
||||
* - `level` A log4js levels instance.
|
||||
* - `level` A log4js levels instance. Supports also 'auto'
|
||||
*
|
||||
* Tokens:
|
||||
*
|
||||
@@ -27,58 +32,72 @@ var levels = require("./levels");
|
||||
*/
|
||||
|
||||
function getLogger(logger4js, options) {
|
||||
if ('object' == typeof options) {
|
||||
options = options || {};
|
||||
} else if (options) {
|
||||
options = { format: options };
|
||||
} else {
|
||||
options = {};
|
||||
}
|
||||
|
||||
var thislogger = logger4js
|
||||
, level = levels.toLevel(options.level, levels.INFO)
|
||||
, fmt = options.format || ':remote-addr - - ":method :url HTTP/:http-version" :status :content-length ":req[referer]" ":user-agent"';
|
||||
|
||||
return function (req, res, next) {
|
||||
|
||||
// mount safety
|
||||
if (req._logging) return next();
|
||||
|
||||
if (thislogger.isLevelEnabled(level)) {
|
||||
|
||||
var start = +new Date
|
||||
, statusCode
|
||||
, writeHead = res.writeHead
|
||||
, end = res.end
|
||||
, url = req.originalUrl;
|
||||
|
||||
// flag as logging
|
||||
req._logging = true;
|
||||
|
||||
// proxy for statusCode.
|
||||
res.writeHead = function(code, headers){
|
||||
res.writeHead = writeHead;
|
||||
res.writeHead(code, headers);
|
||||
res.__statusCode = statusCode = code;
|
||||
res.__headers = headers || {};
|
||||
};
|
||||
|
||||
// proxy end to output a line to the provided logger.
|
||||
res.end = function(chunk, encoding) {
|
||||
res.end = end;
|
||||
res.end(chunk, encoding);
|
||||
res.responseTime = +new Date - start;
|
||||
if ('function' == typeof fmt) {
|
||||
var line = fmt(req, res, function(str){ return format(str, req, res); });
|
||||
if (line) thislogger.log(level, line);
|
||||
} else {
|
||||
thislogger.log(level, format(fmt, req, res));
|
||||
}
|
||||
};
|
||||
|
||||
next();
|
||||
if ('object' == typeof options) {
|
||||
options = options || {};
|
||||
} else if (options) {
|
||||
options = { format: options };
|
||||
} else {
|
||||
options = {};
|
||||
}
|
||||
};
|
||||
|
||||
var thislogger = logger4js
|
||||
, level = levels.toLevel(options.level, levels.INFO)
|
||||
, fmt = options.format || DEFAULT_FORMAT
|
||||
, nolog = options.nolog ? createNoLogCondition(options.nolog) : null;
|
||||
|
||||
return function (req, res, next) {
|
||||
// mount safety
|
||||
if (req._logging) return next();
|
||||
|
||||
// nologs
|
||||
if (nolog && nolog.test(req.originalUrl)) return next();
|
||||
if (thislogger.isLevelEnabled(level) || options.level === 'auto') {
|
||||
|
||||
var start = new Date()
|
||||
, statusCode
|
||||
, writeHead = res.writeHead
|
||||
, end = res.end
|
||||
, url = req.originalUrl;
|
||||
|
||||
// flag as logging
|
||||
req._logging = true;
|
||||
|
||||
// proxy for statusCode.
|
||||
res.writeHead = function(code, headers){
|
||||
res.writeHead = writeHead;
|
||||
res.writeHead(code, headers);
|
||||
res.__statusCode = statusCode = code;
|
||||
res.__headers = headers || {};
|
||||
|
||||
//status code response level handling
|
||||
if(options.level === 'auto'){
|
||||
level = levels.INFO;
|
||||
if(code >= 300) level = levels.WARN;
|
||||
if(code >= 400) level = levels.ERROR;
|
||||
} else {
|
||||
level = levels.toLevel(options.level, levels.INFO);
|
||||
}
|
||||
};
|
||||
|
||||
// proxy end to output a line to the provided logger.
|
||||
res.end = function(chunk, encoding) {
|
||||
res.end = end;
|
||||
res.end(chunk, encoding);
|
||||
res.responseTime = new Date() - start;
|
||||
if (thislogger.isLevelEnabled(level)) {
|
||||
if (typeof fmt === 'function') {
|
||||
var line = fmt(req, res, function(str){ return format(str, req, res); });
|
||||
if (line) thislogger.log(level, line);
|
||||
} else {
|
||||
thislogger.log(level, format(fmt, req, res));
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
//ensure next gets always called
|
||||
next();
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -92,23 +111,84 @@ function getLogger(logger4js, options) {
|
||||
*/
|
||||
|
||||
function format(str, req, res) {
|
||||
return str
|
||||
.replace(':url', req.originalUrl)
|
||||
.replace(':method', req.method)
|
||||
.replace(':status', res.__statusCode || res.statusCode)
|
||||
.replace(':response-time', res.responseTime)
|
||||
.replace(':date', new Date().toUTCString())
|
||||
.replace(':referrer', req.headers['referer'] || req.headers['referrer'] || '')
|
||||
.replace(':http-version', req.httpVersionMajor + '.' + req.httpVersionMinor)
|
||||
.replace(':remote-addr', req.socket && (req.socket.remoteAddress || (req.socket.socket && req.socket.socket.remoteAddress)))
|
||||
.replace(':user-agent', req.headers['user-agent'] || '')
|
||||
.replace(':content-length', (res._headers && res._headers['content-length']) || (res.__headers && res.__headers['Content-Length']) || '-')
|
||||
.replace(/:req\[([^\]]+)\]/g, function(_, field){ return req.headers[field.toLowerCase()]; })
|
||||
.replace(/:res\[([^\]]+)\]/g, function(_, field){
|
||||
return res._headers
|
||||
? (res._headers[field.toLowerCase()] || res.__headers[field])
|
||||
: (res.__headers && res.__headers[field]);
|
||||
});
|
||||
return str
|
||||
.replace(':url', req.originalUrl)
|
||||
.replace(':method', req.method)
|
||||
.replace(':status', res.__statusCode || res.statusCode)
|
||||
.replace(':response-time', res.responseTime)
|
||||
.replace(':date', new Date().toUTCString())
|
||||
.replace(':referrer', req.headers.referer || req.headers.referrer || '')
|
||||
.replace(':http-version', req.httpVersionMajor + '.' + req.httpVersionMinor)
|
||||
.replace(
|
||||
':remote-addr', req.ip || req._remoteAddress || (
|
||||
req.socket &&
|
||||
(req.socket.remoteAddress || (req.socket.socket && req.socket.socket.remoteAddress))
|
||||
))
|
||||
.replace(':user-agent', req.headers['user-agent'] || '')
|
||||
.replace(
|
||||
':content-length',
|
||||
(res._headers && res._headers['content-length']) ||
|
||||
(res.__headers && res.__headers['Content-Length']) ||
|
||||
'-'
|
||||
)
|
||||
.replace(/:req\[([^\]]+)\]/g, function(_, field){ return req.headers[field.toLowerCase()]; })
|
||||
.replace(/:res\[([^\]]+)\]/g, function(_, field){
|
||||
return res._headers ?
|
||||
(res._headers[field.toLowerCase()] || res.__headers[field])
|
||||
: (res.__headers && res.__headers[field]);
|
||||
});
|
||||
}
|
||||
|
||||
exports.connectLogger = getLogger;
|
||||
/**
|
||||
* Return RegExp Object about nolog
|
||||
*
|
||||
* @param {String} nolog
|
||||
* @return {RegExp}
|
||||
* @api private
|
||||
*
|
||||
* syntax
|
||||
* 1. String
|
||||
* 1.1 "\\.gif"
|
||||
* NOT LOGGING http://example.com/hoge.gif and http://example.com/hoge.gif?fuga
|
||||
* LOGGING http://example.com/hoge.agif
|
||||
* 1.2 in "\\.gif|\\.jpg$"
|
||||
* NOT LOGGING http://example.com/hoge.gif and
|
||||
* http://example.com/hoge.gif?fuga and http://example.com/hoge.jpg?fuga
|
||||
* LOGGING http://example.com/hoge.agif,
|
||||
* http://example.com/hoge.ajpg and http://example.com/hoge.jpg?hoge
|
||||
* 1.3 in "\\.(gif|jpe?g|png)$"
|
||||
* NOT LOGGING http://example.com/hoge.gif and http://example.com/hoge.jpeg
|
||||
* LOGGING http://example.com/hoge.gif?uid=2 and http://example.com/hoge.jpg?pid=3
|
||||
* 2. RegExp
|
||||
* 2.1 in /\.(gif|jpe?g|png)$/
|
||||
* SAME AS 1.3
|
||||
* 3. Array
|
||||
* 3.1 ["\\.jpg$", "\\.png", "\\.gif"]
|
||||
* SAME AS "\\.jpg|\\.png|\\.gif"
|
||||
*/
|
||||
function createNoLogCondition(nolog) {
|
||||
var regexp = null;
|
||||
|
||||
if (nolog) {
|
||||
if (nolog instanceof RegExp) {
|
||||
regexp = nolog;
|
||||
}
|
||||
|
||||
if (typeof nolog === 'string') {
|
||||
regexp = new RegExp(nolog);
|
||||
}
|
||||
|
||||
if (Array.isArray(nolog)) {
|
||||
var regexpsAsStrings = nolog.map(
|
||||
function convertToStrings(o) {
|
||||
return o.source ? o.source : o;
|
||||
}
|
||||
);
|
||||
regexp = new RegExp(regexpsAsStrings.join('|'));
|
||||
}
|
||||
}
|
||||
|
||||
return regexp;
|
||||
}
|
||||
|
||||
exports.connectLogger = getLogger;
|
||||
|
||||
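A hedged sketch of how this middleware is typically attached in a Connect/Express app, exercising the 'auto' level and the nolog filter documented above (the category name and patterns are illustrative):

  var log4js = require('log4js');
  var app = require('express')();

  app.use(log4js.connectLogger(log4js.getLogger('http'), {
    level: 'auto',                      // INFO, or WARN for 3xx and ERROR for 4xx/5xx responses
    format: ':method :url :status',     // omit to use DEFAULT_FORMAT
    nolog: [ '\\.gif$', '\\.png$' ]     // array is joined into /\.gif$|\.png$/ by createNoLogCondition
  }));

Requests matching nolog are passed straight to next() without logging, and req._logging guards against the middleware being mounted twice.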
lib/date_format.js
@@ -1,60 +1,66 @@
|
||||
"use strict";
|
||||
exports.ISO8601_FORMAT = "yyyy-MM-dd hh:mm:ss.SSS";
|
||||
exports.ISO8601_WITH_TZ_OFFSET_FORMAT = "yyyy-MM-ddThh:mm:ssO";
|
||||
exports.DATETIME_FORMAT = "dd MM yyyy hh:mm:ss.SSS";
|
||||
exports.ABSOLUTETIME_FORMAT = "hh:mm:ss.SSS";
|
||||
|
||||
function padWithZeros(vNumber, width) {
|
||||
var numAsString = vNumber + "";
|
||||
while (numAsString.length < width) {
|
||||
numAsString = "0" + numAsString;
|
||||
}
|
||||
return numAsString;
|
||||
}
|
||||
|
||||
function addZero(vNumber) {
|
||||
return padWithZeros(vNumber, 2);
|
||||
}
|
||||
|
||||
/**
|
||||
* Formats the TimeOffest
|
||||
* Thanks to http://www.svendtofte.com/code/date_format/
|
||||
* @private
|
||||
*/
|
||||
function offset(date) {
|
||||
// Difference to Greenwich time (GMT) in hours
|
||||
var os = Math.abs(date.getTimezoneOffset());
|
||||
var h = String(Math.floor(os/60));
|
||||
var m = String(os%60);
|
||||
if (h.length == 1) {
|
||||
h = "0" + h;
|
||||
}
|
||||
if (m.length == 1) {
|
||||
m = "0" + m;
|
||||
}
|
||||
return date.getTimezoneOffset() < 0 ? "+"+h+m : "-"+h+m;
|
||||
}
|
||||
|
||||
exports.asString = function(/*format,*/ date) {
|
||||
var format = exports.ISO8601_FORMAT;
|
||||
if (typeof(date) === "string") {
|
||||
format = arguments[0];
|
||||
date = arguments[1];
|
||||
}
|
||||
var format = exports.ISO8601_FORMAT;
|
||||
if (typeof(date) === "string") {
|
||||
format = arguments[0];
|
||||
date = arguments[1];
|
||||
}
|
||||
|
||||
var vDay = addZero(date.getDate());
|
||||
var vMonth = addZero(date.getMonth()+1);
|
||||
var vYearLong = addZero(date.getFullYear());
|
||||
var vYearShort = addZero(date.getFullYear().toString().substring(3,4));
|
||||
var vYear = (format.indexOf("yyyy") > -1 ? vYearLong : vYearShort);
|
||||
var vHour = addZero(date.getHours());
|
||||
var vMinute = addZero(date.getMinutes());
|
||||
var vSecond = addZero(date.getSeconds());
|
||||
var vMillisecond = padWithZeros(date.getMilliseconds(), 3);
|
||||
var vTimeZone = offset(date);
|
||||
var formatted = format
|
||||
.replace(/dd/g, vDay)
|
||||
.replace(/MM/g, vMonth)
|
||||
.replace(/y{1,4}/g, vYear)
|
||||
.replace(/hh/g, vHour)
|
||||
.replace(/mm/g, vMinute)
|
||||
.replace(/ss/g, vSecond)
|
||||
.replace(/SSS/g, vMillisecond)
|
||||
.replace(/O/g, vTimeZone);
|
||||
return formatted;
|
||||
var vDay = addZero(date.getDate());
|
||||
var vMonth = addZero(date.getMonth()+1);
|
||||
var vYearLong = addZero(date.getFullYear());
|
||||
var vYearShort = addZero(date.getFullYear().toString().substring(2,4));
|
||||
var vYear = (format.indexOf("yyyy") > -1 ? vYearLong : vYearShort);
|
||||
var vHour = addZero(date.getHours());
|
||||
var vMinute = addZero(date.getMinutes());
|
||||
var vSecond = addZero(date.getSeconds());
|
||||
var vMillisecond = padWithZeros(date.getMilliseconds(), 3);
|
||||
var vTimeZone = offset(date);
|
||||
var formatted = format
|
||||
.replace(/dd/g, vDay)
|
||||
.replace(/MM/g, vMonth)
|
||||
.replace(/y{1,4}/g, vYear)
|
||||
.replace(/hh/g, vHour)
|
||||
.replace(/mm/g, vMinute)
|
||||
.replace(/ss/g, vSecond)
|
||||
.replace(/SSS/g, vMillisecond)
|
||||
.replace(/O/g, vTimeZone);
|
||||
return formatted;
|
||||
|
||||
function padWithZeros(vNumber, width) {
|
||||
var numAsString = vNumber + "";
|
||||
while (numAsString.length < width) {
|
||||
numAsString = "0" + numAsString;
|
||||
}
|
||||
return numAsString;
|
||||
}
|
||||
|
||||
function addZero(vNumber) {
|
||||
return padWithZeros(vNumber, 2);
|
||||
}
|
||||
|
||||
/**
|
||||
* Formats the TimeOffest
|
||||
* Thanks to http://www.svendtofte.com/code/date_format/
|
||||
* @private
|
||||
*/
|
||||
function offset(date) {
|
||||
// Difference to Greenwich time (GMT) in hours
|
||||
var os = Math.abs(date.getTimezoneOffset());
|
||||
var h = String(Math.floor(os/60));
|
||||
var m = String(os%60);
|
||||
h.length == 1? h = "0"+h:1;
|
||||
m.length == 1? m = "0"+m:1;
|
||||
return date.getTimezoneOffset() < 0 ? "+"+h+m : "-"+h+m;
|
||||
}
|
||||
};
|
||||
|
||||
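For reference, a few hedged examples of asString with the formats exported above (required as './date_format' inside the library; the timezone offset shown assumes a +01:00 zone):

  var dateFormat = require('./date_format');
  var d = new Date(2013, 0, 15, 14, 5, 9, 42);

  dateFormat.asString(d);                                           // "2013-01-15 14:05:09.042" (ISO8601_FORMAT, the default)
  dateFormat.asString(dateFormat.ISO8601_WITH_TZ_OFFSET_FORMAT, d); // "2013-01-15T14:05:09+0100"
  dateFormat.asString(dateFormat.ABSOLUTETIME_FORMAT, d);           // "14:05:09.042"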
15  lib/debug.js  Normal file
@@ -0,0 +1,15 @@
"use strict";
|
||||
|
||||
module.exports = function(label) {
|
||||
var debug;
|
||||
|
||||
if (process.env.NODE_DEBUG && /\blog4js\b/.test(process.env.NODE_DEBUG)) {
|
||||
debug = function(message) {
|
||||
console.error('LOG4JS: (%s) %s', label, message);
|
||||
};
|
||||
} else {
|
||||
debug = function() { };
|
||||
}
|
||||
|
||||
return debug;
|
||||
};
|
||||
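Usage of this helper elsewhere in the library is of the form below; the label 'core' and the message are illustrative:

  var debug = require('./debug')('core');

  debug('loading appenders');
  // prints "LOG4JS: (core) loading appenders" to stderr, but only when the
  // NODE_DEBUG environment variable mentions log4js, e.g.
  //   NODE_DEBUG=log4js node app.js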
443  lib/layouts.js
@@ -1,96 +1,87 @@
|
||||
"use strict";
|
||||
var dateFormat = require('./date_format')
|
||||
, os = require('os')
|
||||
, eol = os.EOL || '\n'
|
||||
, util = require('util')
|
||||
, replacementRegExp = /%[sdj]/g
|
||||
, layoutMakers = {
|
||||
"messagePassThrough": function() { return messagePassThroughLayout; }
|
||||
, "basic": function() { return basicLayout; }
|
||||
, "colored": function() { return colouredLayout; }
|
||||
, "coloured": function() { return colouredLayout; }
|
||||
, "pattern": function (config) {
|
||||
var pattern = config.pattern || undefined;
|
||||
return patternLayout(pattern);
|
||||
}
|
||||
"messagePassThrough": function() { return messagePassThroughLayout; },
|
||||
"basic": function() { return basicLayout; },
|
||||
"colored": function() { return colouredLayout; },
|
||||
"coloured": function() { return colouredLayout; },
|
||||
"pattern": function (config) {
|
||||
return patternLayout(config && config.pattern, config && config.tokens);
|
||||
}
|
||||
}
|
||||
, colours = {
|
||||
ALL: "grey"
|
||||
, TRACE: "blue"
|
||||
, DEBUG: "cyan"
|
||||
, INFO: "green"
|
||||
, WARN: "yellow"
|
||||
, ERROR: "red"
|
||||
, FATAL: "magenta"
|
||||
, OFF: "grey"
|
||||
ALL: "grey",
|
||||
TRACE: "blue",
|
||||
DEBUG: "cyan",
|
||||
INFO: "green",
|
||||
WARN: "yellow",
|
||||
ERROR: "red",
|
||||
FATAL: "magenta",
|
||||
OFF: "grey"
|
||||
};
|
||||
|
||||
|
||||
function formatLogData(logData) {
|
||||
var output = ""
|
||||
, data = Array.isArray(logData) ? logData.slice() : Array.prototype.slice.call(arguments)
|
||||
, format = data.shift();
|
||||
|
||||
if (typeof format === "string") {
|
||||
output = format.replace(replacementRegExp, function(match) {
|
||||
switch (match) {
|
||||
case "%s": return new String(data.shift());
|
||||
case "%d": return new Number(data.shift());
|
||||
case "%j": return JSON.stringify(data.shift());
|
||||
default:
|
||||
return match;
|
||||
};
|
||||
});
|
||||
if (data.length > 0) {
|
||||
output += '\n';
|
||||
}
|
||||
function wrapErrorsWithInspect(items) {
|
||||
return items.map(function(item) {
|
||||
if ((item instanceof Error) && item.stack) {
|
||||
return { inspect: function() { return util.format(item) + '\n' + item.stack; } };
|
||||
} else {
|
||||
return item;
|
||||
}
|
||||
|
||||
data.forEach(function (item) {
|
||||
if (item.stack) {
|
||||
output += item.stack;
|
||||
} else {
|
||||
output += util.inspect(item);
|
||||
}
|
||||
});
|
||||
|
||||
return output;
|
||||
});
|
||||
}
|
||||
|
||||
function formatLogData(logData) {
|
||||
var data = Array.isArray(logData) ? logData : Array.prototype.slice.call(arguments);
|
||||
return util.format.apply(util, wrapErrorsWithInspect(data));
|
||||
}
|
||||
|
||||
var styles = {
|
||||
//styles
|
||||
'bold' : [1, 22],
|
||||
'italic' : [3, 23],
|
||||
'underline' : [4, 24],
|
||||
'inverse' : [7, 27],
|
||||
//grayscale
|
||||
'white' : [37, 39],
|
||||
'grey' : [90, 39],
|
||||
'black' : [90, 39],
|
||||
//colors
|
||||
'blue' : [34, 39],
|
||||
'cyan' : [36, 39],
|
||||
'green' : [32, 39],
|
||||
'magenta' : [35, 39],
|
||||
'red' : [31, 39],
|
||||
'yellow' : [33, 39]
|
||||
};
|
||||
|
||||
function colorizeStart(style) {
|
||||
return style ? '\x1B[' + styles[style][0] + 'm' : '';
|
||||
}
|
||||
function colorizeEnd(style) {
|
||||
return style ? '\x1B[' + styles[style][1] + 'm' : '';
|
||||
}
|
||||
/**
|
||||
* Taken from masylum's fork (https://github.com/masylum/log4js-node)
|
||||
*/
|
||||
function colorize (str, style) {
|
||||
var styles = {
|
||||
//styles
|
||||
'bold' : [1, 22],
|
||||
'italic' : [3, 23],
|
||||
'underline' : [4, 24],
|
||||
'inverse' : [7, 27],
|
||||
//grayscale
|
||||
'white' : [37, 39],
|
||||
'grey' : [90, 39],
|
||||
'black' : [90, 39],
|
||||
//colors
|
||||
'blue' : [34, 39],
|
||||
'cyan' : [36, 39],
|
||||
'green' : [32, 39],
|
||||
'magenta' : [35, 39],
|
||||
'red' : [31, 39],
|
||||
'yellow' : [33, 39]
|
||||
};
|
||||
return style ? '\033[' + styles[style][0] + 'm' + str +
|
||||
'\033[' + styles[style][1] + 'm' : str;
|
||||
return colorizeStart(style) + str + colorizeEnd(style);
|
||||
}
|
||||
|
||||
function timestampLevelAndCategory(loggingEvent, colour) {
|
||||
var output = colorize(
|
||||
formatLogData(
|
||||
'[%s] [%s] %s - '
|
||||
, dateFormat.asString(loggingEvent.startTime)
|
||||
, loggingEvent.level
|
||||
, loggingEvent.categoryName
|
||||
)
|
||||
, colour
|
||||
);
|
||||
return output;
|
||||
var output = colorize(
|
||||
formatLogData(
|
||||
'[%s] [%s] %s - '
|
||||
, dateFormat.asString(loggingEvent.startTime)
|
||||
, loggingEvent.level
|
||||
, loggingEvent.categoryName
|
||||
)
|
||||
, colour
|
||||
);
|
||||
return output;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -103,7 +94,7 @@ function timestampLevelAndCategory(loggingEvent, colour) {
|
||||
* @author Stephan Strittmatter
|
||||
*/
|
||||
function basicLayout (loggingEvent) {
|
||||
return timestampLevelAndCategory(loggingEvent) + formatLogData(loggingEvent.data);
|
||||
return timestampLevelAndCategory(loggingEvent) + formatLogData(loggingEvent.data);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -111,11 +102,14 @@ function basicLayout (loggingEvent) {
|
||||
* same as basicLayout, but with colours.
|
||||
*/
|
||||
function colouredLayout (loggingEvent) {
|
||||
return timestampLevelAndCategory(loggingEvent, colours[loggingEvent.level.toString()]) + formatLogData(loggingEvent.data);
|
||||
return timestampLevelAndCategory(
|
||||
loggingEvent,
|
||||
colours[loggingEvent.level.toString()]
|
||||
) + formatLogData(loggingEvent.data);
|
||||
}
|
||||
|
||||
function messagePassThroughLayout (loggingEvent) {
|
||||
return formatLogData(loggingEvent.data);
|
||||
return formatLogData(loggingEvent.data);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -126,132 +120,199 @@ function messagePassThroughLayout (loggingEvent) {
|
||||
* - %r time in toLocaleTimeString format
|
||||
* - %p log level
|
||||
* - %c log category
|
||||
* - %h hostname
|
||||
* - %m log data
|
||||
* - %d date in various formats
|
||||
* - %% %
|
||||
* - %n newline
|
||||
* Takes a pattern string and returns a layout function.
|
||||
* - %x{<tokenname>} add dynamic tokens to your log. Tokens are specified in the tokens parameter
|
||||
* You can use %[ and %] to define a colored block.
|
||||
*
|
||||
* Tokens are specified as simple key:value objects.
|
||||
* The key represents the token name whereas the value can be a string or function
|
||||
* which is called to extract the value to put in the log message. If token is not
|
||||
* found, it doesn't replace the field.
|
||||
*
|
||||
* A sample token would be: { "pid" : function() { return process.pid; } }
|
||||
*
|
||||
* Takes a pattern string, array of tokens and returns a layout function.
|
||||
* @param {String} Log format pattern String
|
||||
* @param {object} map object of different tokens
|
||||
* @return {Function}
|
||||
* @author Stephan Strittmatter
|
||||
* @author Jan Schmidle
|
||||
*/
|
||||
function patternLayout (pattern) {
|
||||
var TTCC_CONVERSION_PATTERN = "%r %p %c - %m%n";
|
||||
var regex = /%(-?[0-9]+)?(\.?[0-9]+)?([cdmnpr%])(\{([^\}]+)\})?|([^%]+)/;
|
||||
function patternLayout (pattern, tokens) {
|
||||
var TTCC_CONVERSION_PATTERN = "%r %p %c - %m%n";
|
||||
var regex = /%(-?[0-9]+)?(\.?[0-9]+)?([\[\]cdhmnprx%])(\{([^\}]+)\})?|([^%]+)/;
|
||||
|
||||
pattern = pattern || TTCC_CONVERSION_PATTERN;
|
||||
|
||||
pattern = pattern || TTCC_CONVERSION_PATTERN;
|
||||
function categoryName(loggingEvent, specifier) {
|
||||
var loggerName = loggingEvent.categoryName;
|
||||
if (specifier) {
|
||||
var precision = parseInt(specifier, 10);
|
||||
var loggerNameBits = loggerName.split(".");
|
||||
if (precision < loggerNameBits.length) {
|
||||
loggerName = loggerNameBits.slice(loggerNameBits.length - precision).join(".");
|
||||
}
|
||||
}
|
||||
return loggerName;
|
||||
}
|
||||
|
||||
return function(loggingEvent) {
|
||||
var formattedString = "";
|
||||
var result;
|
||||
var searchString = pattern;
|
||||
function formatAsDate(loggingEvent, specifier) {
|
||||
var format = dateFormat.ISO8601_FORMAT;
|
||||
if (specifier) {
|
||||
format = specifier;
|
||||
// Pick up special cases
|
||||
if (format == "ISO8601") {
|
||||
format = dateFormat.ISO8601_FORMAT;
|
||||
} else if (format == "ISO8601_WITH_TZ_OFFSET") {
|
||||
format = dateFormat.ISO8601_WITH_TZ_OFFSET_FORMAT;
|
||||
} else if (format == "ABSOLUTE") {
|
||||
format = dateFormat.ABSOLUTETIME_FORMAT;
|
||||
} else if (format == "DATE") {
|
||||
format = dateFormat.DATETIME_FORMAT;
|
||||
}
|
||||
}
|
||||
// Format the date
|
||||
return dateFormat.asString(format, loggingEvent.startTime);
|
||||
}
|
||||
|
||||
function hostname() {
|
||||
return os.hostname().toString();
|
||||
}
|
||||
|
||||
while ((result = regex.exec(searchString))) {
|
||||
var matchedString = result[0];
|
||||
var padding = result[1];
|
||||
var truncation = result[2];
|
||||
var conversionCharacter = result[3];
|
||||
var specifier = result[5];
|
||||
var text = result[6];
|
||||
function formatMessage(loggingEvent) {
|
||||
return formatLogData(loggingEvent.data);
|
||||
}
|
||||
|
||||
function endOfLine() {
|
||||
return eol;
|
||||
}
|
||||
|
||||
// Check if the pattern matched was just normal text
|
||||
if (text) {
|
||||
formattedString += "" + text;
|
||||
} else {
|
||||
// Create a raw replacement string based on the conversion
|
||||
// character and specifier
|
||||
var replacement = "";
|
||||
switch(conversionCharacter) {
|
||||
case "c":
|
||||
var loggerName = loggingEvent.categoryName;
|
||||
if (specifier) {
|
||||
var precision = parseInt(specifier, 10);
|
||||
var loggerNameBits = loggingEvent.categoryName.split(".");
|
||||
if (precision >= loggerNameBits.length) {
|
||||
replacement = loggerName;
|
||||
} else {
|
||||
replacement = loggerNameBits.slice(loggerNameBits.length - precision).join(".");
|
||||
}
|
||||
} else {
|
||||
replacement = loggerName;
|
||||
}
|
||||
break;
|
||||
case "d":
|
||||
var format = dateFormat.ISO8601_FORMAT;
|
||||
if (specifier) {
|
||||
format = specifier;
|
||||
// Pick up special cases
|
||||
if (format == "ISO8601") {
|
||||
format = dateFormat.ISO8601_FORMAT;
|
||||
} else if (format == "ABSOLUTE") {
|
||||
format = dateFormat.ABSOLUTETIME_FORMAT;
|
||||
} else if (format == "DATE") {
|
||||
format = dateFormat.DATETIME_FORMAT;
|
||||
}
|
||||
}
|
||||
// Format the date
|
||||
replacement = dateFormat.asString(format, loggingEvent.startTime);
|
||||
break;
|
||||
case "m":
|
||||
replacement = formatLogData(loggingEvent.data);
|
||||
break;
|
||||
case "n":
|
||||
replacement = "\n";
|
||||
break;
|
||||
case "p":
|
||||
replacement = loggingEvent.level.toString();
|
||||
break;
|
||||
case "r":
|
||||
replacement = "" + loggingEvent.startTime.toLocaleTimeString();
|
||||
break;
|
||||
case "%":
|
||||
replacement = "%";
|
||||
break;
|
||||
default:
|
||||
replacement = matchedString;
|
||||
break;
|
||||
}
|
||||
// Format the replacement according to any padding or
|
||||
// truncation specified
|
||||
function logLevel(loggingEvent) {
|
||||
return loggingEvent.level.toString();
|
||||
}
|
||||
|
||||
var len;
|
||||
function startTime(loggingEvent) {
|
||||
return "" + loggingEvent.startTime.toLocaleTimeString();
|
||||
}
|
||||
|
||||
// First, truncation
|
||||
if (truncation) {
|
||||
len = parseInt(truncation.substr(1), 10);
|
||||
replacement = replacement.substring(0, len);
|
||||
}
|
||||
// Next, padding
|
||||
if (padding) {
|
||||
if (padding.charAt(0) == "-") {
|
||||
len = parseInt(padding.substr(1), 10);
|
||||
// Right pad with spaces
|
||||
while (replacement.length < len) {
|
||||
replacement += " ";
|
||||
}
|
||||
} else {
|
||||
len = parseInt(padding, 10);
|
||||
// Left pad with spaces
|
||||
while (replacement.length < len) {
|
||||
replacement = " " + replacement;
|
||||
}
|
||||
}
|
||||
}
|
||||
formattedString += replacement;
|
||||
}
|
||||
searchString = searchString.substr(result.index + result[0].length);
|
||||
}
|
||||
return formattedString;
|
||||
};
|
||||
function startColour(loggingEvent) {
|
||||
return colorizeStart(colours[loggingEvent.level.toString()]);
|
||||
}
|
||||
|
||||
};
|
||||
function endColour(loggingEvent) {
|
||||
return colorizeEnd(colours[loggingEvent.level.toString()]);
|
||||
}
|
||||
|
||||
function percent() {
|
||||
return '%';
|
||||
}
|
||||
|
||||
function userDefined(loggingEvent, specifier) {
|
||||
if (typeof(tokens[specifier]) !== 'undefined') {
|
||||
if (typeof(tokens[specifier]) === 'function') {
|
||||
return tokens[specifier](loggingEvent);
|
||||
} else {
|
||||
return tokens[specifier];
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
var replacers = {
|
||||
'c': categoryName,
|
||||
'd': formatAsDate,
|
||||
'h': hostname,
|
||||
'm': formatMessage,
|
||||
'n': endOfLine,
|
||||
'p': logLevel,
|
||||
'r': startTime,
|
||||
'[': startColour,
|
||||
']': endColour,
|
||||
'%': percent,
|
||||
'x': userDefined
|
||||
};
|
||||
|
||||
function replaceToken(conversionCharacter, loggingEvent, specifier) {
|
||||
return replacers[conversionCharacter](loggingEvent, specifier);
|
||||
}
|
||||
|
||||
function truncate(truncation, toTruncate) {
|
||||
var len;
|
||||
if (truncation) {
|
||||
len = parseInt(truncation.substr(1), 10);
|
||||
return toTruncate.substring(0, len);
|
||||
}
|
||||
|
||||
return toTruncate;
|
||||
}
|
||||
|
||||
function pad(padding, toPad) {
|
||||
var len;
|
||||
if (padding) {
|
||||
if (padding.charAt(0) == "-") {
|
||||
len = parseInt(padding.substr(1), 10);
|
||||
// Right pad with spaces
|
||||
while (toPad.length < len) {
|
||||
toPad += " ";
|
||||
}
|
||||
} else {
|
||||
len = parseInt(padding, 10);
|
||||
// Left pad with spaces
|
||||
while (toPad.length < len) {
|
||||
toPad = " " + toPad;
|
||||
}
|
||||
}
|
||||
}
|
||||
return toPad;
|
||||
}
|
||||
|
||||
return function(loggingEvent) {
|
||||
var formattedString = "";
|
||||
var result;
|
||||
var searchString = pattern;
|
||||
|
||||
while ((result = regex.exec(searchString))) {
|
||||
var matchedString = result[0];
|
||||
var padding = result[1];
|
||||
var truncation = result[2];
|
||||
var conversionCharacter = result[3];
|
||||
var specifier = result[5];
|
||||
var text = result[6];
|
||||
|
||||
// Check if the pattern matched was just normal text
|
||||
if (text) {
|
||||
formattedString += "" + text;
|
||||
} else {
|
||||
// Create a raw replacement string based on the conversion
|
||||
// character and specifier
|
||||
var replacement =
|
||||
replaceToken(conversionCharacter, loggingEvent, specifier) ||
|
||||
matchedString;
|
||||
|
||||
// Format the replacement according to any padding or
|
||||
// truncation specified
|
||||
replacement = truncate(truncation, replacement);
|
||||
replacement = pad(padding, replacement);
|
||||
formattedString += replacement;
|
||||
}
|
||||
searchString = searchString.substr(result.index + result[0].length);
|
||||
}
|
||||
return formattedString;
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
basicLayout: basicLayout
|
||||
, messagePassThroughLayout: messagePassThroughLayout
|
||||
, patternLayout: patternLayout
|
||||
, colouredLayout: colouredLayout
|
||||
, coloredLayout: colouredLayout
|
||||
, layout: function(name, config) {
|
||||
return layoutMakers[name] && layoutMakers[name](config);
|
||||
}
|
||||
};
|
||||
basicLayout: basicLayout,
|
||||
messagePassThroughLayout: messagePassThroughLayout,
|
||||
patternLayout: patternLayout,
|
||||
colouredLayout: colouredLayout,
|
||||
coloredLayout: colouredLayout,
|
||||
layout: function(name, config) {
|
||||
return layoutMakers[name] && layoutMakers[name](config);
|
||||
}
|
||||
};
|
||||
|
||||
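Tying the pattern/token machinery above together, a layout with padding, truncation, colour markers and a user-defined %x token could be built roughly like this (the pattern string is illustrative; the pid token mirrors the sample in the doc comment):

  var layouts = require('./layouts');

  var layout = layouts.patternLayout(
    '%[%d{ABSOLUTE} %5.5p %c{1}%] %x{pid} - %m%n',
    { pid: function() { return process.pid; } }
  );

  // layout(loggingEvent) produces something like
  //   "14:05:09.042  INFO myapp 12345 - something happened" + os.EOL
  // wrapped in the ANSI colour for the event's level:
  //   %5.5p pads/truncates the level to 5 characters,
  //   %c{1} keeps only the last segment of the category name,
  //   %[ and %] emit colorizeStart/colorizeEnd for the level's colour.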
lib/levels.js
@@ -1,56 +1,68 @@
|
||||
"use strict";
|
||||
|
||||
function Level(level, levelStr) {
|
||||
this.level = level;
|
||||
this.levelStr = levelStr;
|
||||
this.level = level;
|
||||
this.levelStr = levelStr;
|
||||
}
|
||||
|
||||
/**
|
||||
* converts given String to corresponding Level
|
||||
* @param {String} sArg String value of Level
|
||||
* @param {String} sArg String value of Level OR Log4js.Level
|
||||
* @param {Log4js.Level} defaultLevel default Level, if no String representation
|
||||
* @return Level object
|
||||
* @type Log4js.Level
|
||||
*/
|
||||
function toLevel(sArg, defaultLevel) {
|
||||
|
||||
if (sArg === null) {
|
||||
return defaultLevel;
|
||||
}
|
||||
|
||||
if (typeof sArg == "string") {
|
||||
var s = sArg.toUpperCase();
|
||||
if (module.exports[s]) {
|
||||
return module.exports[s];
|
||||
}
|
||||
}
|
||||
if (!sArg) {
|
||||
return defaultLevel;
|
||||
};
|
||||
}
|
||||
|
||||
if (typeof sArg == "string") {
|
||||
var s = sArg.toUpperCase();
|
||||
if (module.exports[s]) {
|
||||
return module.exports[s];
|
||||
} else {
|
||||
return defaultLevel;
|
||||
}
|
||||
}
|
||||
|
||||
return toLevel(sArg.toString());
|
||||
}
|
||||
|
||||
Level.prototype.toString = function() {
|
||||
return this.levelStr;
|
||||
return this.levelStr;
|
||||
};
|
||||
|
||||
Level.prototype.isLessThanOrEqualTo = function(otherLevel) {
|
||||
if (typeof otherLevel === "string") {
|
||||
otherLevel = Level.toLevel(otherLevel);
|
||||
}
|
||||
return this.level <= otherLevel.level;
|
||||
if (typeof otherLevel === "string") {
|
||||
otherLevel = toLevel(otherLevel);
|
||||
}
|
||||
return this.level <= otherLevel.level;
|
||||
};
|
||||
|
||||
Level.prototype.isGreaterThanOrEqualTo = function(otherLevel) {
|
||||
if (typeof otherLevel === "string") {
|
||||
otherLevel = Level.toLevel(otherLevel);
|
||||
}
|
||||
return this.level >= otherLevel.level;
|
||||
if (typeof otherLevel === "string") {
|
||||
otherLevel = toLevel(otherLevel);
|
||||
}
|
||||
return this.level >= otherLevel.level;
|
||||
};
|
||||
|
||||
Level.prototype.isEqualTo = function(otherLevel) {
|
||||
if (typeof otherLevel == "string") {
|
||||
otherLevel = toLevel(otherLevel);
|
||||
}
|
||||
return this.level === otherLevel.level;
|
||||
};
|
||||
|
||||
module.exports = {
|
||||
ALL: new Level(Number.MIN_VALUE, "ALL")
|
||||
, TRACE: new Level(5000, "TRACE")
|
||||
, DEBUG: new Level(10000, "DEBUG")
|
||||
, INFO: new Level(20000, "INFO")
|
||||
, WARN: new Level(30000, "WARN")
|
||||
, ERROR: new Level(40000, "ERROR")
|
||||
, FATAL: new Level(50000, "FATAL")
|
||||
, OFF: new Level(Number.MAX_VALUE, "OFF")
|
||||
, toLevel: toLevel
|
||||
ALL: new Level(Number.MIN_VALUE, "ALL"),
|
||||
TRACE: new Level(5000, "TRACE"),
|
||||
DEBUG: new Level(10000, "DEBUG"),
|
||||
INFO: new Level(20000, "INFO"),
|
||||
WARN: new Level(30000, "WARN"),
|
||||
ERROR: new Level(40000, "ERROR"),
|
||||
FATAL: new Level(50000, "FATAL"),
|
||||
OFF: new Level(Number.MAX_VALUE, "OFF"),
|
||||
toLevel: toLevel
|
||||
};
|
||||
|
||||
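A few hedged examples of how toLevel and the comparison helpers above behave (paths as used inside the library):

  var levels = require('./levels');

  levels.toLevel('info');                        // levels.INFO
  levels.toLevel('nonsense', levels.DEBUG);      // levels.DEBUG (unknown strings fall back to the default)
  levels.toLevel(levels.WARN);                   // levels.WARN (non-strings go through toString())

  levels.ERROR.isGreaterThanOrEqualTo('warn');   // true  (40000 >= 30000)
  levels.DEBUG.isLessThanOrEqualTo(levels.INFO); // true  (10000 <= 20000)
  levels.INFO.isEqualTo('INFO');                 // true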
592  lib/log4js.js
@@ -1,3 +1,4 @@
|
||||
"use strict";
|
||||
/*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -12,14 +13,10 @@
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
/*jsl:option explicit*/
|
||||
|
||||
/**
|
||||
* @fileoverview log4js is a library to log in JavaScript in similar manner
|
||||
* than in log4j for Java. The API should be nearly the same.
|
||||
*
|
||||
* This file contains all log4js code and is the only file required for logging.
|
||||
*
|
||||
* <h3>Example:</h3>
|
||||
* <pre>
|
||||
* var logging = require('log4js');
|
||||
@@ -37,6 +34,8 @@
|
||||
* log.trace("trace me" );
|
||||
* </pre>
|
||||
*
|
||||
* NOTE: the authors below are the original browser-based log4js authors
|
||||
* don't try to contact them about bugs in this version :)
|
||||
* @version 1.0
|
||||
* @author Stephan Strittmatter - http://jroller.com/page/stritti
|
||||
* @author Seth Chisamore - http://www.chisamore.com
|
||||
@@ -45,36 +44,32 @@
|
||||
* Website: http://log4js.berlios.de
|
||||
*/
|
||||
var events = require('events')
|
||||
, async = require('async')
|
||||
, fs = require('fs')
|
||||
, path = require('path')
|
||||
, sys = require('sys')
|
||||
, util = require('util')
|
||||
, layouts = require('./layouts')
|
||||
, levels = require('./levels')
|
||||
, DEFAULT_CATEGORY = '[default]'
|
||||
, loggerModule = require('./logger')
|
||||
, LoggingEvent = loggerModule.LoggingEvent
|
||||
, Logger = loggerModule.Logger
|
||||
, ALL_CATEGORIES = '[all]'
|
||||
, appenders = {}
|
||||
, loggers = {}
|
||||
, appenderMakers = {
|
||||
"file": function(config, fileAppender) {
|
||||
var layout;
|
||||
if (config.layout) {
|
||||
layout = layouts.layout(config.layout.type, config.layout);
|
||||
}
|
||||
return fileAppender(config.filename, layout, config.maxLogSize, config.backups, config.pollInterval);
|
||||
},
|
||||
"console": function(config, fileAppender, consoleAppender) {
|
||||
var layout;
|
||||
if (config.layout) {
|
||||
layout = layouts.layout(config.layout.type, config.layout);
|
||||
}
|
||||
return consoleAppender(layout);
|
||||
},
|
||||
"logLevelFilter": function(config, fileAppender, consoleAppender) {
|
||||
var appender = appenderMakers[config.appender.type](config.appender, fileAppender, consoleAppender);
|
||||
return logLevelFilter(config.level, appender);
|
||||
}
|
||||
, appenderMakers = {}
|
||||
, appenderShutdowns = {}
|
||||
, defaultConfig = {
|
||||
appenders: [
|
||||
{ type: "console" }
|
||||
],
|
||||
replaceConsole: false
|
||||
};
|
||||
|
||||
function hasLogger(logger) {
|
||||
return loggers.hasOwnProperty(logger);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Get a logger instance. Instance is cached on categoryName level.
|
||||
* @param {String} categoryName name of category to log to.
|
||||
@@ -83,167 +78,110 @@ var events = require('events')
|
||||
*/
|
||||
function getLogger (categoryName) {
|
||||
|
||||
// Use default logger if categoryName is not specified or invalid
|
||||
if (!(typeof categoryName == "string")) {
|
||||
categoryName = DEFAULT_CATEGORY;
|
||||
}
|
||||
// Use default logger if categoryName is not specified or invalid
|
||||
if (typeof categoryName !== "string") {
|
||||
categoryName = Logger.DEFAULT_CATEGORY;
|
||||
}
|
||||
|
||||
var appenderList;
|
||||
if (!loggers[categoryName]) {
|
||||
// Create the logger for this name if it doesn't already exist
|
||||
loggers[categoryName] = new Logger(categoryName);
|
||||
if (appenders[categoryName]) {
|
||||
appenderList = appenders[categoryName];
|
||||
appenderList.forEach(function(appender) {
|
||||
loggers[categoryName].addListener("log", appender);
|
||||
});
|
||||
}
|
||||
if (appenders[ALL_CATEGORIES]) {
|
||||
appenderList = appenders[ALL_CATEGORIES];
|
||||
appenderList.forEach(function(appender) {
|
||||
loggers[categoryName].addListener("log", appender);
|
||||
});
|
||||
}
|
||||
var appenderList;
|
||||
if (!hasLogger(categoryName)) {
|
||||
// Create the logger for this name if it doesn't already exist
|
||||
loggers[categoryName] = new Logger(categoryName);
|
||||
if (appenders[categoryName]) {
|
||||
appenderList = appenders[categoryName];
|
||||
appenderList.forEach(function(appender) {
|
||||
loggers[categoryName].addListener("log", appender);
|
||||
});
|
||||
}
|
||||
|
||||
return loggers[categoryName];
|
||||
if (appenders[ALL_CATEGORIES]) {
|
||||
appenderList = appenders[ALL_CATEGORIES];
|
||||
appenderList.forEach(function(appender) {
|
||||
loggers[categoryName].addListener("log", appender);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return loggers[categoryName];
|
||||
}
|
||||
|
||||
/**
|
||||
* args are appender, then zero or more categories
|
||||
*/
|
||||
function addAppender () {
|
||||
var args = Array.prototype.slice.call(arguments);
|
||||
var appender = args.shift();
|
||||
if (args.length == 0 || args[0] === undefined) {
|
||||
args = [ ALL_CATEGORIES ];
|
||||
var args = Array.prototype.slice.call(arguments);
|
||||
var appender = args.shift();
|
||||
if (args.length === 0 || args[0] === undefined) {
|
||||
args = [ ALL_CATEGORIES ];
|
||||
}
|
||||
//argument may already be an array
|
||||
if (Array.isArray(args[0])) {
|
||||
args = args[0];
|
||||
}
|
||||
|
||||
args.forEach(function(category) {
|
||||
addAppenderToCategory(appender, category);
|
||||
|
||||
if (category === ALL_CATEGORIES) {
|
||||
addAppenderToAllLoggers(appender);
|
||||
} else if (hasLogger(category)) {
|
||||
loggers[category].addListener("log", appender);
|
||||
}
|
||||
//argument may already be an array
|
||||
if (Array.isArray(args[0])) {
|
||||
args = args[0];
|
||||
});
|
||||
}
|
||||
|
||||
function addAppenderToAllLoggers(appender) {
|
||||
for (var logger in loggers) {
|
||||
if (hasLogger(logger)) {
|
||||
loggers[logger].addListener("log", appender);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
args.forEach(function(category) {
|
||||
if (!appenders[category]) {
|
||||
appenders[category] = [];
|
||||
}
|
||||
appenders[category].push(appender);
|
||||
|
||||
if (category === ALL_CATEGORIES) {
|
||||
for (var logger in loggers) {
|
||||
if (loggers.hasOwnProperty(logger)) {
|
||||
loggers[logger].addListener("log", appender);
|
||||
}
|
||||
}
|
||||
} else if (loggers[category]) {
|
||||
loggers[category].addListener("log", appender);
|
||||
}
|
||||
});
|
||||
function addAppenderToCategory(appender, category) {
|
||||
if (!appenders[category]) {
|
||||
appenders[category] = [];
|
||||
}
|
||||
appenders[category].push(appender);
|
||||
}
|
||||
|
||||
function clearAppenders () {
|
||||
appenders = {};
|
||||
for (var logger in loggers) {
|
||||
if (loggers.hasOwnProperty(logger)) {
|
||||
loggers[logger].removeAllListeners("log");
|
||||
}
|
||||
appenders = {};
|
||||
for (var logger in loggers) {
|
||||
if (hasLogger(logger)) {
|
||||
loggers[logger].removeAllListeners("log");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function configureAppenders(appenderList, fileAppender, consoleAppender) {
|
||||
clearAppenders();
|
||||
if (appenderList) {
|
||||
appenderList.forEach(function(appenderConfig) {
|
||||
var appender = appenderMakers[appenderConfig.type](appenderConfig, fileAppender, consoleAppender);
|
||||
if (appender) {
|
||||
addAppender(appender, appenderConfig.category);
|
||||
} else {
|
||||
throw new Error("log4js configuration problem for "+sys.inspect(appenderConfig));
|
||||
}
|
||||
});
|
||||
} else {
|
||||
addAppender(consoleAppender);
|
||||
}
|
||||
function configureAppenders(appenderList, options) {
|
||||
clearAppenders();
|
||||
if (appenderList) {
|
||||
appenderList.forEach(function(appenderConfig) {
|
||||
loadAppender(appenderConfig.type);
|
||||
var appender;
|
||||
appenderConfig.makers = appenderMakers;
|
||||
try {
|
||||
appender = appenderMakers[appenderConfig.type](appenderConfig, options);
|
||||
addAppender(appender, appenderConfig.category);
|
||||
} catch(e) {
|
||||
throw new Error("log4js configuration problem for " + util.inspect(appenderConfig), e);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
function configureLevels(levels) {
|
||||
if (levels) {
|
||||
for (var category in levels) {
|
||||
if (levels.hasOwnProperty(category)) {
|
||||
getLogger(category).setLevel(levels[category]);
|
||||
}
|
||||
}
|
||||
if (levels) {
|
||||
for (var category in levels) {
|
||||
if (levels.hasOwnProperty(category)) {
|
||||
getLogger(category).setLevel(levels[category]);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Models a logging event.
|
||||
* @constructor
|
||||
* @param {String} categoryName name of category
|
||||
* @param {Log4js.Level} level level of message
|
||||
* @param {Array} data objects to log
|
||||
* @param {Log4js.Logger} logger the associated logger
|
||||
* @author Seth Chisamore
|
||||
*/
|
||||
function LoggingEvent (categoryName, level, data, logger) {
|
||||
this.startTime = new Date();
|
||||
this.categoryName = categoryName;
|
||||
this.data = data;
|
||||
this.level = level;
|
||||
this.logger = logger;
|
||||
}
|
||||
|
||||
/**
|
||||
* Logger to log messages.
|
||||
* use {@see Log4js#getLogger(String)} to get an instance.
|
||||
* @constructor
|
||||
* @param name name of category to log to
|
||||
* @author Stephan Strittmatter
|
||||
*/
|
||||
function Logger (name, level) {
|
||||
this.category = name || DEFAULT_CATEGORY;
|
||||
|
||||
if (! this.level) {
|
||||
this.__proto__.level = levels.TRACE;
|
||||
}
|
||||
}
|
||||
sys.inherits(Logger, events.EventEmitter);
|
||||
|
||||
Logger.prototype.setLevel = function(level) {
|
||||
this.level = levels.toLevel(level, levels.TRACE);
|
||||
};
|
||||
|
||||
Logger.prototype.removeLevel = function() {
|
||||
delete this.level;
|
||||
};
|
||||
|
||||
Logger.prototype.log = function(logLevel, args) {
|
||||
var data = Array.prototype.slice.call(args)
|
||||
, loggingEvent = new LoggingEvent(this.category, logLevel, data, this);
|
||||
this.emit("log", loggingEvent);
|
||||
};
|
||||
|
||||
Logger.prototype.isLevelEnabled = function(otherLevel) {
|
||||
return this.level.isLessThanOrEqualTo(otherLevel);
|
||||
};
|
||||
|
||||
['Trace','Debug','Info','Warn','Error','Fatal'].forEach(
|
||||
function(levelString) {
|
||||
var level = levels.toLevel(levelString);
|
||||
Logger.prototype['is'+levelString+'Enabled'] = function() {
|
||||
return this.isLevelEnabled(level);
|
||||
};
|
||||
|
||||
Logger.prototype[levelString.toLowerCase()] = function () {
|
||||
if (this.isLevelEnabled(level)) {
|
||||
this.log(level, arguments);
|
||||
}
|
||||
};
|
||||
}
|
||||
);
|
||||
|
||||
function setGlobalLogLevel(level) {
|
||||
Logger.prototype.level = levels.toLevel(level, levels.TRACE);
|
||||
Logger.prototype.level = levels.toLevel(level, levels.TRACE);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -252,170 +190,210 @@ function setGlobalLogLevel(level) {
|
||||
* @static
|
||||
*/
|
||||
function getDefaultLogger () {
|
||||
return getLogger(DEFAULT_CATEGORY);
|
||||
return getLogger(Logger.DEFAULT_CATEGORY);
|
||||
}
|
||||
|
||||
function logLevelFilter (levelString, appender) {
|
||||
var level = levels.toLevel(levelString);
|
||||
return function(logEvent) {
|
||||
if (logEvent.level.isGreaterThanOrEqualTo(level)) {
|
||||
appender(logEvent);
|
||||
}
|
||||
var configState = {};
|
||||
|
||||
function loadConfigurationFile(filename) {
|
||||
if (filename) {
|
||||
return JSON.parse(fs.readFileSync(filename, "utf8"));
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
function configureOnceOff(config, options) {
|
||||
if (config) {
|
||||
try {
|
||||
configureAppenders(config.appenders, options);
|
||||
configureLevels(config.levels);
|
||||
|
||||
if (config.replaceConsole) {
|
||||
replaceConsole();
|
||||
} else {
|
||||
restoreConsole();
|
||||
}
|
||||
} catch (e) {
|
||||
throw new Error(
|
||||
"Problem reading log4js config " + util.inspect(config) +
|
||||
". Error was \"" + e.message + "\" (" + e.stack + ")"
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function reloadConfiguration() {
|
||||
var mtime = getMTime(configState.filename);
|
||||
if (!mtime) return;
|
||||
|
||||
function consoleAppender (layout) {
|
||||
layout = layout || layouts.colouredLayout;
|
||||
return function(loggingEvent) {
|
||||
console._preLog4js_log(layout(loggingEvent));
|
||||
if (configState.lastMTime && (mtime.getTime() > configState.lastMTime.getTime())) {
|
||||
configureOnceOff(loadConfigurationFile(configState.filename));
|
||||
}
|
||||
configState.lastMTime = mtime;
|
||||
}
|
||||
|
||||
function getMTime(filename) {
|
||||
var mtime;
|
||||
try {
|
||||
mtime = fs.statSync(configState.filename).mtime;
|
||||
} catch (e) {
|
||||
getLogger('log4js').warn('Failed to load configuration file ' + filename);
|
||||
}
|
||||
return mtime;
|
||||
}
|
||||
|
||||
function initReloadConfiguration(filename, options) {
|
||||
if (configState.timerId) {
|
||||
clearInterval(configState.timerId);
|
||||
delete configState.timerId;
|
||||
}
|
||||
configState.filename = filename;
|
||||
configState.lastMTime = getMTime(filename);
|
||||
configState.timerId = setInterval(reloadConfiguration, options.reloadSecs*1000);
|
||||
}
|
||||
|
||||
function configure(configurationFileOrObject, options) {
|
||||
var config = configurationFileOrObject;
|
||||
config = config || process.env.LOG4JS_CONFIG;
|
||||
options = options || {};
|
||||
|
||||
if (config === undefined || config === null || typeof(config) === 'string') {
|
||||
if (options.reloadSecs) {
|
||||
initReloadConfiguration(config, options);
|
||||
}
|
||||
config = loadConfigurationFile(config) || defaultConfig;
|
||||
} else {
|
||||
if (options.reloadSecs) {
|
||||
getLogger('log4js').warn(
|
||||
'Ignoring configuration reload parameter for "object" configuration.'
|
||||
);
|
||||
}
|
||||
}
|
||||
configureOnceOff(config, options);
|
||||
}
|
||||
|
||||
var originalConsoleFunctions = {
|
||||
log: console.log,
|
||||
debug: console.debug,
|
||||
info: console.info,
|
||||
warn: console.warn,
|
||||
error: console.error
|
||||
};
|
||||
|
||||
function replaceConsole(logger) {
|
||||
function replaceWith(fn) {
|
||||
return function() {
|
||||
fn.apply(logger, arguments);
|
||||
};
|
||||
}
|
||||
logger = logger || getLogger("console");
|
||||
['log','debug','info','warn','error'].forEach(function (item) {
|
||||
console[item] = replaceWith(item === 'log' ? logger.info : logger[item]);
|
||||
});
|
||||
}
|
||||
|
||||
function restoreConsole() {
|
||||
['log', 'debug', 'info', 'warn', 'error'].forEach(function (item) {
|
||||
console[item] = originalConsoleFunctions[item];
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* File Appender writing the logs to a text file. Supports rolling of logs by size.
|
||||
* Load an appenderModule based on the provided appender filepath. Will first
|
||||
* check if the appender path is a subpath of the log4js "lib/appenders" directory.
|
||||
* If not, it will attempt to load the the appender as complete path.
|
||||
*
|
||||
* @param file file log messages will be written to
|
||||
* @param layout a function that takes a logevent and returns a string (defaults to basicLayout).
|
||||
* @param logSize - the maximum size (in bytes) for a log file, if not provided then logs won't be rotated.
|
||||
* @param numBackups - the number of log files to keep after logSize has been reached (default 5)
|
||||
* @param filePollInterval - the time in seconds between file size checks (default 30s)
|
||||
* @param {string} appender The filepath for the appender.
|
||||
* @returns {Object|null} The required appender or null if appender could not be loaded.
|
||||
* @private
|
||||
*/
|
||||
function fileAppender (file, layout, logSize, numBackups, filePollInterval) {
|
||||
layout = layout || layouts.basicLayout;
|
||||
var logFile = fs.createWriteStream(file, { flags: 'a', mode: 0644, encoding: 'utf8' });
|
||||
|
||||
if (logSize > 0) {
|
||||
setupLogRolling(logFile, file, logSize, numBackups || 5, (filePollInterval * 1000) || 30000);
|
||||
}
|
||||
|
||||
//close the file on process exit.
|
||||
process.on('exit', function() {
|
||||
logFile.end();
|
||||
logFile.destroySoon();
|
||||
});
|
||||
|
||||
return function(loggingEvent) {
|
||||
logFile.write(layout(loggingEvent)+'\n');
|
||||
};
|
||||
function requireAppender(appender) {
|
||||
var appenderModule;
|
||||
try {
|
||||
appenderModule = require('./appenders/' + appender);
|
||||
} catch (e) {
|
||||
appenderModule = require(appender);
|
||||
}
|
||||
return appenderModule;
|
||||
}
|
||||
|
||||
function setupLogRolling (logFile, filename, logSize, numBackups, filePollInterval) {
|
||||
fs.watchFile(
|
||||
filename,
|
||||
{
|
||||
persistent: false,
|
||||
interval: filePollInterval
|
||||
},
|
||||
function (curr, prev) {
|
||||
if (curr.size >= logSize) {
|
||||
rollThatLog(logFile, filename, numBackups);
|
||||
}
|
||||
}
|
||||
);
|
||||
/**
|
||||
* Load an appender. Provided the appender path to be loaded. If appenderModule is defined,
|
||||
* it will be used in place of requiring the appender module.
|
||||
*
|
||||
* @param {string} appender The path to the appender module.
|
||||
* @param {Object|void} [appenderModule] The pre-required appender module. When provided,
|
||||
* instead of requiring the appender by its path, this object will be used.
|
||||
* @returns {void}
|
||||
* @private
|
||||
*/
|
||||
function loadAppender(appender, appenderModule) {
|
||||
appenderModule = appenderModule || requireAppender(appender);
|
||||
|
||||
if (!appenderModule) {
|
||||
throw new Error("Invalid log4js appender: " + util.inspect(appender));
|
||||
}
|
||||
|
||||
module.exports.appenders[appender] = appenderModule.appender.bind(appenderModule);
|
||||
if (appenderModule.shutdown) {
|
||||
appenderShutdowns[appender] = appenderModule.shutdown.bind(appenderModule);
|
||||
}
|
||||
appenderMakers[appender] = appenderModule.configure.bind(appenderModule);
|
||||
}
|
||||
|
||||
function rollThatLog (logFile, filename, numBackups) {
|
||||
//first close the current one.
|
||||
logFile.end();
|
||||
logFile.destroySoon();
|
||||
//roll the backups (rename file.n-1 to file.n, where n <= numBackups)
|
||||
for (var i=numBackups; i > 0; i--) {
|
||||
if (i > 1) {
|
||||
if (fileExists(filename + '.' + (i-1))) {
|
||||
fs.renameSync(filename+'.'+(i-1), filename+'.'+i);
|
||||
}
|
||||
} else {
|
||||
fs.renameSync(filename, filename+'.1');
|
||||
}
|
||||
}
|
||||
//open it up again
|
||||
logFile = fs.createWriteStream(filename, { flags: 'a', mode: 0644, encoding: "utf8" });
|
||||
/**
|
||||
* Shutdown all log appenders. This will first disable all writing to appenders
|
||||
* and then call the shutdown function each appender.
|
||||
*
|
||||
* @params {Function} cb - The callback to be invoked once all appenders have
|
||||
* shutdown. If an error occurs, the callback will be given the error object
|
||||
* as the first argument.
|
||||
* @returns {void}
|
||||
*/
|
||||
function shutdown(cb) {
|
||||
// First, disable all writing to appenders. This prevents appenders from
|
||||
// not being able to be drained because of run-away log writes.
|
||||
loggerModule.disableAllLogWrites();
|
||||
|
||||
// Next, get all the shutdown functions for appenders as an array.
|
||||
var shutdownFunctions = Object.keys(appenderShutdowns).reduce(
|
||||
function(accum, category) {
|
||||
return accum.concat(appenderShutdowns[category]);
|
||||
}, []);
|
||||
|
||||
// Call each of the shutdown functions.
|
||||
async.forEach(
|
||||
shutdownFunctions,
|
||||
function(shutdownFn, done) {
|
||||
shutdownFn(done);
|
||||
},
|
||||
cb
|
||||
);
|
||||
}
|
||||
|
||||
function fileExists (filename) {
|
||||
try {
|
||||
fs.statSync(filename);
|
||||
return true;
|
||||
} catch (e) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
function configure (configurationFileOrObject) {
|
||||
var config = configurationFileOrObject;
|
||||
if (typeof(config) === "string") {
|
||||
config = JSON.parse(fs.readFileSync(config, "utf8"));
|
||||
}
|
||||
if (config) {
|
||||
try {
|
||||
configureAppenders(config.appenders, fileAppender, consoleAppender);
|
||||
configureLevels(config.levels);
|
||||
} catch (e) {
|
||||
throw new Error("Problem reading log4js config " + sys.inspect(config) + ". Error was \"" + e.message + "\" ("+e.stack+")");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function findConfiguration() {
|
||||
//add current directory onto the list of configPaths
|
||||
var paths = ['.'].concat(require.paths);
|
||||
//add this module's directory to the end of the list, so that we pick up the default config
|
||||
paths.push(__dirname);
|
||||
var pathsWithConfig = paths.filter( function (pathToCheck) {
|
||||
try {
|
||||
fs.statSync(path.join(pathToCheck, "log4js.json"));
|
||||
return true;
|
||||
} catch (e) {
|
||||
return false;
|
||||
}
|
||||
});
|
||||
if (pathsWithConfig.length > 0) {
|
||||
return path.join(pathsWithConfig[0], 'log4js.json');
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
function replaceConsole(logger) {
|
||||
function replaceWith(fn) {
|
||||
return function() {
|
||||
fn.apply(logger, arguments);
|
||||
}
|
||||
}
|
||||
|
||||
['log','debug','info','warn','error'].forEach(function (item) {
|
||||
console['_preLog4js_'+item] = console[item];
|
||||
console[item] = replaceWith(item === 'log' ? logger.info : logger[item]);
|
||||
});
|
||||
|
||||
}
|
||||
|
||||
//set ourselves up if we can find a default log4js.json
|
||||
configure(findConfiguration());
|
||||
//replace console.log, etc with log4js versions
|
||||
replaceConsole(getLogger("console"));
|
||||
|
||||
module.exports = {
|
||||
getLogger: getLogger,
|
||||
getDefaultLogger: getDefaultLogger,
|
||||
|
||||
addAppender: addAppender,
|
||||
clearAppenders: clearAppenders,
|
||||
configure: configure,
|
||||
|
||||
levels: levels,
|
||||
setGlobalLogLevel: setGlobalLogLevel,
|
||||
|
||||
consoleAppender: consoleAppender,
|
||||
fileAppender: fileAppender,
|
||||
logLevelFilter: logLevelFilter,
|
||||
|
||||
layouts: layouts,
|
||||
connectLogger: require('./connect-logger').connectLogger(this)
|
||||
getLogger: getLogger,
|
||||
getDefaultLogger: getDefaultLogger,
|
||||
hasLogger: hasLogger,
|
||||
|
||||
addAppender: addAppender,
|
||||
loadAppender: loadAppender,
|
||||
clearAppenders: clearAppenders,
|
||||
configure: configure,
|
||||
shutdown: shutdown,
|
||||
|
||||
replaceConsole: replaceConsole,
|
||||
restoreConsole: restoreConsole,
|
||||
|
||||
levels: levels,
|
||||
setGlobalLogLevel: setGlobalLogLevel,
|
||||
|
||||
layouts: layouts,
|
||||
appenders: {},
|
||||
appenderMakers: appenderMakers,
|
||||
connectLogger: require('./connect-logger').connectLogger
|
||||
};
|
||||
|
||||
//keep the old-style layouts
|
||||
['basicLayout','messagePassThroughLayout','colouredLayout','coloredLayout'].forEach(function(item) {
|
||||
module.exports[item] = layouts[item];
|
||||
});
|
||||
//set ourselves up
|
||||
configure();
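A hedged sketch of typical use of the interface exported above; the category name and appender settings are illustrative, not taken from this repository:
var log4js = require('./lib/log4js');
log4js.configure({
  appenders: [
    { type: 'console' },
    { type: 'file', filename: 'app.log', maxLogSize: 1048576, backups: 3, category: 'app' }
  ]
});
var logger = log4js.getLogger('app');
logger.setLevel('INFO');
logger.info('application started');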
|
||||
|
||||
|
||||
102 lib/logger.js Normal file
@@ -0,0 +1,102 @@
|
||||
"use strict";
|
||||
var levels = require('./levels')
|
||||
, util = require('util')
|
||||
, events = require('events')
|
||||
, DEFAULT_CATEGORY = '[default]';
|
||||
|
||||
var logWritesEnabled = true;
|
||||
|
||||
/**
|
||||
* Models a logging event.
|
||||
* @constructor
|
||||
* @param {String} categoryName name of category
|
||||
* @param {Log4js.Level} level level of message
|
||||
* @param {Array} data objects to log
|
||||
* @param {Log4js.Logger} logger the associated logger
|
||||
* @author Seth Chisamore
|
||||
*/
|
||||
function LoggingEvent (categoryName, level, data, logger) {
|
||||
this.startTime = new Date();
|
||||
this.categoryName = categoryName;
|
||||
this.data = data;
|
||||
this.level = level;
|
||||
this.logger = logger;
|
||||
}
|
||||
|
||||
/**
|
||||
* Logger to log messages.
|
||||
* use {@see Log4js#getLogger(String)} to get an instance.
|
||||
* @constructor
|
||||
* @param name name of category to log to
|
||||
* @author Stephan Strittmatter
|
||||
*/
|
||||
function Logger (name, level) {
|
||||
this.category = name || DEFAULT_CATEGORY;
|
||||
|
||||
if (level) {
|
||||
this.setLevel(level);
|
||||
}
|
||||
}
|
||||
util.inherits(Logger, events.EventEmitter);
|
||||
Logger.DEFAULT_CATEGORY = DEFAULT_CATEGORY;
|
||||
Logger.prototype.level = levels.TRACE;
|
||||
|
||||
Logger.prototype.setLevel = function(level) {
|
||||
this.level = levels.toLevel(level, this.level || levels.TRACE);
|
||||
};
|
||||
|
||||
Logger.prototype.removeLevel = function() {
|
||||
delete this.level;
|
||||
};
|
||||
|
||||
Logger.prototype.log = function() {
|
||||
var args = Array.prototype.slice.call(arguments)
|
||||
, logLevel = levels.toLevel(args.shift())
|
||||
, loggingEvent;
|
||||
if (this.isLevelEnabled(logLevel)) {
|
||||
loggingEvent = new LoggingEvent(this.category, logLevel, args, this);
|
||||
this.emit("log", loggingEvent);
|
||||
}
|
||||
};
|
||||
|
||||
Logger.prototype.isLevelEnabled = function(otherLevel) {
|
||||
return this.level.isLessThanOrEqualTo(otherLevel);
|
||||
};
|
||||
|
||||
['Trace','Debug','Info','Warn','Error','Fatal'].forEach(
|
||||
function(levelString) {
|
||||
var level = levels.toLevel(levelString);
|
||||
Logger.prototype['is'+levelString+'Enabled'] = function() {
|
||||
return this.isLevelEnabled(level);
|
||||
};
|
||||
|
||||
Logger.prototype[levelString.toLowerCase()] = function () {
|
||||
if (logWritesEnabled && this.isLevelEnabled(level)) {
|
||||
var args = Array.prototype.slice.call(arguments);
|
||||
args.unshift(level);
|
||||
Logger.prototype.log.apply(this, args);
|
||||
}
|
||||
};
|
||||
}
|
||||
);
|
||||
|
||||
/**
|
||||
* Disable all log writes.
|
||||
* @returns {void}
|
||||
*/
|
||||
function disableAllLogWrites() {
|
||||
logWritesEnabled = false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Enable log writes.
|
||||
* @returns {void}
|
||||
*/
|
||||
function enableAllLogWrites() {
|
||||
logWritesEnabled = true;
|
||||
}
|
||||
|
||||
exports.LoggingEvent = LoggingEvent;
|
||||
exports.Logger = Logger;
|
||||
exports.disableAllLogWrites = disableAllLogWrites;
|
||||
exports.enableAllLogWrites = enableAllLogWrites;
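A short sketch of how these logger exports fit together; the category name and the "log" handler below are illustrative:
var Logger = require('./lib/logger').Logger;
var logger = new Logger('example');                 // 'example' is an illustrative category
logger.setLevel('INFO');
logger.on('log', function(event) {                  // receives LoggingEvent instances
  console.log(event.categoryName, event.level.toString(), event.data);
});
logger.debug('below INFO, so never emitted');
logger.warn('emitted as a "log" event');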
|
||||
90 lib/streams/BaseRollingFileStream.js Normal file
@@ -0,0 +1,90 @@
|
||||
"use strict";
|
||||
var fs = require('fs')
|
||||
, stream
|
||||
, debug = require('../debug')('BaseRollingFileStream')
|
||||
, util = require('util')
|
||||
, semver = require('semver');
|
||||
|
||||
if (semver.satisfies(process.version, '>=0.10.0')) {
|
||||
stream = require('stream');
|
||||
} else {
|
||||
stream = require('readable-stream');
|
||||
}
|
||||
|
||||
module.exports = BaseRollingFileStream;
|
||||
|
||||
function BaseRollingFileStream(filename, options) {
|
||||
debug("In BaseRollingFileStream");
|
||||
this.filename = filename;
|
||||
this.options = options || { encoding: 'utf8', mode: parseInt('0644', 8), flags: 'a' };
|
||||
this.currentSize = 0;
|
||||
|
||||
function currentFileSize(file) {
|
||||
var fileSize = 0;
|
||||
try {
|
||||
fileSize = fs.statSync(file).size;
|
||||
} catch (e) {
|
||||
// file does not exist
|
||||
}
|
||||
return fileSize;
|
||||
}
|
||||
|
||||
function throwErrorIfArgumentsAreNotValid() {
|
||||
if (!filename) {
|
||||
throw new Error("You must specify a filename");
|
||||
}
|
||||
}
|
||||
|
||||
throwErrorIfArgumentsAreNotValid();
|
||||
debug("Calling BaseRollingFileStream.super");
|
||||
BaseRollingFileStream.super_.call(this);
|
||||
this.openTheStream();
|
||||
this.currentSize = currentFileSize(this.filename);
|
||||
}
|
||||
util.inherits(BaseRollingFileStream, stream.Writable);
|
||||
|
||||
BaseRollingFileStream.prototype._write = function(chunk, encoding, callback) {
|
||||
var that = this;
|
||||
function writeTheChunk() {
|
||||
debug("writing the chunk to the underlying stream");
|
||||
that.currentSize += chunk.length;
|
||||
try {
|
||||
that.theStream.write(chunk, encoding, callback);
|
||||
}
|
||||
catch (err){
|
||||
debug(err);
|
||||
callback();
|
||||
}
|
||||
}
|
||||
|
||||
debug("in _write");
|
||||
|
||||
if (this.shouldRoll()) {
|
||||
this.currentSize = 0;
|
||||
this.roll(this.filename, writeTheChunk);
|
||||
} else {
|
||||
writeTheChunk();
|
||||
}
|
||||
};
|
||||
|
||||
BaseRollingFileStream.prototype.openTheStream = function(cb) {
|
||||
debug("opening the underlying stream");
|
||||
this.theStream = fs.createWriteStream(this.filename, this.options);
|
||||
if (cb) {
|
||||
this.theStream.on("open", cb);
|
||||
}
|
||||
};
|
||||
|
||||
BaseRollingFileStream.prototype.closeTheStream = function(cb) {
|
||||
debug("closing the underlying stream");
|
||||
this.theStream.end(cb);
|
||||
};
|
||||
|
||||
BaseRollingFileStream.prototype.shouldRoll = function() {
|
||||
return false; // default behaviour is never to roll
|
||||
};
|
||||
|
||||
BaseRollingFileStream.prototype.roll = function(filename, callback) {
|
||||
callback(); // default behaviour is not to do anything
|
||||
};
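shouldRoll and roll are the two hooks subclasses override; a toy subclass (name and rolling policy invented for illustration) might look like this:
var util = require('util');
var BaseRollingFileStream = require('./lib/streams/BaseRollingFileStream');

// hypothetical subclass that rolls after every ten writes
function EveryTenWritesStream(filename, options) {
  this.writesSinceRoll = 0;
  EveryTenWritesStream.super_.call(this, filename, options);
}
util.inherits(EveryTenWritesStream, BaseRollingFileStream);

EveryTenWritesStream.prototype.shouldRoll = function() {
  return ++this.writesSinceRoll > 10;
};

EveryTenWritesStream.prototype.roll = function(filename, callback) {
  this.writesSinceRoll = 0;
  callback();   // a real implementation would rename or truncate files here
};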
|
||||
|
||||
95 lib/streams/DateRollingFileStream.js Normal file
@@ -0,0 +1,95 @@
|
||||
"use strict";
|
||||
var BaseRollingFileStream = require('./BaseRollingFileStream')
|
||||
, debug = require('../debug')('DateRollingFileStream')
|
||||
, format = require('../date_format')
|
||||
, async = require('async')
|
||||
, fs = require('fs')
|
||||
, util = require('util');
|
||||
|
||||
module.exports = DateRollingFileStream;
|
||||
|
||||
function DateRollingFileStream(filename, pattern, options, now) {
|
||||
debug("Now is " + now);
|
||||
if (pattern && typeof(pattern) === 'object') {
|
||||
now = options;
|
||||
options = pattern;
|
||||
pattern = null;
|
||||
}
|
||||
this.pattern = pattern || '.yyyy-MM-dd';
|
||||
this.now = now || Date.now;
|
||||
|
||||
if (fs.existsSync(filename)) {
|
||||
var stat = fs.statSync(filename);
|
||||
this.lastTimeWeWroteSomething = format.asString(this.pattern, stat.mtime);
|
||||
} else {
|
||||
this.lastTimeWeWroteSomething = format.asString(this.pattern, new Date(this.now()));
|
||||
}
|
||||
|
||||
this.baseFilename = filename;
|
||||
this.alwaysIncludePattern = false;
|
||||
|
||||
if (options) {
|
||||
if (options.alwaysIncludePattern) {
|
||||
this.alwaysIncludePattern = true;
|
||||
filename = this.baseFilename + this.lastTimeWeWroteSomething;
|
||||
}
|
||||
delete options.alwaysIncludePattern;
|
||||
if (Object.keys(options).length === 0) {
|
||||
options = null;
|
||||
}
|
||||
}
|
||||
debug("this.now is " + this.now + ", now is " + now);
|
||||
|
||||
DateRollingFileStream.super_.call(this, filename, options);
|
||||
}
|
||||
util.inherits(DateRollingFileStream, BaseRollingFileStream);
|
||||
|
||||
DateRollingFileStream.prototype.shouldRoll = function() {
|
||||
var lastTime = this.lastTimeWeWroteSomething,
|
||||
thisTime = format.asString(this.pattern, new Date(this.now()));
|
||||
|
||||
debug("DateRollingFileStream.shouldRoll with now = " +
|
||||
this.now() + ", thisTime = " + thisTime + ", lastTime = " + lastTime);
|
||||
|
||||
this.lastTimeWeWroteSomething = thisTime;
|
||||
this.previousTime = lastTime;
|
||||
|
||||
return thisTime !== lastTime;
|
||||
};
|
||||
|
||||
DateRollingFileStream.prototype.roll = function(filename, callback) {
|
||||
var that = this;
|
||||
|
||||
debug("Starting roll");
|
||||
|
||||
if (this.alwaysIncludePattern) {
|
||||
this.filename = this.baseFilename + this.lastTimeWeWroteSomething;
|
||||
async.series([
|
||||
this.closeTheStream.bind(this),
|
||||
this.openTheStream.bind(this)
|
||||
], callback);
|
||||
} else {
|
||||
var newFilename = this.baseFilename + this.previousTime;
|
||||
async.series([
|
||||
this.closeTheStream.bind(this),
|
||||
deleteAnyExistingFile,
|
||||
renameTheCurrentFile,
|
||||
this.openTheStream.bind(this)
|
||||
], callback);
|
||||
}
|
||||
|
||||
function deleteAnyExistingFile(cb) {
|
||||
//on windows, you can get an EEXIST error if you rename a file to an existing file
|
||||
//so, we'll try to delete the file we're renaming to first
|
||||
fs.unlink(newFilename, function (err) {
|
||||
//ignore err: if we could not delete, it's most likely that it doesn't exist
|
||||
cb();
|
||||
});
|
||||
}
|
||||
|
||||
function renameTheCurrentFile(cb) {
|
||||
debug("Renaming the " + filename + " -> " + newFilename);
|
||||
fs.rename(filename, newFilename, cb);
|
||||
}
|
||||
|
||||
};
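A usage sketch, with an illustrative filename; the pattern shown here is the same as the default:
var DateRollingFileStream = require('./lib/streams/DateRollingFileStream');
var stream = new DateRollingFileStream('/tmp/app.log', '.yyyy-MM-dd');
stream.write('one line per event\n', 'utf8');
// when the formatted date changes, the current file is renamed to
// /tmp/app.log.<previous date> and a fresh /tmp/app.log is opened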
|
||||
89 lib/streams/RollingFileStream.js Normal file
@@ -0,0 +1,89 @@
|
||||
"use strict";
|
||||
var BaseRollingFileStream = require('./BaseRollingFileStream')
|
||||
, debug = require('../debug')('RollingFileStream')
|
||||
, util = require('util')
|
||||
, path = require('path')
|
||||
, fs = require('fs')
|
||||
, async = require('async');
|
||||
|
||||
module.exports = RollingFileStream;
|
||||
|
||||
function RollingFileStream (filename, size, backups, options) {
|
||||
this.size = size;
|
||||
this.backups = backups || 1;
|
||||
|
||||
function throwErrorIfArgumentsAreNotValid() {
|
||||
if (!filename || !size || size <= 0) {
|
||||
throw new Error("You must specify a filename and file size");
|
||||
}
|
||||
}
|
||||
|
||||
throwErrorIfArgumentsAreNotValid();
|
||||
|
||||
RollingFileStream.super_.call(this, filename, options);
|
||||
}
|
||||
util.inherits(RollingFileStream, BaseRollingFileStream);
|
||||
|
||||
RollingFileStream.prototype.shouldRoll = function() {
|
||||
debug("should roll with current size %d, and max size %d", this.currentSize, this.size);
|
||||
return this.currentSize >= this.size;
|
||||
};
|
||||
|
||||
RollingFileStream.prototype.roll = function(filename, callback) {
|
||||
var that = this,
|
||||
nameMatcher = new RegExp('^' + path.basename(filename));
|
||||
|
||||
function justTheseFiles (item) {
|
||||
return nameMatcher.test(item);
|
||||
}
|
||||
|
||||
function index(filename_) {
|
||||
return parseInt(filename_.substring((path.basename(filename) + '.').length), 10) || 0;
|
||||
}
|
||||
|
||||
function byIndex(a, b) {
|
||||
if (index(a) > index(b)) {
|
||||
return 1;
|
||||
} else if (index(a) < index(b) ) {
|
||||
return -1;
|
||||
} else {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
function increaseFileIndex (fileToRename, cb) {
|
||||
var idx = index(fileToRename);
|
||||
debug('Index of ' + fileToRename + ' is ' + idx);
|
||||
if (idx < that.backups) {
|
||||
//on windows, you can get an EEXIST error if you rename a file to an existing file
|
||||
//so, we'll try to delete the file we're renaming to first
|
||||
fs.unlink(filename + '.' + (idx+1), function (err) {
|
||||
//ignore err: if we could not delete, it's most likely that it doesn't exist
|
||||
debug('Renaming ' + fileToRename + ' -> ' + filename + '.' + (idx+1));
|
||||
fs.rename(path.join(path.dirname(filename), fileToRename), filename + '.' + (idx + 1), cb);
|
||||
});
|
||||
} else {
|
||||
cb();
|
||||
}
|
||||
}
|
||||
|
||||
function renameTheFiles(cb) {
|
||||
//roll the backups (rename file.n to file.n+1, where n <= numBackups)
|
||||
debug("Renaming the old files");
|
||||
fs.readdir(path.dirname(filename), function (err, files) {
|
||||
async.forEachSeries(
|
||||
files.filter(justTheseFiles).sort(byIndex).reverse(),
|
||||
increaseFileIndex,
|
||||
cb
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
debug("Rolling, rolling, rolling");
|
||||
async.series([
|
||||
this.closeTheStream.bind(this),
|
||||
renameTheFiles,
|
||||
this.openTheStream.bind(this)
|
||||
], callback);
|
||||
|
||||
};
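A usage sketch of the size-based variant; the path, size limit and backup count are illustrative:
var RollingFileStream = require('./lib/streams/RollingFileStream');
// roll once the file passes ~1MB, keeping at most 3 numbered backups
var stream = new RollingFileStream('/tmp/app.log', 1024 * 1024, 3);
stream.write('a log line\n', 'utf8');
// on roll: app.log.2 -> app.log.3, app.log.1 -> app.log.2, app.log -> app.log.1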
|
||||
2 lib/streams/index.js Normal file
@@ -0,0 +1,2 @@
|
||||
exports.RollingFileStream = require('./RollingFileStream');
|
||||
exports.DateRollingFileStream = require('./DateRollingFileStream');
|
||||
67 package.json
@@ -1,27 +1,44 @@
|
||||
{
|
||||
"name": "log4js",
|
||||
"version": "0.3.0",
|
||||
"description": "Port of Log4js to work with node.",
|
||||
"keywords": [
|
||||
"logging",
|
||||
"log",
|
||||
"log4j",
|
||||
"node"
|
||||
],
|
||||
"main": "./lib/log4js",
|
||||
"author": "Gareth Jones <gareth.jones@sensis.com.au>",
|
||||
"bugs": {
|
||||
"web": "http://github.com/csausdev/log4js-node/issues"
|
||||
},
|
||||
"engines": [ "node >=0.1.100" ],
|
||||
"scripts": {
|
||||
"test": "vows test/*.js"
|
||||
},
|
||||
"directories": {
|
||||
"test": "test",
|
||||
"lib": "lib"
|
||||
},
|
||||
"devDependencies": {
|
||||
"vows": ">=0.5.2"
|
||||
}
|
||||
"name": "log4js",
|
||||
"version": "0.6.15",
|
||||
"description": "Port of Log4js to work with node.",
|
||||
"keywords": [
|
||||
"logging",
|
||||
"log",
|
||||
"log4j",
|
||||
"node"
|
||||
],
|
||||
"main": "./lib/log4js",
|
||||
"author": "Gareth Jones <gareth.nomiddlename@gmail.com>",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/nomiddlename/log4js-node.git"
|
||||
},
|
||||
"bugs": {
|
||||
"url": "http://github.com/nomiddlename/log4js-node/issues"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.8"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "vows"
|
||||
},
|
||||
"directories": {
|
||||
"test": "test",
|
||||
"lib": "lib"
|
||||
},
|
||||
"dependencies": {
|
||||
"async": "0.1.15",
|
||||
"semver": "~1.1.4",
|
||||
"readable-stream": "~1.0.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
"vows": "0.7.0",
|
||||
"sandboxed-module": "0.1.3",
|
||||
"hook.io": "0.8.10",
|
||||
"underscore": "1.2.1"
|
||||
},
|
||||
"browser": {
|
||||
"os": false
|
||||
}
|
||||
}
|
||||
|
||||
83 test/categoryFilter-test.js Normal file
@@ -0,0 +1,83 @@
|
||||
'use strict';
|
||||
|
||||
var vows = require('vows')
|
||||
, fs = require('fs')
|
||||
, assert = require('assert');
|
||||
|
||||
function remove(filename) {
|
||||
try {
|
||||
fs.unlinkSync(filename);
|
||||
} catch (e) {
|
||||
//doesn't really matter if it failed
|
||||
}
|
||||
}
|
||||
|
||||
vows.describe('log4js categoryFilter').addBatch({
|
||||
'appender': {
|
||||
topic: function() {
|
||||
|
||||
var log4js = require('../lib/log4js'), logEvents = [], webLogger, appLogger;
|
||||
log4js.clearAppenders();
|
||||
var appender = require('../lib/appenders/categoryFilter')
|
||||
.appender(
|
||||
['app'],
|
||||
function(evt) { logEvents.push(evt); }
|
||||
);
|
||||
log4js.addAppender(appender, ["app","web"]);
|
||||
|
||||
webLogger = log4js.getLogger("web");
|
||||
appLogger = log4js.getLogger("app");
|
||||
|
||||
webLogger.debug('This should get logged');
|
||||
appLogger.debug('This should not');
|
||||
webLogger.debug('Hello again');
|
||||
log4js.getLogger('db').debug('This shouldn\'t be included by the appender anyway');
|
||||
|
||||
return logEvents;
|
||||
},
|
||||
'should only pass matching category' : function(logEvents) {
|
||||
assert.equal(logEvents.length, 2);
|
||||
assert.equal(logEvents[0].data[0], 'This should get logged');
|
||||
assert.equal(logEvents[1].data[0], 'Hello again');
|
||||
}
|
||||
},
|
||||
|
||||
'configure': {
|
||||
topic: function() {
|
||||
var log4js = require('../lib/log4js')
|
||||
, logger, weblogger;
|
||||
|
||||
remove(__dirname + '/categoryFilter-web.log');
|
||||
remove(__dirname + '/categoryFilter-noweb.log');
|
||||
|
||||
log4js.configure('test/with-categoryFilter.json');
|
||||
logger = log4js.getLogger("app");
|
||||
weblogger = log4js.getLogger("web");
|
||||
|
||||
logger.info('Loading app');
|
||||
logger.debug('Initialising indexes');
|
||||
weblogger.info('00:00:00 GET / 200');
|
||||
weblogger.warn('00:00:00 GET / 500');
|
||||
//wait for the file system to catch up
|
||||
setTimeout(this.callback, 100);
|
||||
},
|
||||
'tmp-tests.log': {
|
||||
topic: function() {
|
||||
fs.readFile(__dirname + '/categoryFilter-noweb.log', 'utf8', this.callback);
|
||||
},
|
||||
'should contain all log messages': function(contents) {
|
||||
var messages = contents.trim().split('\n');
|
||||
assert.deepEqual(messages, ['Loading app','Initialising indexes']);
|
||||
}
|
||||
},
|
||||
'tmp-tests-web.log': {
|
||||
topic: function() {
|
||||
fs.readFile(__dirname + '/categoryFilter-web.log','utf8',this.callback);
|
||||
},
|
||||
'should contain only the web log messages': function(contents) {
|
||||
var messages = contents.trim().split('\n');
|
||||
assert.deepEqual(messages, ['00:00:00 GET / 200','00:00:00 GET / 500']);
|
||||
}
|
||||
}
|
||||
}
|
||||
}).export(module);
|
||||
128 test/clusteredAppender-test.js Executable file
@@ -0,0 +1,128 @@
|
||||
"use strict";
|
||||
var assert = require('assert');
|
||||
var vows = require('vows');
|
||||
var layouts = require('../lib/layouts');
|
||||
var sandbox = require('sandboxed-module');
|
||||
var LoggingEvent = require('../lib/logger').LoggingEvent;
|
||||
var cluster = require('cluster');
|
||||
|
||||
vows.describe('log4js cluster appender').addBatch({
|
||||
'when in master mode': {
|
||||
topic: function() {
|
||||
|
||||
var registeredClusterEvents = [];
|
||||
var loggingEvents = [];
|
||||
|
||||
// Fake cluster module, so that no real cluster listeners are added
|
||||
var fakeCluster = {
|
||||
|
||||
on: function(event, callback) {
|
||||
registeredClusterEvents.push(event);
|
||||
},
|
||||
|
||||
isMaster: true,
|
||||
isWorker: false,
|
||||
|
||||
};
|
||||
|
||||
var fakeActualAppender = function(loggingEvent) {
|
||||
loggingEvents.push(loggingEvent);
|
||||
}
|
||||
|
||||
// Load appender and fake modules in it
|
||||
var appenderModule = sandbox.require('../lib/appenders/clustered', {
|
||||
requires: {
|
||||
'cluster': fakeCluster,
|
||||
}
|
||||
});
|
||||
|
||||
var masterAppender = appenderModule.appender({
|
||||
actualAppenders: [fakeActualAppender, fakeActualAppender, fakeActualAppender],
|
||||
appenders: [{}, {category: "test"}, {category: "wovs"}]
|
||||
});
|
||||
|
||||
// Actual test - log message using masterAppender
|
||||
masterAppender(new LoggingEvent('wovs', 'Info', ['masterAppender test']));
|
||||
|
||||
var returnValue = {
|
||||
registeredClusterEvents: registeredClusterEvents,
|
||||
loggingEvents: loggingEvents,
|
||||
};
|
||||
|
||||
return returnValue;
|
||||
},
|
||||
|
||||
"should register 'fork' event listener on 'cluster'": function(topic) {
|
||||
assert.equal(topic.registeredClusterEvents[0], 'fork');
|
||||
},
|
||||
|
||||
"should log using actual appender": function(topic) {
|
||||
assert.equal(topic.loggingEvents.length, 2)
|
||||
assert.equal(topic.loggingEvents[0].data[0], 'masterAppender test');
|
||||
assert.equal(topic.loggingEvents[1].data[0], 'masterAppender test');
|
||||
},
|
||||
|
||||
},
|
||||
|
||||
'when in worker mode': {
|
||||
|
||||
topic: function() {
|
||||
|
||||
var registeredProcessEvents = [];
|
||||
|
||||
// Fake cluster module, pretending we're inside a worker process
|
||||
var fakeCluster = {
|
||||
|
||||
isMaster: false,
|
||||
isWorker: true,
|
||||
|
||||
};
|
||||
|
||||
var fakeProcess = {
|
||||
|
||||
send: function(data) {
|
||||
registeredProcessEvents.push(data);
|
||||
},
|
||||
|
||||
};
|
||||
|
||||
// Load appender and fake modules in it
|
||||
var appenderModule = sandbox.require('../lib/appenders/clustered', {
|
||||
requires: {
|
||||
'cluster': fakeCluster,
|
||||
},
|
||||
globals: {
|
||||
'process': fakeProcess,
|
||||
}
|
||||
});
|
||||
|
||||
var workerAppender = appenderModule.appender();
|
||||
|
||||
// Actual test - log message using workerAppender
|
||||
workerAppender(new LoggingEvent('wovs', 'Info', ['workerAppender test']));
|
||||
workerAppender(new LoggingEvent('wovs', 'Info', [new Error('Error test')]));
|
||||
|
||||
var returnValue = {
|
||||
registeredProcessEvents: registeredProcessEvents,
|
||||
};
|
||||
|
||||
return returnValue;
|
||||
|
||||
},
|
||||
|
||||
"worker appender should call process.send" : function(topic) {
|
||||
assert.equal(topic.registeredProcessEvents[0].type, '::log-message');
|
||||
assert.equal(JSON.parse(topic.registeredProcessEvents[0].event).data[0], "workerAppender test");
|
||||
},
|
||||
|
||||
"worker should serialize an Error correctly" : function(topic) {
|
||||
assert.equal(topic.registeredProcessEvents[1].type, '::log-message');
|
||||
assert(JSON.parse(topic.registeredProcessEvents[1].event).data[0].stack);
|
||||
var actual = JSON.parse(topic.registeredProcessEvents[1].event).data[0].stack;
|
||||
var expectedRegex = /^Error: Error test/;
|
||||
assert(actual.match(expectedRegex), "Expected: \n\n " + actual + "\n\n to match " + expectedRegex);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}).exportTo(module);
|
||||
149 test/configuration-test.js Normal file
@@ -0,0 +1,149 @@
|
||||
"use strict";
|
||||
var assert = require('assert')
|
||||
, vows = require('vows')
|
||||
, sandbox = require('sandboxed-module');
|
||||
|
||||
function makeTestAppender() {
|
||||
return {
|
||||
configure: function(config, options) {
|
||||
this.configureCalled = true;
|
||||
this.config = config;
|
||||
this.options = options;
|
||||
return this.appender();
|
||||
},
|
||||
appender: function() {
|
||||
var self = this;
|
||||
return function(logEvt) { self.logEvt = logEvt; };
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
vows.describe('log4js configure').addBatch({
|
||||
'appenders': {
|
||||
'when specified by type': {
|
||||
topic: function() {
|
||||
var testAppender = makeTestAppender(),
|
||||
log4js = sandbox.require(
|
||||
'../lib/log4js',
|
||||
{
|
||||
requires: {
|
||||
'./appenders/cheese': testAppender
|
||||
}
|
||||
}
|
||||
);
|
||||
log4js.configure(
|
||||
{
|
||||
appenders: [
|
||||
{ type: "cheese", flavour: "gouda" }
|
||||
]
|
||||
},
|
||||
{ pants: "yes" }
|
||||
);
|
||||
return testAppender;
|
||||
},
|
||||
'should load appender': function(testAppender) {
|
||||
assert.ok(testAppender.configureCalled);
|
||||
},
|
||||
'should pass config to appender': function(testAppender) {
|
||||
assert.equal(testAppender.config.flavour, 'gouda');
|
||||
},
|
||||
'should pass log4js options to appender': function(testAppender) {
|
||||
assert.equal(testAppender.options.pants, 'yes');
|
||||
}
|
||||
},
|
||||
'when core appender loaded via loadAppender': {
|
||||
topic: function() {
|
||||
var testAppender = makeTestAppender(),
|
||||
log4js = sandbox.require(
|
||||
'../lib/log4js',
|
||||
{ requires: { './appenders/cheese': testAppender } }
|
||||
);
|
||||
|
||||
log4js.loadAppender('cheese');
|
||||
return log4js;
|
||||
},
|
||||
'should load appender from ../lib/appenders': function(log4js) {
|
||||
assert.ok(log4js.appenders.cheese);
|
||||
},
|
||||
'should add appender configure function to appenderMakers' : function(log4js) {
|
||||
assert.isFunction(log4js.appenderMakers.cheese);
|
||||
}
|
||||
},
|
||||
'when appender in node_modules loaded via loadAppender': {
|
||||
topic: function() {
|
||||
var testAppender = makeTestAppender(),
|
||||
log4js = sandbox.require(
|
||||
'../lib/log4js',
|
||||
{ requires: { 'some/other/external': testAppender } }
|
||||
);
|
||||
log4js.loadAppender('some/other/external');
|
||||
return log4js;
|
||||
},
|
||||
'should load appender via require': function(log4js) {
|
||||
assert.ok(log4js.appenders['some/other/external']);
|
||||
},
|
||||
'should add appender configure function to appenderMakers': function(log4js) {
|
||||
assert.isFunction(log4js.appenderMakers['some/other/external']);
|
||||
}
|
||||
},
|
||||
'when appender object loaded via loadAppender': {
|
||||
topic: function() {
|
||||
var testAppender = makeTestAppender(),
|
||||
log4js = sandbox.require('../lib/log4js');
|
||||
|
||||
log4js.loadAppender('some/other/external', testAppender);
|
||||
return log4js;
|
||||
},
|
||||
'should load appender with provided object': function(log4js) {
|
||||
assert.ok(log4js.appenders['some/other/external']);
|
||||
},
|
||||
'should add appender configure function to appenderMakers': function(log4js) {
|
||||
assert.isFunction(log4js.appenderMakers['some/other/external']);
|
||||
}
|
||||
},
|
||||
'when configuration file loaded via LOG4JS_CONFIG environment variable': {
|
||||
topic: function() {
|
||||
process.env.LOG4JS_CONFIG = 'some/path/to/mylog4js.json';
|
||||
var fileRead = 0,
|
||||
modulePath = 'some/path/to/mylog4js.json',
|
||||
pathsChecked = [],
|
||||
mtime = new Date(),
|
||||
fakeFS = {
|
||||
config: { appenders: [ { type: 'console', layout: { type: 'messagePassThrough' } } ],
|
||||
levels: { 'a-test' : 'INFO' } },
|
||||
readdirSync: function(dir) {
|
||||
return require('fs').readdirSync(dir);
|
||||
},
|
||||
readFileSync: function (file, encoding) {
|
||||
fileRead += 1;
|
||||
assert.isString(file);
|
||||
assert.equal(file, modulePath);
|
||||
assert.equal(encoding, 'utf8');
|
||||
return JSON.stringify(fakeFS.config);
|
||||
},
|
||||
statSync: function (path) {
|
||||
pathsChecked.push(path);
|
||||
if (path === modulePath) {
|
||||
return { mtime: mtime };
|
||||
} else {
|
||||
throw new Error("no such file");
|
||||
}
|
||||
}
|
||||
},
|
||||
log4js = sandbox.require(
|
||||
'../lib/log4js',
|
||||
{
|
||||
requires: {
|
||||
'fs': fakeFS,
|
||||
}
|
||||
}
|
||||
);
|
||||
delete process.env.LOG4JS_CONFIG;
|
||||
return fileRead;
|
||||
},
|
||||
'should load the specified local configuration file' : function(fileRead) {
|
||||
assert.equal(fileRead, 1);
|
||||
}
|
||||
}
|
||||
}
|
||||
}).exportTo(module);
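These tests assume appender modules expose appender() and configure(config, options); a minimal module in that shape (the 'cheese' name is just the tests' placeholder) could look like:
// hypothetical appenders/cheese.js matching the interface the tests expect
exports.appender = function(flavour) {
  return function(loggingEvent) {
    console.log('[' + flavour + ']', loggingEvent.data.join(' '));
  };
};

exports.configure = function(config, options) {
  // config is the entry from the "appenders" array; options is log4js.configure's second argument
  return exports.appender(config.flavour);
};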
|
||||
173 test/configureNoLevels-test.js Normal file
@@ -0,0 +1,173 @@
|
||||
"use strict";
|
||||
// This test shows unexpected behaviour for log4js.configure() in log4js-node@0.4.3 and earlier:
|
||||
// 1) log4js.configure(), log4js.configure(null),
|
||||
// log4js.configure({}), log4js.configure(<some object with no levels prop>)
|
||||
// all set all loggers' levels to trace, even if they were previously set to something else.
|
||||
// 2) log4js.configure({levels:{}}), log4js.configure({levels: {foo:
|
||||
// bar}}) leave previously set logger levels intact.
|
||||
//
|
||||
|
||||
// Basic set up
|
||||
var vows = require('vows');
|
||||
var assert = require('assert');
|
||||
var toLevel = require('../lib/levels').toLevel;
|
||||
|
||||
// uncomment one or other of the following to see progress (or not) while running the tests
|
||||
// var showProgress = console.log;
|
||||
var showProgress = function() {};
|
||||
|
||||
|
||||
// Define the array of level names (as strings) to iterate over.
|
||||
var strLevels= ['Trace','Debug','Info','Warn','Error','Fatal'];
|
||||
|
||||
// setup the configurations we want to test
|
||||
var configs = {
|
||||
'nop': 'nop', // special case where the iterating vows generator will not call log4js.configure
|
||||
'is undefined': undefined,
|
||||
'is null': null,
|
||||
'is empty': {},
|
||||
'has no levels': {foo: 'bar'},
|
||||
'has null levels': {levels: null},
|
||||
'has empty levels': {levels: {}},
|
||||
'has random levels': {levels: {foo: 'bar'}},
|
||||
'has some valid levels': {levels: {A: 'INFO'}}
|
||||
};
|
||||
|
||||
// Set up the basic vows batches for this test
|
||||
var batches = [];
|
||||
|
||||
|
||||
function getLoggerName(level) {
|
||||
return level+'-logger';
|
||||
}
|
||||
|
||||
// the common vows top-level context, whether log4js.configure is called or not
|
||||
// just making sure that the code is common,
|
||||
// so that there are no spurious errors in the tests themselves.
|
||||
function getTopLevelContext(nop, configToTest, name) {
|
||||
return {
|
||||
topic: function() {
|
||||
var log4js = require('../lib/log4js');
|
||||
// create loggers for each level,
|
||||
// keeping the level in the logger's name for traceability
|
||||
strLevels.forEach(function(l) {
|
||||
log4js.getLogger(getLoggerName(l)).setLevel(l);
|
||||
});
|
||||
|
||||
if (!nop) {
|
||||
showProgress('** Configuring log4js with', configToTest);
|
||||
log4js.configure(configToTest);
|
||||
}
|
||||
else {
|
||||
showProgress('** Not configuring log4js');
|
||||
}
|
||||
return log4js;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
showProgress('Populating batch object...');
|
||||
|
||||
function checkForMismatch(topic) {
|
||||
var er = topic.log4js.levels.toLevel(topic.baseLevel)
|
||||
.isLessThanOrEqualTo(topic.log4js.levels.toLevel(topic.comparisonLevel));
|
||||
|
||||
assert.equal(
|
||||
er,
|
||||
topic.expectedResult,
|
||||
'Mismatch: for setLevel(' + topic.baseLevel +
|
||||
') was expecting a comparison with ' + topic.comparisonLevel +
|
||||
' to be ' + topic.expectedResult
|
||||
);
|
||||
}
|
||||
|
||||
function checkExpectedResult(topic) {
|
||||
var result = topic.log4js
|
||||
.getLogger(getLoggerName(topic.baseLevel))
|
||||
.isLevelEnabled(topic.log4js.levels.toLevel(topic.comparisonLevel));
|
||||
|
||||
assert.equal(
|
||||
result,
|
||||
topic.expectedResult,
|
||||
'Failed: ' + getLoggerName(topic.baseLevel) +
|
||||
'.isLevelEnabled( ' + topic.comparisonLevel + ' ) returned ' + result
|
||||
);
|
||||
}
|
||||
|
||||
function setupBaseLevelAndCompareToOtherLevels(baseLevel) {
|
||||
var baseLevelSubContext = 'and checking the logger whose level was set to '+baseLevel ;
|
||||
var subContext = { topic: baseLevel };
|
||||
batch[context][baseLevelSubContext] = subContext;
|
||||
|
||||
// each logging level has strLevels sub-contexts,
|
||||
// to exhaustively test all the combinations of
|
||||
// setLevel(baseLevel) and isLevelEnabled(comparisonLevel) per config
|
||||
strLevels.forEach(compareToOtherLevels(subContext));
|
||||
}
|
||||
|
||||
function compareToOtherLevels(subContext) {
|
||||
var baseLevel = subContext.topic;
|
||||
|
||||
return function (comparisonLevel) {
|
||||
var comparisonLevelSubContext = 'with isLevelEnabled('+comparisonLevel+')';
|
||||
|
||||
// calculate this independently of log4js, but we'll add a vow
|
||||
// later on to check that we're not mismatched with log4js
|
||||
var expectedResult = strLevels.indexOf(baseLevel) <= strLevels.indexOf(comparisonLevel);
|
||||
|
||||
// the topic simply gathers all the parameters for the vow
|
||||
// into an object, to simplify the vow's work.
|
||||
subContext[comparisonLevelSubContext] = {
|
||||
topic: function(baseLevel, log4js) {
|
||||
return {
|
||||
comparisonLevel: comparisonLevel,
|
||||
baseLevel: baseLevel,
|
||||
log4js: log4js,
|
||||
expectedResult: expectedResult
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
var vow = 'should return '+expectedResult;
|
||||
subContext[comparisonLevelSubContext][vow] = checkExpectedResult;
|
||||
|
||||
// the extra vow to check the comparison between baseLevel and
|
||||
// comparisonLevel we performed earlier matches log4js'
|
||||
// comparison too
|
||||
var subSubContext = subContext[comparisonLevelSubContext];
|
||||
subSubContext['finally checking for comparison mismatch with log4js'] = checkForMismatch;
|
||||
};
|
||||
}
|
||||
|
||||
// Populating the batches programmatically, as there are
|
||||
// (configs.length x strLevels.length x strLevels.length) = 324
|
||||
// possible test combinations
|
||||
for (var cfg in configs) {
|
||||
var configToTest = configs[cfg];
|
||||
var nop = configToTest === 'nop';
|
||||
var context;
|
||||
if (nop) {
|
||||
context = 'Setting up loggers with initial levels, then NOT setting a configuration,';
|
||||
}
|
||||
else {
|
||||
context = 'Setting up loggers with initial levels, then setting a configuration which '+cfg+',';
|
||||
}
|
||||
|
||||
showProgress('Setting up the vows batch and context for '+context);
|
||||
// each config to be tested has its own vows batch with a single top-level context
|
||||
var batch={};
|
||||
batch[context]= getTopLevelContext(nop, configToTest, context);
|
||||
batches.push(batch);
|
||||
|
||||
// each top-level context has strLevels sub-contexts, one per logger
|
||||
// which has set to a specific level in the top-level context's topic
|
||||
strLevels.forEach(setupBaseLevelAndCompareToOtherLevels);
|
||||
}
|
||||
|
||||
showProgress('Running tests');
|
||||
var v = vows.describe('log4js.configure(), with or without a "levels" property');
|
||||
|
||||
batches.forEach(function(batch) {v=v.addBatch(batch);});
|
||||
|
||||
v.export(module);
|
||||
|
||||
226 test/connect-logger-test.js Normal file
@@ -0,0 +1,226 @@
|
||||
/* jshint maxparams:7 */
|
||||
"use strict";
|
||||
var vows = require('vows')
|
||||
, assert = require('assert')
|
||||
, levels = require('../lib/levels');
|
||||
|
||||
function MockLogger() {
|
||||
|
||||
var that = this;
|
||||
this.messages = [];
|
||||
|
||||
this.log = function(level, message, exception) {
|
||||
that.messages.push({ level: level, message: message });
|
||||
};
|
||||
|
||||
this.isLevelEnabled = function(level) {
|
||||
return level.isGreaterThanOrEqualTo(that.level);
|
||||
};
|
||||
|
||||
this.level = levels.TRACE;
|
||||
|
||||
}
|
||||
|
||||
function MockRequest(remoteAddr, method, originalUrl, headers) {
|
||||
|
||||
this.socket = { remoteAddress: remoteAddr };
|
||||
this.originalUrl = originalUrl;
|
||||
this.method = method;
|
||||
this.httpVersionMajor = '5';
|
||||
this.httpVersionMinor = '0';
|
||||
this.headers = headers || {};
|
||||
|
||||
var self = this;
|
||||
Object.keys(this.headers).forEach(function(key) {
|
||||
self.headers[key.toLowerCase()] = self.headers[key];
|
||||
});
|
||||
}
|
||||
|
||||
function MockResponse() {
|
||||
|
||||
this.end = function(chunk, encoding) {
|
||||
};
|
||||
|
||||
this.writeHead = function(code, headers) {
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
function request(cl, method, url, code, reqHeaders, resHeaders) {
|
||||
var req = new MockRequest('my.remote.addr', method, url, reqHeaders);
|
||||
var res = new MockResponse();
|
||||
cl(req, res, function() {});
|
||||
res.writeHead(code, resHeaders);
|
||||
res.end('chunk','encoding');
|
||||
}
|
||||
|
||||
vows.describe('log4js connect logger').addBatch({
|
||||
'getConnectLoggerModule': {
|
||||
topic: function() {
|
||||
var clm = require('../lib/connect-logger');
|
||||
return clm;
|
||||
},
|
||||
|
||||
'should return a "connect logger" factory' : function(clm) {
|
||||
assert.isObject(clm);
|
||||
},
|
||||
|
||||
'take a log4js logger and return a "connect logger"' : {
|
||||
topic: function(clm) {
|
||||
var ml = new MockLogger();
|
||||
var cl = clm.connectLogger(ml);
|
||||
return cl;
|
||||
},
|
||||
|
||||
'should return a "connect logger"': function(cl) {
|
||||
assert.isFunction(cl);
|
||||
}
|
||||
},
|
||||
|
||||
'log events' : {
|
||||
topic: function(clm) {
|
||||
var ml = new MockLogger();
|
||||
var cl = clm.connectLogger(ml);
|
||||
request(cl, 'GET', 'http://url', 200);
|
||||
return ml.messages;
|
||||
},
|
||||
|
||||
'check message': function(messages) {
|
||||
assert.isArray(messages);
|
||||
assert.equal(messages.length, 1);
|
||||
assert.ok(levels.INFO.isEqualTo(messages[0].level));
|
||||
assert.include(messages[0].message, 'GET');
|
||||
assert.include(messages[0].message, 'http://url');
|
||||
assert.include(messages[0].message, 'my.remote.addr');
|
||||
assert.include(messages[0].message, '200');
|
||||
}
|
||||
},
|
||||
|
||||
'log events with level below logging level' : {
|
||||
topic: function(clm) {
|
||||
var ml = new MockLogger();
|
||||
ml.level = levels.FATAL;
|
||||
var cl = clm.connectLogger(ml);
|
||||
request(cl, 'GET', 'http://url', 200);
|
||||
return ml.messages;
|
||||
},
|
||||
|
||||
'check message': function(messages) {
|
||||
assert.isArray(messages);
|
||||
assert.isEmpty(messages);
|
||||
}
|
||||
},
|
||||
|
||||
'log events with non-default level and custom format' : {
|
||||
topic: function(clm) {
|
||||
var ml = new MockLogger();
|
||||
ml.level = levels.INFO;
|
||||
var cl = clm.connectLogger(ml, { level: levels.INFO, format: ':method :url' } );
|
||||
request(cl, 'GET', 'http://url', 200);
|
||||
return ml.messages;
|
||||
},
|
||||
|
||||
'check message': function(messages) {
|
||||
assert.isArray(messages);
|
||||
assert.equal(messages.length, 1);
|
||||
assert.ok(levels.INFO.isEqualTo(messages[0].level));
|
||||
assert.equal(messages[0].message, 'GET http://url');
|
||||
}
|
||||
},
|
||||
|
||||
'logger with options as string': {
|
||||
topic: function(clm) {
|
||||
var ml = new MockLogger();
|
||||
ml.level = levels.INFO;
|
||||
var cl = clm.connectLogger(ml, ':method :url');
|
||||
request(cl, 'POST', 'http://meh', 200);
|
||||
return ml.messages;
|
||||
},
|
||||
'should use the passed in format': function(messages) {
|
||||
assert.equal(messages[0].message, 'POST http://meh');
|
||||
}
|
||||
},
|
||||
|
||||
'auto log levels': {
|
||||
topic: function(clm) {
|
||||
var ml = new MockLogger();
|
||||
ml.level = levels.INFO;
|
||||
var cl = clm.connectLogger(ml, { level: 'auto', format: ':method :url' });
|
||||
request(cl, 'GET', 'http://meh', 200);
|
||||
request(cl, 'GET', 'http://meh', 201);
|
||||
request(cl, 'GET', 'http://meh', 302);
|
||||
request(cl, 'GET', 'http://meh', 404);
|
||||
request(cl, 'GET', 'http://meh', 500);
|
||||
return ml.messages;
|
||||
},
|
||||
|
||||
'should use INFO for 2xx': function(messages) {
|
||||
assert.ok(levels.INFO.isEqualTo(messages[0].level));
|
||||
assert.ok(levels.INFO.isEqualTo(messages[1].level));
|
||||
},
|
||||
|
||||
'should use WARN for 3xx': function(messages) {
|
||||
assert.ok(levels.WARN.isEqualTo(messages[2].level));
|
||||
},
|
||||
|
||||
'should use ERROR for 4xx': function(messages) {
|
||||
assert.ok(levels.ERROR.isEqualTo(messages[3].level));
|
||||
},
|
||||
|
||||
'should use ERROR for 5xx': function(messages) {
|
||||
assert.ok(levels.ERROR.isEqualTo(messages[4].level));
|
||||
}
|
||||
},
|
||||
|
||||
'format using a function': {
|
||||
topic: function(clm) {
|
||||
var ml = new MockLogger();
|
||||
ml.level = levels.INFO;
|
||||
var cl = clm.connectLogger(ml, function(req, res, formatFn) { return "I was called"; });
|
||||
request(cl, 'GET', 'http://blah', 200);
|
||||
return ml.messages;
|
||||
},
|
||||
|
||||
'should call the format function': function(messages) {
|
||||
assert.equal(messages[0].message, 'I was called');
|
||||
}
|
||||
},
|
||||
|
||||
'format that includes request headers': {
|
||||
topic: function(clm) {
|
||||
var ml = new MockLogger();
|
||||
ml.level = levels.INFO;
|
||||
var cl = clm.connectLogger(ml, ':req[Content-Type]');
|
||||
request(
|
||||
cl,
|
||||
'GET', 'http://blah', 200,
|
||||
{ 'Content-Type': 'application/json' }
|
||||
);
|
||||
return ml.messages;
|
||||
},
|
||||
'should output the request header': function(messages) {
|
||||
assert.equal(messages[0].message, 'application/json');
|
||||
}
|
||||
},
|
||||
|
||||
'format that includes response headers': {
|
||||
topic: function(clm) {
|
||||
var ml = new MockLogger();
|
||||
ml.level = levels.INFO;
|
||||
var cl = clm.connectLogger(ml, ':res[Content-Type]');
|
||||
request(
|
||||
cl,
|
||||
'GET', 'http://blah', 200,
|
||||
null,
|
||||
{ 'Content-Type': 'application/cheese' }
|
||||
);
|
||||
return ml.messages;
|
||||
},
|
||||
|
||||
'should output the response header': function(messages) {
|
||||
assert.equal(messages[0].message, 'application/cheese');
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}).export(module);
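A hedged sketch of the middleware these tests exercise, assuming an express app (express is not a dependency of this package; the port and category are illustrative):
var log4js = require('./lib/log4js');
var express = require('express');      // assumed to be installed separately
var app = express();
app.use(log4js.connectLogger(log4js.getLogger('http'), {
  level: 'auto',                       // 2xx -> INFO, 3xx -> WARN, 4xx/5xx -> ERROR, as tested above
  format: ':method :url'
}));
app.listen(3000);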
|
||||
33 test/consoleAppender-test.js Normal file
@@ -0,0 +1,33 @@
|
||||
"use strict";
|
||||
var assert = require('assert')
|
||||
, vows = require('vows')
|
||||
, layouts = require('../lib/layouts')
|
||||
, sandbox = require('sandboxed-module');
|
||||
|
||||
vows.describe('../lib/appenders/console').addBatch({
|
||||
'appender': {
|
||||
topic: function() {
|
||||
var messages = []
|
||||
, fakeConsole = {
|
||||
log: function(msg) { messages.push(msg); }
|
||||
}
|
||||
, appenderModule = sandbox.require(
|
||||
'../lib/appenders/console',
|
||||
{
|
||||
globals: {
|
||||
'console': fakeConsole
|
||||
}
|
||||
}
|
||||
)
|
||||
, appender = appenderModule.appender(layouts.messagePassThroughLayout);
|
||||
|
||||
appender({ data: ["blah"] });
|
||||
return messages;
|
||||
},
|
||||
|
||||
'should output to console': function(messages) {
|
||||
assert.equal(messages[0], 'blah');
|
||||
}
|
||||
}
|
||||
|
||||
}).exportTo(module);
|
||||
222 test/dateFileAppender-test.js Normal file
@@ -0,0 +1,222 @@
|
||||
"use strict";
|
||||
var vows = require('vows')
|
||||
, assert = require('assert')
|
||||
, path = require('path')
|
||||
, fs = require('fs')
|
||||
, sandbox = require('sandboxed-module')
|
||||
, log4js = require('../lib/log4js')
|
||||
, EOL = require('os').EOL || '\n';
|
||||
|
||||
function removeFile(filename) {
|
||||
return function() {
|
||||
fs.unlink(path.join(__dirname, filename), function(err) {
|
||||
if (err) {
|
||||
console.log("Could not delete ", filename, err);
|
||||
}
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
vows.describe('../lib/appenders/dateFile').addBatch({
|
||||
'appender': {
|
||||
'adding multiple dateFileAppenders': {
|
||||
topic: function () {
|
||||
var listenersCount = process.listeners('exit').length,
|
||||
dateFileAppender = require('../lib/appenders/dateFile'),
|
||||
count = 5,
|
||||
logfile;
|
||||
|
||||
while (count--) {
|
||||
logfile = path.join(__dirname, 'datefa-default-test' + count + '.log');
|
||||
log4js.addAppender(dateFileAppender.appender(logfile));
|
||||
}
|
||||
|
||||
return listenersCount;
|
||||
},
|
||||
teardown: function() {
|
||||
removeFile('datefa-default-test0.log')();
|
||||
removeFile('datefa-default-test1.log')();
|
||||
removeFile('datefa-default-test2.log')();
|
||||
removeFile('datefa-default-test3.log')();
|
||||
removeFile('datefa-default-test4.log')();
|
||||
},
|
||||
|
||||
'should only add one `exit` listener': function (initialCount) {
|
||||
assert.equal(process.listeners('exit').length, initialCount + 1);
|
||||
},
|
||||
|
||||
},
|
||||
|
||||
'exit listener': {
|
||||
topic: function() {
|
||||
var exitListener
|
||||
, openedFiles = []
|
||||
, dateFileAppender = sandbox.require(
|
||||
'../lib/appenders/dateFile',
|
||||
{
|
||||
globals: {
|
||||
process: {
|
||||
on: function(evt, listener) {
|
||||
exitListener = listener;
|
||||
}
|
||||
}
|
||||
},
|
||||
requires: {
|
||||
'../streams': {
|
||||
DateRollingFileStream: function(filename) {
|
||||
openedFiles.push(filename);
|
||||
|
||||
this.end = function() {
|
||||
openedFiles.shift();
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
);
|
||||
for (var i=0; i < 5; i += 1) {
|
||||
dateFileAppender.appender('test' + i);
|
||||
}
|
||||
assert.isNotEmpty(openedFiles);
|
||||
exitListener();
|
||||
return openedFiles;
|
||||
},
|
||||
'should close all open files': function(openedFiles) {
|
||||
assert.isEmpty(openedFiles);
|
||||
}
|
||||
},
|
||||
|
||||
'with default settings': {
|
||||
topic: function() {
|
||||
var that = this,
|
||||
testFile = path.join(__dirname, 'date-appender-default.log'),
|
||||
appender = require('../lib/appenders/dateFile').appender(testFile),
|
||||
logger = log4js.getLogger('default-settings');
|
||||
log4js.clearAppenders();
|
||||
log4js.addAppender(appender, 'default-settings');
|
||||
|
||||
logger.info("This should be in the file.");
|
||||
|
||||
setTimeout(function() {
|
||||
fs.readFile(testFile, "utf8", that.callback);
|
||||
}, 100);
|
||||
|
||||
},
|
||||
teardown: removeFile('date-appender-default.log'),
|
||||
|
||||
'should write to the file': function(contents) {
|
||||
assert.include(contents, 'This should be in the file');
|
||||
},
|
||||
|
||||
'should use the basic layout': function(contents) {
|
||||
assert.match(
|
||||
contents,
|
||||
/\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}\] \[INFO\] default-settings - /
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}).addBatch({
|
||||
'configure': {
|
||||
'with dateFileAppender': {
|
||||
topic: function() {
|
||||
var log4js = require('../lib/log4js')
|
||||
, logger;
|
||||
//this config file defines one file appender (to ./date-file-test.log)
|
||||
//and sets the log level for "tests" to WARN
|
||||
log4js.configure('test/with-dateFile.json');
|
||||
logger = log4js.getLogger('tests');
|
||||
logger.info('this should not be written to the file');
|
||||
logger.warn('this should be written to the file');
|
||||
|
||||
fs.readFile(path.join(__dirname, 'date-file-test.log'), 'utf8', this.callback);
|
||||
},
|
||||
teardown: removeFile('date-file-test.log'),
|
||||
|
||||
'should load appender configuration from a json file': function(err, contents) {
|
||||
if (err) {
|
||||
throw err;
|
||||
}
|
||||
assert.include(contents, 'this should be written to the file' + EOL);
|
||||
assert.equal(contents.indexOf('this should not be written to the file'), -1);
|
||||
}
|
||||
},
|
||||
'with options.alwaysIncludePattern': {
|
||||
topic: function() {
|
||||
var self = this
|
||||
, log4js = require('../lib/log4js')
|
||||
, format = require('../lib/date_format')
|
||||
, logger
|
||||
, options = {
|
||||
"appenders": [
|
||||
{
|
||||
"category": "tests",
|
||||
"type": "dateFile",
|
||||
"filename": "test/date-file-test",
|
||||
"pattern": "-from-MM-dd.log",
|
||||
"alwaysIncludePattern": true,
|
||||
"layout": {
|
||||
"type": "messagePassThrough"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
, thisTime = format.asString(options.appenders[0].pattern, new Date());
|
||||
fs.writeFileSync(
|
||||
path.join(__dirname, 'date-file-test' + thisTime),
|
||||
"this is existing data" + EOL,
|
||||
'utf8'
|
||||
);
|
||||
log4js.clearAppenders();
|
||||
log4js.configure(options);
|
||||
logger = log4js.getLogger('tests');
|
||||
logger.warn('this should be written to the file with the appended date');
|
||||
this.teardown = removeFile('date-file-test' + thisTime);
|
||||
//wait for filesystem to catch up
|
||||
setTimeout(function() {
|
||||
fs.readFile(path.join(__dirname, 'date-file-test' + thisTime), 'utf8', self.callback);
|
||||
}, 100);
|
||||
},
|
||||
'should create file with the correct pattern': function(contents) {
|
||||
assert.include(contents, 'this should be written to the file with the appended date');
|
||||
},
|
||||
'should not overwrite the file on open (bug found in issue #132)': function(contents) {
|
||||
assert.include(contents, 'this is existing data');
|
||||
}
|
||||
},
|
||||
'with cwd option': {
|
||||
topic: function() {
|
||||
var fileOpened,
|
||||
appender = sandbox.require(
|
||||
'../lib/appenders/dateFile',
|
||||
{ requires:
|
||||
{ '../streams':
|
||||
{ DateRollingFileStream:
|
||||
function(file) {
|
||||
fileOpened = file;
|
||||
return {
|
||||
on: function() {},
|
||||
end: function() {}
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
);
|
||||
appender.configure(
|
||||
{
|
||||
filename: "whatever.log",
|
||||
maxLogSize: 10
|
||||
},
|
||||
{ cwd: '/absolute/path/to' }
|
||||
);
|
||||
return fileOpened;
|
||||
},
|
||||
'should prepend options.cwd to config.filename': function(fileOpened) {
|
||||
assert.equal(fileOpened, "/absolute/path/to/whatever.log");
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}).exportTo(module);
|
||||
51 test/date_format-test.js Normal file
@@ -0,0 +1,51 @@
|
||||
"use strict";
|
||||
var vows = require('vows')
|
||||
, assert = require('assert')
|
||||
, dateFormat = require('../lib/date_format');
|
||||
|
||||
vows.describe('date_format').addBatch({
|
||||
'Date extensions': {
|
||||
topic: function() {
|
||||
return new Date(2010, 0, 11, 14, 31, 30, 5);
|
||||
},
|
||||
'should format a date as string using a pattern': function(date) {
|
||||
assert.equal(
|
||||
dateFormat.asString(dateFormat.DATETIME_FORMAT, date),
|
||||
"11 01 2010 14:31:30.005"
|
||||
);
|
||||
},
|
||||
'should default to the ISO8601 format': function(date) {
|
||||
assert.equal(
|
||||
dateFormat.asString(date),
|
||||
'2010-01-11 14:31:30.005'
|
||||
);
|
||||
},
|
||||
'should provide a ISO8601 with timezone offset format': function(date) {
|
||||
date.getTimezoneOffset = function() { return -660; };
|
||||
assert.equal(
|
||||
dateFormat.asString(dateFormat.ISO8601_WITH_TZ_OFFSET_FORMAT, date),
|
||||
"2010-01-11T14:31:30+1100"
|
||||
);
|
||||
|
||||
date.getTimezoneOffset = function() { return 120; };
|
||||
assert.equal(
|
||||
dateFormat.asString(dateFormat.ISO8601_WITH_TZ_OFFSET_FORMAT, date),
|
||||
"2010-01-11T14:31:30-0200"
|
||||
);
|
||||
|
||||
},
|
||||
'should provide a just-the-time format': function(date) {
|
||||
assert.equal(
|
||||
dateFormat.asString(dateFormat.ABSOLUTETIME_FORMAT, date),
|
||||
'14:31:30.005'
|
||||
);
|
||||
},
|
||||
'should provide a custom format': function(date) {
|
||||
date.getTimezoneOffset = function() { return 120; };
|
||||
assert.equal(
|
||||
dateFormat.asString("O.SSS.ss.mm.hh.dd.MM.yy", date),
|
||||
'-0200.005.30.31.14.11.01.10'
|
||||
);
|
||||
}
|
||||
}
|
||||
}).export(module);
|
||||
@@ -1,23 +0,0 @@
|
||||
var vows = require('vows')
|
||||
, assert = require('assert')
|
||||
, dateFormat = require('../lib/date_format');
|
||||
|
||||
vows.describe('date_format').addBatch({
|
||||
'Date extensions': {
|
||||
topic: function() {
|
||||
return new Date(2010, 0, 11, 14, 31, 30, 5);
|
||||
},
|
||||
'should format a date as string using a pattern': function(date) {
|
||||
assert.equal(
|
||||
dateFormat.asString(dateFormat.DATETIME_FORMAT, date),
|
||||
"11 01 2010 14:31:30.005"
|
||||
);
|
||||
},
|
||||
'should default to the ISO8601 format': function(date) {
|
||||
assert.equal(
|
||||
dateFormat.asString(date),
|
||||
'2010-01-11 14:31:30.005'
|
||||
);
|
||||
}
|
||||
}
|
||||
}).export(module);
|
||||
72 test/debug-test.js Normal file
@@ -0,0 +1,72 @@
|
||||
"use strict";
|
||||
var vows = require('vows')
|
||||
, assert = require('assert')
|
||||
, sandbox = require('sandboxed-module')
|
||||
, fakeConsole = {
|
||||
error: function(format, label, message) {
|
||||
this.logged = [ format, label, message ];
|
||||
}
|
||||
}
|
||||
, globals = function(debugValue) {
|
||||
return {
|
||||
process: {
|
||||
env: {
|
||||
'NODE_DEBUG': debugValue
|
||||
}
|
||||
},
|
||||
console: fakeConsole
|
||||
};
|
||||
};
|
||||
|
||||
vows.describe('../lib/debug').addBatch({
|
||||
'when NODE_DEBUG is set to log4js': {
|
||||
topic: function() {
|
||||
var debug = sandbox.require(
|
||||
'../lib/debug',
|
||||
{ 'globals': globals('log4js') }
|
||||
);
|
||||
|
||||
fakeConsole.logged = [];
|
||||
debug('cheese')('biscuits');
|
||||
return fakeConsole.logged;
|
||||
},
|
||||
'it should log to console.error': function(logged) {
|
||||
assert.equal(logged[0], 'LOG4JS: (%s) %s');
|
||||
assert.equal(logged[1], 'cheese');
|
||||
assert.equal(logged[2], 'biscuits');
|
||||
}
|
||||
},
|
||||
|
||||
'when NODE_DEBUG is set to not log4js': {
|
||||
topic: function() {
|
||||
var debug = sandbox.require(
|
||||
'../lib/debug',
|
||||
{ globals: globals('other_module') }
|
||||
);
|
||||
|
||||
fakeConsole.logged = [];
|
||||
debug('cheese')('biscuits');
|
||||
return fakeConsole.logged;
|
||||
},
|
||||
'it should not log to console.error': function(logged) {
|
||||
assert.equal(logged.length, 0);
|
||||
}
|
||||
},
|
||||
|
||||
'when NODE_DEBUG is not set': {
|
||||
topic: function() {
|
||||
var debug = sandbox.require(
|
||||
'../lib/debug',
|
||||
{ globals: globals(null) }
|
||||
);
|
||||
|
||||
fakeConsole.logged = [];
|
||||
debug('cheese')('biscuits');
|
||||
return fakeConsole.logged;
|
||||
},
|
||||
'it should not log to console.error': function(logged) {
|
||||
assert.equal(logged.length, 0);
|
||||
}
|
||||
}
|
||||
|
||||
}).exportTo(module);
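The lib/debug helper exercised here follows a simple pattern; for example (the label is illustrative):
var debug = require('./lib/debug')('myComponent');   // 'myComponent' is an illustrative label
debug('something happened');
// prints "LOG4JS: (myComponent) something happened" to console.error,
// but only when the NODE_DEBUG environment variable contains "log4js"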
|
||||
280 test/fileAppender-test.js Normal file
@@ -0,0 +1,280 @@
|
||||
"use strict";
|
||||
var vows = require('vows')
|
||||
, fs = require('fs')
|
||||
, path = require('path')
|
||||
, sandbox = require('sandboxed-module')
|
||||
, log4js = require('../lib/log4js')
|
||||
, assert = require('assert');
|
||||
|
||||
log4js.clearAppenders();
|
||||
|
||||
function remove(filename) {
|
||||
try {
|
||||
fs.unlinkSync(filename);
|
||||
} catch (e) {
|
||||
//doesn't really matter if it failed
|
||||
}
|
||||
}
|
||||
|
||||
vows.describe('log4js fileAppender').addBatch({
|
||||
'adding multiple fileAppenders': {
|
||||
topic: function () {
|
||||
var listenersCount = process.listeners('exit').length
|
||||
, logger = log4js.getLogger('default-settings')
|
||||
, count = 5, logfile;
|
||||
|
||||
while (count--) {
|
||||
logfile = path.join(__dirname, '/fa-default-test' + count + '.log');
|
||||
log4js.addAppender(require('../lib/appenders/file').appender(logfile), 'default-settings');
|
||||
}
|
||||
|
||||
return listenersCount;
|
||||
},
|
||||
|
||||
'does not add more than one `exit` listeners': function (initialCount) {
|
||||
assert.ok(process.listeners('exit').length <= initialCount + 1);
|
||||
}
|
||||
},
|
||||
|
||||
'exit listener': {
|
||||
topic: function() {
|
||||
var exitListener
|
||||
, openedFiles = []
|
||||
, fileAppender = sandbox.require(
|
||||
'../lib/appenders/file',
|
||||
{
|
||||
globals: {
|
||||
process: {
|
||||
on: function(evt, listener) {
|
||||
exitListener = listener;
|
||||
}
|
||||
}
|
||||
},
|
||||
requires: {
|
||||
'../streams': {
|
||||
RollingFileStream: function(filename) {
|
||||
openedFiles.push(filename);
|
||||
|
||||
this.end = function() {
|
||||
openedFiles.shift();
|
||||
};
|
||||
|
||||
this.on = function() {};
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
);
|
||||
for (var i=0; i < 5; i += 1) {
|
||||
fileAppender.appender('test' + i, null, 100);
|
||||
}
|
||||
assert.isNotEmpty(openedFiles);
|
||||
exitListener();
|
||||
return openedFiles;
|
||||
},
|
||||
'should close all open files': function(openedFiles) {
|
||||
assert.isEmpty(openedFiles);
|
||||
}
|
||||
},
|
||||
|
||||
'with default fileAppender settings': {
|
||||
topic: function() {
|
||||
var that = this
|
||||
, testFile = path.join(__dirname, '/fa-default-test.log')
|
||||
, logger = log4js.getLogger('default-settings');
|
||||
remove(testFile);
|
||||
|
||||
log4js.clearAppenders();
|
||||
log4js.addAppender(require('../lib/appenders/file').appender(testFile), 'default-settings');
|
||||
|
||||
logger.info("This should be in the file.");
|
||||
|
||||
setTimeout(function() {
|
||||
fs.readFile(testFile, "utf8", that.callback);
|
||||
}, 100);
|
||||
},
|
||||
'should write log messages to the file': function(err, fileContents) {
|
||||
assert.include(fileContents, "This should be in the file.\n");
|
||||
},
|
||||
'log messages should be in the basic layout format': function(err, fileContents) {
|
||||
assert.match(
|
||||
fileContents,
|
||||
/\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}\] \[INFO\] default-settings - /
|
||||
);
|
||||
}
|
||||
},
|
||||
'with a max file size and no backups': {
|
||||
topic: function() {
|
||||
var testFile = path.join(__dirname, '/fa-maxFileSize-test.log')
|
||||
, logger = log4js.getLogger('max-file-size')
|
||||
, that = this;
|
||||
remove(testFile);
|
||||
remove(testFile + '.1');
|
||||
//log file of 100 bytes maximum, no backups
|
||||
log4js.clearAppenders();
|
||||
log4js.addAppender(
|
||||
require('../lib/appenders/file').appender(testFile, log4js.layouts.basicLayout, 100, 0),
|
||||
'max-file-size'
|
||||
);
|
||||
logger.info("This is the first log message.");
|
||||
logger.info("This is an intermediate log message.");
|
||||
logger.info("This is the second log message.");
|
||||
//wait for the file system to catch up
|
||||
setTimeout(function() {
|
||||
fs.readFile(testFile, "utf8", that.callback);
|
||||
}, 100);
|
||||
},
|
||||
'log file should only contain the second message': function(err, fileContents) {
|
||||
assert.include(fileContents, "This is the second log message.\n");
|
||||
assert.equal(fileContents.indexOf("This is the first log message."), -1);
|
||||
},
|
||||
'the number of files': {
|
||||
topic: function() {
|
||||
fs.readdir(__dirname, this.callback);
|
||||
},
|
||||
'starting with the test file name should be two': function(err, files) {
|
||||
//there will always be one backup if you've specified a max log size
|
||||
var logFiles = files.filter(
|
||||
function(file) { return file.indexOf('fa-maxFileSize-test.log') > -1; }
|
||||
);
|
||||
assert.equal(logFiles.length, 2);
|
||||
}
|
||||
}
|
||||
},
|
||||
'with a max file size and 2 backups': {
|
||||
topic: function() {
|
||||
var testFile = path.join(__dirname, '/fa-maxFileSize-with-backups-test.log')
|
||||
, logger = log4js.getLogger('max-file-size-backups');
|
||||
remove(testFile);
|
||||
remove(testFile+'.1');
|
||||
remove(testFile+'.2');
|
||||
|
||||
//log file of 50 bytes maximum, 2 backups
|
||||
log4js.clearAppenders();
|
||||
log4js.addAppender(
|
||||
require('../lib/appenders/file').appender(testFile, log4js.layouts.basicLayout, 50, 2),
|
||||
'max-file-size-backups'
|
||||
);
|
||||
logger.info("This is the first log message.");
|
||||
logger.info("This is the second log message.");
|
||||
logger.info("This is the third log message.");
|
||||
logger.info("This is the fourth log message.");
|
||||
var that = this;
|
||||
//give the system a chance to open the stream
|
||||
setTimeout(function() {
|
||||
fs.readdir(__dirname, function(err, files) {
|
||||
if (files) {
|
||||
that.callback(null, files.sort());
|
||||
} else {
|
||||
that.callback(err, files);
|
||||
}
|
||||
});
|
||||
}, 200);
|
||||
},
|
||||
'the log files': {
|
||||
topic: function(files) {
|
||||
var logFiles = files.filter(
|
||||
function(file) { return file.indexOf('fa-maxFileSize-with-backups-test.log') > -1; }
|
||||
);
|
||||
return logFiles;
|
||||
},
|
||||
'should be 3': function (files) {
|
||||
assert.equal(files.length, 3);
|
||||
},
|
||||
'should be named in sequence': function (files) {
|
||||
assert.deepEqual(files, [
|
||||
'fa-maxFileSize-with-backups-test.log',
|
||||
'fa-maxFileSize-with-backups-test.log.1',
|
||||
'fa-maxFileSize-with-backups-test.log.2'
|
||||
]);
|
||||
},
|
||||
'and the contents of the first file': {
|
||||
topic: function(logFiles) {
|
||||
fs.readFile(path.join(__dirname, logFiles[0]), "utf8", this.callback);
|
||||
},
|
||||
'should be the last log message': function(contents) {
|
||||
assert.include(contents, 'This is the fourth log message.');
|
||||
}
|
||||
},
|
||||
'and the contents of the second file': {
|
||||
topic: function(logFiles) {
|
||||
fs.readFile(path.join(__dirname, logFiles[1]), "utf8", this.callback);
|
||||
},
|
||||
'should be the third log message': function(contents) {
|
||||
assert.include(contents, 'This is the third log message.');
|
||||
}
|
||||
},
|
||||
'and the contents of the third file': {
|
||||
topic: function(logFiles) {
|
||||
fs.readFile(path.join(__dirname, logFiles[2]), "utf8", this.callback);
|
||||
},
|
||||
'should be the second log message': function(contents) {
|
||||
assert.include(contents, 'This is the second log message.');
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}).addBatch({
|
||||
'configure' : {
|
||||
'with fileAppender': {
|
||||
topic: function() {
|
||||
var log4js = require('../lib/log4js')
|
||||
, logger;
|
||||
//this config file defines one file appender (to ./tmp-tests.log)
|
||||
//and sets the log level for "tests" to WARN
|
||||
log4js.configure('./test/log4js.json');
|
||||
logger = log4js.getLogger('tests');
|
||||
logger.info('this should not be written to the file');
|
||||
logger.warn('this should be written to the file');
|
||||
|
||||
fs.readFile('tmp-tests.log', 'utf8', this.callback);
|
||||
},
|
||||
'should load appender configuration from a json file': function(err, contents) {
|
||||
assert.include(contents, 'this should be written to the file\n');
|
||||
assert.equal(contents.indexOf('this should not be written to the file'), -1);
|
||||
}
|
||||
}
|
||||
}
|
||||
}).addBatch({
|
||||
'when underlying stream errors': {
|
||||
topic: function() {
|
||||
var consoleArgs
|
||||
, errorHandler
|
||||
, fileAppender = sandbox.require(
|
||||
'../lib/appenders/file',
|
||||
{
|
||||
globals: {
|
||||
console: {
|
||||
error: function() {
|
||||
consoleArgs = Array.prototype.slice.call(arguments);
|
||||
}
|
||||
}
|
||||
},
|
||||
requires: {
|
||||
'../streams': {
|
||||
RollingFileStream: function(filename) {
|
||||
|
||||
this.end = function() {};
|
||||
this.on = function(evt, cb) {
|
||||
if (evt === 'error') {
|
||||
errorHandler = cb;
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
);
|
||||
fileAppender.appender('test1.log', null, 100);
|
||||
errorHandler({ error: 'aargh' });
|
||||
return consoleArgs;
|
||||
},
|
||||
'should log the error to console.error': function(consoleArgs) {
|
||||
assert.isNotEmpty(consoleArgs);
|
||||
assert.equal(consoleArgs[0], 'log4js.fileAppender - Writing to file %s, error happened ');
|
||||
assert.equal(consoleArgs[1], 'test1.log');
|
||||
assert.equal(consoleArgs[2].error, 'aargh');
|
||||
}
|
||||
}
|
||||
|
||||
}).export(module);
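For context, a minimal usage sketch of the file appender exercised by the tests above; it uses only the calls that appear in those tests, with a hypothetical log file name and category, and require paths relative to the test directory.

    var log4js = require('../lib/log4js');
    var file = require('../lib/appenders/file');

    log4js.clearAppenders();
    // 50-byte maximum log size with 2 backups, as in the tests above
    log4js.addAppender(
      file.appender('my-app.log', log4js.layouts.basicLayout, 50, 2),
      'my-category'
    );
    log4js.getLogger('my-category').info('This should end up in my-app.log');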
|
||||
259
test/gelfAppender-test.js
Normal file
@@ -0,0 +1,259 @@
|
||||
"use strict";
|
||||
var vows = require('vows')
|
||||
, assert = require('assert')
|
||||
, sandbox = require('sandboxed-module')
|
||||
, log4js = require('../lib/log4js')
|
||||
, realLayouts = require('../lib/layouts')
|
||||
, setupLogging = function(options, category, compressedLength) {
|
||||
var fakeDgram = {
|
||||
sent: false,
|
||||
socket: {
|
||||
packetLength: 0,
|
||||
closed: false,
|
||||
close: function() {
|
||||
this.closed = true;
|
||||
},
|
||||
send: function(pkt, offset, pktLength, port, host) {
|
||||
fakeDgram.sent = true;
|
||||
this.packet = pkt;
|
||||
this.offset = offset;
|
||||
this.packetLength = pktLength;
|
||||
this.port = port;
|
||||
this.host = host;
|
||||
}
|
||||
},
|
||||
createSocket: function(type) {
|
||||
this.type = type;
|
||||
return this.socket;
|
||||
}
|
||||
}
|
||||
, fakeZlib = {
|
||||
gzip: function(objectToCompress, callback) {
|
||||
fakeZlib.uncompressed = objectToCompress;
|
||||
if (this.shouldError) {
|
||||
callback({ stack: "oh noes" });
|
||||
return;
|
||||
}
|
||||
|
||||
if (compressedLength) {
|
||||
callback(null, { length: compressedLength });
|
||||
} else {
|
||||
callback(null, "I've been compressed");
|
||||
}
|
||||
}
|
||||
}
|
||||
, exitHandler
|
||||
, fakeConsole = {
|
||||
error: function(message) {
|
||||
this.message = message;
|
||||
}
|
||||
}
|
||||
, fakeLayouts = {
|
||||
layout: function(type, options) {
|
||||
this.type = type;
|
||||
this.options = options;
|
||||
return realLayouts.messagePassThroughLayout;
|
||||
},
|
||||
messagePassThroughLayout: realLayouts.messagePassThroughLayout
|
||||
}
|
||||
, appender = sandbox.require('../lib/appenders/gelf', {
|
||||
requires: {
|
||||
dgram: fakeDgram,
|
||||
zlib: fakeZlib,
|
||||
'../layouts': fakeLayouts
|
||||
},
|
||||
globals: {
|
||||
process: {
|
||||
on: function(evt, handler) {
|
||||
if (evt === 'exit') {
|
||||
exitHandler = handler;
|
||||
}
|
||||
}
|
||||
},
|
||||
console: fakeConsole
|
||||
}
|
||||
});
|
||||
|
||||
log4js.clearAppenders();
|
||||
log4js.addAppender(appender.configure(options || {}), category || "gelf-test");
|
||||
return {
|
||||
dgram: fakeDgram,
|
||||
compress: fakeZlib,
|
||||
exitHandler: exitHandler,
|
||||
console: fakeConsole,
|
||||
layouts: fakeLayouts,
|
||||
logger: log4js.getLogger(category || "gelf-test")
|
||||
};
|
||||
};
|
||||
|
||||
vows.describe('log4js gelfAppender').addBatch({
|
||||
|
||||
'with default gelfAppender settings': {
|
||||
topic: function() {
|
||||
var setup = setupLogging();
|
||||
setup.logger.info("This is a test");
|
||||
return setup;
|
||||
},
|
||||
'the dgram packet': {
|
||||
topic: function(setup) {
|
||||
return setup.dgram;
|
||||
},
|
||||
'should be sent via udp to the localhost gelf server': function(dgram) {
|
||||
assert.equal(dgram.type, "udp4");
|
||||
assert.equal(dgram.socket.host, "localhost");
|
||||
assert.equal(dgram.socket.port, 12201);
|
||||
assert.equal(dgram.socket.offset, 0);
|
||||
assert.ok(dgram.socket.packetLength > 0, "Received blank message");
|
||||
},
|
||||
'should be compressed': function(dgram) {
|
||||
assert.equal(dgram.socket.packet, "I've been compressed");
|
||||
}
|
||||
},
|
||||
'the uncompressed log message': {
|
||||
topic: function(setup) {
|
||||
var message = JSON.parse(setup.compress.uncompressed);
|
||||
return message;
|
||||
},
|
||||
'should be in the gelf format': function(message) {
|
||||
assert.equal(message.version, '1.0');
|
||||
assert.equal(message.host, require('os').hostname());
|
||||
assert.equal(message.level, 6); //INFO
|
||||
assert.equal(message.facility, 'nodejs-server');
|
||||
assert.equal(message.full_message, message.short_message);
|
||||
assert.equal(message.full_message, 'This is a test');
|
||||
}
|
||||
}
|
||||
},
|
||||
'with a message longer than 8k': {
|
||||
topic: function() {
|
||||
var setup = setupLogging(undefined, undefined, 10240);
|
||||
setup.logger.info("Blah.");
|
||||
return setup;
|
||||
},
|
||||
'the dgram packet': {
|
||||
topic: function(setup) {
|
||||
return setup.dgram;
|
||||
},
|
||||
'should not be sent': function(dgram) {
|
||||
assert.equal(dgram.sent, false);
|
||||
}
|
||||
}
|
||||
},
|
||||
'with non-default options': {
|
||||
topic: function() {
|
||||
var setup = setupLogging({
|
||||
host: 'somewhere',
|
||||
port: 12345,
|
||||
hostname: 'cheese',
|
||||
facility: 'nonsense'
|
||||
});
|
||||
setup.logger.debug("Just testing.");
|
||||
return setup;
|
||||
},
|
||||
'the dgram packet': {
|
||||
topic: function(setup) {
|
||||
return setup.dgram;
|
||||
},
|
||||
'should pick up the options': function(dgram) {
|
||||
assert.equal(dgram.socket.host, 'somewhere');
|
||||
assert.equal(dgram.socket.port, 12345);
|
||||
}
|
||||
},
|
||||
'the uncompressed packet': {
|
||||
topic: function(setup) {
|
||||
var message = JSON.parse(setup.compress.uncompressed);
|
||||
return message;
|
||||
},
|
||||
'should pick up the options': function(message) {
|
||||
assert.equal(message.host, 'cheese');
|
||||
assert.equal(message.facility, 'nonsense');
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
'on process.exit': {
|
||||
topic: function() {
|
||||
var setup = setupLogging();
|
||||
setup.exitHandler();
|
||||
return setup;
|
||||
},
|
||||
'should close open sockets': function(setup) {
|
||||
assert.isTrue(setup.dgram.socket.closed);
|
||||
}
|
||||
},
|
||||
|
||||
'on zlib error': {
|
||||
topic: function() {
|
||||
var setup = setupLogging();
|
||||
setup.compress.shouldError = true;
|
||||
setup.logger.info('whatever');
|
||||
return setup;
|
||||
},
|
||||
'should output to console.error': function(setup) {
|
||||
assert.equal(setup.console.message, 'oh noes');
|
||||
}
|
||||
},
|
||||
|
||||
'with layout in configuration': {
|
||||
topic: function() {
|
||||
var setup = setupLogging({
|
||||
layout: {
|
||||
type: 'madeuplayout',
|
||||
earlgrey: 'yes, please'
|
||||
}
|
||||
});
|
||||
return setup;
|
||||
},
|
||||
'should pass options to layout': function(setup) {
|
||||
assert.equal(setup.layouts.type, 'madeuplayout');
|
||||
assert.equal(setup.layouts.options.earlgrey, 'yes, please');
|
||||
}
|
||||
},
|
||||
|
||||
'with custom fields options': {
|
||||
topic: function() {
|
||||
var setup = setupLogging({
|
||||
host: 'somewhere',
|
||||
port: 12345,
|
||||
hostname: 'cheese',
|
||||
facility: 'nonsense',
|
||||
customFields: {
|
||||
_every1: 'Hello every one',
|
||||
_every2: 'Hello every two'
|
||||
}
|
||||
});
|
||||
var myFields = {
|
||||
GELF: true,
|
||||
_every2: 'Overwritten!',
|
||||
_myField: 'This is my field!'
|
||||
};
|
||||
setup.logger.debug(myFields, "Just testing.");
|
||||
return setup;
|
||||
},
|
||||
'the dgram packet': {
|
||||
topic: function(setup) {
|
||||
return setup.dgram;
|
||||
},
|
||||
'should pick up the options': function(dgram) {
|
||||
assert.equal(dgram.socket.host, 'somewhere');
|
||||
assert.equal(dgram.socket.port, 12345);
|
||||
}
|
||||
},
|
||||
'the uncompressed packet': {
|
||||
topic: function(setup) {
|
||||
var message = JSON.parse(setup.compress.uncompressed);
|
||||
return message;
|
||||
},
|
||||
'should pick up the options': function(message) {
|
||||
assert.equal(message.host, 'cheese');
|
||||
assert.equal(message.facility, 'nonsense');
|
||||
assert.equal(message._every1, 'Hello every one'); // the default value
|
||||
assert.equal(message._every2, 'Overwritten!'); // the overwritten value
|
||||
assert.equal(message._myField, 'This is my field!'); // the value for this message only
|
||||
assert.equal(message.short_message, 'Just testing.'); // skip the field object
|
||||
assert.equal(message.full_message, 'Just testing.'); // should be the same as short_message
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}).export(module);
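A rough sketch of wiring up the GELF appender under test; the host, custom fields and category below are hypothetical, while the option names and the per-message GELF field object come from the tests above.

    var log4js = require('../lib/log4js');
    var gelf = require('../lib/appenders/gelf');

    log4js.addAppender(gelf.configure({
      host: 'graylog.example.com',      // hypothetical GELF server
      port: 12201,
      hostname: 'my-host',
      facility: 'my-app',
      customFields: { _env: 'staging' } // added to every message
    }), 'gelf-test');

    // per-message fields are passed as a first argument flagged with GELF: true
    log4js.getLogger('gelf-test').info({ GELF: true, _requestId: 'abc' }, 'user logged in');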
|
||||
86
test/global-log-level-test.js
Normal file
@@ -0,0 +1,86 @@
|
||||
"use strict";
|
||||
var vows = require('vows')
|
||||
, assert = require('assert');
|
||||
|
||||
vows.describe('log4js global loglevel').addBatch({
|
||||
'global loglevel' : {
|
||||
topic: function() {
|
||||
var log4js = require('../lib/log4js');
|
||||
return log4js;
|
||||
},
|
||||
|
||||
'set global loglevel on creation': function(log4js) {
|
||||
var log1 = log4js.getLogger('log1');
|
||||
var level = 'OFF';
|
||||
if (log1.level.toString() == level) {
|
||||
level = 'TRACE';
|
||||
}
|
||||
assert.notEqual(log1.level.toString(), level);
|
||||
|
||||
log4js.setGlobalLogLevel(level);
|
||||
assert.equal(log1.level.toString(), level);
|
||||
|
||||
var log2 = log4js.getLogger('log2');
|
||||
assert.equal(log2.level.toString(), level);
|
||||
},
|
||||
|
||||
'global change loglevel': function(log4js) {
|
||||
var log1 = log4js.getLogger('log1');
|
||||
var log2 = log4js.getLogger('log2');
|
||||
var level = 'OFF';
|
||||
if (log1.level.toString() == level) {
|
||||
level = 'TRACE';
|
||||
}
|
||||
assert.notEqual(log1.level.toString(), level);
|
||||
|
||||
log4js.setGlobalLogLevel(level);
|
||||
assert.equal(log1.level.toString(), level);
|
||||
assert.equal(log2.level.toString(), level);
|
||||
},
|
||||
|
||||
'override loglevel': function(log4js) {
|
||||
var log1 = log4js.getLogger('log1');
|
||||
var log2 = log4js.getLogger('log2');
|
||||
var level = 'OFF';
|
||||
if (log1.level.toString() == level) {
|
||||
level = 'TRACE';
|
||||
}
|
||||
assert.notEqual(log1.level.toString(), level);
|
||||
|
||||
var oldLevel = log1.level.toString();
|
||||
assert.equal(log2.level.toString(), oldLevel);
|
||||
|
||||
log2.setLevel(level);
|
||||
assert.equal(log1.level.toString(), oldLevel);
|
||||
assert.equal(log2.level.toString(), level);
|
||||
assert.notEqual(oldLevel, level);
|
||||
|
||||
log2.removeLevel();
|
||||
assert.equal(log1.level.toString(), oldLevel);
|
||||
assert.equal(log2.level.toString(), oldLevel);
|
||||
},
|
||||
|
||||
'preload loglevel': function(log4js) {
|
||||
var log1 = log4js.getLogger('log1');
|
||||
var level = 'OFF';
|
||||
if (log1.level.toString() == level) {
|
||||
level = 'TRACE';
|
||||
}
|
||||
assert.notEqual(log1.level.toString(), level);
|
||||
|
||||
var oldLevel = log1.level.toString();
|
||||
log4js.getLogger('log2').setLevel(level);
|
||||
|
||||
assert.equal(log1.level.toString(), oldLevel);
|
||||
|
||||
// get the same logger again, but via a different variable
|
||||
var log2 = log4js.getLogger('log2');
|
||||
assert.equal(log2.level.toString(), level);
|
||||
assert.notEqual(oldLevel, level);
|
||||
|
||||
log2.removeLevel();
|
||||
assert.equal(log1.level.toString(), oldLevel);
|
||||
assert.equal(log2.level.toString(), oldLevel);
|
||||
}
|
||||
}
|
||||
}).export(module);
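A brief sketch of the level APIs these tests cover; the category names are hypothetical.

    var log4js = require('../lib/log4js');

    log4js.setGlobalLogLevel('WARN');   // applies to existing and future loggers
    var log1 = log4js.getLogger('log1');
    var log2 = log4js.getLogger('log2');

    log2.setLevel('DEBUG');             // per-logger override; log1 stays at WARN
    log2.removeLevel();                 // drop the override, back to the global level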
|
||||
176
test/hookioAppender-test.js
Normal file
@@ -0,0 +1,176 @@
|
||||
"use strict";
|
||||
var vows = require('vows')
|
||||
, assert = require('assert')
|
||||
, sandbox = require('sandboxed-module');
|
||||
|
||||
function fancyResultingHookioAppender(hookNotReady) {
|
||||
var emitHook = !hookNotReady
|
||||
, result = { ons: {}, emissions: {}, logged: [], configs: [] };
|
||||
|
||||
var fakeLog4Js = {
|
||||
appenderMakers: {}
|
||||
};
|
||||
fakeLog4Js.loadAppender = function (appender) {
|
||||
fakeLog4Js.appenderMakers[appender] = function (config) {
|
||||
result.actualLoggerConfig = config;
|
||||
return function log(logEvent) {
|
||||
result.logged.push(logEvent);
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
var fakeHookIo = { Hook: function(config) { result.configs.push(config); } };
|
||||
fakeHookIo.Hook.prototype.start = function () {
|
||||
result.startCalled = true;
|
||||
};
|
||||
fakeHookIo.Hook.prototype.on = function (eventName, functionToExec) {
|
||||
result.ons[eventName] = { functionToExec: functionToExec };
|
||||
if (emitHook && eventName === 'hook::ready') {
|
||||
functionToExec();
|
||||
}
|
||||
};
|
||||
fakeHookIo.Hook.prototype.emit = function (eventName, data) {
|
||||
result.emissions[eventName] = result.emissions[eventName] || [];
|
||||
result.emissions[eventName].push({data: data});
|
||||
var on = '*::' + eventName;
|
||||
if (eventName !== 'hook::ready' && result.ons[on]) {
|
||||
result.ons[on].callingCount =
|
||||
result.ons[on].callingCount ? result.ons[on].callingCount += 1 : 1;
|
||||
result.ons[on].functionToExec(data);
|
||||
}
|
||||
};
|
||||
|
||||
return { theResult: result,
|
||||
theModule: sandbox.require('../lib/appenders/hookio', {
|
||||
requires: {
|
||||
'../log4js': fakeLog4Js,
|
||||
'hook.io': fakeHookIo
|
||||
}
|
||||
})
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
vows.describe('log4js hookioAppender').addBatch({
|
||||
'master': {
|
||||
topic: function() {
|
||||
var fancy = fancyResultingHookioAppender();
|
||||
var logger = fancy.theModule.configure(
|
||||
{
|
||||
name: 'ohno',
|
||||
mode: 'master',
|
||||
'hook-port': 5001,
|
||||
appender: { type: 'file' }
|
||||
}
|
||||
);
|
||||
logger(
|
||||
{
|
||||
level: { levelStr: 'INFO' },
|
||||
data: "ALRIGHTY THEN",
|
||||
startTime: '2011-10-27T03:53:16.031Z'
|
||||
}
|
||||
);
|
||||
logger(
|
||||
{
|
||||
level: { levelStr: 'DEBUG' },
|
||||
data: "OH WOW",
|
||||
startTime: '2011-10-27T04:53:16.031Z'
|
||||
}
|
||||
);
|
||||
return fancy.theResult;
|
||||
},
|
||||
|
||||
'should write to the actual appender': function (result) {
|
||||
assert.isTrue(result.startCalled);
|
||||
assert.equal(result.configs.length, 1);
|
||||
assert.equal(result.configs[0]['hook-port'], 5001);
|
||||
assert.equal(result.logged.length, 2);
|
||||
assert.equal(result.emissions['ohno::log'].length, 2);
|
||||
assert.equal(result.ons['*::ohno::log'].callingCount, 2);
|
||||
},
|
||||
|
||||
'data written should be formatted correctly': function (result) {
|
||||
assert.equal(result.logged[0].level.toString(), 'INFO');
|
||||
assert.equal(result.logged[0].data, 'ALRIGHTY THEN');
|
||||
assert.isTrue(typeof(result.logged[0].startTime) === 'object');
|
||||
assert.equal(result.logged[1].level.toString(), 'DEBUG');
|
||||
assert.equal(result.logged[1].data, 'OH WOW');
|
||||
assert.isTrue(typeof(result.logged[1].startTime) === 'object');
|
||||
},
|
||||
|
||||
'the actual logger should get the right config': function (result) {
|
||||
assert.equal(result.actualLoggerConfig.type, 'file');
|
||||
}
|
||||
},
|
||||
'worker': {
|
||||
'should emit logging events to the master': {
|
||||
topic: function() {
|
||||
var fancy = fancyResultingHookioAppender();
|
||||
var logger = fancy.theModule.configure({
|
||||
name: 'ohno',
|
||||
mode: 'worker',
|
||||
appender: { type: 'file' }
|
||||
});
|
||||
logger({
|
||||
level: { levelStr: 'INFO' },
|
||||
data: "ALRIGHTY THEN",
|
||||
startTime: '2011-10-27T03:53:16.031Z'
|
||||
});
|
||||
logger({
|
||||
level: { levelStr: 'DEBUG' },
|
||||
data: "OH WOW",
|
||||
startTime: '2011-10-27T04:53:16.031Z'
|
||||
});
|
||||
return fancy.theResult;
|
||||
},
|
||||
|
||||
'should not write to the actual appender': function (result) {
|
||||
assert.isTrue(result.startCalled);
|
||||
assert.equal(result.logged.length, 0);
|
||||
assert.equal(result.emissions['ohno::log'].length, 2);
|
||||
assert.isUndefined(result.ons['*::ohno::log']);
|
||||
}
|
||||
}
|
||||
},
|
||||
'when hook not ready': {
|
||||
topic: function() {
|
||||
var fancy = fancyResultingHookioAppender(true)
|
||||
, logger = fancy.theModule.configure({
|
||||
name: 'ohno',
|
||||
mode: 'worker'
|
||||
});
|
||||
|
||||
logger({
|
||||
level: { levelStr: 'INFO' },
|
||||
data: "something",
|
||||
startTime: '2011-10-27T03:45:12.031Z'
|
||||
});
|
||||
return fancy;
|
||||
},
|
||||
'should buffer the log events': function(fancy) {
|
||||
assert.isUndefined(fancy.theResult.emissions['ohno::log']);
|
||||
},
|
||||
},
|
||||
'when hook ready': {
|
||||
topic: function() {
|
||||
var fancy = fancyResultingHookioAppender(true)
|
||||
, logger = fancy.theModule.configure({
|
||||
name: 'ohno',
|
||||
mode: 'worker'
|
||||
});
|
||||
|
||||
logger({
|
||||
level: { levelStr: 'INFO' },
|
||||
data: "something",
|
||||
startTime: '2011-10-27T03:45:12.031Z'
|
||||
});
|
||||
|
||||
fancy.theResult.ons['hook::ready'].functionToExec();
|
||||
return fancy;
|
||||
},
|
||||
'should emit the buffered events': function(fancy) {
|
||||
assert.equal(fancy.theResult.emissions['ohno::log'].length, 1);
|
||||
}
|
||||
}
|
||||
|
||||
}).exportTo(module);
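A sketch of the two configurations exercised above, assuming the same relative require path; 'ohno', the port and the wrapped appender type are simply the values used in the tests.

    var hookio = require('../lib/appenders/hookio');

    // master: starts the hook and writes events through the wrapped appender
    var masterLog = hookio.configure({
      name: 'ohno',
      mode: 'master',
      'hook-port': 5001,
      appender: { type: 'file' }
    });

    // worker: only emits 'ohno::log' events to the master (buffered until hook::ready)
    var workerLog = hookio.configure({ name: 'ohno', mode: 'worker' });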
|
||||
299
test/layouts-test.js
Normal file
@@ -0,0 +1,299 @@
|
||||
"use strict";
|
||||
var vows = require('vows')
|
||||
, assert = require('assert');
|
||||
|
||||
//used for patternLayout tests.
|
||||
function test(args, pattern, value) {
|
||||
var layout = args[0]
|
||||
, event = args[1]
|
||||
, tokens = args[2];
|
||||
|
||||
assert.equal(layout(pattern, tokens)(event), value);
|
||||
}
|
||||
|
||||
vows.describe('log4js layouts').addBatch({
|
||||
'colouredLayout': {
|
||||
topic: function() {
|
||||
return require('../lib/layouts').colouredLayout;
|
||||
},
|
||||
|
||||
'should apply level colour codes to output': function(layout) {
|
||||
var output = layout({
|
||||
data: ["nonsense"],
|
||||
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
|
||||
categoryName: "cheese",
|
||||
level: {
|
||||
toString: function() { return "ERROR"; }
|
||||
}
|
||||
});
|
||||
assert.equal(output, '\x1B[31m[2010-12-05 14:18:30.045] [ERROR] cheese - \x1B[39mnonsense');
|
||||
},
|
||||
'should support the console.log format for the message': function(layout) {
|
||||
var output = layout({
|
||||
data: ["thing %d", 2],
|
||||
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
|
||||
categoryName: "cheese",
|
||||
level: {
|
||||
toString: function() { return "ERROR"; }
|
||||
}
|
||||
});
|
||||
assert.equal(output, '\x1B[31m[2010-12-05 14:18:30.045] [ERROR] cheese - \x1B[39mthing 2');
|
||||
}
|
||||
},
|
||||
|
||||
'messagePassThroughLayout': {
|
||||
topic: function() {
|
||||
return require('../lib/layouts').messagePassThroughLayout;
|
||||
},
|
||||
'should take a logevent and output only the message' : function(layout) {
|
||||
assert.equal(layout({
|
||||
data: ["nonsense"],
|
||||
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
|
||||
categoryName: "cheese",
|
||||
level: {
|
||||
colour: "green",
|
||||
toString: function() { return "ERROR"; }
|
||||
}
|
||||
}), "nonsense");
|
||||
},
|
||||
'should support the console.log format for the message' : function(layout) {
|
||||
assert.equal(layout({
|
||||
data: ["thing %d", 1, "cheese"],
|
||||
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
|
||||
categoryName: "cheese",
|
||||
level : {
|
||||
colour: "green",
|
||||
toString: function() { return "ERROR"; }
|
||||
}
|
||||
}), "thing 1 cheese");
|
||||
},
|
||||
'should output the first item even if it is not a string': function(layout) {
|
||||
assert.equal(layout({
|
||||
data: [ { thing: 1} ],
|
||||
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
|
||||
categoryName: "cheese",
|
||||
level: {
|
||||
colour: "green",
|
||||
toString: function() { return "ERROR"; }
|
||||
}
|
||||
}), "{ thing: 1 }");
|
||||
},
|
||||
'should print the stack of a passed error object': function(layout) {
|
||||
assert.isArray(layout({
|
||||
data: [ new Error() ],
|
||||
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
|
||||
categoryName: "cheese",
|
||||
level: {
|
||||
colour: "green",
|
||||
toString: function() { return "ERROR"; }
|
||||
}
|
||||
}).match(/Error\s+at Object\..*\s+\((.*)test[\\\/]layouts-test\.js\:\d+\:\d+\)\s+at runTest/)
|
||||
, 'regexp did not return a match');
|
||||
},
|
||||
'with passed augmented errors': {
|
||||
topic: function(layout){
|
||||
var e = new Error("My Unique Error Message");
|
||||
e.augmented = "My Unique attribute value";
|
||||
e.augObj = { at1: "at2" };
|
||||
return layout({
|
||||
data: [ e ],
|
||||
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
|
||||
categoryName: "cheese",
|
||||
level: {
|
||||
colour: "green",
|
||||
toString: function() { return "ERROR"; }
|
||||
}
|
||||
});
|
||||
},
|
||||
'should print the contained error message': function(layoutOutput) {
|
||||
var m = layoutOutput.match(/\{ \[Error: My Unique Error Message\]/);
|
||||
assert.isArray(m);
|
||||
},
|
||||
'should print the augmented string attributes of the error': function(layoutOutput) {
|
||||
var m = layoutOutput.match(/augmented:\s'My Unique attribute value'/);
|
||||
assert.isArray(m);
|
||||
},
|
||||
'should print the augmented object attributes of the error': function(layoutOutput) {
|
||||
var m = layoutOutput.match(/augObj:\s\{ at1: 'at2' \}/);
|
||||
assert.isArray(m);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
},
|
||||
|
||||
'basicLayout': {
|
||||
topic: function() {
|
||||
var layout = require('../lib/layouts').basicLayout,
|
||||
event = {
|
||||
data: ['this is a test'],
|
||||
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
|
||||
categoryName: "tests",
|
||||
level: {
|
||||
toString: function() { return "DEBUG"; }
|
||||
}
|
||||
};
|
||||
return [layout, event];
|
||||
},
|
||||
'should take a logevent and output a formatted string': function(args) {
|
||||
var layout = args[0], event = args[1];
|
||||
assert.equal(layout(event), "[2010-12-05 14:18:30.045] [DEBUG] tests - this is a test");
|
||||
},
|
||||
'should output a stacktrace and message if the event has an error attached': function(args) {
|
||||
var layout = args[0], event = args[1], output, lines,
|
||||
error = new Error("Some made-up error"),
|
||||
stack = error.stack.split(/\n/);
|
||||
|
||||
event.data = ['this is a test', error];
|
||||
output = layout(event);
|
||||
lines = output.split(/\n/);
|
||||
|
||||
assert.equal(lines.length - 1, stack.length);
|
||||
assert.equal(
|
||||
lines[0],
|
||||
"[2010-12-05 14:18:30.045] [DEBUG] tests - this is a test [Error: Some made-up error]"
|
||||
);
|
||||
|
||||
for (var i = 1; i < stack.length; i++) {
|
||||
assert.equal(lines[i+2], stack[i+1]);
|
||||
}
|
||||
},
|
||||
'should output any extra data in the log event as util.inspect strings': function(args) {
|
||||
var layout = args[0], event = args[1], output, lines;
|
||||
event.data = ['this is a test', {
|
||||
name: 'Cheese',
|
||||
message: 'Gorgonzola smells.'
|
||||
}];
|
||||
output = layout(event);
|
||||
assert.equal(
|
||||
output,
|
||||
"[2010-12-05 14:18:30.045] [DEBUG] tests - this is a test " +
|
||||
"{ name: 'Cheese', message: 'Gorgonzola smells.' }"
|
||||
);
|
||||
}
|
||||
},
|
||||
|
||||
'patternLayout': {
|
||||
topic: function() {
|
||||
var event = {
|
||||
data: ['this is a test'],
|
||||
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
|
||||
categoryName: "multiple.levels.of.tests",
|
||||
level: {
|
||||
toString: function() { return "DEBUG"; }
|
||||
}
|
||||
}, layout = require('../lib/layouts').patternLayout
|
||||
, tokens = {
|
||||
testString: 'testStringToken',
|
||||
testFunction: function() { return 'testFunctionToken'; },
|
||||
fnThatUsesLogEvent: function(logEvent) { return logEvent.level.toString(); }
|
||||
};
|
||||
|
||||
//override getTimezoneOffset
|
||||
event.startTime.getTimezoneOffset = function() { return 0; };
|
||||
return [layout, event, tokens];
|
||||
},
|
||||
|
||||
'should default to "time logLevel loggerName - message"': function(args) {
|
||||
test(args, null, "14:18:30 DEBUG multiple.levels.of.tests - this is a test\n");
|
||||
},
|
||||
'%r should output time only': function(args) {
|
||||
test(args, '%r', '14:18:30');
|
||||
},
|
||||
'%p should output the log level': function(args) {
|
||||
test(args, '%p', 'DEBUG');
|
||||
},
|
||||
'%c should output the log category': function(args) {
|
||||
test(args, '%c', 'multiple.levels.of.tests');
|
||||
},
|
||||
'%m should output the log data': function(args) {
|
||||
test(args, '%m', 'this is a test');
|
||||
},
|
||||
'%n should output a new line': function(args) {
|
||||
test(args, '%n', '\n');
|
||||
},
|
||||
'%h should output hostname' : function(args) {
|
||||
test(args, '%h', require('os').hostname().toString());
|
||||
},
|
||||
'%c should handle category names like java-style package names': function(args) {
|
||||
test(args, '%c{1}', 'tests');
|
||||
test(args, '%c{2}', 'of.tests');
|
||||
test(args, '%c{3}', 'levels.of.tests');
|
||||
test(args, '%c{4}', 'multiple.levels.of.tests');
|
||||
test(args, '%c{5}', 'multiple.levels.of.tests');
|
||||
test(args, '%c{99}', 'multiple.levels.of.tests');
|
||||
},
|
||||
'%d should output the date in ISO8601 format': function(args) {
|
||||
test(args, '%d', '2010-12-05 14:18:30.045');
|
||||
},
|
||||
'%d should allow for format specification': function(args) {
|
||||
test(args, '%d{ISO8601_WITH_TZ_OFFSET}', '2010-12-05T14:18:30-0000');
|
||||
test(args, '%d{ISO8601}', '2010-12-05 14:18:30.045');
|
||||
test(args, '%d{ABSOLUTE}', '14:18:30.045');
|
||||
test(args, '%d{DATE}', '05 12 2010 14:18:30.045');
|
||||
test(args, '%d{yy MM dd hh mm ss}', '10 12 05 14 18 30');
|
||||
test(args, '%d{yyyy MM dd}', '2010 12 05');
|
||||
test(args, '%d{yyyy MM dd hh mm ss SSS}', '2010 12 05 14 18 30 045');
|
||||
},
|
||||
'%% should output %': function(args) {
|
||||
test(args, '%%', '%');
|
||||
},
|
||||
'should output anything not preceded by % as literal': function(args) {
|
||||
test(args, 'blah blah blah', 'blah blah blah');
|
||||
},
|
||||
'should output the original string if no replacer matches the token': function(args) {
|
||||
test(args, '%a{3}', 'a{3}');
|
||||
},
|
||||
'should handle complicated patterns': function(args) {
|
||||
test(args,
|
||||
'%m%n %c{2} at %d{ABSOLUTE} cheese %p%n',
|
||||
'this is a test\n of.tests at 14:18:30.045 cheese DEBUG\n'
|
||||
);
|
||||
},
|
||||
'should truncate fields if specified': function(args) {
|
||||
test(args, '%.4m', 'this');
|
||||
test(args, '%.7m', 'this is');
|
||||
test(args, '%.9m', 'this is a');
|
||||
test(args, '%.14m', 'this is a test');
|
||||
test(args, '%.2919102m', 'this is a test');
|
||||
},
|
||||
'should pad fields if specified': function(args) {
|
||||
test(args, '%10p', ' DEBUG');
|
||||
test(args, '%8p', ' DEBUG');
|
||||
test(args, '%6p', ' DEBUG');
|
||||
test(args, '%4p', 'DEBUG');
|
||||
test(args, '%-4p', 'DEBUG');
|
||||
test(args, '%-6p', 'DEBUG ');
|
||||
test(args, '%-8p', 'DEBUG ');
|
||||
test(args, '%-10p', 'DEBUG ');
|
||||
},
|
||||
'%[%r%] should output colored time': function(args) {
|
||||
test(args, '%[%r%]', '\x1B[36m14:18:30\x1B[39m');
|
||||
},
|
||||
'%x{testString} should output the string stored in tokens': function(args) {
|
||||
test(args, '%x{testString}', 'testStringToken');
|
||||
},
|
||||
'%x{testFunction} should output the result of the function stored in tokens': function(args) {
|
||||
test(args, '%x{testFunction}', 'testFunctionToken');
|
||||
},
|
||||
'%x{doesNotExist} should output the string stored in tokens': function(args) {
|
||||
test(args, '%x{doesNotExist}', '%x{doesNotExist}');
|
||||
},
|
||||
'%x{fnThatUsesLogEvent} should be able to use the logEvent': function(args) {
|
||||
test(args, '%x{fnThatUsesLogEvent}', 'DEBUG');
|
||||
},
|
||||
'%x should output the string stored in tokens': function(args) {
|
||||
test(args, '%x', '%x');
|
||||
},
|
||||
},
|
||||
'layout makers': {
|
||||
topic: require('../lib/layouts'),
|
||||
'should have a maker for each layout': function(layouts) {
|
||||
assert.ok(layouts.layout("messagePassThrough"));
|
||||
assert.ok(layouts.layout("basic"));
|
||||
assert.ok(layouts.layout("colored"));
|
||||
assert.ok(layouts.layout("coloured"));
|
||||
assert.ok(layouts.layout("pattern"));
|
||||
}
|
||||
}
|
||||
}).export(module);
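For reference, a sketch of calling patternLayout directly, in the style of the test() helper above; the pattern, the token name and the event fields are illustrative only.

    var layouts = require('../lib/layouts');

    var tokens = { pid: function() { return process.pid; } };
    var layout = layouts.patternLayout('%d{ABSOLUTE} [%p] %c %x{pid} - %m%n', tokens);

    // the layout function expects a log event shaped like the ones built above
    var output = layout({
      data: ['something happened'],
      startTime: new Date(),
      categoryName: 'my.category',
      level: { toString: function() { return 'INFO'; } }
    });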
|
||||
201
test/layouts.js
@@ -1,201 +0,0 @@
|
||||
var vows = require('vows'),
|
||||
assert = require('assert');
|
||||
|
||||
//used for patternLayout tests.
|
||||
function test(args, pattern, value) {
|
||||
var layout = args[0]
|
||||
, event = args[1];
|
||||
|
||||
assert.equal(layout(pattern)(event), value);
|
||||
}
|
||||
|
||||
vows.describe('log4js layouts').addBatch({
|
||||
'colouredLayout': {
|
||||
topic: function() {
|
||||
return require('../lib/layouts').colouredLayout;
|
||||
},
|
||||
|
||||
'should apply level colour codes to output': function(layout) {
|
||||
var output = layout({
|
||||
data: ["nonsense"],
|
||||
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
|
||||
categoryName: "cheese",
|
||||
level: {
|
||||
toString: function() { return "ERROR"; }
|
||||
}
|
||||
});
|
||||
assert.equal(output, '\033[31m[2010-12-05 14:18:30.045] [ERROR] cheese - \033[39mnonsense');
|
||||
},
|
||||
|
||||
'should support the console.log format for the message': function(layout) {
|
||||
var output = layout({
|
||||
data: ["thing %d", 2],
|
||||
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
|
||||
categoryName: "cheese",
|
||||
level: {
|
||||
toString: function() { return "ERROR"; }
|
||||
}
|
||||
});
|
||||
assert.equal(output, '\033[31m[2010-12-05 14:18:30.045] [ERROR] cheese - \033[39mthing 2');
|
||||
}
|
||||
},
|
||||
|
||||
'messagePassThroughLayout': {
|
||||
topic: function() {
|
||||
return require('../lib/layouts').messagePassThroughLayout;
|
||||
},
|
||||
'should take a logevent and output only the message' : function(layout) {
|
||||
assert.equal(layout({
|
||||
data: ["nonsense"],
|
||||
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
|
||||
categoryName: "cheese",
|
||||
level: {
|
||||
colour: "green",
|
||||
toString: function() { return "ERROR"; }
|
||||
}
|
||||
}), "nonsense");
|
||||
},
|
||||
'should support the console.log format for the message' : function(layout) {
|
||||
assert.equal(layout({
|
||||
data: ["thing %d", 1]
|
||||
, startTime: new Date(2010, 11, 5, 14, 18, 30, 45)
|
||||
, categoryName: "cheese"
|
||||
, level : {
|
||||
colour: "green"
|
||||
, toString: function() { return "ERROR"; }
|
||||
}
|
||||
}), "thing 1");
|
||||
}
|
||||
},
|
||||
|
||||
'basicLayout': {
|
||||
topic: function() {
|
||||
var layout = require('../lib/layouts').basicLayout,
|
||||
event = {
|
||||
data: ['this is a test'],
|
||||
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
|
||||
categoryName: "tests",
|
||||
level: {
|
||||
toString: function() { return "DEBUG"; }
|
||||
}
|
||||
};
|
||||
return [layout, event];
|
||||
},
|
||||
'should take a logevent and output a formatted string': function(args) {
|
||||
var layout = args[0], event = args[1];
|
||||
assert.equal(layout(event), "[2010-12-05 14:18:30.045] [DEBUG] tests - this is a test");
|
||||
},
|
||||
'should output a stacktrace, message if the event has an error attached': function(args) {
|
||||
var layout = args[0], event = args[1], output, lines,
|
||||
error = new Error("Some made-up error"),
|
||||
stack = error.stack.split(/\n/);
|
||||
|
||||
event.data = ['this is a test', error];
|
||||
output = layout(event);
|
||||
lines = output.split(/\n/);
|
||||
|
||||
assert.length(lines, stack.length+1);
|
||||
assert.equal(lines[0], "[2010-12-05 14:18:30.045] [DEBUG] tests - this is a test");
|
||||
assert.equal(lines[1], "Error: Some made-up error");
|
||||
for (var i = 1; i < stack.length; i++) {
|
||||
assert.equal(lines[i+1], stack[i]);
|
||||
}
|
||||
},
|
||||
'should output any extra data in the log event as util.inspect strings': function(args) {
|
||||
var layout = args[0], event = args[1], output, lines;
|
||||
event.data = ['this is a test', {
|
||||
name: 'Cheese',
|
||||
message: 'Gorgonzola smells.'
|
||||
}];
|
||||
output = layout(event);
|
||||
lines = output.split(/\n/);
|
||||
|
||||
assert.length(lines, 2);
|
||||
assert.equal(lines[0], "[2010-12-05 14:18:30.045] [DEBUG] tests - this is a test");
|
||||
assert.equal(lines[1], "{ name: 'Cheese', message: 'Gorgonzola smells.' }");
|
||||
}
|
||||
},
|
||||
|
||||
'patternLayout': {
|
||||
topic: function() {
|
||||
var event = {
|
||||
data: ['this is a test'],
|
||||
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
|
||||
categoryName: "multiple.levels.of.tests",
|
||||
level: {
|
||||
toString: function() { return "DEBUG"; }
|
||||
}
|
||||
}, layout = require('../lib/layouts').patternLayout;
|
||||
return [layout, event];
|
||||
},
|
||||
|
||||
'should default to "time logLevel loggerName - message"': function(args) {
|
||||
test(args, null, "14:18:30 DEBUG multiple.levels.of.tests - this is a test\n");
|
||||
},
|
||||
'%r should output time only': function(args) {
|
||||
test(args, '%r', '14:18:30');
|
||||
},
|
||||
'%p should output the log level': function(args) {
|
||||
test(args, '%p', 'DEBUG');
|
||||
},
|
||||
'%c should output the log category': function(args) {
|
||||
test(args, '%c', 'multiple.levels.of.tests');
|
||||
},
|
||||
'%m should output the log data': function(args) {
|
||||
test(args, '%m', 'this is a test');
|
||||
},
|
||||
'%n should output a new line': function(args) {
|
||||
test(args, '%n', '\n');
|
||||
},
|
||||
'%c should handle category names like java-style package names': function(args) {
|
||||
test(args, '%c{1}', 'tests');
|
||||
test(args, '%c{2}', 'of.tests');
|
||||
test(args, '%c{3}', 'levels.of.tests');
|
||||
test(args, '%c{4}', 'multiple.levels.of.tests');
|
||||
test(args, '%c{5}', 'multiple.levels.of.tests');
|
||||
test(args, '%c{99}', 'multiple.levels.of.tests');
|
||||
},
|
||||
'%d should output the date in ISO8601 format': function(args) {
|
||||
test(args, '%d', '2010-12-05 14:18:30.045');
|
||||
},
|
||||
'%d should allow for format specification': function(args) {
|
||||
test(args, '%d{ISO8601}', '2010-12-05 14:18:30.045');
|
||||
test(args, '%d{ABSOLUTE}', '14:18:30.045');
|
||||
test(args, '%d{DATE}', '05 12 2010 14:18:30.045');
|
||||
test(args, '%d{yyyy MM dd}', '2010 12 05');
|
||||
test(args, '%d{yyyy MM dd hh mm ss SSS}', '2010 12 05 14 18 30 045');
|
||||
},
|
||||
'%% should output %': function(args) {
|
||||
test(args, '%%', '%');
|
||||
},
|
||||
'should output anything not preceded by % as literal': function(args) {
|
||||
test(args, 'blah blah blah', 'blah blah blah');
|
||||
},
|
||||
'should handle complicated patterns': function(args) {
|
||||
test(args,
|
||||
'%m%n %c{2} at %d{ABSOLUTE} cheese %p%n',
|
||||
'this is a test\n of.tests at 14:18:30.045 cheese DEBUG\n'
|
||||
);
|
||||
},
|
||||
'should truncate fields if specified': function(args) {
|
||||
test(args, '%.4m', 'this');
|
||||
test(args, '%.7m', 'this is');
|
||||
test(args, '%.9m', 'this is a');
|
||||
test(args, '%.14m', 'this is a test');
|
||||
test(args, '%.2919102m', 'this is a test');
|
||||
},
|
||||
'should pad fields if specified': function(args) {
|
||||
test(args, '%10p', ' DEBUG');
|
||||
test(args, '%8p', ' DEBUG');
|
||||
test(args, '%6p', ' DEBUG');
|
||||
test(args, '%4p', 'DEBUG');
|
||||
test(args, '%-4p', 'DEBUG');
|
||||
test(args, '%-6p', 'DEBUG ');
|
||||
test(args, '%-8p', 'DEBUG ');
|
||||
test(args, '%-10p', 'DEBUG ');
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}).export(module);
|
||||
|
||||
404
test/levels-test.js
Normal file
@@ -0,0 +1,404 @@
|
||||
"use strict";
|
||||
var vows = require('vows')
|
||||
, assert = require('assert')
|
||||
, levels = require('../lib/levels');
|
||||
|
||||
function assertThat(level) {
|
||||
function assertForEach(assertion, test, otherLevels) {
|
||||
otherLevels.forEach(function(other) {
|
||||
assertion.call(assert, test.call(level, other));
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
isLessThanOrEqualTo: function(levels) {
|
||||
assertForEach(assert.isTrue, level.isLessThanOrEqualTo, levels);
|
||||
},
|
||||
isNotLessThanOrEqualTo: function(levels) {
|
||||
assertForEach(assert.isFalse, level.isLessThanOrEqualTo, levels);
|
||||
},
|
||||
isGreaterThanOrEqualTo: function(levels) {
|
||||
assertForEach(assert.isTrue, level.isGreaterThanOrEqualTo, levels);
|
||||
},
|
||||
isNotGreaterThanOrEqualTo: function(levels) {
|
||||
assertForEach(assert.isFalse, level.isGreaterThanOrEqualTo, levels);
|
||||
},
|
||||
isEqualTo: function(levels) {
|
||||
assertForEach(assert.isTrue, level.isEqualTo, levels);
|
||||
},
|
||||
isNotEqualTo: function(levels) {
|
||||
assertForEach(assert.isFalse, level.isEqualTo, levels);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
vows.describe('levels').addBatch({
|
||||
'values': {
|
||||
topic: levels,
|
||||
'should define some levels': function(levels) {
|
||||
assert.isNotNull(levels.ALL);
|
||||
assert.isNotNull(levels.TRACE);
|
||||
assert.isNotNull(levels.DEBUG);
|
||||
assert.isNotNull(levels.INFO);
|
||||
assert.isNotNull(levels.WARN);
|
||||
assert.isNotNull(levels.ERROR);
|
||||
assert.isNotNull(levels.FATAL);
|
||||
assert.isNotNull(levels.OFF);
|
||||
},
|
||||
'ALL': {
|
||||
topic: levels.ALL,
|
||||
'should be less than the other levels': function(all) {
|
||||
assertThat(all).isLessThanOrEqualTo(
|
||||
[
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.OFF
|
||||
]
|
||||
);
|
||||
},
|
||||
'should be greater than no levels': function(all) {
|
||||
assertThat(all).isNotGreaterThanOrEqualTo(
|
||||
[
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.OFF
|
||||
]
|
||||
);
|
||||
},
|
||||
'should only be equal to ALL': function(all) {
|
||||
assertThat(all).isEqualTo([levels.toLevel("ALL")]);
|
||||
assertThat(all).isNotEqualTo(
|
||||
[
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.OFF
|
||||
]
|
||||
);
|
||||
}
|
||||
},
|
||||
'TRACE': {
|
||||
topic: levels.TRACE,
|
||||
'should be less than DEBUG': function(trace) {
|
||||
assertThat(trace).isLessThanOrEqualTo(
|
||||
[
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.OFF
|
||||
]
|
||||
);
|
||||
assertThat(trace).isNotLessThanOrEqualTo([levels.ALL]);
|
||||
},
|
||||
'should be greater than ALL': function(trace) {
|
||||
assertThat(trace).isGreaterThanOrEqualTo([levels.ALL, levels.TRACE]);
|
||||
assertThat(trace).isNotGreaterThanOrEqualTo(
|
||||
[
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.OFF
|
||||
]
|
||||
);
|
||||
},
|
||||
'should only be equal to TRACE': function(trace) {
|
||||
assertThat(trace).isEqualTo([levels.toLevel("TRACE")]);
|
||||
assertThat(trace).isNotEqualTo(
|
||||
[
|
||||
levels.ALL,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.OFF
|
||||
]
|
||||
);
|
||||
}
|
||||
},
|
||||
'DEBUG': {
|
||||
topic: levels.DEBUG,
|
||||
'should be less than INFO': function(debug) {
|
||||
assertThat(debug).isLessThanOrEqualTo(
|
||||
[
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.OFF
|
||||
]
|
||||
);
|
||||
assertThat(debug).isNotLessThanOrEqualTo([levels.ALL, levels.TRACE]);
|
||||
},
|
||||
'should be greater than TRACE': function(debug) {
|
||||
assertThat(debug).isGreaterThanOrEqualTo([levels.ALL, levels.TRACE]);
|
||||
assertThat(debug).isNotGreaterThanOrEqualTo(
|
||||
[
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.OFF
|
||||
]
|
||||
);
|
||||
},
|
||||
'should only be equal to DEBUG': function(trace) {
|
||||
assertThat(trace).isEqualTo([levels.toLevel("DEBUG")]);
|
||||
assertThat(trace).isNotEqualTo(
|
||||
[
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.OFF
|
||||
]
|
||||
);
|
||||
}
|
||||
},
|
||||
'INFO': {
|
||||
topic: levels.INFO,
|
||||
'should be less than WARN': function(info) {
|
||||
assertThat(info).isLessThanOrEqualTo([
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.OFF
|
||||
]);
|
||||
assertThat(info).isNotLessThanOrEqualTo([levels.ALL, levels.TRACE, levels.DEBUG]);
|
||||
},
|
||||
'should be greater than DEBUG': function(info) {
|
||||
assertThat(info).isGreaterThanOrEqualTo([levels.ALL, levels.TRACE, levels.DEBUG]);
|
||||
assertThat(info).isNotGreaterThanOrEqualTo([
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.OFF
|
||||
]);
|
||||
},
|
||||
'should only be equal to INFO': function(trace) {
|
||||
assertThat(trace).isEqualTo([levels.toLevel("INFO")]);
|
||||
assertThat(trace).isNotEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.OFF
|
||||
]);
|
||||
}
|
||||
},
|
||||
'WARN': {
|
||||
topic: levels.WARN,
|
||||
'should be less than ERROR': function(warn) {
|
||||
assertThat(warn).isLessThanOrEqualTo([levels.ERROR, levels.FATAL, levels.OFF]);
|
||||
assertThat(warn).isNotLessThanOrEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO
|
||||
]);
|
||||
},
|
||||
'should be greater than INFO': function(warn) {
|
||||
assertThat(warn).isGreaterThanOrEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO
|
||||
]);
|
||||
assertThat(warn).isNotGreaterThanOrEqualTo([levels.ERROR, levels.FATAL, levels.OFF]);
|
||||
},
|
||||
'should only be equal to WARN': function(trace) {
|
||||
assertThat(trace).isEqualTo([levels.toLevel("WARN")]);
|
||||
assertThat(trace).isNotEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.OFF
|
||||
]);
|
||||
}
|
||||
},
|
||||
'ERROR': {
|
||||
topic: levels.ERROR,
|
||||
'should be less than FATAL': function(error) {
|
||||
assertThat(error).isLessThanOrEqualTo([levels.FATAL, levels.OFF]);
|
||||
assertThat(error).isNotLessThanOrEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN
|
||||
]);
|
||||
},
|
||||
'should be greater than WARN': function(error) {
|
||||
assertThat(error).isGreaterThanOrEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN
|
||||
]);
|
||||
assertThat(error).isNotGreaterThanOrEqualTo([levels.FATAL, levels.OFF]);
|
||||
},
|
||||
'should only be equal to ERROR': function(trace) {
|
||||
assertThat(trace).isEqualTo([levels.toLevel("ERROR")]);
|
||||
assertThat(trace).isNotEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.FATAL,
|
||||
levels.OFF
|
||||
]);
|
||||
}
|
||||
},
|
||||
'FATAL': {
|
||||
topic: levels.FATAL,
|
||||
'should be less than OFF': function(fatal) {
|
||||
assertThat(fatal).isLessThanOrEqualTo([levels.OFF]);
|
||||
assertThat(fatal).isNotLessThanOrEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR
|
||||
]);
|
||||
},
|
||||
'should be greater than ERROR': function(fatal) {
|
||||
assertThat(fatal).isGreaterThanOrEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR
|
||||
]);
|
||||
assertThat(fatal).isNotGreaterThanOrEqualTo([levels.OFF]);
|
||||
},
|
||||
'should only be equal to FATAL': function(fatal) {
|
||||
assertThat(fatal).isEqualTo([levels.toLevel("FATAL")]);
|
||||
assertThat(fatal).isNotEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.OFF
|
||||
]);
|
||||
}
|
||||
},
|
||||
'OFF': {
|
||||
topic: levels.OFF,
|
||||
'should not be less than anything': function(off) {
|
||||
assertThat(off).isNotLessThanOrEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL
|
||||
]);
|
||||
},
|
||||
'should be greater than everything': function(off) {
|
||||
assertThat(off).isGreaterThanOrEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL
|
||||
]);
|
||||
},
|
||||
'should only be equal to OFF': function(off) {
|
||||
assertThat(off).isEqualTo([levels.toLevel("OFF")]);
|
||||
assertThat(off).isNotEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL
|
||||
]);
|
||||
}
|
||||
}
|
||||
},
|
||||
'isGreaterThanOrEqualTo': {
|
||||
topic: levels.INFO,
|
||||
'should handle string arguments': function(info) {
|
||||
assertThat(info).isGreaterThanOrEqualTo(["all", "trace", "debug"]);
|
||||
assertThat(info).isNotGreaterThanOrEqualTo(['warn', 'ERROR', 'Fatal', 'off']);
|
||||
}
|
||||
},
|
||||
'isLessThanOrEqualTo': {
|
||||
topic: levels.INFO,
|
||||
'should handle string arguments': function(info) {
|
||||
assertThat(info).isNotLessThanOrEqualTo(["all", "trace", "debug"]);
|
||||
assertThat(info).isLessThanOrEqualTo(['warn', 'ERROR', 'Fatal', 'off']);
|
||||
}
|
||||
},
|
||||
'isEqualTo': {
|
||||
topic: levels.INFO,
|
||||
'should handle string arguments': function(info) {
|
||||
assertThat(info).isEqualTo(["info", "INFO", "iNfO"]);
|
||||
}
|
||||
},
|
||||
'toLevel': {
|
||||
'with lowercase argument': {
|
||||
topic: levels.toLevel("debug"),
|
||||
'should take the string and return the corresponding level': function(level) {
|
||||
assert.equal(level, levels.DEBUG);
|
||||
}
|
||||
},
|
||||
'with uppercase argument': {
|
||||
topic: levels.toLevel("DEBUG"),
|
||||
'should take the string and return the corresponding level': function(level) {
|
||||
assert.equal(level, levels.DEBUG);
|
||||
}
|
||||
},
|
||||
'with varying case': {
|
||||
topic: levels.toLevel("DeBuG"),
|
||||
'should take the string and return the corresponding level': function(level) {
|
||||
assert.equal(level, levels.DEBUG);
|
||||
}
|
||||
},
|
||||
'with unrecognised argument': {
|
||||
topic: levels.toLevel("cheese"),
|
||||
'should return undefined': function(level) {
|
||||
assert.isUndefined(level);
|
||||
}
|
||||
},
|
||||
'with unrecognised argument and default value': {
|
||||
topic: levels.toLevel("cheese", levels.DEBUG),
|
||||
'should return default value': function(level) {
|
||||
assert.equal(level, levels.DEBUG);
|
||||
}
|
||||
}
|
||||
}
|
||||
}).export(module);
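A short sketch of the comparison API these tests describe; string arguments are accepted case-insensitively, as the last three batches show.

    var levels = require('../lib/levels');

    var info = levels.toLevel('info');          // case-insensitive lookup
    info.isGreaterThanOrEqualTo('debug');       // true
    info.isLessThanOrEqualTo(levels.ERROR);     // true
    info.isEqualTo('INFO');                     // true
    levels.toLevel('cheese', levels.DEBUG);     // unknown name falls back to the default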
|
||||
75
test/log-abspath-test.js
Normal file
@@ -0,0 +1,75 @@
|
||||
"use strict";
|
||||
var vows = require('vows')
|
||||
, assert = require('assert')
|
||||
, sandbox = require('sandboxed-module');
|
||||
|
||||
vows.describe('log4js-abspath').addBatch({
|
||||
'options': {
|
||||
topic: function() {
|
||||
var appenderOptions,
|
||||
log4js = sandbox.require(
|
||||
'../lib/log4js',
|
||||
{ requires:
|
||||
{ './appenders/fake':
|
||||
{ name: "fake",
|
||||
appender: function() {},
|
||||
configure: function(configuration, options) {
|
||||
appenderOptions = options;
|
||||
return function() {};
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
),
|
||||
config = {
|
||||
"appenders": [
|
||||
{
|
||||
"type" : "fake",
|
||||
"filename" : "cheesy-wotsits.log"
|
||||
}
|
||||
]
|
||||
};
|
||||
|
||||
log4js.configure(config, {
|
||||
cwd: '/absolute/path/to'
|
||||
});
|
||||
return appenderOptions;
|
||||
},
|
||||
'should be passed to appenders during configuration': function(options) {
|
||||
assert.equal(options.cwd, '/absolute/path/to');
|
||||
}
|
||||
},
|
||||
|
||||
'file appender': {
|
||||
topic: function() {
|
||||
var fileOpened,
|
||||
fileAppender = sandbox.require(
|
||||
'../lib/appenders/file',
|
||||
{ requires:
|
||||
{ '../streams':
|
||||
{ RollingFileStream:
|
||||
function(file) {
|
||||
fileOpened = file;
|
||||
return {
|
||||
on: function() {},
|
||||
end: function() {}
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
);
|
||||
fileAppender.configure(
|
||||
{
|
||||
filename: "whatever.log",
|
||||
maxLogSize: 10
|
||||
},
|
||||
{ cwd: '/absolute/path/to' }
|
||||
);
|
||||
return fileOpened;
|
||||
},
|
||||
'should prepend options.cwd to config.filename': function(fileOpened) {
|
||||
assert.equal(fileOpened, "/absolute/path/to/whatever.log");
|
||||
}
|
||||
},
|
||||
}).export(module);
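In other words, a sketch based on the assertions above (the paths are hypothetical): when configure() is given an options object, its cwd is passed to each appender's configure(), and the file appender prepends it to relative filenames.

    var log4js = require('../lib/log4js');

    log4js.configure(
      { appenders: [ { type: 'file', filename: 'whatever.log', maxLogSize: 10 } ] },
      { cwd: '/absolute/path/to' }   // the file appender opens /absolute/path/to/whatever.log
    );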
|
||||
78
test/logLevelFilter-test.js
Normal file
@@ -0,0 +1,78 @@
"use strict";
var vows = require('vows')
, fs = require('fs')
, assert = require('assert');

function remove(filename) {
  try {
    fs.unlinkSync(filename);
  } catch (e) {
    //doesn't really matter if it failed
  }
}

vows.describe('log4js logLevelFilter').addBatch({
  'appender': {
    topic: function() {
      var log4js = require('../lib/log4js'), logEvents = [], logger;
      log4js.clearAppenders();
      log4js.addAppender(
        require('../lib/appenders/logLevelFilter')
          .appender(
            'ERROR',
            function(evt) { logEvents.push(evt); }
          ),
        "logLevelTest"
      );

      logger = log4js.getLogger("logLevelTest");
      logger.debug('this should not trigger an event');
      logger.warn('neither should this');
      logger.error('this should, though');
      logger.fatal('so should this');
      return logEvents;
    },
    'should only pass log events greater than or equal to its own level' : function(logEvents) {
      assert.equal(logEvents.length, 2);
      assert.equal(logEvents[0].data[0], 'this should, though');
      assert.equal(logEvents[1].data[0], 'so should this');
    }
  },

  'configure': {
    topic: function() {
      var log4js = require('../lib/log4js')
      , logger;

      remove(__dirname + '/logLevelFilter.log');
      remove(__dirname + '/logLevelFilter-warnings.log');

      log4js.configure('test/with-logLevelFilter.json');
      logger = log4js.getLogger("tests");
      logger.info('main');
      logger.error('both');
      logger.warn('both');
      logger.debug('main');
      //wait for the file system to catch up
      setTimeout(this.callback, 100);
    },
    'tmp-tests.log': {
      topic: function() {
        fs.readFile(__dirname + '/logLevelFilter.log', 'utf8', this.callback);
      },
      'should contain all log messages': function(contents) {
        var messages = contents.trim().split('\n');
        assert.deepEqual(messages, ['main','both','both','main']);
      }
    },
    'tmp-tests-warnings.log': {
      topic: function() {
        fs.readFile(__dirname + '/logLevelFilter-warnings.log','utf8',this.callback);
      },
      'should contain only error and warning log messages': function(contents) {
        var messages = contents.trim().split('\n');
        assert.deepEqual(messages, ['both','both']);
      }
    }
  }
}).export(module);
81 test/logger-test.js Normal file
@@ -0,0 +1,81 @@
"use strict";
var vows = require('vows')
, assert = require('assert')
, levels = require('../lib/levels')
, loggerModule = require('../lib/logger')
, Logger = loggerModule.Logger;

vows.describe('../lib/logger').addBatch({
  'constructor with no parameters': {
    topic: new Logger(),
    'should use default category': function(logger) {
      assert.equal(logger.category, Logger.DEFAULT_CATEGORY);
    },
    'should use TRACE log level': function(logger) {
      assert.equal(logger.level, levels.TRACE);
    }
  },

  'constructor with category': {
    topic: new Logger('cheese'),
    'should use category': function(logger) {
      assert.equal(logger.category, 'cheese');
    },
    'should use TRACE log level': function(logger) {
      assert.equal(logger.level, levels.TRACE);
    }
  },

  'constructor with category and level': {
    topic: new Logger('cheese', 'debug'),
    'should use category': function(logger) {
      assert.equal(logger.category, 'cheese');
    },
    'should use level': function(logger) {
      assert.equal(logger.level, levels.DEBUG);
    }
  },

  'isLevelEnabled': {
    topic: new Logger('cheese', 'info'),
    'should provide a level enabled function for all levels': function(logger) {
      assert.isFunction(logger.isTraceEnabled);
      assert.isFunction(logger.isDebugEnabled);
      assert.isFunction(logger.isInfoEnabled);
      assert.isFunction(logger.isWarnEnabled);
      assert.isFunction(logger.isErrorEnabled);
      assert.isFunction(logger.isFatalEnabled);
    },
    'should return the right values': function(logger) {
      assert.isFalse(logger.isTraceEnabled());
      assert.isFalse(logger.isDebugEnabled());
      assert.isTrue(logger.isInfoEnabled());
      assert.isTrue(logger.isWarnEnabled());
      assert.isTrue(logger.isErrorEnabled());
      assert.isTrue(logger.isFatalEnabled());
    }
  },

  'should emit log events': {
    topic: function() {
      var events = [],
        logger = new Logger();
      logger.addListener('log', function (logEvent) { events.push(logEvent); });
      logger.debug('Event 1');
      loggerModule.disableAllLogWrites();
      logger.debug('Event 2');
      loggerModule.enableAllLogWrites();
      logger.debug('Event 3');
      return events;
    },

    'when log writes are enabled': function(events) {
      assert.equal(events[0].data[0], 'Event 1');
    },

    'but not when log writes are disabled': function(events) {
      assert.equal(events.length, 2);
      assert.equal(events[1].data[0], 'Event 3');
    }
  }
}).exportTo(module);
564 test/logging-test.js Normal file
@@ -0,0 +1,564 @@
|
||||
"use strict";
|
||||
var vows = require('vows')
|
||||
, assert = require('assert')
|
||||
, sandbox = require('sandboxed-module');
|
||||
|
||||
function setupConsoleTest() {
|
||||
var fakeConsole = {}
|
||||
, logEvents = []
|
||||
, log4js;
|
||||
|
||||
['trace','debug','log','info','warn','error'].forEach(function(fn) {
|
||||
fakeConsole[fn] = function() {
|
||||
throw new Error("this should not be called.");
|
||||
};
|
||||
});
|
||||
|
||||
log4js = sandbox.require(
|
||||
'../lib/log4js',
|
||||
{
|
||||
globals: {
|
||||
console: fakeConsole
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
log4js.clearAppenders();
|
||||
log4js.addAppender(function(evt) {
|
||||
logEvents.push(evt);
|
||||
});
|
||||
|
||||
return { log4js: log4js, logEvents: logEvents, fakeConsole: fakeConsole };
|
||||
}
|
||||
|
||||
vows.describe('log4js').addBatch({
|
||||
'getLogger': {
|
||||
topic: function() {
|
||||
var log4js = require('../lib/log4js');
|
||||
log4js.clearAppenders();
|
||||
var logger = log4js.getLogger('tests');
|
||||
logger.setLevel("DEBUG");
|
||||
return logger;
|
||||
},
|
||||
|
||||
'should take a category and return a logger': function(logger) {
|
||||
assert.equal(logger.category, 'tests');
|
||||
assert.equal(logger.level.toString(), "DEBUG");
|
||||
assert.isFunction(logger.debug);
|
||||
assert.isFunction(logger.info);
|
||||
assert.isFunction(logger.warn);
|
||||
assert.isFunction(logger.error);
|
||||
assert.isFunction(logger.fatal);
|
||||
},
|
||||
|
||||
'log events' : {
|
||||
topic: function(logger) {
|
||||
var events = [];
|
||||
logger.addListener("log", function (logEvent) { events.push(logEvent); });
|
||||
logger.debug("Debug event");
|
||||
logger.trace("Trace event 1");
|
||||
logger.trace("Trace event 2");
|
||||
logger.warn("Warning event");
|
||||
logger.error("Aargh!", new Error("Pants are on fire!"));
|
||||
logger.error("Simulated CouchDB problem", { err: 127, cause: "incendiary underwear" });
|
||||
return events;
|
||||
},
|
||||
|
||||
'should emit log events': function(events) {
|
||||
assert.equal(events[0].level.toString(), 'DEBUG');
|
||||
assert.equal(events[0].data[0], 'Debug event');
|
||||
assert.instanceOf(events[0].startTime, Date);
|
||||
},
|
||||
|
||||
'should not emit events of a lower level': function(events) {
|
||||
assert.equal(events.length, 4);
|
||||
assert.equal(events[1].level.toString(), 'WARN');
|
||||
},
|
||||
|
||||
'should include the error if passed in': function(events) {
|
||||
assert.instanceOf(events[2].data[1], Error);
|
||||
assert.equal(events[2].data[1].message, 'Pants are on fire!');
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
'when shutdown is called': {
|
||||
topic: function() {
|
||||
var events = {
|
||||
appenderShutdownCalled: false,
|
||||
shutdownCallbackCalled: false
|
||||
},
|
||||
log4js = sandbox.require(
|
||||
'../lib/log4js',
|
||||
{
|
||||
requires: {
|
||||
'./appenders/file':
|
||||
{
|
||||
name: "file",
|
||||
appender: function() {},
|
||||
configure: function(configuration) {
|
||||
return function() {};
|
||||
},
|
||||
shutdown: function(cb) {
|
||||
events.appenderShutdownCalled = true;
|
||||
cb();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
),
|
||||
shutdownCallback = function() {
|
||||
events.shutdownCallbackCalled = true;
|
||||
},
|
||||
config = { appenders:
|
||||
[ { "type" : "file",
|
||||
"filename" : "cheesy-wotsits.log",
|
||||
"maxLogSize" : 1024,
|
||||
"backups" : 3
|
||||
}
|
||||
]
|
||||
};
|
||||
|
||||
log4js.configure(config);
|
||||
log4js.shutdown(shutdownCallback);
|
||||
// Re-enable log writing so other tests that use logger are not
|
||||
// affected.
|
||||
require('../lib/logger').enableAllLogWrites();
|
||||
return events;
|
||||
},
|
||||
|
||||
'should invoke appender shutdowns': function(events) {
|
||||
assert.ok(events.appenderShutdownCalled);
|
||||
},
|
||||
|
||||
'should call callback': function(events) {
|
||||
assert.ok(events.shutdownCallbackCalled);
|
||||
}
|
||||
},
|
||||
|
||||
'invalid configuration': {
|
||||
'should throw an exception': function() {
|
||||
assert.throws(function() {
|
||||
require('log4js').configure({ "type": "invalid" });
|
||||
});
|
||||
}
|
||||
},
|
||||
|
||||
'configuration when passed as object': {
|
||||
topic: function() {
|
||||
var appenderConfig,
|
||||
log4js = sandbox.require(
|
||||
'../lib/log4js',
|
||||
{
|
||||
requires: {
|
||||
'./appenders/file':
|
||||
{
|
||||
name: "file",
|
||||
appender: function() {},
|
||||
configure: function(configuration) {
|
||||
appenderConfig = configuration;
|
||||
return function() {};
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
),
|
||||
config = { appenders:
|
||||
[ { "type" : "file",
|
||||
"filename" : "cheesy-wotsits.log",
|
||||
"maxLogSize" : 1024,
|
||||
"backups" : 3
|
||||
}
|
||||
]
|
||||
};
|
||||
log4js.configure(config);
|
||||
return appenderConfig;
|
||||
},
|
||||
'should be passed to appender config': function(configuration) {
|
||||
assert.equal(configuration.filename, 'cheesy-wotsits.log');
|
||||
}
|
||||
},
|
||||
|
||||
'configuration that causes an error': {
|
||||
topic: function() {
|
||||
var log4js = sandbox.require(
|
||||
'../lib/log4js',
|
||||
{
|
||||
requires: {
|
||||
'./appenders/file':
|
||||
{
|
||||
name: "file",
|
||||
appender: function() {},
|
||||
configure: function(configuration) {
|
||||
throw new Error("oh noes");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
),
|
||||
config = { appenders:
|
||||
[ { "type" : "file",
|
||||
"filename" : "cheesy-wotsits.log",
|
||||
"maxLogSize" : 1024,
|
||||
"backups" : 3
|
||||
}
|
||||
]
|
||||
};
|
||||
try {
|
||||
log4js.configure(config);
|
||||
} catch (e) {
|
||||
return e;
|
||||
}
|
||||
},
|
||||
'should wrap error in a meaningful message': function(e) {
|
||||
assert.ok(e.message.indexOf('log4js configuration problem for') > -1);
|
||||
}
|
||||
},
|
||||
|
||||
'configuration when passed as filename': {
|
||||
topic: function() {
|
||||
var appenderConfig,
|
||||
configFilename,
|
||||
log4js = sandbox.require(
|
||||
'../lib/log4js',
|
||||
{ requires:
|
||||
{ 'fs':
|
||||
{ statSync:
|
||||
function() {
|
||||
return { mtime: Date.now() };
|
||||
},
|
||||
readFileSync:
|
||||
function(filename) {
|
||||
configFilename = filename;
|
||||
return JSON.stringify({
|
||||
appenders: [
|
||||
{ type: "file"
|
||||
, filename: "whatever.log"
|
||||
}
|
||||
]
|
||||
});
|
||||
},
|
||||
readdirSync:
|
||||
function() {
|
||||
return ['file'];
|
||||
}
|
||||
},
|
||||
'./appenders/file':
|
||||
{ name: "file",
|
||||
appender: function() {},
|
||||
configure: function(configuration) {
|
||||
appenderConfig = configuration;
|
||||
return function() {};
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
);
|
||||
log4js.configure("/path/to/cheese.json");
|
||||
return [ configFilename, appenderConfig ];
|
||||
},
|
||||
'should read the config from a file': function(args) {
|
||||
assert.equal(args[0], '/path/to/cheese.json');
|
||||
},
|
||||
'should pass config to appender': function(args) {
|
||||
assert.equal(args[1].filename, "whatever.log");
|
||||
}
|
||||
},
|
||||
|
||||
'with no appenders defined' : {
|
||||
topic: function() {
|
||||
var logger,
|
||||
that = this,
|
||||
fakeConsoleAppender = {
|
||||
name: "console",
|
||||
appender: function() {
|
||||
return function(evt) {
|
||||
that.callback(null, evt);
|
||||
};
|
||||
},
|
||||
configure: function() {
|
||||
return fakeConsoleAppender.appender();
|
||||
}
|
||||
},
|
||||
log4js = sandbox.require(
|
||||
'../lib/log4js',
|
||||
{
|
||||
requires: {
|
||||
'./appenders/console': fakeConsoleAppender
|
||||
}
|
||||
}
|
||||
);
|
||||
logger = log4js.getLogger("some-logger");
|
||||
logger.debug("This is a test");
|
||||
},
|
||||
'should default to the console appender': function(evt) {
|
||||
assert.equal(evt.data[0], "This is a test");
|
||||
}
|
||||
},
|
||||
|
||||
'addAppender' : {
|
||||
topic: function() {
|
||||
var log4js = require('../lib/log4js');
|
||||
log4js.clearAppenders();
|
||||
return log4js;
|
||||
},
|
||||
'without a category': {
|
||||
'should register the function as a listener for all loggers': function (log4js) {
|
||||
var appenderEvent,
|
||||
appender = function(evt) { appenderEvent = evt; },
|
||||
logger = log4js.getLogger("tests");
|
||||
|
||||
log4js.addAppender(appender);
|
||||
logger.debug("This is a test");
|
||||
assert.equal(appenderEvent.data[0], "This is a test");
|
||||
assert.equal(appenderEvent.categoryName, "tests");
|
||||
assert.equal(appenderEvent.level.toString(), "DEBUG");
|
||||
},
|
||||
'if an appender for a category is defined': {
|
||||
'should register for that category': function (log4js) {
|
||||
var otherEvent,
|
||||
appenderEvent,
|
||||
cheeseLogger;
|
||||
|
||||
log4js.addAppender(function (evt) { appenderEvent = evt; });
|
||||
log4js.addAppender(function (evt) { otherEvent = evt; }, 'cheese');
|
||||
|
||||
cheeseLogger = log4js.getLogger('cheese');
|
||||
cheeseLogger.debug('This is a test');
|
||||
assert.deepEqual(appenderEvent, otherEvent);
|
||||
assert.equal(otherEvent.data[0], 'This is a test');
|
||||
assert.equal(otherEvent.categoryName, 'cheese');
|
||||
|
||||
otherEvent = undefined;
|
||||
appenderEvent = undefined;
|
||||
log4js.getLogger('pants').debug("this should not be propagated to otherEvent");
|
||||
assert.isUndefined(otherEvent);
|
||||
assert.equal(appenderEvent.data[0], "this should not be propagated to otherEvent");
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
'with a category': {
|
||||
'should only register the function as a listener for that category': function(log4js) {
|
||||
var appenderEvent,
|
||||
appender = function(evt) { appenderEvent = evt; },
|
||||
logger = log4js.getLogger("tests");
|
||||
|
||||
log4js.addAppender(appender, 'tests');
|
||||
logger.debug('this is a category test');
|
||||
assert.equal(appenderEvent.data[0], 'this is a category test');
|
||||
|
||||
appenderEvent = undefined;
|
||||
log4js.getLogger('some other category').debug('Cheese');
|
||||
assert.isUndefined(appenderEvent);
|
||||
}
|
||||
},
|
||||
|
||||
'with multiple categories': {
|
||||
'should register the function as a listener for all the categories': function(log4js) {
|
||||
var appenderEvent,
|
||||
appender = function(evt) { appenderEvent = evt; },
|
||||
logger = log4js.getLogger('tests');
|
||||
|
||||
log4js.addAppender(appender, 'tests', 'biscuits');
|
||||
|
||||
logger.debug('this is a test');
|
||||
assert.equal(appenderEvent.data[0], 'this is a test');
|
||||
appenderEvent = undefined;
|
||||
|
||||
var otherLogger = log4js.getLogger('biscuits');
|
||||
otherLogger.debug("mmm... garibaldis");
|
||||
assert.equal(appenderEvent.data[0], "mmm... garibaldis");
|
||||
|
||||
appenderEvent = undefined;
|
||||
|
||||
log4js.getLogger("something else").debug("pants");
|
||||
assert.isUndefined(appenderEvent);
|
||||
},
|
||||
'should register the function when the list of categories is an array': function(log4js) {
|
||||
var appenderEvent,
|
||||
appender = function(evt) { appenderEvent = evt; };
|
||||
|
||||
log4js.addAppender(appender, ['tests', 'pants']);
|
||||
|
||||
log4js.getLogger('tests').debug('this is a test');
|
||||
assert.equal(appenderEvent.data[0], 'this is a test');
|
||||
|
||||
appenderEvent = undefined;
|
||||
|
||||
log4js.getLogger('pants').debug("big pants");
|
||||
assert.equal(appenderEvent.data[0], "big pants");
|
||||
|
||||
appenderEvent = undefined;
|
||||
|
||||
log4js.getLogger("something else").debug("pants");
|
||||
assert.isUndefined(appenderEvent);
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
'default setup': {
|
||||
topic: function() {
|
||||
var appenderEvents = [],
|
||||
fakeConsole = {
|
||||
'name': 'console',
|
||||
'appender': function () {
|
||||
return function(evt) {
|
||||
appenderEvents.push(evt);
|
||||
};
|
||||
},
|
||||
'configure': function (config) {
|
||||
return fakeConsole.appender();
|
||||
}
|
||||
},
|
||||
globalConsole = {
|
||||
log: function() { }
|
||||
},
|
||||
log4js = sandbox.require(
|
||||
'../lib/log4js',
|
||||
{
|
||||
requires: {
|
||||
'./appenders/console': fakeConsole
|
||||
},
|
||||
globals: {
|
||||
console: globalConsole
|
||||
}
|
||||
}
|
||||
),
|
||||
logger = log4js.getLogger('a-test');
|
||||
|
||||
logger.debug("this is a test");
|
||||
globalConsole.log("this should not be logged");
|
||||
|
||||
return appenderEvents;
|
||||
},
|
||||
|
||||
'should configure a console appender': function(appenderEvents) {
|
||||
assert.equal(appenderEvents[0].data[0], 'this is a test');
|
||||
},
|
||||
|
||||
'should not replace console.log with log4js version': function(appenderEvents) {
|
||||
assert.equal(appenderEvents.length, 1);
|
||||
}
|
||||
},
|
||||
|
||||
'console' : {
|
||||
topic: setupConsoleTest,
|
||||
|
||||
'when replaceConsole called': {
|
||||
topic: function(test) {
|
||||
test.log4js.replaceConsole();
|
||||
|
||||
test.fakeConsole.log("Some debug message someone put in a module");
|
||||
test.fakeConsole.debug("Some debug");
|
||||
test.fakeConsole.error("An error");
|
||||
test.fakeConsole.info("some info");
|
||||
test.fakeConsole.warn("a warning");
|
||||
|
||||
test.fakeConsole.log("cheese (%s) and biscuits (%s)", "gouda", "garibaldis");
|
||||
test.fakeConsole.log({ lumpy: "tapioca" });
|
||||
test.fakeConsole.log("count %d", 123);
|
||||
test.fakeConsole.log("stringify %j", { lumpy: "tapioca" });
|
||||
|
||||
return test.logEvents;
|
||||
},
|
||||
|
||||
'should replace console.log methods with log4js ones': function(logEvents) {
|
||||
assert.equal(logEvents.length, 9);
|
||||
assert.equal(logEvents[0].data[0], "Some debug message someone put in a module");
|
||||
assert.equal(logEvents[0].level.toString(), "INFO");
|
||||
assert.equal(logEvents[1].data[0], "Some debug");
|
||||
assert.equal(logEvents[1].level.toString(), "DEBUG");
|
||||
assert.equal(logEvents[2].data[0], "An error");
|
||||
assert.equal(logEvents[2].level.toString(), "ERROR");
|
||||
assert.equal(logEvents[3].data[0], "some info");
|
||||
assert.equal(logEvents[3].level.toString(), "INFO");
|
||||
assert.equal(logEvents[4].data[0], "a warning");
|
||||
assert.equal(logEvents[4].level.toString(), "WARN");
|
||||
assert.equal(logEvents[5].data[0], "cheese (%s) and biscuits (%s)");
|
||||
assert.equal(logEvents[5].data[1], "gouda");
|
||||
assert.equal(logEvents[5].data[2], "garibaldis");
|
||||
}
|
||||
},
|
||||
'when turned off': {
|
||||
topic: function(test) {
|
||||
test.log4js.restoreConsole();
|
||||
try {
|
||||
test.fakeConsole.log("This should cause the error described in the setup");
|
||||
} catch (e) {
|
||||
return e;
|
||||
}
|
||||
},
|
||||
'should call the original console methods': function (err) {
|
||||
assert.instanceOf(err, Error);
|
||||
assert.equal(err.message, "this should not be called.");
|
||||
}
|
||||
}
|
||||
},
|
||||
'console configuration': {
|
||||
topic: setupConsoleTest,
|
||||
'when disabled': {
|
||||
topic: function(test) {
|
||||
test.log4js.replaceConsole();
|
||||
test.log4js.configure({ replaceConsole: false });
|
||||
try {
|
||||
test.fakeConsole.log("This should cause the error described in the setup");
|
||||
} catch (e) {
|
||||
return e;
|
||||
}
|
||||
},
|
||||
'should allow for turning off console replacement': function (err) {
|
||||
assert.instanceOf(err, Error);
|
||||
assert.equal(err.message, 'this should not be called.');
|
||||
}
|
||||
},
|
||||
'when enabled': {
|
||||
topic: function(test) {
|
||||
test.log4js.restoreConsole();
|
||||
test.log4js.configure({ replaceConsole: true });
|
||||
//log4js.configure clears all appenders
|
||||
test.log4js.addAppender(function(evt) {
|
||||
test.logEvents.push(evt);
|
||||
});
|
||||
|
||||
test.fakeConsole.debug("Some debug");
|
||||
return test.logEvents;
|
||||
},
|
||||
|
||||
'should allow for turning on console replacement': function (logEvents) {
|
||||
assert.equal(logEvents.length, 1);
|
||||
assert.equal(logEvents[0].level.toString(), "DEBUG");
|
||||
assert.equal(logEvents[0].data[0], "Some debug");
|
||||
}
|
||||
}
|
||||
},
|
||||
'configuration persistence' : {
|
||||
topic: function() {
|
||||
var logEvent,
|
||||
firstLog4js = require('../lib/log4js'),
|
||||
secondLog4js;
|
||||
|
||||
firstLog4js.clearAppenders();
|
||||
firstLog4js.addAppender(function(evt) { logEvent = evt; });
|
||||
|
||||
secondLog4js = require('../lib/log4js');
|
||||
secondLog4js.getLogger().info("This should go to the appender defined in firstLog4js");
|
||||
|
||||
return logEvent;
|
||||
},
|
||||
'should maintain appenders between requires': function (logEvent) {
|
||||
assert.equal(logEvent.data[0], "This should go to the appender defined in firstLog4js");
|
||||
}
|
||||
},
|
||||
|
||||
'getDefaultLogger': {
|
||||
topic: function() {
|
||||
return require('../lib/log4js').getDefaultLogger();
|
||||
},
|
||||
'should return a logger': function(logger) {
|
||||
assert.ok(logger.info);
|
||||
assert.ok(logger.debug);
|
||||
assert.ok(logger.error);
|
||||
}
|
||||
}
|
||||
}).export(module);
|
||||
558 test/logging.js
@@ -1,558 +0,0 @@
|
||||
var vows = require('vows')
|
||||
, assert = require('assert')
|
||||
, sandbox = require('sandboxed-module');
|
||||
|
||||
vows.describe('log4js').addBatch({
|
||||
'getLogger': {
|
||||
topic: function() {
|
||||
var log4js = require('../lib/log4js');
|
||||
log4js.clearAppenders();
|
||||
var logger = log4js.getLogger('tests');
|
||||
logger.setLevel("DEBUG");
|
||||
return logger;
|
||||
},
|
||||
|
||||
'should take a category and return a logger': function(logger) {
|
||||
assert.equal(logger.category, 'tests');
|
||||
assert.equal(logger.level.toString(), "DEBUG");
|
||||
assert.isFunction(logger.debug);
|
||||
assert.isFunction(logger.info);
|
||||
assert.isFunction(logger.warn);
|
||||
assert.isFunction(logger.error);
|
||||
assert.isFunction(logger.fatal);
|
||||
},
|
||||
|
||||
'log events' : {
|
||||
topic: function(logger) {
|
||||
var events = [];
|
||||
logger.addListener("log", function (logEvent) { events.push(logEvent); });
|
||||
logger.debug("Debug event");
|
||||
logger.trace("Trace event 1");
|
||||
logger.trace("Trace event 2");
|
||||
logger.warn("Warning event");
|
||||
logger.error("Aargh!", new Error("Pants are on fire!"));
|
||||
logger.error("Simulated CouchDB problem", { err: 127, cause: "incendiary underwear" });
|
||||
return events;
|
||||
},
|
||||
|
||||
'should emit log events': function(events) {
|
||||
assert.equal(events[0].level.toString(), 'DEBUG');
|
||||
assert.equal(events[0].data[0], 'Debug event');
|
||||
assert.instanceOf(events[0].startTime, Date);
|
||||
},
|
||||
|
||||
'should not emit events of a lower level': function(events) {
|
||||
assert.length(events, 4);
|
||||
assert.equal(events[1].level.toString(), 'WARN');
|
||||
},
|
||||
|
||||
'should include the error if passed in': function (events) {
|
||||
assert.instanceOf(events[2].data[1], Error);
|
||||
assert.equal(events[2].data[1].message, 'Pants are on fire!');
|
||||
}
|
||||
|
||||
},
|
||||
|
||||
},
|
||||
|
||||
'fileAppender': {
|
||||
topic: function() {
|
||||
var appender
|
||||
, logmessages = []
|
||||
, thing = "thing"
|
||||
, fakeFS = {
|
||||
createWriteStream: function() {
|
||||
assert.equal(arguments[0], './tmp-tests.log');
|
||||
assert.isObject(arguments[1]);
|
||||
assert.equal(arguments[1].flags, 'a');
|
||||
assert.equal(arguments[1].mode, 0644);
|
||||
assert.equal(arguments[1].encoding, 'utf8');
|
||||
return {
|
||||
write: function(message) {
|
||||
logmessages.push(message);
|
||||
}
|
||||
, end: function() {}
|
||||
, destroySoon: function() {}
|
||||
};
|
||||
},
|
||||
watchFile: function() {
|
||||
throw new Error("watchFile should not be called if logSize is not defined");
|
||||
}
|
||||
},
|
||||
log4js = sandbox.require(
|
||||
'../lib/log4js',
|
||||
{
|
||||
requires: {
|
||||
'fs': fakeFS
|
||||
}
|
||||
}
|
||||
);
|
||||
log4js.clearAppenders();
|
||||
|
||||
appender = log4js.fileAppender('./tmp-tests.log', log4js.layouts.messagePassThroughLayout);
|
||||
log4js.addAppender(appender, 'file-test');
|
||||
|
||||
var logger = log4js.getLogger('file-test');
|
||||
logger.debug("this is a test");
|
||||
|
||||
return logmessages;
|
||||
},
|
||||
'should write log messages to file': function(logmessages) {
|
||||
assert.length(logmessages, 1);
|
||||
assert.equal(logmessages, "this is a test\n");
|
||||
}
|
||||
},
|
||||
|
||||
'fileAppender - with rolling based on size and number of files to keep': {
|
||||
topic: function() {
|
||||
var watchCb,
|
||||
filesOpened = [],
|
||||
filesEnded = [],
|
||||
filesDestroyedSoon = [],
|
||||
filesRenamed = [],
|
||||
newFilenames = [],
|
||||
existingFiles = ['tests.log'],
|
||||
log4js = sandbox.require(
|
||||
'../lib/log4js'
|
||||
, {
|
||||
requires: {
|
||||
'fs': {
|
||||
watchFile: function(file, options, callback) {
|
||||
assert.equal(file, 'tests.log');
|
||||
assert.equal(options.persistent, false);
|
||||
assert.equal(options.interval, 30000);
|
||||
assert.isFunction(callback);
|
||||
watchCb = callback;
|
||||
},
|
||||
createWriteStream: function(file) {
|
||||
assert.equal(file, 'tests.log');
|
||||
filesOpened.push(file);
|
||||
return {
|
||||
end: function() {
|
||||
filesEnded.push(file);
|
||||
},
|
||||
destroySoon: function() {
|
||||
filesDestroyedSoon.push(file);
|
||||
}
|
||||
};
|
||||
},
|
||||
statSync: function(file) {
|
||||
if (existingFiles.indexOf(file) < 0) {
|
||||
throw new Error("this file doesn't exist");
|
||||
} else {
|
||||
return true;
|
||||
}
|
||||
},
|
||||
renameSync: function(oldFile, newFile) {
|
||||
filesRenamed.push(oldFile);
|
||||
existingFiles.push(newFile);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
);
|
||||
var appender = log4js.fileAppender('tests.log', log4js.messagePassThroughLayout, 1024, 2, 30);
|
||||
return [watchCb, filesOpened, filesEnded, filesDestroyedSoon, filesRenamed, existingFiles];
|
||||
},
|
||||
|
||||
'should close current log file, rename all old ones, open new one on rollover': function(args) {
|
||||
var watchCb = args[0]
|
||||
, filesOpened = args[1]
|
||||
, filesEnded = args[2]
|
||||
, filesDestroyedSoon = args[3]
|
||||
, filesRenamed = args[4]
|
||||
, existingFiles = args[5];
|
||||
assert.isFunction(watchCb);
|
||||
//tell the watchCb that the file is below the threshold
|
||||
watchCb({ size: 891 }, { size: 0 });
|
||||
//filesOpened should still be the first one.
|
||||
assert.length(filesOpened, 1);
|
||||
//tell the watchCb that the file is now over the threshold
|
||||
watchCb({ size: 1053 }, { size: 891 });
|
||||
//it should have closed the first log file.
|
||||
assert.length(filesEnded, 1);
|
||||
assert.length(filesDestroyedSoon, 1);
|
||||
//it should have renamed the previous log file
|
||||
assert.length(filesRenamed, 1);
|
||||
//and we should have two files now
|
||||
assert.length(existingFiles, 2);
|
||||
assert.deepEqual(existingFiles, ['tests.log', 'tests.log.1']);
|
||||
//and opened a new log file.
|
||||
assert.length(filesOpened, 2);
|
||||
|
||||
//now tell the watchCb that we've flipped over the threshold again
|
||||
watchCb({ size: 1025 }, { size: 123 });
|
||||
//it should have closed the old file
|
||||
assert.length(filesEnded, 2);
|
||||
assert.length(filesDestroyedSoon, 2);
|
||||
//it should have renamed both the old log file, and the previous '.1' file
|
||||
assert.length(filesRenamed, 3);
|
||||
assert.deepEqual(filesRenamed, ['tests.log', 'tests.log.1', 'tests.log' ]);
|
||||
//it should have renamed 2 more files
|
||||
assert.length(existingFiles, 4);
|
||||
assert.deepEqual(existingFiles, ['tests.log', 'tests.log.1', 'tests.log.2', 'tests.log.1']);
|
||||
//and opened a new log file
|
||||
assert.length(filesOpened, 3);
|
||||
|
||||
//tell the watchCb we've flipped again.
|
||||
watchCb({ size: 1024 }, { size: 234 });
|
||||
//close the old one again.
|
||||
assert.length(filesEnded, 3);
|
||||
assert.length(filesDestroyedSoon, 3);
|
||||
//it should have renamed the old log file and the 2 backups, with the last one being overwritten.
|
||||
assert.length(filesRenamed, 5);
|
||||
assert.deepEqual(filesRenamed, ['tests.log', 'tests.log.1', 'tests.log', 'tests.log.1', 'tests.log' ]);
|
||||
//it should have renamed 2 more files
|
||||
assert.length(existingFiles, 6);
|
||||
assert.deepEqual(existingFiles, ['tests.log', 'tests.log.1', 'tests.log.2', 'tests.log.1', 'tests.log.2', 'tests.log.1']);
|
||||
//and opened a new log file
|
||||
assert.length(filesOpened, 4);
|
||||
}
|
||||
},
|
||||
|
||||
'configure' : {
|
||||
topic: function() {
|
||||
var messages = {}, fakeFS = {
|
||||
createWriteStream: function(file) {
|
||||
return {
|
||||
write: function(message) {
|
||||
if (!messages.hasOwnProperty(file)) {
|
||||
messages[file] = [];
|
||||
}
|
||||
messages[file].push(message);
|
||||
}
|
||||
, end: function() {}
|
||||
, destroySoon: function() {}
|
||||
};
|
||||
},
|
||||
readFileSync: function(file, encoding) {
|
||||
return require('fs').readFileSync(file, encoding);
|
||||
},
|
||||
watchFile: function(file) {
|
||||
messages.watchedFile = file;
|
||||
}
|
||||
},
|
||||
log4js = sandbox.require(
|
||||
'../lib/log4js'
|
||||
, {
|
||||
requires: {
|
||||
'fs': fakeFS
|
||||
}
|
||||
}
|
||||
);
|
||||
return [ log4js, messages ];
|
||||
},
|
||||
'should load appender configuration from a json file': function(args) {
|
||||
var log4js = args[0], messages = args[1];
|
||||
delete messages['tmp-tests.log'];
|
||||
log4js.clearAppenders();
|
||||
//this config file defines one file appender (to ./tmp-tests.log)
|
||||
//and sets the log level for "tests" to WARN
|
||||
log4js.configure('test/log4js.json');
|
||||
var logger = log4js.getLogger("tests");
|
||||
logger.info('this should not be written to the file');
|
||||
logger.warn('this should be written to the file');
|
||||
assert.length(messages['tmp-tests.log'], 1);
|
||||
assert.equal(messages['tmp-tests.log'][0], 'this should be written to the file\n');
|
||||
},
|
||||
'should handle logLevelFilter configuration': function(args) {
|
||||
var log4js = args[0], messages = args[1];
|
||||
delete messages['tmp-tests.log'];
|
||||
delete messages['tmp-tests-warnings.log'];
|
||||
log4js.clearAppenders();
|
||||
log4js.configure('test/with-logLevelFilter.json');
|
||||
var logger = log4js.getLogger("tests");
|
||||
logger.info('main');
|
||||
logger.error('both');
|
||||
logger.warn('both');
|
||||
logger.debug('main');
|
||||
|
||||
assert.length(messages['tmp-tests.log'], 4);
|
||||
assert.length(messages['tmp-tests-warnings.log'], 2);
|
||||
assert.deepEqual(messages['tmp-tests.log'], ['main\n','both\n','both\n','main\n']);
|
||||
assert.deepEqual(messages['tmp-tests-warnings.log'], ['both\n','both\n']);
|
||||
},
|
||||
'should handle fileAppender with log rolling' : function(args) {
|
||||
var log4js = args[0], messages = args[1];
|
||||
delete messages['tmp-test.log'];
|
||||
log4js.configure('test/with-log-rolling.json');
|
||||
assert.equal(messages.watchedFile, 'tmp-test.log');
|
||||
},
|
||||
'should handle an object or a file name': function(args) {
|
||||
var log4js = args[0],
|
||||
messages = args[1],
|
||||
config = {
|
||||
"appenders": [
|
||||
{
|
||||
"type" : "file",
|
||||
"filename" : "cheesy-wotsits.log",
|
||||
"maxLogSize" : 1024,
|
||||
"backups" : 3,
|
||||
"pollInterval" : 15
|
||||
}
|
||||
]
|
||||
};
|
||||
delete messages['cheesy-wotsits.log'];
|
||||
log4js.configure(config);
|
||||
assert.equal(messages.watchedFile, 'cheesy-wotsits.log');
|
||||
}
|
||||
},
|
||||
|
||||
'with no appenders defined' : {
|
||||
topic: function() {
|
||||
var logger
|
||||
, message
|
||||
, log4js = sandbox.require(
|
||||
'../lib/log4js'
|
||||
, {
|
||||
globals: {
|
||||
console: {
|
||||
log: function(msg) {
|
||||
message = msg;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
);
|
||||
logger = log4js.getLogger("some-logger");
|
||||
logger.debug("This is a test");
|
||||
return message;
|
||||
},
|
||||
'should default to the console appender': function(message) {
|
||||
assert.isTrue(/This is a test$/.test(message));
|
||||
}
|
||||
},
|
||||
|
||||
'addAppender' : {
|
||||
topic: function() {
|
||||
var log4js = require('../lib/log4js');
|
||||
log4js.clearAppenders();
|
||||
return log4js;
|
||||
},
|
||||
'without a category': {
|
||||
'should register the function as a listener for all loggers': function (log4js) {
|
||||
var appenderEvent, appender = function(evt) { appenderEvent = evt; }, logger = log4js.getLogger("tests");
|
||||
log4js.addAppender(appender);
|
||||
logger.debug("This is a test");
|
||||
assert.equal(appenderEvent.data[0], "This is a test");
|
||||
assert.equal(appenderEvent.categoryName, "tests");
|
||||
assert.equal(appenderEvent.level.toString(), "DEBUG");
|
||||
},
|
||||
'should also register as an appender for loggers if an appender for that category is defined': function (log4js) {
|
||||
var otherEvent, appenderEvent, cheeseLogger;
|
||||
log4js.addAppender(function (evt) { appenderEvent = evt; });
|
||||
log4js.addAppender(function (evt) { otherEvent = evt; }, 'cheese');
|
||||
|
||||
cheeseLogger = log4js.getLogger('cheese');
|
||||
cheeseLogger.debug('This is a test');
|
||||
assert.deepEqual(appenderEvent, otherEvent);
|
||||
assert.equal(otherEvent.data[0], 'This is a test');
|
||||
assert.equal(otherEvent.categoryName, 'cheese');
|
||||
|
||||
otherEvent = undefined;
|
||||
appenderEvent = undefined;
|
||||
log4js.getLogger('pants').debug("this should not be propagated to otherEvent");
|
||||
assert.isUndefined(otherEvent);
|
||||
assert.equal(appenderEvent.data[0], "this should not be propagated to otherEvent");
|
||||
}
|
||||
},
|
||||
|
||||
'with a category': {
|
||||
'should only register the function as a listener for that category': function(log4js) {
|
||||
var appenderEvent, appender = function(evt) { appenderEvent = evt; }, logger = log4js.getLogger("tests");
|
||||
log4js.addAppender(appender, 'tests');
|
||||
logger.debug('this is a category test');
|
||||
assert.equal(appenderEvent.data[0], 'this is a category test');
|
||||
|
||||
appenderEvent = undefined;
|
||||
log4js.getLogger('some other category').debug('Cheese');
|
||||
assert.isUndefined(appenderEvent);
|
||||
}
|
||||
},
|
||||
|
||||
'with multiple categories': {
|
||||
'should register the function as a listener for all the categories': function(log4js) {
|
||||
var appenderEvent, appender = function(evt) { appenderEvent = evt; }, logger = log4js.getLogger('tests');
|
||||
log4js.addAppender(appender, 'tests', 'biscuits');
|
||||
|
||||
logger.debug('this is a test');
|
||||
assert.equal(appenderEvent.data[0], 'this is a test');
|
||||
appenderEvent = undefined;
|
||||
|
||||
var otherLogger = log4js.getLogger('biscuits');
|
||||
otherLogger.debug("mmm... garibaldis");
|
||||
assert.equal(appenderEvent.data[0], "mmm... garibaldis");
|
||||
|
||||
appenderEvent = undefined;
|
||||
|
||||
log4js.getLogger("something else").debug("pants");
|
||||
assert.isUndefined(appenderEvent);
|
||||
},
|
||||
'should register the function when the list of categories is an array': function(log4js) {
|
||||
var appenderEvent, appender = function(evt) { appenderEvent = evt; };
|
||||
log4js.addAppender(appender, ['tests', 'pants']);
|
||||
|
||||
log4js.getLogger('tests').debug('this is a test');
|
||||
assert.equal(appenderEvent.data[0], 'this is a test');
|
||||
|
||||
appenderEvent = undefined;
|
||||
|
||||
log4js.getLogger('pants').debug("big pants");
|
||||
assert.equal(appenderEvent.data[0], "big pants");
|
||||
|
||||
appenderEvent = undefined;
|
||||
|
||||
log4js.getLogger("something else").debug("pants");
|
||||
assert.isUndefined(appenderEvent);
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
'default setup': {
|
||||
topic: function() {
|
||||
var pathsChecked = [],
|
||||
message,
|
||||
logger,
|
||||
modulePath = require('path').normalize(__dirname + '/../lib/log4js.json'),
|
||||
fakeFS = {
|
||||
readFileSync: function (file, encoding) {
|
||||
assert.equal(file, modulePath);
|
||||
assert.equal(encoding, 'utf8');
|
||||
return '{ "appenders" : [ { "type": "console", "layout": { "type": "messagePassThrough" }} ] }';
|
||||
},
|
||||
statSync: function (path) {
|
||||
pathsChecked.push(path);
|
||||
if (path === modulePath) {
|
||||
return true;
|
||||
} else {
|
||||
throw new Error("no such file");
|
||||
}
|
||||
}
|
||||
},
|
||||
fakeConsole = {
|
||||
log : function (msg) { message = msg; },
|
||||
info: this.log,
|
||||
warn: this.log,
|
||||
debug: this.log,
|
||||
error: this.log
|
||||
},
|
||||
log4js = sandbox.require(
|
||||
'../lib/log4js',
|
||||
{
|
||||
requires: {
|
||||
'fs': fakeFS
|
||||
},
|
||||
globals: {
|
||||
'console': fakeConsole
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
logger = log4js.getLogger('a-test');
|
||||
logger.debug("this is a test");
|
||||
|
||||
return [ pathsChecked, message, modulePath ];
|
||||
},
|
||||
|
||||
'should check current directory, require paths, and finally the module dir for log4js.json': function(args) {
|
||||
var pathsChecked = args[0];
|
||||
expectedPaths = ['log4js.json'].concat(
|
||||
require.paths.map(function(item) {
|
||||
return item + '/log4js.json';
|
||||
}),
|
||||
args[2]
|
||||
);
|
||||
assert.deepEqual(pathsChecked, expectedPaths);
|
||||
},
|
||||
|
||||
'should configure log4js from first log4js.json found': function(args) {
|
||||
var message = args[1];
|
||||
assert.equal(message, 'this is a test');
|
||||
}
|
||||
},
|
||||
|
||||
'logLevelFilter': {
|
||||
topic: function() {
|
||||
var log4js = require('../lib/log4js'), logEvents = [], logger;
|
||||
log4js.clearAppenders();
|
||||
log4js.addAppender(log4js.logLevelFilter('ERROR', function(evt) { logEvents.push(evt); }), "logLevelTest");
|
||||
logger = log4js.getLogger("logLevelTest");
|
||||
logger.debug('this should not trigger an event');
|
||||
logger.warn('neither should this');
|
||||
logger.error('this should, though');
|
||||
logger.fatal('so should this');
|
||||
return logEvents;
|
||||
},
|
||||
'should only pass log events greater than or equal to its own level' : function(logEvents) {
|
||||
assert.length(logEvents, 2);
|
||||
assert.equal(logEvents[0].data[0], 'this should, though');
|
||||
assert.equal(logEvents[1].data[0], 'so should this');
|
||||
}
|
||||
},
|
||||
|
||||
'console' : {
|
||||
topic: function() {
|
||||
var fakeConsole = {}
|
||||
, logEvents = []
|
||||
, log4js;
|
||||
|
||||
['trace','debug','log','info','warn','error'].forEach(function(fn) {
|
||||
fakeConsole[fn] = function() {
|
||||
throw new Error("this should not be called.");
|
||||
};
|
||||
});
|
||||
|
||||
log4js = sandbox.require(
|
||||
'../lib/log4js'
|
||||
, {
|
||||
globals: {
|
||||
console: fakeConsole
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
log4js.clearAppenders();
|
||||
log4js.addAppender(function(evt) {
|
||||
logEvents.push(evt);
|
||||
});
|
||||
|
||||
fakeConsole.log("Some debug message someone put in a module");
|
||||
fakeConsole.debug("Some debug");
|
||||
fakeConsole.error("An error");
|
||||
fakeConsole.info("some info");
|
||||
fakeConsole.warn("a warning");
|
||||
|
||||
fakeConsole.log("cheese (%s) and biscuits (%s)", "gouda", "garibaldis");
|
||||
fakeConsole.log({ lumpy: "tapioca" });
|
||||
fakeConsole.log("count %d", 123);
|
||||
fakeConsole.log("stringify %j", { lumpy: "tapioca" });
|
||||
|
||||
return logEvents;
|
||||
},
|
||||
'should replace console.log methods with log4js ones': function(logEvents) {
|
||||
assert.equal(logEvents[0].data[0], "Some debug message someone put in a module");
|
||||
assert.equal(logEvents[0].level.toString(), "INFO");
|
||||
assert.equal(logEvents[1].data[0], "Some debug");
|
||||
assert.equal(logEvents[1].level.toString(), "DEBUG");
|
||||
assert.equal(logEvents[2].data[0], "An error");
|
||||
assert.equal(logEvents[2].level.toString(), "ERROR");
|
||||
assert.equal(logEvents[3].data[0], "some info");
|
||||
assert.equal(logEvents[3].level.toString(), "INFO");
|
||||
assert.equal(logEvents[4].data[0], "a warning");
|
||||
assert.equal(logEvents[4].level.toString(), "WARN");
|
||||
}
|
||||
},
|
||||
'configuration persistence' : {
|
||||
'should maintain appenders between requires': function () {
|
||||
var logEvent, firstLog4js = require('../lib/log4js'), secondLog4js;
|
||||
firstLog4js.clearAppenders();
|
||||
firstLog4js.addAppender(function(evt) { logEvent = evt; });
|
||||
|
||||
secondLog4js = require('../lib/log4js');
|
||||
secondLog4js.getLogger().info("This should go to the appender defined in firstLog4js");
|
||||
|
||||
assert.equal(logEvent.data[0], "This should go to the appender defined in firstLog4js");
|
||||
}
|
||||
}
|
||||
|
||||
}).export(module);
|
||||
82 test/logglyAppender-test.js Normal file
@@ -0,0 +1,82 @@
"use strict";
var vows = require('vows')
, assert = require('assert')
, log4js = require('../lib/log4js')
, sandbox = require('sandboxed-module')
;

function setupLogging(category, options) {
  var msgs = [];

  var fakeLoggly = {
    createClient: function (options) {
      return {
        config: options,
        log: function (msg, tags) {
          msgs.push({
            msg: msg,
            tags: tags
          });
        }
      };
    }
  };

  var fakeLayouts = {
    layout: function(type, config) {
      this.type = type;
      this.config = config;
      return log4js.layouts.messagePassThroughLayout;
    },
    basicLayout: log4js.layouts.basicLayout,
    messagePassThroughLayout: log4js.layouts.messagePassThroughLayout
  };

  var fakeConsole = {
    errors: [],
    error: function(msg, value) {
      this.errors.push({ msg: msg, value: value });
    }
  };

  var logglyModule = sandbox.require('../lib/appenders/loggly', {
    requires: {
      'loggly': fakeLoggly,
      '../layouts': fakeLayouts
    },
    globals: {
      console: fakeConsole
    }
  });

  log4js.addAppender(logglyModule.configure(options), category);

  return {
    logger: log4js.getLogger(category),
    loggly: fakeLoggly,
    layouts: fakeLayouts,
    console: fakeConsole,
    results: msgs
  };
}

log4js.clearAppenders();
vows.describe('log4js logglyAppender').addBatch({
  'minimal config': {
    topic: function() {
      var setup = setupLogging('loggly', {
        token: 'your-really-long-input-token',
        subdomain: 'your-subdomain',
        tags: ['loggly-tag1', 'loggly-tag2', 'loggly-tagn']
      });

      setup.logger.log('trace', 'Log event #1');
      return setup;
    },
    'there should be one message only': function (topic) {
      //console.log('topic', topic);
      assert.equal(topic.results.length, 1);
    }
  }

}).export(module);
311 test/multiprocess-test.js Normal file
@@ -0,0 +1,311 @@
|
||||
"use strict";
|
||||
var vows = require('vows')
|
||||
, sandbox = require('sandboxed-module')
|
||||
, assert = require('assert')
|
||||
;
|
||||
|
||||
function makeFakeNet() {
|
||||
return {
|
||||
logEvents: [],
|
||||
data: [],
|
||||
cbs: {},
|
||||
createConnectionCalled: 0,
|
||||
fakeAppender: function(logEvent) {
|
||||
this.logEvents.push(logEvent);
|
||||
},
|
||||
createConnection: function(port, host) {
|
||||
var fakeNet = this;
|
||||
this.port = port;
|
||||
this.host = host;
|
||||
this.createConnectionCalled += 1;
|
||||
return {
|
||||
on: function(evt, cb) {
|
||||
fakeNet.cbs[evt] = cb;
|
||||
},
|
||||
write: function(data, encoding) {
|
||||
fakeNet.data.push(data);
|
||||
fakeNet.encoding = encoding;
|
||||
},
|
||||
end: function() {
|
||||
fakeNet.closeCalled = true;
|
||||
}
|
||||
};
|
||||
},
|
||||
createServer: function(cb) {
|
||||
var fakeNet = this;
|
||||
cb({
|
||||
remoteAddress: '1.2.3.4',
|
||||
remotePort: '1234',
|
||||
setEncoding: function(encoding) {
|
||||
fakeNet.encoding = encoding;
|
||||
},
|
||||
on: function(event, cb) {
|
||||
fakeNet.cbs[event] = cb;
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
listen: function(port, host) {
|
||||
fakeNet.port = port;
|
||||
fakeNet.host = host;
|
||||
}
|
||||
};
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
vows.describe('Multiprocess Appender').addBatch({
|
||||
'worker': {
|
||||
topic: function() {
|
||||
var fakeNet = makeFakeNet(),
|
||||
appender = sandbox.require(
|
||||
'../lib/appenders/multiprocess',
|
||||
{
|
||||
requires: {
|
||||
'net': fakeNet
|
||||
}
|
||||
}
|
||||
).appender({ mode: 'worker', loggerPort: 1234, loggerHost: 'pants' });
|
||||
|
||||
//don't need a proper log event for the worker tests
|
||||
appender('before connect');
|
||||
fakeNet.cbs.connect();
|
||||
appender('after connect');
|
||||
fakeNet.cbs.close(true);
|
||||
appender('after error, before connect');
|
||||
fakeNet.cbs.connect();
|
||||
appender('after error, after connect');
|
||||
appender(new Error('Error test'));
|
||||
|
||||
return fakeNet;
|
||||
},
|
||||
'should open a socket to the loggerPort and loggerHost': function(net) {
|
||||
assert.equal(net.port, 1234);
|
||||
assert.equal(net.host, 'pants');
|
||||
},
|
||||
'should buffer messages written before socket is connected': function(net) {
|
||||
assert.equal(net.data[0], JSON.stringify('before connect'));
|
||||
},
|
||||
'should write log messages to socket as json strings with a terminator string': function(net) {
|
||||
assert.equal(net.data[0], JSON.stringify('before connect'));
|
||||
assert.equal(net.data[1], '__LOG4JS__');
|
||||
assert.equal(net.data[2], JSON.stringify('after connect'));
|
||||
assert.equal(net.data[3], '__LOG4JS__');
|
||||
assert.equal(net.encoding, 'utf8');
|
||||
},
|
||||
'should attempt to re-open the socket on error': function(net) {
|
||||
assert.equal(net.data[4], JSON.stringify('after error, before connect'));
|
||||
assert.equal(net.data[5], '__LOG4JS__');
|
||||
assert.equal(net.data[6], JSON.stringify('after error, after connect'));
|
||||
assert.equal(net.data[7], '__LOG4JS__');
|
||||
assert.equal(net.createConnectionCalled, 2);
|
||||
},
|
||||
'should serialize an Error correctly': function(net) {
|
||||
assert(JSON.parse(net.data[8]).stack, "Expected:\n\n" + net.data[8] + "\n\n to have a 'stack' property");
|
||||
var actual = JSON.parse(net.data[8]).stack;
|
||||
var expectedRegex = /^Error: Error test/;
|
||||
assert(actual.match(expectedRegex), "Expected: \n\n " + actual + "\n\n to match " + expectedRegex);
|
||||
|
||||
}
|
||||
},
|
||||
'worker with timeout': {
|
||||
topic: function() {
|
||||
var fakeNet = makeFakeNet(),
|
||||
appender = sandbox.require(
|
||||
'../lib/appenders/multiprocess',
|
||||
{
|
||||
requires: {
|
||||
'net': fakeNet
|
||||
}
|
||||
}
|
||||
).appender({ mode: 'worker' });
|
||||
|
||||
//don't need a proper log event for the worker tests
|
||||
appender('before connect');
|
||||
fakeNet.cbs.connect();
|
||||
appender('after connect');
|
||||
fakeNet.cbs.timeout();
|
||||
appender('after timeout, before close');
|
||||
fakeNet.cbs.close();
|
||||
appender('after close, before connect');
|
||||
fakeNet.cbs.connect();
|
||||
appender('after close, after connect');
|
||||
|
||||
return fakeNet;
|
||||
},
|
||||
'should attempt to re-open the socket': function(net) {
|
||||
//skipping the __LOG4JS__ separators
|
||||
assert.equal(net.data[0], JSON.stringify('before connect'));
|
||||
assert.equal(net.data[2], JSON.stringify('after connect'));
|
||||
assert.equal(net.data[4], JSON.stringify('after timeout, before close'));
|
||||
assert.equal(net.data[6], JSON.stringify('after close, before connect'));
|
||||
assert.equal(net.data[8], JSON.stringify('after close, after connect'));
|
||||
assert.equal(net.createConnectionCalled, 2);
|
||||
}
|
||||
},
|
||||
'worker defaults': {
|
||||
topic: function() {
|
||||
var fakeNet = makeFakeNet(),
|
||||
appender = sandbox.require(
|
||||
'../lib/appenders/multiprocess',
|
||||
{
|
||||
requires: {
|
||||
'net': fakeNet
|
||||
}
|
||||
}
|
||||
).appender({ mode: 'worker' });
|
||||
|
||||
return fakeNet;
|
||||
},
|
||||
'should open a socket to localhost:5000': function(net) {
|
||||
assert.equal(net.port, 5000);
|
||||
assert.equal(net.host, 'localhost');
|
||||
}
|
||||
},
|
||||
'master': {
|
||||
topic: function() {
|
||||
var fakeNet = makeFakeNet(),
|
||||
appender = sandbox.require(
|
||||
'../lib/appenders/multiprocess',
|
||||
{
|
||||
requires: {
|
||||
'net': fakeNet
|
||||
}
|
||||
}
|
||||
).appender({ mode: 'master',
|
||||
loggerHost: 'server',
|
||||
loggerPort: 1234,
|
||||
actualAppender: fakeNet.fakeAppender.bind(fakeNet)
|
||||
});
|
||||
|
||||
appender('this should be sent to the actual appender directly');
|
||||
|
||||
return fakeNet;
|
||||
},
|
||||
'should listen for log messages on loggerPort and loggerHost': function(net) {
|
||||
assert.equal(net.port, 1234);
|
||||
assert.equal(net.host, 'server');
|
||||
},
|
||||
'should return the underlying appender': function(net) {
|
||||
assert.equal(net.logEvents[0], 'this should be sent to the actual appender directly');
|
||||
},
|
||||
'when a client connects': {
|
||||
topic: function(net) {
|
||||
var logString = JSON.stringify(
|
||||
{ level: { level: 10000, levelStr: 'DEBUG' }
|
||||
, data: ['some debug']}
|
||||
) + '__LOG4JS__';
|
||||
|
||||
net.cbs.data(
|
||||
JSON.stringify(
|
||||
{ level: { level: 40000, levelStr: 'ERROR' }
|
||||
, data: ['an error message'] }
|
||||
) + '__LOG4JS__'
|
||||
);
|
||||
net.cbs.data(logString.substring(0, 10));
|
||||
net.cbs.data(logString.substring(10));
|
||||
net.cbs.data(logString + logString + logString);
|
||||
net.cbs.end(
|
||||
JSON.stringify(
|
||||
{ level: { level: 50000, levelStr: 'FATAL' }
|
||||
, data: ["that's all folks"] }
|
||||
) + '__LOG4JS__'
|
||||
);
|
||||
net.cbs.data('bad message__LOG4JS__');
|
||||
return net;
|
||||
},
|
||||
'should parse log messages into log events and send to appender': function(net) {
|
||||
assert.equal(net.logEvents[1].level.toString(), 'ERROR');
|
||||
assert.equal(net.logEvents[1].data[0], 'an error message');
|
||||
assert.equal(net.logEvents[1].remoteAddress, '1.2.3.4');
|
||||
assert.equal(net.logEvents[1].remotePort, '1234');
|
||||
},
|
||||
'should parse log messages split into multiple chunks': function(net) {
|
||||
assert.equal(net.logEvents[2].level.toString(), 'DEBUG');
|
||||
assert.equal(net.logEvents[2].data[0], 'some debug');
|
||||
assert.equal(net.logEvents[2].remoteAddress, '1.2.3.4');
|
||||
assert.equal(net.logEvents[2].remotePort, '1234');
|
||||
},
|
||||
'should parse multiple log messages in a single chunk': function(net) {
|
||||
assert.equal(net.logEvents[3].data[0], 'some debug');
|
||||
assert.equal(net.logEvents[4].data[0], 'some debug');
|
||||
assert.equal(net.logEvents[5].data[0], 'some debug');
|
||||
},
|
||||
'should handle log messages sent as part of end event': function(net) {
|
||||
assert.equal(net.logEvents[6].data[0], "that's all folks");
|
||||
},
|
||||
'should handle unparseable log messages': function(net) {
|
||||
assert.equal(net.logEvents[7].level.toString(), 'ERROR');
|
||||
assert.equal(net.logEvents[7].categoryName, 'log4js');
|
||||
assert.equal(net.logEvents[7].data[0], 'Unable to parse log:');
|
||||
assert.equal(net.logEvents[7].data[1], 'bad message');
|
||||
}
|
||||
}
|
||||
},
|
||||
'master defaults': {
|
||||
topic: function() {
|
||||
var fakeNet = makeFakeNet(),
|
||||
appender = sandbox.require(
|
||||
'../lib/appenders/multiprocess',
|
||||
{
|
||||
requires: {
|
||||
'net': fakeNet
|
||||
}
|
||||
}
|
||||
).appender({ mode: 'master' });
|
||||
|
||||
return fakeNet;
|
||||
},
|
||||
'should listen for log messages on localhost:5000': function(net) {
|
||||
assert.equal(net.port, 5000);
|
||||
assert.equal(net.host, 'localhost');
|
||||
}
|
||||
}
|
||||
}).addBatch({
|
||||
'configure': {
|
||||
topic: function() {
|
||||
var results = {}
|
||||
, fakeNet = makeFakeNet()
|
||||
, appender = sandbox.require(
|
||||
'../lib/appenders/multiprocess',
|
||||
{
|
||||
requires: {
|
||||
'net': fakeNet,
|
||||
'../log4js': {
|
||||
loadAppender: function(app) {
|
||||
results.appenderLoaded = app;
|
||||
},
|
||||
appenderMakers: {
|
||||
'madeupappender': function(config, options) {
|
||||
results.config = config;
|
||||
results.options = options;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
).configure(
|
||||
{
|
||||
mode: 'master',
|
||||
appender: {
|
||||
type: 'madeupappender',
|
||||
cheese: 'gouda'
|
||||
}
|
||||
},
|
||||
{ crackers: 'jacobs' }
|
||||
);
|
||||
|
||||
return results;
|
||||
|
||||
},
|
||||
'should load underlying appender for master': function(results) {
|
||||
assert.equal(results.appenderLoaded, 'madeupappender');
|
||||
},
|
||||
'should pass config to underlying appender': function(results) {
|
||||
assert.equal(results.config.cheese, 'gouda');
|
||||
},
|
||||
'should pass options to underlying appender': function(results) {
|
||||
assert.equal(results.options.crackers, 'jacobs');
|
||||
}
|
||||
}
|
||||
}).exportTo(module);
|
||||
261 test/nolog-test.js Normal file
@@ -0,0 +1,261 @@
|
||||
"use strict";
|
||||
var vows = require('vows')
|
||||
, assert = require('assert')
|
||||
, levels = require('../lib/levels');
|
||||
|
||||
function MockLogger() {
|
||||
|
||||
var that = this;
|
||||
this.messages = [];
|
||||
|
||||
this.log = function(level, message, exception) {
|
||||
that.messages.push({ level: level, message: message });
|
||||
};
|
||||
|
||||
this.isLevelEnabled = function(level) {
|
||||
return level.isGreaterThanOrEqualTo(that.level);
|
||||
};
|
||||
|
||||
this.level = levels.TRACE;
|
||||
|
||||
}
|
||||
|
||||
function MockRequest(remoteAddr, method, originalUrl) {
|
||||
|
||||
this.socket = { remoteAddress: remoteAddr };
|
||||
this.originalUrl = originalUrl;
|
||||
this.method = method;
|
||||
this.httpVersionMajor = '5';
|
||||
this.httpVersionMinor = '0';
|
||||
this.headers = {};
|
||||
}
|
||||
|
||||
function MockResponse(statusCode) {
|
||||
|
||||
this.statusCode = statusCode;
|
||||
|
||||
this.end = function(chunk, encoding) {
|
||||
|
||||
};
|
||||
}
|
||||
|
||||
vows.describe('log4js connect logger').addBatch({
|
||||
'getConnectLoggerModule': {
|
||||
topic: function() {
|
||||
var clm = require('../lib/connect-logger');
|
||||
return clm;
|
||||
},
|
||||
|
||||
'should return a "connect logger" factory' : function(clm) {
|
||||
assert.isObject(clm);
|
||||
},
|
||||
|
||||
'nolog String' : {
|
||||
topic: function(clm) {
|
||||
var ml = new MockLogger();
|
||||
var cl = clm.connectLogger(ml, { nolog: "\\.gif" });
|
||||
return {cl: cl, ml: ml};
|
||||
},
|
||||
|
||||
'check unmatch url request': {
|
||||
topic: function(d){
|
||||
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.png'); // not gif
|
||||
var res = new MockResponse(200);
|
||||
d.cl(req, res, function() { });
|
||||
res.end('chunk', 'encoding');
|
||||
return d.ml.messages;
|
||||
},
|
||||
'check message': function(messages){
|
||||
assert.isArray(messages);
|
||||
assert.equal(messages.length, 1);
|
||||
assert.ok(levels.INFO.isEqualTo(messages[0].level));
|
||||
assert.include(messages[0].message, 'GET');
|
||||
assert.include(messages[0].message, 'http://url');
|
||||
assert.include(messages[0].message, 'my.remote.addr');
|
||||
assert.include(messages[0].message, '200');
|
||||
messages.pop();
|
||||
}
|
||||
},
|
||||
|
||||
'check match url request': {
|
||||
topic: function(d) {
|
||||
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.gif'); // gif
|
||||
var res = new MockResponse(200);
|
||||
d.cl(req, res, function() { });
|
||||
res.end('chunk', 'encoding');
|
||||
return d.ml.messages;
|
||||
},
|
||||
'check message': function(messages) {
|
||||
assert.isArray(messages);
|
||||
assert.equal(messages.length, 0);
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
'nolog Strings' : {
|
||||
topic: function(clm) {
|
||||
var ml = new MockLogger();
|
||||
var cl = clm.connectLogger(ml, {nolog: "\\.gif|\\.jpe?g"});
|
||||
return {cl: cl, ml: ml};
|
||||
},
|
||||
|
||||
'check unmatch url request (png)': {
|
||||
topic: function(d){
|
||||
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.png'); // not gif
|
||||
var res = new MockResponse(200);
|
||||
d.cl(req, res, function() { });
|
||||
res.end('chunk', 'encoding');
|
||||
return d.ml.messages;
|
||||
},
|
||||
'check message': function(messages){
|
||||
assert.isArray(messages);
|
||||
assert.equal(messages.length, 1);
|
||||
assert.ok(levels.INFO.isEqualTo(messages[0].level));
|
||||
assert.include(messages[0].message, 'GET');
|
||||
assert.include(messages[0].message, 'http://url');
|
||||
assert.include(messages[0].message, 'my.remote.addr');
|
||||
assert.include(messages[0].message, '200');
|
||||
messages.pop();
|
||||
}
|
||||
},
|
||||
|
||||
'check match url request (gif)': {
|
||||
topic: function(d) {
|
||||
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.gif'); // gif
|
||||
var res = new MockResponse(200);
|
||||
d.cl(req, res, function() { });
|
||||
res.end('chunk', 'encoding');
|
||||
return d.ml.messages;
|
||||
},
|
||||
'check message': function(messages) {
|
||||
assert.isArray(messages);
|
||||
assert.equal(messages.length, 0);
|
||||
}
|
||||
},
|
||||
'check match url request (jpeg)': {
|
||||
topic: function(d) {
|
||||
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.jpeg'); // gif
|
||||
var res = new MockResponse(200);
|
||||
d.cl(req, res, function() { });
|
||||
res.end('chunk', 'encoding');
|
||||
return d.ml.messages;
|
||||
},
|
||||
'check message': function(messages) {
|
||||
assert.isArray(messages);
|
||||
assert.equal(messages.length, 0);
|
||||
}
|
||||
}
|
||||
},
|
||||
'nolog Array<String>' : {
|
||||
topic: function(clm) {
|
||||
var ml = new MockLogger();
|
||||
var cl = clm.connectLogger(ml, {nolog: ["\\.gif", "\\.jpe?g"]});
|
||||
return {cl: cl, ml: ml};
|
||||
},
|
||||
|
||||
'check unmatch url request (png)': {
|
||||
topic: function(d){
|
||||
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.png'); // not gif
|
||||
var res = new MockResponse(200);
|
||||
d.cl(req, res, function() { });
|
||||
res.end('chunk', 'encoding');
|
||||
return d.ml.messages;
|
||||
},
|
||||
'check message': function(messages){
|
||||
assert.isArray(messages);
|
||||
assert.equal(messages.length, 1);
|
||||
assert.ok(levels.INFO.isEqualTo(messages[0].level));
|
||||
assert.include(messages[0].message, 'GET');
|
||||
assert.include(messages[0].message, 'http://url');
|
||||
assert.include(messages[0].message, 'my.remote.addr');
|
||||
assert.include(messages[0].message, '200');
|
||||
messages.pop();
|
||||
}
|
||||
},
|
||||
|
||||
'check match url request (gif)': {
|
||||
topic: function(d) {
|
||||
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.gif'); // gif
|
||||
var res = new MockResponse(200);
|
||||
d.cl(req, res, function() { });
|
||||
res.end('chunk', 'encoding');
|
||||
return d.ml.messages;
|
||||
},
|
||||
'check message': function(messages) {
|
||||
assert.isArray(messages);
|
||||
assert.equal(messages.length, 0);
|
||||
}
|
||||
},
|
||||
|
||||
'check match url request (jpeg)': {
|
||||
topic: function(d) {
|
||||
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.jpeg'); // gif
|
||||
var res = new MockResponse(200);
|
||||
d.cl(req, res, function() { });
|
||||
res.end('chunk', 'encoding');
|
||||
return d.ml.messages;
|
||||
},
|
||||
'check message': function(messages) {
|
||||
assert.isArray(messages);
|
||||
assert.equal(messages.length, 0);
|
||||
}
|
||||
},
|
||||
},
|
||||
'nolog RegExp' : {
|
||||
topic: function(clm) {
|
||||
var ml = new MockLogger();
|
||||
var cl = clm.connectLogger(ml, {nolog: /\.gif|\.jpe?g/});
|
||||
return {cl: cl, ml: ml};
|
||||
},
|
||||
|
||||
'check unmatch url request (png)': {
|
||||
topic: function(d){
|
||||
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.png'); // not gif
|
||||
var res = new MockResponse(200);
|
||||
d.cl(req, res, function() { });
|
||||
res.end('chunk', 'encoding');
|
||||
return d.ml.messages;
|
||||
},
|
||||
'check message': function(messages){
|
||||
assert.isArray(messages);
|
||||
assert.equal(messages.length, 1);
|
||||
assert.ok(levels.INFO.isEqualTo(messages[0].level));
|
||||
assert.include(messages[0].message, 'GET');
|
||||
assert.include(messages[0].message, 'http://url');
|
||||
assert.include(messages[0].message, 'my.remote.addr');
|
||||
assert.include(messages[0].message, '200');
|
||||
messages.pop();
|
||||
}
|
||||
},
|
||||
|
||||
'check match url request (gif)': {
|
||||
topic: function(d) {
|
||||
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.gif'); // gif
|
||||
var res = new MockResponse(200);
|
||||
d.cl(req, res, function() { });
|
||||
res.end('chunk', 'encoding');
|
||||
return d.ml.messages;
|
||||
},
|
||||
'check message': function(messages) {
|
||||
assert.isArray(messages);
|
||||
assert.equal(messages.length, 0);
|
||||
}
|
||||
},
|
||||
|
||||
'check match url request (jpeg)': {
|
||||
topic: function(d) {
|
||||
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.jpeg'); // gif
|
||||
var res = new MockResponse(200);
|
||||
d.cl(req, res, function() { });
|
||||
res.end('chunk', 'encoding');
|
||||
return d.ml.messages;
|
||||
},
|
||||
'check message': function(messages) {
|
||||
assert.isArray(messages);
|
||||
assert.equal(messages.length, 0);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}).export(module);
|
||||
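For orientation, a minimal sketch (not part of this diff) of wiring the factory tested above into a connect/express app; the 'http' category name and the mounting line are assumptions:

var log4js = require('../lib/log4js');
var clm = require('../lib/connect-logger');

// Build a connect/express middleware that logs each request at INFO,
// but stays silent for any URL matching one of the nolog patterns.
var accessLog = clm.connectLogger(log4js.getLogger('http'), {
  nolog: ['\\.gif', '\\.jpe?g']
});

// app.use(accessLog);   // mount it in a connect or express app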
340 test/reloadConfiguration-test.js Normal file
@@ -0,0 +1,340 @@
|
||||
"use strict";
|
||||
var vows = require('vows')
|
||||
, assert = require('assert')
|
||||
, sandbox = require('sandboxed-module');
|
||||
|
||||
function setupConsoleTest() {
|
||||
var fakeConsole = {}
|
||||
, logEvents = []
|
||||
, log4js;
|
||||
|
||||
['trace','debug','log','info','warn','error'].forEach(function(fn) {
|
||||
fakeConsole[fn] = function() {
|
||||
throw new Error("this should not be called.");
|
||||
};
|
||||
});
|
||||
|
||||
log4js = sandbox.require(
|
||||
'../lib/log4js',
|
||||
{
|
||||
globals: {
|
||||
console: fakeConsole
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
log4js.clearAppenders();
|
||||
log4js.addAppender(function(evt) {
|
||||
logEvents.push(evt);
|
||||
});
|
||||
|
||||
return { log4js: log4js, logEvents: logEvents, fakeConsole: fakeConsole };
|
||||
}
|
||||
|
||||
vows.describe('reload configuration').addBatch({
|
||||
'with config file changing' : {
|
||||
topic: function() {
|
||||
var pathsChecked = [],
|
||||
logEvents = [],
|
||||
logger,
|
||||
modulePath = 'path/to/log4js.json',
|
||||
fakeFS = {
|
||||
lastMtime: Date.now(),
|
||||
config: {
|
||||
appenders: [
|
||||
{ type: 'console', layout: { type: 'messagePassThrough' } }
|
||||
],
|
||||
levels: { 'a-test' : 'INFO' }
|
||||
},
|
||||
readFileSync: function (file, encoding) {
|
||||
assert.equal(file, modulePath);
|
||||
assert.equal(encoding, 'utf8');
|
||||
return JSON.stringify(fakeFS.config);
|
||||
},
|
||||
statSync: function (path) {
|
||||
pathsChecked.push(path);
|
||||
if (path === modulePath) {
|
||||
fakeFS.lastMtime += 1;
|
||||
return { mtime: new Date(fakeFS.lastMtime) };
|
||||
} else {
|
||||
throw new Error("no such file");
|
||||
}
|
||||
}
|
||||
},
|
||||
fakeConsole = {
|
||||
'name': 'console',
|
||||
'appender': function () {
|
||||
return function(evt) { logEvents.push(evt); };
|
||||
},
|
||||
'configure': function (config) {
|
||||
return fakeConsole.appender();
|
||||
}
|
||||
},
|
||||
setIntervalCallback,
|
||||
fakeSetInterval = function(cb, timeout) {
|
||||
setIntervalCallback = cb;
|
||||
},
|
||||
log4js = sandbox.require(
|
||||
'../lib/log4js',
|
||||
{
|
||||
requires: {
|
||||
'fs': fakeFS,
|
||||
'./appenders/console': fakeConsole
|
||||
},
|
||||
globals: {
|
||||
'console': fakeConsole,
|
||||
'setInterval' : fakeSetInterval,
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
log4js.configure('path/to/log4js.json', { reloadSecs: 30 });
|
||||
logger = log4js.getLogger('a-test');
|
||||
logger.info("info1");
|
||||
logger.debug("debug2 - should be ignored");
|
||||
fakeFS.config.levels['a-test'] = "DEBUG";
|
||||
setIntervalCallback();
|
||||
logger.info("info3");
|
||||
logger.debug("debug4");
|
||||
|
||||
return logEvents;
|
||||
},
|
||||
'should configure log4js from first log4js.json found': function(logEvents) {
|
||||
assert.equal(logEvents[0].data[0], 'info1');
|
||||
assert.equal(logEvents[1].data[0], 'info3');
|
||||
assert.equal(logEvents[2].data[0], 'debug4');
|
||||
assert.equal(logEvents.length, 3);
|
||||
}
|
||||
},
|
||||
|
||||
'with config file staying the same' : {
|
||||
topic: function() {
|
||||
var pathsChecked = [],
|
||||
fileRead = 0,
|
||||
logEvents = [],
|
||||
logger,
|
||||
modulePath = require('path').normalize(__dirname + '/../lib/log4js.json'),
|
||||
mtime = new Date(),
|
||||
fakeFS = {
|
||||
config: {
|
||||
appenders: [
|
||||
{ type: 'console', layout: { type: 'messagePassThrough' } }
|
||||
],
|
||||
levels: { 'a-test' : 'INFO' }
|
||||
},
|
||||
readFileSync: function (file, encoding) {
|
||||
fileRead += 1;
|
||||
assert.isString(file);
|
||||
assert.equal(file, modulePath);
|
||||
assert.equal(encoding, 'utf8');
|
||||
return JSON.stringify(fakeFS.config);
|
||||
},
|
||||
statSync: function (path) {
|
||||
pathsChecked.push(path);
|
||||
if (path === modulePath) {
|
||||
return { mtime: mtime };
|
||||
} else {
|
||||
throw new Error("no such file");
|
||||
}
|
||||
}
|
||||
},
|
||||
fakeConsole = {
|
||||
'name': 'console',
|
||||
'appender': function () {
|
||||
return function(evt) { logEvents.push(evt); };
|
||||
},
|
||||
'configure': function (config) {
|
||||
return fakeConsole.appender();
|
||||
}
|
||||
},
|
||||
setIntervalCallback,
|
||||
fakeSetInterval = function(cb, timeout) {
|
||||
setIntervalCallback = cb;
|
||||
},
|
||||
log4js = sandbox.require(
|
||||
'../lib/log4js',
|
||||
{
|
||||
requires: {
|
||||
'fs': fakeFS,
|
||||
'./appenders/console': fakeConsole
|
||||
},
|
||||
globals: {
|
||||
'console': fakeConsole,
|
||||
'setInterval' : fakeSetInterval,
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
log4js.configure(modulePath, { reloadSecs: 3 });
|
||||
logger = log4js.getLogger('a-test');
|
||||
logger.info("info1");
|
||||
logger.debug("debug2 - should be ignored");
|
||||
setIntervalCallback();
|
||||
logger.info("info3");
|
||||
logger.debug("debug4");
|
||||
|
||||
return [ pathsChecked, logEvents, modulePath, fileRead ];
|
||||
},
|
||||
'should only read the configuration file once': function(args) {
|
||||
var fileRead = args[3];
|
||||
assert.equal(fileRead, 1);
|
||||
},
|
||||
'should configure log4js from first log4js.json found': function(args) {
|
||||
var logEvents = args[1];
|
||||
assert.equal(logEvents.length, 2);
|
||||
assert.equal(logEvents[0].data[0], 'info1');
|
||||
assert.equal(logEvents[1].data[0], 'info3');
|
||||
}
|
||||
},
|
||||
|
||||
'when config file is removed': {
|
||||
topic: function() {
|
||||
var pathsChecked = [],
|
||||
fileRead = 0,
|
||||
logEvents = [],
|
||||
logger,
|
||||
modulePath = require('path').normalize(__dirname + '/../lib/log4js.json'),
|
||||
mtime = new Date(),
|
||||
fakeFS = {
|
||||
config: {
|
||||
appenders: [
|
||||
{ type: 'console', layout: { type: 'messagePassThrough' } }
|
||||
],
|
||||
levels: { 'a-test' : 'INFO' }
|
||||
},
|
||||
readFileSync: function (file, encoding) {
|
||||
fileRead += 1;
|
||||
assert.isString(file);
|
||||
assert.equal(file, modulePath);
|
||||
assert.equal(encoding, 'utf8');
|
||||
return JSON.stringify(fakeFS.config);
|
||||
},
|
||||
statSync: function (path) {
|
||||
this.statSync = function() {
|
||||
throw new Error("no such file");
|
||||
};
|
||||
return { mtime: new Date() };
|
||||
}
|
||||
},
|
||||
fakeConsole = {
|
||||
'name': 'console',
|
||||
'appender': function () {
|
||||
return function(evt) { logEvents.push(evt); };
|
||||
},
|
||||
'configure': function (config) {
|
||||
return fakeConsole.appender();
|
||||
}
|
||||
},
|
||||
setIntervalCallback,
|
||||
fakeSetInterval = function(cb, timeout) {
|
||||
setIntervalCallback = cb;
|
||||
},
|
||||
log4js = sandbox.require(
|
||||
'../lib/log4js',
|
||||
{
|
||||
requires: {
|
||||
'fs': fakeFS,
|
||||
'./appenders/console': fakeConsole
|
||||
},
|
||||
globals: {
|
||||
'console': fakeConsole,
|
||||
'setInterval' : fakeSetInterval,
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
log4js.configure(modulePath, { reloadSecs: 3 });
|
||||
logger = log4js.getLogger('a-test');
|
||||
logger.info("info1");
|
||||
logger.debug("debug2 - should be ignored");
|
||||
setIntervalCallback();
|
||||
logger.info("info3");
|
||||
logger.debug("debug4");
|
||||
|
||||
return [ pathsChecked, logEvents, modulePath, fileRead ];
|
||||
},
|
||||
'should only read the configuration file once': function(args) {
|
||||
var fileRead = args[3];
|
||||
assert.equal(fileRead, 1);
|
||||
},
|
||||
'should not clear configuration when config file not found': function(args) {
|
||||
var logEvents = args[1];
|
||||
assert.equal(logEvents.length, 3);
|
||||
assert.equal(logEvents[0].data[0], 'info1');
|
||||
assert.equal(logEvents[1].level.toString(), 'WARN');
|
||||
assert.include(logEvents[1].data[0], 'Failed to load configuration file');
|
||||
assert.equal(logEvents[2].data[0], 'info3');
|
||||
}
|
||||
},
|
||||
|
||||
'when passed an object': {
|
||||
topic: function() {
|
||||
var test = setupConsoleTest();
|
||||
test.log4js.configure({}, { reloadSecs: 30 });
|
||||
return test.logEvents;
|
||||
},
|
||||
'should log a warning': function(events) {
|
||||
assert.equal(events[0].level.toString(), 'WARN');
|
||||
assert.equal(
|
||||
events[0].data[0],
|
||||
'Ignoring configuration reload parameter for "object" configuration.'
|
||||
);
|
||||
}
|
||||
},
|
||||
|
||||
'when called twice with reload options': {
|
||||
topic: function() {
|
||||
var modulePath = require('path').normalize(__dirname + '/../lib/log4js.json'),
|
||||
fakeFS = {
|
||||
readFileSync: function (file, encoding) {
|
||||
return JSON.stringify({});
|
||||
},
|
||||
statSync: function (path) {
|
||||
return { mtime: new Date() };
|
||||
}
|
||||
},
|
||||
fakeConsole = {
|
||||
'name': 'console',
|
||||
'appender': function () {
|
||||
return function(evt) { };
|
||||
},
|
||||
'configure': function (config) {
|
||||
return fakeConsole.appender();
|
||||
}
|
||||
},
|
||||
setIntervalCallback,
|
||||
intervalCleared = false,
|
||||
clearedId,
|
||||
fakeSetInterval = function(cb, timeout) {
|
||||
setIntervalCallback = cb;
|
||||
return 1234;
|
||||
},
|
||||
log4js = sandbox.require(
|
||||
'../lib/log4js',
|
||||
{
|
||||
requires: {
|
||||
'fs': fakeFS,
|
||||
'./appenders/console': fakeConsole
|
||||
},
|
||||
globals: {
|
||||
'console': fakeConsole,
|
||||
'setInterval' : fakeSetInterval,
|
||||
'clearInterval': function(interval) {
|
||||
intervalCleared = true;
|
||||
clearedId = interval;
|
||||
}
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
log4js.configure(modulePath, { reloadSecs: 3 });
|
||||
log4js.configure(modulePath, { reloadSecs: 15 });
|
||||
|
||||
return { cleared: intervalCleared, id: clearedId };
|
||||
},
|
||||
'should clear the previous interval': function(result) {
|
||||
assert.isTrue(result.cleared);
|
||||
assert.equal(result.id, 1234);
|
||||
}
|
||||
}
|
||||
}).exportTo(module);
|
||||
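Distilled from the topics above, a minimal usage sketch of the reload option (the config path is a placeholder):

var log4js = require('../lib/log4js');

// Watch the config file on a 30-second interval; when its mtime changes the
// file is re-read and levels/appenders are re-applied, as the first batch shows.
log4js.configure('path/to/log4js.json', { reloadSecs: 30 });

var logger = log4js.getLogger('a-test');
logger.info('logged according to whatever the file currently says');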
100 test/setLevel-asymmetry-test.js Normal file
@@ -0,0 +1,100 @@
|
||||
"use strict";
|
||||
/* jshint loopfunc: true */
|
||||
// This test shows an asymmetry between setLevel and isLevelEnabled
|
||||
// (in log4js-node@0.4.3 and earlier):
|
||||
// 1) setLevel("foo") works, but setLevel(log4js.levels.foo) silently
|
||||
// does not (sets the level to TRACE).
|
||||
// 2) isLevelEnabled("foo") works as does isLevelEnabled(log4js.levels.foo).
|
||||
//
|
||||
|
||||
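// Illustrative sketch (not part of the original file): before the fix, the
// two calls below disagreed even though they name the same level:
//
//   logger.setLevel('ERROR');              // level becomes ERROR
//   logger.setLevel(log4js.levels.ERROR);  // silently fell back to TRACE
//
// isLevelEnabled accepted either form, which is the asymmetry the batch
// built below now checks in both directions.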
// Basic set up
|
||||
var vows = require('vows');
|
||||
var assert = require('assert');
|
||||
var log4js = require('../lib/log4js');
|
||||
var logger = log4js.getLogger('test-setLevel-asymmetry');
|
||||
|
||||
// uncomment one or other of the following to see progress (or not) while running the tests
|
||||
// var showProgress = console.log;
|
||||
var showProgress = function() {};
|
||||
|
||||
|
||||
// Define the array of levels as string to iterate over.
|
||||
var strLevels= ['Trace','Debug','Info','Warn','Error','Fatal'];
|
||||
|
||||
var log4jsLevels =[];
|
||||
// populate an array with the log4js.levels that match the strLevels.
|
||||
// Would be nice if we could iterate over log4js.levels instead,
|
||||
// but log4js.levels.toLevel prevents that for now.
|
||||
strLevels.forEach(function(l) {
|
||||
log4jsLevels.push(log4js.levels.toLevel(l));
|
||||
});
|
||||
|
||||
|
||||
// We are going to iterate over this object's properties to define an exhaustive list of vows.
|
||||
var levelTypes = {
|
||||
'string': strLevels,
|
||||
'log4js.levels.level': log4jsLevels,
|
||||
};
|
||||
|
||||
// Set up the basic vows batch for this test
|
||||
var batch = {
|
||||
setLevel: {
|
||||
}
|
||||
};
|
||||
|
||||
showProgress('Populating batch object...');
|
||||
|
||||
// Populating the batch object programmatically,
|
||||
// as I don't have the patience to manually populate it with
|
||||
// the (strLevels.length x levelTypes.length) ^ 2 = 144 possible test combinations
|
||||
for (var type in levelTypes) {
|
||||
var context = 'is called with a '+type;
|
||||
var levelsToTest = levelTypes[type];
|
||||
showProgress('Setting up the vows context for '+context);
|
||||
|
||||
batch.setLevel[context]= {};
|
||||
levelsToTest.forEach( function(level) {
|
||||
var subContext = 'of '+level;
|
||||
var log4jsLevel=log4js.levels.toLevel(level.toString());
|
||||
|
||||
showProgress('Setting up the vows sub-context for '+subContext);
|
||||
batch.setLevel[context][subContext] = {topic: level};
|
||||
for (var comparisonType in levelTypes) {
|
||||
levelTypes[comparisonType].forEach(function(comparisonLevel) {
|
||||
var t = type;
|
||||
var ct = comparisonType;
|
||||
var expectedResult = log4jsLevel.isLessThanOrEqualTo(comparisonLevel);
|
||||
var vow = 'isLevelEnabled(' + comparisonLevel +
|
||||
') called with a ' + comparisonType +
|
||||
' should return ' + expectedResult;
|
||||
showProgress('Setting up the vows vow for '+vow);
|
||||
|
||||
batch.setLevel[context][subContext][vow] = function(levelToSet) {
|
||||
logger.setLevel(levelToSet);
|
||||
showProgress(
|
||||
'*** Checking setLevel( ' + level +
|
||||
' ) of type ' + t +
|
||||
', and isLevelEnabled( ' + comparisonLevel +
|
||||
' ) of type ' + ct + '. Expecting: ' + expectedResult
|
||||
);
|
||||
assert.equal(
|
||||
logger.isLevelEnabled(comparisonLevel),
|
||||
expectedResult,
|
||||
'Failed: calling setLevel( ' + level +
|
||||
' ) with type ' + type +
|
||||
', isLevelEnabled( ' + comparisonLevel +
|
||||
' ) of type ' + comparisonType +
|
||||
' did not return ' + expectedResult
|
||||
);
|
||||
};
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
}
|
||||
|
||||
showProgress('Running tests...');
|
||||
|
||||
vows.describe('log4js setLevel asymmetry fix').addBatch(batch).export(module);
|
||||
|
||||
|
||||
233 test/smtpAppender-test.js Normal file
@@ -0,0 +1,233 @@
|
||||
"use strict";
|
||||
var vows = require('vows')
|
||||
, assert = require('assert')
|
||||
, log4js = require('../lib/log4js')
|
||||
, sandbox = require('sandboxed-module')
|
||||
;
|
||||
|
||||
function setupLogging(category, options) {
|
||||
var msgs = [];
|
||||
|
||||
var fakeMailer = {
|
||||
createTransport: function (name, options) {
|
||||
return {
|
||||
config: options,
|
||||
sendMail: function (msg, callback) {
|
||||
msgs.push(msg);
|
||||
callback(null, true);
|
||||
},
|
||||
close: function() {}
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
var fakeLayouts = {
|
||||
layout: function(type, config) {
|
||||
this.type = type;
|
||||
this.config = config;
|
||||
return log4js.layouts.messagePassThroughLayout;
|
||||
},
|
||||
basicLayout: log4js.layouts.basicLayout,
|
||||
messagePassThroughLayout: log4js.layouts.messagePassThroughLayout
|
||||
};
|
||||
|
||||
var fakeConsole = {
|
||||
errors: [],
|
||||
error: function(msg, value) {
|
||||
this.errors.push({ msg: msg, value: value });
|
||||
}
|
||||
};
|
||||
|
||||
var smtpModule = sandbox.require('../lib/appenders/smtp', {
|
||||
requires: {
|
||||
'nodemailer': fakeMailer,
|
||||
'../layouts': fakeLayouts
|
||||
},
|
||||
globals: {
|
||||
console: fakeConsole
|
||||
}
|
||||
});
|
||||
|
||||
log4js.addAppender(smtpModule.configure(options), category);
|
||||
|
||||
return {
|
||||
logger: log4js.getLogger(category),
|
||||
mailer: fakeMailer,
|
||||
layouts: fakeLayouts,
|
||||
console: fakeConsole,
|
||||
results: msgs
|
||||
};
|
||||
}
|
||||
|
||||
function checkMessages (result, sender, subject) {
|
||||
for (var i = 0; i < result.results.length; ++i) {
|
||||
assert.equal(result.results[i].from, sender);
|
||||
assert.equal(result.results[i].to, 'recipient@domain.com');
|
||||
assert.equal(result.results[i].subject, subject ? subject : 'Log event #' + (i+1));
|
||||
assert.ok(new RegExp('.+Log event #' + (i+1) + '\n$').test(result.results[i].text));
|
||||
}
|
||||
}
|
||||
|
||||
log4js.clearAppenders();
|
||||
vows.describe('log4js smtpAppender').addBatch({
|
||||
'minimal config': {
|
||||
topic: function() {
|
||||
var setup = setupLogging('minimal config', {
|
||||
recipients: 'recipient@domain.com',
|
||||
transport: "SMTP",
|
||||
SMTP: {
|
||||
port: 25,
|
||||
auth: {
|
||||
user: 'user@domain.com'
|
||||
}
|
||||
}
|
||||
});
|
||||
setup.logger.info('Log event #1');
|
||||
return setup;
|
||||
},
|
||||
'there should be one message only': function (result) {
|
||||
assert.equal(result.results.length, 1);
|
||||
},
|
||||
'message should contain proper data': function (result) {
|
||||
checkMessages(result);
|
||||
}
|
||||
},
|
||||
'fancy config': {
|
||||
topic: function() {
|
||||
var setup = setupLogging('fancy config', {
|
||||
recipients: 'recipient@domain.com',
|
||||
sender: 'sender@domain.com',
|
||||
subject: 'This is subject',
|
||||
transport: "SMTP",
|
||||
SMTP: {
|
||||
port: 25,
|
||||
auth: {
|
||||
user: 'user@domain.com'
|
||||
}
|
||||
}
|
||||
});
|
||||
setup.logger.info('Log event #1');
|
||||
return setup;
|
||||
},
|
||||
'there should be one message only': function (result) {
|
||||
assert.equal(result.results.length, 1);
|
||||
},
|
||||
'message should contain proper data': function (result) {
|
||||
checkMessages(result, 'sender@domain.com', 'This is subject');
|
||||
}
|
||||
},
|
||||
'config with layout': {
|
||||
topic: function() {
|
||||
var setup = setupLogging('config with layout', {
|
||||
layout: {
|
||||
type: "tester"
|
||||
}
|
||||
});
|
||||
return setup;
|
||||
},
|
||||
'should configure layout': function(result) {
|
||||
assert.equal(result.layouts.type, 'tester');
|
||||
}
|
||||
},
|
||||
'separate email for each event': {
|
||||
topic: function() {
|
||||
var self = this;
|
||||
var setup = setupLogging('separate email for each event', {
|
||||
recipients: 'recipient@domain.com',
|
||||
transport: "SMTP",
|
||||
SMTP: {
|
||||
port: 25,
|
||||
auth: {
|
||||
user: 'user@domain.com'
|
||||
}
|
||||
}
|
||||
});
|
||||
setTimeout(function () {
|
||||
setup.logger.info('Log event #1');
|
||||
}, 0);
|
||||
setTimeout(function () {
|
||||
setup.logger.info('Log event #2');
|
||||
}, 500);
|
||||
setTimeout(function () {
|
||||
setup.logger.info('Log event #3');
|
||||
}, 1050);
|
||||
setTimeout(function () {
|
||||
self.callback(null, setup);
|
||||
}, 2100);
|
||||
},
|
||||
'there should be three messages': function (result) {
|
||||
assert.equal(result.results.length, 3);
|
||||
},
|
||||
'messages should contain proper data': function (result) {
|
||||
checkMessages(result);
|
||||
}
|
||||
},
|
||||
'multiple events in one email': {
|
||||
topic: function() {
|
||||
var self = this;
|
||||
var setup = setupLogging('multiple events in one email', {
|
||||
recipients: 'recipient@domain.com',
|
||||
sendInterval: 1,
|
||||
transport: "SMTP",
|
||||
SMTP: {
|
||||
port: 25,
|
||||
auth: {
|
||||
user: 'user@domain.com'
|
||||
}
|
||||
}
|
||||
});
|
||||
setTimeout(function () {
|
||||
setup.logger.info('Log event #1');
|
||||
}, 0);
|
||||
setTimeout(function () {
|
||||
setup.logger.info('Log event #2');
|
||||
}, 500);
|
||||
setTimeout(function () {
|
||||
setup.logger.info('Log event #3');
|
||||
}, 1050);
|
||||
setTimeout(function () {
|
||||
self.callback(null, setup);
|
||||
}, 2100);
|
||||
},
|
||||
'there should be two messages': function (result) {
|
||||
assert.equal(result.results.length, 2);
|
||||
},
|
||||
'messages should contain proper data': function (result) {
|
||||
assert.equal(result.results[0].to, 'recipient@domain.com');
|
||||
assert.equal(result.results[0].subject, 'Log event #1');
|
||||
assert.equal(result.results[0].text.match(new RegExp('.+Log event #[1-2]$', 'gm')).length, 2);
|
||||
assert.equal(result.results[1].to, 'recipient@domain.com');
|
||||
assert.equal(result.results[1].subject, 'Log event #3');
|
||||
assert.ok(new RegExp('.+Log event #3\n$').test(result.results[1].text));
|
||||
}
|
||||
},
|
||||
'error when sending email': {
|
||||
topic: function() {
|
||||
var setup = setupLogging('error when sending email', {
|
||||
recipients: 'recipient@domain.com',
|
||||
sendInterval: 0,
|
||||
transport: 'SMTP',
|
||||
SMTP: { port: 25, auth: { user: 'user@domain.com' } }
|
||||
});
|
||||
|
||||
setup.mailer.createTransport = function() {
|
||||
return {
|
||||
sendMail: function(msg, cb) {
|
||||
cb({ message: "oh noes" });
|
||||
},
|
||||
close: function() { }
|
||||
};
|
||||
};
|
||||
|
||||
setup.logger.info("This will break");
|
||||
return setup.console;
|
||||
},
|
||||
'should be logged to console': function(cons) {
|
||||
assert.equal(cons.errors.length, 1);
|
||||
assert.equal(cons.errors[0].msg, "log4js.smtpAppender - Error happened");
|
||||
assert.equal(cons.errors[0].value.message, 'oh noes');
|
||||
}
|
||||
}
|
||||
|
||||
}).export(module);
|
||||
|
||||
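The option objects passed to setupLogging above mirror a real appender configuration; a hedged sketch, with the category name and subject as assumptions:

var log4js = require('../lib/log4js');
var smtp = require('../lib/appenders/smtp');

// sendInterval: 0 sends one email per event; a positive value batches events
// into a single message per interval, as the 'multiple events' batch shows.
log4js.addAppender(smtp.configure({
  recipients: 'recipient@domain.com',
  sender: 'sender@domain.com',
  subject: 'Application log',
  sendInterval: 0,
  transport: 'SMTP',
  SMTP: { port: 25, auth: { user: 'user@domain.com' } }
}), 'mailed-category');

log4js.getLogger('mailed-category').error('this event goes out by email');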
93 test/streams/BaseRollingFileStream-test.js Normal file
@@ -0,0 +1,93 @@
|
||||
"use strict";
|
||||
var vows = require('vows')
|
||||
, assert = require('assert')
|
||||
, fs = require('fs')
|
||||
, sandbox = require('sandboxed-module');
|
||||
|
||||
vows.describe('../../lib/streams/BaseRollingFileStream').addBatch({
|
||||
'when node version < 0.10.0': {
|
||||
topic: function() {
|
||||
var streamLib = sandbox.load(
|
||||
'../../lib/streams/BaseRollingFileStream',
|
||||
{
|
||||
globals: {
|
||||
process: {
|
||||
version: '0.8.11'
|
||||
}
|
||||
},
|
||||
requires: {
|
||||
'readable-stream': {
|
||||
Writable: function() {}
|
||||
}
|
||||
}
|
||||
}
|
||||
);
|
||||
return streamLib.required;
|
||||
},
|
||||
'it should use readable-stream to maintain compatibility': function(required) {
|
||||
assert.ok(required['readable-stream']);
|
||||
assert.ok(!required.stream);
|
||||
}
|
||||
},
|
||||
|
||||
'when node version > 0.10.0': {
|
||||
topic: function() {
|
||||
var streamLib = sandbox.load(
|
||||
'../../lib/streams/BaseRollingFileStream',
|
||||
{
|
||||
globals: {
|
||||
process: {
|
||||
version: '0.10.1'
|
||||
}
|
||||
},
|
||||
requires: {
|
||||
'stream': {
|
||||
Writable: function() {}
|
||||
}
|
||||
}
|
||||
}
|
||||
);
|
||||
return streamLib.required;
|
||||
},
|
||||
'it should use the core stream module': function(required) {
|
||||
assert.ok(required.stream);
|
||||
assert.ok(!required['readable-stream']);
|
||||
}
|
||||
},
|
||||
|
||||
'when no filename is passed': {
|
||||
topic: require('../../lib/streams/BaseRollingFileStream'),
|
||||
'it should throw an error': function(BaseRollingFileStream) {
|
||||
try {
|
||||
new BaseRollingFileStream();
|
||||
assert.fail('should not get here');
|
||||
} catch (e) {
|
||||
assert.ok(e);
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
'default behaviour': {
|
||||
topic: function() {
|
||||
var BaseRollingFileStream = require('../../lib/streams/BaseRollingFileStream')
|
||||
, stream = new BaseRollingFileStream('basetest.log');
|
||||
return stream;
|
||||
},
|
||||
teardown: function() {
|
||||
try {
|
||||
fs.unlink('basetest.log');
|
||||
} catch (e) {
|
||||
console.error("could not remove basetest.log", e);
|
||||
}
|
||||
},
|
||||
'it should not want to roll': function(stream) {
|
||||
assert.isFalse(stream.shouldRoll());
|
||||
},
|
||||
'it should not roll': function(stream) {
|
||||
var cbCalled = false;
|
||||
//just calls the callback straight away, no async calls
|
||||
stream.roll('basetest.log', function() { cbCalled = true; });
|
||||
assert.isTrue(cbCalled);
|
||||
}
|
||||
}
|
||||
}).exportTo(module);
|
||||
227 test/streams/DateRollingFileStream-test.js Normal file
@@ -0,0 +1,227 @@
|
||||
"use strict";
|
||||
var vows = require('vows')
|
||||
, assert = require('assert')
|
||||
, fs = require('fs')
|
||||
, semver = require('semver')
|
||||
, streams
|
||||
, DateRollingFileStream
|
||||
, testTime = new Date(2012, 8, 12, 10, 37, 11);
|
||||
|
||||
if (semver.satisfies(process.version, '>=0.10.0')) {
|
||||
streams = require('stream');
|
||||
} else {
|
||||
streams = require('readable-stream');
|
||||
}
|
||||
DateRollingFileStream = require('../../lib/streams').DateRollingFileStream;
|
||||
|
||||
function cleanUp(filename) {
|
||||
return function() {
|
||||
fs.unlink(filename);
|
||||
};
|
||||
}
|
||||
|
||||
function now() {
|
||||
return testTime.getTime();
|
||||
}
|
||||
|
||||
vows.describe('DateRollingFileStream').addBatch({
|
||||
'arguments': {
|
||||
topic: new DateRollingFileStream(
|
||||
__dirname + '/test-date-rolling-file-stream-1',
|
||||
'yyyy-mm-dd.hh'
|
||||
),
|
||||
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-1'),
|
||||
|
||||
'should take a filename and a pattern and return a WritableStream': function(stream) {
|
||||
assert.equal(stream.filename, __dirname + '/test-date-rolling-file-stream-1');
|
||||
assert.equal(stream.pattern, 'yyyy-mm-dd.hh');
|
||||
assert.instanceOf(stream, streams.Writable);
|
||||
},
|
||||
'with default settings for the underlying stream': function(stream) {
|
||||
assert.equal(stream.theStream.mode, 420);
|
||||
assert.equal(stream.theStream.flags, 'a');
|
||||
//encoding is not available on the underlying stream
|
||||
//assert.equal(stream.encoding, 'utf8');
|
||||
}
|
||||
},
|
||||
|
||||
'default arguments': {
|
||||
topic: new DateRollingFileStream(__dirname + '/test-date-rolling-file-stream-2'),
|
||||
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-2'),
|
||||
|
||||
'pattern should be .yyyy-MM-dd': function(stream) {
|
||||
assert.equal(stream.pattern, '.yyyy-MM-dd');
|
||||
}
|
||||
},
|
||||
|
||||
'with stream arguments': {
|
||||
topic: new DateRollingFileStream(
|
||||
__dirname + '/test-date-rolling-file-stream-3',
|
||||
'yyyy-MM-dd',
|
||||
{ mode: parseInt('0666', 8) }
|
||||
),
|
||||
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-3'),
|
||||
|
||||
'should pass them to the underlying stream': function(stream) {
|
||||
assert.equal(stream.theStream.mode, parseInt('0666', 8));
|
||||
}
|
||||
},
|
||||
|
||||
'with stream arguments but no pattern': {
|
||||
topic: new DateRollingFileStream(
|
||||
__dirname + '/test-date-rolling-file-stream-4',
|
||||
{ mode: parseInt('0666', 8) }
|
||||
),
|
||||
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-4'),
|
||||
|
||||
'should pass them to the underlying stream': function(stream) {
|
||||
assert.equal(stream.theStream.mode, parseInt('0666', 8));
|
||||
},
|
||||
'should use default pattern': function(stream) {
|
||||
assert.equal(stream.pattern, '.yyyy-MM-dd');
|
||||
}
|
||||
},
|
||||
|
||||
'with a pattern of .yyyy-MM-dd': {
|
||||
topic: function() {
|
||||
var that = this,
|
||||
stream = new DateRollingFileStream(
|
||||
__dirname + '/test-date-rolling-file-stream-5', '.yyyy-MM-dd',
|
||||
null,
|
||||
now
|
||||
);
|
||||
stream.write("First message\n", 'utf8', function() {
|
||||
that.callback(null, stream);
|
||||
});
|
||||
},
|
||||
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-5'),
|
||||
|
||||
'should create a file with the base name': {
|
||||
topic: function(stream) {
|
||||
fs.readFile(__dirname + '/test-date-rolling-file-stream-5', this.callback);
|
||||
},
|
||||
'file should contain first message': function(result) {
|
||||
assert.equal(result.toString(), "First message\n");
|
||||
}
|
||||
},
|
||||
|
||||
'when the day changes': {
|
||||
topic: function(stream) {
|
||||
testTime = new Date(2012, 8, 13, 0, 10, 12);
|
||||
stream.write("Second message\n", 'utf8', this.callback);
|
||||
},
|
||||
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-5.2012-09-12'),
|
||||
|
||||
|
||||
'the number of files': {
|
||||
topic: function() {
|
||||
fs.readdir(__dirname, this.callback);
|
||||
},
|
||||
'should be two': function(files) {
|
||||
assert.equal(
|
||||
files.filter(
|
||||
function(file) {
|
||||
return file.indexOf('test-date-rolling-file-stream-5') > -1;
|
||||
}
|
||||
).length,
|
||||
2
|
||||
);
|
||||
}
|
||||
},
|
||||
|
||||
'the file without a date': {
|
||||
topic: function() {
|
||||
fs.readFile(__dirname + '/test-date-rolling-file-stream-5', this.callback);
|
||||
},
|
||||
'should contain the second message': function(contents) {
|
||||
assert.equal(contents.toString(), "Second message\n");
|
||||
}
|
||||
},
|
||||
|
||||
'the file with the date': {
|
||||
topic: function() {
|
||||
fs.readFile(__dirname + '/test-date-rolling-file-stream-5.2012-09-12', this.callback);
|
||||
},
|
||||
'should contain the first message': function(contents) {
|
||||
assert.equal(contents.toString(), "First message\n");
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
'with alwaysIncludePattern': {
|
||||
topic: function() {
|
||||
var that = this,
|
||||
testTime = new Date(2012, 8, 12, 0, 10, 12),
|
||||
stream = new DateRollingFileStream(
|
||||
__dirname + '/test-date-rolling-file-stream-pattern',
|
||||
'.yyyy-MM-dd',
|
||||
{alwaysIncludePattern: true},
|
||||
now
|
||||
);
|
||||
stream.write("First message\n", 'utf8', function() {
|
||||
that.callback(null, stream);
|
||||
});
|
||||
},
|
||||
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-pattern.2012-09-12'),
|
||||
|
||||
'should create a file with the pattern set': {
|
||||
topic: function(stream) {
|
||||
fs.readFile(__dirname + '/test-date-rolling-file-stream-pattern.2012-09-12', this.callback);
|
||||
},
|
||||
'file should contain first message': function(result) {
|
||||
assert.equal(result.toString(), "First message\n");
|
||||
}
|
||||
},
|
||||
|
||||
'when the day changes': {
|
||||
topic: function(stream) {
|
||||
testTime = new Date(2012, 8, 13, 0, 10, 12);
|
||||
stream.write("Second message\n", 'utf8', this.callback);
|
||||
},
|
||||
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-pattern.2012-09-13'),
|
||||
|
||||
|
||||
'the number of files': {
|
||||
topic: function() {
|
||||
fs.readdir(__dirname, this.callback);
|
||||
},
|
||||
'should be two': function(files) {
|
||||
assert.equal(
|
||||
files.filter(
|
||||
function(file) {
|
||||
return file.indexOf('test-date-rolling-file-stream-pattern') > -1;
|
||||
}
|
||||
).length,
|
||||
2
|
||||
);
|
||||
}
|
||||
},
|
||||
|
||||
'the file with the later date': {
|
||||
topic: function() {
|
||||
fs.readFile(
|
||||
__dirname + '/test-date-rolling-file-stream-pattern.2012-09-13',
|
||||
this.callback
|
||||
);
|
||||
},
|
||||
'should contain the second message': function(contents) {
|
||||
assert.equal(contents.toString(), "Second message\n");
|
||||
}
|
||||
},
|
||||
|
||||
'the file with the date': {
|
||||
topic: function() {
|
||||
fs.readFile(
|
||||
__dirname + '/test-date-rolling-file-stream-pattern.2012-09-12',
|
||||
this.callback
|
||||
);
|
||||
},
|
||||
'should contain the first message': function(contents) {
|
||||
assert.equal(contents.toString(), "First message\n");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}).exportTo(module);
|
||||
210 test/streams/rollingFileStream-test.js Normal file
@@ -0,0 +1,210 @@
|
||||
"use strict";
|
||||
var vows = require('vows')
|
||||
, async = require('async')
|
||||
, assert = require('assert')
|
||||
, events = require('events')
|
||||
, fs = require('fs')
|
||||
, semver = require('semver')
|
||||
, streams
|
||||
, RollingFileStream;
|
||||
|
||||
if (semver.satisfies(process.version, '>=0.10.0')) {
|
||||
streams = require('stream');
|
||||
} else {
|
||||
streams = require('readable-stream');
|
||||
}
|
||||
RollingFileStream = require('../../lib/streams').RollingFileStream;
|
||||
|
||||
function remove(filename) {
|
||||
try {
|
||||
fs.unlinkSync(filename);
|
||||
} catch (e) {
|
||||
//doesn't really matter if it failed
|
||||
}
|
||||
}
|
||||
|
||||
function create(filename) {
|
||||
fs.writeFileSync(filename, "test file");
|
||||
}
|
||||
|
||||
vows.describe('RollingFileStream').addBatch({
|
||||
'arguments': {
|
||||
topic: function() {
|
||||
remove(__dirname + "/test-rolling-file-stream");
|
||||
return new RollingFileStream("test-rolling-file-stream", 1024, 5);
|
||||
},
|
||||
'should take a filename, file size (bytes), no. backups, return Writable': function(stream) {
|
||||
assert.instanceOf(stream, streams.Writable);
|
||||
assert.equal(stream.filename, "test-rolling-file-stream");
|
||||
assert.equal(stream.size, 1024);
|
||||
assert.equal(stream.backups, 5);
|
||||
},
|
||||
'with default settings for the underlying stream': function(stream) {
|
||||
assert.equal(stream.theStream.mode, 420);
|
||||
assert.equal(stream.theStream.flags, 'a');
|
||||
//encoding isn't a property on the underlying stream
|
||||
//assert.equal(stream.theStream.encoding, 'utf8');
|
||||
}
|
||||
},
|
||||
'with stream arguments': {
|
||||
topic: function() {
|
||||
remove(__dirname + '/test-rolling-file-stream');
|
||||
return new RollingFileStream(
|
||||
'test-rolling-file-stream',
|
||||
1024,
|
||||
5,
|
||||
{ mode: parseInt('0666', 8) }
|
||||
);
|
||||
},
|
||||
'should pass them to the underlying stream': function(stream) {
|
||||
assert.equal(stream.theStream.mode, parseInt('0666', 8));
|
||||
}
|
||||
},
|
||||
'without size': {
|
||||
topic: function() {
|
||||
try {
|
||||
new RollingFileStream(__dirname + "/test-rolling-file-stream");
|
||||
} catch (e) {
|
||||
return e;
|
||||
}
|
||||
},
|
||||
'should throw an error': function(err) {
|
||||
assert.instanceOf(err, Error);
|
||||
}
|
||||
},
|
||||
'without number of backups': {
|
||||
topic: function() {
|
||||
remove('test-rolling-file-stream');
|
||||
return new RollingFileStream(__dirname + "/test-rolling-file-stream", 1024);
|
||||
},
|
||||
'should default to 1 backup': function(stream) {
|
||||
assert.equal(stream.backups, 1);
|
||||
}
|
||||
},
|
||||
'writing less than the file size': {
|
||||
topic: function() {
|
||||
remove(__dirname + "/test-rolling-file-stream-write-less");
|
||||
var that = this
|
||||
, stream = new RollingFileStream(
|
||||
__dirname + "/test-rolling-file-stream-write-less",
|
||||
100
|
||||
);
|
||||
stream.write("cheese", "utf8", function() {
|
||||
stream.end();
|
||||
fs.readFile(__dirname + "/test-rolling-file-stream-write-less", "utf8", that.callback);
|
||||
});
|
||||
},
|
||||
'should write to the file': function(contents) {
|
||||
assert.equal(contents, "cheese");
|
||||
},
|
||||
'the number of files': {
|
||||
topic: function() {
|
||||
fs.readdir(__dirname, this.callback);
|
||||
},
|
||||
'should be one': function(files) {
|
||||
assert.equal(
|
||||
files.filter(
|
||||
function(file) {
|
||||
return file.indexOf('test-rolling-file-stream-write-less') > -1;
|
||||
}
|
||||
).length,
|
||||
1
|
||||
);
|
||||
}
|
||||
}
|
||||
},
|
||||
'writing more than the file size': {
|
||||
topic: function() {
|
||||
remove(__dirname + "/test-rolling-file-stream-write-more");
|
||||
remove(__dirname + "/test-rolling-file-stream-write-more.1");
|
||||
var that = this
|
||||
, stream = new RollingFileStream(
|
||||
__dirname + "/test-rolling-file-stream-write-more",
|
||||
45
|
||||
);
|
||||
async.forEach(
|
||||
[0, 1, 2, 3, 4, 5, 6],
|
||||
function(i, cb) {
|
||||
stream.write(i +".cheese\n", "utf8", cb);
|
||||
},
|
||||
function() {
|
||||
stream.end();
|
||||
that.callback();
|
||||
}
|
||||
);
|
||||
},
|
||||
'the number of files': {
|
||||
topic: function() {
|
||||
fs.readdir(__dirname, this.callback);
|
||||
},
|
||||
'should be two': function(files) {
|
||||
assert.equal(files.filter(
|
||||
function(file) {
|
||||
return file.indexOf('test-rolling-file-stream-write-more') > -1;
|
||||
}
|
||||
).length, 2);
|
||||
}
|
||||
},
|
||||
'the first file': {
|
||||
topic: function() {
|
||||
fs.readFile(__dirname + "/test-rolling-file-stream-write-more", "utf8", this.callback);
|
||||
},
|
||||
'should contain the last two log messages': function(contents) {
|
||||
assert.equal(contents, '5.cheese\n6.cheese\n');
|
||||
}
|
||||
},
|
||||
'the second file': {
|
||||
topic: function() {
|
||||
fs.readFile(__dirname + '/test-rolling-file-stream-write-more.1', "utf8", this.callback);
|
||||
},
|
||||
'should contain the first five log messages': function(contents) {
|
||||
assert.equal(contents, '0.cheese\n1.cheese\n2.cheese\n3.cheese\n4.cheese\n');
|
||||
}
|
||||
}
|
||||
},
|
||||
'when many files already exist': {
|
||||
topic: function() {
|
||||
remove(__dirname + '/test-rolling-stream-with-existing-files.11');
|
||||
remove(__dirname + '/test-rolling-stream-with-existing-files.20');
|
||||
remove(__dirname + '/test-rolling-stream-with-existing-files.-1');
|
||||
remove(__dirname + '/test-rolling-stream-with-existing-files.1.1');
|
||||
remove(__dirname + '/test-rolling-stream-with-existing-files.1');
|
||||
|
||||
|
||||
create(__dirname + '/test-rolling-stream-with-existing-files.11');
|
||||
create(__dirname + '/test-rolling-stream-with-existing-files.20');
|
||||
create(__dirname + '/test-rolling-stream-with-existing-files.-1');
|
||||
create(__dirname + '/test-rolling-stream-with-existing-files.1.1');
|
||||
create(__dirname + '/test-rolling-stream-with-existing-files.1');
|
||||
|
||||
var that = this
|
||||
, stream = new RollingFileStream(
|
||||
__dirname + "/test-rolling-stream-with-existing-files",
|
||||
45,
|
||||
5
|
||||
);
|
||||
async.forEach(
|
||||
[0, 1, 2, 3, 4, 5, 6],
|
||||
function(i, cb) {
|
||||
stream.write(i +".cheese\n", "utf8", cb);
|
||||
},
|
||||
function() {
|
||||
stream.end();
|
||||
that.callback();
|
||||
}
|
||||
);
|
||||
},
|
||||
'the files': {
|
||||
topic: function() {
|
||||
fs.readdir(__dirname, this.callback);
|
||||
},
|
||||
'should be rolled': function(files) {
|
||||
assert.include(files, 'test-rolling-stream-with-existing-files');
|
||||
assert.include(files, 'test-rolling-stream-with-existing-files.1');
|
||||
assert.include(files, 'test-rolling-stream-with-existing-files.2');
|
||||
assert.include(files, 'test-rolling-stream-with-existing-files.11');
|
||||
assert.include(files, 'test-rolling-stream-with-existing-files.20');
|
||||
}
|
||||
}
|
||||
}
|
||||
}).exportTo(module);
|
||||
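A short usage sketch of the two stream classes exercised above (file names are illustrative):

var streams = require('../../lib/streams');

// Size-based rolling: roll once the file passes 1024 bytes, keeping up to
// 5 rolled backups (file.log.1 ... file.log.5), as the batches above verify.
var bySize = new streams.RollingFileStream('file.log', 1024, 5);
bySize.write('an event\n', 'utf8');

// Date-based rolling: the current period keeps the base name; when the day
// changes, older data ends up in date-file.log.yyyy-MM-dd.
var byDate = new streams.DateRollingFileStream('date-file.log', '.yyyy-MM-dd');
byDate.write('another event\n', 'utf8');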
@@ -1,128 +0,0 @@
|
||||
var vows = require('vows')
|
||||
, assert = require('assert')
|
||||
, levels = require('../lib/levels');
|
||||
|
||||
function MockLogger() {
|
||||
|
||||
var that = this;
|
||||
this.messages = [];
|
||||
|
||||
this.log = function(level, message, exception) {
|
||||
that.messages.push({ level: level, message: message });
|
||||
};
|
||||
|
||||
this.isLevelEnabled = function(level) {
|
||||
return level.isGreaterThanOrEqualTo(that.level);
|
||||
};
|
||||
|
||||
this.level = levels.TRACE;
|
||||
|
||||
}
|
||||
|
||||
function MockRequest(remoteAddr, method, originalUrl) {
|
||||
|
||||
this.socket = { remoteAddress: remoteAddr };
|
||||
this.originalUrl = originalUrl;
|
||||
this.method = method;
|
||||
this.httpVersionMajor = '5';
|
||||
this.httpVersionMinor = '0';
|
||||
this.headers = {}
|
||||
|
||||
}
|
||||
|
||||
function MockResponse(statusCode) {
|
||||
|
||||
this.statusCode = statusCode;
|
||||
|
||||
this.end = function(chunk, encoding) {
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
vows.describe('log4js connect logger').addBatch({
|
||||
'getConnectLoggerModule': {
|
||||
topic: function() {
|
||||
var clm = require('../lib/connect-logger');
|
||||
return clm;
|
||||
},
|
||||
|
||||
'should return a "connect logger" factory' : function(clm) {
|
||||
assert.isObject(clm);
|
||||
},
|
||||
|
||||
'take a log4js logger and return a "connect logger"' : {
|
||||
topic: function(clm) {
|
||||
var ml = new MockLogger();
|
||||
var cl = clm.connectLogger(ml);
|
||||
return cl;
|
||||
},
|
||||
|
||||
'should return a "connect logger"': function(cl) {
|
||||
assert.isFunction(cl);
|
||||
}
|
||||
},
|
||||
|
||||
'log events' : {
|
||||
topic: function(clm) {
|
||||
var ml = new MockLogger();
|
||||
var cl = clm.connectLogger(ml);
|
||||
var req = new MockRequest('my.remote.addr', 'GET', 'http://url');
|
||||
var res = new MockResponse(200);
|
||||
cl(req, res, function() { });
|
||||
res.end('chunk', 'encoding');
|
||||
return ml.messages;
|
||||
},
|
||||
|
||||
'check message': function(messages) {
|
||||
assert.isArray(messages);
|
||||
assert.length(messages, 1);
|
||||
assert.equal(messages[0].level, levels.INFO);
|
||||
assert.include(messages[0].message, 'GET');
|
||||
assert.include(messages[0].message, 'http://url');
|
||||
assert.include(messages[0].message, 'my.remote.addr');
|
||||
assert.include(messages[0].message, '200');
|
||||
}
|
||||
},
|
||||
|
||||
'log events with level below logging level' : {
|
||||
topic: function(clm) {
|
||||
var ml = new MockLogger();
|
||||
ml.level = levels.FATAL;
|
||||
var cl = clm.connectLogger(ml);
|
||||
var req = new MockRequest('my.remote.addr', 'GET', 'http://url');
|
||||
var res = new MockResponse(200);
|
||||
cl(req, res, function() { });
|
||||
res.end('chunk', 'encoding');
|
||||
return ml.messages;
|
||||
},
|
||||
|
||||
'check message': function(messages) {
|
||||
assert.isArray(messages);
|
||||
assert.isEmpty(messages);
|
||||
}
|
||||
},
|
||||
|
||||
'log events with non-default level and custom format' : {
|
||||
topic: function(clm) {
|
||||
var ml = new MockLogger();
|
||||
ml.level = levels.INFO;
|
||||
var cl = clm.connectLogger(ml, { level: levels.INFO, format: ':method :url' } );
|
||||
var req = new MockRequest('my.remote.addr', 'GET', 'http://url');
|
||||
var res = new MockResponse(200);
|
||||
cl(req, res, function() { });
|
||||
res.end('chunk', 'encoding');
|
||||
return ml.messages;
|
||||
},
|
||||
|
||||
'check message': function(messages) {
|
||||
assert.isArray(messages);
|
||||
assert.length(messages, 1);
|
||||
assert.equal(messages[0].level, levels.INFO);
|
||||
assert.equal(messages[0].message, 'GET http://url');
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}).export(module);
|
||||
@@ -1,85 +0,0 @@
|
||||
var vows = require('vows'),
|
||||
assert = require('assert');
|
||||
|
||||
vows.describe('log4js global loglevel').addBatch({
|
||||
'global loglevel' : {
|
||||
topic: function() {
|
||||
var log4js = require('../lib/log4js');
|
||||
return log4js;
|
||||
},
|
||||
|
||||
'set global loglevel on creation': function(log4js) {
|
||||
var log1 = log4js.getLogger('log1');
|
||||
var level = 'OFF';
|
||||
if (log1.level.toString() == level) {
|
||||
level = 'TRACE';
|
||||
}
|
||||
assert.notEqual(log1.level.toString(), level);
|
||||
|
||||
log4js.setGlobalLogLevel(level);
|
||||
assert.equal(log1.level.toString(), level);
|
||||
|
||||
var log2 = log4js.getLogger('log2');
|
||||
assert.equal(log2.level.toString(), level);
|
||||
},
|
||||
|
||||
'global change loglevel': function(log4js) {
|
||||
var log1 = log4js.getLogger('log1');
|
||||
var log2 = log4js.getLogger('log2');
|
||||
var level = 'OFF';
|
||||
if (log1.level.toString() == level) {
|
||||
level = 'TRACE';
|
||||
}
|
||||
assert.notEqual(log1.level.toString(), level);
|
||||
|
||||
log4js.setGlobalLogLevel(level);
|
||||
assert.equal(log1.level.toString(), level);
|
||||
assert.equal(log2.level.toString(), level);
|
||||
},
|
||||
|
||||
'override loglevel': function(log4js) {
|
||||
var log1 = log4js.getLogger('log1');
|
||||
var log2 = log4js.getLogger('log2');
|
||||
var level = 'OFF';
|
||||
if (log1.level.toString() == level) {
|
||||
level = 'TRACE';
|
||||
}
|
||||
assert.notEqual(log1.level.toString(), level);
|
||||
|
||||
var oldLevel = log1.level.toString();
|
||||
assert.equal(log2.level.toString(), oldLevel);
|
||||
|
||||
log2.setLevel(level);
|
||||
assert.equal(log1.level.toString(), oldLevel);
|
||||
assert.equal(log2.level.toString(), level);
|
||||
assert.notEqual(oldLevel, level);
|
||||
|
||||
log2.removeLevel();
|
||||
assert.equal(log1.level.toString(), oldLevel);
|
||||
assert.equal(log2.level.toString(), oldLevel);
|
||||
},
|
||||
|
||||
'preload loglevel': function(log4js) {
|
||||
var log1 = log4js.getLogger('log1');
|
||||
var level = 'OFF';
|
||||
if (log1.level.toString() == level) {
|
||||
level = 'TRACE';
|
||||
}
|
||||
assert.notEqual(log1.level.toString(), level);
|
||||
|
||||
var oldLevel = log1.level.toString();
|
||||
log4js.getLogger('log2').setLevel(level);
|
||||
|
||||
assert.equal(log1.level.toString(), oldLevel);
|
||||
|
||||
// get again same logger but as different variable
|
||||
var log2 = log4js.getLogger('log2');
|
||||
assert.equal(log2.level.toString(), level);
|
||||
assert.notEqual(oldLevel, level);
|
||||
|
||||
log2.removeLevel();
|
||||
assert.equal(log1.level.toString(), oldLevel);
|
||||
assert.equal(log2.level.toString(), oldLevel);
|
||||
}
|
||||
}
|
||||
}).export(module);
|
||||
23 test/with-categoryFilter.json Normal file
@@ -0,0 +1,23 @@
|
||||
{
|
||||
"appenders": [
|
||||
{
|
||||
"type": "categoryFilter",
|
||||
"exclude": "web",
|
||||
"appender": {
|
||||
"type": "file",
|
||||
"filename": "test/categoryFilter-noweb.log",
|
||||
"layout": {
|
||||
"type": "messagePassThrough"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"category": "web",
|
||||
"type": "file",
|
||||
"filename": "test/categoryFilter-web.log",
|
||||
"layout": {
|
||||
"type": "messagePassThrough"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
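A hedged sketch of the with-categoryFilter.json configuration above in use:

var log4js = require('../lib/log4js');

// The categoryFilter appender above excludes "web" from the first log file,
// while the second appender captures "web" on its own.
log4js.configure('test/with-categoryFilter.json');

log4js.getLogger('app').info('lands in test/categoryFilter-noweb.log');
log4js.getLogger('web').info('filtered out of noweb.log, lands in test/categoryFilter-web.log');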
17 test/with-dateFile.json Normal file
@@ -0,0 +1,17 @@
|
||||
{
|
||||
"appenders": [
|
||||
{
|
||||
"category": "tests",
|
||||
"type": "dateFile",
|
||||
"filename": "test/date-file-test.log",
|
||||
"pattern": "-from-MM-dd",
|
||||
"layout": {
|
||||
"type": "messagePassThrough"
|
||||
}
|
||||
}
|
||||
],
|
||||
|
||||
"levels": {
|
||||
"tests": "WARN"
|
||||
}
|
||||
}
|
||||
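A hedged sketch of consuming the with-dateFile.json configuration above:

var log4js = require('../lib/log4js');

// The "tests" category is capped at WARN and writes through the dateFile
// appender to test/date-file-test.log with a messagePassThrough layout.
log4js.configure('test/with-dateFile.json');

var logger = log4js.getLogger('tests');
logger.warn('written to the dated log file');
logger.info('dropped - below the WARN threshold for "tests"');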
@@ -4,8 +4,7 @@
|
||||
"type": "file",
|
||||
"filename": "tmp-test.log",
|
||||
"maxLogSize": 1024,
|
||||
"backups": 3,
|
||||
"pollInterval": 15
|
||||
"backups": 3
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6,7 +6,7 @@
|
||||
"level": "WARN",
|
||||
"appender": {
|
||||
"type": "file",
|
||||
"filename": "tmp-tests-warnings.log",
|
||||
"filename": "test/logLevelFilter-warnings.log",
|
||||
"layout": {
|
||||
"type": "messagePassThrough"
|
||||
}
|
||||
@@ -15,7 +15,7 @@
|
||||
{
|
||||
"category": "tests",
|
||||
"type": "file",
|
||||
"filename": "tmp-tests.log",
|
||||
"filename": "test/logLevelFilter.log",
|
||||
"layout": {
|
||||
"type": "messagePassThrough"
|
||||
}
|
||||