Compare commits

...

61 Commits

Author SHA1 Message Date
Gareth Jones
8383dfc4f4 0.6.6 2013-05-25 13:10:46 +10:00
Gareth Jones
4e8fb26099 Missed out the smtp test 2013-05-25 13:08:43 +10:00
Gareth Jones
8492519e3b Fixing issue #137 2013-05-25 13:04:48 +10:00
Gareth Jones
fdc9d253c9 0.6.5 2013-05-16 16:57:25 +10:00
Gareth Jones
18e21ca473 Merge branch 'master' of https://github.com/nomiddlename/log4js-node 2013-05-16 16:55:47 +10:00
Gareth Jones
ab8c7ed89d Merge pull request #136 from issacg/dontalwaysrename-bug
Dontalwaysrename bug
2013-05-15 23:52:57 -07:00
Gareth Jones
aa4f7c071b Merge pull request #135 from jmav/master
auto level detection from @jmav
2013-05-15 23:52:27 -07:00
Issac Goldstand
dc632f4705 Fixes bug introduced in github issue #132 where file rolling needs to be handled differently for alwaysIncludePattern streams 2013-05-11 23:01:28 +03:00
Jure Mav
ac6284add1 Added automatic level detection to connect-logger, based on the HTTP response status.
Update of the connect-logger example code, compatible with express 3.x
2013-05-11 16:17:23 +02:00
Issac Goldstand
2da01cc611 Fixes bug introduced in github issue #132 where renaming a file to itself can cause an unhandled error 2013-05-09 13:09:59 +03:00
Gareth Jones
ad8229145e Merge pull request #133 from issacg/baseFileRollingStream-bug
Fixes bug in detecting empty options (see issue #132 on github)
2013-05-08 02:24:02 -07:00
Issac Goldstand
8c12c948d9 Fixes bug in detecting empty options (see issue #132 on github) 2013-05-08 12:05:32 +03:00
Gareth Jones
af6ae7af98 new version for alwaysIncludePattern 2013-05-05 14:01:40 +10:00
Gareth Jones
936ad4da8e fixed tests broken by alwaysIncludePattern 2013-05-05 13:44:01 +10:00
Gareth Jones
097ae3d7f1 Merge branch 'alwaysIncludePattern' of https://github.com/issacg/log4js-node into isaacg-alwaysIncludePattern 2013-05-04 16:10:02 +10:00
Issac Goldstand
04de4ed8d3 fix OS-specific endline mucking test results (:-O not everyone uses linux?!?!) 2013-05-03 11:14:28 +03:00
Issac Goldstand
29b02921b6 add option alwaysIncludePattern to dateTime appender to always use the filename with the pattern included when logging 2013-05-02 14:56:33 +03:00
Gareth Jones
48ed5d1222 Removed the warning about node 0.10 2013-04-11 22:34:49 +10:00
Gareth Jones
7844b0d2e4 0.6.3 2013-04-11 22:29:13 +10:00
Gareth Jones
8b49ba9f3d added node 0.8 to travis config and package.json 2013-04-11 21:49:08 +10:00
Gareth Jones
ed7462885f backporting new streams to node 0.8 for issue #129 2013-04-11 21:45:16 +10:00
Gareth Jones
36c5175a55 0.6.2 2013-04-02 12:02:47 +11:00
Gareth Jones
22160f90b3 fixed the multiprocess tests 2013-04-02 11:59:45 +11:00
Gareth Jones
73437ecb40 Merge branch 'master' of https://github.com/dsn/log4js-node into dsn-master 2013-04-02 11:34:25 +11:00
Gareth Jones
107e33c0d1 merged in change from @vojtajina for pull request #128 2013-04-02 10:18:25 +11:00
Gareth Jones
6352632fb2 fix version of node supported 2013-04-02 10:02:48 +11:00
Gareth Jones
0544342e9f Merge pull request #128 from Dignifiedquire/master-engine
Fix node engine in package.json
2013-04-01 15:42:41 -07:00
Friedel Ziegelmayer
1d1153d32f Fix node engine in package.json 2013-04-01 23:00:26 +02:00
Gary Steven
e58cf201ca Updated for Node 0.10.x
net.createServer no longer emits 'connect' event
2013-03-30 03:23:58 -07:00
Gareth Jones
83271e47fc Merge pull request #125 from jimschubert/master
Allow for somewhat standard debugging calls
2013-03-24 19:35:24 -07:00
Jim Schubert
f3271a3997 Add standard debug conditional function
2013-03-23 18:50:13 -07:00
Gareth Jones
4b7cf589a2 Fixing the wiki links (issue #124) 2013-03-20 19:47:32 +11:00
Gareth Jones
c8f401c47d fixed travis node version format 2013-03-20 14:58:56 +11:00
Gareth Jones
ecbf41bc83 updated readme with node 0.10 info 2013-03-20 09:16:42 +11:00
Gareth Jones
65e490cbd2 Fixes for version v0.10 streams, breaks log4js for older versions of node 2013-03-20 09:14:27 +11:00
Gareth Jones
5e242c9dc9 bumped version 2013-02-25 16:33:48 +11:00
Gareth Jones
50eefcc701 Merge pull request #116 from imkira/master
Pass options from multiprocess appender to inner appender
2013-02-24 21:30:42 -08:00
Mário Freitas
8e53c6213e fix: pass options from multiprocess appender to inner appender 2013-02-21 00:06:59 +09:00
Gareth Jones
a15a628311 Merge pull request #115 from NicolasPelletier/master
Speed up file logging for high rate of logging.
2013-02-14 16:32:03 -08:00
Nicolas Pelletier
b75e3660f4 Speed up file logging for high rate of logging.
During an evaluation of multiple loggers, I saw a slow down when trying to
quickly log more than 100,000 messages to a file:
```javascript
    counter = 150000;
    while (counter) {
        logger.info('Message[' + counter + ']');
        counter -= 1;
    }
```

My detailed test can be found here:
 - https://gist.github.com/NicolasPelletier/4773843

The test demonstrates that writing 150,000 lines straight to a FileStream
takes about 22 seconds until the file content stabilizes. When calling
logger.debug() 150,000 times, the file stabilizes to its final content
after 229s (almost 4 minutes!).

After investigation, it turns out that the problem is using an Array() to
accumulate the data. Pushing the data into the Array with Array.push() is
quick, but the code flushing the buffer uses Array.shift(), which forces
re-indexing of all 149,999 elements remaining in the Array. This makes
draining the buffer quadratically slower as it grows.

The solution is to use something else than an Array to accumulate the
messages. The fix was made using a package called Dequeue
( https://github.com/lleo/node-dequeue ). By replacing the Array with
a Dequeue object, it brought the logging of 150,000 messages back down to
31s. Seven times faster than the previous 229s.

There is a caveat that each log event is slightly longer due to the need
to create an object to put in the double-ended queue inside the Dequeue
object. According to a quick test, it takes about 4% more time per call
to logger.debug().
2013-02-13 09:35:02 -05:00
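The fix is easy to see in a minimal sketch (assuming the dequeue package linked above is installed; the variable names are illustrative):
```javascript
// Sketch of the buffering change described in the commit message above.
var Dequeue = require('dequeue');

// Before: Array.shift() re-indexes every remaining element, so draining
// an N-element buffer costs O(N) per shift, O(N^2) overall.
var arrayBuffer = [];
arrayBuffer.push({ data: 'log line\n', encoding: 'utf8' });
var slow = arrayBuffer.shift();

// After: a doubly-linked deque gives O(1) push and shift.
var dequeBuffer = new Dequeue();
dequeBuffer.push({ data: 'log line\n', encoding: 'utf8' });
var fast = dequeBuffer.shift();
```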
Gareth Jones
22da6226e5 Merge pull request #113 from bitcloud/patternLayout_tokens
add your own tokens to the patternLayout
2013-02-11 13:45:18 -08:00
Jan Schmidle
a3bdac8e14 updated require in example to match other examples 2013-02-08 16:22:29 +01:00
Jan Schmidle
af428c5669 added example on pattern tokens usage 2013-02-08 16:18:27 +01:00
Jan Schmidle
5c75ba9468 fixed small issue that could occur with wrong evaluated parameters 2013-02-08 16:17:24 +01:00
Jan Schmidle
bec0d05847 added some documentation to the function header 2013-02-08 16:15:51 +01:00
Jan Schmidle
e4bf405f20 add your own tokens to the patternLayout 2013-02-08 14:54:18 +01:00
Gareth Jones
95568f352b Merge pull request #110 from Dignifiedquire/fix-2
Move examples into their own directory.
2013-01-20 16:15:53 -08:00
Gareth Jones
6da6f3c90e Merge pull request #109 from Dignifiedquire/fix-1
Misc code highlighting fixes in readme.md
2013-01-20 14:16:04 -08:00
Friedel Ziegelmayer
7f57d14e70 Move examples into their own directory. 2013-01-19 22:14:14 +01:00
Friedel Ziegelmayer
f478793da3 Misc code highlighting fixes in readme.md 2013-01-19 22:09:31 +01:00
Gareth Jones
ec2f8fec3b Merge pull request #105 from ulikoehler/readme-syntax-highlighting
Added syntax highlighting to JS code in README.md
2013-01-06 13:33:52 -08:00
Uli Köhler
0167c84ea5 Added syntax highlighting to JS code in README.md 2013-01-06 01:09:55 +01:00
Gareth Jones
3e1a27e522 New version, with colours in pattern layout 2012-12-03 09:59:36 +11:00
Gareth Jones
8b42e46071 Merge pull request #101 from Dignifiedquire/feature-color-pattern
[feature] Add patternColoured Layout.
2012-12-02 14:51:27 -08:00
Friedel Ziegelmayer
4a7a90ed53 [feature] Add color option to pattern layout.
Based on #90 this implements the possibility to add the color codes
according to the log level via %[ and %].
2012-12-02 23:41:59 +01:00
Gareth Jones
a9307fd6da fix for issue #100, multiprocess appender and logLevelFilter don't play nicely 2012-11-09 16:02:16 +11:00
Gareth Jones
4739c65c68 Version 0.5.4 2012-10-16 11:54:21 +11:00
Gareth Jones
892181f88f Merge pull request #98 from danbell/master
Check environment variable LOG4JS_CONFIG for configuration file location.
2012-10-15 17:52:08 -07:00
Daniel Bell
bdfa7f9a9b Delete LOG4JS_CONFIG environment variable after test has finished. 2012-10-16 10:55:30 +11:00
Daniel Bell
ad63b801f7 Check environment variable LOG4JS_CONFIG for configuration file location. 2012-10-16 08:36:26 +11:00
Gareth Jones
2bfad6362a Version 0.5.3 2012-09-26 09:49:58 +10:00
34 changed files with 921 additions and 824 deletions

.travis.yml

@@ -1,4 +1,5 @@
language: node_js
node_js:
- 0.6
- 0.8
- "0.10"
- "0.8"

README.md

@@ -21,10 +21,10 @@ NOTE: from log4js 0.5 onwards you'll need to explicitly enable replacement of no
```javascript
{
appenders: [
{ type: "console" }
],
replaceConsole: true
appenders: [
{ type: "console" }
],
replaceConsole: true
}
```
@@ -36,100 +36,105 @@ npm install log4js
## usage
Minimalist version:
var log4js = require('log4js');
var logger = log4js.getLogger();
logger.debug("Some debug messages");
```javascript
var log4js = require('log4js');
var logger = log4js.getLogger();
logger.debug("Some debug messages");
```
By default, log4js outputs to stdout with the coloured layout (thanks to [masylum](http://github.com/masylum)), so for the above you would see:
[2010-01-17 11:43:37.987] [DEBUG] [default] - Some debug messages
```bash
[2010-01-17 11:43:37.987] [DEBUG] [default] - Some debug messages
```
See example.js for a full example, but here's a snippet (also in fromreadme.js):
```javascript
var log4js = require('log4js');
//console log is loaded by default, so you won't normally need to do this
//log4js.loadAppender('console');
log4js.loadAppender('file');
//log4js.addAppender(log4js.appenders.console());
log4js.addAppender(log4js.appenders.file('logs/cheese.log'), 'cheese');
var log4js = require('log4js');
//console log is loaded by default, so you won't normally need to do this
//log4js.loadAppender('console');
log4js.loadAppender('file');
//log4js.addAppender(log4js.appenders.console());
log4js.addAppender(log4js.appenders.file('logs/cheese.log'), 'cheese');
var logger = log4js.getLogger('cheese');
logger.setLevel('ERROR');
logger.trace('Entering cheese testing');
logger.debug('Got cheese.');
logger.info('Cheese is Gouda.');
logger.warn('Cheese is quite smelly.');
logger.error('Cheese is too ripe!');
logger.fatal('Cheese was breeding ground for listeria.');
var logger = log4js.getLogger('cheese');
logger.setLevel('ERROR');
logger.trace('Entering cheese testing');
logger.debug('Got cheese.');
logger.info('Cheese is Gouda.');
logger.warn('Cheese is quite smelly.');
logger.error('Cheese is too ripe!');
logger.fatal('Cheese was breeding ground for listeria.');
```
Output:
[2010-01-17 11:43:37.987] [ERROR] cheese - Cheese is too ripe!
[2010-01-17 11:43:37.990] [FATAL] cheese - Cheese was breeding ground for listeria.
```bash
[2010-01-17 11:43:37.987] [ERROR] cheese - Cheese is too ripe!
[2010-01-17 11:43:37.990] [FATAL] cheese - Cheese was breeding ground for listeria.
```
The first 5 lines of the code above could also be written as:
var log4js = require('log4js');
log4js.configure({
appenders: [
{ type: 'console' },
{ type: 'file', filename: 'logs/cheese.log', category: 'cheese' }
]
});
```javascript
var log4js = require('log4js');
log4js.configure({
appenders: [
{ type: 'console' },
{ type: 'file', filename: 'logs/cheese.log', category: 'cheese' }
]
});
```
## configuration
You can configure the appenders and log levels manually (as above), or provide a
configuration file (`log4js.configure('path/to/file.json')`), or a configuration object.
configuration file (`log4js.configure('path/to/file.json')`), or a configuration object. The
configuration file location may also be specified via the environment variable
LOG4JS_CONFIG (`export LOG4JS_CONFIG=path/to/file.json`).
An example file can be found in `test/log4js.json`. An example config file with log rolling is in `test/with-log-rolling.json`.
By default, the configuration file is checked for changes every 60 seconds, and if changed, reloaded. This allows changes to logging levels to occur without restarting the application.
To turn off configuration file change checking, configure with:
var log4js = require('log4js');
log4js.configure('my_log4js_configuration.json', {});
```javascript
var log4js = require('log4js');
log4js.configure('my_log4js_configuration.json', {});
```
To specify a different period:
log4js.configure('file.json', { reloadSecs: 300 });
```javascript
log4js.configure('file.json', { reloadSecs: 300 });
```
For FileAppender you can also pass the path to the log directory as an option where all your log files would be stored.
log4js.configure('my_log4js_configuration.json', { cwd: '/absolute/path/to/log/dir' });
```javascript
log4js.configure('my_log4js_configuration.json', { cwd: '/absolute/path/to/log/dir' });
```
If you have already defined an absolute path for one of the FileAppenders in the configuration file, you could add a "absolute": true to the particular FileAppender to override the cwd option passed. Here is an example configuration file:
#### my_log4js_configuration.json ####
```json
#### my_log4js_configuration.json ####
{
"appenders": [
{
"appenders": [
{
"type": "file",
"filename": "relative/path/to/log_file.log",
"maxLogSize": 20480,
"backups": 3,
"category": "relative-logger"
},
{
"type": "file",
"absolute": true,
"filename": "/absolute/path/to/log_file.log",
"maxLogSize": 20480,
"backups": 10,
"category": "absolute-logger"
}
]
"type": "file",
"filename": "relative/path/to/log_file.log",
"maxLogSize": 20480,
"backups": 3,
"category": "relative-logger"
},
{
"type": "file",
"absolute": true,
"filename": "/absolute/path/to/log_file.log",
"maxLogSize": 20480,
"backups": 10,
"category": "absolute-logger"
}
Documentation for most of the core appenders can be found on the [wiki](log4js-node/wiki/Appenders), otherwise take a look at the tests and the examples.
]
}
```
Documentation for most of the core appenders can be found on the [wiki](https://github.com/nomiddlename/log4js-node/wiki/Appenders), otherwise take a look at the tests and the examples.
## Documentation
See the [wiki](log4js-node/wiki). Improve the [wiki](log4js-node/wiki), please.
See the [wiki](https://github.com/nomiddlename/log4js-node/wiki). Improve the [wiki](https://github.com/nomiddlename/log4js-node/wiki), please.
## Contributing
Contributions welcome, but take a look at the [rules](log4js-node/wiki/Contributing) first.
Contributions welcome, but take a look at the [rules](https://github.com/nomiddlename/log4js-node/wiki/Contributing) first.
## License

connect-logger example (deleted from the repository root)

@@ -1,14 +0,0 @@
var log4js = require('./lib/log4js');
log4js.addAppender(log4js.fileAppender('cheese.log'), 'cheese');
var logger = log4js.getLogger('cheese');
logger.setLevel('INFO');
var app = require('express').createServer();
app.configure(function() {
app.use(log4js.connectLogger(logger, { level: log4js.levels.INFO }));
});
app.get('*', function(req,res) {
res.send('hello world\n <a href="/cheese">cheese</a>\n');
});
app.listen(5000);

connect/express logger example with automatic level detection (new file)

@@ -0,0 +1,46 @@
//The connect/express logger was added to log4js by danbell. This allows connect/express servers to log using log4js.
//https://github.com/nomiddlename/log4js-node/wiki/Connect-Logger
// load modules
var log4js = require('log4js');
var express = require("express");
var app = express();
//config
log4js.configure({
appenders: [
{ type: 'console' },
{ type: 'file', filename: 'logs/log4jsconnect.log', category: 'log4jslog' }
]
});
//define logger
var logger = log4js.getLogger('log4jslog');
// set at which time msg is logged print like: only on error & above
// logger.setLevel('ERROR');
//express app
app.configure(function() {
app.use(express.favicon(''));
// app.use(log4js.connectLogger(logger, { level: log4js.levels.INFO }));
// app.use(log4js.connectLogger(logger, { level: 'auto', format: ':method :url :status' }));
//### AUTO LEVEL DETECTION
//http responses 3xx, level = WARN
//http responses 4xx & 5xx, level = ERROR
//else.level = INFO
app.use(log4js.connectLogger(logger, { level: 'auto' }));
});
//route
app.get('/', function(req,res) {
res.send('hello world');
});
//start app
app.listen(5000);
console.log('server running at localhost:5000');
console.log('Simulation of normal response: goto localhost:5000');
console.log('Simulation of error response: goto localhost:5000/xxx');

example file (require path updated for the examples/ directory)

@@ -1,4 +1,4 @@
var log4js = require('./lib/log4js');
var log4js = require('../lib/log4js');
//log the cheese logger messages to a file, and the console ones as well.
log4js.configure({
appenders: [

another example file (require path updated for the examples/ directory)

@@ -1,4 +1,4 @@
var log4js = require('./lib/log4js')
var log4js = require('../lib/log4js')
, log
, i = 0;
log4js.configure({

patternLayout custom tokens example (new file)

@@ -0,0 +1,21 @@
var log4js = require('./lib/log4js');
var config = {
"appenders": [
{
"type": "console",
"layout": {
"type": "pattern",
"pattern": "%[%r (%x{pid}) %p %c -%] %m%n",
"tokens": {
"pid" : function() { return process.pid; }
}
}
}
]
};
log4js.configure(config, {});
var logger = log4js.getLogger("app");
logger.info("Test log message");

examples/smtp-appender.js

@@ -0,0 +1,43 @@
//Note that smtp appender needs nodemailer to work.
//If you haven't got nodemailer installed, you'll get cryptic
//"cannot find module" errors when using the smtp appender
var log4js = require('../lib/log4js')
, log
, logmailer
, i = 0;
log4js.configure({
"appenders": [
{
type: "console",
category: "test"
},
{
"type": "smtp",
"recipients": "logfilerecipient@logging.com",
"sendInterval": 5,
"transport": "SMTP",
"SMTP": {
"host": "smtp.gmail.com",
"secureConnection": true,
"port": 465,
"auth": {
"user": "someone@gmail",
"pass": "********************"
},
"debug": true
},
"category": "mailer"
}
]
});
log = log4js.getLogger("test");
logmailer = log4js.getLogger("mailer");
function doTheLogging(x) {
log.info("Logging something %d", x);
logmailer.info("Logging something %d", x);
}
for ( ; i < 500; i++) {
doTheLogging(i);
}

lib/appenders/dateFile.js

@@ -19,10 +19,10 @@ process.on('exit', function() {
* also used to check when to roll files - defaults to '.yyyy-MM-dd'
* @layout layout function for log messages - defaults to basicLayout
*/
function appender(filename, pattern, layout) {
function appender(filename, pattern, alwaysIncludePattern, layout) {
layout = layout || layouts.basicLayout;
var logFile = new streams.BufferedWriteStream(new streams.DateRollingFileStream(filename, pattern));
var logFile = new streams.DateRollingFileStream(filename, pattern, { alwaysIncludePattern: alwaysIncludePattern });
openFiles.push(logFile);
return function(logEvent) {
@@ -35,14 +35,18 @@ function configure(config, options) {
var layout;
if (config.layout) {
layout = layouts.layout(config.layout.type, config.layout);
layout = layouts.layout(config.layout.type, config.layout);
}
if (!config.alwaysIncludePattern) {
config.alwaysIncludePattern = false;
}
if (options && options.cwd && !config.absolute) {
config.filename = path.join(options.cwd, config.filename);
}
return appender(config.filename, config.pattern, layout);
return appender(config.filename, config.pattern, config.alwaysIncludePattern, layout);
}
exports.appender = appender;
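A hypothetical configuration exercising the option this hunk wires through (paths and category are placeholders):
```javascript
// With alwaysIncludePattern: true the active log file keeps the date
// pattern in its name, so rolling opens a new file instead of renaming
// the old one (see the DateRollingFileStream changes below).
var log4js = require('log4js');
log4js.configure({
  appenders: [
    { type: 'dateFile',
      filename: 'logs/app',
      pattern: '-yyyy-MM-dd.log',
      alwaysIncludePattern: true,
      category: 'app' }
  ]
});
log4js.getLogger('app').info('written to logs/app-yyyy-MM-dd.log');
```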

lib/appenders/file.js

@@ -22,53 +22,51 @@ process.on('exit', function() {
* @param numBackups - the number of log files to keep after logSize has been reached (default 5)
*/
function fileAppender (file, layout, logSize, numBackups) {
var bytesWritten = 0;
file = path.normalize(file);
layout = layout || layouts.basicLayout;
numBackups = numBackups === undefined ? 5 : numBackups;
//there has to be at least one backup if logSize has been specified
numBackups = numBackups === 0 ? 1 : numBackups;
var bytesWritten = 0;
file = path.normalize(file);
layout = layout || layouts.basicLayout;
numBackups = numBackups === undefined ? 5 : numBackups;
//there has to be at least one backup if logSize has been specified
numBackups = numBackups === 0 ? 1 : numBackups;
function openTheStream(file, fileSize, numFiles) {
var stream;
if (fileSize) {
stream = new streams.BufferedWriteStream(
new streams.RollingFileStream(
file,
fileSize,
numFiles
)
);
} else {
stream = new streams.BufferedWriteStream(fs.createWriteStream(file, { encoding: "utf8", mode: 0644, flags: 'a' }));
}
stream.on("error", function (err) {
console.error("log4js.fileAppender - Writing to file %s, error happened ", file, err);
});
return stream;
function openTheStream(file, fileSize, numFiles) {
var stream;
if (fileSize) {
stream = new streams.RollingFileStream(
file,
fileSize,
numFiles
);
} else {
stream = fs.createWriteStream(file, { encoding: "utf8", mode: 0644, flags: 'a' });
}
stream.on("error", function (err) {
console.error("log4js.fileAppender - Writing to file %s, error happened ", file, err);
});
return stream;
}
var logFile = openTheStream(file, logSize, numBackups);
// push file to the stack of open handlers
openFiles.push(logFile);
return function(loggingEvent) {
logFile.write(layout(loggingEvent) + eol, "utf8");
};
var logFile = openTheStream(file, logSize, numBackups);
// push file to the stack of open handlers
openFiles.push(logFile);
return function(loggingEvent) {
logFile.write(layout(loggingEvent) + eol, "utf8");
};
}
function configure(config, options) {
var layout;
if (config.layout) {
layout = layouts.layout(config.layout.type, config.layout);
}
var layout;
if (config.layout) {
layout = layouts.layout(config.layout.type, config.layout);
}
if (options && options.cwd && !config.absolute) {
config.filename = path.join(options.cwd, config.filename);
}
if (options && options.cwd && !config.absolute) {
config.filename = path.join(options.cwd, config.filename);
}
return fileAppender(config.filename, layout, config.maxLogSize, config.backups);
return fileAppender(config.filename, layout, config.maxLogSize, config.backups);
}
exports.appender = fileAppender;

lib/appenders/multiprocess.js

@@ -16,20 +16,15 @@ function logServer(config) {
function deserializeLoggingEvent(clientSocket, msg) {
var loggingEvent;
try {
loggingEvent = JSON.parse(msg);
loggingEvent.startTime = new Date(loggingEvent.startTime);
loggingEvent.level.toString = function levelToString() {
return loggingEvent.level.levelStr;
};
loggingEvent = JSON.parse(msg);
loggingEvent.startTime = new Date(loggingEvent.startTime);
loggingEvent.level = log4js.levels.toLevel(loggingEvent.level.levelStr);
} catch (e) {
// JSON.parse failed, just log the contents probably a naughty.
loggingEvent = {
startTime: new Date(),
categoryName: 'log4js',
level: { toString: function () {
return 'ERROR';
}
},
level: log4js.levels.ERROR,
data: [ 'Unable to parse log:', msg ]
};
}
@@ -43,31 +38,30 @@ function logServer(config) {
var actualAppender = config.actualAppender,
server = net.createServer(function serverCreated(clientSocket) {
clientSocket.setEncoding('utf8');
clientSocket.on('connect', function clientConnected() {
var logMessage = '';
var logMessage = '';
function logTheMessage(msg) {
if (logMessage.length > 0) {
actualAppender(deserializeLoggingEvent(clientSocket, msg));
}
function logTheMessage(msg) {
if (logMessage.length > 0) {
actualAppender(deserializeLoggingEvent(clientSocket, msg));
}
}
function chunkReceived(chunk) {
var event;
logMessage += chunk || '';
if (logMessage.indexOf(END_MSG) > -1) {
event = logMessage.substring(0, logMessage.indexOf(END_MSG));
logTheMessage(event);
logMessage = logMessage.substring(event.length + END_MSG.length) || '';
//check for more, maybe it was a big chunk
chunkReceived();
}
function chunkReceived(chunk) {
var event;
logMessage += chunk || '';
if (logMessage.indexOf(END_MSG) > -1) {
event = logMessage.substring(0, logMessage.indexOf(END_MSG));
logTheMessage(event);
logMessage = logMessage.substring(event.length + END_MSG.length) || '';
//check for more, maybe it was a big chunk
chunkReceived();
}
}
clientSocket.on('data', chunkReceived);
clientSocket.on('end', chunkReceived);
});
clientSocket.on('data', chunkReceived);
clientSocket.on('end', chunkReceived);
});
server.listen(config.loggerPort || 5000, config.loggerHost || 'localhost');
return actualAppender;
@@ -120,11 +114,11 @@ function createAppender(config) {
}
}
function configure(config) {
function configure(config, options) {
var actualAppender;
if (config.appender && config.mode === 'master') {
log4js.loadAppender(config.appender.type);
actualAppender = log4js.appenderMakers[config.appender.type](config.appender);
actualAppender = log4js.appenderMakers[config.appender.type](config.appender, options);
config.actualAppender = actualAppender;
}
return createAppender(config);

lib/appenders/smtp.js

@@ -1,6 +1,6 @@
var layouts = require("../layouts"),
mailer = require("nodemailer"),
os = require('os');
mailer = require("nodemailer"),
os = require('os');
/**
* SMTP Appender. Sends logging events using SMTP protocol.
@@ -17,12 +17,13 @@ function smtpAppender(config, layout) {
var logEventBuffer = [];
var sendTimer;
var transport = mailer.createTransport(config.transport, config[config.transport]);
function sendBuffer() {
if (logEventBuffer.length == 0)
if (logEventBuffer.length == 0) {
return;
}
var transport = mailer.createTransport(config.transport, config[config.transport]);
var firstEvent = logEventBuffer[0];
var body = "";
while (logEventBuffer.length > 0) {
@@ -35,29 +36,33 @@ function smtpAppender(config, layout) {
text: body,
headers: {"Hostname": os.hostname()}
};
if (config.sender)
if (config.sender) {
msg.from = config.sender;
}
transport.sendMail(msg, function(error, success) {
if (error) {
console.error("log4js.smtpAppender - Error happened ", error);
}
transport.close();
});
}
function scheduleSend() {
if (!sendTimer)
if (!sendTimer) {
sendTimer = setTimeout(function() {
sendTimer = null;
sendBuffer();
}, sendInterval);
}
}
return function(loggingEvent) {
logEventBuffer.push(loggingEvent);
if (sendInterval > 0)
if (sendInterval > 0) {
scheduleSend();
else
} else {
sendBuffer();
}
};
}
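A minimal sketch of the nodemailer 0.x flow the appender now follows, creating a transport per send and closing it in the callback (host and addresses are placeholders):
```javascript
var mailer = require('nodemailer');

function sendLogBatch(body) {
  // One transport per send, closed when done, as in the diff above.
  var transport = mailer.createTransport('SMTP', {
    host: 'smtp.example.com',
    port: 25
  });
  transport.sendMail({
    to: 'logfilerecipient@example.com',
    subject: 'log batch',
    text: body
  }, function(error, success) {
    if (error) {
      console.error('send failed', error);
    }
    transport.close();
  });
}
```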

lib/connect-logger.js

@@ -5,7 +5,7 @@ var levels = require("./levels");
* Options:
*
* - `format` Format string, see below for tokens
* - `level` A log4js levels instance.
* - `level` A log4js levels instance. Supports also 'auto'
*
* Tokens:
*
@@ -26,64 +26,74 @@ var levels = require("./levels");
* @api public
*/
function getLogger(logger4js, options) {
if ('object' == typeof options) {
options = options || {};
} else if (options) {
options = { format: options };
} else {
options = {};
}
function getLogger(logger4js, options) {
if ('object' == typeof options) {
options = options || {};
} else if (options) {
options = { format: options };
} else {
options = {};
}
var thislogger = logger4js
, level = levels.toLevel(options.level, levels.INFO)
, fmt = options.format || ':remote-addr - - ":method :url HTTP/:http-version" :status :content-length ":referrer" ":user-agent"'
, nolog = options.nolog ? createNoLogCondition(options.nolog) : null;
var thislogger = logger4js
, level = levels.toLevel(options.level, levels.INFO)
, fmt = options.format || ':remote-addr - - ":method :url HTTP/:http-version" :status :content-length ":referrer" ":user-agent"'
, nolog = options.nolog ? createNoLogCondition(options.nolog) : null;
return function (req, res, next) {
return function (req, res, next) {
// mount safety
if (req._logging) return next();
// nologs
if (nolog && nolog.test(req.originalUrl)) return next();
// nologs
if (nolog && nolog.test(req.originalUrl)) return next();
if (thislogger.isLevelEnabled(level) || options.level === 'auto') {
if (thislogger.isLevelEnabled(level)) {
var start = +new Date
, statusCode
, writeHead = res.writeHead
, end = res.end
, url = req.originalUrl;
var start = +new Date
, statusCode
, writeHead = res.writeHead
, end = res.end
, url = req.originalUrl;
// flag as logging
req._logging = true;
// flag as logging
req._logging = true;
// proxy for statusCode.
res.writeHead = function(code, headers){
res.writeHead = writeHead;
res.writeHead(code, headers);
res.__statusCode = statusCode = code;
res.__headers = headers || {};
// proxy for statusCode.
res.writeHead = function(code, headers){
res.writeHead = writeHead;
res.writeHead(code, headers);
res.__statusCode = statusCode = code;
res.__headers = headers || {};
};
//status code response level handling
if(options.level === 'auto'){
level = levels.INFO;
if(code >= 300) level = levels.WARN;
if(code >= 400) level = levels.ERROR;
} else {
level = levels.toLevel(options.level, levels.INFO)
}
};
// proxy end to output a line to the provided logger.
res.end = function(chunk, encoding) {
res.end = end;
res.end(chunk, encoding);
res.responseTime = +new Date - start;
if ('function' == typeof fmt) {
var line = fmt(req, res, function(str){ return format(str, req, res); });
if (line) thislogger.log(level, line);
} else {
thislogger.log(level, format(fmt, req, res));
// proxy end to output a line to the provided logger.
res.end = function(chunk, encoding) {
res.end = end;
res.end(chunk, encoding);
res.responseTime = +new Date - start;
if (thislogger.isLevelEnabled(level)) {
if (typeof fmt === 'function') {
var line = fmt(req, res, function(str){ return format(str, req, res); });
if (line) thislogger.log(level, line);
} else {
thislogger.log(level, format(fmt, req, res));
}
}
};
}
};
}
//ensure next gets always called
next();
};
};
}
/**
@@ -96,25 +106,25 @@ function getLogger(logger4js, options) {
* @api private
*/
function format(str, req, res) {
return str
.replace(':url', req.originalUrl)
function format(str, req, res) {
return str
.replace(':url', req.originalUrl)
.replace(':method', req.method)
.replace(':status', res.__statusCode || res.statusCode)
.replace(':response-time', res.responseTime)
.replace(':date', new Date().toUTCString())
.replace(':referrer', req.headers['referer'] || req.headers['referrer'] || '')
.replace(':http-version', req.httpVersionMajor + '.' + req.httpVersionMinor)
.replace(':remote-addr', req.socket && (req.socket.remoteAddress || (req.socket.socket && req.socket.socket.remoteAddress)))
.replace(':user-agent', req.headers['user-agent'] || '')
.replace(':content-length', (res._headers && res._headers['content-length']) || (res.__headers && res.__headers['Content-Length']) || '-')
.replace(/:req\[([^\]]+)\]/g, function(_, field){ return req.headers[field.toLowerCase()]; })
.replace(/:res\[([^\]]+)\]/g, function(_, field){
return res._headers
? (res._headers[field.toLowerCase()] || res.__headers[field])
: (res.__headers && res.__headers[field]);
});
}
.replace(':status', res.__statusCode || res.statusCode)
.replace(':response-time', res.responseTime)
.replace(':date', new Date().toUTCString())
.replace(':referrer', req.headers['referer'] || req.headers['referrer'] || '')
.replace(':http-version', req.httpVersionMajor + '.' + req.httpVersionMinor)
.replace(':remote-addr', req.socket && (req.socket.remoteAddress || (req.socket.socket && req.socket.socket.remoteAddress)))
.replace(':user-agent', req.headers['user-agent'] || '')
.replace(':content-length', (res._headers && res._headers['content-length']) || (res.__headers && res.__headers['Content-Length']) || '-')
.replace(/:req\[([^\]]+)\]/g, function(_, field){ return req.headers[field.toLowerCase()]; })
.replace(/:res\[([^\]]+)\]/g, function(_, field){
return res._headers
? (res._headers[field.toLowerCase()] || res.__headers[field])
: (res.__headers && res.__headers[field]);
});
}
/**
* Return RegExp Object about nolog
@@ -143,26 +153,26 @@ function format(str, req, res) {
* 3.1 ["\\.jpg$", "\\.png", "\\.gif"]
* SAME AS "\\.jpg|\\.png|\\.gif"
*/
function createNoLogCondition(nolog, type) {
if(!nolog) return null;
type = type || '';
function createNoLogCondition(nolog, type) {
if(!nolog) return null;
type = type || '';
if(nolog instanceof RegExp){
if(type === 'string')
return nolog.source;
return nolog;
} else if(typeof nolog === 'string'){
if(type === 'string')
return nolog;
try{
return new RegExp(nolog);
} catch (ex) {
return null;
}
} else if(nolog instanceof Array){
var regexps = nolog.map(function(o){ return createNoLogCondition(o, 'string')});
return new RegExp(regexps.join('|'));
}
}
if(nolog instanceof RegExp){
if(type === 'string')
return nolog.source;
return nolog;
} else if(typeof nolog === 'string'){
if(type === 'string')
return nolog;
try{
return new RegExp(nolog);
} catch (ex) {
return null;
}
} else if(nolog instanceof Array){
var regexps = nolog.map(function(o){ return createNoLogCondition(o, 'string')});
return new RegExp(regexps.join('|'));
}
}
exports.connectLogger = getLogger;
exports.connectLogger = getLogger;
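Putting the documented options together, a hypothetical Express setup using the new 'auto' level plus a nolog array:
```javascript
// 'auto' picks INFO/WARN/ERROR from the response status code; the nolog
// array (regexp sources, as documented above) skips static assets.
var log4js = require('log4js');
var express = require('express');
var app = express();
app.use(log4js.connectLogger(log4js.getLogger('http'), {
  level: 'auto',
  nolog: [ '\\.jpg$', '\\.png$', '\\.gif$' ]
}));
app.get('/', function(req, res) { res.send('hello world'); });
app.listen(5000);
```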

lib/layouts.js

@@ -10,8 +10,9 @@ var dateFormat = require('./date_format')
, "coloured": function() { return colouredLayout; }
, "pattern": function (config) {
var pattern = config.pattern || undefined;
return patternLayout(pattern);
}
var tokens = config.tokens || undefined;
return patternLayout(pattern, tokens);
}
}
, colours = {
ALL: "grey"
@@ -24,7 +25,6 @@ var dateFormat = require('./date_format')
, OFF: "grey"
};
function formatLogData(logData) {
var output = ""
, data = Array.isArray(logData) ? logData.slice() : Array.prototype.slice.call(arguments)
@@ -58,30 +58,36 @@ function formatLogData(logData) {
return output;
}
var styles = {
//styles
'bold' : [1, 22],
'italic' : [3, 23],
'underline' : [4, 24],
'inverse' : [7, 27],
//grayscale
'white' : [37, 39],
'grey' : [90, 39],
'black' : [90, 39],
//colors
'blue' : [34, 39],
'cyan' : [36, 39],
'green' : [32, 39],
'magenta' : [35, 39],
'red' : [31, 39],
'yellow' : [33, 39]
};
function colorizeStart(style) {
return style ? '\033[' + styles[style][0] + 'm' : '';
}
function colorizeEnd(style) {
return style ? '\033[' + styles[style][1] + 'm' : '';
}
/**
* Taken from masylum's fork (https://github.com/masylum/log4js-node)
*/
function colorize (str, style) {
var styles = {
//styles
'bold' : [1, 22],
'italic' : [3, 23],
'underline' : [4, 24],
'inverse' : [7, 27],
//grayscale
'white' : [37, 39],
'grey' : [90, 39],
'black' : [90, 39],
//colors
'blue' : [34, 39],
'cyan' : [36, 39],
'green' : [32, 39],
'magenta' : [35, 39],
'red' : [31, 39],
'yellow' : [33, 39]
};
return style ? '\033[' + styles[style][0] + 'm' + str +
'\033[' + styles[style][1] + 'm' : str;
return colorizeStart(style) + str + colorizeEnd(style);
}
function timestampLevelAndCategory(loggingEvent, colour) {
@@ -134,12 +140,26 @@ function messagePassThroughLayout (loggingEvent) {
* - %d date in various formats
* - %% %
* - %n newline
* Takes a pattern string and returns a layout function.
* - %x{<tokenname>} add dynamic tokens to your log. Tokens are specified in the tokens parameter
* You can use %[ and %] to define a colored block.
*
* Tokens are specified as simple key:value objects.
* The key represents the token name whereas the value can be a string or function
* which is called to extract the value to put in the log message. If token is not
* found, it doesn't replace the field.
*
* A sample token would be: { "pid" : function() { return process.pid; } }
*
* Takes a pattern string, array of tokens and returns a layout function.
* @param {String} Log format pattern String
* @param {object} map object of different tokens
* @return {Function}
* @author Stephan Strittmatter
* @author Jan Schmidle
*/
function patternLayout (pattern) {
function patternLayout (pattern, tokens) {
var TTCC_CONVERSION_PATTERN = "%r %p %c - %m%n";
var regex = /%(-?[0-9]+)?(\.?[0-9]+)?([cdmnpr%])(\{([^\}]+)\})?|([^%]+)/;
var regex = /%(-?[0-9]+)?(\.?[0-9]+)?([\[\]cdmnprx%])(\{([^\}]+)\})?|([^%]+)/;
pattern = pattern || TTCC_CONVERSION_PATTERN;
@@ -206,9 +226,26 @@ function patternLayout (pattern) {
case "r":
replacement = "" + loggingEvent.startTime.toLocaleTimeString();
break;
case "[":
replacement = colorizeStart(colours[loggingEvent.level.toString()]);
break;
case "]":
replacement = colorizeEnd(colours[loggingEvent.level.toString()]);
break;
case "%":
replacement = "%";
break;
case "x":
if(typeof(tokens[specifier]) !== 'undefined') {
if(typeof(tokens[specifier]) === 'function') {
replacement = tokens[specifier]();
} else {
replacement = tokens[specifier];
}
} else {
replacement = matchedString;
}
break;
default:
replacement = matchedString;
break;
@@ -248,7 +285,6 @@ function patternLayout (pattern) {
};
module.exports = {
basicLayout: basicLayout
, messagePassThroughLayout: messagePassThroughLayout

lib/log4js.js

@@ -240,8 +240,9 @@ function initReloadConfiguration(filename, options) {
function configure(configurationFileOrObject, options) {
var config = configurationFileOrObject;
config = config || process.env.LOG4JS_CONFIG;
options = options || {};
if (config === undefined || config === null || typeof(config) === 'string') {
if (options.reloadSecs) {
initReloadConfiguration(config, options);

lib/streams/BaseRollingFileStream.js

@@ -1,92 +1,82 @@
var fs = require('fs'),
util = require('util');
stream,
util = require('util'),
semver = require('semver');
function debug(message) {
// console.log(message);
if (semver.satisfies(process.version, '>=0.10.0')) {
stream = require('stream');
} else {
stream = require('readable-stream');
}
var debug;
if (process.env.NODE_DEBUG && /\blog4js\b/.test(process.env.NODE_DEBUG)) {
debug = function(message) { console.error('LOG4JS: (BaseRollingFileStream) %s', message); };
} else {
debug = function() { };
}
module.exports = BaseRollingFileStream;
function BaseRollingFileStream(filename, options) {
debug("In BaseRollingFileStream");
this.filename = filename;
this.options = options || { encoding: 'utf8', mode: 0644, flags: 'a' };
this.currentSize = 0;
debug("In BaseRollingFileStream");
this.filename = filename;
this.options = options || { encoding: 'utf8', mode: 0644, flags: 'a' };
this.rolling = false;
this.writesWhileRolling = [];
this.currentSize = 0;
this.rollBeforeWrite = false;
function currentFileSize(file) {
var fileSize = 0;
try {
fileSize = fs.statSync(file).size;
} catch (e) {
// file does not exist
}
return fileSize;
function currentFileSize(file) {
var fileSize = 0;
try {
fileSize = fs.statSync(file).size;
} catch (e) {
// file does not exist
}
return fileSize;
}
function throwErrorIfArgumentsAreNotValid() {
if (!filename) {
throw new Error("You must specify a filename");
}
function throwErrorIfArgumentsAreNotValid() {
if (!filename) {
throw new Error("You must specify a filename");
}
}
throwErrorIfArgumentsAreNotValid();
debug("Calling BaseRollingFileStream.super");
BaseRollingFileStream.super_.call(this, this.filename, this.options);
this.currentSize = currentFileSize(this.filename);
throwErrorIfArgumentsAreNotValid();
debug("Calling BaseRollingFileStream.super");
BaseRollingFileStream.super_.call(this);
this.openTheStream();
this.currentSize = currentFileSize(this.filename);
}
util.inherits(BaseRollingFileStream, fs.FileWriteStream);
util.inherits(BaseRollingFileStream, stream.Writable);
BaseRollingFileStream.prototype.initRolling = function() {
var that = this;
BaseRollingFileStream.prototype._write = function(chunk, encoding, callback) {
var that = this;
function writeTheChunk() {
debug("writing the chunk to the underlying stream");
that.currentSize += chunk.length;
that.theStream.write(chunk, encoding, callback);
}
function emptyRollingQueue() {
debug("emptying the rolling queue");
var toWrite;
while ((toWrite = that.writesWhileRolling.shift())) {
BaseRollingFileStream.super_.prototype.write.call(that, toWrite.data, toWrite.encoding);
that.currentSize += toWrite.data.length;
if (that.shouldRoll()) {
that.flush();
return true;
}
}
that.flush();
return false;
}
debug("in _write");
this.rolling = true;
this.roll(this.filename, function() {
that.currentSize = 0;
that.rolling = emptyRollingQueue();
if (that.rolling) {
process.nextTick(function() { that.initRolling(); });
}
});
if (this.shouldRoll()) {
this.currentSize = 0;
this.roll(this.filename, writeTheChunk);
} else {
writeTheChunk();
}
};
BaseRollingFileStream.prototype.write = function(data, encoding) {
var canWrite = false;
if (this.rolling) {
this.writesWhileRolling.push({ data: data, encoding: encoding });
} else {
if (this.rollBeforeWrite && this.shouldRoll()) {
this.writesWhileRolling.push({ data: data, encoding: encoding });
this.initRolling();
} else {
canWrite = BaseRollingFileStream.super_.prototype.write.call(this, data, encoding);
this.currentSize += data.length;
debug('current size = ' + this.currentSize);
BaseRollingFileStream.prototype.openTheStream = function(cb) {
debug("opening the underlying stream");
this.theStream = fs.createWriteStream(this.filename, this.options);
if (cb) {
this.theStream.on("open", cb);
}
};
if (!this.rollBeforeWrite && this.shouldRoll()) {
this.initRolling();
}
}
}
return canWrite;
BaseRollingFileStream.prototype.closeTheStream = function(cb) {
debug("closing the underlying stream");
this.theStream.end(cb);
};
BaseRollingFileStream.prototype.shouldRoll = function() {
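The heart of this rewrite is the streams2 contract: inherit from stream.Writable and implement _write, instead of overriding write() and hand-rolling buffering. A minimal sketch (the class name is illustrative):
```javascript
// Pattern adopted above; runnable on node >= 0.10 (or with
// readable-stream on 0.8, as the semver check selects).
var stream = require('stream');
var util = require('util');

function CountingStream(options) {
  stream.Writable.call(this, options);
  this.currentSize = 0;
}
util.inherits(CountingStream, stream.Writable);

// Writable handles buffering and back-pressure before calling _write,
// which is why the hand-rolled BufferedWriteStream could be deleted.
CountingStream.prototype._write = function(chunk, encoding, callback) {
  this.currentSize += chunk.length;
  callback();
};
```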

lib/streams/BufferedWriteStream.js

@@ -1,77 +0,0 @@
var events = require('events'),
util = require('util');
module.exports = BufferedWriteStream;
function BufferedWriteStream(stream) {
var that = this;
this.stream = stream;
this.buffer = [];
this.canWrite = false;
this.bytes = 0;
this.stream.on("open", function() {
that.canWrite = true;
that.flushBuffer();
});
this.stream.on("error", function (err) {
that.emit("error", err);
});
this.stream.on("drain", function() {
that.canWrite = true;
that.flushBuffer();
});
}
util.inherits(BufferedWriteStream, events.EventEmitter);
Object.defineProperty(
BufferedWriteStream.prototype,
"fd",
{
get: function() { return this.stream.fd; },
set: function(newFd) {
this.stream.fd = newFd;
this.bytes = 0;
}
}
);
Object.defineProperty(
BufferedWriteStream.prototype,
"bytesWritten",
{
get: function() { return this.bytes; }
}
);
BufferedWriteStream.prototype.write = function(data, encoding) {
this.buffer.push({ data: data, encoding: encoding });
this.flushBuffer();
};
BufferedWriteStream.prototype.end = function(data, encoding) {
if (data) {
this.buffer.push({ data: data, encoding: encoding });
}
this.flushBufferEvenIfCannotWrite();
};
BufferedWriteStream.prototype.writeToStream = function(toWrite) {
this.bytes += toWrite.data.length;
this.canWrite = this.stream.write(toWrite.data, toWrite.encoding);
};
BufferedWriteStream.prototype.flushBufferEvenIfCannotWrite = function() {
while (this.buffer.length > 0) {
this.writeToStream(this.buffer.shift());
}
};
BufferedWriteStream.prototype.flushBuffer = function() {
while (this.buffer.length > 0 && this.canWrite) {
this.writeToStream(this.buffer.shift());
}
};

lib/streams/DateRollingFileStream.js

@@ -6,26 +6,40 @@ var BaseRollingFileStream = require('./BaseRollingFileStream'),
module.exports = DateRollingFileStream;
function debug(message) {
// console.log(message);
var debug;
if (process.env.NODE_DEBUG && /\blog4js\b/.test(process.env.NODE_DEBUG)) {
debug = function(message) { console.error('LOG4JS: (DateRollingFileStream) %s', message); };
} else {
debug = function() { };
}
function DateRollingFileStream(filename, pattern, options, now) {
debug("Now is " + now);
if (pattern && typeof(pattern) === 'object') {
now = options;
options = pattern;
pattern = null;
debug("Now is " + now);
if (pattern && typeof(pattern) === 'object') {
now = options;
options = pattern;
pattern = null;
}
this.pattern = pattern || '.yyyy-MM-dd';
this.now = now || Date.now;
this.lastTimeWeWroteSomething = format.asString(this.pattern, new Date(this.now()));
this.baseFilename = filename;
this.alwaysIncludePattern = false;
if (options) {
if (options.alwaysIncludePattern) {
this.alwaysIncludePattern = true;
filename = this.baseFilename + this.lastTimeWeWroteSomething;
}
this.pattern = pattern || '.yyyy-MM-dd';
this.now = now || Date.now;
this.lastTimeWeWroteSomething = format.asString(this.pattern, new Date(this.now()));
debug("this.now is " + this.now + ", now is " + now);
delete options.alwaysIncludePattern;
if (Object.keys(options).length === 0) {
options = null;
}
}
debug("this.now is " + this.now + ", now is " + now);
DateRollingFileStream.super_.call(this, filename, options);
this.rollBeforeWrite = true;
DateRollingFileStream.super_.call(this, filename, options);
}
util.inherits(DateRollingFileStream, BaseRollingFileStream);
DateRollingFileStream.prototype.shouldRoll = function() {
@@ -41,20 +55,26 @@ DateRollingFileStream.prototype.shouldRoll = function() {
};
DateRollingFileStream.prototype.roll = function(filename, callback) {
var that = this,
newFilename = filename + this.previousTime;
var that = this;
debug("Starting roll");
debug("Queueing up data until we've finished rolling");
debug("Flushing underlying stream");
this.flush();
async.series([
deleteAnyExistingFile,
renameTheCurrentFile,
openANewFile
], callback);
if (this.alwaysIncludePattern) {
this.filename = this.baseFilename + this.lastTimeWeWroteSomething;
async.series([
this.closeTheStream.bind(this),
this.openTheStream.bind(this)
], callback);
} else {
var newFilename = this.baseFilename + this.previousTime;
async.series([
this.closeTheStream.bind(this),
deleteAnyExistingFile,
renameTheCurrentFile,
this.openTheStream.bind(this)
], callback);
}
function deleteAnyExistingFile(cb) {
//on windows, you can get a EEXIST error if you rename a file to an existing file
//so, we'll try to delete the file we're renaming to first
@@ -69,21 +89,4 @@ DateRollingFileStream.prototype.roll = function(filename, callback) {
fs.rename(filename, newFilename, cb);
}
function openANewFile(cb) {
debug("Opening a new file");
fs.open(
filename,
that.options.flags,
that.options.mode,
function (err, fd) {
debug("opened new file");
var oldLogFileFD = that.fd;
that.fd = fd;
that.writable = true;
fs.close(oldLogFileFD, cb);
}
);
}
};
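roll() above relies on async.series (async is already a dependency in package.json) to run the close/rename/open steps strictly in order; a tiny sketch of that helper with placeholder tasks:
```javascript
// async.series starts each task only after the previous one has
// invoked its callback, then calls the final callback.
var async = require('async');
async.series([
  function closeOld(cb) { console.log('close stream'); cb(); },
  function renameFile(cb) { console.log('rename file'); cb(); },
  function openNew(cb) { console.log('open new stream'); cb(); }
], function(err) {
  if (err) console.error('roll failed', err);
});
```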

lib/streams/RollingFileStream.js

@@ -4,9 +4,11 @@ var BaseRollingFileStream = require('./BaseRollingFileStream'),
fs = require('fs'),
async = require('async');
function debug(message) {
// util.debug(message);
// console.log(message);
var debug;
if (process.env.NODE_DEBUG && /\blog4js\b/.test(process.env.NODE_DEBUG)) {
debug = function(message) { console.error('LOG4JS: (RollingFileStream) %s', message); };
} else {
debug = function() { };
}
module.exports = RollingFileStream;
@@ -28,6 +30,7 @@ function RollingFileStream (filename, size, backups, options) {
util.inherits(RollingFileStream, BaseRollingFileStream);
RollingFileStream.prototype.shouldRoll = function() {
debug("should roll with current size %d, and max size %d", this.currentSize, this.size);
return this.currentSize >= this.size;
};
@@ -81,30 +84,11 @@ RollingFileStream.prototype.roll = function(filename, callback) {
});
}
function openANewFile(cb) {
debug("Opening a new file");
fs.open(
filename,
that.options.flags,
that.options.mode,
function (err, fd) {
debug("opened new file");
var oldLogFileFD = that.fd;
that.fd = fd;
that.writable = true;
fs.close(oldLogFileFD, cb);
}
);
}
debug("Starting roll");
debug("Queueing up data until we've finished rolling");
debug("Flushing underlying stream");
this.flush();
async.series([
renameTheFiles,
openANewFile
], callback);
debug("Rolling, rolling, rolling");
async.series([
this.closeTheStream.bind(this),
renameTheFiles,
this.openTheStream.bind(this)
], callback);
};

lib/streams/index.js

@@ -1,3 +1,2 @@
exports.BufferedWriteStream = require('./BufferedWriteStream');
exports.RollingFileStream = require('./RollingFileStream');
exports.DateRollingFileStream = require('./DateRollingFileStream');

package.json

@@ -1,37 +1,42 @@
{
"name": "log4js",
"version": "0.5.2",
"description": "Port of Log4js to work with node.",
"keywords": [
"logging",
"log",
"log4j",
"node"
],
"main": "./lib/log4js",
"author": "Gareth Jones <gareth.jones@sensis.com.au>",
"repository": {
"type": "git",
"url": "https://github.com/nomiddlename/log4js-node.git"
},
"bugs": {
"url": "http://github.com/nomiddlename/log4js-node/issues"
},
"engines": [ "node >=0.6" ],
"scripts": {
"test": "vows"
},
"directories": {
"test": "test",
"lib": "lib"
},
"dependencies": {
"async": "0.1.15"
},
"devDependencies": {
"vows": "0.6.2",
"sandboxed-module": "0.1.3",
"hook.io": "0.8.10",
"underscore": "1.2.1"
}
"name": "log4js",
"version": "0.6.6",
"description": "Port of Log4js to work with node.",
"keywords": [
"logging",
"log",
"log4j",
"node"
],
"main": "./lib/log4js",
"author": "Gareth Jones <gareth.jones@sensis.com.au>",
"repository": {
"type": "git",
"url": "https://github.com/nomiddlename/log4js-node.git"
},
"bugs": {
"url": "http://github.com/nomiddlename/log4js-node/issues"
},
"engines": {
"node": ">=0.8"
},
"scripts": {
"test": "vows"
},
"directories": {
"test": "test",
"lib": "lib"
},
"dependencies": {
"async": "0.1.15",
"dequeue": "1.0.3",
"semver": "~1.1.4",
"readable-stream": "~1.0.2"
},
"devDependencies": {
"vows": "0.7.0",
"sandboxed-module": "0.1.3",
"hook.io": "0.8.10",
"underscore": "1.2.1"
}
}

test: log4js configure

@@ -84,6 +84,48 @@ vows.describe('log4js configure').addBatch({
'should add appender configure function to appenderMakers': function(log4js) {
assert.isFunction(log4js.appenderMakers['some/other/external']);
}
},
'when configuration file loaded via LOG4JS_CONFIG environment variable': {
topic: function() {
process.env.LOG4JS_CONFIG = 'some/path/to/mylog4js.json';
var fileRead = 0,
modulePath = 'some/path/to/mylog4js.json',
pathsChecked = [],
mtime = new Date(),
fakeFS = {
config: { appenders: [ { type: 'console', layout: { type: 'messagePassThrough' } } ],
levels: { 'a-test' : 'INFO' } },
readdirSync: function(dir) {
return require('fs').readdirSync(dir);
},
readFileSync: function (file, encoding) {
fileRead += 1;
assert.isString(file);
assert.equal(file, modulePath);
assert.equal(encoding, 'utf8');
return JSON.stringify(fakeFS.config);
},
statSync: function (path) {
pathsChecked.push(path);
if (path === modulePath) {
return { mtime: mtime };
} else {
throw new Error("no such file");
}
}
},
log4js = sandbox.require('../lib/log4js',
{
requires: {
'fs': fakeFS,
}
});
delete process.env.LOG4JS_CONFIG;
return fileRead;
},
'should load the specified local configuration file' : function(fileRead) {
assert.equal(fileRead, 1);
}
}
}
}).exportTo(module);

test: dateFile appender

@@ -5,13 +5,13 @@ var vows = require('vows'),
log4js = require('../lib/log4js');
function removeFile(filename) {
return function() {
fs.unlink(path.join(__dirname, filename), function(err) {
if (err) {
console.log("Could not delete ", filename, err);
}
});
};
return function() {
fs.unlink(path.join(__dirname, filename), function(err) {
if (err) {
console.log("Could not delete ", filename, err);
}
});
};
}
vows.describe('../lib/appenders/dateFile').addBatch({
@@ -72,27 +72,62 @@ vows.describe('../lib/appenders/dateFile').addBatch({
}
}).addBatch({
'configure': {
'with dateFileAppender': {
'configure': {
'with dateFileAppender': {
topic: function() {
var log4js = require('../lib/log4js')
, logger;
//this config file defines one file appender (to ./date-file-test.log)
//and sets the log level for "tests" to WARN
log4js.configure('test/with-dateFile.json');
logger = log4js.getLogger('tests');
logger.info('this should not be written to the file');
logger.warn('this should be written to the file');
fs.readFile(path.join(__dirname, 'date-file-test.log'), 'utf8', this.callback);
var log4js = require('../lib/log4js')
, logger;
//this config file defines one file appender (to ./date-file-test.log)
//and sets the log level for "tests" to WARN
log4js.configure('test/with-dateFile.json');
logger = log4js.getLogger('tests');
logger.info('this should not be written to the file');
logger.warn('this should be written to the file');
fs.readFile(path.join(__dirname, 'date-file-test.log'), 'utf8', this.callback);
},
teardown: removeFile('date-file-test.log'),
teardown: removeFile('date-file-test.log'),
'should load appender configuration from a json file': function(err, contents) {
assert.include(contents, 'this should be written to the file\n');
assert.equal(contents.indexOf('this should not be written to the file'), -1);
assert.include(contents, 'this should be written to the file' + require('os').EOL);
assert.equal(contents.indexOf('this should not be written to the file'), -1);
}
}
}
},
'with options.alwaysIncludePattern': {
topic: function() {
var log4js = require('../lib/log4js')
, format = require('../lib/date_format')
, logger
, options = {
"appenders": [
{
"category": "tests",
"type": "dateFile",
"filename": "test/date-file-test",
"pattern": "-from-MM-dd.log",
"alwaysIncludePattern": true,
"layout": {
"type": "messagePassThrough"
}
}
]
}
, thisTime = format.asString(options.appenders[0].pattern, new Date());
fs.writeFileSync(path.join(__dirname, 'date-file-test' + thisTime), "this is existing data" + require('os').EOL, 'utf8');
log4js.clearAppenders();
log4js.configure(options);
logger = log4js.getLogger('tests');
logger.warn('this should be written to the file with the appended date');
this.teardown = removeFile('date-file-test' + thisTime);
fs.readFile(path.join(__dirname, 'date-file-test' + thisTime), 'utf8', this.callback);
},
'should create file with the correct pattern': function(contents) {
assert.include(contents, 'this should be written to the file with the appended date');
},
'should not overwrite the file on open (bug found in issue #132)': function(contents) {
assert.include(contents, 'this is existing data');
}
}
}
}).exportTo(module);

test: log4js fileAppender

@@ -108,7 +108,13 @@ vows.describe('log4js fileAppender').addBatch({
var that = this;
//give the system a chance to open the stream
setTimeout(function() {
fs.readdir(__dirname, that.callback);
fs.readdir(__dirname, function(err, files) {
if (files) {
that.callback(null, files.sort());
} else {
that.callback(err, files);
}
});
}, 200);
},
'the log files': {
@@ -120,31 +126,31 @@ vows.describe('log4js fileAppender').addBatch({
assert.equal(files.length, 3);
},
'should be named in sequence': function (files) {
assert.deepEqual(files.sort(), ['fa-maxFileSize-with-backups-test.log', 'fa-maxFileSize-with-backups-test.log.1', 'fa-maxFileSize-with-backups-test.log.2']);
assert.deepEqual(files, ['fa-maxFileSize-with-backups-test.log', 'fa-maxFileSize-with-backups-test.log.1', 'fa-maxFileSize-with-backups-test.log.2']);
},
'and the contents of the first file': {
topic: function(logFiles) {
fs.readFile(path.join(__dirname, logFiles[0]), "utf8", this.callback);
},
'should be empty because the last log message triggers rolling': function(contents) {
assert.isEmpty(contents);
}
},
'and the contents of the second file': {
topic: function(logFiles) {
fs.readFile(path.join(__dirname, logFiles[1]), "utf8", this.callback);
fs.readFile(path.join(__dirname, logFiles[0]), "utf8", this.callback);
},
'should be the last log message': function(contents) {
assert.include(contents, 'This is the fourth log message.');
}
},
'and the contents of the third file': {
'and the contents of the second file': {
topic: function(logFiles) {
fs.readFile(path.join(__dirname, logFiles[2]), "utf8", this.callback);
fs.readFile(path.join(__dirname, logFiles[1]), "utf8", this.callback);
},
'should be the third log message': function(contents) {
assert.include(contents, 'This is the third log message.');
}
},
'and the contents of the third file': {
topic: function(logFiles) {
fs.readFile(path.join(__dirname, logFiles[2]), "utf8", this.callback);
},
'should be the second log message': function(contents) {
assert.include(contents, 'This is the second log message.');
}
}
}
}

test: log4js layouts

@@ -4,9 +4,10 @@ assert = require('assert');
//used for patternLayout tests.
function test(args, pattern, value) {
var layout = args[0]
, event = args[1];
, event = args[1]
, tokens = args[2];
assert.equal(layout(pattern)(event), value);
assert.equal(layout(pattern, tokens)(event), value);
}
vows.describe('log4js layouts').addBatch({
@@ -175,8 +176,12 @@ vows.describe('log4js layouts').addBatch({
level: {
toString: function() { return "DEBUG"; }
}
}, layout = require('../lib/layouts').patternLayout;
return [layout, event];
}, layout = require('../lib/layouts').patternLayout
, tokens = {
testString: 'testStringToken',
testFunction: function() { return 'testFunctionToken'; }
};
return [layout, event, tokens];
},
'should default to "time logLevel loggerName - message"': function(args) {
@@ -243,9 +248,21 @@ vows.describe('log4js layouts').addBatch({
test(args, '%-6p', 'DEBUG ');
test(args, '%-8p', 'DEBUG ');
test(args, '%-10p', 'DEBUG ');
}
},
'%[%r%] should output colored time': function(args) {
test(args, '%[%r%]', '\033[36m14:18:30\033[39m');
},
'%x{testString} should output the string stored in tokens': function(args) {
test(args, '%x{testString}', 'testStringToken');
},
'%x{testFunction} should output the result of the function stored in tokens': function(args) {
test(args, '%x{testFunction}', 'testFunctionToken');
},
'%x{doesNotExist} should output the string stored in tokens': function(args) {
test(args, '%x{doesNotExist}', '%x{doesNotExist}');
},
'%x should output the string stored in tokens': function(args) {
test(args, '%x', '%x');
},
}
}).export(module);

test: log4js-abspath

@@ -50,9 +50,10 @@ vows.describe('log4js-abspath').addBatch({
{
RollingFileStream: function(file) {
fileOpened = file;
},
BufferedWriteStream: function(other) {
return { on: function() { }, end: function() {} }
return {
on: function() {},
end: function() {}
};
}
}
}

View File

@@ -30,24 +30,24 @@ function makeFakeNet() {
};
},
createServer: function(cb) {
var fakeNet = this;
cb({
remoteAddress: '1.2.3.4',
remotePort: '1234',
setEncoding: function(encoding) {
fakeNet.encoding = encoding;
},
on: function(event, cb) {
fakeNet.cbs[event] = cb;
}
});
var fakeNet = this;
cb({
remoteAddress: '1.2.3.4',
remotePort: '1234',
setEncoding: function(encoding) {
fakeNet.encoding = encoding;
},
on: function(event, cb) {
fakeNet.cbs[event] = cb;
}
});
return {
listen: function(port, host) {
fakeNet.port = port;
fakeNet.host = host;
}
};
return {
listen: function(port, host) {
fakeNet.port = port;
fakeNet.host = host;
}
};
}
};
}
@@ -181,14 +181,13 @@ vows.describe('Multiprocess Appender').addBatch({
},
'when a client connects': {
topic: function(net) {
var logString = JSON.stringify({ level: 'DEBUG', data: ['some debug']}) + '__LOG4JS__';
var logString = JSON.stringify({ level: { level: 10000, levelStr: 'DEBUG' }, data: ['some debug']}) + '__LOG4JS__';
net.cbs['connect']();
net.cbs['data'](JSON.stringify({ level: 'ERROR', data: ['an error message'] }) + '__LOG4JS__');
net.cbs['data'](JSON.stringify({ level: { level: 40000, levelStr: 'ERROR' }, data: ['an error message'] }) + '__LOG4JS__');
net.cbs['data'](logString.substring(0, 10));
net.cbs['data'](logString.substring(10));
net.cbs['data'](logString + logString + logString);
net.cbs['end'](JSON.stringify({ level: 'FATAL', data: ["that's all folks"] }) + '__LOG4JS__');
net.cbs['end'](JSON.stringify({ level: { level: 50000, levelStr: 'FATAL' }, data: ["that's all folks"] }) + '__LOG4JS__');
net.cbs['data']('bad message__LOG4JS__');
return net;
},
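The frames on the multiprocess socket now carry the whole serialised level object instead of a bare level name, so the receiving end can reconstruct the level without a lookup table. One frame, exactly as the tests push it into the fake socket:

// '__LOG4JS__' is the delimiter that splits events back out of the TCP stream;
// partial frames (see the substring() calls above) are buffered until it arrives.
var frame = JSON.stringify({
  level: { level: 40000, levelStr: 'ERROR' },
  data: ['an error message']
}) + '__LOG4JS__';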

View File

@@ -13,7 +13,8 @@ function setupLogging(category, options) {
sendMail: function (msg, callback) {
msgs.push(msg);
callback(null, true);
}
},
close: function() {}
};
}
};

View File

@@ -1,8 +1,17 @@
var vows = require('vows'),
assert = require('assert'),
fs = require('fs'),
DateRollingFileStream = require('../../lib/streams').DateRollingFileStream,
testTime = new Date(2012, 8, 12, 10, 37, 11);
var vows = require('vows')
, assert = require('assert')
, fs = require('fs')
, semver = require('semver')
, streams
, DateRollingFileStream
, testTime = new Date(2012, 8, 12, 10, 37, 11);
if (semver.satisfies(process.version, '>=0.10.0')) {
streams = require('stream');
} else {
streams = require('readable-stream');
}
DateRollingFileStream = require('../../lib/streams').DateRollingFileStream;
function cleanUp(filename) {
return function() {
@@ -19,15 +28,16 @@ vows.describe('DateRollingFileStream').addBatch({
topic: new DateRollingFileStream(__dirname + '/test-date-rolling-file-stream-1', 'yyyy-mm-dd.hh'),
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-1'),
'should take a filename and a pattern and return a FileWriteStream': function(stream) {
assert.equal(stream.filename, __dirname + '/test-date-rolling-file-stream-1');
assert.equal(stream.pattern, 'yyyy-mm-dd.hh');
assert.instanceOf(stream, fs.FileWriteStream);
'should take a filename and a pattern and return a WritableStream': function(stream) {
assert.equal(stream.filename, __dirname + '/test-date-rolling-file-stream-1');
assert.equal(stream.pattern, 'yyyy-mm-dd.hh');
assert.instanceOf(stream, streams.Writable);
},
'with default settings for the underlying stream': function(stream) {
assert.equal(stream.mode, 420);
assert.equal(stream.flags, 'a');
assert.equal(stream.encoding, 'utf8');
assert.equal(stream.theStream.mode, 420);
assert.equal(stream.theStream.flags, 'a');
//encoding is not available on the underlying stream
//assert.equal(stream.encoding, 'utf8');
}
},
@@ -45,7 +55,7 @@ vows.describe('DateRollingFileStream').addBatch({
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-3'),
'should pass them to the underlying stream': function(stream) {
assert.equal(stream.mode, 0666);
assert.equal(stream.theStream.mode, 0666);
}
},
@@ -54,7 +64,7 @@ vows.describe('DateRollingFileStream').addBatch({
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-4'),
'should pass them to the underlying stream': function(stream) {
assert.equal(stream.mode, 0666);
assert.equal(stream.theStream.mode, 0666);
},
'should use default pattern': function(stream) {
assert.equal(stream.pattern, '.yyyy-MM-dd');
@@ -63,13 +73,11 @@ vows.describe('DateRollingFileStream').addBatch({
'with a pattern of .yyyy-MM-dd': {
topic: function() {
var that = this,
stream = new DateRollingFileStream(__dirname + '/test-date-rolling-file-stream-5', '.yyyy-MM-dd', null, now);
stream.on("open", function() {
stream.write("First message\n");
//wait for the file system to catch up with us
that.callback(null, stream);
});
var that = this,
stream = new DateRollingFileStream(__dirname + '/test-date-rolling-file-stream-5', '.yyyy-MM-dd', null, now);
stream.write("First message\n", 'utf8', function() {
that.callback(null, stream);
});
},
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-5'),
@@ -84,9 +92,8 @@ vows.describe('DateRollingFileStream').addBatch({
'when the day changes': {
topic: function(stream) {
testTime = new Date(2012, 8, 13, 0, 10, 12);
stream.write("Second message\n");
setTimeout(this.callback, 100);
testTime = new Date(2012, 8, 13, 0, 10, 12);
stream.write("Second message\n", 'utf8', this.callback);
},
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-5.2012-09-12'),
@@ -118,6 +125,63 @@ vows.describe('DateRollingFileStream').addBatch({
}
}
}
},
'with alwaysIncludePattern': {
topic: function() {
var that = this,
testTime = new Date(2012, 8, 12, 0, 10, 12),
stream = new DateRollingFileStream(__dirname + '/test-date-rolling-file-stream-pattern', '.yyyy-MM-dd', {alwaysIncludePattern: true}, now);
stream.write("First message\n", 'utf8', function() {
that.callback(null, stream);
});
},
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-pattern.2012-09-12'),
'should create a file with the pattern set': {
topic: function(stream) {
fs.readFile(__dirname + '/test-date-rolling-file-stream-pattern.2012-09-12', this.callback);
},
'file should contain first message': function(result) {
assert.equal(result.toString(), "First message\n");
}
},
'when the day changes': {
topic: function(stream) {
testTime = new Date(2012, 8, 13, 0, 10, 12);
stream.write("Second message\n", 'utf8', this.callback);
},
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-pattern.2012-09-13'),
'the number of files': {
topic: function() {
fs.readdir(__dirname, this.callback);
},
'should be two': function(files) {
assert.equal(files.filter(function(file) { return file.indexOf('test-date-rolling-file-stream-pattern') > -1; }).length, 2);
}
},
'the file with the later date': {
topic: function() {
fs.readFile(__dirname + '/test-date-rolling-file-stream-pattern.2012-09-13', this.callback);
},
'should contain the second message': function(contents) {
assert.equal(contents.toString(), "Second message\n");
}
},
'the file with the date': {
topic: function() {
fs.readFile(__dirname + '/test-date-rolling-file-stream-pattern.2012-09-12', this.callback);
},
'should contain the first message': function(contents) {
assert.equal(contents.toString(), "First message\n");
}
}
}
}
}).exportTo(module);
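The new batch covers alwaysIncludePattern end to end: with the option set, the stream writes straight to the dated filename (test-date-rolling-file-stream-pattern.2012-09-12) and simply switches files when the day ticks over, instead of writing to the base name and renaming on rollover. Wired up through the dateFile appender it would look roughly like this; only the option name comes from this diff, the rest of the config is illustrative:

var log4js = require('log4js');
log4js.configure({
  appenders: [{
    type: 'dateFile',
    filename: 'logs/app.log',
    pattern: '.yyyy-MM-dd',
    alwaysIncludePattern: true // log to logs/app.log.2012-09-12 from the first write
  }]
});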

View File

@@ -1,130 +0,0 @@
var vows = require('vows')
, assert = require('assert')
, events = require('events')
, BufferedWriteStream = require('../../lib/streams').BufferedWriteStream;
function FakeStream() {
this.writes = [];
this.canWrite = false;
this.callbacks = {};
}
FakeStream.prototype.on = function(event, callback) {
this.callbacks[event] = callback;
}
FakeStream.prototype.write = function(data, encoding) {
assert.equal("utf8", encoding);
this.writes.push(data);
return this.canWrite;
}
FakeStream.prototype.emit = function(event, payload) {
this.callbacks[event](payload);
}
FakeStream.prototype.block = function() {
this.canWrite = false;
}
FakeStream.prototype.unblock = function() {
this.canWrite = true;
this.emit("drain");
}
vows.describe('BufferedWriteStream').addBatch({
'stream': {
topic: new BufferedWriteStream(new FakeStream()),
'should take a stream as an argument and return a stream': function(stream) {
assert.instanceOf(stream, events.EventEmitter);
}
},
'before stream is open': {
topic: function() {
var fakeStream = new FakeStream(),
stream = new BufferedWriteStream(fakeStream);
stream.write("Some data", "utf8");
stream.write("Some more data", "utf8");
return fakeStream.writes;
},
'should buffer writes': function(writes) {
assert.equal(writes.length, 0);
}
},
'when stream is open': {
topic: function() {
var fakeStream = new FakeStream(),
stream = new BufferedWriteStream(fakeStream);
stream.write("Some data", "utf8");
fakeStream.canWrite = true;
fakeStream.emit("open");
stream.write("Some more data", "utf8");
return fakeStream.writes;
},
'should write data to stream from before stream was open': function (writes) {
assert.equal(writes[0], "Some data");
},
'should write data to stream from after stream was open': function (writes) {
assert.equal(writes[1], "Some more data");
}
},
'when stream is blocked': {
topic: function() {
var fakeStream = new FakeStream(),
stream = new BufferedWriteStream(fakeStream);
fakeStream.emit("open");
fakeStream.block();
stream.write("will not know it is blocked until first write", "utf8");
stream.write("so this one will be buffered, but not the previous one", "utf8");
return fakeStream.writes;
},
'should buffer writes': function (writes) {
assert.equal(writes.length, 1);
assert.equal(writes[0], "will not know it is blocked until first write");
}
},
'when stream is unblocked': {
topic: function() {
var fakeStream = new FakeStream(),
stream = new BufferedWriteStream(fakeStream);
fakeStream.emit("open");
fakeStream.block();
stream.write("will not know it is blocked until first write", "utf8");
stream.write("so this one will be buffered, but not the previous one", "utf8");
fakeStream.unblock();
return fakeStream.writes;
},
'should send buffered data': function (writes) {
assert.equal(writes.length, 2);
assert.equal(writes[1], "so this one will be buffered, but not the previous one");
}
},
'when stream is closed': {
topic: function() {
var fakeStream = new FakeStream(),
stream = new BufferedWriteStream(fakeStream);
fakeStream.emit("open");
fakeStream.block();
stream.write("first write to notice stream is blocked", "utf8");
stream.write("data while blocked", "utf8");
stream.end();
return fakeStream.writes;
},
'should send any buffered writes to the stream': function (writes) {
assert.equal(writes.length, 2);
assert.equal(writes[1], "data while blocked");
}
},
'when stream errors': {
topic: function() {
var fakeStream = new FakeStream(),
stream = new BufferedWriteStream(fakeStream);
stream.on("error", this.callback);
fakeStream.emit("error", "oh noes!");
},
'should emit error': function(err, value) {
assert.equal(err, "oh noes!");
}
}
}).exportTo(module);
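The whole BufferedWriteStream suite is deleted along with the behaviour it pinned down; under streams2 the base Writable already queues writes and emits 'drain', which presumably makes the hand-rolled buffering redundant. A sketch of the same guarantee in a streams2 subclass (SlowSink is a made-up example class):

var stream = require('stream'),
    util = require('util');

function SlowSink(options) {
  stream.Writable.call(this, options);
}
util.inherits(SlowSink, stream.Writable);

// _write is not called again until the previous callback fires, so writers
// that honour write()'s false return value get buffering and 'drain' for
// free, with no separate BufferedWriteStream needed.
SlowSink.prototype._write = function(chunk, encoding, callback) {
  setTimeout(callback, 10); // stand-in for a slow underlying resource
};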

View File

@@ -1,8 +1,18 @@
var vows = require('vows')
, async = require('async')
, assert = require('assert')
, events = require('events')
, fs = require('fs')
, RollingFileStream = require('../../lib/streams').RollingFileStream;
, semver = require('semver')
, streams
, RollingFileStream;
if (semver.satisfies(process.version, '>=0.10.0')) {
streams = require('stream');
} else {
streams = require('readable-stream');
}
RollingFileStream = require('../../lib/streams').RollingFileStream;
function remove(filename) {
try {
@@ -18,16 +28,17 @@ vows.describe('RollingFileStream').addBatch({
remove(__dirname + "/test-rolling-file-stream");
return new RollingFileStream("test-rolling-file-stream", 1024, 5);
},
'should take a filename, file size in bytes, number of backups as arguments and return a FileWriteStream': function(stream) {
assert.instanceOf(stream, fs.FileWriteStream);
assert.equal(stream.filename, "test-rolling-file-stream");
assert.equal(stream.size, 1024);
assert.equal(stream.backups, 5);
'should take a filename, file size in bytes, number of backups as arguments and return a Writable': function(stream) {
assert.instanceOf(stream, streams.Writable);
assert.equal(stream.filename, "test-rolling-file-stream");
assert.equal(stream.size, 1024);
assert.equal(stream.backups, 5);
},
'with default settings for the underlying stream': function(stream) {
assert.equal(stream.mode, 420);
assert.equal(stream.flags, 'a');
assert.equal(stream.encoding, 'utf8');
assert.equal(stream.theStream.mode, 420);
assert.equal(stream.theStream.flags, 'a');
//encoding isn't a property on the underlying stream
//assert.equal(stream.theStream.encoding, 'utf8');
}
},
'with stream arguments': {
@@ -36,7 +47,7 @@ vows.describe('RollingFileStream').addBatch({
return new RollingFileStream('test-rolling-file-stream', 1024, 5, { mode: 0666 });
},
'should pass them to the underlying stream': function(stream) {
assert.equal(stream.mode, 0666);
assert.equal(stream.theStream.mode, 0666);
}
},
'without size': {
@@ -61,66 +72,63 @@ vows.describe('RollingFileStream').addBatch({
}
},
'writing less than the file size': {
topic: function() {
remove(__dirname + "/test-rolling-file-stream-write-less");
var that = this, stream = new RollingFileStream(__dirname + "/test-rolling-file-stream-write-less", 100);
stream.write("cheese", "utf8", function() {
stream.end();
fs.readFile(__dirname + "/test-rolling-file-stream-write-less", "utf8", that.callback);
});
},
'should write to the file': function(contents) {
assert.equal(contents, "cheese");
},
'the number of files': {
topic: function() {
remove(__dirname + "/test-rolling-file-stream-write-less");
var that = this, stream = new RollingFileStream(__dirname + "/test-rolling-file-stream-write-less", 100);
stream.on("open", function() { that.callback(null, stream); });
fs.readdir(__dirname, this.callback);
},
'(when open)': {
topic: function(stream) {
stream.write("cheese", "utf8");
stream.end();
fs.readFile(__dirname + "/test-rolling-file-stream-write-less", "utf8", this.callback);
},
'should write to the file': function(contents) {
assert.equal(contents, "cheese");
},
'the number of files': {
topic: function() {
fs.readdir(__dirname, this.callback);
},
'should be one': function(files) {
assert.equal(files.filter(function(file) { return file.indexOf('test-rolling-file-stream-write-less') > -1; }).length, 1);
}
}
'should be one': function(files) {
assert.equal(files.filter(function(file) { return file.indexOf('test-rolling-file-stream-write-less') > -1; }).length, 1);
}
}
},
'writing more than the file size': {
topic: function() {
remove(__dirname + "/test-rolling-file-stream-write-more");
remove(__dirname + "/test-rolling-file-stream-write-more.1");
var that = this, stream = new RollingFileStream(__dirname + "/test-rolling-file-stream-write-more", 45);
async.forEach([0, 1, 2, 3, 4, 5, 6], function(i, cb) {
stream.write(i +".cheese\n", "utf8", cb);
}, function() {
stream.end();
that.callback();
});
},
'the number of files': {
topic: function() {
remove(__dirname + "/test-rolling-file-stream-write-more");
remove(__dirname + "/test-rolling-file-stream-write-more.1");
var that = this, stream = new RollingFileStream(__dirname + "/test-rolling-file-stream-write-more", 45);
stream.on("open", function() {
for (var i=0; i < 7; i++) {
stream.write(i +".cheese\n", "utf8");
}
//wait for the file system to catch up with us
setTimeout(that.callback, 100);
});
fs.readdir(__dirname, this.callback);
},
'the number of files': {
topic: function() {
fs.readdir(__dirname, this.callback);
},
'should be two': function(files) {
assert.equal(files.filter(function(file) { return file.indexOf('test-rolling-file-stream-write-more') > -1; }).length, 2);
}
},
'the first file': {
topic: function() {
fs.readFile(__dirname + "/test-rolling-file-stream-write-more", "utf8", this.callback);
},
'should contain the last two log messages': function(contents) {
assert.equal(contents, '5.cheese\n6.cheese\n');
}
},
'the second file': {
topic: function() {
fs.readFile(__dirname + '/test-rolling-file-stream-write-more.1', "utf8", this.callback);
},
'should contain the first five log messages': function(contents) {
assert.equal(contents, '0.cheese\n1.cheese\n2.cheese\n3.cheese\n4.cheese\n');
}
'should be two': function(files) {
assert.equal(files.filter(
function(file) { return file.indexOf('test-rolling-file-stream-write-more') > -1; }
).length, 2);
}
},
'the first file': {
topic: function() {
fs.readFile(__dirname + "/test-rolling-file-stream-write-more", "utf8", this.callback);
},
'should contain the last two log messages': function(contents) {
assert.equal(contents, '5.cheese\n6.cheese\n');
}
},
'the second file': {
topic: function() {
fs.readFile(__dirname + '/test-rolling-file-stream-write-more.1', "utf8", this.callback);
},
'should contain the first five log messages': function(contents) {
assert.equal(contents, '0.cheese\n1.cheese\n2.cheese\n3.cheese\n4.cheese\n');
}
}
}
}).exportTo(module);
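Both stream suites now pick their streams implementation with the same guard seen at the top of each file: node 0.10 and later get the built-in streams2 module, anything older falls back to the readable-stream package. Isolated, the guard is just:

// semver handles the leading 'v' in process.version (e.g. 'v0.8.22')
var semver = require('semver'),
    streams = semver.satisfies(process.version, '>=0.10.0')
      ? require('stream')
      : require('readable-stream');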