Compare commits

...

14 Commits

Author SHA1 Message Date
Gareth Jones
9e8e1f76ad Added node0.8 back into travis builds 2013-04-07 14:41:40 +10:00
Gareth Jones
9c5dff382e Backported the old streams. Nasty if statements abound. 2013-04-07 14:32:39 +10:00
Gareth Jones
36c5175a55 0.6.2 2013-04-02 12:02:47 +11:00
Gareth Jones
22160f90b3 fixed the multiprocess tests 2013-04-02 11:59:45 +11:00
Gareth Jones
73437ecb40 Merge branch 'master' of https://github.com/dsn/log4js-node into dsn-master 2013-04-02 11:34:25 +11:00
Gareth Jones
107e33c0d1 merged in change from @vojtajina for pull request #128 2013-04-02 10:18:25 +11:00
Gareth Jones
6352632fb2 fix version of node supported 2013-04-02 10:02:48 +11:00
Gareth Jones
0544342e9f Merge pull request #128 from Dignifiedquire/master-engine
Fix node engine in package.json
2013-04-01 15:42:41 -07:00
Friedel Ziegelmayer
1d1153d32f Fix node engine in package.json 2013-04-01 23:00:26 +02:00
Gary Steven
e58cf201ca Updated for Node 0.10.x
net.createServer no longer emits 'connect' event
2013-03-30 03:23:58 -07:00
Gareth Jones
83271e47fc Merge pull request #125 from jimschubert/master
Allow for somewhat standard debugging calls
2013-03-24 19:35:24 -07:00
Jim Schubert
f3271a3997 Add standard debug conditional function
: master
2013-03-23 18:50:13 -07:00
Gareth Jones
4b7cf589a2 Fixing the wiki links (issue #124) 2013-03-20 19:47:32 +11:00
Gareth Jones
c8f401c47d fixed travis node version format 2013-03-20 14:58:56 +11:00
21 changed files with 874 additions and 287 deletions

View File

@@ -1,3 +1,4 @@
language: node_js language: node_js
node_js: node_js:
- 0.10 - "0.10"
- "0.8"

View File

@@ -130,13 +130,13 @@ If you have already defined an absolute path for one of the FileAppenders in the
] ]
} }
``` ```
Documentation for most of the core appenders can be found on the [wiki](log4js-node/wiki/Appenders), otherwise take a look at the tests and the examples. Documentation for most of the core appenders can be found on the [wiki](https://github.com/nomiddlename/log4js-node/wiki/Appenders), otherwise take a look at the tests and the examples.
## Documentation ## Documentation
See the [wiki](log4js-node/wiki). Improve the [wiki](log4js-node/wiki), please. See the [wiki](https://github.com/nomiddlename/log4js-node/wiki). Improve the [wiki](https://github.com/nomiddlename/log4js-node/wiki), please.
## Contributing ## Contributing
Contributions welcome, but take a look at the [rules](log4js-node/wiki/Contributing) first. Contributions welcome, but take a look at the [rules](https://github.com/nomiddlename/log4js-node/wiki/Contributing) first.
## License ## License

View File

@@ -1,9 +1,10 @@
var streams = require('../streams'), var semver = require('semver')
layouts = require('../layouts'), , layouts = require('../layouts')
path = require('path'), , path = require('path')
os = require('os'), , os = require('os')
eol = os.EOL || '\n', , eol = os.EOL || '\n'
openFiles = []; , openFiles = []
, streams;
//close open files on process exit. //close open files on process exit.
process.on('exit', function() { process.on('exit', function() {
@@ -20,29 +21,36 @@ process.on('exit', function() {
* @layout layout function for log messages - defaults to basicLayout * @layout layout function for log messages - defaults to basicLayout
*/ */
function appender(filename, pattern, layout) { function appender(filename, pattern, layout) {
layout = layout || layouts.basicLayout; layout = layout || layouts.basicLayout;
var logFile;
var logFile = new streams.DateRollingFileStream(filename, pattern); if (semver.satisfies(process.version, '>=0.10.0')) {
openFiles.push(logFile); streams = require('../streams');
logFile = new streams.DateRollingFileStream(filename, pattern);
return function(logEvent) { } else {
logFile.write(layout(logEvent) + eol, "utf8"); streams = require('../old-streams');
}; logFile = new streams.BufferedWriteStream(new streams.DateRollingFileStream(filename, pattern));
}
openFiles.push(logFile);
return function(logEvent) {
logFile.write(layout(logEvent) + eol, "utf8");
};
} }
function configure(config, options) { function configure(config, options) {
var layout; var layout;
if (config.layout) {
layout = layouts.layout(config.layout.type, config.layout);
}
if (config.layout) { if (options && options.cwd && !config.absolute) {
layout = layouts.layout(config.layout.type, config.layout); config.filename = path.join(options.cwd, config.filename);
} }
if (options && options.cwd && !config.absolute) { return appender(config.filename, config.pattern, layout);
config.filename = path.join(options.cwd, config.filename);
}
return appender(config.filename, config.pattern, layout);
} }
exports.appender = appender; exports.appender = appender;

View File

@@ -1,10 +1,10 @@
var layouts = require('../layouts') var layouts = require('../layouts')
, path = require('path') , path = require('path')
, fs = require('fs') , fs = require('fs')
, streams = require('../streams') , semver = require('semver')
, os = require('os') , os = require('os')
, eol = os.EOL || '\n' , eol = os.EOL || '\n'
, openFiles = []; , openFiles = [];
//close open files on process exit. //close open files on process exit.
process.on('exit', function() { process.on('exit', function() {
@@ -22,7 +22,7 @@ process.on('exit', function() {
* @param numBackups - the number of log files to keep after logSize has been reached (default 5) * @param numBackups - the number of log files to keep after logSize has been reached (default 5)
*/ */
function fileAppender (file, layout, logSize, numBackups) { function fileAppender (file, layout, logSize, numBackups) {
var bytesWritten = 0; var logFile;
file = path.normalize(file); file = path.normalize(file);
layout = layout || layouts.basicLayout; layout = layout || layouts.basicLayout;
numBackups = numBackups === undefined ? 5 : numBackups; numBackups = numBackups === undefined ? 5 : numBackups;
@@ -30,7 +30,9 @@ function fileAppender (file, layout, logSize, numBackups) {
numBackups = numBackups === 0 ? 1 : numBackups; numBackups = numBackups === 0 ? 1 : numBackups;
function openTheStream(file, fileSize, numFiles) { function openTheStream(file, fileSize, numFiles) {
var stream; var stream
, streams = require('../streams');
if (fileSize) { if (fileSize) {
stream = new streams.RollingFileStream( stream = new streams.RollingFileStream(
file, file,
@@ -46,7 +48,31 @@ function fileAppender (file, layout, logSize, numBackups) {
return stream; return stream;
} }
var logFile = openTheStream(file, logSize, numBackups); function openTheOldStyleStream(file, fileSize, numFiles) {
var stream
, streams = require('../old-streams');
if (fileSize) {
stream = new streams.BufferedWriteStream(
new streams.RollingFileStream(
file,
fileSize,
numFiles
)
);
} else {
stream = new streams.BufferedWriteStream(fs.createWriteStream(file, { encoding: "utf8", mode: 0644, flags: 'a' }));
}
stream.on("error", function (err) {
console.error("log4js.fileAppender - Writing to file %s, error happened ", file, err);
});
return stream;
}
if (semver.satisfies(process.version, '>=0.10.0')) {
logFile = openTheStream(file, logSize, numBackups);
} else {
logFile = openTheOldStyleStream(file, logSize, numBackups);
}
// push file to the stack of open handlers // push file to the stack of open handlers
openFiles.push(logFile); openFiles.push(logFile);

View File

@@ -38,31 +38,30 @@ function logServer(config) {
var actualAppender = config.actualAppender, var actualAppender = config.actualAppender,
server = net.createServer(function serverCreated(clientSocket) { server = net.createServer(function serverCreated(clientSocket) {
clientSocket.setEncoding('utf8'); clientSocket.setEncoding('utf8');
clientSocket.on('connect', function clientConnected() { var logMessage = '';
var logMessage = '';
function logTheMessage(msg) { function logTheMessage(msg) {
if (logMessage.length > 0) { if (logMessage.length > 0) {
actualAppender(deserializeLoggingEvent(clientSocket, msg)); actualAppender(deserializeLoggingEvent(clientSocket, msg));
}
} }
}
function chunkReceived(chunk) { function chunkReceived(chunk) {
var event; var event;
logMessage += chunk || ''; logMessage += chunk || '';
if (logMessage.indexOf(END_MSG) > -1) { if (logMessage.indexOf(END_MSG) > -1) {
event = logMessage.substring(0, logMessage.indexOf(END_MSG)); event = logMessage.substring(0, logMessage.indexOf(END_MSG));
logTheMessage(event); logTheMessage(event);
logMessage = logMessage.substring(event.length + END_MSG.length) || ''; logMessage = logMessage.substring(event.length + END_MSG.length) || '';
//check for more, maybe it was a big chunk //check for more, maybe it was a big chunk
chunkReceived(); chunkReceived();
}
} }
}
clientSocket.on('data', chunkReceived); clientSocket.on('data', chunkReceived);
clientSocket.on('end', chunkReceived); clientSocket.on('end', chunkReceived);
});
}); });
server.listen(config.loggerPort || 5000, config.loggerHost || 'localhost'); server.listen(config.loggerPort || 5000, config.loggerHost || 'localhost');
return actualAppender; return actualAppender;

View File

@@ -0,0 +1,99 @@
// Legacy (pre node-0.10) rolling file stream base class.
// Kept only for backwards compatibility; per the old-streams README,
// no bug fixes or enhancements will be made to these files.
var fs = require('fs'),
util = require('util');
// No-op debug hook; uncomment the console.log to trace stream activity.
function debug(message) {
// console.log(message);
}
module.exports = BaseRollingFileStream;
/**
 * Base class for rolling file streams, extending the legacy internal
 * fs.FileWriteStream (present in old node versions only).
 * Subclasses override shouldRoll() and roll() to define a rolling
 * policy; writes that arrive while a roll is in progress are queued
 * in writesWhileRolling and replayed once the roll completes.
 *
 * @param filename - file to write to (required; throws if missing)
 * @param options - options for the underlying FileWriteStream
 *   (defaults to { encoding: 'utf8', mode: 0644, flags: 'a' })
 */
function BaseRollingFileStream(filename, options) {
debug("In BaseRollingFileStream");
this.filename = filename;
this.options = options || { encoding: 'utf8', mode: 0644, flags: 'a' };
// true while a roll is in progress
this.rolling = false;
// writes buffered during a roll, as { data, encoding } pairs
this.writesWhileRolling = [];
this.currentSize = 0;
// when true, shouldRoll() is checked before each write rather than after
this.rollBeforeWrite = false;
// Size of an existing file on disk, or 0 if it does not exist yet.
function currentFileSize(file) {
var fileSize = 0;
try {
fileSize = fs.statSync(file).size;
} catch (e) {
// file does not exist
}
return fileSize;
}
function throwErrorIfArgumentsAreNotValid() {
if (!filename) {
throw new Error("You must specify a filename");
}
}
throwErrorIfArgumentsAreNotValid();
debug("Calling BaseRollingFileStream.super");
BaseRollingFileStream.super_.call(this, this.filename, this.options);
// start from the size already on disk so append mode rolls correctly
this.currentSize = currentFileSize(this.filename);
}
util.inherits(BaseRollingFileStream, fs.FileWriteStream);
// Kick off a roll: delegate to the subclass roll(), then replay any
// writes queued while rolling. If replaying itself triggers another
// roll, reschedule on the next tick to avoid unbounded recursion.
BaseRollingFileStream.prototype.initRolling = function() {
var that = this;
// Drain writesWhileRolling into the underlying stream.
// Returns true if a write pushed us over the roll threshold again
// (i.e. we are still rolling), false when the queue fully drained.
function emptyRollingQueue() {
debug("emptying the rolling queue");
var toWrite;
while ((toWrite = that.writesWhileRolling.shift())) {
BaseRollingFileStream.super_.prototype.write.call(that, toWrite.data, toWrite.encoding);
that.currentSize += toWrite.data.length;
if (that.shouldRoll()) {
that.flush();
return true;
}
}
that.flush();
return false;
}
this.rolling = true;
this.roll(this.filename, function() {
that.currentSize = 0;
that.rolling = emptyRollingQueue();
if (that.rolling) {
process.nextTick(function() { that.initRolling(); });
}
});
};
// Write data, buffering it while a roll is in progress.
// Returns the underlying stream's backpressure flag, or false
// whenever the chunk was only queued.
BaseRollingFileStream.prototype.write = function(data, encoding) {
var canWrite = false;
if (this.rolling) {
this.writesWhileRolling.push({ data: data, encoding: encoding });
} else {
if (this.rollBeforeWrite && this.shouldRoll()) {
// roll first, write after (date-based rolling uses this mode)
this.writesWhileRolling.push({ data: data, encoding: encoding });
this.initRolling();
} else {
canWrite = BaseRollingFileStream.super_.prototype.write.call(this, data, encoding);
this.currentSize += data.length;
debug('current size = ' + this.currentSize);
if (!this.rollBeforeWrite && this.shouldRoll()) {
this.initRolling();
}
}
}
return canWrite;
};
// Subclass hook: decide whether a roll is due.
BaseRollingFileStream.prototype.shouldRoll = function() {
return false; // default behaviour is never to roll
};
// Subclass hook: perform the roll, invoking callback when done.
BaseRollingFileStream.prototype.roll = function(filename, callback) {
callback(); // default behaviour is not to do anything
};

View File

@@ -0,0 +1,78 @@
// Legacy (pre node-0.10) support: wraps a writable stream and queues
// writes until the underlying stream signals readiness via "open" or
// "drain". "error" events from the wrapped stream are re-emitted.
var events = require('events'),
Dequeue = require('dequeue'),
util = require('util');

module.exports = BufferedWriteStream;

/**
 * @param stream - underlying writable stream; expected to emit
 *   "open", "drain" and "error" events.
 */
function BufferedWriteStream(stream) {
  var self = this;
  this.stream = stream;
  this.buffer = new Dequeue();
  this.canWrite = false;
  this.bytes = 0;

  // Both "open" and "drain" mean the wrapped stream will accept
  // data again, so share one handler.
  function becomeWritable() {
    self.canWrite = true;
    self.flushBuffer();
  }

  this.stream.on("open", becomeWritable);
  this.stream.on("error", function (err) {
    self.emit("error", err);
  });
  this.stream.on("drain", becomeWritable);
}
util.inherits(BufferedWriteStream, events.EventEmitter);

// fd is delegated to the wrapped stream; assigning a new fd resets
// the written-byte counter (the file was swapped out underneath us).
Object.defineProperty(BufferedWriteStream.prototype, "fd", {
  get: function() { return this.stream.fd; },
  set: function(newFd) {
    this.stream.fd = newFd;
    this.bytes = 0;
  }
});

// Total bytes handed to the wrapped stream since the last fd swap.
Object.defineProperty(BufferedWriteStream.prototype, "bytesWritten", {
  get: function() { return this.bytes; }
});

// Queue a chunk, then flush as much as the wrapped stream will take.
BufferedWriteStream.prototype.write = function(data, encoding) {
  this.buffer.push({ data: data, encoding: encoding });
  this.flushBuffer();
};

// Queue an optional final chunk, then force the whole buffer out
// regardless of backpressure.
BufferedWriteStream.prototype.end = function(data, encoding) {
  if (data) {
    this.buffer.push({ data: data, encoding: encoding });
  }
  this.flushBufferEvenIfCannotWrite();
};

// Push one queued chunk to the wrapped stream, tracking bytes and
// the stream's backpressure response.
BufferedWriteStream.prototype.writeToStream = function(toWrite) {
  this.bytes += toWrite.data.length;
  this.canWrite = this.stream.write(toWrite.data, toWrite.encoding);
};

BufferedWriteStream.prototype.flushBufferEvenIfCannotWrite = function() {
  while (this.buffer.length > 0) {
    this.writeToStream(this.buffer.shift());
  }
};

BufferedWriteStream.prototype.flushBuffer = function() {
  while (this.buffer.length > 0 && this.canWrite) {
    this.writeToStream(this.buffer.shift());
  }
};

View File

@@ -0,0 +1,89 @@
// Legacy (pre node-0.10) date-based rolling file stream.
// Kept only for backwards compatibility; per the old-streams README,
// no bug fixes or enhancements will be made to these files.
var BaseRollingFileStream = require('./BaseRollingFileStream'),
format = require('../date_format'),
async = require('async'),
fs = require('fs'),
util = require('util');
module.exports = DateRollingFileStream;
// No-op debug hook; uncomment the console.log to trace stream activity.
function debug(message) {
// console.log(message);
}
/**
 * Rolls the log file whenever the formatted date of "now" differs
 * from the formatted date of the last write. Rolling renames the
 * current file to filename + previous date string and opens a fresh
 * file under the original name.
 *
 * @param filename - file to write to
 * @param pattern - date_format pattern (default '.yyyy-MM-dd');
 *   if an object is passed here, it is treated as options instead
 * @param options - passed through to BaseRollingFileStream
 * @param now - clock function returning ms since epoch
 *   (defaults to Date.now; injectable for tests)
 */
function DateRollingFileStream(filename, pattern, options, now) {
debug("Now is " + now);
// shuffle arguments when pattern was omitted: (filename, options, now)
if (pattern && typeof(pattern) === 'object') {
now = options;
options = pattern;
pattern = null;
}
this.pattern = pattern || '.yyyy-MM-dd';
this.now = now || Date.now;
this.lastTimeWeWroteSomething = format.asString(this.pattern, new Date(this.now()));
debug("this.now is " + this.now + ", now is " + now);
DateRollingFileStream.super_.call(this, filename, options);
// check the date BEFORE each write, so the entry lands in the new file
this.rollBeforeWrite = true;
}
util.inherits(DateRollingFileStream, BaseRollingFileStream);
// Roll when the formatted date has changed since the last write.
// Also records the previous date string, used by roll() to name
// the archived file.
DateRollingFileStream.prototype.shouldRoll = function() {
var lastTime = this.lastTimeWeWroteSomething,
thisTime = format.asString(this.pattern, new Date(this.now()));
debug("DateRollingFileStream.shouldRoll with now = " + this.now() + ", thisTime = " + thisTime + ", lastTime = " + lastTime);
this.lastTimeWeWroteSomething = thisTime;
this.previousTime = lastTime;
return thisTime !== lastTime;
};
// Perform the roll: delete any stale archive, rename the current file
// to filename + previous date string, then open a fresh file and swap
// its fd into the (legacy) underlying stream.
DateRollingFileStream.prototype.roll = function(filename, callback) {
var that = this,
newFilename = filename + this.previousTime;
debug("Starting roll");
debug("Queueing up data until we've finished rolling");
debug("Flushing underlying stream");
this.flush();
async.series([
deleteAnyExistingFile,
renameTheCurrentFile,
openANewFile
], callback);
function deleteAnyExistingFile(cb) {
//on windows, you can get a EEXIST error if you rename a file to an existing file
//so, we'll try to delete the file we're renaming to first
fs.unlink(newFilename, function (err) {
//ignore err: if we could not delete, it's most likely that it doesn't exist
cb();
});
}
function renameTheCurrentFile(cb) {
debug("Renaming the " + filename + " -> " + newFilename);
fs.rename(filename, newFilename, cb);
}
// Open a replacement file and swap its descriptor into the legacy
// stream, closing the old descriptor once the swap is done.
function openANewFile(cb) {
debug("Opening a new file");
fs.open(
filename,
that.options.flags,
that.options.mode,
function (err, fd) {
debug("opened new file");
var oldLogFileFD = that.fd;
that.fd = fd;
that.writable = true;
fs.close(oldLogFileFD, cb);
}
);
}
};

View File

@@ -0,0 +1 @@
These are for pre-0.10.x versions of node and are here just for backwards compatibility. No bug fixes or enhancements will be made to these files.

View File

@@ -0,0 +1,110 @@
// Legacy (pre node-0.10) size-based rolling file stream.
// Kept only for backwards compatibility; per the old-streams README,
// no bug fixes or enhancements will be made to these files.
var BaseRollingFileStream = require('./BaseRollingFileStream'),
util = require('util'),
path = require('path'),
fs = require('fs'),
async = require('async');
// No-op debug hook; uncomment a line to trace stream activity.
function debug(message) {
// util.debug(message);
// console.log(message);
}
module.exports = RollingFileStream;
/**
 * Rolls the log file once it reaches a size threshold, keeping a
 * fixed number of numbered backups (file.1, file.2, ...).
 *
 * @param filename - file to write to (required)
 * @param size - max file size in bytes before rolling (required, > 0)
 * @param backups - number of backup files to keep (default 1)
 * @param options - passed through to BaseRollingFileStream
 */
function RollingFileStream (filename, size, backups, options) {
this.size = size;
this.backups = backups || 1;
function throwErrorIfArgumentsAreNotValid() {
if (!filename || !size || size <= 0) {
throw new Error("You must specify a filename and file size");
}
}
throwErrorIfArgumentsAreNotValid();
RollingFileStream.super_.call(this, filename, options);
}
util.inherits(RollingFileStream, BaseRollingFileStream);
// Roll once the bytes written so far reach the configured size.
RollingFileStream.prototype.shouldRoll = function() {
return this.currentSize >= this.size;
};
// Perform the roll: shift every existing backup up one index
// (file.n -> file.n+1, dropping those beyond this.backups), then
// open a fresh file and swap its fd into the legacy stream.
RollingFileStream.prototype.roll = function(filename, callback) {
var that = this,
nameMatcher = new RegExp('^' + path.basename(filename));
// directory entries that belong to this log (filename or filename.n)
function justTheseFiles (item) {
return nameMatcher.test(item);
}
// numeric suffix of a backup file; 0 for the live file itself
function index(filename_) {
return parseInt(filename_.substring((path.basename(filename) + '.').length), 10) || 0;
}
function byIndex(a, b) {
if (index(a) > index(b)) {
return 1;
} else if (index(a) < index(b) ) {
return -1;
} else {
return 0;
}
}
// Rename file.idx -> file.idx+1, unless idx is already the oldest
// backup we keep (in which case it is simply overwritten later).
function increaseFileIndex (fileToRename, cb) {
var idx = index(fileToRename);
debug('Index of ' + fileToRename + ' is ' + idx);
if (idx < that.backups) {
//on windows, you can get a EEXIST error if you rename a file to an existing file
//so, we'll try to delete the file we're renaming to first
fs.unlink(filename + '.' + (idx+1), function (err) {
//ignore err: if we could not delete, it's most likely that it doesn't exist
debug('Renaming ' + fileToRename + ' -> ' + filename + '.' + (idx+1));
fs.rename(path.join(path.dirname(filename), fileToRename), filename + '.' + (idx + 1), cb);
});
} else {
cb();
}
}
function renameTheFiles(cb) {
//roll the backups (rename file.n to file.n+1, where n <= numBackups)
debug("Renaming the old files");
fs.readdir(path.dirname(filename), function (err, files) {
// highest index first, so no rename clobbers a not-yet-moved file
async.forEachSeries(
files.filter(justTheseFiles).sort(byIndex).reverse(),
increaseFileIndex,
cb
);
});
}
// Open a replacement file and swap its descriptor into the legacy
// stream, closing the old descriptor once the swap is done.
function openANewFile(cb) {
debug("Opening a new file");
fs.open(
filename,
that.options.flags,
that.options.mode,
function (err, fd) {
debug("opened new file");
var oldLogFileFD = that.fd;
that.fd = fd;
that.writable = true;
fs.close(oldLogFileFD, cb);
}
);
}
debug("Starting roll");
debug("Queueing up data until we've finished rolling");
debug("Flushing underlying stream");
this.flush();
async.series([
renameTheFiles,
openANewFile
], callback);
};

3
lib/old-streams/index.js Normal file
View File

@@ -0,0 +1,3 @@
exports.BufferedWriteStream = require('./BufferedWriteStream');
exports.RollingFileStream = require('./RollingFileStream');
exports.DateRollingFileStream = require('./DateRollingFileStream');

View File

@@ -2,8 +2,11 @@ var fs = require('fs'),
stream = require('stream'), stream = require('stream'),
util = require('util'); util = require('util');
function debug(message) { var debug;
// console.log(message); if (process.env.NODE_DEBUG && /\blog4js\b/.test(process.env.NODE_DEBUG)) {
debug = function(message) { console.error('LOG4JS: (BaseRollingFileStream) %s', message); };
} else {
debug = function() { };
} }
module.exports = BaseRollingFileStream; module.exports = BaseRollingFileStream;

View File

@@ -6,8 +6,11 @@ var BaseRollingFileStream = require('./BaseRollingFileStream'),
module.exports = DateRollingFileStream; module.exports = DateRollingFileStream;
function debug(message) { var debug;
// console.log(message); if (process.env.NODE_DEBUG && /\blog4js\b/.test(process.env.NODE_DEBUG)) {
debug = function(message) { console.error('LOG4JS: (DateRollingFileStream) %s', message); };
} else {
debug = function() { };
} }
function DateRollingFileStream(filename, pattern, options, now) { function DateRollingFileStream(filename, pattern, options, now) {

View File

@@ -4,9 +4,11 @@ var BaseRollingFileStream = require('./BaseRollingFileStream'),
fs = require('fs'), fs = require('fs'),
async = require('async'); async = require('async');
function debug() { var debug;
// util.debug(message); if (process.env.NODE_DEBUG && /\blog4js\b/.test(process.env.NODE_DEBUG)) {
// console.log.apply(console, arguments); debug = function(message) { console.error('LOG4JS: (RollingFileStream) %s', message); };
} else {
debug = function() { };
} }
module.exports = RollingFileStream; module.exports = RollingFileStream;

View File

@@ -1,38 +1,41 @@
{ {
"name": "log4js", "name": "log4js",
"version": "0.6.0", "version": "0.6.2",
"description": "Port of Log4js to work with node.", "description": "Port of Log4js to work with node.",
"keywords": [ "keywords": [
"logging", "logging",
"log", "log",
"log4j", "log4j",
"node" "node"
], ],
"main": "./lib/log4js", "main": "./lib/log4js",
"author": "Gareth Jones <gareth.jones@sensis.com.au>", "author": "Gareth Jones <gareth.jones@sensis.com.au>",
"repository": { "repository": {
"type": "git", "type": "git",
"url": "https://github.com/nomiddlename/log4js-node.git" "url": "https://github.com/nomiddlename/log4js-node.git"
}, },
"bugs": { "bugs": {
"url": "http://github.com/nomiddlename/log4js-node/issues" "url": "http://github.com/nomiddlename/log4js-node/issues"
}, },
"engines": [ "node >=0.10" ], "engines": {
"scripts": { "node": ">=0.6.0"
"test": "vows" },
}, "scripts": {
"directories": { "test": "vows"
"test": "test", },
"lib": "lib" "directories": {
}, "test": "test",
"dependencies": { "lib": "lib"
"async": "0.1.15", },
"dequeue": "1.0.3" "dependencies": {
}, "async": "0.1.15",
"devDependencies": { "dequeue": "1.0.3",
"vows": "0.7.0", "semver": "~1.1.4"
"sandboxed-module": "0.1.3", },
"hook.io": "0.8.10", "devDependencies": {
"underscore": "1.2.1" "vows": "0.7.0",
} "sandboxed-module": "0.1.3",
"hook.io": "0.8.10",
"underscore": "1.2.1"
}
} }

View File

@@ -1,8 +1,9 @@
var vows = require('vows'), var vows = require('vows'),
assert = require('assert'), assert = require('assert'),
path = require('path'), path = require('path'),
fs = require('fs'), fs = require('fs'),
log4js = require('../lib/log4js'); sandbox = require('sandboxed-module'),
log4js = require('../lib/log4js');
function removeFile(filename) { function removeFile(filename) {
return function() { return function() {
@@ -95,4 +96,65 @@ vows.describe('../lib/appenders/dateFile').addBatch({
} }
} }
}).addBatch({
'with node version less than 0.10': {
topic: function() {
var oldStyleStreamCreated = false
, appender = sandbox.require(
'../lib/appenders/dateFile',
{
globals: {
process: {
version: "v0.8.1",
on: function() {}
}
},
requires: {
'../old-streams': {
BufferedWriteStream: function() {
oldStyleStreamCreated = true;
this.on = function() {};
},
DateRollingFileStream: function() {
this.on = function() {};
}
}
}
}
).appender('cheese.log', null, 1000, 1);
return oldStyleStreamCreated;
},
'should load the old-style streams': function(loaded) {
assert.isTrue(loaded);
}
},
'with node version greater than or equal to 0.10': {
topic: function() {
var oldStyleStreamCreated = false
, appender = sandbox.require(
'../lib/appenders/dateFile',
{
globals: {
process: {
version: "v0.10.1",
on: function() {}
}
},
requires: {
'../streams': {
DateRollingFileStream: function() {
this.on = function() {};
}
}
}
}
).appender('cheese.log', null, 1000, 1);
return oldStyleStreamCreated;
},
'should load the new streams': function(loaded) {
assert.isFalse(loaded);
}
}
}).exportTo(module); }).exportTo(module);

View File

@@ -2,6 +2,8 @@ var vows = require('vows')
, fs = require('fs') , fs = require('fs')
, path = require('path') , path = require('path')
, log4js = require('../lib/log4js') , log4js = require('../lib/log4js')
, sandbox = require('sandboxed-module')
, semver = require('semver')
, assert = require('assert'); , assert = require('assert');
log4js.clearAppenders(); log4js.clearAppenders();
@@ -17,9 +19,10 @@ function remove(filename) {
vows.describe('log4js fileAppender').addBatch({ vows.describe('log4js fileAppender').addBatch({
'adding multiple fileAppenders': { 'adding multiple fileAppenders': {
topic: function () { topic: function () {
var listenersCount = process.listeners('exit').length var listenersCount = process.listeners('exit').length
, logger = log4js.getLogger('default-settings') , logger = log4js.getLogger('default-settings')
, count = 5, logfile; , count = 5
, logfile;
while (count--) { while (count--) {
logfile = path.join(__dirname, '/fa-default-test' + count + '.log'); logfile = path.join(__dirname, '/fa-default-test' + count + '.log');
@@ -109,12 +112,12 @@ vows.describe('log4js fileAppender').addBatch({
//give the system a chance to open the stream //give the system a chance to open the stream
setTimeout(function() { setTimeout(function() {
fs.readdir(__dirname, function(err, files) { fs.readdir(__dirname, function(err, files) {
if (files) { if (files) {
that.callback(null, files.sort()); that.callback(null, files.sort());
} else { } else {
that.callback(err, files); that.callback(err, files);
} }
}); });
}, 200); }, 200);
}, },
'the log files': { 'the log files': {
@@ -133,7 +136,14 @@ vows.describe('log4js fileAppender').addBatch({
fs.readFile(path.join(__dirname, logFiles[0]), "utf8", this.callback); fs.readFile(path.join(__dirname, logFiles[0]), "utf8", this.callback);
}, },
'should be the last log message': function(contents) { 'should be the last log message': function(contents) {
//there's a difference in behaviour between
//old-style streams and new ones (the new ones are
//correct)
if (semver.satisfies(process.version, ">=0.10.0")) {
assert.include(contents, 'This is the fourth log message.'); assert.include(contents, 'This is the fourth log message.');
} else {
assert.isEmpty(contents);
}
} }
}, },
'and the contents of the second file': { 'and the contents of the second file': {
@@ -141,7 +151,14 @@ vows.describe('log4js fileAppender').addBatch({
fs.readFile(path.join(__dirname, logFiles[1]), "utf8", this.callback); fs.readFile(path.join(__dirname, logFiles[1]), "utf8", this.callback);
}, },
'should be the third log message': function(contents) { 'should be the third log message': function(contents) {
//there's a difference in behaviour between
//old-style streams and new ones (the new ones are
//correct)
if (semver.satisfies(process.version, ">=0.10.0")) {
assert.include(contents, 'This is the third log message.'); assert.include(contents, 'This is the third log message.');
} else {
assert.include(contents, 'This is the fourth log message.');
}
} }
}, },
'and the contents of the third file': { 'and the contents of the third file': {
@@ -149,31 +166,98 @@ vows.describe('log4js fileAppender').addBatch({
fs.readFile(path.join(__dirname, logFiles[2]), "utf8", this.callback); fs.readFile(path.join(__dirname, logFiles[2]), "utf8", this.callback);
}, },
'should be the second log message': function(contents) { 'should be the second log message': function(contents) {
//there's a difference in behaviour between
//old-style streams and new ones (the new ones are
//correct)
if (semver.satisfies(process.version, ">=0.10.0")) {
assert.include(contents, 'This is the second log message.'); assert.include(contents, 'This is the second log message.');
} else {
assert.include(contents, 'This is the third log message.');
}
} }
} }
} }
} }
}).addBatch({ }).addBatch({
'configure' : { 'configure' : {
'with fileAppender': { 'with fileAppender': {
topic: function() { topic: function() {
var log4js = require('../lib/log4js') var log4js = require('../lib/log4js')
, logger; , logger;
//this config file defines one file appender (to ./tmp-tests.log) //this config file defines one file appender (to ./tmp-tests.log)
//and sets the log level for "tests" to WARN //and sets the log level for "tests" to WARN
log4js.configure('test/log4js.json'); log4js.configure('test/log4js.json');
logger = log4js.getLogger('tests'); logger = log4js.getLogger('tests');
logger.info('this should not be written to the file'); logger.info('this should not be written to the file');
logger.warn('this should be written to the file'); logger.warn('this should be written to the file');
fs.readFile('tmp-tests.log', 'utf8', this.callback); fs.readFile('tmp-tests.log', 'utf8', this.callback);
}, },
'should load appender configuration from a json file': function(err, contents) { 'should load appender configuration from a json file': function(err, contents) {
assert.include(contents, 'this should be written to the file\n'); assert.include(contents, 'this should be written to the file\n');
assert.equal(contents.indexOf('this should not be written to the file'), -1); assert.equal(contents.indexOf('this should not be written to the file'), -1);
} }
}
} }
}
}).addBatch({
'with node version less than 0.10': {
topic: function() {
var oldStyleStreamCreated = false
, appender = sandbox.require(
'../lib/appenders/file',
{
globals: {
process: {
version: "v0.8.1",
on: function() {}
}
},
requires: {
'../old-streams': {
BufferedWriteStream: function() {
oldStyleStreamCreated = true;
this.on = function() {};
},
RollingFileStream: function() {
this.on = function() {};
}
}
}
}
).appender('cheese.log', null, 1000, 1);
return oldStyleStreamCreated;
},
'should load the old-style streams': function(loaded) {
assert.isTrue(loaded);
}
},
'with node version greater than or equal to 0.10': {
topic: function() {
var oldStyleStreamCreated = false
, appender = sandbox.require(
'../lib/appenders/file',
{
globals: {
process: {
version: "v0.10.1",
on: function() {}
}
},
requires: {
'../streams': {
RollingFileStream: function() {
this.on = function() {};
}
}
}
}
).appender('cheese.log', null, 1000, 1);
return oldStyleStreamCreated;
},
'should load the new streams': function(loaded) {
assert.isFalse(loaded);
}
}
}).export(module); }).export(module);

View File

@@ -56,6 +56,12 @@ vows.describe('log4js-abspath').addBatch({
}; };
} }
} }
},
globals: {
process: {
version: "v0.10.1",
on: function() {}
}
} }
} }
); );
@@ -66,4 +72,4 @@ vows.describe('log4js-abspath').addBatch({
assert.equal(fileOpened, "/absolute/path/to/whatever.log"); assert.equal(fileOpened, "/absolute/path/to/whatever.log");
} }
}, },
}).export(module); }).export(module);

View File

@@ -30,24 +30,24 @@ function makeFakeNet() {
}; };
}, },
createServer: function(cb) { createServer: function(cb) {
var fakeNet = this; var fakeNet = this;
cb({ cb({
remoteAddress: '1.2.3.4', remoteAddress: '1.2.3.4',
remotePort: '1234', remotePort: '1234',
setEncoding: function(encoding) { setEncoding: function(encoding) {
fakeNet.encoding = encoding; fakeNet.encoding = encoding;
}, },
on: function(event, cb) { on: function(event, cb) {
fakeNet.cbs[event] = cb; fakeNet.cbs[event] = cb;
} }
}); });
return { return {
listen: function(port, host) { listen: function(port, host) {
fakeNet.port = port; fakeNet.port = port;
fakeNet.host = host; fakeNet.host = host;
} }
}; };
} }
}; };
} }
@@ -183,7 +183,6 @@ vows.describe('Multiprocess Appender').addBatch({
topic: function(net) { topic: function(net) {
var logString = JSON.stringify({ level: { level: 10000, levelStr: 'DEBUG' }, data: ['some debug']}) + '__LOG4JS__'; var logString = JSON.stringify({ level: { level: 10000, levelStr: 'DEBUG' }, data: ['some debug']}) + '__LOG4JS__';
net.cbs['connect']();
net.cbs['data'](JSON.stringify({ level: { level: 40000, levelStr: 'ERROR' }, data: ['an error message'] }) + '__LOG4JS__'); net.cbs['data'](JSON.stringify({ level: { level: 40000, levelStr: 'ERROR' }, data: ['an error message'] }) + '__LOG4JS__');
net.cbs['data'](logString.substring(0, 10)); net.cbs['data'](logString.substring(0, 10));
net.cbs['data'](logString.substring(10)); net.cbs['data'](logString.substring(10));

View File

@@ -2,7 +2,8 @@ var vows = require('vows')
, assert = require('assert') , assert = require('assert')
, streams = require('stream') , streams = require('stream')
, fs = require('fs') , fs = require('fs')
, DateRollingFileStream = require('../../lib/streams').DateRollingFileStream , semver = require('semver')
, DateRollingFileStream
, testTime = new Date(2012, 8, 12, 10, 37, 11); , testTime = new Date(2012, 8, 12, 10, 37, 11);
function cleanUp(filename) { function cleanUp(filename) {
@@ -15,108 +16,112 @@ function now() {
return testTime.getTime(); return testTime.getTime();
} }
vows.describe('DateRollingFileStream').addBatch({ if (semver.satisfies(process.version, '>=0.10.0')) {
DateRollingFileStream = require('../../lib/streams').DateRollingFileStream;
vows.describe('DateRollingFileStream').addBatch({
'arguments': { 'arguments': {
topic: new DateRollingFileStream(__dirname + '/test-date-rolling-file-stream-1', 'yyyy-mm-dd.hh'), topic: new DateRollingFileStream(__dirname + '/test-date-rolling-file-stream-1', 'yyyy-mm-dd.hh'),
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-1'), teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-1'),
'should take a filename and a pattern and return a WritableStream': function(stream) { 'should take a filename and a pattern and return a WritableStream': function(stream) {
assert.equal(stream.filename, __dirname + '/test-date-rolling-file-stream-1'); assert.equal(stream.filename, __dirname + '/test-date-rolling-file-stream-1');
assert.equal(stream.pattern, 'yyyy-mm-dd.hh'); assert.equal(stream.pattern, 'yyyy-mm-dd.hh');
assert.instanceOf(stream, streams.Writable); assert.instanceOf(stream, streams.Writable);
}, },
'with default settings for the underlying stream': function(stream) { 'with default settings for the underlying stream': function(stream) {
assert.equal(stream.theStream.mode, 420); assert.equal(stream.theStream.mode, 420);
assert.equal(stream.theStream.flags, 'a'); assert.equal(stream.theStream.flags, 'a');
//encoding is not available on the underlying stream //encoding is not available on the underlying stream
//assert.equal(stream.encoding, 'utf8'); //assert.equal(stream.encoding, 'utf8');
} }
}, },
'default arguments': { 'default arguments': {
topic: new DateRollingFileStream(__dirname + '/test-date-rolling-file-stream-2'), topic: new DateRollingFileStream(__dirname + '/test-date-rolling-file-stream-2'),
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-2'), teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-2'),
'pattern should be .yyyy-MM-dd': function(stream) { 'pattern should be .yyyy-MM-dd': function(stream) {
assert.equal(stream.pattern, '.yyyy-MM-dd'); assert.equal(stream.pattern, '.yyyy-MM-dd');
} }
}, },
'with stream arguments': { 'with stream arguments': {
topic: new DateRollingFileStream(__dirname + '/test-date-rolling-file-stream-3', 'yyyy-MM-dd', { mode: 0666 }), topic: new DateRollingFileStream(__dirname + '/test-date-rolling-file-stream-3', 'yyyy-MM-dd', { mode: 0666 }),
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-3'), teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-3'),
'should pass them to the underlying stream': function(stream) { 'should pass them to the underlying stream': function(stream) {
assert.equal(stream.theStream.mode, 0666); assert.equal(stream.theStream.mode, 0666);
} }
}, },
'with stream arguments but no pattern': { 'with stream arguments but no pattern': {
topic: new DateRollingFileStream(__dirname + '/test-date-rolling-file-stream-4', { mode: 0666 }), topic: new DateRollingFileStream(__dirname + '/test-date-rolling-file-stream-4', { mode: 0666 }),
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-4'), teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-4'),
'should pass them to the underlying stream': function(stream) { 'should pass them to the underlying stream': function(stream) {
assert.equal(stream.theStream.mode, 0666); assert.equal(stream.theStream.mode, 0666);
}, },
'should use default pattern': function(stream) { 'should use default pattern': function(stream) {
assert.equal(stream.pattern, '.yyyy-MM-dd'); assert.equal(stream.pattern, '.yyyy-MM-dd');
} }
}, },
'with a pattern of .yyyy-MM-dd': { 'with a pattern of .yyyy-MM-dd': {
topic: function() { topic: function() {
var that = this, var that = this,
stream = new DateRollingFileStream(__dirname + '/test-date-rolling-file-stream-5', '.yyyy-MM-dd', null, now); stream = new DateRollingFileStream(__dirname + '/test-date-rolling-file-stream-5', '.yyyy-MM-dd', null, now);
stream.write("First message\n", 'utf8', function() { stream.write("First message\n", 'utf8', function() {
that.callback(null, stream); that.callback(null, stream);
}); });
},
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-5'),
'should create a file with the base name': {
topic: function(stream) {
fs.readFile(__dirname + '/test-date-rolling-file-stream-5', this.callback);
}, },
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-5'), 'file should contain first message': function(result) {
assert.equal(result.toString(), "First message\n");
'should create a file with the base name': {
topic: function(stream) {
fs.readFile(__dirname + '/test-date-rolling-file-stream-5', this.callback);
},
'file should contain first message': function(result) {
assert.equal(result.toString(), "First message\n");
}
},
'when the day changes': {
topic: function(stream) {
testTime = new Date(2012, 8, 13, 0, 10, 12);
stream.write("Second message\n", 'utf8', this.callback);
},
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-5.2012-09-12'),
'the number of files': {
topic: function() {
fs.readdir(__dirname, this.callback);
},
'should be two': function(files) {
assert.equal(files.filter(function(file) { return file.indexOf('test-date-rolling-file-stream-5') > -1; }).length, 2);
}
},
'the file without a date': {
topic: function() {
fs.readFile(__dirname + '/test-date-rolling-file-stream-5', this.callback);
},
'should contain the second message': function(contents) {
assert.equal(contents.toString(), "Second message\n");
}
},
'the file with the date': {
topic: function() {
fs.readFile(__dirname + '/test-date-rolling-file-stream-5.2012-09-12', this.callback);
},
'should contain the first message': function(contents) {
assert.equal(contents.toString(), "First message\n");
}
}
} }
},
'when the day changes': {
topic: function(stream) {
testTime = new Date(2012, 8, 13, 0, 10, 12);
stream.write("Second message\n", 'utf8', this.callback);
},
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-5.2012-09-12'),
'the number of files': {
topic: function() {
fs.readdir(__dirname, this.callback);
},
'should be two': function(files) {
assert.equal(files.filter(function(file) { return file.indexOf('test-date-rolling-file-stream-5') > -1; }).length, 2);
}
},
'the file without a date': {
topic: function() {
fs.readFile(__dirname + '/test-date-rolling-file-stream-5', this.callback);
},
'should contain the second message': function(contents) {
assert.equal(contents.toString(), "Second message\n");
}
},
'the file with the date': {
topic: function() {
fs.readFile(__dirname + '/test-date-rolling-file-stream-5.2012-09-12', this.callback);
},
'should contain the first message': function(contents) {
assert.equal(contents.toString(), "First message\n");
}
}
}
} }
}).exportTo(module); }).exportTo(module);
}

View File

@@ -4,7 +4,9 @@ var vows = require('vows')
, events = require('events') , events = require('events')
, fs = require('fs') , fs = require('fs')
, streams = require('stream') , streams = require('stream')
, RollingFileStream = require('../../lib/streams').RollingFileStream; , semver = require('semver')
, RollingFileStream;
function remove(filename) { function remove(filename) {
try { try {
@@ -14,54 +16,57 @@ function remove(filename) {
} }
} }
vows.describe('RollingFileStream').addBatch({ if (semver.satisfies(process.version, '>=0.10.0')) {
RollingFileStream = require('../../lib/streams').RollingFileStream;
vows.describe('RollingFileStream').addBatch({
'arguments': { 'arguments': {
topic: function() { topic: function() {
remove(__dirname + "/test-rolling-file-stream"); remove(__dirname + "/test-rolling-file-stream");
return new RollingFileStream("test-rolling-file-stream", 1024, 5); return new RollingFileStream("test-rolling-file-stream", 1024, 5);
}, },
'should take a filename, file size in bytes, number of backups as arguments and return a Writable': function(stream) { 'should take a filename, file size in bytes, number of backups as arguments and return a Writable': function(stream) {
assert.instanceOf(stream, streams.Writable); assert.instanceOf(stream, streams.Writable);
assert.equal(stream.filename, "test-rolling-file-stream"); assert.equal(stream.filename, "test-rolling-file-stream");
assert.equal(stream.size, 1024); assert.equal(stream.size, 1024);
assert.equal(stream.backups, 5); assert.equal(stream.backups, 5);
}, },
'with default settings for the underlying stream': function(stream) { 'with default settings for the underlying stream': function(stream) {
assert.equal(stream.theStream.mode, 420); assert.equal(stream.theStream.mode, 420);
assert.equal(stream.theStream.flags, 'a'); assert.equal(stream.theStream.flags, 'a');
//encoding isn't a property on the underlying stream //encoding isn't a property on the underlying stream
//assert.equal(stream.theStream.encoding, 'utf8'); //assert.equal(stream.theStream.encoding, 'utf8');
} }
}, },
'with stream arguments': { 'with stream arguments': {
topic: function() { topic: function() {
remove(__dirname + '/test-rolling-file-stream'); remove(__dirname + '/test-rolling-file-stream');
return new RollingFileStream('test-rolling-file-stream', 1024, 5, { mode: 0666 }); return new RollingFileStream('test-rolling-file-stream', 1024, 5, { mode: 0666 });
}, },
'should pass them to the underlying stream': function(stream) { 'should pass them to the underlying stream': function(stream) {
assert.equal(stream.theStream.mode, 0666); assert.equal(stream.theStream.mode, 0666);
} }
}, },
'without size': { 'without size': {
topic: function() { topic: function() {
try { try {
new RollingFileStream(__dirname + "/test-rolling-file-stream"); new RollingFileStream(__dirname + "/test-rolling-file-stream");
} catch (e) { } catch (e) {
return e; return e;
}
},
'should throw an error': function(err) {
assert.instanceOf(err, Error);
} }
},
'should throw an error': function(err) {
assert.instanceOf(err, Error);
}
}, },
'without number of backups': { 'without number of backups': {
topic: function() { topic: function() {
remove('test-rolling-file-stream'); remove('test-rolling-file-stream');
return new RollingFileStream(__dirname + "/test-rolling-file-stream", 1024); return new RollingFileStream(__dirname + "/test-rolling-file-stream", 1024);
}, },
'should default to 1 backup': function(stream) { 'should default to 1 backup': function(stream) {
assert.equal(stream.backups, 1); assert.equal(stream.backups, 1);
} }
}, },
'writing less than the file size': { 'writing less than the file size': {
topic: function() { topic: function() {
@@ -70,7 +75,7 @@ vows.describe('RollingFileStream').addBatch({
stream.write("cheese", "utf8", function() { stream.write("cheese", "utf8", function() {
stream.end(); stream.end();
fs.readFile(__dirname + "/test-rolling-file-stream-write-less", "utf8", that.callback); fs.readFile(__dirname + "/test-rolling-file-stream-write-less", "utf8", that.callback);
}); });
}, },
'should write to the file': function(contents) { 'should write to the file': function(contents) {
assert.equal(contents, "cheese"); assert.equal(contents, "cheese");
@@ -89,12 +94,12 @@ vows.describe('RollingFileStream').addBatch({
remove(__dirname + "/test-rolling-file-stream-write-more"); remove(__dirname + "/test-rolling-file-stream-write-more");
remove(__dirname + "/test-rolling-file-stream-write-more.1"); remove(__dirname + "/test-rolling-file-stream-write-more.1");
var that = this, stream = new RollingFileStream(__dirname + "/test-rolling-file-stream-write-more", 45); var that = this, stream = new RollingFileStream(__dirname + "/test-rolling-file-stream-write-more", 45);
async.forEach([0, 1, 2, 3, 4, 5, 6], function(i, cb) { async.forEach([0, 1, 2, 3, 4, 5, 6], function(i, cb) {
stream.write(i +".cheese\n", "utf8", cb); stream.write(i +".cheese\n", "utf8", cb);
}, function() { }, function() {
stream.end(); stream.end();
that.callback(); that.callback();
}); });
}, },
'the number of files': { 'the number of files': {
topic: function() { topic: function() {
@@ -102,8 +107,8 @@ vows.describe('RollingFileStream').addBatch({
}, },
'should be two': function(files) { 'should be two': function(files) {
assert.equal(files.filter( assert.equal(files.filter(
function(file) { return file.indexOf('test-rolling-file-stream-write-more') > -1; } function(file) { return file.indexOf('test-rolling-file-stream-write-more') > -1; }
).length, 2); ).length, 2);
} }
}, },
'the first file': { 'the first file': {
@@ -123,4 +128,5 @@ vows.describe('RollingFileStream').addBatch({
} }
} }
} }
}).exportTo(module); }).exportTo(module);
}