Compare commits

..

243 Commits

Author SHA1 Message Date
Gareth Jones
936ad4da8e fixed tests broken by alwaysIncludePattern 2013-05-05 13:44:01 +10:00
Gareth Jones
097ae3d7f1 Merge branch 'alwaysIncludePattern' of https://github.com/issacg/log4js-node into isaacg-alwaysIncludePattern 2013-05-04 16:10:02 +10:00
Issac Goldstand
04de4ed8d3 fix OS-specific endline mucking test results (:-O not everyone uses linux?!?!) 2013-05-03 11:14:28 +03:00
Issac Goldstand
29b02921b6 add option alwaysIncludePattern to dateTime appender to always use the filename with the pattern included when logging 2013-05-02 14:56:33 +03:00
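A hedged configuration example of the option this commit adds, based on the dateFile appender shipped in this changeset; the filename, pattern and category values are illustrative, not taken from the commit.

```javascript
var log4js = require('log4js');

log4js.configure({
  appenders: [
    {
      type: "dateFile",
      filename: "app.log",
      pattern: ".yyyy-MM-dd",       // the appender's default rolling pattern
      alwaysIncludePattern: true,   // write to app.log.yyyy-MM-dd from the start
      category: "app"
    }
  ]
});
```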
Gareth Jones
48ed5d1222 Removed the warning about node 0.10 2013-04-11 22:34:49 +10:00
Gareth Jones
7844b0d2e4 0.6.3 2013-04-11 22:29:13 +10:00
Gareth Jones
8b49ba9f3d added node 0.8 to travis config and package.json 2013-04-11 21:49:08 +10:00
Gareth Jones
ed7462885f backporting new streams to node 0.8 for issue #129 2013-04-11 21:45:16 +10:00
Gareth Jones
36c5175a55 0.6.2 2013-04-02 12:02:47 +11:00
Gareth Jones
22160f90b3 fixed the multiprocess tests 2013-04-02 11:59:45 +11:00
Gareth Jones
73437ecb40 Merge branch 'master' of https://github.com/dsn/log4js-node into dsn-master 2013-04-02 11:34:25 +11:00
Gareth Jones
107e33c0d1 merged in change from @vojtajina for pull request #128 2013-04-02 10:18:25 +11:00
Gareth Jones
6352632fb2 fix version of node supported 2013-04-02 10:02:48 +11:00
Gareth Jones
0544342e9f Merge pull request #128 from Dignifiedquire/master-engine
Fix node engine in package.json
2013-04-01 15:42:41 -07:00
Friedel Ziegelmayer
1d1153d32f Fix node engine in package.json 2013-04-01 23:00:26 +02:00
Gary Steven
e58cf201ca Updated for Node 0.10.x
net.createServer no longer emits 'connect' event
2013-03-30 03:23:58 -07:00
Gareth Jones
83271e47fc Merge pull request #125 from jimschubert/master
Allow for somewhat standard debugging calls
2013-03-24 19:35:24 -07:00
Jim Schubert
f3271a3997 Add standard debug conditional function
2013-03-23 18:50:13 -07:00
Gareth Jones
4b7cf589a2 Fixing the wiki links (issue #124) 2013-03-20 19:47:32 +11:00
Gareth Jones
c8f401c47d fixed travis node version format 2013-03-20 14:58:56 +11:00
Gareth Jones
ecbf41bc83 updated readme with node 0.10 info 2013-03-20 09:16:42 +11:00
Gareth Jones
65e490cbd2 Fixes for version v0.10 streams, breaks log4js for older versions of node 2013-03-20 09:14:27 +11:00
Gareth Jones
5e242c9dc9 bumped version 2013-02-25 16:33:48 +11:00
Gareth Jones
50eefcc701 Merge pull request #116 from imkira/master
Pass options from multiprocess appender to inner appender
2013-02-24 21:30:42 -08:00
Mário Freitas
8e53c6213e fix: pass options from multiprocess appender to inner appender 2013-02-21 00:06:59 +09:00
Gareth Jones
a15a628311 Merge pull request #115 from NicolasPelletier/master
Speed up file logging for high rate of logging.
2013-02-14 16:32:03 -08:00
Nicolas Pelletier
b75e3660f4 Speed up file logging for high rate of logging.
During an evaluation of multiple loggers, I saw a slowdown when trying to
quickly log more than 100,000 messages to a file:
```javascript
    counter = 150000;
    while (counter) {
        logger.info('Message[' + counter + ']');
        counter -= 1;
    }
```

My detailed test can be found here:
 - https://gist.github.com/NicolasPelletier/4773843

The test demonstrates that writing 150,000 lines straight to a FileStream
takes about 22 seconds until the file content stabilizes. When calling
logger.debug() 150,000 times, the file stabilizes to its final content
after 229s (almost 4 minutes!).

After investigation, it turns out that the problem is the use of an Array to
accumulate the data. Pushing the data into the Array with Array.push() is
quick, but the code flushing the buffer uses Array.shift(), which forces
re-indexing of all 149,999 elements remaining in the Array. This gets
progressively slower as the buffer grows.

The solution is to use something other than an Array to accumulate the
messages. The fix was made using a package called Dequeue
(https://github.com/lleo/node-dequeue). Replacing the Array with
a Dequeue object brought the logging of 150,000 messages back down to
31s, seven times faster than the previous 229s.

There is a caveat: each log call takes slightly longer, due to the need
to create an object to put in the double-ended queue inside the Dequeue
object. According to a quick test, this adds about 4% per call
to logger.debug().
2013-02-13 09:35:02 -05:00
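A minimal sketch of the buffering approach described in the commit above, assuming the 'dequeue' package linked there; `buffer`, `bufferMessage` and `flushBuffer` are illustrative names, not the actual log4js internals.

```javascript
// Illustrative only: accumulate pending log lines in a Dequeue instead of an Array,
// so that draining the buffer does not re-index every remaining element.
var Dequeue = require('dequeue');

var buffer = new Dequeue();

function bufferMessage(message) {
  buffer.push(message);            // O(1), same as Array.push()
}

function flushBuffer(stream) {
  while (buffer.length > 0) {
    stream.write(buffer.shift());  // O(1); Array.shift() is O(n) per call
  }
}
```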
Gareth Jones
22da6226e5 Merge pull request #113 from bitcloud/patternLayout_tokens
add your own tokens to the patternLayout
2013-02-11 13:45:18 -08:00
Jan Schmidle
a3bdac8e14 updated require in example to match other examples 2013-02-08 16:22:29 +01:00
Jan Schmidle
af428c5669 added example on pattern tokens usage 2013-02-08 16:18:27 +01:00
Jan Schmidle
5c75ba9468 fixed small issue that could occur with wrong evaluated parameters 2013-02-08 16:17:24 +01:00
Jan Schmidle
bec0d05847 added some documentation to the function header 2013-02-08 16:15:51 +01:00
Jan Schmidle
e4bf405f20 add your own tokens to the patternLayout 2013-02-08 14:54:18 +01:00
Gareth Jones
95568f352b Merge pull request #110 from Dignifiedquire/fix-2
Move examples into their own directory.
2013-01-20 16:15:53 -08:00
Gareth Jones
6da6f3c90e Merge pull request #109 from Dignifiedquire/fix-1
Misc code highlighting fixes in readme.md
2013-01-20 14:16:04 -08:00
Friedel Ziegelmayer
7f57d14e70 Move examples into their own directory. 2013-01-19 22:14:14 +01:00
Friedel Ziegelmayer
f478793da3 Misc code highlighting fixes in readme.md 2013-01-19 22:09:31 +01:00
Gareth Jones
ec2f8fec3b Merge pull request #105 from ulikoehler/readme-syntax-highlighting
Added syntax highlighting to JS code in README.md
2013-01-06 13:33:52 -08:00
Uli Köhler
0167c84ea5 Added syntax highlighting to JS code in README.md 2013-01-06 01:09:55 +01:00
Gareth Jones
3e1a27e522 New version, with colours in pattern layout 2012-12-03 09:59:36 +11:00
Gareth Jones
8b42e46071 Merge pull request #101 from Dignifiedquire/feature-color-pattern
[feature] Add patternColoured Layout.
2012-12-02 14:51:27 -08:00
Friedel Ziegelmayer
4a7a90ed53 [feature] Add color option to pattern layout.
Based on #90, this implements the possibility to add the color codes
according to the log level via %[ and %].
2012-12-02 23:41:59 +01:00
Gareth Jones
a9307fd6da fix for issue #100, multiprocess appender and logLevelFilter don't play nicely 2012-11-09 16:02:16 +11:00
Gareth Jones
4739c65c68 Version 0.5.4 2012-10-16 11:54:21 +11:00
Gareth Jones
892181f88f Merge pull request #98 from danbell/master
Check environment variable LOG4JS_CONFIG for configuration file location.
2012-10-15 17:52:08 -07:00
Daniel Bell
bdfa7f9a9b Delete LOG4JS_CONFIG environment variable after test has finished. 2012-10-16 10:55:30 +11:00
Daniel Bell
ad63b801f7 Check environment variable LOG4JS_CONFIG for configuration file location. 2012-10-16 08:36:26 +11:00
Gareth Jones
2bfad6362a Version 0.5.3 2012-09-26 09:49:58 +10:00
Gareth Jones
2b889fe776 Working date rolling file appender. 2012-09-25 08:16:59 +10:00
Gareth Jones
9ac61e37f4 Refactored where the exit handler gets added 2012-09-25 07:43:37 +10:00
Gareth Jones
185f343e68 Working date rolling file stream 2012-09-18 08:46:39 +10:00
Gareth Jones
be1272cd7c moved streams code around, added stub for DateRollingFileStream 2012-09-05 10:58:28 +10:00
Gareth Jones
cbc1dd32f9 fixed up some dodgy tabbing 2012-09-05 08:00:31 +10:00
Gareth Jones
a6fb26efb1 Removed mentions of pollInterval (issue #93) 2012-09-04 13:48:35 +10:00
Gareth Jones
012b0d5ed7 version 0.5.2 2012-08-14 10:47:25 +10:00
Gareth Jones
de72005e7e Fixed layout stack trace test 2012-08-14 09:44:43 +10:00
Gareth Jones
c6a0e58409 Merge pull request #89 from ixti/master
Fix possible memleak with `exit` event handlers
2012-08-13 16:32:08 -07:00
Aleksey V Zapparov
f832a2ba79 Do not assign multiple exit handlers for FA 2012-08-09 15:21:30 +02:00
Aleksey V Zapparov
3f10b68c30 Add test for amount of exit listeners in FA 2012-08-09 15:15:28 +02:00
Gareth Jones
54c311842c Merge pull request #86 from osher/patch-3
Update lib/layouts.js
2012-08-01 16:21:01 -07:00
osher
f948b5f5cd Add unit tests - layouts-test.js 2012-08-01 10:11:37 +03:00
osher
54e420eb58 Update lib/layouts.js
Errors sometimes carry additional attributes as part of the passed error data.
One utility that makes use of this is 'errs', which is used, for instance, by 'nano', the CouchDB driver.

When only the stack is printed, all the additional information augmented onto the error object never reaches the log and is lost.

Consider the following code:

```
//the oups-throwing utility
//(assumes an `extend` helper, e.g. from the 'extend' package, that copies
//properties onto the error object)
function oups() {
  var e = new Error();
  extend(e, {
    message    : "Oups error",
    description: "huston, we got a problem",
    status     : "MESS",
    errorCode  : 991,
    arr        : [1, 2, 3, 4, {}],
    data       : { c: {}, d: { e: {} } }
  });
  throw e;
}

var log = require('log4js').getLogger();

try {
  oups();
} catch (e) {
  log.error("error on oups", e);
}

```


Output before the fix:

```
error on oups Error: Oups error
    at repl:1:11
    at REPLServer.eval (repl.js:80:21)
    at Interface.<anonymous> (repl.js:182:12)
    at Interface.emit (events.js:67:17)
    at Interface._onLine (readline.js:162:10)
    at Interface._line (readline.js:426:8)
    at Interface._ttyWrite (readline.js:603:14)
    at ReadStream.<anonymous> (readline.js:82:12)
    at ReadStream.emit (events.js:88:20)
```


Output after the fix:

```
error on oups { [Error: My error message]
  name: 'Error',
  description: 'huston, we got a problem',
  status: 'MESS',
  errorCode: 991,
  arr: [ 1, 2, 3, 4, {} ],
  data: { c: {}, d: { e: {} } } }
Error: Oups error
    at repl:1:11
    at REPLServer.eval (repl.js:80:21)
    at Interface.<anonymous> (repl.js:182:12)
    at Interface.emit (events.js:67:17)
    at Interface._onLine (readline.js:162:10)
    at Interface._line (readline.js:426:8)
    at Interface._ttyWrite (readline.js:603:14)
    at ReadStream.<anonymous> (readline.js:82:12)
    at ReadStream.emit (events.js:88:20)
```
2012-07-31 14:32:03 +03:00
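A minimal sketch of the kind of layout change this commit describes, using Node's util.inspect to surface the augmented attributes; this is an illustration, not the actual lib/layouts.js code.

```javascript
// Illustrative only: format an Error so that enumerable attributes attached to it
// (description, status, errorCode, ...) appear in the log alongside the stack.
var util = require('util');

function formatError(err) {
  if (err instanceof Error) {
    return util.inspect(err) + '\n' + (err.stack || '');
  }
  return util.inspect(err);
}
```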
Gareth Jones
40ba24a55d Renamed tests so that vows will pick them up automatically 2012-07-31 14:52:36 +10:00
Gareth Jones
e3a20a1746 bumped npm version 2012-07-04 09:28:56 +10:00
Gareth Jones
7a02f39921 Fallback to \n if os.EOL is not defined 2012-07-04 09:25:08 +10:00
Gareth Jones
b6ba3bce00 Merge branch 'master' of https://github.com/nomiddlename/log4js-node 2012-07-04 09:11:07 +10:00
Gareth Jones
638ce187bb use os.EOL instead of \n 2012-07-04 08:53:09 +10:00
Gareth Jones
3cbae96a97 Changed multiprocess appender to use a single socket per client 2012-07-04 08:45:20 +10:00
Gareth Jones
a33e48cb07 Changed multiprocess appender to use a single socket per client 2012-07-04 08:44:50 +10:00
Gareth Jones
df491c0b14 Changed multiprocess appender to use a single socket per client 2012-07-04 08:44:16 +10:00
Gareth Jones
6ff1a2499f removed 0.7 added 0.8 2012-07-04 08:33:06 +10:00
Gareth Jones
ce2d7df8df Merge pull request #78 from druciak/smtp
SMTP appender migrated to nodemailer 0.3.x
2012-06-28 18:09:04 -07:00
Gareth Jones
1b12265800 Merge branch 'master' of https://github.com/nomiddlename/log4js-node 2012-06-29 10:53:38 +10:00
Gareth Jones
32e9045334 added explanation of console appender 2012-06-29 09:38:23 +10:00
Gareth Jones
1aed671137 added fromreadme.js example, updated README 2012-06-29 09:37:41 +10:00
Gareth Jones
68b47dd51c expanded example to include loading appender programmatically 2012-06-29 09:19:20 +10:00
Gareth Jones
8f9b4444f6 made sure example works with categories 2012-06-29 09:05:18 +10:00
Gareth Jones
e49f7107fb example now works 2012-06-29 09:01:42 +10:00
druciak
077302c772 SMTP appender migrated to nodemailer 0.3.x 2012-06-27 18:00:32 +02:00
Gareth Jones
6f0dfa0c5f Added note about console.log replacement. 2012-06-04 09:18:58 +10:00
Gareth Jones
82a6bee331 Fixed the wiki links. 2012-06-01 18:15:55 +10:00
Gareth Jones
ad7e844d68 bumped npm version 2012-06-01 18:13:00 +10:00
Gareth Jones
bef2075c60 moved some docs to the wiki 2012-06-01 18:12:30 +10:00
Gareth Jones
a046523804 Moved Logger into separate file, added support for loading appenders outside log4js, removed 'name' from appender requirements 2012-06-01 11:11:07 +10:00
Gareth Jones
0ed1a137d6 moved Logger class out of main module 2012-05-31 08:16:22 +10:00
Gareth Jones
33a92b5dd6 Removed some exports that are no longer needed 2012-05-31 08:07:45 +10:00
Gareth Jones
0901794b35 Moved abspath option checking into file appender, log4js options now passed to appenders 2012-05-31 07:50:01 +10:00
Gareth Jones
05d5265554 updated hook.io version, was breaking travis build 2012-05-29 16:59:26 +10:00
Gareth Jones
9a29d6222e changed minimum node version to 0.6 2012-05-29 16:52:50 +10:00
Gareth Jones
38a89dcf3d manually merged TooTallNate's pull request #62 2012-05-29 16:49:12 +10:00
Gareth Jones
754ac2c5ac changed config loading to be more predictable 2012-05-29 15:50:35 +10:00
Gareth Jones
ccc4976206 updated node versions for travis 2012-05-09 16:52:02 +10:00
Gareth Jones
6e7348f8d8 all tests pass 2012-05-09 16:48:52 +10:00
Gareth Jones
61078e88ef fixed the nolog tests 2012-05-09 16:40:27 +10:00
Gareth Jones
613a077a61 fixed test-configureNoLevels 2012-05-09 16:31:01 +10:00
Gareth Jones
68d1c8fa07 Merge pull request #69 from NetDevLtd/feature/setLevelAsymmetry
setLevel vs isLevelEnabled asymmetry
2012-05-08 16:38:55 -07:00
Gareth Jones
216937637d Merge pull request #70 from NetDevLtd/feature/configureNoLevels
log4js.configure({}) resets all loggers' levels to TRACE
2012-05-08 16:37:23 -07:00
Mike Bardzinski
ff5b8d2939 Added vows test for the log4js.configure inconsistency, when no 'levels' property is passed in the configuration 2012-05-08 19:19:33 +01:00
Mike Bardzinski
6a20efb965 Added vows tests for the setLevel asymmetry fix 2012-05-08 12:23:30 +01:00
Mike Bardzinski
872bc791c7 Fixes the log4js.configure({}) issue which zapped all loggers' levels to TRACE, even if they were previously set to something else 2012-05-02 16:10:20 +01:00
Mike Bardzinski
2c7b56853b Changed toLevel to accept a Log4js.Level (or in fact any object), and try to convert it to a Log4js.Level. Fixes the setLevel asymmetry, where you cannot setLevel(log4js.level.foo) 2012-05-02 15:41:32 +01:00
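A small illustration of the asymmetry this commit fixes; `log4js.levels` is the levels object used in the connect-logger example elsewhere in this changeset, so its exact shape here is an assumption.

```javascript
var log4js = require('log4js');
var logger = log4js.getLogger('cheese');

logger.setLevel('ERROR');              // setting a level by name always worked
logger.setLevel(log4js.levels.ERROR);  // passing a Level object is what toLevel now accepts
```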
Gareth Jones
c8157cef5c fixed file appender tests 2012-03-22 09:34:41 +11:00
Gareth Jones
352653dcbe increased the wait for file open, think it is what's breaking travis build 2012-03-20 13:55:38 +11:00
Gareth Jones
cff6928761 bumped npm version 2012-03-20 09:39:56 +11:00
Gareth Jones
1fb8962b83 turned off debug in streams (issue #63) 2012-03-20 09:39:15 +11:00
Gareth Jones
d276bbc2f8 Bumped version number, added travis status to readme 2012-02-22 14:37:45 +11:00
Gareth Jones
e78f4e33ce Fixed issue #51, added tests to cover levels 2012-02-22 14:14:46 +11:00
Gareth Jones
53367785b4 got rid of the __preLog4js stuff from the console.log replacement 2012-02-22 08:53:28 +11:00
Gareth Jones
cff20b99e3 added more gelf tests 2012-02-13 08:54:35 +11:00
Gareth Jones
0a422e5749 fixed up gelf tests 2012-02-10 18:14:50 +11:00
Gareth Jones
37b94cf195 Merge pull request #59 from shripadk/master
Allow passing cwd (__dirname) as an option.
2012-02-09 20:27:29 -08:00
Shripad K
0c04c6807c More fixes + Test for "cwd" option 2012-02-08 10:25:14 +05:30
Shripad K
b4ca201a91 feature: allow passing cwd as an option 2012-02-07 12:41:10 +05:30
Gareth Jones
2ab6f5fa24 Merge pull request #56 from arifamirani/master
Fixed tests for gelf appender
2012-01-15 14:36:55 -08:00
Arif Amirani
9bad070b8a Changed tests to not use live udp server as it fails on CI 2012-01-13 13:00:53 +05:30
Gareth Jones
5aaa9fcd50 Merge pull request #54 from arifamirani/master
Add support for GELF logging using UDP
2012-01-12 15:00:11 -08:00
Arif Amirani
b7e77b11ad Fixed some spacing to make README more legible 2012-01-12 15:02:19 +05:30
Arif Amirani
615b534b56 Added README for gelf appender 2012-01-12 15:00:34 +05:30
Arif Amirani
788de0cac3 Added basic tests for gelf appender 2012-01-12 14:52:55 +05:30
Gareth Jones
4d484ad752 Merge pull request #53 from vincentcr/master
make restoreConsole work
2012-01-11 14:44:16 -08:00
Arif Amirani
449893fd24 Added missing dependency on compress-buffer 2012-01-11 16:13:42 +05:30
Arif Amirani
5bdeaf68d7 Adding gelf as an appender 2012-01-11 16:12:24 +05:30
Vincent Côté-Roy
a5b09b3ead fix restoreConsole by making console appender not depend on _preLog4js_log 2012-01-05 08:47:15 -05:00
Daniel Bell
05c4c59c20 Refactored streams to make it easier to write other rolling based file appenders. 2011-12-22 14:36:30 +11:00
Gareth Jones
b4a5227fc0 Merge pull request #49 from Pita/patch-1
Fixed a BUG that prevents connectlogger from working if loglevel is WARN
2011-12-19 15:10:29 -08:00
Gareth Jones
b152618dbc made the file tests more robust 2011-12-20 09:59:02 +11:00
Gareth Jones
a999d8fc00 Fixed the file appender tests 2011-12-20 08:49:21 +11:00
Gareth Jones
78de73a274 Working version of fully-async log rolling file appender - tests need fixing though 2011-12-19 16:58:21 +11:00
Peter 'Pita' Martischka
4cf1d1cfa4 Fixed a BUG that prevents connectlogger from working if loglevel is WARN 2011-12-07 15:28:35 +01:00
Gareth Jones
e5d0b3348f bumped version 2011-11-24 08:40:12 +11:00
Gareth Jones
f10a6e164e windows throws an EEXIST error when renaming, need to handle it 2011-11-24 08:37:05 +11:00
Gareth Jones
cea3dc97d1 Changes to handle drain events not fired on write in linux & windows - should fix issue #44 2011-11-24 08:20:33 +11:00
Gareth Jones
a3a0c55322 version 0.4.0 2011-11-21 16:17:46 +11:00
Gareth Jones
51d48165fd Added travis-ci.org config 2011-11-21 15:07:35 +11:00
Gareth Jones
7d50c45801 Rewrote file appender, fixing issue #16 and issue #31 2011-11-21 15:03:51 +11:00
Gareth Jones
40c5f5ee70 added methods and config to turn off console.log replacement (issue #34) 2011-11-18 08:44:04 +11:00
Gareth Jones
1d769fdf33 added build and node_modules 2011-11-16 08:40:26 +11:00
Gareth Jones
bc665b875e vows seems to have removed assert.length, replaced with assert.equal 2011-11-16 08:39:07 +11:00
Gareth Jones
154c0dc299 changed web->url in bugs (issue #41) 2011-11-16 08:21:44 +11:00
Gareth Jones
050fae5230 replaced 'sys' with 'util' (issue #42) 2011-11-16 08:10:20 +11:00
Gareth Jones
342286e062 Merge pull request #40 from druciak/smtp
SMTP appender
2011-11-10 14:06:27 -08:00
druciak
537f1058b9 Add SMTP appender 2011-11-08 08:56:21 +01:00
Gareth Jones
283a403a11 Merge pull request #37 from dbrain/master
Multiprocess (tcp) appender
2011-11-02 16:03:53 -07:00
Danny Brain
ae8aaa5376 Add a short description on using multiprocess logger 2011-11-03 09:16:38 +11:00
Danny Brain
a95117c0d3 Add tests for multiprocess file appender 2011-11-03 09:10:02 +11:00
Danny Brain
097390bc89 Add multiprocess appender, pending tests 2011-11-02 15:49:46 +11:00
Gareth Jones
0a0119300b Merge pull request #32 from dbrain/master
hook.io appender should accept all configuration
2011-10-30 14:47:29 -07:00
Gareth Jones
fde66f92f5 Merge branch 'master' of https://github.com/csausdev/log4js-node 2011-10-31 08:42:58 +11:00
muddydixon
516659f733 add test code for no log 2011-10-29 11:57:28 +09:00
muddydixon
5aabebbdb7 change check target from req.url to req.originalUrl 2011-10-29 11:55:46 +09:00
Danny Brain
8b376eb46e Buffer the logging until the hook is ready, will prevent lost logs 2011-10-28 10:50:28 +11:00
Danny Brain
ced570413c Pass in all appender parameters to the Hook constructor so a port can be specified 2011-10-28 10:07:48 +11:00
Gareth Jones
b2827076da Merge pull request #30 from dbrain/master
hook.io appender
2011-10-27 15:22:35 -07:00
Danny Brain
07e920cc1b Quick check to make sure the actualAppender gets the right configuration 2011-10-27 16:43:55 +11:00
Danny Brain
89f3659825 Fix the logLevelFilter with lazy loading 2011-10-27 16:37:11 +11:00
Danny Brain
23a2758a6d Lazy load any new style appenders 2011-10-27 16:25:38 +11:00
Danny Brain
25aa075fad Basic (ugly) test 2011-10-27 16:03:06 +11:00
Danny Brain
d099a9fc3f Update readme to describe hook.io usage 2011-10-27 13:16:42 +11:00
Danny Brain
7bc460e8e0 Update readme to describe hook.io usage 2011-10-27 13:14:29 +11:00
Danny Brain
681decf51f Update readme to describe hook.io usage 2011-10-27 13:14:10 +11:00
Danny Brain
b93691b82a Update readme to describe hook.io usage 2011-10-27 13:13:22 +11:00
Danny Brain
f82ecf8f2a Update readme to describe hook.io usage 2011-10-27 13:12:36 +11:00
Danny Brain
3b77a42706 Added a hookio appender, this allows you to run a 'master' log4js instance and 'worker' so only one process writes to file 2011-10-27 12:38:13 +11:00
muddydixon
b5bc9c8322 mod if nolog 2011-10-25 14:28:46 +09:00
muddydixon
c7d3ac4fe1 add nolog operation 2011-10-25 14:09:41 +09:00
Daniel Bell
0aca64623e Merged changes from danbell/master. 2011-10-05 15:03:08 +11:00
Daniel Bell
ff68e46858 Merged changes 2011-10-05 12:27:33 +11:00
Daniel Bell
f9768eb56e Issue #21: fixed reloading of config when config has not changed. 2011-10-05 12:22:31 +11:00
Gareth Jones
75e5584060 Merge pull request #24 from cliffano/master
Add sandboxed-module to devDependencies
2011-09-14 18:15:28 -07:00
Cliffano Subagio
b78fd77015 Add sandboxed-module to dev dependencies. 2011-09-15 11:03:54 +10:00
Gareth Jones
2a06048114 added ignore files 2011-09-15 08:28:12 +10:00
Gareth Jones
9a34d9edfd fixed missing space between log data elements 2011-09-15 08:18:24 +10:00
Gareth Jones
12e71bda4e fixed to work with node 0.5.x 2011-09-15 08:13:04 +10:00
Gareth Jones
53a481d4da Added filtering to appender loader - was choking on .svn files 2011-08-11 16:27:37 +10:00
Gareth Jones
8d7b5513fb bumped version number 2011-07-27 21:22:13 +10:00
Gareth Jones
d13b2fb3b4 turned off config file reloading by default 2011-07-27 21:21:43 +10:00
Gareth Jones
4f7d73bc97 bumped version number 2011-07-27 10:37:30 +10:00
Gareth Jones
163db0e5fd fixed the behaviour of maxlogsize + 0 backups 2011-07-26 18:40:41 +10:00
Gareth Jones
71f9eef6fe Merge pull request #20 from danbell/master
Added ability to reload configuration file periodically.
2011-07-25 18:16:36 -07:00
Daniel Bell
623bc1859f Merged Gareth's latest changes in 2011-07-26 11:11:27 +10:00
Gareth Jones
b72182c0cf bumped version number 2011-07-26 09:10:02 +10:00
Gareth Jones
ef9fe3a4b1 All tests pass, moved appenders into separate files, so that extra ones can be added easily 2011-07-26 08:52:40 +10:00
Daniel Bell
3b241095cb Fixed indentation on markdown file. 2011-07-25 13:16:56 +10:00
Gareth Jones
545681287f working fileappender, with tests, broken everything else 2011-07-24 21:58:02 +10:00
Gareth Jones
80474c6881 got log rolling working, need to fix all the tests 2011-07-22 18:25:55 +10:00
Gareth Jones
7aa076c278 removed the annoying extra new line 2011-07-22 18:25:26 +10:00
Daniel Bell
e6b69ff7f2 Added more documentation on new functionality. 2011-07-22 15:59:17 +10:00
Daniel Bell
69e64932b1 Added functionality to reload configuration file periodically. 2011-07-22 14:43:33 +10:00
Gareth Jones
4b32456db7 fixed a bug where if the first log arg was not a string it wouldn't get logged 2011-07-22 12:28:02 +10:00
Gareth Jones
ec21ec63f0 bumped version number 2011-07-21 20:44:04 +10:00
Gareth Jones
a9a698cf09 fixed log rolling problem 2011-07-21 20:42:14 +10:00
Gareth Jones
925c280c68 check for existence of destroySoon (does not exist in node v0.2.x) 2011-07-21 19:09:22 +10:00
Gareth Jones
d0b4563ba0 fixed small bug checking for stack on undefined object 2011-07-20 19:39:54 +10:00
Gareth Jones
aac8ca0eb0 updated npm version number 2011-07-19 09:44:47 +10:00
Gareth Jones
0968c6709f fixed connect-logger 2011-07-19 09:08:15 +10:00
Gareth Jones
800f0d6bf6 updated npm version number 2011-07-18 09:17:36 +10:00
Gareth Jones
71fe001278 fixed tests to cover writestream 2011-07-17 20:49:39 +10:00
Gareth Jones
3d27140a9d changed fileappender to use writeStream instead of fs.write, tests don't work 2011-07-17 12:28:26 +10:00
Gareth Jones
d64d4ca0ca updated to remove TODO and credit danbell 2011-07-15 09:13:09 +10:00
Gareth Jones
b338b34fd6 added tests for pattern layout 2011-07-15 08:22:44 +10:00
Gareth Jones
3691648cd0 fixed patternlayout, needs tests though 2011-07-13 18:42:56 +10:00
Gareth Jones
d7ffa59434 moved level colours into layouts where they belong, updated README 2011-07-13 18:29:53 +10:00
Gareth Jones
5868856a7d all tests pass, now with proper console.log formatting 2011-07-13 18:12:29 +10:00
Gareth Jones
f89d54b66e removed main function, now using felixge's sandboxed-module, split code into multiple files 2011-07-12 13:03:17 +10:00
Gareth Jones
e121ca345a removed main function, now using felixge's sandboxed-module, split code into multiple files 2011-07-12 13:02:48 +10:00
Gareth Jones
8767cda15f removed the extensions to Date, put them in their own library 2011-07-08 08:15:13 +10:00
Gareth Jones
8eaff77974 Merge pull request #14 from AlexanderS/master
Global log level
2011-06-04 20:41:04 -07:00
Alexander Sulfrian
d7a97366cb global log level is now set via the log4js object 2011-05-30 13:26:30 +02:00
Alexander Sulfrian
a1681f5579 added tests for global loglevel 2011-05-29 04:46:05 +02:00
Alexander Sulfrian
1fa9d029a2 added possibility to define global and local log levels 2011-05-19 15:25:13 +02:00
csausdev
ffdfca7d99 fixing some formatting problems 2011-04-17 18:25:00 +10:00
csausdev
1ee8cfcd74 removed a mention of console.log 2011-04-17 17:55:42 +10:00
csausdev
f386f003be removed the console.log replacement from the readme 2011-04-17 17:48:51 +10:00
csausdev
9f4878d82c Disabling the console.log replacement 2011-04-17 17:46:13 +10:00
csausdev
d9bfc5db44 Added devDependencies for npm 2011-04-17 17:29:22 +10:00
csausdev
a50c02a3e5 Added coloured layout to configuration (thanks @melin) 2011-04-17 17:20:49 +10:00
Daniel Bell
59f7e0af3c Added connect/express logger. 2011-04-07 10:19:18 +10:00
Daniel Bell
3f95e02cba Merged latest changes from upstream. 2011-04-07 10:01:15 +10:00
csausdev
540a683566 moved logLevelFilter tests to vows 2011-04-07 07:21:24 +08:00
csausdev
c74120e499 moved Date tests to vows format 2011-04-07 07:21:24 +08:00
csausdev
6f79694904 added messagePassThroughLayout to vows tests 2011-04-07 07:21:23 +08:00
csausdev
47fcb2233d Added log rolling to config files 2011-04-07 07:21:23 +08:00
csausdev
fb8b4554e1 added a log rolling function to file appender 2011-04-07 07:21:22 +08:00
csausdev
0258fda93c added test for log roller, not written yet 2011-04-07 07:21:22 +08:00
csausdev
80e3ed7174 added masylum's coloured layout function 2011-04-07 07:21:22 +08:00
Gareth Jones
838f0c8f28 Added loading of config from require paths, and now defaults to console appender with basic layout 2011-04-07 07:21:21 +08:00
csausdev
9364a8a442 fixed example to work with refactoring 2011-04-07 07:21:20 +08:00
csausdev
76fea28bbb refactoring to allow dependency injection 2011-04-07 07:21:20 +08:00
csausdev
fc3d50846d initial refactoring to allow dependency injection 2011-04-07 07:21:19 +08:00
csausdev
f21fa2bcf8 changed array detection (thanks fkei) 2011-03-04 19:52:48 +11:00
csausdev
71459ab6d3 changed array detection (thanks fkei) 2011-03-04 19:49:43 +11:00
csausdev
079edd19c8 bumped version, added configure(object) to README 2011-01-16 13:24:07 +11:00
csausdev
a876dfbe9c configure now takes a filename or object 2011-01-16 13:21:37 +11:00
csausdev
c6dd2398ab Persist logging config across invocations 2011-01-16 13:05:13 +11:00
csausdev
cf7d5f681a Merge branch 'master' of github.com:csausdev/log4js-node 2010-12-13 20:21:47 +11:00
csausdev
612d9eeb23 small tweak to exception handling 2010-12-11 21:59:50 +11:00
csausdev
c870289928 now handles exceptions that aren't Errors 2010-12-11 21:55:21 +11:00
csausdev
c2f9ccce73 enhanced console.log 2010-12-08 08:53:59 +11:00
csausdev
2e3843205a finished moving all tests to vows 2010-12-07 09:12:43 +11:00
csausdev
80305ca376 moved basicLayout tests to vows 2010-12-07 08:08:29 +11:00
csausdev
7e7961330d Merge branch 'master' of github.com:csausdev/log4js-node 2010-12-06 20:48:24 +11:00
csausdev
682d95db69 Merge branch 'master' of github.com:csausdev/log4js-node 2010-12-04 15:51:35 +11:00
Gareth Jones
a7006444b3 appenders can be added to multiple categories at the same time 2010-10-21 13:02:16 +08:00
71 changed files with 5410 additions and 3921 deletions

.gitignore (5 lines changed)

@@ -0,0 +1,5 @@
*.log
*.log??
build
node_modules

.npmignore (2 lines changed)

@@ -0,0 +1,2 @@
*.log
*.log??

.travis.yml (5 lines changed)

@@ -0,0 +1,5 @@
language: node_js
node_js:
- "0.10"
- "0.8"


@@ -1,202 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

README.md (158 lines changed)

@@ -1,67 +1,143 @@
# log4js-node
# log4js-node [![Build Status](https://secure.travis-ci.org/nomiddlename/log4js-node.png?branch=master)](http://travis-ci.org/nomiddlename/log4js-node)
This is a conversion of the [log4js](http://log4js.berlios.de/index.html)
framework to work with [node](http://nodejs.org). I've mainly stripped out the browser-specific code
and tidied up some of the javascript. It includes a basic file logger, with log rolling based on file size.
NOTE: since v0.2.0 require('log4js') returns a function, so you need to call that function in your code before you can use it. I've done this to make testing easier (allows dependency injection).
This is a conversion of the [log4js](http://log4js.berlios.de/index.html)
framework to work with [node](http://nodejs.org). I've mainly stripped out the browser-specific code and tidied up some of the javascript.
Out of the box it supports the following features:
* coloured console logging
* replacement of node's console.log functions (optional)
* file appender, with log rolling based on file size
* SMTP appender
* GELF appender
* hook.io appender
* multiprocess appender (useful when you've got worker processes)
* a logger for connect/express servers
* configurable log message layout/patterns
* different log levels for different log categories (make some parts of your app log as DEBUG, others only ERRORS, etc.)
NOTE: from log4js 0.5 onwards you'll need to explicitly enable replacement of node's console.log functions. Do this either by calling `log4js.replaceConsole()` or configuring with an object or json file like this:
```javascript
{
appenders: [
{ type: "console" }
],
replaceConsole: true
}
```
## installation
npm install log4js
## tests
Tests now use [vows](http://vowsjs.org), run with `vows test/logging.js`. I am slowly porting the previous tests from jspec (run those with `node tests.js`), since jspec is no longer maintained.
## usage
Minimalist version:
var log4js = require('log4js')();
var logger = log4js.getLogger();
logger.debug("Some debug messages");
```javascript
var log4js = require('log4js');
var logger = log4js.getLogger();
logger.debug("Some debug messages");
```
By default, log4js outputs to stdout with the coloured layout (thanks to [masylum](http://github.com/masylum)), so for the above you would see:
[2010-01-17 11:43:37.987] [DEBUG] [default] - Some debug messages
```bash
[2010-01-17 11:43:37.987] [DEBUG] [default] - Some debug messages
```
See example.js for a full example, but here's a snippet (also in fromreadme.js):
```javascript
var log4js = require('log4js');
//console log is loaded by default, so you won't normally need to do this
//log4js.loadAppender('console');
log4js.loadAppender('file');
//log4js.addAppender(log4js.appenders.console());
log4js.addAppender(log4js.appenders.file('logs/cheese.log'), 'cheese');
See example.js:
var logger = log4js.getLogger('cheese');
logger.setLevel('ERROR');
var log4js = require('log4js')(); //note the need to call the function
log4js.addAppender(log4js.consoleAppender());
log4js.addAppender(log4js.fileAppender('logs/cheese.log'), 'cheese');
var logger = log4js.getLogger('cheese');
logger.setLevel('ERROR');
logger.trace('Entering cheese testing');
logger.debug('Got cheese.');
logger.info('Cheese is Gouda.');
logger.warn('Cheese is quite smelly.');
logger.error('Cheese is too ripe!');
logger.fatal('Cheese was breeding ground for listeria.');
Output
[2010-01-17 11:43:37.987] [ERROR] cheese - Cheese is too ripe!
[2010-01-17 11:43:37.990] [FATAL] cheese - Cheese was breeding ground for listeria.
logger.trace('Entering cheese testing');
logger.debug('Got cheese.');
logger.info('Cheese is Gouda.');
logger.warn('Cheese is quite smelly.');
logger.error('Cheese is too ripe!');
logger.fatal('Cheese was breeding ground for listeria.');
```
Output:
```bash
[2010-01-17 11:43:37.987] [ERROR] cheese - Cheese is too ripe!
[2010-01-17 11:43:37.990] [FATAL] cheese - Cheese was breeding ground for listeria.
```
The first 5 lines of the code above could also be written as:
```javascript
var log4js = require('log4js');
log4js.configure({
appenders: [
{ type: 'console' },
{ type: 'file', filename: 'logs/cheese.log', category: 'cheese' }
]
});
```
## configuration
You can either configure the appenders and log levels manually (as above), or provide a
configuration file (`log4js.configure('path/to/file.json')`) explicitly, or just let log4js look for a file called `log4js.json` (it looks in the current directory first, then the require paths, and finally looks for the default config included in the same directory as the `log4js.js` file).
An example file can be found in `test/log4js.json`. An example config file with log rolling is in `test/with-log-rolling.json`
You can configure the appenders and log levels manually (as above), or provide a
configuration file (`log4js.configure('path/to/file.json')`), or a configuration object. The
configuration file location may also be specified via the environment variable
LOG4JS_CONFIG (`export LOG4JS_CONFIG=path/to/file.json`).
An example file can be found in `test/log4js.json`. An example config file with log rolling is in `test/with-log-rolling.json`.
By default, the configuration file is checked for changes every 60 seconds, and if changed, reloaded. This allows changes to logging levels to occur without restarting the application.
## todo
To turn off configuration file change checking, configure with:
patternLayout has no tests. This is mainly because I haven't found a use for it yet,
and am not entirely sure what it was supposed to do. It is more-or-less intact from
the original log4js.
```javascript
var log4js = require('log4js');
log4js.configure('my_log4js_configuration.json', {});
```
To specify a different period:
## author (of this node version)
```javascript
log4js.configure('file.json', { reloadSecs: 300 });
```
For FileAppender you can also pass the path to the log directory as an option where all your log files would be stored.
Gareth Jones (csausdev - gareth.jones@sensis.com.au)
```javascript
log4js.configure('my_log4js_configuration.json', { cwd: '/absolute/path/to/log/dir' });
```
If you have already defined an absolute path for one of the FileAppenders in the configuration file, you could add a "absolute": true to the particular FileAppender to override the cwd option passed. Here is an example configuration file:
```json
#### my_log4js_configuration.json ####
{
"appenders": [
{
"type": "file",
"filename": "relative/path/to/log_file.log",
"maxLogSize": 20480,
"backups": 3,
"category": "relative-logger"
},
{
"type": "file",
"absolute": true,
"filename": "/absolute/path/to/log_file.log",
"maxLogSize": 20480,
"backups": 10,
"category": "absolute-logger"
}
]
}
```
Documentation for most of the core appenders can be found on the [wiki](https://github.com/nomiddlename/log4js-node/wiki/Appenders), otherwise take a look at the tests and the examples.
## Documentation
See the [wiki](https://github.com/nomiddlename/log4js-node/wiki). Improve the [wiki](https://github.com/nomiddlename/log4js-node/wiki), please.
## Contributing
Contributions welcome, but take a look at the [rules](https://github.com/nomiddlename/log4js-node/wiki/Contributing) first.
## License
The original log4js was distributed under the Apache 2.0 License, and so is this. I've tried to
keep the original copyright and author credits in place, except in sections that I have rewritten
keep the original copyright and author credits in place, except in sections that I have rewritten
extensively.


@@ -1,13 +0,0 @@
var log4js = require('./lib/log4js')();
log4js.addAppender(log4js.consoleAppender());
log4js.addAppender(log4js.fileAppender('cheese.log'), 'cheese');
var logger = log4js.getLogger('cheese');
logger.setLevel('ERROR');
logger.trace('Entering cheese testing');
logger.debug('Got cheese.');
logger.info('Cheese is Gouda.');
logger.warn('Cheese is quite smelly.');
logger.error('Cheese is too ripe!');
logger.fatal('Cheese was breeding ground for listeria.');


@@ -0,0 +1,14 @@
var log4js = require('./lib/log4js');
log4js.addAppender(log4js.fileAppender('cheese.log'), 'cheese');

var logger = log4js.getLogger('cheese');
logger.setLevel('INFO');

var app = require('express').createServer();
app.configure(function() {
  app.use(log4js.connectLogger(logger, { level: log4js.levels.INFO }));
});
app.get('*', function(req,res) {
  res.send('hello world\n <a href="/cheese">cheese</a>\n');
});
app.listen(5000);


@@ -0,0 +1,45 @@
var log4js = require('./lib/log4js')
  , cluster = require('cluster')
  , numCPUs = require('os').cpus().length
  , i = 0;

if (cluster.isMaster) {
  log4js.configure({
    appenders: [
      {
        type: "multiprocess",
        mode: "master",
        appender: {
          type: "console"
        }
      }
    ]
  });

  console.info("Master creating %d workers", numCPUs);
  for (i=0; i < numCPUs; i++) {
    cluster.fork();
  }

  cluster.on('death', function(worker) {
    console.info("Worker %d died.", worker.pid);
  });
} else {
  log4js.configure({
    appenders: [
      {
        type: "multiprocess",
        mode: "worker"
      }
    ]
  });

  var logger = log4js.getLogger('example-socket');
  console.info("Worker %d started.", process.pid);
  for (i=0; i < 1000; i++) {
    logger.info("Worker %d - logging something %d", process.pid, i);
  }
}

examples/example.js (58 lines changed)

@@ -0,0 +1,58 @@
var log4js = require('../lib/log4js');

//log the cheese logger messages to a file, and the console ones as well.
log4js.configure({
  appenders: [
    {
      type: "file",
      filename: "cheese.log",
      category: [ 'cheese','console' ]
    },
    {
      type: "console"
    }
  ],
  replaceConsole: true
});

//to add an appender programmatically, and without clearing other appenders
//loadAppender is only necessary if you haven't already configured an appender of this type
log4js.loadAppender('file');
log4js.addAppender(log4js.appenders.file('pants.log'), 'pants');
//a custom logger outside of the log4js/lib/appenders directory can be accessed like so
//log4js.loadAppender('what/you/would/put/in/require');
//log4js.addAppender(log4js.appenders['what/you/would/put/in/require'](args));
//or through configure as:
//log4js.configure({
//  appenders: [ { type: 'what/you/would/put/in/require', otherArgs: 'blah' } ]
//});

var logger = log4js.getLogger('cheese');
//only errors and above get logged.
//you can also set this log level in the config object
//via the levels field.
logger.setLevel('ERROR');

//console logging methods have been replaced with log4js ones.
//so this will get coloured output on console, and appear in cheese.log
console.error("AAArgh! Something went wrong", { some: "otherObject", useful_for: "debug purposes" });

//these will not appear (logging level beneath error)
logger.trace('Entering cheese testing');
logger.debug('Got cheese.');
logger.info('Cheese is Gouda.');
logger.warn('Cheese is quite smelly.');

//these end up on the console and in cheese.log
logger.error('Cheese %s is too ripe!', "gouda");
logger.fatal('Cheese was breeding ground for listeria.');

//these don't end up in cheese.log, but will appear on the console
var anotherLogger = log4js.getLogger('another');
anotherLogger.debug("Just checking");

//one for pants.log
//will also go to console, since that's configured for all categories
var pantsLog = log4js.getLogger('pants');
pantsLog.debug("Something for pants");

examples/fromreadme.js (19 lines changed)

@@ -0,0 +1,19 @@
//remember to change the require to just 'log4js' if you've npm install'ed it
var log4js = require('./lib/log4js');
//by default the console appender is loaded
//log4js.loadAppender('console');
//you'd only need to add the console appender if you
//had previously called log4js.clearAppenders();
//log4js.addAppender(log4js.appenders.console());
log4js.loadAppender('file');
log4js.addAppender(log4js.appenders.file('cheese.log'), 'cheese');
var logger = log4js.getLogger('cheese');
logger.setLevel('ERROR');
logger.trace('Entering cheese testing');
logger.debug('Got cheese.');
logger.info('Cheese is Gouda.');
logger.warn('Cheese is quite smelly.');
logger.error('Cheese is too ripe!');
logger.fatal('Cheese was breeding ground for listeria.');

examples/log-rolling.js (27 lines changed)

@@ -0,0 +1,27 @@
var log4js = require('../lib/log4js')
  , log
  , i = 0;

log4js.configure({
  "appenders": [
    {
      type: "console"
      , category: "console"
    },
    {
      "type": "file",
      "filename": "tmp-test.log",
      "maxLogSize": 1024,
      "backups": 3,
      "category": "test"
    }
  ]
});

log = log4js.getLogger("test");

function doTheLogging(x) {
  log.info("Logging something %d", x);
}

for ( ; i < 5000; i++) {
  doTheLogging(i);
}

examples/memory-test.js (37 lines changed)

@@ -0,0 +1,37 @@
var log4js = require('./lib/log4js')
  , logger
  , usage
  , i;

log4js.configure(
  {
    appenders: [
      {
        category: "memory-test"
        , type: "file"
        , filename: "memory-test.log"
      },
      {
        type: "console"
        , category: "memory-usage"
      },
      {
        type: "file"
        , filename: "memory-usage.log"
        , category: "memory-usage"
        , layout: {
          type: "messagePassThrough"
        }
      }
    ]
  }
);

logger = log4js.getLogger("memory-test");
usage = log4js.getLogger("memory-usage");

for (i=0; i < 1000000; i++) {
  if ( (i % 5000) === 0) {
    usage.info("%d %d", i, process.memoryUsage().rss);
  }
  logger.info("Doing something.");
}


@@ -0,0 +1,21 @@
var log4js = require('./lib/log4js');

var config = {
  "appenders": [
    {
      "type": "console",
      "layout": {
        "type": "pattern",
        "pattern": "%[%r (%x{pid}) %p %c -%] %m%n",
        "tokens": {
          "pid" : function() { return process.pid; }
        }
      }
    }
  ]
};

log4js.configure(config, {});

var logger = log4js.getLogger("app");
logger.info("Test log message");

lib/appenders/console.js (20 lines changed)

@@ -0,0 +1,20 @@
var layouts = require('../layouts'),
    consoleLog = console.log;

function consoleAppender (layout) {
  layout = layout || layouts.colouredLayout;
  return function(loggingEvent) {
    consoleLog(layout(loggingEvent));
  };
}

function configure(config) {
  var layout;
  if (config.layout) {
    layout = layouts.layout(config.layout.type, config.layout);
  }
  return consoleAppender(layout);
}

exports.appender = consoleAppender;
exports.configure = configure;

lib/appenders/dateFile.js (53 lines changed)

@@ -0,0 +1,53 @@
var streams = require('../streams'),
    layouts = require('../layouts'),
    path = require('path'),
    os = require('os'),
    eol = os.EOL || '\n',
    openFiles = [];

//close open files on process exit.
process.on('exit', function() {
  openFiles.forEach(function (file) {
    file.end();
  });
});

/**
 * File appender that rolls files according to a date pattern.
 * @filename base filename.
 * @pattern the format that will be added to the end of filename when rolling,
 *          also used to check when to roll files - defaults to '.yyyy-MM-dd'
 * @layout layout function for log messages - defaults to basicLayout
 */
function appender(filename, pattern, alwaysIncludePattern, layout) {
  layout = layout || layouts.basicLayout;
  var logFile = new streams.DateRollingFileStream(filename, pattern, { alwaysIncludePattern: alwaysIncludePattern });
  openFiles.push(logFile);
  return function(logEvent) {
    logFile.write(layout(logEvent) + eol, "utf8");
  };
}

function configure(config, options) {
  var layout;
  if (config.layout) {
    layout = layouts.layout(config.layout.type, config.layout);
  }
  if (!config.alwaysIncludePattern) {
    config.alwaysIncludePattern = false;
  }
  if (options && options.cwd && !config.absolute) {
    config.filename = path.join(options.cwd, config.filename);
  }
  return appender(config.filename, config.pattern, config.alwaysIncludePattern, layout);
}

exports.appender = appender;
exports.configure = configure;

lib/appenders/file.js (73 lines changed)

@@ -0,0 +1,73 @@
var layouts = require('../layouts')
  , path = require('path')
  , fs = require('fs')
  , streams = require('../streams')
  , os = require('os')
  , eol = os.EOL || '\n'
  , openFiles = [];

//close open files on process exit.
process.on('exit', function() {
  openFiles.forEach(function (file) {
    file.end();
  });
});

/**
 * File Appender writing the logs to a text file. Supports rolling of logs by size.
 *
 * @param file file log messages will be written to
 * @param layout a function that takes a logevent and returns a string (defaults to basicLayout).
 * @param logSize - the maximum size (in bytes) for a log file, if not provided then logs won't be rotated.
 * @param numBackups - the number of log files to keep after logSize has been reached (default 5)
 */
function fileAppender (file, layout, logSize, numBackups) {
  var bytesWritten = 0;
  file = path.normalize(file);
  layout = layout || layouts.basicLayout;
  numBackups = numBackups === undefined ? 5 : numBackups;
  //there has to be at least one backup if logSize has been specified
  numBackups = numBackups === 0 ? 1 : numBackups;

  function openTheStream(file, fileSize, numFiles) {
    var stream;
    if (fileSize) {
      stream = new streams.RollingFileStream(
        file,
        fileSize,
        numFiles
      );
    } else {
      stream = fs.createWriteStream(file, { encoding: "utf8", mode: 0644, flags: 'a' });
    }
    stream.on("error", function (err) {
      console.error("log4js.fileAppender - Writing to file %s, error happened ", file, err);
    });
    return stream;
  }

  var logFile = openTheStream(file, logSize, numBackups);

  // push file to the stack of open handlers
  openFiles.push(logFile);

  return function(loggingEvent) {
    logFile.write(layout(loggingEvent) + eol, "utf8");
  };
}

function configure(config, options) {
  var layout;
  if (config.layout) {
    layout = layouts.layout(config.layout.type, config.layout);
  }
  if (options && options.cwd && !config.absolute) {
    config.filename = path.join(options.cwd, config.filename);
  }
  return fileAppender(config.filename, layout, config.maxLogSize, config.backups);
}

exports.appender = fileAppender;
exports.configure = configure;

lib/appenders/gelf.js (93 lines changed)

@@ -0,0 +1,93 @@
var zlib = require('zlib');
var layouts = require('../layouts');
var levels = require('../levels');
var dgram = require('dgram');
var util = require('util');
var LOG_EMERG=0; // system is unusable
var LOG_ALERT=1; // action must be taken immediately
var LOG_CRIT=2; // critical conditions
var LOG_ERR=3; // error conditions
var LOG_ERROR=3; // because people WILL typo
var LOG_WARNING=4; // warning conditions
var LOG_NOTICE=5; // normal, but significant, condition
var LOG_INFO=6; // informational message
var LOG_DEBUG=7; // debug-level message
var levelMapping = {};
levelMapping[levels.ALL] = LOG_DEBUG;
levelMapping[levels.TRACE] = LOG_DEBUG;
levelMapping[levels.DEBUG] = LOG_DEBUG;
levelMapping[levels.INFO] = LOG_INFO;
levelMapping[levels.WARN] = LOG_WARNING;
levelMapping[levels.ERROR] = LOG_ERR;
levelMapping[levels.FATAL] = LOG_CRIT;
/**
* GELF appender that supports sending UDP packets to a GELF compatible server such as Graylog
*
* @param layout a function that takes a logevent and returns a string (defaults to none).
* @param host - host to which to send logs (default:localhost)
* @param port - port at which to send logs to (default:12201)
* @param hostname - hostname of the current host (default:os hostname)
* @param facility - facility to log to (default:nodejs-server)
*/
function gelfAppender (layout, host, port, hostname, facility) {
host = host || 'localhost';
port = port || 12201;
hostname = hostname || require('os').hostname();
facility = facility || 'nodejs-server';
layout = layout || layouts.messagePassThroughLayout;
var client = dgram.createSocket("udp4");
process.on('exit', function() {
if (client) client.close();
});
function preparePacket(loggingEvent) {
var msg = {};
msg.full_message = layout(loggingEvent);
msg.short_message = msg.full_message;
msg.version="1.0";
msg.timestamp = msg.timestamp || new Date().getTime() / 1000 >> 0;
msg.host = hostname;
msg.level = levelMapping[loggingEvent.level || levels.DEBUG];
msg.facility = facility;
return msg;
}
function sendPacket(packet) {
try {
client.send(packet, 0, packet.length, port, host);
} catch(e) {}
}
return function(loggingEvent) {
var message = preparePacket(loggingEvent);
zlib.gzip(new Buffer(JSON.stringify(message)), function(err, packet) {
if (err) {
console.error(err.stack);
} else {
if (packet.length > 8192) {
util.debug("Message packet length (" + packet.length + ") is larger than 8k. Not sending");
} else {
sendPacket(packet);
}
}
});
};
}
function configure(config) {
var layout;
if (config.layout) {
layout = layouts.layout(config.layout.type, config.layout);
}
return gelfAppender(layout, config.host, config.port, config.hostname, config.facility);
}
exports.appender = gelfAppender;
exports.configure = configure;
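
A configuration sketch for the GELF appender, matching the parameters read by configure() above; the Graylog host name and facility are assumptions:

```javascript
var log4js = require('log4js');

// sends gzipped GELF packets over UDP; host/port default to localhost:12201
log4js.configure({
  appenders: [{
    type: 'gelf',
    host: 'graylog.example.com',   // assumed server name
    port: 12201,
    hostname: 'web-01',            // reported source host, defaults to os.hostname()
    facility: 'my-node-app'        // defaults to 'nodejs-server'
  }]
});

log4js.getLogger('gelf-demo').warn('disk space low');
```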

75
lib/appenders/hookio.js Normal file

@@ -0,0 +1,75 @@
var log4js = require('../log4js');
var layouts = require('../layouts');
var Hook = require('hook.io').Hook;
var util = require('util');
var Logger = function createLogger(options) {
var self = this;
var actualAppender = options.actualAppender;
Hook.call(self, options);
self.on('hook::ready', function hookReady() {
self.on('*::' + options.name + '::log', function log(loggingEvent) {
deserializeLoggingEvent(loggingEvent);
actualAppender(loggingEvent);
});
});
}
util.inherits(Logger, Hook);
function deserializeLoggingEvent(loggingEvent) {
loggingEvent.startTime = new Date(loggingEvent.startTime);
loggingEvent.level.toString = function levelToString() {
return loggingEvent.level.levelStr;
};
}
function initHook(hookioOptions) {
var loggerHook;
if (hookioOptions.mode === 'master') {
// Start the master hook, handling the actual logging
loggerHook = new Logger(hookioOptions);
} else {
// Start a worker, just emitting events for a master
loggerHook = new Hook(hookioOptions);
}
loggerHook.start();
return loggerHook;
}
function getBufferedHook(hook, eventName) {
var hookBuffer = [];
var hookReady = false;
hook.on('hook::ready', function emptyBuffer() {
hookBuffer.forEach(function logBufferItem(loggingEvent) {
hook.emit(eventName, loggingEvent);
})
hookReady = true;
});
return function log(loggingEvent) {
if (hookReady) {
hook.emit(eventName, loggingEvent);
} else {
hookBuffer.push(loggingEvent);
}
}
}
function createAppender(hookioOptions) {
var loggerHook = initHook(hookioOptions);
var loggerEvent = hookioOptions.name + '::log';
return getBufferedHook(loggerHook, loggerEvent);
}
function configure(config) {
var actualAppender;
if (config.appender && config.mode === 'master') {
log4js.loadAppender(config.appender.type);
actualAppender = log4js.appenderMakers[config.appender.type](config.appender);
config.actualAppender = actualAppender;
}
return createAppender(config);
}
exports.appender = createAppender;
exports.configure = configure;
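
A sketch of a master-process configuration for the hook.io appender, based only on the configure() function above; it assumes hook.io is installed and that "app-logs" and "app.log" are placeholder names:

```javascript
var log4js = require('log4js');

// master process: owns the real appender and receives events over hook.io
log4js.configure({
  appenders: [{
    type: 'hookio',
    name: 'app-logs',                                 // used to build the 'app-logs::log' event name
    mode: 'master',
    appender: { type: 'file', filename: 'app.log' }   // the appender that actually writes
  }]
});
// worker processes would use the same config with mode: 'worker' and no inner appender
```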


@@ -0,0 +1,20 @@
var levels = require('../levels');
var log4js = require('../log4js');
function logLevelFilter (levelString, appender) {
var level = levels.toLevel(levelString);
return function(logEvent) {
if (logEvent.level.isGreaterThanOrEqualTo(level)) {
appender(logEvent);
}
}
}
function configure(config) {
log4js.loadAppender(config.appender.type);
var appender = log4js.appenderMakers[config.appender.type](config.appender);
return logLevelFilter(config.level, appender);
}
exports.appender = logLevelFilter;
exports.configure = configure;
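
A sketch of wrapping another appender with the logLevelFilter, following the configure() shape above; the console appender and "errors.log" are placeholder choices:

```javascript
var log4js = require('log4js');

// only ERROR and above reach the wrapped file appender;
// everything still goes to the console appender
log4js.configure({
  appenders: [
    { type: 'console' },
    {
      type: 'logLevelFilter',
      level: 'ERROR',
      appender: { type: 'file', filename: 'errors.log' }
    }
  ]
});

log4js.getLogger().info('not written to errors.log');
log4js.getLogger().error('written to errors.log');
```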


@@ -0,0 +1,128 @@
var log4js = require('../log4js'),
net = require('net'),
END_MSG = '__LOG4JS__';
/**
* Creates a server, listening on config.loggerPort, config.loggerHost.
* Output goes to config.actualAppender (config.appender is used to
* set up that appender).
*/
function logServer(config) {
/**
* Takes a utf-8 string, returns an object with
* the correct log properties.
*/
function deserializeLoggingEvent(clientSocket, msg) {
var loggingEvent;
try {
loggingEvent = JSON.parse(msg);
loggingEvent.startTime = new Date(loggingEvent.startTime);
loggingEvent.level = log4js.levels.toLevel(loggingEvent.level.levelStr);
} catch (e) {
// JSON.parse failed, just log the contents - probably not a valid logging event.
loggingEvent = {
startTime: new Date(),
categoryName: 'log4js',
level: log4js.levels.ERROR,
data: [ 'Unable to parse log:', msg ]
};
}
loggingEvent.remoteAddress = clientSocket.remoteAddress;
loggingEvent.remotePort = clientSocket.remotePort;
return loggingEvent;
}
var actualAppender = config.actualAppender,
server = net.createServer(function serverCreated(clientSocket) {
clientSocket.setEncoding('utf8');
var logMessage = '';
function logTheMessage(msg) {
if (logMessage.length > 0) {
actualAppender(deserializeLoggingEvent(clientSocket, msg));
}
}
function chunkReceived(chunk) {
var event;
logMessage += chunk || '';
if (logMessage.indexOf(END_MSG) > -1) {
event = logMessage.substring(0, logMessage.indexOf(END_MSG));
logTheMessage(event);
logMessage = logMessage.substring(event.length + END_MSG.length) || '';
//check for more, maybe it was a big chunk
chunkReceived();
}
}
clientSocket.on('data', chunkReceived);
clientSocket.on('end', chunkReceived);
});
server.listen(config.loggerPort || 5000, config.loggerHost || 'localhost');
return actualAppender;
}
function workerAppender(config) {
var canWrite = false,
buffer = [],
socket;
createSocket();
function createSocket() {
socket = net.createConnection(config.loggerPort || 5000, config.loggerHost || 'localhost');
socket.on('connect', function() {
emptyBuffer();
canWrite = true;
});
socket.on('timeout', socket.end.bind(socket));
//don't bother listening for 'error', 'close' gets called after that anyway
socket.on('close', createSocket);
}
function emptyBuffer() {
var evt;
while ((evt = buffer.shift())) {
write(evt);
}
}
function write(loggingEvent) {
socket.write(JSON.stringify(loggingEvent), 'utf8');
socket.write(END_MSG, 'utf8');
}
return function log(loggingEvent) {
if (canWrite) {
write(loggingEvent);
} else {
buffer.push(loggingEvent);
}
};
}
function createAppender(config) {
if (config.mode === 'master') {
return logServer(config);
} else {
return workerAppender(config);
}
}
function configure(config, options) {
var actualAppender;
if (config.appender && config.mode === 'master') {
log4js.loadAppender(config.appender.type);
actualAppender = log4js.appenderMakers[config.appender.type](config.appender, options);
config.actualAppender = actualAppender;
}
return createAppender(config);
}
exports.appender = createAppender;
exports.configure = configure;
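
A sketch of the multiprocess appender in master mode, based on the configure() and createAppender() functions above; the port and filename are placeholder values:

```javascript
var log4js = require('log4js');

// master: listens on a TCP port and writes every received event with the inner appender
log4js.configure({
  appenders: [{
    type: 'multiprocess',
    mode: 'master',
    loggerPort: 5001,            // defaults to 5000
    loggerHost: 'localhost',
    appender: { type: 'file', filename: 'all-the-logs.log' }
  }]
});

// worker processes use the same type with mode: 'worker'; events are serialised as
// JSON and sent to the master over the socket:
// log4js.configure({ appenders: [{ type: 'multiprocess', mode: 'worker', loggerPort: 5001 }] });
```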

75
lib/appenders/smtp.js Normal file

@@ -0,0 +1,75 @@
var layouts = require("../layouts"),
mailer = require("nodemailer"),
os = require('os');
/**
* SMTP Appender. Sends logging events using SMTP protocol.
* It can either send an email on each event or group several logging events gathered during specified interval.
*
* @param config appender configuration data
* @param layout a function that takes a logevent and returns a string (defaults to basicLayout).
* config.sendInterval - time in seconds during which all events are buffered and sent in one email; if 0 then every event sends an email
*/
function smtpAppender(config, layout) {
layout = layout || layouts.basicLayout;
var subjectLayout = layouts.messagePassThroughLayout;
var sendInterval = config.sendInterval*1000 || 0;
var logEventBuffer = [];
var sendTimer;
var transport = mailer.createTransport(config.transport, config[config.transport]);
function sendBuffer() {
if (logEventBuffer.length == 0)
return;
var firstEvent = logEventBuffer[0];
var body = "";
while (logEventBuffer.length > 0) {
body += layout(logEventBuffer.shift()) + "\n";
}
var msg = {
to: config.recipients,
subject: config.subject || subjectLayout(firstEvent),
text: body,
headers: {"Hostname": os.hostname()}
};
if (config.sender)
msg.from = config.sender;
transport.sendMail(msg, function(error, success) {
if (error) {
console.error("log4js.smtpAppender - Error happened ", error);
}
});
}
function scheduleSend() {
if (!sendTimer)
sendTimer = setTimeout(function() {
sendTimer = null;
sendBuffer();
}, sendInterval);
}
return function(loggingEvent) {
logEventBuffer.push(loggingEvent);
if (sendInterval > 0)
scheduleSend();
else
sendBuffer();
};
}
function configure(config) {
var layout;
if (config.layout) {
layout = layouts.layout(config.layout.type, config.layout);
}
return smtpAppender(config, layout);
}
exports.name = "smtp";
exports.appender = smtpAppender;
exports.configure = configure;
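
A configuration sketch for the SMTP appender. The transport name and its options are passed straight to nodemailer.createTransport(config.transport, config[config.transport]); the addresses and SMTP host below are assumptions:

```javascript
var log4js = require('log4js');

log4js.configure({
  appenders: [{
    type: 'smtp',
    recipients: 'ops@example.com',        // assumed address
    sender: 'app@example.com',            // optional 'from'
    subject: 'Application log',           // optional, defaults to the first event's message
    sendInterval: 60,                     // buffer events and send one mail per minute; 0 = one mail per event
    transport: 'SMTP',
    SMTP: { host: 'smtp.example.com', port: 25 }
  }]
});
```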

168
lib/connect-logger.js Normal file

@@ -0,0 +1,168 @@
var levels = require("./levels");
/**
* Log requests with the given `options` or a `format` string.
*
* Options:
*
* - `format` Format string, see below for tokens
* - `level` A log4js levels instance.
*
* Tokens:
*
* - `:req[header]` ex: `:req[Accept]`
* - `:res[header]` ex: `:res[Content-Length]`
* - `:http-version`
* - `:response-time`
* - `:remote-addr`
* - `:date`
* - `:method`
* - `:url`
* - `:referrer`
* - `:user-agent`
* - `:status`
*
* @param {String|Function|Object} format or options
* @return {Function}
* @api public
*/
function getLogger(logger4js, options) {
if ('object' == typeof options) {
options = options || {};
} else if (options) {
options = { format: options };
} else {
options = {};
}
var thislogger = logger4js
, level = levels.toLevel(options.level, levels.INFO)
, fmt = options.format || ':remote-addr - - ":method :url HTTP/:http-version" :status :content-length ":referrer" ":user-agent"'
, nolog = options.nolog ? createNoLogCondition(options.nolog) : null;
return function (req, res, next) {
// mount safety
if (req._logging) return next();
// nologs
if (nolog && nolog.test(req.originalUrl)) return next();
if (thislogger.isLevelEnabled(level)) {
var start = +new Date
, statusCode
, writeHead = res.writeHead
, end = res.end
, url = req.originalUrl;
// flag as logging
req._logging = true;
// proxy for statusCode.
res.writeHead = function(code, headers){
res.writeHead = writeHead;
res.writeHead(code, headers);
res.__statusCode = statusCode = code;
res.__headers = headers || {};
};
// proxy end to output a line to the provided logger.
res.end = function(chunk, encoding) {
res.end = end;
res.end(chunk, encoding);
res.responseTime = +new Date - start;
if ('function' == typeof fmt) {
var line = fmt(req, res, function(str){ return format(str, req, res); });
if (line) thislogger.log(level, line);
} else {
thislogger.log(level, format(fmt, req, res));
}
};
}
//ensure next gets always called
next();
};
}
/**
* Return formatted log line.
*
* @param {String} str
* @param {IncomingMessage} req
* @param {ServerResponse} res
* @return {String}
* @api private
*/
function format(str, req, res) {
return str
.replace(':url', req.originalUrl)
.replace(':method', req.method)
.replace(':status', res.__statusCode || res.statusCode)
.replace(':response-time', res.responseTime)
.replace(':date', new Date().toUTCString())
.replace(':referrer', req.headers['referer'] || req.headers['referrer'] || '')
.replace(':http-version', req.httpVersionMajor + '.' + req.httpVersionMinor)
.replace(':remote-addr', req.socket && (req.socket.remoteAddress || (req.socket.socket && req.socket.socket.remoteAddress)))
.replace(':user-agent', req.headers['user-agent'] || '')
.replace(':content-length', (res._headers && res._headers['content-length']) || (res.__headers && res.__headers['Content-Length']) || '-')
.replace(/:req\[([^\]]+)\]/g, function(_, field){ return req.headers[field.toLowerCase()]; })
.replace(/:res\[([^\]]+)\]/g, function(_, field){
return res._headers
? (res._headers[field.toLowerCase()] || res.__headers[field])
: (res.__headers && res.__headers[field]);
});
}
/**
* Return RegExp Object about nolog
*
* @param {String} nolog
* @return {RegExp}
* @api private
*/
/**
* syntax
* 1. String
* 1.1 "\\.gif"
* NOT LOGGING http://example.com/hoge.gif and http://example.com/hoge.gif?fuga
* LOGGING http://example.com/hoge.agif
* 1.2 in "\\.gif|\\.jpg$"
* NOT LOGGING http://example.com/hoge.gif and http://example.com/hoge.gif?fuga and http://example.com/hoge.jpg?fuga
* LOGGING http://example.com/hoge.agif, http://example.com/hoge.ajpg and http://example.com/hoge.jpg?hoge
* 1.3 in "\\.(gif|jpe?g|png)$"
* NOT LOGGING http://example.com/hoge.gif and http://example.com/hoge.jpeg
* LOGGING http://example.com/hoge.gif?uid=2 and http://example.com/hoge.jpg?pid=3
* 2. RegExp
* 2.1 in /\.(gif|jpe?g|png)$/
* SAME AS 1.3
* 3. Array
* 3.1 ["\\.jpg$", "\\.png", "\\.gif"]
* SAME AS "\\.jpg|\\.png|\\.gif"
*/
function createNoLogCondition(nolog, type) {
if(!nolog) return null;
type = type || '';
if(nolog instanceof RegExp){
if(type === 'string')
return nolog.source;
return nolog;
} else if(typeof nolog === 'string'){
if(type === 'string')
return nolog;
try{
return new RegExp(nolog);
} catch (ex) {
return null;
}
} else if(nolog instanceof Array){
var regexps = nolog.map(function(o){ return createNoLogCondition(o, 'string')});
return new RegExp(regexps.join('|'));
}
}
exports.connectLogger = getLogger;
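
A usage sketch for the connect logger as middleware, assuming an Express/Connect-style app (any framework with the same middleware signature works); the format string and nolog pattern are example choices:

```javascript
var log4js = require('log4js');
var express = require('express');        // assumed to be installed; any Connect-style app works

var app = express();
var httpLogger = log4js.getLogger('http');

app.use(log4js.connectLogger(httpLogger, {
  level: 'info',                                      // log lines are emitted at INFO
  format: ':method :url :status :response-time ms',   // token-based format string
  nolog: '\\.(gif|jpe?g|png)$'                        // skip requests for static images
}));

app.get('/', function (req, res) { res.send('hello'); });
app.listen(3000);
```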

60
lib/date_format.js Normal file

@@ -0,0 +1,60 @@
exports.ISO8601_FORMAT = "yyyy-MM-dd hh:mm:ss.SSS";
exports.ISO8601_WITH_TZ_OFFSET_FORMAT = "yyyy-MM-ddThh:mm:ssO";
exports.DATETIME_FORMAT = "dd MM yyyy hh:mm:ss.SSS";
exports.ABSOLUTETIME_FORMAT = "hh:mm:ss.SSS";
exports.asString = function(/*format,*/ date) {
var format = exports.ISO8601_FORMAT;
if (typeof(date) === "string") {
format = arguments[0];
date = arguments[1];
}
var vDay = addZero(date.getDate());
var vMonth = addZero(date.getMonth()+1);
var vYearLong = addZero(date.getFullYear());
var vYearShort = addZero(date.getFullYear().toString().substring(3,4));
var vYear = (format.indexOf("yyyy") > -1 ? vYearLong : vYearShort);
var vHour = addZero(date.getHours());
var vMinute = addZero(date.getMinutes());
var vSecond = addZero(date.getSeconds());
var vMillisecond = padWithZeros(date.getMilliseconds(), 3);
var vTimeZone = offset(date);
var formatted = format
.replace(/dd/g, vDay)
.replace(/MM/g, vMonth)
.replace(/y{1,4}/g, vYear)
.replace(/hh/g, vHour)
.replace(/mm/g, vMinute)
.replace(/ss/g, vSecond)
.replace(/SSS/g, vMillisecond)
.replace(/O/g, vTimeZone);
return formatted;
function padWithZeros(vNumber, width) {
var numAsString = vNumber + "";
while (numAsString.length < width) {
numAsString = "0" + numAsString;
}
return numAsString;
}
function addZero(vNumber) {
return padWithZeros(vNumber, 2);
}
/**
* Formats the time offset
* Thanks to http://www.svendtofte.com/code/date_format/
* @private
*/
function offset(date) {
// Difference to Greenwich time (GMT) in hours
var os = Math.abs(date.getTimezoneOffset());
var h = String(Math.floor(os/60));
var m = String(os%60);
h.length == 1? h = "0"+h:1;
m.length == 1? m = "0"+m:1;
return date.getTimezoneOffset() < 0 ? "+"+h+m : "-"+h+m;
}
};
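
A short sketch of using the exported asString helper, run from the repository root so the relative require resolves:

```javascript
var dateFormat = require('./lib/date_format');

// default format (ISO8601) when only a date is given
console.log(dateFormat.asString(new Date()));                      // e.g. "2013-05-05 13:44:01.123"

// an explicit format string can be passed as the first argument
console.log(dateFormat.asString('yyyy-MM-dd hh:mm:ss', new Date()));
console.log(dateFormat.asString(dateFormat.ABSOLUTETIME_FORMAT, new Date()));  // "13:44:01.123"
```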

297
lib/layouts.js Normal file

@@ -0,0 +1,297 @@
var dateFormat = require('./date_format')
, os = require('os')
, eol = os.EOL || '\n'
, util = require('util')
, replacementRegExp = /%[sdj]/g
, layoutMakers = {
"messagePassThrough": function() { return messagePassThroughLayout; }
, "basic": function() { return basicLayout; }
, "colored": function() { return colouredLayout; }
, "coloured": function() { return colouredLayout; }
, "pattern": function (config) {
var pattern = config.pattern || undefined;
var tokens = config.tokens || undefined;
return patternLayout(pattern, tokens);
}
}
, colours = {
ALL: "grey"
, TRACE: "blue"
, DEBUG: "cyan"
, INFO: "green"
, WARN: "yellow"
, ERROR: "red"
, FATAL: "magenta"
, OFF: "grey"
};
function formatLogData(logData) {
var output = ""
, data = Array.isArray(logData) ? logData.slice() : Array.prototype.slice.call(arguments)
, format = data.shift();
if (typeof format === "string") {
output = format.replace(replacementRegExp, function(match) {
switch (match) {
case "%s": return new String(data.shift());
case "%d": return new Number(data.shift());
case "%j": return JSON.stringify(data.shift());
default:
return match;
};
});
} else {
//put it back, it's not a format string
data.unshift(format);
}
data.forEach(function (item) {
if (output) {
output += ' ';
}
output += util.inspect(item);
if (item && item.stack) {
output += "\n" + item.stack;
}
});
return output;
}
var styles = {
//styles
'bold' : [1, 22],
'italic' : [3, 23],
'underline' : [4, 24],
'inverse' : [7, 27],
//grayscale
'white' : [37, 39],
'grey' : [90, 39],
'black' : [90, 39],
//colors
'blue' : [34, 39],
'cyan' : [36, 39],
'green' : [32, 39],
'magenta' : [35, 39],
'red' : [31, 39],
'yellow' : [33, 39]
};
function colorizeStart(style) {
return style ? '\033[' + styles[style][0] + 'm' : '';
}
function colorizeEnd(style) {
return style ? '\033[' + styles[style][1] + 'm' : '';
}
/**
* Taken from masylum's fork (https://github.com/masylum/log4js-node)
*/
function colorize (str, style) {
return colorizeStart(style) + str + colorizeEnd(style);
}
function timestampLevelAndCategory(loggingEvent, colour) {
var output = colorize(
formatLogData(
'[%s] [%s] %s - '
, dateFormat.asString(loggingEvent.startTime)
, loggingEvent.level
, loggingEvent.categoryName
)
, colour
);
return output;
}
/**
* BasicLayout is a simple layout for storing the logs. The logs are stored
* in following format:
* <pre>
* [startTime] [logLevel] categoryName - message\n
* </pre>
*
* @author Stephan Strittmatter
*/
function basicLayout (loggingEvent) {
return timestampLevelAndCategory(loggingEvent) + formatLogData(loggingEvent.data);
}
/**
* colouredLayout - taken from masylum's fork.
* same as basicLayout, but with colours.
*/
function colouredLayout (loggingEvent) {
return timestampLevelAndCategory(loggingEvent, colours[loggingEvent.level.toString()]) + formatLogData(loggingEvent.data);
}
function messagePassThroughLayout (loggingEvent) {
return formatLogData(loggingEvent.data);
}
/**
* PatternLayout
* Format for specifiers is %[padding].[truncation][field]{[format]}
* e.g. %5.10p - left pad the log level by 5 characters, up to a max of 10
* Fields can be any of:
* - %r time in toLocaleTimeString format
* - %p log level
* - %c log category
* - %m log data
* - %d date in various formats
* - %% %
* - %n newline
* - %x{<tokenname>} add dynamic tokens to your log. Tokens are specified in the tokens parameter
* You can use %[ and %] to define a colored block.
*
* Tokens are specified as simple key:value objects.
* The key represents the token name whereas the value can be a string or function
* which is called to extract the value to put in the log message. If token is not
* found, it doesn't replace the field.
*
* A sample token would be: { "pid" : function() { return process.pid; } }
*
* Takes a pattern string, array of tokens and returns a layout function.
* @param {String} Log format pattern String
* @param {object} map object of different tokens
* @return {Function}
* @author Stephan Strittmatter
* @author Jan Schmidle
*/
function patternLayout (pattern, tokens) {
var TTCC_CONVERSION_PATTERN = "%r %p %c - %m%n";
var regex = /%(-?[0-9]+)?(\.?[0-9]+)?([\[\]cdmnprx%])(\{([^\}]+)\})?|([^%]+)/;
pattern = pattern || TTCC_CONVERSION_PATTERN;
return function(loggingEvent) {
var formattedString = "";
var result;
var searchString = pattern;
while ((result = regex.exec(searchString))) {
var matchedString = result[0];
var padding = result[1];
var truncation = result[2];
var conversionCharacter = result[3];
var specifier = result[5];
var text = result[6];
// Check if the pattern matched was just normal text
if (text) {
formattedString += "" + text;
} else {
// Create a raw replacement string based on the conversion
// character and specifier
var replacement = "";
switch(conversionCharacter) {
case "c":
var loggerName = loggingEvent.categoryName;
if (specifier) {
var precision = parseInt(specifier, 10);
var loggerNameBits = loggingEvent.categoryName.split(".");
if (precision >= loggerNameBits.length) {
replacement = loggerName;
} else {
replacement = loggerNameBits.slice(loggerNameBits.length - precision).join(".");
}
} else {
replacement = loggerName;
}
break;
case "d":
var format = dateFormat.ISO8601_FORMAT;
if (specifier) {
format = specifier;
// Pick up special cases
if (format == "ISO8601") {
format = dateFormat.ISO8601_FORMAT;
} else if (format == "ABSOLUTE") {
format = dateFormat.ABSOLUTETIME_FORMAT;
} else if (format == "DATE") {
format = dateFormat.DATETIME_FORMAT;
}
}
// Format the date
replacement = dateFormat.asString(format, loggingEvent.startTime);
break;
case "m":
replacement = formatLogData(loggingEvent.data);
break;
case "n":
replacement = eol;
break;
case "p":
replacement = loggingEvent.level.toString();
break;
case "r":
replacement = "" + loggingEvent.startTime.toLocaleTimeString();
break;
case "[":
replacement = colorizeStart(colours[loggingEvent.level.toString()]);
break;
case "]":
replacement = colorizeEnd(colours[loggingEvent.level.toString()]);
break;
case "%":
replacement = "%";
break;
case "x":
if(typeof(tokens[specifier]) !== 'undefined') {
if(typeof(tokens[specifier]) === 'function') {
replacement = tokens[specifier]();
} else {
replacement = tokens[specifier];
}
} else {
replacement = matchedString;
}
break;
default:
replacement = matchedString;
break;
}
// Format the replacement according to any padding or
// truncation specified
var len;
// First, truncation
if (truncation) {
len = parseInt(truncation.substr(1), 10);
replacement = replacement.substring(0, len);
}
// Next, padding
if (padding) {
if (padding.charAt(0) == "-") {
len = parseInt(padding.substr(1), 10);
// Right pad with spaces
while (replacement.length < len) {
replacement += " ";
}
} else {
len = parseInt(padding, 10);
// Left pad with spaces
while (replacement.length < len) {
replacement = " " + replacement;
}
}
}
formattedString += replacement;
}
searchString = searchString.substr(result.index + result[0].length);
}
return formattedString;
};
};
module.exports = {
basicLayout: basicLayout
, messagePassThroughLayout: messagePassThroughLayout
, patternLayout: patternLayout
, colouredLayout: colouredLayout
, coloredLayout: colouredLayout
, layout: function(name, config) {
return layoutMakers[name] && layoutMakers[name](config);
}
};
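
A sketch of building a pattern layout with a custom token and applying it to a hand-made logging event shaped like the ones lib/logger.js produces; the "pid" token and category name are example choices:

```javascript
var layouts = require('./lib/layouts');

var layout = layouts.layout('pattern', {
  pattern: '%d{ABSOLUTE} %5.5p %c %x{pid} - %m%n',
  tokens: { pid: function () { return process.pid; } }
});

// minimal event: startTime, categoryName, level (with toString) and a data array
var event = {
  startTime: new Date(),
  categoryName: 'demo',
  level: { toString: function () { return 'INFO'; } },
  data: ['%s logged in, attempt %d', 'alice', 3]
};

process.stdout.write(layout(event));   // e.g. "13:44:01.123  INFO demo 1234 - alice logged in, attempt 3\n"
```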

67
lib/levels.js Normal file

@@ -0,0 +1,67 @@
function Level(level, levelStr) {
this.level = level;
this.levelStr = levelStr;
}
/**
* converts given String to corresponding Level
* @param {String} sArg String value of Level OR Log4js.Level
* @param {Log4js.Level} defaultLevel default Level, if no String representation
* @return Level object
* @type Log4js.Level
*/
function toLevel(sArg, defaultLevel) {
if (!sArg) {
return defaultLevel;
}
if (typeof sArg == "string") {
var s = sArg.toUpperCase();
if (module.exports[s]) {
return module.exports[s];
} else {
return defaultLevel;
}
}
return toLevel(sArg.toString());
};
Level.prototype.toString = function() {
return this.levelStr;
};
Level.prototype.isLessThanOrEqualTo = function(otherLevel) {
if (typeof otherLevel === "string") {
otherLevel = toLevel(otherLevel);
}
return this.level <= otherLevel.level;
};
Level.prototype.isGreaterThanOrEqualTo = function(otherLevel) {
if (typeof otherLevel === "string") {
otherLevel = toLevel(otherLevel);
}
return this.level >= otherLevel.level;
};
Level.prototype.isEqualTo = function(otherLevel) {
if (typeof otherLevel == "string") {
otherLevel = toLevel(otherLevel);
}
return this.level === otherLevel.level;
}
module.exports = {
ALL: new Level(Number.MIN_VALUE, "ALL")
, TRACE: new Level(5000, "TRACE")
, DEBUG: new Level(10000, "DEBUG")
, INFO: new Level(20000, "INFO")
, WARN: new Level(30000, "WARN")
, ERROR: new Level(40000, "ERROR")
, FATAL: new Level(50000, "FATAL")
, OFF: new Level(Number.MAX_VALUE, "OFF")
, toLevel: toLevel
};
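
A small sketch of the levels API exported above, run from the repository root:

```javascript
var levels = require('./lib/levels');

var threshold = levels.toLevel('WARN');                        // string -> Level object
console.log(levels.ERROR.isGreaterThanOrEqualTo(threshold));   // true
console.log(levels.INFO.isGreaterThanOrEqualTo('warn'));       // false - strings are coerced via toLevel
console.log(levels.toLevel('nonsense', levels.DEBUG).toString());  // "DEBUG" (falls back to the default)
```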


@@ -15,28 +15,28 @@
/*jsl:option explicit*/
/**
* @fileoverview log4js is a library to log in JavaScript in similar manner
* @fileoverview log4js is a library to log in JavaScript in similar manner
* than in log4j for Java. The API should be nearly the same.
*
* This file contains all log4js code and is the only file required for logging.
*
*
* <h3>Example:</h3>
* <pre>
* var logging = require('log4js-node')();
* var logging = require('log4js');
* //add an appender that logs all messages to stdout.
* logging.addAppender(logging.consoleAppender());
* //add an appender that logs "some-category" to a file
* logging.addAppender(logging.fileAppender("file.log"), "some-category");
* //get a logger
* var log = logging.getLogger("some-category");
* var log = logging.getLogger("some-category");
* log.setLevel(logging.levels.TRACE); //set the Level
*
*
* ...
*
*
* //call the log
* log.trace("trace me" );
* </pre>
*
* NOTE: the authors below are the original browser-based log4js authors
* don't try to contact them about bugs in this version :)
* @version 1.0
* @author Stephan Strittmatter - http://jroller.com/page/stritti
* @author Seth Chisamore - http://www.chisamore.com
@@ -44,656 +44,276 @@
* @static
* Website: http://log4js.berlios.de
*/
module.exports = function (fileSystem, standardOutput, configPaths) {
var fs = fileSystem || require('fs'),
standardOutput = standardOutput || console.log,
configPaths = configPaths || require.paths,
sys = require('sys'),
events = require('events'),
path = require('path'),
DEFAULT_CATEGORY = '[default]',
ALL_CATEGORIES = '[all]',
loggers = {},
appenders = {},
levels = {
ALL: new Level(Number.MIN_VALUE, "ALL", "grey"),
TRACE: new Level(5000, "TRACE", "blue"),
DEBUG: new Level(10000, "DEBUG", "cyan"),
INFO: new Level(20000, "INFO", "green"),
WARN: new Level(30000, "WARN", "yellow"),
ERROR: new Level(40000, "ERROR", "red"),
FATAL: new Level(50000, "FATAL", "magenta"),
OFF: new Level(Number.MAX_VALUE, "OFF", "grey")
},
appenderMakers = {
"file": function(config) {
var layout;
if (config.layout) {
layout = layoutMakers[config.layout.type](config.layout);
}
return fileAppender(config.filename, layout, config.maxLogSize, config.backups, config.pollInterval);
},
"console": function(config) {
var layout;
if (config.layout) {
layout = layoutMakers[config.layout.type](config.layout);
}
return consoleAppender(layout);
},
"logLevelFilter": function(config) {
var appender = appenderMakers[config.appender.type](config.appender);
return logLevelFilter(config.level, appender);
}
},
layoutMakers = {
"messagePassThrough": function() { return messagePassThroughLayout; },
"basic": function() { return basicLayout; },
"pattern": function (config) {
var pattern = config.pattern || undefined;
return patternLayout(pattern);
}
};
/**
* Get a logger instance. Instance is cached on categoryName level.
* @param {String} categoryName name of category to log to.
* @return {Logger} instance of logger for the category
* @static
*/
function getLogger (categoryName) {
// Use default logger if categoryName is not specified or invalid
if (!(typeof categoryName == "string")) {
categoryName = DEFAULT_CATEGORY;
}
var appenderList;
if (!loggers[categoryName]) {
// Create the logger for this name if it doesn't already exist
loggers[categoryName] = new Logger(categoryName);
if (appenders[categoryName]) {
appenderList = appenders[categoryName];
appenderList.forEach(function(appender) {
loggers[categoryName].addListener("log", appender);
});
}
if (appenders[ALL_CATEGORIES]) {
appenderList = appenders[ALL_CATEGORIES];
appenderList.forEach(function(appender) {
loggers[categoryName].addListener("log", appender);
});
}
}
return loggers[categoryName];
}
/**
* args are appender, then zero or more categories
*/
function addAppender () {
var args = Array.prototype.slice.call(arguments);
var appender = args.shift();
if (args.length == 0 || args[0] === undefined) {
args = [ ALL_CATEGORIES ];
}
//argument may already be an array
if (args[0].forEach) {
args = args[0];
}
args.forEach(function(category) {
if (!appenders[category]) {
appenders[category] = [];
}
appenders[category].push(appender);
if (category === ALL_CATEGORIES) {
for (var logger in loggers) {
if (loggers.hasOwnProperty(logger)) {
loggers[logger].addListener("log", appender);
}
}
} else if (loggers[category]) {
loggers[category].addListener("log", appender);
}
});
}
function clearAppenders () {
appenders = [];
for (var logger in loggers) {
if (loggers.hasOwnProperty(logger)) {
loggers[logger].removeAllListeners("log");
}
}
}
function configure (configurationFile) {
if (configurationFile) {
try {
var config = JSON.parse(fs.readFileSync(configurationFile, "utf8"));
configureAppenders(config.appenders);
configureLevels(config.levels);
} catch (e) {
throw new Error("Problem reading log4js config file " + configurationFile + ". Error was " + e.message);
}
}
}
function findConfiguration() {
//add current directory onto the list of configPaths
var paths = ['.'].concat(configPaths);
//add this module's directory to the end of the list, so that we pick up the default config
paths.push(__dirname);
var pathsWithConfig = paths.filter( function (pathToCheck) {
try {
fs.statSync(path.join(pathToCheck, "log4js.json"));
return true;
} catch (e) {
return false;
}
});
if (pathsWithConfig.length > 0) {
return path.join(pathsWithConfig[0], 'log4js.json');
}
return undefined;
}
function configureAppenders(appenderList) {
clearAppenders();
if (appenderList) {
appenderList.forEach(function(appenderConfig) {
var appender = appenderMakers[appenderConfig.type](appenderConfig);
if (appender) {
addAppender(appender, appenderConfig.category);
} else {
throw new Error("log4js configuration problem for "+sys.inspect(appenderConfig));
}
});
} else {
addAppender(consoleAppender);
}
}
function configureLevels(levels) {
if (levels) {
for (var category in levels) {
if (levels.hasOwnProperty(category)) {
getLogger(category).setLevel(levels[category]);
}
}
}
}
function Level(level, levelStr, colour) {
this.level = level;
this.levelStr = levelStr;
this.colour = colour;
}
/**
* converts given String to corresponding Level
* @param {String} sArg String value of Level
* @param {Log4js.Level} defaultLevel default Level, if no String representation
* @return Level object
* @type Log4js.Level
*/
Level.toLevel = function(sArg, defaultLevel) {
if (sArg === null) {
return defaultLevel;
}
if (typeof sArg == "string") {
var s = sArg.toUpperCase();
if (levels[s]) {
return levels[s];
}
}
return defaultLevel;
};
Level.prototype.toString = function() {
return this.levelStr;
};
Level.prototype.isLessThanOrEqualTo = function(otherLevel) {
return this.level <= otherLevel.level;
};
Level.prototype.isGreaterThanOrEqualTo = function(otherLevel) {
return this.level >= otherLevel.level;
};
/**
* Models a logging event.
* @constructor
* @param {String} categoryName name of category
* @param {Log4js.Level} level level of message
* @param {String} message message to log
* @param {Log4js.Logger} logger the associated logger
* @author Seth Chisamore
*/
function LoggingEvent (categoryName, level, message, exception, logger) {
this.startTime = new Date();
this.categoryName = categoryName;
this.message = message;
this.exception = exception;
this.level = level;
this.logger = logger;
}
/**
* Logger to log messages.
* use {@see Log4js#getLogger(String)} to get an instance.
* @constructor
* @param name name of category to log to
* @author Stephan Strittmatter
*/
function Logger (name, level) {
this.category = name || DEFAULT_CATEGORY;
this.level = Level.toLevel(level, levels.TRACE);
}
sys.inherits(Logger, events.EventEmitter);
Logger.prototype.setLevel = function(level) {
this.level = Level.toLevel(level, levels.TRACE);
};
Logger.prototype.log = function(logLevel, message, exception) {
var loggingEvent = new LoggingEvent(this.category, logLevel, message, exception, this);
this.emit("log", loggingEvent);
};
Logger.prototype.isLevelEnabled = function(otherLevel) {
return this.level.isLessThanOrEqualTo(otherLevel);
};
['Trace','Debug','Info','Warn','Error','Fatal'].forEach(
function(levelString) {
var level = Level.toLevel(levelString);
Logger.prototype['is'+levelString+'Enabled'] = function() {
return this.isLevelEnabled(level);
};
Logger.prototype[levelString.toLowerCase()] = function (message, exception) {
if (this.isLevelEnabled(level)) {
this.log(level, message, exception);
}
};
}
);
/**
* Get the default logger instance.
* @return {Logger} instance of default logger
* @static
*/
function getDefaultLogger () {
return getLogger(DEFAULT_CATEGORY);
}
function consoleAppender (layout) {
layout = layout || colouredLayout;
return function(loggingEvent) {
standardOutput(layout(loggingEvent));
};
}
/**
* File Appender writing the logs to a text file. Supports rolling of logs by size.
*
* @param file file log messages will be written to
* @param layout a function that takes a logevent and returns a string (defaults to basicLayout).
* @param logSize - the maximum size (in bytes) for a log file, if not provided then logs won't be rotated.
* @param numBackups - the number of log files to keep after logSize has been reached (default 5)
* @param filePollInterval - the time in seconds between file size checks (default 30s)
*/
function fileAppender (file, layout, logSize, numBackups, filePollInterval) {
layout = layout || basicLayout;
//syncs are generally bad, but we need
//the file to be open before we start doing any writing.
var logFile = fs.openSync(file, 'a', 0644);
if (logSize > 0) {
setupLogRolling(logFile, file, logSize, numBackups || 5, (filePollInterval * 1000) || 30000);
}
return function(loggingEvent) {
fs.write(logFile, layout(loggingEvent)+'\n', null, "utf8");
};
}
function setupLogRolling (logFile, filename, logSize, numBackups, filePollInterval) {
fs.watchFile(filename,
{
persistent: false,
interval: filePollInterval
},
function (curr, prev) {
if (curr.size >= logSize) {
rollThatLog(logFile, filename, numBackups);
}
}
);
}
function rollThatLog (logFile, filename, numBackups) {
//doing all of this fs stuff sync, because I don't want to lose any log events.
//first close the current one.
fs.closeSync(logFile);
//roll the backups (rename file.n-1 to file.n, where n <= numBackups)
for (var i=numBackups; i > 0; i--) {
if (i > 1) {
if (fileExists(filename + '.' + (i-1))) {
fs.renameSync(filename+'.'+(i-1), filename+'.'+i);
}
} else {
fs.renameSync(filename, filename+'.1');
}
}
//open it up again
logFile = fs.openSync(filename, 'a', 0644);
}
function fileExists (filename) {
try {
fs.statSync(filename);
return true;
} catch (e) {
return false;
}
}
function logLevelFilter (levelString, appender) {
var level = Level.toLevel(levelString);
return function(logEvent) {
if (logEvent.level.isGreaterThanOrEqualTo(level)) {
appender(logEvent);
}
}
}
/**
* BasicLayout is a simple layout for storing the logs. The logs are stored
* in following format:
* <pre>
* [startTime] [logLevel] categoryName - message\n
* </pre>
*
* @author Stephan Strittmatter
*/
function basicLayout (loggingEvent) {
var timestampLevelAndCategory = '[' + loggingEvent.startTime.toFormattedString() + '] ';
timestampLevelAndCategory += '[' + loggingEvent.level.toString() + '] ';
timestampLevelAndCategory += loggingEvent.categoryName + ' - ';
var output = timestampLevelAndCategory + loggingEvent.message;
if (loggingEvent.exception) {
output += '\n'
output += timestampLevelAndCategory;
if (loggingEvent.exception.stack) {
output += loggingEvent.exception.stack;
} else {
output += loggingEvent.exception.name + ': '+loggingEvent.exception.message;
}
}
return output;
}
/**
* Taken from masylum's fork (https://github.com/masylum/log4js-node)
*/
function colorize (str, style) {
var styles = {
//styles
'bold' : [1, 22],
'italic' : [3, 23],
'underline' : [4, 24],
'inverse' : [7, 27],
//grayscale
'white' : [37, 39],
'grey' : [90, 39],
'black' : [90, 39],
//colors
'blue' : [34, 39],
'cyan' : [36, 39],
'green' : [32, 39],
'magenta' : [35, 39],
'red' : [31, 39],
'yellow' : [33, 39]
};
return '\033[' + styles[style][0] + 'm' + str +
'\033[' + styles[style][1] + 'm';
}
/**
* colouredLayout - taken from masylum's fork.
* same as basicLayout, but with colours.
*/
function colouredLayout (loggingEvent) {
var timestampLevelAndCategory = colorize('[' + loggingEvent.startTime.toFormattedString() + '] ', 'grey');
timestampLevelAndCategory += colorize(
'[' + loggingEvent.level.toString() + '] ', loggingEvent.level.colour
);
timestampLevelAndCategory += colorize(loggingEvent.categoryName + ' - ', 'grey');
var output = timestampLevelAndCategory + loggingEvent.message;
if (loggingEvent.exception) {
output += '\n'
output += timestampLevelAndCategory;
if (loggingEvent.exception.stack) {
output += loggingEvent.exception.stack;
} else {
output += loggingEvent.exception.name + ': '+loggingEvent.exception.message;
}
}
return output;
}
function messagePassThroughLayout (loggingEvent) {
return loggingEvent.message;
}
/**
* PatternLayout
* Takes a pattern string and returns a layout function.
* @author Stephan Strittmatter
*/
function patternLayout (pattern) {
var TTCC_CONVERSION_PATTERN = "%r %p %c - %m%n";
var regex = /%(-?[0-9]+)?(\.?[0-9]+)?([cdmnpr%])(\{([^\}]+)\})?|([^%]+)/;
pattern = pattern || patternLayout.TTCC_CONVERSION_PATTERN;
return function(loggingEvent) {
var formattedString = "";
var result;
var searchString = this.pattern;
while ((result = regex.exec(searchString))) {
var matchedString = result[0];
var padding = result[1];
var truncation = result[2];
var conversionCharacter = result[3];
var specifier = result[5];
var text = result[6];
// Check if the pattern matched was just normal text
if (text) {
formattedString += "" + text;
} else {
// Create a raw replacement string based on the conversion
// character and specifier
var replacement = "";
switch(conversionCharacter) {
case "c":
var loggerName = loggingEvent.categoryName;
if (specifier) {
var precision = parseInt(specifier, 10);
var loggerNameBits = loggingEvent.categoryName.split(".");
if (precision >= loggerNameBits.length) {
replacement = loggerName;
} else {
replacement = loggerNameBits.slice(loggerNameBits.length - precision).join(".");
}
} else {
replacement = loggerName;
}
break;
case "d":
var dateFormat = Date.ISO8601_FORMAT;
if (specifier) {
dateFormat = specifier;
// Pick up special cases
if (dateFormat == "ISO8601") {
dateFormat = Date.ISO8601_FORMAT;
} else if (dateFormat == "ABSOLUTE") {
dateFormat = Date.ABSOLUTETIME_FORMAT;
} else if (dateFormat == "DATE") {
dateFormat = Date.DATETIME_FORMAT;
}
}
// Format the date
replacement = loggingEvent.startTime.toFormattedString(dateFormat);
break;
case "m":
replacement = loggingEvent.message;
break;
case "n":
replacement = "\n";
break;
case "p":
replacement = loggingEvent.level.toString();
break;
case "r":
replacement = "" + loggingEvent.startTime.toLocaleTimeString();
break;
case "%":
replacement = "%";
break;
default:
replacement = matchedString;
break;
}
// Format the replacement according to any padding or
// truncation specified
var len;
// First, truncation
if (truncation) {
len = parseInt(truncation.substr(1), 10);
replacement = replacement.substring(0, len);
}
// Next, padding
if (padding) {
if (padding.charAt(0) == "-") {
len = parseInt(padding.substr(1), 10);
// Right pad with spaces
while (replacement.length < len) {
replacement += " ";
}
} else {
len = parseInt(padding, 10);
// Left pad with spaces
while (replacement.length < len) {
replacement = " " + replacement;
}
}
}
formattedString += replacement;
}
searchString = searchString.substr(result.index + result[0].length);
}
return formattedString;
};
};
//set ourselves up if we can find a default log4js.json
configure(findConfiguration());
return {
getLogger: getLogger,
getDefaultLogger: getDefaultLogger,
addAppender: addAppender,
clearAppenders: clearAppenders,
configure: configure,
levels: levels,
consoleAppender: consoleAppender,
fileAppender: fileAppender,
logLevelFilter: logLevelFilter,
basicLayout: basicLayout,
messagePassThroughLayout: messagePassThroughLayout,
patternLayout: patternLayout,
colouredLayout: colouredLayout,
coloredLayout: colouredLayout
};
}
Date.ISO8601_FORMAT = "yyyy-MM-dd hh:mm:ss.SSS";
Date.ISO8601_WITH_TZ_OFFSET_FORMAT = "yyyy-MM-ddThh:mm:ssO";
Date.DATETIME_FORMAT = "dd MMM YYYY hh:mm:ss.SSS";
Date.ABSOLUTETIME_FORMAT = "hh:mm:ss.SSS";
Date.prototype.toFormattedString = function(format) {
format = format || Date.ISO8601_FORMAT;
var vDay = addZero(this.getDate());
var vMonth = addZero(this.getMonth()+1);
var vYearLong = addZero(this.getFullYear());
var vYearShort = addZero(this.getFullYear().toString().substring(3,4));
var vYear = (format.indexOf("yyyy") > -1 ? vYearLong : vYearShort);
var vHour = addZero(this.getHours());
var vMinute = addZero(this.getMinutes());
var vSecond = addZero(this.getSeconds());
var vMillisecond = padWithZeros(this.getMilliseconds(), 3);
var vTimeZone = offset(this);
var formatted = format
.replace(/dd/g, vDay)
.replace(/MM/g, vMonth)
.replace(/y{1,4}/g, vYear)
.replace(/hh/g, vHour)
.replace(/mm/g, vMinute)
.replace(/ss/g, vSecond)
.replace(/SSS/g, vMillisecond)
.replace(/O/g, vTimeZone);
return formatted;
function padWithZeros(vNumber, width) {
var numAsString = vNumber + "";
while (numAsString.length < width) {
numAsString = "0" + numAsString;
}
return numAsString;
}
function addZero(vNumber) {
return padWithZeros(vNumber, 2);
}
/**
* Formats the TimeOffest
* Thanks to http://www.svendtofte.com/code/date_format/
* @private
*/
function offset(date) {
// Difference to Greenwich time (GMT) in hours
var os = Math.abs(date.getTimezoneOffset());
var h = String(Math.floor(os/60));
var m = String(os%60);
h.length == 1? h = "0"+h:1;
m.length == 1? m = "0"+m:1;
return date.getTimezoneOffset() < 0 ? "+"+h+m : "-"+h+m;
}
var events = require('events')
, fs = require('fs')
, path = require('path')
, util = require('util')
, layouts = require('./layouts')
, levels = require('./levels')
, LoggingEvent = require('./logger').LoggingEvent
, Logger = require('./logger').Logger
, ALL_CATEGORIES = '[all]'
, appenders = {}
, loggers = {}
, appenderMakers = {}
, defaultConfig = {
appenders: [
{ type: "console" }
],
replaceConsole: false
};
/**
* Get a logger instance. Instance is cached on categoryName level.
* @param {String} categoryName name of category to log to.
* @return {Logger} instance of logger for the category
* @static
*/
function getLogger (categoryName) {
// Use default logger if categoryName is not specified or invalid
if (!(typeof categoryName == "string")) {
categoryName = Logger.DEFAULT_CATEGORY;
}
var appenderList;
if (!loggers[categoryName]) {
// Create the logger for this name if it doesn't already exist
loggers[categoryName] = new Logger(categoryName);
if (appenders[categoryName]) {
appenderList = appenders[categoryName];
appenderList.forEach(function(appender) {
loggers[categoryName].addListener("log", appender);
});
}
if (appenders[ALL_CATEGORIES]) {
appenderList = appenders[ALL_CATEGORIES];
appenderList.forEach(function(appender) {
loggers[categoryName].addListener("log", appender);
});
}
}
return loggers[categoryName];
}
/**
* args are appender, then zero or more categories
*/
function addAppender () {
var args = Array.prototype.slice.call(arguments);
var appender = args.shift();
if (args.length == 0 || args[0] === undefined) {
args = [ ALL_CATEGORIES ];
}
//argument may already be an array
if (Array.isArray(args[0])) {
args = args[0];
}
args.forEach(function(category) {
if (!appenders[category]) {
appenders[category] = [];
}
appenders[category].push(appender);
if (category === ALL_CATEGORIES) {
for (var logger in loggers) {
if (loggers.hasOwnProperty(logger)) {
loggers[logger].addListener("log", appender);
}
}
} else if (loggers[category]) {
loggers[category].addListener("log", appender);
}
});
}
function clearAppenders () {
appenders = {};
for (var logger in loggers) {
if (loggers.hasOwnProperty(logger)) {
loggers[logger].removeAllListeners("log");
}
}
}
function configureAppenders(appenderList, options) {
clearAppenders();
if (appenderList) {
appenderList.forEach(function(appenderConfig) {
loadAppender(appenderConfig.type);
var appender;
appenderConfig.makers = appenderMakers;
appender = appenderMakers[appenderConfig.type](appenderConfig, options);
if (appender) {
addAppender(appender, appenderConfig.category);
} else {
throw new Error("log4js configuration problem for "+util.inspect(appenderConfig));
}
});
}
}
function configureLevels(levels) {
if (levels) {
for (var category in levels) {
if (levels.hasOwnProperty(category)) {
getLogger(category).setLevel(levels[category]);
}
}
}
}
function setGlobalLogLevel(level) {
Logger.prototype.level = levels.toLevel(level, levels.TRACE);
}
/**
* Get the default logger instance.
* @return {Logger} instance of default logger
* @static
*/
function getDefaultLogger () {
return getLogger(Logger.DEFAULT_CATEGORY);
}
var configState = {};
function loadConfigurationFile(filename) {
if (filename && (!configState.lastFilename || filename !== configState.lastFilename ||
!configState.lastMTime || fs.statSync(filename).mtime !== configState.lastMTime)) {
configState.lastFilename = filename;
configState.lastMTime = fs.statSync(filename).mtime;
return JSON.parse(fs.readFileSync(filename, "utf8"));
}
return undefined;
}
function configureOnceOff(config, options) {
if (config) {
try {
configureAppenders(config.appenders, options);
configureLevels(config.levels);
if (config.replaceConsole) {
replaceConsole();
} else {
restoreConsole();
}
} catch (e) {
throw new Error("Problem reading log4js config " + util.inspect(config) + ". Error was \"" + e.message + "\" ("+e.stack+")");
}
}
}
function reloadConfiguration() {
var filename = configState.filename,
mtime;
if (!filename) {
// can't find anything to reload
return;
}
try {
mtime = fs.statSync(filename).mtime;
} catch (e) {
getLogger('log4js').warn('Failed to load configuration file ' + filename);
return;
}
if (configState.lastFilename && configState.lastFilename === filename) {
if (mtime.getTime() > configState.lastMTime.getTime()) {
configureOnceOff(loadConfigurationFile(filename));
}
} else {
configureOnceOff(loadConfigurationFile(filename));
}
}
function initReloadConfiguration(filename, options) {
if (configState.timerId) {
clearInterval(configState.timerId);
delete configState.timerId;
}
configState.filename = filename;
configState.timerId = setInterval(reloadConfiguration, options.reloadSecs*1000);
}
function configure(configurationFileOrObject, options) {
var config = configurationFileOrObject;
config = config || process.env.LOG4JS_CONFIG;
options = options || {};
if (config === undefined || config === null || typeof(config) === 'string') {
if (options.reloadSecs) {
initReloadConfiguration(config, options);
}
config = loadConfigurationFile(config) || defaultConfig;
} else {
if (options.reloadSecs) {
getLogger('log4js').warn('Ignoring configuration reload parameter for "object" configuration.');
}
}
configureOnceOff(config, options);
}
var originalConsoleFunctions = {
log: console.log,
debug: console.debug,
info: console.info,
warn: console.warn,
error: console.error
};
function replaceConsole(logger) {
function replaceWith(fn) {
return function() {
fn.apply(logger, arguments);
}
}
logger = logger || getLogger("console");
['log','debug','info','warn','error'].forEach(function (item) {
console[item] = replaceWith(item === 'log' ? logger.info : logger[item]);
});
}
function restoreConsole() {
['log', 'debug', 'info', 'warn', 'error'].forEach(function (item) {
console[item] = originalConsoleFunctions[item];
});
}
function loadAppender(appender) {
var appenderModule;
try {
appenderModule = require('./appenders/' + appender);
} catch (e) {
appenderModule = require(appender);
}
module.exports.appenders[appender] = appenderModule.appender.bind(appenderModule);
appenderMakers[appender] = appenderModule.configure.bind(appenderModule);
}
module.exports = {
getLogger: getLogger,
getDefaultLogger: getDefaultLogger,
addAppender: addAppender,
loadAppender: loadAppender,
clearAppenders: clearAppenders,
configure: configure,
replaceConsole: replaceConsole,
restoreConsole: restoreConsole,
levels: levels,
setGlobalLogLevel: setGlobalLogLevel,
layouts: layouts,
appenders: {},
appenderMakers: appenderMakers,
connectLogger: require('./connect-logger').connectLogger
};
//set ourselves up
configure();
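
A minimal end-to-end sketch of the module as rewritten above: configure can take a file path or a plain object (an object is shown here), then loggers are fetched by category; the filenames and category names are placeholders:

```javascript
var log4js = require('log4js');

log4js.configure({
  appenders: [
    { type: 'console' },
    { type: 'file', filename: 'app.log', maxLogSize: 10485760, backups: 3, category: 'app' }
  ],
  levels: { app: 'DEBUG' },
  replaceConsole: false
});

var logger = log4js.getLogger('app');
logger.debug('starting up');
logger.error('something failed:', new Error('boom'));
```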

78
lib/logger.js Normal file

@@ -0,0 +1,78 @@
var levels = require('./levels'),
util = require('util'),
events = require('events'),
DEFAULT_CATEGORY = '[default]';
/**
* Models a logging event.
* @constructor
* @param {String} categoryName name of category
* @param {Log4js.Level} level level of message
* @param {Array} data objects to log
* @param {Log4js.Logger} logger the associated logger
* @author Seth Chisamore
*/
function LoggingEvent (categoryName, level, data, logger) {
this.startTime = new Date();
this.categoryName = categoryName;
this.data = data;
this.level = level;
this.logger = logger;
}
/**
* Logger to log messages.
* use {@see Log4js#getLogger(String)} to get an instance.
* @constructor
* @param name name of category to log to
* @author Stephan Strittmatter
*/
function Logger (name, level) {
this.category = name || DEFAULT_CATEGORY;
if (! this.level) {
this.__proto__.level = levels.TRACE;
}
}
util.inherits(Logger, events.EventEmitter);
Logger.DEFAULT_CATEGORY = DEFAULT_CATEGORY;
Logger.prototype.setLevel = function(level) {
this.level = levels.toLevel(level, this.level || levels.TRACE);
};
Logger.prototype.removeLevel = function() {
delete this.level;
};
Logger.prototype.log = function() {
var args = Array.prototype.slice.call(arguments)
, logLevel = args.shift()
, loggingEvent = new LoggingEvent(this.category, logLevel, args, this);
this.emit("log", loggingEvent);
};
Logger.prototype.isLevelEnabled = function(otherLevel) {
return this.level.isLessThanOrEqualTo(otherLevel);
};
['Trace','Debug','Info','Warn','Error','Fatal'].forEach(
function(levelString) {
var level = levels.toLevel(levelString);
Logger.prototype['is'+levelString+'Enabled'] = function() {
return this.isLevelEnabled(level);
};
Logger.prototype[levelString.toLowerCase()] = function () {
if (this.isLevelEnabled(level)) {
var args = Array.prototype.slice.call(arguments);
args.unshift(level);
Logger.prototype.log.apply(this, args);
}
};
}
);
exports.LoggingEvent = LoggingEvent;
exports.Logger = Logger;


@@ -0,0 +1,89 @@
var fs = require('fs'),
stream,
util = require('util'),
semver = require('semver');
if (semver.satisfies(process.version, '>=0.10.0')) {
stream = require('stream');
} else {
stream = require('readable-stream');
}
var debug;
if (process.env.NODE_DEBUG && /\blog4js\b/.test(process.env.NODE_DEBUG)) {
debug = function(message) { console.error('LOG4JS: (BaseRollingFileStream) %s', message); };
} else {
debug = function() { };
}
module.exports = BaseRollingFileStream;
function BaseRollingFileStream(filename, options) {
debug("In BaseRollingFileStream");
this.filename = filename;
this.options = options || { encoding: 'utf8', mode: 0644, flags: 'a' };
this.currentSize = 0;
function currentFileSize(file) {
var fileSize = 0;
try {
fileSize = fs.statSync(file).size;
} catch (e) {
// file does not exist
}
return fileSize;
}
function throwErrorIfArgumentsAreNotValid() {
if (!filename) {
throw new Error("You must specify a filename");
}
}
throwErrorIfArgumentsAreNotValid();
debug("Calling BaseRollingFileStream.super");
BaseRollingFileStream.super_.call(this);
this.openTheStream();
this.currentSize = currentFileSize(this.filename);
}
util.inherits(BaseRollingFileStream, stream.Writable);
BaseRollingFileStream.prototype._write = function(chunk, encoding, callback) {
var that = this;
function writeTheChunk() {
debug("writing the chunk to the underlying stream");
that.currentSize += chunk.length;
that.theStream.write(chunk, encoding, callback);
}
debug("in _write");
if (this.shouldRoll()) {
this.currentSize = 0;
this.roll(this.filename, writeTheChunk);
} else {
writeTheChunk();
}
};
BaseRollingFileStream.prototype.openTheStream = function(cb) {
debug("opening the underlying stream");
this.theStream = fs.createWriteStream(this.filename, this.options);
if (cb) {
this.theStream.on("open", cb);
}
};
BaseRollingFileStream.prototype.closeTheStream = function(cb) {
debug("closing the underlying stream");
this.theStream.end(cb);
};
BaseRollingFileStream.prototype.shouldRoll = function() {
return false; // default behaviour is never to roll
};
BaseRollingFileStream.prototype.roll = function(filename, callback) {
callback(); // default behaviour is not to do anything
};


@@ -0,0 +1,82 @@
var BaseRollingFileStream = require('./BaseRollingFileStream'),
format = require('../date_format'),
async = require('async'),
fs = require('fs'),
util = require('util');
module.exports = DateRollingFileStream;
var debug;
if (process.env.NODE_DEBUG && /\blog4js\b/.test(process.env.NODE_DEBUG)) {
debug = function(message) { console.error('LOG4JS: (DateRollingFileStream) %s', message); };
} else {
debug = function() { };
}
function DateRollingFileStream(filename, pattern, options, now) {
debug("Now is " + now);
if (pattern && typeof(pattern) === 'object') {
now = options;
options = pattern;
pattern = null;
}
this.pattern = pattern || '.yyyy-MM-dd';
this.now = now || Date.now;
this.lastTimeWeWroteSomething = format.asString(this.pattern, new Date(this.now()));
this.baseFilename = filename;
if (options) {
if (options.alwaysIncludePattern) {
filename = filename + this.lastTimeWeWroteSomething;
}
delete options.alwaysIncludePattern;
if (Object.keys(options).length === 0) {
options = null;
}
}
debug("this.now is " + this.now + ", now is " + now);
DateRollingFileStream.super_.call(this, filename, options);
}
util.inherits(DateRollingFileStream, BaseRollingFileStream);
DateRollingFileStream.prototype.shouldRoll = function() {
var lastTime = this.lastTimeWeWroteSomething,
thisTime = format.asString(this.pattern, new Date(this.now()));
debug("DateRollingFileStream.shouldRoll with now = " + this.now() + ", thisTime = " + thisTime + ", lastTime = " + lastTime);
this.lastTimeWeWroteSomething = thisTime;
this.previousTime = lastTime;
return thisTime !== lastTime;
};
DateRollingFileStream.prototype.roll = function(filename, callback) {
var that = this,
newFilename = this.baseFilename + this.previousTime;
debug("Starting roll");
async.series([
this.closeTheStream.bind(this),
deleteAnyExistingFile,
renameTheCurrentFile,
this.openTheStream.bind(this)
], callback);
function deleteAnyExistingFile(cb) {
//on windows, you can get a EEXIST error if you rename a file to an existing file
//so, we'll try to delete the file we're renaming to first
fs.unlink(newFilename, function (err) {
//ignore err: if we could not delete, it's most likely that it doesn't exist
cb();
});
}
function renameTheCurrentFile(cb) {
debug("Renaming the " + filename + " -> " + newFilename);
fs.rename(filename, newFilename, cb);
}
};
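
A sketch of using the stream directly (the appenders above normally construct it for you), run from the repository root; "app.log" and the hourly pattern are example choices and backpressure is ignored:

```javascript
var DateRollingFileStream = require('./lib/streams').DateRollingFileStream;

// rolls 'app.log' whenever the formatted date changes; the default pattern is '.yyyy-MM-dd'
var stream = new DateRollingFileStream('app.log', '.yyyy-MM-dd-hh');
stream.write('a log line\n', 'utf8');

// with alwaysIncludePattern the stream writes straight to e.g. app.log.2013-05-05-13
var patterned = new DateRollingFileStream('app.log', '.yyyy-MM-dd-hh', { alwaysIncludePattern: true });
patterned.write('another line\n', 'utf8');
```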


@@ -0,0 +1,94 @@
var BaseRollingFileStream = require('./BaseRollingFileStream'),
util = require('util'),
path = require('path'),
fs = require('fs'),
async = require('async');
var debug;
if (process.env.NODE_DEBUG && /\blog4js\b/.test(process.env.NODE_DEBUG)) {
debug = function(message) { console.error('LOG4JS: (RollingFileStream) %s', message); };
} else {
debug = function() { };
}
module.exports = RollingFileStream;
function RollingFileStream (filename, size, backups, options) {
this.size = size;
this.backups = backups || 1;
function throwErrorIfArgumentsAreNotValid() {
if (!filename || !size || size <= 0) {
throw new Error("You must specify a filename and file size");
}
}
throwErrorIfArgumentsAreNotValid();
RollingFileStream.super_.call(this, filename, options);
}
util.inherits(RollingFileStream, BaseRollingFileStream);
RollingFileStream.prototype.shouldRoll = function() {
debug("should roll with current size %d, and max size %d", this.currentSize, this.size);
return this.currentSize >= this.size;
};
RollingFileStream.prototype.roll = function(filename, callback) {
var that = this,
nameMatcher = new RegExp('^' + path.basename(filename));
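//helpers: pick out this log's files, parse their numeric suffix, and order them by it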
function justTheseFiles (item) {
return nameMatcher.test(item);
}
function index(filename_) {
return parseInt(filename_.substring((path.basename(filename) + '.').length), 10) || 0;
}
function byIndex(a, b) {
if (index(a) > index(b)) {
return 1;
} else if (index(a) < index(b) ) {
return -1;
} else {
return 0;
}
}
function increaseFileIndex (fileToRename, cb) {
var idx = index(fileToRename);
debug('Index of ' + fileToRename + ' is ' + idx);
if (idx < that.backups) {
//on windows, you can get an EEXIST error if you rename a file to an existing file
//so, we'll try to delete the file we're renaming to first
fs.unlink(filename + '.' + (idx+1), function (err) {
//ignore err: if we could not delete, it's most likely that it doesn't exist
debug('Renaming ' + fileToRename + ' -> ' + filename + '.' + (idx+1));
fs.rename(path.join(path.dirname(filename), fileToRename), filename + '.' + (idx + 1), cb);
});
} else {
cb();
}
}
function renameTheFiles(cb) {
//roll the backups (rename file.n to file.n+1, where n <= numBackups)
debug("Renaming the old files");
fs.readdir(path.dirname(filename), function (err, files) {
async.forEachSeries(
files.filter(justTheseFiles).sort(byIndex).reverse(),
increaseFileIndex,
cb
);
});
}
debug("Rolling, rolling, rolling");
async.series([
this.closeTheStream.bind(this),
renameTheFiles,
this.openTheStream.bind(this)
], callback);
};
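A matching sketch for the size-based stream above (illustrative values, not taken from this changeset):

```javascript
// Sketch only: roll once the file reaches 1024 bytes, keeping two backups.
var RollingFileStream = require('./lib/streams/RollingFileStream');

var stream = new RollingFileStream('/tmp/app.log', 1024, 2);

stream.write('a log line\n', 'utf8');
// On roll: app.log.1 -> app.log.2, app.log -> app.log.1, then app.log is
// re-opened; indexes at or above the backup count are left untouched.
stream.end();
```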

lib/streams/index.js Normal file

@@ -0,0 +1,2 @@
exports.RollingFileStream = require('./RollingFileStream');
exports.DateRollingFileStream = require('./DateRollingFileStream');


@@ -1,24 +1,42 @@
{
"name": "log4js",
"version": "0.2.0",
"description": "Port of Log4js to work with node.",
"keywords": [
"logging",
"log",
"log4j",
"node"
],
"main": "./lib/log4js",
"author": "Gareth Jones <gareth.jones@sensis.com.au>",
"bugs": {
"web": "http://github.com/csausdev/log4js-node/issues"
},
"engines": [ "node >=0.1.100" ],
"scripts": {
"test": "vows test/logging.js"
},
"directories": {
"test": "test",
"lib": "lib"
}
"name": "log4js",
"version": "0.6.3",
"description": "Port of Log4js to work with node.",
"keywords": [
"logging",
"log",
"log4j",
"node"
],
"main": "./lib/log4js",
"author": "Gareth Jones <gareth.jones@sensis.com.au>",
"repository": {
"type": "git",
"url": "https://github.com/nomiddlename/log4js-node.git"
},
"bugs": {
"url": "http://github.com/nomiddlename/log4js-node/issues"
},
"engines": {
"node": ">=0.8"
},
"scripts": {
"test": "vows"
},
"directories": {
"test": "test",
"lib": "lib"
},
"dependencies": {
"async": "0.1.15",
"dequeue": "1.0.3",
"semver": "~1.1.4",
"readable-stream": "~1.0.2"
},
"devDependencies": {
"vows": "0.7.0",
"sandboxed-module": "0.1.3",
"hook.io": "0.8.10",
"underscore": "1.2.1"
}
}

Binary image files removed (not shown): six images of 154 B, 321 B, 2.5 KiB, 4.8 KiB, 3.5 KiB, and 145 B.


@@ -1,149 +0,0 @@
body.jspec {
margin: 45px 0;
font: 12px "Helvetica Neue Light", "Lucida Grande", "Calibri", "Arial", sans-serif;
background: #efefef url(images/bg.png) top left repeat-x;
text-align: center;
}
#jspec {
margin: 0 auto;
padding-top: 30px;
width: 1008px;
background: url(images/vr.png) top left repeat-y;
text-align: left;
}
#jspec-top {
position: relative;
margin: 0 auto;
width: 1008px;
height: 40px;
background: url(images/sprites.bg.png) top left no-repeat;
}
#jspec-bottom {
margin: 0 auto;
width: 1008px;
height: 15px;
background: url(images/sprites.bg.png) bottom left no-repeat;
}
#jspec .loading {
margin-top: -45px;
width: 1008px;
height: 80px;
background: url(images/loading.gif) 50% 50% no-repeat;
}
#jspec-title {
position: absolute;
top: 15px;
left: 20px;
width: 160px;
font-size: 22px;
font-weight: normal;
background: url(images/sprites.png) 0 -126px no-repeat;
text-align: center;
}
#jspec-title em {
font-size: 10px;
font-style: normal;
color: #BCC8D1;
}
#jspec-report * {
margin: 0;
padding: 0;
background: none;
border: none;
}
#jspec-report {
padding: 15px 40px;
font: 11px "Helvetica Neue Light", "Lucida Grande", "Calibri", "Arial", sans-serif;
color: #7B8D9B;
}
#jspec-report.has-failures {
padding-bottom: 30px;
}
#jspec-report .hidden {
display: none;
}
#jspec-report .heading {
margin-bottom: 15px;
}
#jspec-report .heading span {
padding-right: 10px;
}
#jspec-report .heading .passes em {
color: #0ea0eb;
}
#jspec-report .heading .failures em {
color: #FA1616;
}
#jspec-report table {
font-size: 11px;
border-collapse: collapse;
}
#jspec-report td {
padding: 8px;
text-indent: 30px;
color: #7B8D9B;
}
#jspec-report tr.body {
display: none;
}
#jspec-report tr.body pre {
margin: 0;
padding: 0 0 5px 25px;
}
#jspec-report tr.even:hover + tr.body,
#jspec-report tr.odd:hover + tr.body {
display: block;
}
#jspec-report tr td:first-child em {
display: block;
clear: both;
font-style: normal;
font-weight: normal;
color: #7B8D9B;
}
#jspec-report tr.even:hover,
#jspec-report tr.odd:hover {
text-shadow: 1px 1px 1px #fff;
background: #F2F5F7;
}
#jspec-report td + td {
padding-right: 0;
width: 15px;
}
#jspec-report td.pass {
background: url(images/sprites.png) 3px -7px no-repeat;
}
#jspec-report td.fail {
background: url(images/sprites.png) 3px -158px no-repeat;
font-weight: bold;
color: #FC0D0D;
}
#jspec-report td.requires-implementation {
background: url(images/sprites.png) 3px -333px no-repeat;
}
#jspec-report tr.description td {
margin-top: 25px;
padding-top: 25px;
font-size: 12px;
font-weight: bold;
text-indent: 0;
color: #1a1a1a;
}
#jspec-report tr.description:first-child td {
border-top: none;
}
#jspec-report .assertion {
display: block;
float: left;
margin: 0 0 0 1px;
padding: 0;
width: 1px;
height: 5px;
background: #7B8D9B;
}
#jspec-report .assertion.failed {
background: red;
}
.jspec-sandbox {
display: none;
}


@@ -1,115 +0,0 @@
// JSpec - Growl - Copyright TJ Holowaychuk <tj@vision-media.ca> (MIT Licensed)
;(function(){
Growl = {
// --- Version
version: '1.0.0',
/**
* Execute the given _cmd_, returning an array of lines from stdout.
*
* Examples:
*
* Growl.exec('growlnotify', '-m', msg)
*
* @param {string ...} cmd
* @return {array}
* @api public
*/
exec: function(cmd) {
var lines = [], line
with (JavaImporter(java.lang, java.io)) {
var proccess = Runtime.getRuntime().exec(Array.prototype.slice.call(arguments))
var stream = new DataInputStream(proccess.getInputStream())
while (line = stream.readLine())
lines.push(line + '')
stream.close()
}
return lines
},
/**
* Return the extension of the given _path_ or null.
*
* @param {string} path
* @return {string}
* @api private
*/
extname: function(path) {
return path.lastIndexOf('.') != -1 ?
path.slice(path.lastIndexOf('.') + 1, path.length) :
null
},
/**
* Version of the 'growlnotify' binary.
*
* @return {string}
* @api private
*/
binVersion: function() {
try { return this.exec('growlnotify', '-v')[0].split(' ')[1] } catch (e) {}
},
/**
* Send growl notification _msg_ with _options_.
*
* Options:
*
* - title Notification title
* - sticky Make the notification stick (defaults to false)
* - name Application name (defaults to growlnotify)
* - image
* - path to an icon sets --iconpath
* - path to an image sets --image
* - capitalized word sets --appIcon
* - filename uses extname as --icon
* - otherwise treated as --icon
*
* Examples:
*
* Growl.notify('New email')
* Growl.notify('5 new emails', { title: 'Thunderbird' })
*
* @param {string} msg
* @param {options} hash
* @api public
*/
notify: function(msg, options) {
options = options || {}
var args = ['growlnotify', '-m', msg]
if (!this.binVersion()) throw new Error('growlnotify executable is required')
if (image = options.image) {
var flag, ext = this.extname(image)
flag = flag || ext == 'icns' && 'iconpath'
flag = flag || /^[A-Z]/.test(image) && 'appIcon'
flag = flag || /^png|gif|jpe?g$/.test(ext) && 'image'
flag = flag || ext && (image = ext) && 'icon'
flag = flag || 'icon'
args.push('--' + flag, image)
}
if (options.sticky) args.push('--sticky')
if (options.name) args.push('--name', options.name)
if (options.title) args.push(options.title)
this.exec.apply(this, args)
}
}
JSpec.include({
name: 'Growl',
reporting: function(options){
var stats = JSpec.stats
if (stats.failures) Growl.notify('failed ' + stats.failures + ' assertions', { title: 'JSpec'})
else Growl.notify('passed ' + stats.passes + ' assertions', { title: 'JSpec' })
}
})
})()


@@ -1,71 +0,0 @@
// JSpec - jQuery - Copyright TJ Holowaychuk <tj@vision-media.ca> (MIT Licensed)
JSpec
.requires('jQuery', 'when using jspec.jquery.js')
.include({
name: 'jQuery',
// --- Initialize
init : function() {
jQuery.ajaxSetup({ async: false })
},
// --- Utilities
utilities : {
element: jQuery,
elements: jQuery,
sandbox : function() {
return jQuery('<div class="sandbox"></div>')
}
},
// --- Matchers
matchers : {
have_tag : "jQuery(expected, actual).length == 1",
have_one : "alias have_tag",
have_tags : "jQuery(expected, actual).length > 1",
have_many : "alias have_tags",
have_child : "jQuery(actual).children(expected).length == 1",
have_children : "jQuery(actual).children(expected).length > 1",
have_text : "jQuery(actual).text() == expected",
have_value : "jQuery(actual).val() == expected",
be_enabled : "!jQuery(actual).attr('disabled')",
have_class : "jQuery(actual).hasClass(expected)",
be_visible : function(actual) {
return jQuery(actual).css('display') != 'none' &&
jQuery(actual).css('visibility') != 'hidden' &&
jQuery(actual).attr('type') != 'hidden'
},
be_hidden : function(actual) {
return !JSpec.does(actual, 'be_visible')
},
have_classes : function(actual) {
return !JSpec.any(JSpec.toArray(arguments, 1), function(arg){
return !JSpec.does(actual, 'have_class', arg)
})
},
have_attr : function(actual, attr, value) {
return value ? jQuery(actual).attr(attr) == value:
jQuery(actual).attr(attr)
},
'be disabled selected checked' : function(attr) {
return 'jQuery(actual).attr("' + attr + '")'
},
'have type id title alt href src sel rev name target' : function(attr) {
return function(actual, value) {
return JSpec.does(actual, 'have_attr', attr, value)
}
}
}
})

File diff suppressed because it is too large.


@@ -1,39 +0,0 @@
// JSpec - Shell - Copyright TJ Holowaychuk <tj@vision-media.ca> (MIT Licensed)
;(function(){
var _quit = quit
Shell = {
// --- Global
main: this,
// --- Commands
commands: {
quit: ['Terminate the shell', function(){ _quit() }],
exit: ['Terminate the shell', function(){ _quit() }],
p: ['Inspect an object', function(o){ return o.toSource() }]
},
/**
* Start the interactive shell.
*
* @api public
*/
start : function() {
for (var name in this.commands)
if (this.commands.hasOwnProperty(name))
this.commands[name][1].length ?
this.main[name] = this.commands[name][1] :
this.main.__defineGetter__(name, this.commands[name][1])
}
}
Shell.start()
})()


@@ -1,90 +0,0 @@
// JSpec - Mock Timers - Copyright TJ Holowaychuk <tj@vision-media.ca> (MIT Licensed)
;(function(){
/**
* Version.
*/
mockTimersVersion = '1.0.2'
/**
* Localized timer stack.
*/
var timers = []
/**
* Set mock timeout with _callback_ and timeout of _ms_.
*
* @param {function} callback
* @param {int} ms
* @return {int}
* @api public
*/
setTimeout = function(callback, ms) {
var id
return id = setInterval(function(){
callback()
clearInterval(id)
}, ms)
}
/**
* Set mock interval with _callback_ and interval of _ms_.
*
* @param {function} callback
* @param {int} ms
* @return {int}
* @api public
*/
setInterval = function(callback, ms) {
callback.step = ms, callback.current = callback.last = 0
return timers[timers.length] = callback, timers.length
}
/**
* Destroy timer with _id_.
*
* @param {int} id
* @return {bool}
* @api public
*/
clearInterval = clearTimeout = function(id) {
return delete timers[--id]
}
/**
* Reset timers.
*
* @return {array}
* @api public
*/
resetTimers = function() {
return timers = []
}
/**
* Increment each timers internal clock by _ms_.
*
* @param {int} ms
* @api public
*/
tick = function(ms) {
for (var i = 0, len = timers.length; i < len; ++i)
if (timers[i] && (timers[i].current += ms))
if (timers[i].current - timers[i].last >= timers[i].step) {
var times = Math.floor((timers[i].current - timers[i].last) / timers[i].step)
var remainder = (timers[i].current - timers[i].last) % timers[i].step
timers[i].last = timers[i].current - remainder
while (times-- && timers[i]) timers[i]()
}
}
})()


@@ -1,193 +0,0 @@
// JSpec - XHR - Copyright TJ Holowaychuk <tj@vision-media.ca> (MIT Licensed)
(function(){
// --- Original XMLHttpRequest
var OriginalXMLHttpRequest = 'XMLHttpRequest' in this ?
XMLHttpRequest :
function(){}
var OriginalActiveXObject = 'ActiveXObject' in this ?
ActiveXObject :
undefined
// --- MockXMLHttpRequest
var MockXMLHttpRequest = function() {
this.requestHeaders = {}
}
MockXMLHttpRequest.prototype = {
status: 0,
async: true,
readyState: 0,
responseText: '',
abort: function(){},
onreadystatechange: function(){},
/**
* Return response headers hash.
*/
getAllResponseHeaders : function(){
return this.responseHeaders
},
/**
* Return case-insensitive value for header _name_.
*/
getResponseHeader : function(name) {
return this.responseHeaders[name.toLowerCase()]
},
/**
* Set case-insensitive _value_ for header _name_.
*/
setRequestHeader : function(name, value) {
this.requestHeaders[name.toLowerCase()] = value
},
/**
* Open mock request.
*/
open : function(method, url, async, user, password) {
this.user = user
this.password = password
this.url = url
this.readyState = 1
this.method = method.toUpperCase()
if (async != undefined) this.async = async
if (this.async) this.onreadystatechange()
},
/**
* Send request _data_.
*/
send : function(data) {
var self = this
this.data = data
this.readyState = 4
if (this.method == 'HEAD') this.responseText = null
this.responseHeaders['content-length'] = (this.responseText || '').length
if(this.async) this.onreadystatechange()
lastRequest = function(){
return self
}
}
}
// --- Response status codes
JSpec.statusCodes = {
100: 'Continue',
101: 'Switching Protocols',
200: 'OK',
201: 'Created',
202: 'Accepted',
203: 'Non-Authoritative Information',
204: 'No Content',
205: 'Reset Content',
206: 'Partial Content',
300: 'Multiple Choice',
301: 'Moved Permanently',
302: 'Found',
303: 'See Other',
304: 'Not Modified',
305: 'Use Proxy',
307: 'Temporary Redirect',
400: 'Bad Request',
401: 'Unauthorized',
402: 'Payment Required',
403: 'Forbidden',
404: 'Not Found',
405: 'Method Not Allowed',
406: 'Not Acceptable',
407: 'Proxy Authentication Required',
408: 'Request Timeout',
409: 'Conflict',
410: 'Gone',
411: 'Length Required',
412: 'Precondition Failed',
413: 'Request Entity Too Large',
414: 'Request-URI Too Long',
415: 'Unsupported Media Type',
416: 'Requested Range Not Satisfiable',
417: 'Expectation Failed',
422: 'Unprocessable Entity',
500: 'Internal Server Error',
501: 'Not Implemented',
502: 'Bad Gateway',
503: 'Service Unavailable',
504: 'Gateway Timeout',
505: 'HTTP Version Not Supported'
}
/**
* Mock XMLHttpRequest requests.
*
* mockRequest().and_return('some data', 'text/plain', 200, { 'X-SomeHeader' : 'somevalue' })
*
* @return {hash}
* @api public
*/
function mockRequest() {
return { and_return : function(body, type, status, headers) {
XMLHttpRequest = MockXMLHttpRequest
ActiveXObject = false
status = status || 200
headers = headers || {}
headers['content-type'] = type
JSpec.extend(XMLHttpRequest.prototype, {
responseText: body,
responseHeaders: headers,
status: status,
statusText: JSpec.statusCodes[status]
})
}}
}
/**
* Unmock XMLHttpRequest requests.
*
* @api public
*/
function unmockRequest() {
XMLHttpRequest = OriginalXMLHttpRequest
ActiveXObject = OriginalActiveXObject
}
JSpec.include({
name: 'Mock XHR',
// --- Utilities
utilities : {
mockRequest: mockRequest,
unmockRequest: unmockRequest
},
// --- Hooks
afterSpec : function() {
unmockRequest()
},
// --- DSLs
DSLs : {
snake : {
mock_request: mockRequest,
unmock_request: unmockRequest,
last_request: function(){ return lastRequest() }
}
}
})
})()


@@ -1,144 +0,0 @@
describe 'log4js'
before
extend(context, {
log4js : require("log4js")()
});
end
before_each
log4js.clearAppenders();
event = '';
logger = log4js.getLogger('tests');
logger.setLevel("TRACE");
logger.addListener("log", function (logEvent) { event = logEvent; });
end
describe 'addAppender'
before_each
appenderEvent = undefined;
appender = function(logEvent) { appenderEvent = logEvent; };
end
describe 'without a category'
it 'should register the function as a listener for all loggers'
log4js.addAppender(appender);
logger.debug("This is a test");
appenderEvent.should.be event
end
it 'should also register as an appender for loggers if an appender for that category is defined'
var otherEvent;
log4js.addAppender(appender);
log4js.addAppender(function (evt) { otherEvent = evt; }, 'cheese');
var cheeseLogger = log4js.getLogger('cheese');
cheeseLogger.addListener("log", function (logEvent) { event = logEvent; });
cheeseLogger.debug('This is a test');
appenderEvent.should.be event
otherEvent.should.be event
otherEvent = undefined;
appenderEvent = undefined;
log4js.getLogger('pants').debug("this should not be propagated to otherEvent");
otherEvent.should.be undefined
appenderEvent.should.not.be undefined
appenderEvent.message.should.be "this should not be propagated to otherEvent"
cheeseLogger = null;
end
end
describe 'with a category'
it 'should only register the function as a listener for that category'
log4js.addAppender(appender, 'tests');
logger.debug('this is a test');
appenderEvent.should.be event
appenderEvent = undefined;
log4js.getLogger('some other category').debug('Cheese');
appenderEvent.should.be undefined
end
end
describe 'with multiple categories'
it 'should register the function as a listener for all the categories'
log4js.addAppender(appender, 'tests', 'biscuits');
logger.debug('this is a test');
appenderEvent.should.be event
appenderEvent = undefined;
var otherLogger = log4js.getLogger('biscuits');
otherLogger.debug("mmm... garibaldis");
appenderEvent.should.not.be undefined
appenderEvent.message.should.be "mmm... garibaldis"
appenderEvent = undefined;
otherLogger = null;
log4js.getLogger("something else").debug("pants");
appenderEvent.should.be undefined
end
it 'should register the function when the list of categories is an array'
log4js.addAppender(appender, ['tests', 'pants']);
logger.debug('this is a test');
appenderEvent.should.be event
appenderEvent = undefined;
var otherLogger = log4js.getLogger('pants');
otherLogger.debug("big pants");
appenderEvent.should.not.be undefined
appenderEvent.message.should.be "big pants"
appenderEvent = undefined;
otherLogger = null;
log4js.getLogger("something else").debug("pants");
appenderEvent.should.be undefined
end
end
end
describe 'basicLayout'
it 'should take a logevent and output a formatted string'
logger.debug('this is a test');
var output = log4js.basicLayout(event);
output.should.match /\[.*?\] \[DEBUG\] tests - this is a test/
end
it 'should output a stacktrace, message if the event has an error attached'
var error = new Error("Some made-up error");
var stack = error.stack.split(/\n/);
logger.debug('this is a test', error);
var output = log4js.basicLayout(event);
var lines = output.split(/\n/);
lines.length.should.be stack.length+1
lines[0].should.match /\[.*?\] \[DEBUG\] tests - this is a test/
lines[1].should.match /\[.*?\] \[DEBUG\] tests - Error: Some made-up error/
for (var i = 1; i < stack.length; i++) {
lines[i+1].should.eql stack[i]
}
end
it 'should output a name and message if the event has something that pretends to be an error'
logger.debug('this is a test', { name: 'Cheese', message: 'Gorgonzola smells.' });
var output = log4js.basicLayout(event);
var lines = output.split(/\n/);
lines.length.should.be 2
lines[0].should.match /\[.*?\] \[DEBUG\] tests - this is a test/
lines[1].should.match /\[.*?\] \[DEBUG\] tests - Cheese: Gorgonzola smells./
end
end
end

test/configuration-test.js Normal file

@@ -0,0 +1,131 @@
var assert = require('assert'),
vows = require('vows'),
sandbox = require('sandboxed-module');
function makeTestAppender() {
return {
configure: function(config, options) {
this.configureCalled = true;
this.config = config;
this.options = options;
return this.appender();
},
appender: function() {
var self = this;
return function(logEvt) { self.logEvt = logEvt; }
}
};
}
vows.describe('log4js configure').addBatch({
'appenders': {
'when specified by type': {
topic: function() {
var testAppender = makeTestAppender(),
log4js = sandbox.require(
'../lib/log4js',
{
requires: {
'./appenders/cheese': testAppender
}
}
);
log4js.configure(
{
appenders: [
{ type: "cheese", flavour: "gouda" }
]
},
{ pants: "yes" }
);
return testAppender;
},
'should load appender': function(testAppender) {
assert.ok(testAppender.configureCalled);
},
'should pass config to appender': function(testAppender) {
assert.equal(testAppender.config.flavour, 'gouda');
},
'should pass log4js options to appender': function(testAppender) {
assert.equal(testAppender.options.pants, 'yes');
}
},
'when core appender loaded via loadAppender': {
topic: function() {
var testAppender = makeTestAppender(),
log4js = sandbox.require(
'../lib/log4js',
{ requires: { './appenders/cheese': testAppender } }
);
log4js.loadAppender('cheese');
return log4js;
},
'should load appender from ../lib/appenders': function(log4js) {
assert.ok(log4js.appenders.cheese);
},
'should add appender configure function to appenderMakers' : function(log4js) {
assert.isFunction(log4js.appenderMakers.cheese);
}
},
'when appender in node_modules loaded via loadAppender': {
topic: function() {
var testAppender = makeTestAppender(),
log4js = sandbox.require(
'../lib/log4js',
{ requires: { 'some/other/external': testAppender } }
);
log4js.loadAppender('some/other/external');
return log4js;
},
'should load appender via require': function(log4js) {
assert.ok(log4js.appenders['some/other/external']);
},
'should add appender configure function to appenderMakers': function(log4js) {
assert.isFunction(log4js.appenderMakers['some/other/external']);
}
},
'when configuration file loaded via LOG4JS_CONFIG environment variable': {
topic: function() {
process.env.LOG4JS_CONFIG = 'some/path/to/mylog4js.json';
var fileRead = 0,
modulePath = 'some/path/to/mylog4js.json',
pathsChecked = [],
mtime = new Date(),
fakeFS = {
config: { appenders: [ { type: 'console', layout: { type: 'messagePassThrough' } } ],
levels: { 'a-test' : 'INFO' } },
readdirSync: function(dir) {
return require('fs').readdirSync(dir);
},
readFileSync: function (file, encoding) {
fileRead += 1;
assert.isString(file);
assert.equal(file, modulePath);
assert.equal(encoding, 'utf8');
return JSON.stringify(fakeFS.config);
},
statSync: function (path) {
pathsChecked.push(path);
if (path === modulePath) {
return { mtime: mtime };
} else {
throw new Error("no such file");
}
}
},
log4js = sandbox.require('../lib/log4js',
{
requires: {
'fs': fakeFS,
}
});
delete process.env.LOG4JS_CONFIG;
return fileRead;
},
'should load the specified local configuration file' : function(fileRead) {
assert.equal(fileRead, 1);
}
}
}
}).exportTo(module);
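The batch above stubs './appenders/cheese' with sandboxed-module to show that configure() resolves appenders by their type field and passes the remaining config (plus the global options) to them. A hedged sketch of the equivalent call against a real shipped appender; the filename and category are illustrative:

```javascript
var log4js = require('./lib/log4js');

// 'file' plays the role the fake 'cheese' appender plays in the test:
// it is loaded by type and handed the rest of this object as its config.
log4js.configure({
  appenders: [
    { type: 'file', filename: 'app.log', category: 'app' }
  ]
});

log4js.getLogger('app').info('configured by type');
```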


@@ -0,0 +1,126 @@
// This test shows unexpected behaviour for log4js.configure() in log4js-node@0.4.3 and earlier:
// 1) log4js.configure(), log4js.configure(null), log4js.configure({}), log4js.configure(<some object with no levels prop>)
// all set all loggers' levels to trace, even if they were previously set to something else.
// 2) log4js.configure({levels:{}}), log4js.configure({levels: {foo: bar}}) leaves previously set logger levels intact.
//
// Basic set up
var vows = require('vows');
var assert = require('assert');
var toLevel = require('../lib/levels').toLevel;
// uncomment one or other of the following to see progress (or not) while running the tests
// var showProgress = console.log;
var showProgress = function() {};
// Define the array of levels as string to iterate over.
var strLevels= ['Trace','Debug','Info','Warn','Error','Fatal'];
// setup the configurations we want to test
var configs = {
'nop': 'nop', // special case where the iterating vows generator will not call log4js.configure
'is undefined': undefined,
'is null': null,
'is empty': {},
'has no levels': {foo: 'bar'},
'has null levels': {levels: null},
'has empty levels': {levels: {}},
'has random levels': {levels: {foo: 'bar'}},
'has some valid levels': {levels: {A: 'INFO'}}
}
// Set up the basic vows batches for this test
var batches = [];
function getLoggerName(level) {
return level+'-logger';
}
// the common vows top-level context, whether log4js.configure is called or not
// just making sure that the code is common,
// so that there are no spurious errors in the tests themselves.
function getTopLevelContext(nop, configToTest, name) {
return {
topic: function() {
var log4js = require('../lib/log4js');
// create loggers for each level,
// keeping the level in the logger's name for traceability
strLevels.forEach(function(l) {
log4js.getLogger(getLoggerName(l)).setLevel(l);
});
if (!nop) {
showProgress('** Configuring log4js with', configToTest);
log4js.configure(configToTest);
}
else {
showProgress('** Not configuring log4js');
}
return log4js;
}
}
};
showProgress('Populating batch object...');
// Populating the batches programmatically,
// as there are (configs.length x strLevels.length x strLevels.length) = 324 possible test combinations
for (var cfg in configs) {
var configToTest = configs[cfg];
var nop = configToTest === 'nop';
var context;
if (nop) {
context = 'Setting up loggers with initial levels, then NOT setting a configuration,';
}
else {
context = 'Setting up loggers with initial levels, then setting a configuration which '+cfg+',';
}
showProgress('Setting up the vows batch and context for '+context);
// each config to be tested has its own vows batch with a single top-level context
var batch={};
batch[context]= getTopLevelContext(nop, configToTest, context);
batches.push(batch);
// each top-level context has strLevels sub-contexts, one per logger which has been set to a specific level in the top-level context's topic
strLevels.forEach(function (baseLevel) {
var baseLevelSubContext = 'and checking the logger whose level was set to '+baseLevel ;
batch[context][baseLevelSubContext] = {topic: baseLevel};
// each logging level has strLevels sub-contexts,
// to exhaustively test all the combinations of setLevel(baseLevel) and isLevelEnabled(comparisonLevel) per config
strLevels.forEach(function (comparisonLevel) {
var comparisonLevelSubContext = 'with isLevelEnabled('+comparisonLevel+')';
// calculate this independently of log4js, but we'll add a vow later on to check that we're not mismatched with log4js
var expectedResult = strLevels.indexOf(baseLevel) <= strLevels.indexOf(comparisonLevel);
// the topic simply gathers all the parameters for the vow into an object, to simplify the vow's work.
batch[context][baseLevelSubContext][comparisonLevelSubContext] = {topic: function(baseLevel, log4js){
return {comparisonLevel: comparisonLevel, baseLevel: baseLevel, log4js: log4js, expectedResult: expectedResult};
}};
var vow = 'should return '+expectedResult;
batch[context][baseLevelSubContext][comparisonLevelSubContext][vow] = function(topic){
var result = topic.log4js.getLogger(getLoggerName(topic.baseLevel)).isLevelEnabled(topic.log4js.levels.toLevel(topic.comparisonLevel));
assert.equal(result, topic.expectedResult, 'Failed: '+getLoggerName(topic.baseLevel)+'.isLevelEnabled( '+topic.comparisonLevel+' ) returned '+result);
};
// the extra vow to check the comparison between baseLevel and comparisonLevel we performed earlier matches log4js' comparison too
batch[context][baseLevelSubContext][comparisonLevelSubContext]['finally checking for comparison mismatch with log4js'] = function(topic){
var er = topic.log4js.levels.toLevel(topic.baseLevel).isLessThanOrEqualTo(topic.log4js.levels.toLevel(topic.comparisonLevel));
assert.equal(er, topic.expectedResult, 'Mismatch: for setLevel('+topic.baseLevel+') was expecting a comparison with '+topic.comparisonLevel+' to be '+topic.expectedResult);
};
});
});
};
showProgress('Running tests');
var v = vows.describe('log4js.configure(), with or without a "levels" property');
batches.forEach(function(batch) {v=v.addBatch(batch)});
v.export(module);

test/connect-logger-test.js Normal file

@@ -0,0 +1,128 @@
var vows = require('vows')
, assert = require('assert')
, levels = require('../lib/levels');
function MockLogger() {
var that = this;
this.messages = [];
this.log = function(level, message, exception) {
that.messages.push({ level: level, message: message });
};
this.isLevelEnabled = function(level) {
return level.isGreaterThanOrEqualTo(that.level);
};
this.level = levels.TRACE;
}
function MockRequest(remoteAddr, method, originalUrl) {
this.socket = { remoteAddress: remoteAddr };
this.originalUrl = originalUrl;
this.method = method;
this.httpVersionMajor = '5';
this.httpVersionMinor = '0';
this.headers = {}
}
function MockResponse(statusCode) {
this.statusCode = statusCode;
this.end = function(chunk, encoding) {
}
}
vows.describe('log4js connect logger').addBatch({
'getConnectLoggerModule': {
topic: function() {
var clm = require('../lib/connect-logger');
return clm;
},
'should return a "connect logger" factory' : function(clm) {
assert.isObject(clm);
},
'take a log4js logger and return a "connect logger"' : {
topic: function(clm) {
var ml = new MockLogger();
var cl = clm.connectLogger(ml);
return cl;
},
'should return a "connect logger"': function(cl) {
assert.isFunction(cl);
}
},
'log events' : {
topic: function(clm) {
var ml = new MockLogger();
var cl = clm.connectLogger(ml);
var req = new MockRequest('my.remote.addr', 'GET', 'http://url');
var res = new MockResponse(200);
cl(req, res, function() { });
res.end('chunk', 'encoding');
return ml.messages;
},
'check message': function(messages) {
assert.isArray(messages);
assert.equal(messages.length, 1);
assert.ok(levels.INFO.isEqualTo(messages[0].level));
assert.include(messages[0].message, 'GET');
assert.include(messages[0].message, 'http://url');
assert.include(messages[0].message, 'my.remote.addr');
assert.include(messages[0].message, '200');
}
},
'log events with level below logging level' : {
topic: function(clm) {
var ml = new MockLogger();
ml.level = levels.FATAL;
var cl = clm.connectLogger(ml);
var req = new MockRequest('my.remote.addr', 'GET', 'http://url');
var res = new MockResponse(200);
cl(req, res, function() { });
res.end('chunk', 'encoding');
return ml.messages;
},
'check message': function(messages) {
assert.isArray(messages);
assert.isEmpty(messages);
}
},
'log events with non-default level and custom format' : {
topic: function(clm) {
var ml = new MockLogger();
ml.level = levels.INFO;
var cl = clm.connectLogger(ml, { level: levels.INFO, format: ':method :url' } );
var req = new MockRequest('my.remote.addr', 'GET', 'http://url');
var res = new MockResponse(200);
cl(req, res, function() { });
res.end('chunk', 'encoding');
return ml.messages;
},
'check message': function(messages) {
assert.isArray(messages);
assert.equal(messages.length, 1);
assert.ok(levels.INFO.isEqualTo(messages[0].level));
assert.equal(messages[0].message, 'GET http://url');
}
}
}
}).export(module);
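The mocks above drive the middleware directly with fake req/res objects. In an application the same connect logger sits in a Connect/Express middleware stack; a hedged sketch mirroring the custom-format batch (the app itself is assumed, not part of this changeset):

```javascript
var log4js = require('./lib/log4js');
var httpLogger = log4js.getLogger('http');

// Same options as the 'non-default level and custom format' batch above.
var middleware = log4js.connectLogger(httpLogger, {
  level: log4js.levels.INFO,
  format: ':method :url'
});

// app.use(middleware); // in a Connect/Express application
```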


@@ -0,0 +1,129 @@
var vows = require('vows'),
assert = require('assert'),
path = require('path'),
fs = require('fs'),
log4js = require('../lib/log4js');
function removeFile(filename) {
return function() {
fs.unlink(path.join(__dirname, filename), function(err) {
if (err) {
console.log("Could not delete ", filename, err);
}
});
};
}
vows.describe('../lib/appenders/dateFile').addBatch({
'appender': {
'adding multiple dateFileAppenders': {
topic: function () {
var listenersCount = process.listeners('exit').length,
dateFileAppender = require('../lib/appenders/dateFile'),
count = 5,
logfile;
while (count--) {
logfile = path.join(__dirname, 'datefa-default-test' + count + '.log');
log4js.addAppender(dateFileAppender.appender(logfile));
}
return listenersCount;
},
teardown: function() {
removeFile('datefa-default-test0.log')();
removeFile('datefa-default-test1.log')();
removeFile('datefa-default-test2.log')();
removeFile('datefa-default-test3.log')();
removeFile('datefa-default-test4.log')();
},
'should only add one `exit` listener': function (initialCount) {
assert.equal(process.listeners('exit').length, initialCount + 1);
}
},
'with default settings': {
topic: function() {
var that = this,
testFile = path.join(__dirname, 'date-appender-default.log'),
appender = require('../lib/appenders/dateFile').appender(testFile),
logger = log4js.getLogger('default-settings');
log4js.clearAppenders();
log4js.addAppender(appender, 'default-settings');
logger.info("This should be in the file.");
setTimeout(function() {
fs.readFile(testFile, "utf8", that.callback);
}, 100);
},
teardown: removeFile('date-appender-default.log'),
'should write to the file': function(contents) {
assert.include(contents, 'This should be in the file');
},
'should use the basic layout': function(contents) {
assert.match(contents, /\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}\] \[INFO\] default-settings - /);
}
}
}
}).addBatch({
'configure': {
'with dateFileAppender': {
topic: function() {
var log4js = require('../lib/log4js')
, logger;
//this config file defines one file appender (to ./date-file-test.log)
//and sets the log level for "tests" to WARN
log4js.configure('test/with-dateFile.json');
logger = log4js.getLogger('tests');
logger.info('this should not be written to the file');
logger.warn('this should be written to the file');
fs.readFile(path.join(__dirname, 'date-file-test.log'), 'utf8', this.callback);
},
teardown: removeFile('date-file-test.log'),
'should load appender configuration from a json file': function(err, contents) {
assert.include(contents, 'this should be written to the file' + require('os').EOL);
assert.equal(contents.indexOf('this should not be written to the file'), -1);
}
},
'with options.alwaysIncludePattern': {
topic: function() {
var log4js = require('../lib/log4js')
, format = require('../lib/date_format')
, logger
, options = {
"appenders": [
{
"category": "tests",
"type": "dateFile",
"filename": "test/date-file-test",
"pattern": "-from-MM-dd.log",
"alwaysIncludePattern": true,
"layout": {
"type": "messagePassThrough"
}
}
]
}
, thisTime = format.asString(options.appenders[0].pattern, new Date());
log4js.clearAppenders();
log4js.configure(options);
logger = log4js.getLogger('tests');
logger.warn('this should be written to the file with the appended date');
this.teardown = removeFile('date-file-test' + thisTime);
fs.readFile(path.join(__dirname, 'date-file-test' + thisTime), 'utf8', this.callback);
},
'should create file with the correct pattern': function(contents) {
assert.include(contents, 'this should be written to the file with the appended date');
}
}
}
}).exportTo(module);

test/date_format-test.js Normal file

@@ -0,0 +1,23 @@
var vows = require('vows')
, assert = require('assert')
, dateFormat = require('../lib/date_format');
vows.describe('date_format').addBatch({
'Date extensions': {
topic: function() {
return new Date(2010, 0, 11, 14, 31, 30, 5);
},
'should format a date as string using a pattern': function(date) {
assert.equal(
dateFormat.asString(dateFormat.DATETIME_FORMAT, date),
"11 01 2010 14:31:30.005"
);
},
'should default to the ISO8601 format': function(date) {
assert.equal(
dateFormat.asString(date),
'2010-01-11 14:31:30.005'
);
}
}
}).export(module);

test/fileAppender-test.js Normal file

@@ -0,0 +1,179 @@
var vows = require('vows')
, fs = require('fs')
, path = require('path')
, log4js = require('../lib/log4js')
, assert = require('assert');
log4js.clearAppenders();
function remove(filename) {
try {
fs.unlinkSync(filename);
} catch (e) {
//doesn't really matter if it failed
}
}
vows.describe('log4js fileAppender').addBatch({
'adding multiple fileAppenders': {
topic: function () {
var listenersCount = process.listeners('exit').length
, logger = log4js.getLogger('default-settings')
, count = 5, logfile;
while (count--) {
logfile = path.join(__dirname, '/fa-default-test' + count + '.log');
log4js.addAppender(require('../lib/appenders/file').appender(logfile), 'default-settings');
}
return listenersCount;
},
'does not add more than one `exit` listener': function (initialCount) {
assert.ok(process.listeners('exit').length <= initialCount + 1);
}
},
'with default fileAppender settings': {
topic: function() {
var that = this
, testFile = path.join(__dirname, '/fa-default-test.log')
, logger = log4js.getLogger('default-settings');
remove(testFile);
//log4js.configure({ appenders:[ { type: "file", filename: testFile, category: 'default-settings' } ] });
log4js.clearAppenders();
log4js.addAppender(require('../lib/appenders/file').appender(testFile), 'default-settings');
logger.info("This should be in the file.");
setTimeout(function() {
fs.readFile(testFile, "utf8", that.callback);
}, 100);
},
'should write log messages to the file': function(err, fileContents) {
assert.include(fileContents, "This should be in the file.\n");
},
'log messages should be in the basic layout format': function(err, fileContents) {
assert.match(fileContents, /\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}\] \[INFO\] default-settings - /);
}
},
'with a max file size and no backups': {
topic: function() {
var testFile = path.join(__dirname, '/fa-maxFileSize-test.log')
, logger = log4js.getLogger('max-file-size')
, that = this;
remove(testFile);
remove(testFile + '.1');
//log file of 100 bytes maximum, no backups
log4js.clearAppenders();
log4js.addAppender(require('../lib/appenders/file').appender(testFile, log4js.layouts.basicLayout, 100, 0), 'max-file-size');
logger.info("This is the first log message.");
logger.info("This is an intermediate log message.");
logger.info("This is the second log message.");
//wait for the file system to catch up
setTimeout(function() {
fs.readFile(testFile, "utf8", that.callback);
}, 100);
},
'log file should only contain the second message': function(err, fileContents) {
assert.include(fileContents, "This is the second log message.\n");
assert.equal(fileContents.indexOf("This is the first log message."), -1);
},
'the number of files': {
topic: function() {
fs.readdir(__dirname, this.callback);
},
'starting with the test file name should be two': function(err, files) {
//there will always be one backup if you've specified a max log size
var logFiles = files.filter(function(file) { return file.indexOf('fa-maxFileSize-test.log') > -1; });
assert.equal(logFiles.length, 2);
}
}
},
'with a max file size and 2 backups': {
topic: function() {
var testFile = path.join(__dirname, '/fa-maxFileSize-with-backups-test.log')
, logger = log4js.getLogger('max-file-size-backups');
remove(testFile);
remove(testFile+'.1');
remove(testFile+'.2');
//log file of 50 bytes maximum, 2 backups
log4js.clearAppenders();
log4js.addAppender(require('../lib/appenders/file').appender(testFile, log4js.layouts.basicLayout, 50, 2), 'max-file-size-backups');
logger.info("This is the first log message.");
logger.info("This is the second log message.");
logger.info("This is the third log message.");
logger.info("This is the fourth log message.");
var that = this;
//give the system a chance to open the stream
setTimeout(function() {
fs.readdir(__dirname, function(err, files) {
if (files) {
that.callback(null, files.sort());
} else {
that.callback(err, files);
}
});
}, 200);
},
'the log files': {
topic: function(files) {
var logFiles = files.filter(function(file) { return file.indexOf('fa-maxFileSize-with-backups-test.log') > -1; });
return logFiles;
},
'should be 3': function (files) {
assert.equal(files.length, 3);
},
'should be named in sequence': function (files) {
assert.deepEqual(files, ['fa-maxFileSize-with-backups-test.log', 'fa-maxFileSize-with-backups-test.log.1', 'fa-maxFileSize-with-backups-test.log.2']);
},
'and the contents of the first file': {
topic: function(logFiles) {
fs.readFile(path.join(__dirname, logFiles[0]), "utf8", this.callback);
},
'should be the last log message': function(contents) {
assert.include(contents, 'This is the fourth log message.');
}
},
'and the contents of the second file': {
topic: function(logFiles) {
fs.readFile(path.join(__dirname, logFiles[1]), "utf8", this.callback);
},
'should be the third log message': function(contents) {
assert.include(contents, 'This is the third log message.');
}
},
'and the contents of the third file': {
topic: function(logFiles) {
fs.readFile(path.join(__dirname, logFiles[2]), "utf8", this.callback);
},
'should be the second log message': function(contents) {
assert.include(contents, 'This is the second log message.');
}
}
}
}
}).addBatch({
'configure' : {
'with fileAppender': {
topic: function() {
var log4js = require('../lib/log4js')
, logger;
//this config file defines one file appender (to ./tmp-tests.log)
//and sets the log level for "tests" to WARN
log4js.configure('test/log4js.json');
logger = log4js.getLogger('tests');
logger.info('this should not be written to the file');
logger.warn('this should be written to the file');
fs.readFile('tmp-tests.log', 'utf8', this.callback);
},
'should load appender configuration from a json file': function(err, contents) {
assert.include(contents, 'this should be written to the file\n');
assert.equal(contents.indexOf('this should not be written to the file'), -1);
}
}
}
}).export(module);
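The batches above call the file appender through its positional signature, appender(filename, layout, maxLogSize, numBackups). A hedged sketch of the same wiring outside the vows harness, with illustrative sizes:

```javascript
var log4js = require('./lib/log4js');
var fileAppender = require('./lib/appenders/file');

// Roll at roughly 100 KB and keep three numbered backups.
log4js.addAppender(
  fileAppender.appender('app.log', log4js.layouts.basicLayout, 100 * 1024, 3),
  'app'
);

log4js.getLogger('app').info('written via the file appender');
```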

test/gelfAppender-test.js Normal file

@@ -0,0 +1,138 @@
var vows = require('vows')
, assert = require('assert')
, sandbox = require('sandboxed-module')
, log4js = require('../lib/log4js')
, setupLogging = function(options, category, compressedLength) {
var fakeDgram = {
sent: false,
socket: {
packetLength: 0,
close: function() {
},
send: function(pkt, offset, pktLength, port, host) {
fakeDgram.sent = true;
this.packet = pkt;
this.offset = offset;
this.packetLength = pktLength;
this.port = port;
this.host = host;
}
},
createSocket: function(type) {
this.type = type;
return this.socket;
}
}
, fakeZlib = {
gzip: function(objectToCompress, callback) {
fakeZlib.uncompressed = objectToCompress;
if (compressedLength) {
callback(null, { length: compressedLength });
} else {
callback(null, "I've been compressed");
}
}
}
, appender = sandbox.require('../lib/appenders/gelf', {
requires: {
dgram: fakeDgram,
zlib: fakeZlib
}
});
log4js.clearAppenders();
log4js.addAppender(appender.configure(options || {}), category || "gelf-test");
return {
dgram: fakeDgram,
compress: fakeZlib,
logger: log4js.getLogger(category || "gelf-test")
};
};
//log4js.configure({ doNotReplaceConsole: true });
vows.describe('log4js gelfAppender').addBatch({
'with default gelfAppender settings': {
topic: function() {
var setup = setupLogging();
setup.logger.info("This is a test");
return setup;
},
'the dgram packet': {
topic: function(setup) {
return setup.dgram;
},
'should be sent via udp to the localhost gelf server': function(dgram) {
assert.equal(dgram.type, "udp4");
assert.equal(dgram.socket.host, "localhost");
assert.equal(dgram.socket.port, 12201);
assert.equal(dgram.socket.offset, 0);
assert.ok(dgram.socket.packetLength > 0, "Received blank message");
},
'should be compressed': function(dgram) {
assert.equal(dgram.socket.packet, "I've been compressed");
}
},
'the uncompressed log message': {
topic: function(setup) {
var message = JSON.parse(setup.compress.uncompressed);
return message;
},
'should be in the gelf format': function(message) {
assert.equal(message.version, '1.0');
assert.equal(message.host, require('os').hostname());
assert.equal(message.level, 6); //INFO
assert.equal(message.facility, 'nodejs-server');
assert.equal(message.full_message, message.short_message);
assert.equal(message.full_message, 'This is a test');
}
}
},
'with a message longer than 8k': {
topic: function() {
var setup = setupLogging(undefined, undefined, 10240);
setup.logger.info("Blah.");
return setup;
},
'the dgram packet': {
topic: function(setup) {
return setup.dgram;
},
'should not be sent': function(dgram) {
assert.equal(dgram.sent, false);
}
}
},
'with non-default options': {
topic: function() {
var setup = setupLogging({
host: 'somewhere',
port: 12345,
hostname: 'cheese',
facility: 'nonsense'
});
setup.logger.debug("Just testing.");
return setup;
},
'the dgram packet': {
topic: function(setup) {
return setup.dgram;
},
'should pick up the options': function(dgram) {
assert.equal(dgram.socket.host, 'somewhere');
assert.equal(dgram.socket.port, 12345);
}
},
'the uncompressed packet': {
topic: function(setup) {
var message = JSON.parse(setup.compress.uncompressed);
return message;
},
'should pick up the options': function(message) {
assert.equal(message.host, 'cheese');
assert.equal(message.facility, 'nonsense');
}
}
}
}).export(module);
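The fake dgram and zlib modules above stand in for a real Graylog endpoint. A hedged sketch of the corresponding real configuration, reusing the option names the 'non-default options' batch passes through; the host and hostname values are illustrative:

```javascript
var log4js = require('./lib/log4js');
var gelfAppender = require('./lib/appenders/gelf');

log4js.addAppender(gelfAppender.configure({
  host: 'graylog.example.com', // GELF server to send UDP packets to
  port: 12201,
  hostname: 'web-01',          // reported as the message host
  facility: 'nodejs-server'
}), 'gelf');

log4js.getLogger('gelf').info('sent as a gzipped GELF message over UDP');
```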


@@ -0,0 +1,85 @@
var vows = require('vows'),
assert = require('assert');
vows.describe('log4js global loglevel').addBatch({
'global loglevel' : {
topic: function() {
var log4js = require('../lib/log4js');
return log4js;
},
'set global loglevel on creation': function(log4js) {
var log1 = log4js.getLogger('log1');
var level = 'OFF';
if (log1.level.toString() == level) {
level = 'TRACE';
}
assert.notEqual(log1.level.toString(), level);
log4js.setGlobalLogLevel(level);
assert.equal(log1.level.toString(), level);
var log2 = log4js.getLogger('log2');
assert.equal(log2.level.toString(), level);
},
'global change loglevel': function(log4js) {
var log1 = log4js.getLogger('log1');
var log2 = log4js.getLogger('log2');
var level = 'OFF';
if (log1.level.toString() == level) {
level = 'TRACE';
}
assert.notEqual(log1.level.toString(), level);
log4js.setGlobalLogLevel(level);
assert.equal(log1.level.toString(), level);
assert.equal(log2.level.toString(), level);
},
'override loglevel': function(log4js) {
var log1 = log4js.getLogger('log1');
var log2 = log4js.getLogger('log2');
var level = 'OFF';
if (log1.level.toString() == level) {
level = 'TRACE';
}
assert.notEqual(log1.level.toString(), level);
var oldLevel = log1.level.toString();
assert.equal(log2.level.toString(), oldLevel);
log2.setLevel(level);
assert.equal(log1.level.toString(), oldLevel);
assert.equal(log2.level.toString(), level);
assert.notEqual(oldLevel, level);
log2.removeLevel();
assert.equal(log1.level.toString(), oldLevel);
assert.equal(log2.level.toString(), oldLevel);
},
'preload loglevel': function(log4js) {
var log1 = log4js.getLogger('log1');
var level = 'OFF';
if (log1.level.toString() == level) {
level = 'TRACE';
}
assert.notEqual(log1.level.toString(), level);
var oldLevel = log1.level.toString();
log4js.getLogger('log2').setLevel(level);
assert.equal(log1.level.toString(), oldLevel);
// get again same logger but as different variable
var log2 = log4js.getLogger('log2');
assert.equal(log2.level.toString(), level);
assert.notEqual(oldLevel, level);
log2.removeLevel();
assert.equal(log1.level.toString(), oldLevel);
assert.equal(log2.level.toString(), oldLevel);
}
}
}).export(module);

test/hookioAppender-test.js Normal file

@@ -0,0 +1,101 @@
var vows = require('vows');
var assert = require('assert');
var sandbox = require('sandboxed-module');
function fancyResultingHookioAppender(opts) {
var result = { ons: {}, emissions: {}, logged: [], configs: [] };
var fakeLog4Js = {
appenderMakers: {}
};
fakeLog4Js.loadAppender = function (appender) {
fakeLog4Js.appenderMakers[appender] = function (config) {
result.actualLoggerConfig = config;
return function log(logEvent) {
result.logged.push(logEvent);
}
};
};
var fakeHookIo = { Hook: function(config) { result.configs.push(config); } };
fakeHookIo.Hook.prototype.start = function () {
result.startCalled = true;
};
fakeHookIo.Hook.prototype.on = function (eventName, functionToExec) {
result.ons[eventName] = { functionToExec: functionToExec };
if (eventName === 'hook::ready') {
functionToExec();
}
};
fakeHookIo.Hook.prototype.emit = function (eventName, data) {
result.emissions[eventName] = result.emissions[eventName] || [];
result.emissions[eventName].push({data: data});
var on = '*::' + eventName;
if (eventName !== 'hook::ready' && result.ons[on]) {
result.ons[on].callingCount = result.ons[on].callingCount ? result.ons[on].callingCount += 1 : 1;
result.ons[on].functionToExec(data);
}
};
return { theResult: result,
theModule: sandbox.require('../lib/appenders/hookio', {
requires: {
'../log4js': fakeLog4Js,
'hook.io': fakeHookIo
}
})
};
}
vows.describe('log4js hookioAppender').addBatch({
'master': {
topic: function() {
var fancy = fancyResultingHookioAppender();
var logger = fancy.theModule.configure({ name: 'ohno', mode: 'master', 'hook-port': 5001, appender: { type: 'file' } });
logger({ level: { levelStr: 'INFO' }, data: "ALRIGHTY THEN", startTime: '2011-10-27T03:53:16.031Z' });
logger({ level: { levelStr: 'DEBUG' }, data: "OH WOW", startTime: '2011-10-27T04:53:16.031Z'});
return fancy.theResult;
},
'should write to the actual appender': function (result) {
assert.isTrue(result.startCalled);
assert.equal(result.configs.length, 1);
assert.equal(result.configs[0]['hook-port'], 5001);
assert.equal(result.logged.length, 2);
assert.equal(result.emissions['ohno::log'].length, 2);
assert.equal(result.ons['*::ohno::log'].callingCount, 2);
},
'data written should be formatted correctly': function (result) {
assert.equal(result.logged[0].level.toString(), 'INFO');
assert.equal(result.logged[0].data, 'ALRIGHTY THEN');
assert.isTrue(typeof(result.logged[0].startTime) === 'object');
assert.equal(result.logged[1].level.toString(), 'DEBUG');
assert.equal(result.logged[1].data, 'OH WOW');
assert.isTrue(typeof(result.logged[1].startTime) === 'object');
},
'the actual logger should get the right config': function (result) {
assert.equal(result.actualLoggerConfig.type, 'file');
}
},
'worker': {
'should emit logging events to the master': {
topic: function() {
var fancy = fancyResultingHookioAppender();
var logger = fancy.theModule.configure({ name: 'ohno', mode: 'worker', appender: { type: 'file' } });
logger({ level: { levelStr: 'INFO' }, data: "ALRIGHTY THEN", startTime: '2011-10-27T03:53:16.031Z' });
logger({ level: { levelStr: 'DEBUG' }, data: "OH WOW", startTime: '2011-10-27T04:53:16.031Z'});
return fancy.theResult;
},
'should not write to the actual appender': function (result) {
assert.isTrue(result.startCalled);
assert.equal(result.logged.length, 0);
assert.equal(result.emissions['ohno::log'].length, 2);
assert.isUndefined(result.ons['*::ohno::log']);
}
}
}
}).exportTo(module);
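The fake hook.io module above only records hook construction, emissions and listeners. A hedged sketch of the configuration shape both batches feed to configure(): a master that forwards '&lt;name&gt;::log' events to a wrapped appender, while workers with the same name simply emit them (the filename is illustrative; the name and port are the test's values):

```javascript
var log4js = require('./lib/log4js');
var hookioAppender = require('./lib/appenders/hookio');

log4js.addAppender(hookioAppender.configure({
  name: 'ohno',
  mode: 'master', // a worker process would use mode: 'worker'
  'hook-port': 5001,
  appender: { type: 'file', filename: 'shared.log' }
}), 'app');
```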

test/layouts-test.js Normal file

@@ -0,0 +1,268 @@
var vows = require('vows'),
assert = require('assert');
//used for patternLayout tests.
function test(args, pattern, value) {
var layout = args[0]
, event = args[1]
, tokens = args[2];
assert.equal(layout(pattern, tokens)(event), value);
}
vows.describe('log4js layouts').addBatch({
'colouredLayout': {
topic: function() {
return require('../lib/layouts').colouredLayout;
},
'should apply level colour codes to output': function(layout) {
var output = layout({
data: ["nonsense"],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: "cheese",
level: {
toString: function() { return "ERROR"; }
}
});
assert.equal(output, '\033[31m[2010-12-05 14:18:30.045] [ERROR] cheese - \033[39mnonsense');
},
'should support the console.log format for the message': function(layout) {
var output = layout({
data: ["thing %d", 2],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: "cheese",
level: {
toString: function() { return "ERROR"; }
}
});
assert.equal(output, '\033[31m[2010-12-05 14:18:30.045] [ERROR] cheese - \033[39mthing 2');
}
},
'messagePassThroughLayout': {
topic: function() {
return require('../lib/layouts').messagePassThroughLayout;
},
'should take a logevent and output only the message' : function(layout) {
assert.equal(layout({
data: ["nonsense"],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: "cheese",
level: {
colour: "green",
toString: function() { return "ERROR"; }
}
}), "nonsense");
},
'should support the console.log format for the message' : function(layout) {
assert.equal(layout({
data: ["thing %d", 1, "cheese"]
, startTime: new Date(2010, 11, 5, 14, 18, 30, 45)
, categoryName: "cheese"
, level : {
colour: "green"
, toString: function() { return "ERROR"; }
}
}), "thing 1 'cheese'");
},
'should output the first item even if it is not a string': function(layout) {
assert.equal(layout({
data: [ { thing: 1} ]
, startTime: new Date(2010, 11, 5, 14, 18, 30, 45)
, categoryName: "cheese"
, level: {
colour: "green"
, toString: function() { return "ERROR"; }
}
}), "{ thing: 1 }");
},
'should print the stack of a passed error object': function(layout) {
assert.isArray(layout({
data: [ new Error() ]
, startTime: new Date(2010, 11, 5, 14, 18, 30, 45)
, categoryName: "cheese"
, level: {
colour: "green"
, toString: function() { return "ERROR"; }
}
}).match(/Error\s+at Object\..*\s+\((.*)test[\\\/]layouts-test\.js\:\d+\:\d+\)\s+at runTest/)
, 'regexp did not return a match');
},
'with passed augmented errors':
{ topic:
function(layout){
var e = new Error("My Unique Error Message");
e.augmented = "My Unique attribute value"
e.augObj = { at1: "at2" }
return layout({
data: [ e ]
, startTime: new Date(2010, 11, 5, 14, 18, 30, 45)
, categoryName: "cheese"
, level: {
colour: "green"
, toString: function() { return "ERROR"; }
}
});
},
'should print the contained error message': function(layoutOutput) {
var m = layoutOutput.match(/\{ \[Error: My Unique Error Message\]/);
assert.isArray(m);
},
'should print error augmented string attributes': function(layoutOutput) {
var m = layoutOutput.match(/augmented:\s'My Unique attribute value'/);
assert.isArray(m);
},
'should print error augmented object attributes': function(layoutOutput) {
var m = layoutOutput.match(/augObj:\s\{ at1: 'at2' \}/);
assert.isArray(m);
}
}
},
'basicLayout': {
topic: function() {
var layout = require('../lib/layouts').basicLayout,
event = {
data: ['this is a test'],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: "tests",
level: {
toString: function() { return "DEBUG"; }
}
};
return [layout, event];
},
'should take a logevent and output a formatted string': function(args) {
var layout = args[0], event = args[1];
assert.equal(layout(event), "[2010-12-05 14:18:30.045] [DEBUG] tests - this is a test");
},
'should output a stacktrace, message if the event has an error attached': function(args) {
var layout = args[0], event = args[1], output, lines,
error = new Error("Some made-up error"),
stack = error.stack.split(/\n/);
event.data = ['this is a test', error];
output = layout(event);
lines = output.split(/\n/);
assert.equal(lines.length - 1, stack.length);
assert.equal(lines[0], "[2010-12-05 14:18:30.045] [DEBUG] tests - this is a test [Error: Some made-up error]");
for (var i = 1; i < stack.length; i++) {
assert.equal(lines[i+2], stack[i+1]);
}
},
'should output any extra data in the log event as util.inspect strings': function(args) {
var layout = args[0], event = args[1], output, lines;
event.data = ['this is a test', {
name: 'Cheese',
message: 'Gorgonzola smells.'
}];
output = layout(event);
assert.equal(output, "[2010-12-05 14:18:30.045] [DEBUG] tests - this is a test { name: 'Cheese', message: 'Gorgonzola smells.' }");
}
},
'patternLayout': {
topic: function() {
var event = {
data: ['this is a test'],
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: "multiple.levels.of.tests",
level: {
toString: function() { return "DEBUG"; }
}
}, layout = require('../lib/layouts').patternLayout
, tokens = {
testString: 'testStringToken',
testFunction: function() { return 'testFunctionToken'; }
};
return [layout, event, tokens];
},
'should default to "time logLevel loggerName - message"': function(args) {
test(args, null, "14:18:30 DEBUG multiple.levels.of.tests - this is a test\n");
},
'%r should output time only': function(args) {
test(args, '%r', '14:18:30');
},
'%p should output the log level': function(args) {
test(args, '%p', 'DEBUG');
},
'%c should output the log category': function(args) {
test(args, '%c', 'multiple.levels.of.tests');
},
'%m should output the log data': function(args) {
test(args, '%m', 'this is a test');
},
'%n should output a new line': function(args) {
test(args, '%n', '\n');
},
'%c should handle category names like java-style package names': function(args) {
test(args, '%c{1}', 'tests');
test(args, '%c{2}', 'of.tests');
test(args, '%c{3}', 'levels.of.tests');
test(args, '%c{4}', 'multiple.levels.of.tests');
test(args, '%c{5}', 'multiple.levels.of.tests');
test(args, '%c{99}', 'multiple.levels.of.tests');
},
'%d should output the date in ISO8601 format': function(args) {
test(args, '%d', '2010-12-05 14:18:30.045');
},
'%d should allow for format specification': function(args) {
test(args, '%d{ISO8601}', '2010-12-05 14:18:30.045');
test(args, '%d{ABSOLUTE}', '14:18:30.045');
test(args, '%d{DATE}', '05 12 2010 14:18:30.045');
test(args, '%d{yyyy MM dd}', '2010 12 05');
test(args, '%d{yyyy MM dd hh mm ss SSS}', '2010 12 05 14 18 30 045');
},
'%% should output %': function(args) {
test(args, '%%', '%');
},
'should output anything not preceded by % as literal': function(args) {
test(args, 'blah blah blah', 'blah blah blah');
},
'should handle complicated patterns': function(args) {
test(args,
'%m%n %c{2} at %d{ABSOLUTE} cheese %p%n',
'this is a test\n of.tests at 14:18:30.045 cheese DEBUG\n'
);
},
'should truncate fields if specified': function(args) {
test(args, '%.4m', 'this');
test(args, '%.7m', 'this is');
test(args, '%.9m', 'this is a');
test(args, '%.14m', 'this is a test');
test(args, '%.2919102m', 'this is a test');
},
'should pad fields if specified': function(args) {
test(args, '%10p', ' DEBUG');
test(args, '%8p', ' DEBUG');
test(args, '%6p', ' DEBUG');
test(args, '%4p', 'DEBUG');
test(args, '%-4p', 'DEBUG');
test(args, '%-6p', 'DEBUG ');
test(args, '%-8p', 'DEBUG ');
test(args, '%-10p', 'DEBUG ');
},
'%[%r%] should output colored time': function(args) {
test(args, '%[%r%]', '\033[36m14:18:30\033[39m');
},
'%x{testString} should output the string stored in tokens': function(args) {
test(args, '%x{testString}', 'testStringToken');
},
'%x{testFunction} should output the result of the function stored in tokens': function(args) {
test(args, '%x{testFunction}', 'testFunctionToken');
},
'%x{doesNotExist} should output the token unchanged if it is not found in tokens': function(args) {
test(args, '%x{doesNotExist}', '%x{doesNotExist}');
},
'%x should output %x if no token name is given': function(args) {
test(args, '%x', '%x');
},
}
}).export(module);

210
test/levels-test.js Normal file

@@ -0,0 +1,210 @@
var vows = require('vows')
, assert = require('assert')
, levels = require('../lib/levels');
function assertThat(level) {
function assertForEach(assertion, test, otherLevels) {
otherLevels.forEach(function(other) {
assertion.call(assert, test.call(level, other));
});
}
return {
isLessThanOrEqualTo: function(levels) {
assertForEach(assert.isTrue, level.isLessThanOrEqualTo, levels);
},
isNotLessThanOrEqualTo: function(levels) {
assertForEach(assert.isFalse, level.isLessThanOrEqualTo, levels);
},
isGreaterThanOrEqualTo: function(levels) {
assertForEach(assert.isTrue, level.isGreaterThanOrEqualTo, levels);
},
isNotGreaterThanOrEqualTo: function(levels) {
assertForEach(assert.isFalse, level.isGreaterThanOrEqualTo, levels);
},
isEqualTo: function(levels) {
assertForEach(assert.isTrue, level.isEqualTo, levels);
},
isNotEqualTo: function(levels) {
assertForEach(assert.isFalse, level.isEqualTo, levels);
}
};
}
vows.describe('levels').addBatch({
'values': {
topic: levels,
'should define some levels': function(levels) {
assert.isNotNull(levels.ALL);
assert.isNotNull(levels.TRACE);
assert.isNotNull(levels.DEBUG);
assert.isNotNull(levels.INFO);
assert.isNotNull(levels.WARN);
assert.isNotNull(levels.ERROR);
assert.isNotNull(levels.FATAL);
assert.isNotNull(levels.OFF);
},
'ALL': {
topic: levels.ALL,
'should be less than the other levels': function(all) {
assertThat(all).isLessThanOrEqualTo([levels.ALL, levels.TRACE, levels.DEBUG, levels.INFO, levels.WARN, levels.ERROR, levels.FATAL, levels.OFF]);
},
'should be greater than no levels': function(all) {
assertThat(all).isNotGreaterThanOrEqualTo([levels.TRACE, levels.DEBUG, levels.INFO, levels.WARN, levels.ERROR, levels.FATAL, levels.OFF]);
},
'should only be equal to ALL': function(all) {
assertThat(all).isEqualTo([levels.toLevel("ALL")]);
assertThat(all).isNotEqualTo([levels.TRACE, levels.DEBUG, levels.INFO, levels.WARN, levels.ERROR, levels.FATAL, levels.OFF]);
}
},
'TRACE': {
topic: levels.TRACE,
'should be less than DEBUG': function(trace) {
assertThat(trace).isLessThanOrEqualTo([levels.DEBUG, levels.INFO, levels.WARN, levels.ERROR, levels.FATAL, levels.OFF]);
assertThat(trace).isNotLessThanOrEqualTo([levels.ALL]);
},
'should be greater than ALL': function(trace) {
assertThat(trace).isGreaterThanOrEqualTo([levels.ALL, levels.TRACE]);
assertThat(trace).isNotGreaterThanOrEqualTo([levels.DEBUG, levels.INFO, levels.WARN, levels.ERROR, levels.FATAL, levels.OFF]);
},
'should only be equal to TRACE': function(trace) {
assertThat(trace).isEqualTo([levels.toLevel("TRACE")]);
assertThat(trace).isNotEqualTo([levels.ALL, levels.DEBUG, levels.INFO, levels.WARN, levels.ERROR, levels.FATAL, levels.OFF]);
}
},
'DEBUG': {
topic: levels.DEBUG,
'should be less than INFO': function(debug) {
assertThat(debug).isLessThanOrEqualTo([levels.INFO, levels.WARN, levels.ERROR, levels.FATAL, levels.OFF]);
assertThat(debug).isNotLessThanOrEqualTo([levels.ALL, levels.TRACE]);
},
'should be greater than TRACE': function(debug) {
assertThat(debug).isGreaterThanOrEqualTo([levels.ALL, levels.TRACE]);
assertThat(debug).isNotGreaterThanOrEqualTo([levels.INFO, levels.WARN, levels.ERROR, levels.FATAL, levels.OFF]);
},
'should only be equal to DEBUG': function(debug) {
assertThat(debug).isEqualTo([levels.toLevel("DEBUG")]);
assertThat(debug).isNotEqualTo([levels.ALL, levels.TRACE, levels.INFO, levels.WARN, levels.ERROR, levels.FATAL, levels.OFF]);
}
},
'INFO': {
topic: levels.INFO,
'should be less than WARN': function(info) {
assertThat(info).isLessThanOrEqualTo([levels.WARN, levels.ERROR, levels.FATAL, levels.OFF]);
assertThat(info).isNotLessThanOrEqualTo([levels.ALL, levels.TRACE, levels.DEBUG]);
},
'should be greater than DEBUG': function(info) {
assertThat(info).isGreaterThanOrEqualTo([levels.ALL, levels.TRACE, levels.DEBUG]);
assertThat(info).isNotGreaterThanOrEqualTo([levels.WARN, levels.ERROR, levels.FATAL, levels.OFF]);
},
'should only be equal to INFO': function(info) {
assertThat(info).isEqualTo([levels.toLevel("INFO")]);
assertThat(info).isNotEqualTo([levels.ALL, levels.TRACE, levels.DEBUG, levels.WARN, levels.ERROR, levels.FATAL, levels.OFF]);
}
},
'WARN': {
topic: levels.WARN,
'should be less than ERROR': function(warn) {
assertThat(warn).isLessThanOrEqualTo([levels.ERROR, levels.FATAL, levels.OFF]);
assertThat(warn).isNotLessThanOrEqualTo([levels.ALL, levels.TRACE, levels.DEBUG, levels.INFO]);
},
'should be greater than INFO': function(warn) {
assertThat(warn).isGreaterThanOrEqualTo([levels.ALL, levels.TRACE, levels.DEBUG, levels.INFO]);
assertThat(warn).isNotGreaterThanOrEqualTo([levels.ERROR, levels.FATAL, levels.OFF]);
},
'should only be equal to WARN': function(warn) {
assertThat(warn).isEqualTo([levels.toLevel("WARN")]);
assertThat(warn).isNotEqualTo([levels.ALL, levels.TRACE, levels.DEBUG, levels.INFO, levels.ERROR, levels.FATAL, levels.OFF]);
}
},
'ERROR': {
topic: levels.ERROR,
'should be less than FATAL': function(error) {
assertThat(error).isLessThanOrEqualTo([levels.FATAL, levels.OFF]);
assertThat(error).isNotLessThanOrEqualTo([levels.ALL, levels.TRACE, levels.DEBUG, levels.INFO, levels.WARN]);
},
'should be greater than WARN': function(error) {
assertThat(error).isGreaterThanOrEqualTo([levels.ALL, levels.TRACE, levels.DEBUG, levels.INFO, levels.WARN]);
assertThat(error).isNotGreaterThanOrEqualTo([levels.FATAL, levels.OFF]);
},
'should only be equal to ERROR': function(error) {
assertThat(error).isEqualTo([levels.toLevel("ERROR")]);
assertThat(error).isNotEqualTo([levels.ALL, levels.TRACE, levels.DEBUG, levels.INFO, levels.WARN, levels.FATAL, levels.OFF]);
}
},
'FATAL': {
topic: levels.FATAL,
'should be less than OFF': function(fatal) {
assertThat(fatal).isLessThanOrEqualTo([levels.OFF]);
assertThat(fatal).isNotLessThanOrEqualTo([levels.ALL, levels.TRACE, levels.DEBUG, levels.INFO, levels.WARN, levels.ERROR]);
},
'should be greater than ERROR': function(fatal) {
assertThat(fatal).isGreaterThanOrEqualTo([levels.ALL, levels.TRACE, levels.DEBUG, levels.INFO, levels.WARN, levels.ERROR]);
assertThat(fatal).isNotGreaterThanOrEqualTo([levels.OFF]);
},
'should only be equal to FATAL': function(fatal) {
assertThat(fatal).isEqualTo([levels.toLevel("FATAL")]);
assertThat(fatal).isNotEqualTo([levels.ALL, levels.TRACE, levels.DEBUG, levels.INFO, levels.WARN, levels.ERROR, levels.OFF]);
}
},
'OFF': {
topic: levels.OFF,
'should not be less than anything': function(off) {
assertThat(off).isNotLessThanOrEqualTo([levels.ALL, levels.TRACE, levels.DEBUG, levels.INFO, levels.WARN, levels.ERROR, levels.FATAL]);
},
'should be greater than everything': function(off) {
assertThat(off).isGreaterThanOrEqualTo([levels.ALL, levels.TRACE, levels.DEBUG, levels.INFO, levels.WARN, levels.ERROR, levels.FATAL]);
},
'should only be equal to OFF': function(off) {
assertThat(off).isEqualTo([levels.toLevel("OFF")]);
assertThat(off).isNotEqualTo([levels.ALL, levels.TRACE, levels.DEBUG, levels.INFO, levels.WARN, levels.ERROR, levels.FATAL]);
}
}
},
'isGreaterThanOrEqualTo': {
topic: levels.INFO,
'should handle string arguments': function(info) {
assertThat(info).isGreaterThanOrEqualTo(["all", "trace", "debug"]);
assertThat(info).isNotGreaterThanOrEqualTo(['warn', 'ERROR', 'Fatal', 'off']);
}
},
'isLessThanOrEqualTo': {
topic: levels.INFO,
'should handle string arguments': function(info) {
assertThat(info).isNotLessThanOrEqualTo(["all", "trace", "debug"]);
assertThat(info).isLessThanOrEqualTo(['warn', 'ERROR', 'Fatal', 'off']);
}
},
'toLevel': {
'with lowercase argument': {
topic: levels.toLevel("debug"),
'should take the string and return the corresponding level': function(level) {
assert.equal(level, levels.DEBUG);
}
},
'with uppercase argument': {
topic: levels.toLevel("DEBUG"),
'should take the string and return the corresponding level': function(level) {
assert.equal(level, levels.DEBUG);
}
},
'with varying case': {
topic: levels.toLevel("DeBuG"),
'should take the string and return the corresponding level': function(level) {
assert.equal(level, levels.DEBUG);
}
},
'with unrecognised argument': {
topic: levels.toLevel("cheese"),
'should return undefined': function(level) {
assert.isUndefined(level);
}
},
'with unrecognised argument and default value': {
topic: levels.toLevel("cheese", levels.DEBUG),
'should return default value': function(level) {
assert.equal(level, levels.DEBUG);
}
}
}
}).export(module);

69
test/log-abspath-test.js Normal file

@@ -0,0 +1,69 @@
var vows = require('vows')
, assert = require('assert')
, sandbox = require('sandboxed-module');
vows.describe('log4js-abspath').addBatch({
'options': {
topic: function() {
var appenderOptions,
log4js = sandbox.require(
'../lib/log4js',
{ requires:
{ './appenders/fake':
{
name: "fake",
appender: function() {},
configure: function(configuration, options) {
appenderOptions = options;
return function() {};
}
}
}
}
),
config = {
"appenders": [
{
"type" : "fake",
"filename" : "cheesy-wotsits.log"
}
]
};
log4js.configure(config, {
cwd: '/absolute/path/to'
});
return appenderOptions;
},
'should be passed to appenders during configuration': function(options) {
assert.equal(options.cwd, '/absolute/path/to');
}
},
'file appender': {
topic: function() {
var fileOpened,
fileAppender = sandbox.require(
'../lib/appenders/file',
{ requires:
{ '../streams':
{
RollingFileStream: function(file) {
fileOpened = file;
return {
on: function() {},
end: function() {}
};
}
}
}
}
);
fileAppender.configure({ filename: "whatever.log", maxLogSize: 10 }, { cwd: '/absolute/path/to' });
return fileOpened;
},
'should prepend options.cwd to config.filename': function(fileOpened) {
assert.equal(fileOpened, "/absolute/path/to/whatever.log");
}
},
}).export(module);


@@ -0,0 +1,69 @@
var vows = require('vows')
, fs = require('fs')
, assert = require('assert');
function remove(filename) {
try {
fs.unlinkSync(filename);
} catch (e) {
//doesn't really matter if it failed
}
}
vows.describe('log4js logLevelFilter').addBatch({
'appender': {
topic: function() {
var log4js = require('../lib/log4js'), logEvents = [], logger;
log4js.clearAppenders();
log4js.addAppender(require('../lib/appenders/logLevelFilter').appender('ERROR', function(evt) { logEvents.push(evt); }), "logLevelTest");
logger = log4js.getLogger("logLevelTest");
logger.debug('this should not trigger an event');
logger.warn('neither should this');
logger.error('this should, though');
logger.fatal('so should this');
return logEvents;
},
'should only pass log events greater than or equal to its own level' : function(logEvents) {
assert.equal(logEvents.length, 2);
assert.equal(logEvents[0].data[0], 'this should, though');
assert.equal(logEvents[1].data[0], 'so should this');
}
},
'configure': {
topic: function() {
var log4js = require('../lib/log4js')
, logger;
remove(__dirname + '/logLevelFilter.log');
remove(__dirname + '/logLevelFilter-warnings.log');
log4js.configure('test/with-logLevelFilter.json');
logger = log4js.getLogger("tests");
logger.info('main');
logger.error('both');
logger.warn('both');
logger.debug('main');
//wait for the file system to catch up
setTimeout(this.callback, 100);
},
'logLevelFilter.log': {
topic: function() {
fs.readFile(__dirname + '/logLevelFilter.log', 'utf8', this.callback);
},
'should contain all log messages': function(contents) {
var messages = contents.trim().split('\n');
assert.deepEqual(messages, ['main','both','both','main']);
}
},
'logLevelFilter-warnings.log': {
topic: function() {
fs.readFile(__dirname + '/logLevelFilter-warnings.log','utf8',this.callback);
},
'should contain only error and warning log messages': function(contents) {
var messages = contents.trim().split('\n');
assert.deepEqual(messages, ['both','both']);
}
}
}
}).export(module);

578
test/logging-test.js Normal file

@@ -0,0 +1,578 @@
var vows = require('vows')
, assert = require('assert')
, sandbox = require('sandboxed-module');
function setupConsoleTest() {
var fakeConsole = {}
, logEvents = []
, log4js;
['trace','debug','log','info','warn','error'].forEach(function(fn) {
fakeConsole[fn] = function() {
throw new Error("this should not be called.");
};
});
log4js = sandbox.require(
'../lib/log4js'
, {
globals: {
console: fakeConsole
}
}
);
log4js.clearAppenders();
log4js.addAppender(function(evt) {
logEvents.push(evt);
});
return { log4js: log4js, logEvents: logEvents, fakeConsole: fakeConsole };
}
vows.describe('log4js').addBatch({
'getLogger': {
topic: function() {
var log4js = require('../lib/log4js');
log4js.clearAppenders();
var logger = log4js.getLogger('tests');
logger.setLevel("DEBUG");
return logger;
},
'should take a category and return a logger': function(logger) {
assert.equal(logger.category, 'tests');
assert.equal(logger.level.toString(), "DEBUG");
assert.isFunction(logger.debug);
assert.isFunction(logger.info);
assert.isFunction(logger.warn);
assert.isFunction(logger.error);
assert.isFunction(logger.fatal);
},
'log events' : {
topic: function(logger) {
var events = [];
logger.addListener("log", function (logEvent) { events.push(logEvent); });
logger.debug("Debug event");
logger.trace("Trace event 1");
logger.trace("Trace event 2");
logger.warn("Warning event");
logger.error("Aargh!", new Error("Pants are on fire!"));
logger.error("Simulated CouchDB problem", { err: 127, cause: "incendiary underwear" });
return events;
},
'should emit log events': function(events) {
assert.equal(events[0].level.toString(), 'DEBUG');
assert.equal(events[0].data[0], 'Debug event');
assert.instanceOf(events[0].startTime, Date);
},
'should not emit events of a lower level': function(events) {
assert.equal(events.length, 4);
assert.equal(events[1].level.toString(), 'WARN');
},
'should include the error if passed in': function (events) {
assert.instanceOf(events[2].data[1], Error);
assert.equal(events[2].data[1].message, 'Pants are on fire!');
}
},
},
'invalid configuration': {
'should throw an exception': function() {
assert.throws(function() {
require('../lib/log4js').configure({ "type": "invalid" });
});
}
},
'configuration when passed as object': {
topic: function() {
var appenderConfig
, log4js = sandbox.require(
'../lib/log4js'
, { requires:
{ './appenders/file':
{
name: "file"
, appender: function() {}
, configure: function(configuration) {
appenderConfig = configuration;
return function() {};
}
}
}
}
)
, config = {
"appenders": [
{
"type" : "file",
"filename" : "cheesy-wotsits.log",
"maxLogSize" : 1024,
"backups" : 3
}
]
};
log4js.configure(config);
return appenderConfig;
},
'should be passed to appender config': function(configuration) {
assert.equal(configuration.filename, 'cheesy-wotsits.log');
}
},
'configuration when passed as filename': {
topic: function() {
var appenderConfig
, configFilename
, log4js = sandbox.require(
'../lib/log4js'
, { requires:
{ 'fs':
{
statSync: function() {
return { mtime: Date.now() };
},
readFileSync: function(filename) {
configFilename = filename;
return JSON.stringify({
appenders: [
{ type: "file"
, filename: "whatever.log"
}
]
});
},
readdirSync: function() {
return ['file'];
}
}
, './appenders/file':
{
name: "file"
, appender: function() {}
, configure: function(configuration) {
appenderConfig = configuration;
return function() {};
}
}
}
}
);
log4js.configure("/path/to/cheese.json");
return [ configFilename, appenderConfig ];
},
'should read the config from a file': function(args) {
assert.equal(args[0], '/path/to/cheese.json');
},
'should pass config to appender': function(args) {
assert.equal(args[1].filename, "whatever.log");
}
},
'with no appenders defined' : {
topic: function() {
var logger
, that = this
, fakeConsoleAppender = {
name: "console"
, appender: function() {
return function(evt) {
that.callback(null, evt);
}
}
, configure: function() {
return fakeConsoleAppender.appender();
}
}
, log4js = sandbox.require(
'../lib/log4js'
, {
requires: {
'./appenders/console': fakeConsoleAppender
}
}
);
logger = log4js.getLogger("some-logger");
logger.debug("This is a test");
},
'should default to the console appender': function(evt) {
assert.equal(evt.data[0], "This is a test");
}
},
'addAppender' : {
topic: function() {
var log4js = require('../lib/log4js');
log4js.clearAppenders();
return log4js;
},
'without a category': {
'should register the function as a listener for all loggers': function (log4js) {
var appenderEvent, appender = function(evt) { appenderEvent = evt; }, logger = log4js.getLogger("tests");
log4js.addAppender(appender);
logger.debug("This is a test");
assert.equal(appenderEvent.data[0], "This is a test");
assert.equal(appenderEvent.categoryName, "tests");
assert.equal(appenderEvent.level.toString(), "DEBUG");
},
'should also register as an appender for loggers if an appender for that category is defined': function (log4js) {
var otherEvent, appenderEvent, cheeseLogger;
log4js.addAppender(function (evt) { appenderEvent = evt; });
log4js.addAppender(function (evt) { otherEvent = evt; }, 'cheese');
cheeseLogger = log4js.getLogger('cheese');
cheeseLogger.debug('This is a test');
assert.deepEqual(appenderEvent, otherEvent);
assert.equal(otherEvent.data[0], 'This is a test');
assert.equal(otherEvent.categoryName, 'cheese');
otherEvent = undefined;
appenderEvent = undefined;
log4js.getLogger('pants').debug("this should not be propagated to otherEvent");
assert.isUndefined(otherEvent);
assert.equal(appenderEvent.data[0], "this should not be propagated to otherEvent");
}
},
'with a category': {
'should only register the function as a listener for that category': function(log4js) {
var appenderEvent, appender = function(evt) { appenderEvent = evt; }, logger = log4js.getLogger("tests");
log4js.addAppender(appender, 'tests');
logger.debug('this is a category test');
assert.equal(appenderEvent.data[0], 'this is a category test');
appenderEvent = undefined;
log4js.getLogger('some other category').debug('Cheese');
assert.isUndefined(appenderEvent);
}
},
'with multiple categories': {
'should register the function as a listener for all the categories': function(log4js) {
var appenderEvent, appender = function(evt) { appenderEvent = evt; }, logger = log4js.getLogger('tests');
log4js.addAppender(appender, 'tests', 'biscuits');
logger.debug('this is a test');
assert.equal(appenderEvent.data[0], 'this is a test');
appenderEvent = undefined;
var otherLogger = log4js.getLogger('biscuits');
otherLogger.debug("mmm... garibaldis");
assert.equal(appenderEvent.data[0], "mmm... garibaldis");
appenderEvent = undefined;
log4js.getLogger("something else").debug("pants");
assert.isUndefined(appenderEvent);
},
'should register the function when the list of categories is an array': function(log4js) {
var appenderEvent, appender = function(evt) { appenderEvent = evt; };
log4js.addAppender(appender, ['tests', 'pants']);
log4js.getLogger('tests').debug('this is a test');
assert.equal(appenderEvent.data[0], 'this is a test');
appenderEvent = undefined;
log4js.getLogger('pants').debug("big pants");
assert.equal(appenderEvent.data[0], "big pants");
appenderEvent = undefined;
log4js.getLogger("something else").debug("pants");
assert.isUndefined(appenderEvent);
}
}
},
'default setup': {
topic: function() {
var appenderEvents = [],
fakeConsole = {
'name': 'console'
, 'appender': function () {
return function(evt) {
appenderEvents.push(evt);
}
}
, 'configure': function (config) {
return fakeConsole.appender();
}
},
globalConsole = {
log: function() { }
},
log4js = sandbox.require(
'../lib/log4js',
{
requires: {
'./appenders/console': fakeConsole
},
globals: {
console: globalConsole
}
}
),
logger = log4js.getLogger('a-test');
logger.debug("this is a test");
globalConsole.log("this should not be logged");
return appenderEvents;
},
'should configure a console appender': function(appenderEvents) {
assert.equal(appenderEvents[0].data[0], 'this is a test');
},
'should not replace console.log with log4js version': function(appenderEvents) {
assert.equal(appenderEvents.length, 1);
}
},
'console' : {
topic: setupConsoleTest,
'when replaceConsole called': {
topic: function(test) {
test.log4js.replaceConsole();
test.fakeConsole.log("Some debug message someone put in a module");
test.fakeConsole.debug("Some debug");
test.fakeConsole.error("An error");
test.fakeConsole.info("some info");
test.fakeConsole.warn("a warning");
test.fakeConsole.log("cheese (%s) and biscuits (%s)", "gouda", "garibaldis");
test.fakeConsole.log({ lumpy: "tapioca" });
test.fakeConsole.log("count %d", 123);
test.fakeConsole.log("stringify %j", { lumpy: "tapioca" });
return test.logEvents;
},
'should replace console.log methods with log4js ones': function(logEvents) {
assert.equal(logEvents.length, 9);
assert.equal(logEvents[0].data[0], "Some debug message someone put in a module");
assert.equal(logEvents[0].level.toString(), "INFO");
assert.equal(logEvents[1].data[0], "Some debug");
assert.equal(logEvents[1].level.toString(), "DEBUG");
assert.equal(logEvents[2].data[0], "An error");
assert.equal(logEvents[2].level.toString(), "ERROR");
assert.equal(logEvents[3].data[0], "some info");
assert.equal(logEvents[3].level.toString(), "INFO");
assert.equal(logEvents[4].data[0], "a warning");
assert.equal(logEvents[4].level.toString(), "WARN");
assert.equal(logEvents[5].data[0], "cheese (%s) and biscuits (%s)");
assert.equal(logEvents[5].data[1], "gouda");
assert.equal(logEvents[5].data[2], "garibaldis");
}
},
'when turned off': {
topic: function(test) {
test.log4js.restoreConsole();
try {
test.fakeConsole.log("This should cause the error described in the setup");
} catch (e) {
return e;
}
},
'should call the original console methods': function (err) {
assert.instanceOf(err, Error);
assert.equal(err.message, "this should not be called.");
}
},
'configuration': {
topic: function(test) {
test.log4js.replaceConsole();
test.log4js.configure({ replaceConsole: false });
try {
test.fakeConsole.log("This should cause the error described in the setup");
} catch (e) {
return e;
}
},
'should allow for turning off console replacement': function (err) {
assert.instanceOf(err, Error);
assert.equal(err.message, 'this should not be called.');
}
}
},
'configuration persistence' : {
topic: function() {
var logEvent,
firstLog4js = require('../lib/log4js'),
secondLog4js;
firstLog4js.clearAppenders();
firstLog4js.addAppender(function(evt) { logEvent = evt; });
secondLog4js = require('../lib/log4js');
secondLog4js.getLogger().info("This should go to the appender defined in firstLog4js");
return logEvent;
},
'should maintain appenders between requires': function (logEvent) {
assert.equal(logEvent.data[0], "This should go to the appender defined in firstLog4js");
}
},
'configuration reload with configuration changing' : {
topic: function() {
var pathsChecked = [],
logEvents = [],
logger,
modulePath = 'path/to/log4js.json',
fakeFS = {
lastMtime: Date.now(),
config: { appenders: [ { type: 'console', layout: { type: 'messagePassThrough' } } ],
levels: { 'a-test' : 'INFO' } },
readdirSync: function(dir) {
return require('fs').readdirSync(dir);
},
readFileSync: function (file, encoding) {
assert.equal(file, modulePath);
assert.equal(encoding, 'utf8');
return JSON.stringify(fakeFS.config);
},
statSync: function (path) {
pathsChecked.push(path);
if (path === modulePath) {
fakeFS.lastMtime += 1;
return { mtime: new Date(fakeFS.lastMtime) };
} else {
throw new Error("no such file");
}
}
},
fakeConsole = {
'name': 'console',
'appender': function () {
return function(evt) { logEvents.push(evt); };
},
'configure': function (config) {
return fakeConsole.appender();
}
},
setIntervalCallback,
fakeSetInterval = function(cb, timeout) {
setIntervalCallback = cb;
},
log4js = sandbox.require(
'../lib/log4js',
{
requires: {
'fs': fakeFS,
'./appenders/console': fakeConsole
},
globals: {
'console': fakeConsole,
'setInterval' : fakeSetInterval,
}
}
);
log4js.configure('path/to/log4js.json', { reloadSecs: 30 });
logger = log4js.getLogger('a-test');
logger.info("info1");
logger.debug("debug2 - should be ignored");
fakeFS.config.levels['a-test'] = "DEBUG";
setIntervalCallback();
logger.info("info3");
logger.debug("debug4");
return logEvents;
},
'should configure log4js from first log4js.json found': function(logEvents) {
assert.equal(logEvents[0].data[0], 'info1');
assert.equal(logEvents[1].data[0], 'info3');
assert.equal(logEvents[2].data[0], 'debug4');
assert.equal(logEvents.length, 3);
}
},
'configuration reload with configuration staying the same' : {
topic: function() {
var pathsChecked = [],
fileRead = 0,
logEvents = [],
logger,
modulePath = require('path').normalize(__dirname + '/../lib/log4js.json'),
mtime = new Date(),
fakeFS = {
config: { appenders: [ { type: 'console', layout: { type: 'messagePassThrough' } } ],
levels: { 'a-test' : 'INFO' } },
readdirSync: function(dir) {
return require('fs').readdirSync(dir);
},
readFileSync: function (file, encoding) {
fileRead += 1;
assert.isString(file);
assert.equal(file, modulePath);
assert.equal(encoding, 'utf8');
return JSON.stringify(fakeFS.config);
},
statSync: function (path) {
pathsChecked.push(path);
if (path === modulePath) {
return { mtime: mtime };
} else {
throw new Error("no such file");
}
}
},
fakeConsole = {
'name': 'console',
'appender': function () {
return function(evt) { logEvents.push(evt); };
},
'configure': function (config) {
return fakeConsole.appender();
}
},
setIntervalCallback,
fakeSetInterval = function(cb, timeout) {
setIntervalCallback = cb;
},
log4js = sandbox.require(
'../lib/log4js',
{
requires: {
'fs': fakeFS,
'./appenders/console': fakeConsole
},
globals: {
'console': fakeConsole,
'setInterval' : fakeSetInterval,
}
}
);
log4js.configure(modulePath, { reloadSecs: 3 });
logger = log4js.getLogger('a-test');
logger.info("info1");
logger.debug("debug2 - should be ignored");
setIntervalCallback();
logger.info("info3");
logger.debug("debug4");
return [ pathsChecked, logEvents, modulePath, fileRead ];
},
'should only read the configuration file once': function(args) {
var fileRead = args[3];
assert.equal(fileRead, 1);
},
'should configure log4js from first log4js.json found': function(args) {
var logEvents = args[1];
assert.equal(logEvents.length, 2);
assert.equal(logEvents[0].data[0], 'info1');
assert.equal(logEvents[1].data[0], 'info3');
}
}
}).export(module);


@@ -1,362 +0,0 @@
var vows = require('vows'),
assert = require('assert');
vows.describe('log4js').addBatch({
'getLogger': {
topic: function() {
var log4js = require('../lib/log4js')();
log4js.clearAppenders();
var logger = log4js.getLogger('tests');
logger.setLevel("DEBUG");
return logger;
},
'should take a category and return a logger': function(logger) {
assert.equal(logger.category, 'tests');
assert.equal(logger.level.toString(), "DEBUG");
assert.isFunction(logger.debug);
assert.isFunction(logger.info);
assert.isFunction(logger.warn);
assert.isFunction(logger.error);
assert.isFunction(logger.fatal);
},
'log events' : {
topic: function(logger) {
var events = [];
logger.addListener("log", function (logEvent) { events.push(logEvent); });
logger.debug("Debug event");
logger.trace("Trace event 1");
logger.trace("Trace event 2");
logger.warn("Warning event");
return events;
},
'should emit log events': function(events) {
assert.equal(events[0].level.toString(), 'DEBUG');
assert.equal(events[0].message, 'Debug event');
assert.instanceOf(events[0].startTime, Date);
},
'should not emit events of a lower level': function(events) {
assert.length(events, 2);
assert.equal(events[1].level.toString(), 'WARN');
}
},
},
'fileAppender': {
topic: function() {
var appender, logmessages = [], thing = "thing", fakeFS = {
openSync: function() {
assert.equal(arguments[0], './tmp-tests.log');
assert.equal(arguments[1], 'a');
assert.equal(arguments[2], 0644);
return thing;
},
write: function() {
assert.equal(arguments[0], thing);
assert.isString(arguments[1]);
assert.isNull(arguments[2]);
assert.equal(arguments[3], "utf8");
logmessages.push(arguments[1]);
},
watchFile: function() {
throw new Error("watchFile should not be called if logSize is not defined");
}
},
log4js = require('../lib/log4js')(fakeFS);
log4js.clearAppenders();
appender = log4js.fileAppender('./tmp-tests.log', log4js.messagePassThroughLayout);
log4js.addAppender(appender, 'file-test');
var logger = log4js.getLogger('file-test');
logger.debug("this is a test");
return logmessages;
},
'should write log messages to file': function(logmessages) {
assert.length(logmessages, 1);
assert.equal(logmessages, "this is a test\n");
}
},
'fileAppender - with rolling based on size and number of files to keep': {
topic: function() {
var watchCb,
filesOpened = [],
filesClosed = [],
filesRenamed = [],
newFilenames = [],
existingFiles = ['tests.log'],
log4js = require('../lib/log4js')({
watchFile: function(file, options, callback) {
assert.equal(file, 'tests.log');
assert.equal(options.persistent, false);
assert.equal(options.interval, 30000);
assert.isFunction(callback);
watchCb = callback;
},
openSync: function(file) {
assert.equal(file, 'tests.log');
filesOpened.push(file);
return file;
},
statSync: function(file) {
if (existingFiles.indexOf(file) < 0) {
throw new Error("this file doesn't exist");
} else {
return true;
}
},
renameSync: function(oldFile, newFile) {
filesRenamed.push(oldFile);
existingFiles.push(newFile);
},
closeSync: function(file) {
//it should always be closing tests.log
assert.equal(file, 'tests.log');
filesClosed.push(file);
}
});
var appender = log4js.fileAppender('tests.log', log4js.messagePassThroughLayout, 1024, 2, 30);
return [watchCb, filesOpened, filesClosed, filesRenamed, existingFiles];
},
'should close current log file, rename all old ones, open new one on rollover': function(args) {
var watchCb = args[0], filesOpened = args[1], filesClosed = args[2], filesRenamed = args[3], existingFiles = args[4];
assert.isFunction(watchCb);
//tell the watchCb that the file is below the threshold
watchCb({ size: 891 }, { size: 0 });
//filesOpened should still be the first one.
assert.length(filesOpened, 1);
//tell the watchCb that the file is now over the threshold
watchCb({ size: 1053 }, { size: 891 });
//it should have closed the first log file.
assert.length(filesClosed, 1);
//it should have renamed the previous log file
assert.length(filesRenamed, 1);
//and we should have two files now
assert.length(existingFiles, 2);
assert.deepEqual(existingFiles, ['tests.log', 'tests.log.1']);
//and opened a new log file.
assert.length(filesOpened, 2);
//now tell the watchCb that we've flipped over the threshold again
watchCb({ size: 1025 }, { size: 123 });
//it should have closed the old file
assert.length(filesClosed, 2);
//it should have renamed both the old log file, and the previous '.1' file
assert.length(filesRenamed, 3);
assert.deepEqual(filesRenamed, ['tests.log', 'tests.log.1', 'tests.log' ]);
//it should have renamed 2 more files
assert.length(existingFiles, 4);
assert.deepEqual(existingFiles, ['tests.log', 'tests.log.1', 'tests.log.2', 'tests.log.1']);
//and opened a new log file
assert.length(filesOpened, 3);
//tell the watchCb we've flipped again.
watchCb({ size: 1024 }, { size: 234 });
//close the old one again.
assert.length(filesClosed, 3);
//it should have renamed the old log file and the 2 backups, with the last one being overwritten.
assert.length(filesRenamed, 5);
assert.deepEqual(filesRenamed, ['tests.log', 'tests.log.1', 'tests.log', 'tests.log.1', 'tests.log' ]);
//it should have renamed 2 more files
assert.length(existingFiles, 6);
assert.deepEqual(existingFiles, ['tests.log', 'tests.log.1', 'tests.log.2', 'tests.log.1', 'tests.log.2', 'tests.log.1']);
//and opened a new log file
assert.length(filesOpened, 4);
}
},
'configure' : {
topic: function() {
var messages = {}, fakeFS = {
openSync: function(file) {
return file;
},
write: function(file, message) {
if (!messages.hasOwnProperty(file)) {
messages[file] = [];
}
messages[file].push(message);
},
readFileSync: function(file, encoding) {
return require('fs').readFileSync(file, encoding);
},
watchFile: function(file) {
messages.watchedFile = file;
}
},
log4js = require('../lib/log4js')(fakeFS);
return [ log4js, messages ];
},
'should load appender configuration from a json file': function(args) {
var log4js = args[0], messages = args[1];
delete messages['tmp-tests.log'];
log4js.clearAppenders();
//this config file defines one file appender (to ./tmp-tests.log)
//and sets the log level for "tests" to WARN
log4js.configure('test/log4js.json');
var logger = log4js.getLogger("tests");
logger.info('this should not be written to the file');
logger.warn('this should be written to the file');
assert.length(messages['tmp-tests.log'], 1);
assert.equal(messages['tmp-tests.log'][0], 'this should be written to the file\n');
},
'should handle logLevelFilter configuration': function(args) {
var log4js = args[0], messages = args[1];
delete messages['tmp-tests.log'];
delete messages['tmp-tests-warnings.log'];
log4js.clearAppenders();
log4js.configure('test/with-logLevelFilter.json');
var logger = log4js.getLogger("tests");
logger.info('main');
logger.error('both');
logger.warn('both');
logger.debug('main');
assert.length(messages['tmp-tests.log'], 4);
assert.length(messages['tmp-tests-warnings.log'], 2);
assert.deepEqual(messages['tmp-tests.log'], ['main\n','both\n','both\n','main\n']);
assert.deepEqual(messages['tmp-tests-warnings.log'], ['both\n','both\n']);
},
'should handle fileAppender with log rolling' : function(args) {
var log4js = args[0], messages = args[1];
delete messages['tmp-test.log'];
log4js.configure('test/with-log-rolling.json');
assert.equal(messages.watchedFile, 'tmp-test.log');
}
},
'with no appenders defined' : {
topic: function() {
var logger, message, log4js = require('../lib/log4js')(null, function (msg) { message = msg; } );
logger = log4js.getLogger("some-logger");
logger.debug("This is a test");
return message;
},
'should default to the console appender': function(message) {
assert.isTrue(/This is a test$/.test(message));
}
},
'default setup': {
topic: function() {
var pathsChecked = [],
message,
logger,
fakeFS = {
readFileSync: function (file, encoding) {
assert.equal(file, '/path/to/config/log4js.json');
assert.equal(encoding, 'utf8');
return '{ "appenders" : [ { "type": "console", "layout": { "type": "messagePassThrough" }} ] }';
},
statSync: function (path) {
pathsChecked.push(path);
if (path === '/path/to/config/log4js.json') {
return true;
} else {
throw new Error("no such file");
}
}
},
fakeConsoleLog = function (msg) { message = msg; },
fakeRequirePath = [ '/a/b/c', '/some/other/path', '/path/to/config', '/some/later/directory' ],
log4js = require('../lib/log4js')(fakeFS, fakeConsoleLog, fakeRequirePath),
logger = log4js.getLogger('a-test');
logger.debug("this is a test");
return [ pathsChecked, message ];
},
'should check current directory, require paths, and finally the module dir for log4js.json': function(args) {
var pathsChecked = args[0];
assert.deepEqual(pathsChecked, [
'log4js.json',
'/a/b/c/log4js.json',
'/some/other/path/log4js.json',
'/path/to/config/log4js.json',
'/some/later/directory/log4js.json',
require('path').normalize(__dirname + '/../lib/log4js.json')
]);
},
'should configure log4js from first log4js.json found': function(args) {
var message = args[1];
assert.equal(message, 'this is a test');
}
},
'colouredLayout': {
topic: function() {
return require('../lib/log4js')().colouredLayout;
},
'should apply level colour codes to output': function(layout) {
var output = layout({
message: "nonsense",
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: "cheese",
level: {
colour: "green",
toString: function() { return "ERROR"; }
}
});
assert.equal(output, '\033[90m[2010-12-05 14:18:30.045] \033[39m\033[32m[ERROR] \033[39m\033[90mcheese - \033[39mnonsense');
}
},
'messagePassThroughLayout': {
topic: function() {
return require('../lib/log4js')().messagePassThroughLayout;
},
'should take a logevent and output only the message' : function(layout) {
assert.equal(layout({
message: "nonsense",
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
categoryName: "cheese",
level: {
colour: "green",
toString: function() { return "ERROR"; }
}
}), "nonsense");
}
},
'logLevelFilter': {
topic: function() {
var log4js = require('../lib/log4js')(), logEvents = [], logger;
log4js.clearAppenders();
log4js.addAppender(log4js.logLevelFilter('ERROR', function(evt) { logEvents.push(evt); }));
logger = log4js.getLogger();
logger.debug('this should not trigger an event');
logger.warn('neither should this');
logger.error('this should, though');
logger.fatal('so should this');
return logEvents;
},
'should only pass log events greater than or equal to its own level' : function(logEvents) {
assert.length(logEvents, 2);
assert.equal(logEvents[0].message, 'this should, though');
assert.equal(logEvents[1].message, 'so should this');
}
},
'Date extensions': {
topic: function() {
require('../lib/log4js');
return new Date(2010, 0, 11, 14, 31, 30, 5);
},
'should add a toFormattedString method to Date': function(date) {
assert.isFunction(date.toFormattedString);
},
'should default to a format': function(date) {
assert.equal(date.toFormattedString(), '2010-01-11 14:31:30.005');
}
}
}).export(module);

241
test/multiprocess-test.js Normal file

@@ -0,0 +1,241 @@
var vows = require('vows'),
sandbox = require('sandboxed-module'),
assert = require('assert');
function makeFakeNet() {
return {
logEvents: [],
data: [],
cbs: {},
createConnectionCalled: 0,
fakeAppender: function(logEvent) {
this.logEvents.push(logEvent);
},
createConnection: function(port, host) {
var fakeNet = this;
this.port = port;
this.host = host;
this.createConnectionCalled += 1;
return {
on: function(evt, cb) {
fakeNet.cbs[evt] = cb;
},
write: function(data, encoding) {
fakeNet.data.push(data);
fakeNet.encoding = encoding;
},
end: function() {
fakeNet.closeCalled = true;
}
};
},
createServer: function(cb) {
var fakeNet = this;
cb({
remoteAddress: '1.2.3.4',
remotePort: '1234',
setEncoding: function(encoding) {
fakeNet.encoding = encoding;
},
on: function(event, cb) {
fakeNet.cbs[event] = cb;
}
});
return {
listen: function(port, host) {
fakeNet.port = port;
fakeNet.host = host;
}
};
}
};
}
vows.describe('Multiprocess Appender').addBatch({
'worker': {
topic: function() {
var fakeNet = makeFakeNet(),
appender = sandbox.require(
'../lib/appenders/multiprocess',
{
requires: {
'net': fakeNet
}
}
).appender({ mode: 'worker', loggerPort: 1234, loggerHost: 'pants' });
//don't need a proper log event for the worker tests
appender('before connect');
fakeNet.cbs['connect']();
appender('after connect');
fakeNet.cbs['close'](true);
appender('after error, before connect');
fakeNet.cbs['connect']();
appender('after error, after connect');
return fakeNet;
},
'should open a socket to the loggerPort and loggerHost': function(net) {
assert.equal(net.port, 1234);
assert.equal(net.host, 'pants');
},
'should buffer messages written before socket is connected': function(net) {
assert.equal(net.data[0], JSON.stringify('before connect'));
},
'should write log messages to socket as json strings with a terminator string': function(net) {
assert.equal(net.data[0], JSON.stringify('before connect'));
assert.equal(net.data[1], '__LOG4JS__');
assert.equal(net.data[2], JSON.stringify('after connect'));
assert.equal(net.data[3], '__LOG4JS__');
assert.equal(net.encoding, 'utf8');
},
'should attempt to re-open the socket on error': function(net) {
assert.equal(net.data[4], JSON.stringify('after error, before connect'));
assert.equal(net.data[5], '__LOG4JS__');
assert.equal(net.data[6], JSON.stringify('after error, after connect'));
assert.equal(net.data[7], '__LOG4JS__');
assert.equal(net.createConnectionCalled, 2);
}
},
'worker with timeout': {
topic: function() {
var fakeNet = makeFakeNet(),
appender = sandbox.require(
'../lib/appenders/multiprocess',
{
requires: {
'net': fakeNet
}
}
).appender({ mode: 'worker' });
//don't need a proper log event for the worker tests
appender('before connect');
fakeNet.cbs['connect']();
appender('after connect');
fakeNet.cbs['timeout']();
appender('after timeout, before close');
fakeNet.cbs['close']();
appender('after close, before connect');
fakeNet.cbs['connect']();
appender('after close, after connect');
return fakeNet;
},
'should attempt to re-open the socket': function(net) {
//skipping the __LOG4JS__ separators
assert.equal(net.data[0], JSON.stringify('before connect'));
assert.equal(net.data[2], JSON.stringify('after connect'));
assert.equal(net.data[4], JSON.stringify('after timeout, before close'));
assert.equal(net.data[6], JSON.stringify('after close, before connect'));
assert.equal(net.data[8], JSON.stringify('after close, after connect'));
assert.equal(net.createConnectionCalled, 2);
}
},
'worker defaults': {
topic: function() {
var fakeNet = makeFakeNet(),
appender = sandbox.require(
'../lib/appenders/multiprocess',
{
requires: {
'net': fakeNet
}
}
).appender({ mode: 'worker' });
return fakeNet;
},
'should open a socket to localhost:5000': function(net) {
assert.equal(net.port, 5000);
assert.equal(net.host, 'localhost');
}
},
'master': {
topic: function() {
var fakeNet = makeFakeNet(),
appender = sandbox.require(
'../lib/appenders/multiprocess',
{
requires: {
'net': fakeNet
}
}
).appender({ mode: 'master',
loggerHost: 'server',
loggerPort: 1234,
actualAppender: fakeNet.fakeAppender.bind(fakeNet)
});
appender('this should be sent to the actual appender directly');
return fakeNet;
},
'should listen for log messages on loggerPort and loggerHost': function(net) {
assert.equal(net.port, 1234);
assert.equal(net.host, 'server');
},
'should return the underlying appender': function(net) {
assert.equal(net.logEvents[0], 'this should be sent to the actual appender directly');
},
'when a client connects': {
topic: function(net) {
var logString = JSON.stringify({ level: { level: 10000, levelStr: 'DEBUG' }, data: ['some debug']}) + '__LOG4JS__';
net.cbs['data'](JSON.stringify({ level: { level: 40000, levelStr: 'ERROR' }, data: ['an error message'] }) + '__LOG4JS__');
net.cbs['data'](logString.substring(0, 10));
net.cbs['data'](logString.substring(10));
net.cbs['data'](logString + logString + logString);
net.cbs['end'](JSON.stringify({ level: { level: 50000, levelStr: 'FATAL' }, data: ["that's all folks"] }) + '__LOG4JS__');
net.cbs['data']('bad message__LOG4JS__');
return net;
},
'should parse log messages into log events and send to appender': function(net) {
assert.equal(net.logEvents[1].level.toString(), 'ERROR');
assert.equal(net.logEvents[1].data[0], 'an error message');
assert.equal(net.logEvents[1].remoteAddress, '1.2.3.4');
assert.equal(net.logEvents[1].remotePort, '1234');
},
'should parse log messages split into multiple chunks': function(net) {
assert.equal(net.logEvents[2].level.toString(), 'DEBUG');
assert.equal(net.logEvents[2].data[0], 'some debug');
assert.equal(net.logEvents[2].remoteAddress, '1.2.3.4');
assert.equal(net.logEvents[2].remotePort, '1234');
},
'should parse multiple log messages in a single chunk': function(net) {
assert.equal(net.logEvents[3].data[0], 'some debug');
assert.equal(net.logEvents[4].data[0], 'some debug');
assert.equal(net.logEvents[5].data[0], 'some debug');
},
'should handle log messages sent as part of end event': function(net) {
assert.equal(net.logEvents[6].data[0], "that's all folks");
},
'should handle unparseable log messages': function(net) {
assert.equal(net.logEvents[7].level.toString(), 'ERROR');
assert.equal(net.logEvents[7].categoryName, 'log4js');
assert.equal(net.logEvents[7].data[0], 'Unable to parse log:');
assert.equal(net.logEvents[7].data[1], 'bad message');
}
}
},
'master defaults': {
topic: function() {
var fakeNet = makeFakeNet(),
appender = sandbox.require(
'../lib/appenders/multiprocess',
{
requires: {
'net': fakeNet
}
}
).appender({ mode: 'master' });
return fakeNet;
},
'should listen for log messages on localhost:5000': function(net) {
assert.equal(net.port, 5000);
assert.equal(net.host, 'localhost');
}
}
}).exportTo(module);

260
test/nolog-test.js Normal file

@@ -0,0 +1,260 @@
var vows = require('vows')
, assert = require('assert')
, levels = require('../lib/levels');
function MockLogger() {
var that = this;
this.messages = [];
this.log = function(level, message, exception) {
that.messages.push({ level: level, message: message });
};
this.isLevelEnabled = function(level) {
return level.isGreaterThanOrEqualTo(that.level);
};
this.level = levels.TRACE;
}
function MockRequest(remoteAddr, method, originalUrl) {
this.socket = { remoteAddress: remoteAddr };
this.originalUrl = originalUrl;
this.method = method;
this.httpVersionMajor = '5';
this.httpVersionMinor = '0';
this.headers = {};
}
function MockResponse(statusCode) {
this.statusCode = statusCode;
this.end = function(chunk, encoding) {
};
}
vows.describe('log4js connect logger').addBatch({
'getConnectLoggerModule': {
topic: function() {
var clm = require('../lib/connect-logger');
return clm;
},
'should return a "connect logger" factory' : function(clm) {
assert.isObject(clm);
},
'nolog String' : {
topic: function(clm) {
var ml = new MockLogger();
var cl = clm.connectLogger(ml, { nolog: "\\.gif" });
return {cl: cl, ml: ml};
},
'check unmatch url request': {
topic: function(d){
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.png'); // not gif
var res = new MockResponse(200);
d.cl(req, res, function() { });
res.end('chunk', 'encoding');
return d.ml.messages;
}
, 'check message': function(messages){
assert.isArray(messages);
assert.equal(messages.length, 1);
assert.ok(levels.INFO.isEqualTo(messages[0].level));
assert.include(messages[0].message, 'GET');
assert.include(messages[0].message, 'http://url');
assert.include(messages[0].message, 'my.remote.addr');
assert.include(messages[0].message, '200');
messages.pop();
}
},
'check match url request': {
topic: function(d) {
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.gif'); // gif
var res = new MockResponse(200);
d.cl(req, res, function() { });
res.end('chunk', 'encoding');
return d.ml.messages;
}
, 'check message': function(messages) {
assert.isArray(messages);
assert.equal(messages.length, 0);
}
}
},
'nolog Strings' : {
topic: function(clm) {
var ml = new MockLogger();
var cl = clm.connectLogger(ml, {nolog: "\\.gif|\\.jpe?g"});
return {cl: cl, ml: ml};
},
'check unmatch url request (png)': {
topic: function(d){
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.png'); // not gif
var res = new MockResponse(200);
d.cl(req, res, function() { });
res.end('chunk', 'encoding');
return d.ml.messages;
}
, 'check message': function(messages){
assert.isArray(messages);
assert.equal(messages.length, 1);
assert.ok(levels.INFO.isEqualTo(messages[0].level));
assert.include(messages[0].message, 'GET');
assert.include(messages[0].message, 'http://url');
assert.include(messages[0].message, 'my.remote.addr');
assert.include(messages[0].message, '200');
messages.pop();
}
},
'check match url request (gif)': {
topic: function(d) {
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.gif'); // gif
var res = new MockResponse(200);
d.cl(req, res, function() { });
res.end('chunk', 'encoding');
return d.ml.messages;
}
, 'check message': function(messages) {
assert.isArray(messages);
assert.equal(messages.length, 0);
}
},
'check match url request (jpeg)': {
topic: function(d) {
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.jpeg'); // jpeg
var res = new MockResponse(200);
d.cl(req, res, function() { });
res.end('chunk', 'encoding');
return d.ml.messages;
}
, 'check message': function(messages) {
assert.isArray(messages);
assert.equal(messages.length, 0);
}
}
},
'nolog Array<String>' : {
topic: function(clm) {
var ml = new MockLogger();
var cl = clm.connectLogger(ml, {nolog: ["\\.gif", "\\.jpe?g"]});
return {cl: cl, ml: ml};
},
'check unmatch url request (png)': {
topic: function(d){
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.png'); // not gif
var res = new MockResponse(200);
d.cl(req, res, function() { });
res.end('chunk', 'encoding');
return d.ml.messages;
}
, 'check message': function(messages){
assert.isArray(messages);
assert.equal(messages.length, 1);
assert.ok(levels.INFO.isEqualTo(messages[0].level));
assert.include(messages[0].message, 'GET');
assert.include(messages[0].message, 'http://url');
assert.include(messages[0].message, 'my.remote.addr');
assert.include(messages[0].message, '200');
messages.pop();
}
},
'check match url request (gif)': {
topic: function(d) {
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.gif'); // gif
var res = new MockResponse(200);
d.cl(req, res, function() { });
res.end('chunk', 'encoding');
return d.ml.messages;
}
, 'check message': function(messages) {
assert.isArray(messages);
assert.equal(messages.length, 0);
}
},
'check match url request (jpeg)': {
topic: function(d) {
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.jpeg'); // jpeg
var res = new MockResponse(200);
d.cl(req, res, function() { });
res.end('chunk', 'encoding');
return d.ml.messages;
}
, 'check message': function(messages) {
assert.isArray(messages);
assert.equal(messages.length, 0);
}
},
},
'nolog RegExp' : {
topic: function(clm) {
var ml = new MockLogger();
var cl = clm.connectLogger(ml, {nolog: /\.gif|\.jpe?g/});
return {cl: cl, ml: ml};
},
'check unmatch url request (png)': {
topic: function(d){
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.png'); // not gif
var res = new MockResponse(200);
d.cl(req, res, function() { });
res.end('chunk', 'encoding');
return d.ml.messages;
}
, 'check message': function(messages){
assert.isArray(messages);
assert.equal(messages.length, 1);
assert.ok(levels.INFO.isEqualTo(messages[0].level));
assert.include(messages[0].message, 'GET');
assert.include(messages[0].message, 'http://url');
assert.include(messages[0].message, 'my.remote.addr');
assert.include(messages[0].message, '200');
messages.pop();
}
},
'check match url request (gif)': {
topic: function(d) {
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.gif'); // gif
var res = new MockResponse(200);
d.cl(req, res, function() { });
res.end('chunk', 'encoding');
return d.ml.messages;
}
, 'check message': function(messages) {
assert.isArray(messages);
assert.equal(messages.length, 0);
}
},
'check match url request (jpeg)': {
topic: function(d) {
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.jpeg'); // jpeg
var res = new MockResponse(200);
d.cl(req, res, function() { });
res.end('chunk', 'encoding');
return d.ml.messages;
}
, 'check message': function(messages) {
assert.isArray(messages);
assert.equal(messages.length, 0);
}
},
}
}
}).export(module);


@@ -0,0 +1,79 @@
// This test shows an asymmetry between setLevel and isLevelEnabled (in log4js-node@0.4.3 and earlier):
// 1) setLevel("foo") works, but setLevel(log4js.levels.foo) silently does not (sets the level to TRACE).
// 2) isLevelEnabled("foo") works as does isLevelEnabled(log4js.levels.foo).
//
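// A minimal sketch of that asymmetry (hypothetical usage, not part of this test file;
// it assumes the log4js-node@0.4.3 behaviour described above):
//
//   var logger = log4js.getLogger('example');
//   logger.setLevel('ERROR');                    // works: effective level is ERROR
//   logger.isLevelEnabled('ERROR');              // true
//   logger.isLevelEnabled(log4js.levels.ERROR);  // also true - both argument types work here
//   logger.setLevel(log4js.levels.ERROR);        // silently falls back to TRACE
//   logger.isLevelEnabled('DEBUG');              // unexpectedly true, because the level is now TRACE
//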
// Basic set up
var vows = require('vows');
var assert = require('assert');
var log4js = require('../lib/log4js');
var logger = log4js.getLogger('test-setLevel-asymmetry');
// uncomment one or other of the following to see progress (or not) while running the tests
// var showProgress = console.log;
var showProgress = function() {};
// Define the array of levels as string to iterate over.
var strLevels= ['Trace','Debug','Info','Warn','Error','Fatal'];
var log4jsLevels =[];
// populate an array with the log4js.levels that match the strLevels.
// Would be nice if we could iterate over log4js.levels instead, but log4js.levels.toLevel prevents that for now.
strLevels.forEach(function(l) {
log4jsLevels.push(log4js.levels.toLevel(l));
});
// We are going to iterate over this object's properties to define an exhaustive list of vows.
var levelTypes = {
'string': strLevels,
'log4js.levels.level': log4jsLevels,
}
// Set up the basic vows batch for this test
var batch = {
setLevel: {
}
}
showProgress('Populating batch object...');
// Populating the batch object programmatically,
// as I don't have the patience to manually populate it with the (strLevels.length x levelTypes.length) ^ 2 = 144 possible test combinations
for (var type in levelTypes) {
var context = 'is called with a '+type;
var levelsToTest = levelTypes[type];
showProgress('Setting up the vows context for '+context);
batch.setLevel[context]= {};
levelsToTest.forEach( function(level) {
var subContext = 'of '+level;
var log4jsLevel=log4js.levels.toLevel(level.toString());
showProgress('Setting up the vows sub-context for '+subContext);
batch.setLevel[context][subContext] = {topic: level};
for (var comparisonType in levelTypes) {
levelTypes[comparisonType].forEach(function(comparisonLevel) {
var t = type;
var ct = comparisonType;
var expectedResult = log4jsLevel.isLessThanOrEqualTo(comparisonLevel);
var vow = 'isLevelEnabled('+comparisonLevel+') called with a '+comparisonType+' should return '+expectedResult;
showProgress('Setting up the vows vow for '+vow);
batch.setLevel[context][subContext][vow] = function(levelToSet) {
logger.setLevel(levelToSet);
showProgress('*** Checking setLevel( '+level+' ) of type '+t+', and isLevelEnabled( '+comparisonLevel+' ) of type '+ct+'. Expecting: '+expectedResult);
assert.equal(logger.isLevelEnabled(comparisonLevel), expectedResult, 'Failed: calling setLevel( '+level+' ) with type '+type+', isLevelEnabled( '+comparisonLevel+' ) of type '+comparisonType+' did not return '+expectedResult);
};
})
}
});
}
showProgress('Running tests...');
vows.describe('log4js setLevel asymmetry fix').addBatch(batch).export(module);

168
test/smtpAppender-test.js Normal file

@@ -0,0 +1,168 @@
var vows = require('vows'),
assert = require('assert'),
log4js = require('../lib/log4js'),
sandbox = require('sandboxed-module');
function setupLogging(category, options) {
var msgs = [];
var fakeMailer = {
createTransport: function (name, options) {
return {
config: options,
sendMail: function (msg, callback) {
msgs.push(msg);
callback(null, true);
}
};
}
};
var smtpModule = sandbox.require('../lib/appenders/smtp', {
requires: {
'nodemailer': fakeMailer
}
});
log4js.addAppender(smtpModule.configure(options), category);
return {
logger: log4js.getLogger(category),
mailer: fakeMailer,
results: msgs
};
}
function checkMessages (result, sender, subject) {
for (var i = 0; i < result.results.length; ++i) {
assert.equal(result.results[i].from, sender);
assert.equal(result.results[i].to, 'recipient@domain.com');
assert.equal(result.results[i].subject, subject ? subject : 'Log event #' + (i+1));
assert.ok(new RegExp('.+Log event #' + (i+1) + '\n$').test(result.results[i].text));
}
}
log4js.clearAppenders();
vows.describe('log4js smtpAppender').addBatch({
'minimal config': {
topic: function() {
var setup = setupLogging('minimal config', {
recipients: 'recipient@domain.com',
transport: "SMTP",
SMTP: {
port: 25,
auth: {
user: 'user@domain.com'
}
}
});
setup.logger.info('Log event #1');
return setup;
},
'there should be one message only': function (result) {
assert.equal(result.results.length, 1);
},
'message should contain proper data': function (result) {
checkMessages(result);
}
},
'fancy config': {
topic: function() {
var setup = setupLogging('fancy config', {
recipients: 'recipient@domain.com',
sender: 'sender@domain.com',
subject: 'This is subject',
transport: "SMTP",
SMTP: {
port: 25,
auth: {
user: 'user@domain.com'
}
}
});
setup.logger.info('Log event #1');
return setup;
},
'there should be one message only': function (result) {
assert.equal(result.results.length, 1);
},
'message should contain proper data': function (result) {
checkMessages(result, 'sender@domain.com', 'This is subject');
}
},
'separate email for each event': {
topic: function() {
var self = this;
var setup = setupLogging('separate email for each event', {
recipients: 'recipient@domain.com',
transport: "SMTP",
SMTP: {
port: 25,
auth: {
user: 'user@domain.com'
}
}
});
setTimeout(function () {
setup.logger.info('Log event #1');
}, 0);
setTimeout(function () {
setup.logger.info('Log event #2');
}, 500);
setTimeout(function () {
setup.logger.info('Log event #3');
}, 1050);
setTimeout(function () {
self.callback(null, setup);
}, 2100);
},
'there should be three messages': function (result) {
assert.equal(result.results.length, 3);
},
'messages should contain proper data': function (result) {
checkMessages(result);
}
},
'multiple events in one email': {
topic: function() {
var self = this;
var setup = setupLogging('multiple events in one email', {
recipients: 'recipient@domain.com',
sendInterval: 1,
transport: "SMTP",
SMTP: {
port: 25,
auth: {
user: 'user@domain.com'
}
}
});
setTimeout(function () {
setup.logger.info('Log event #1');
}, 0);
setTimeout(function () {
setup.logger.info('Log event #2');
}, 500);
setTimeout(function () {
setup.logger.info('Log event #3');
}, 1050);
setTimeout(function () {
self.callback(null, setup);
}, 2100);
},
'there should be two messages': function (result) {
assert.equal(result.results.length, 2);
},
'messages should contain proper data': function (result) {
assert.equal(result.results[0].to, 'recipient@domain.com');
assert.equal(result.results[0].subject, 'Log event #1');
assert.equal(result.results[0].text.match(new RegExp('.+Log event #[1-2]$', 'gm')).length, 2);
assert.equal(result.results[1].to, 'recipient@domain.com');
assert.equal(result.results[1].subject, 'Log event #3');
assert.ok(new RegExp('.+Log event #3\n$').test(result.results[1].text));
}
}
}).export(module);
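Outside the sandboxed test, the same appender could be wired up roughly as below. Only options exercised by the vows above are shown; the SMTP host and credentials are placeholders, not values from this change.

```javascript
// Sketch only: mirrors the "fancy config" and batching cases above.
var log4js = require('log4js');

log4js.configure({
  appenders: [{
    type: 'smtp',
    category: 'mailer',
    recipients: 'recipient@domain.com',
    sender: 'sender@domain.com',     // optional, as in the "fancy config" vow
    subject: 'Application log',      // optional; the default appears to be the first event's message
    sendInterval: 60,                // batch events for 60s per mail; omit to send one mail per event
    transport: 'SMTP',
    SMTP: { host: 'smtp.example.com', port: 25, auth: { user: 'user@domain.com', pass: 'secret' } }
  }]
});

log4js.getLogger('mailer').error('something went wrong');
```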


@@ -0,0 +1,130 @@
var vows = require('vows')
, assert = require('assert')
, fs = require('fs')
, semver = require('semver')
, streams
, DateRollingFileStream
, testTime = new Date(2012, 8, 12, 10, 37, 11);
if (semver.satisfies(process.version, '>=0.10.0')) {
streams = require('stream');
} else {
streams = require('readable-stream');
}
DateRollingFileStream = require('../../lib/streams').DateRollingFileStream;
function cleanUp(filename) {
return function() {
fs.unlink(filename);
};
}
function now() {
return testTime.getTime();
}
vows.describe('DateRollingFileStream').addBatch({
'arguments': {
topic: new DateRollingFileStream(__dirname + '/test-date-rolling-file-stream-1', 'yyyy-mm-dd.hh'),
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-1'),
'should take a filename and a pattern and return a WritableStream': function(stream) {
assert.equal(stream.filename, __dirname + '/test-date-rolling-file-stream-1');
assert.equal(stream.pattern, 'yyyy-mm-dd.hh');
assert.instanceOf(stream, streams.Writable);
},
'with default settings for the underlying stream': function(stream) {
assert.equal(stream.theStream.mode, 420);
assert.equal(stream.theStream.flags, 'a');
//encoding is not available on the underlying stream
//assert.equal(stream.encoding, 'utf8');
}
},
'default arguments': {
topic: new DateRollingFileStream(__dirname + '/test-date-rolling-file-stream-2'),
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-2'),
'pattern should be .yyyy-MM-dd': function(stream) {
assert.equal(stream.pattern, '.yyyy-MM-dd');
}
},
'with stream arguments': {
topic: new DateRollingFileStream(__dirname + '/test-date-rolling-file-stream-3', 'yyyy-MM-dd', { mode: 0666 }),
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-3'),
'should pass them to the underlying stream': function(stream) {
assert.equal(stream.theStream.mode, 0666);
}
},
'with stream arguments but no pattern': {
topic: new DateRollingFileStream(__dirname + '/test-date-rolling-file-stream-4', { mode: 0666 }),
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-4'),
'should pass them to the underlying stream': function(stream) {
assert.equal(stream.theStream.mode, 0666);
},
'should use default pattern': function(stream) {
assert.equal(stream.pattern, '.yyyy-MM-dd');
}
},
'with a pattern of .yyyy-MM-dd': {
topic: function() {
var that = this,
stream = new DateRollingFileStream(__dirname + '/test-date-rolling-file-stream-5', '.yyyy-MM-dd', null, now);
stream.write("First message\n", 'utf8', function() {
that.callback(null, stream);
});
},
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-5'),
'should create a file with the base name': {
topic: function(stream) {
fs.readFile(__dirname + '/test-date-rolling-file-stream-5', this.callback);
},
'file should contain first message': function(result) {
assert.equal(result.toString(), "First message\n");
}
},
'when the day changes': {
topic: function(stream) {
testTime = new Date(2012, 8, 13, 0, 10, 12);
stream.write("Second message\n", 'utf8', this.callback);
},
teardown: cleanUp(__dirname + '/test-date-rolling-file-stream-5.2012-09-12'),
'the number of files': {
topic: function() {
fs.readdir(__dirname, this.callback);
},
'should be two': function(files) {
assert.equal(files.filter(function(file) { return file.indexOf('test-date-rolling-file-stream-5') > -1; }).length, 2);
}
},
'the file without a date': {
topic: function() {
fs.readFile(__dirname + '/test-date-rolling-file-stream-5', this.callback);
},
'should contain the second message': function(contents) {
assert.equal(contents.toString(), "Second message\n");
}
},
'the file with the date': {
topic: function() {
fs.readFile(__dirname + '/test-date-rolling-file-stream-5.2012-09-12', this.callback);
},
'should contain the first message': function(contents) {
assert.equal(contents.toString(), "First message\n");
}
}
}
}
}).exportTo(module);
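Used directly, the stream behaves as the vows above describe. A rough sketch follows; the installed-package require path and the `/tmp` filenames are assumptions (the test requires `'../../lib/streams'`).

```javascript
// Sketch of the constructor forms exercised above. The optional 4th argument is a
// clock function (the test injects `now` to control the current date).
var DateRollingFileStream = require('log4js/lib/streams').DateRollingFileStream;

// filename only -> pattern defaults to '.yyyy-MM-dd'
var daily = new DateRollingFileStream('/tmp/app.log');

// filename + pattern + options passed to the underlying write stream
var custom = new DateRollingFileStream('/tmp/app.log', '.yyyy-MM-dd', { mode: 0666 });

custom.write('a log line\n', 'utf8', function() {
  // when the pattern value changes (here: the day), the current file is renamed
  // to app.log.<date> and writing continues in a fresh app.log
});
```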


@@ -0,0 +1,134 @@
var vows = require('vows')
, async = require('async')
, assert = require('assert')
, events = require('events')
, fs = require('fs')
, semver = require('semver')
, streams
, RollingFileStream;
if (semver.satisfies(process.version, '>=0.10.0')) {
streams = require('stream');
} else {
streams = require('readable-stream');
}
RollingFileStream = require('../../lib/streams').RollingFileStream;
function remove(filename) {
try {
fs.unlinkSync(filename);
} catch (e) {
//doesn't really matter if it failed
}
}
vows.describe('RollingFileStream').addBatch({
'arguments': {
topic: function() {
remove(__dirname + "/test-rolling-file-stream");
return new RollingFileStream("test-rolling-file-stream", 1024, 5);
},
'should take a filename, file size in bytes, number of backups as arguments and return a Writable': function(stream) {
assert.instanceOf(stream, streams.Writable);
assert.equal(stream.filename, "test-rolling-file-stream");
assert.equal(stream.size, 1024);
assert.equal(stream.backups, 5);
},
'with default settings for the underlying stream': function(stream) {
assert.equal(stream.theStream.mode, 420);
assert.equal(stream.theStream.flags, 'a');
//encoding isn't a property on the underlying stream
//assert.equal(stream.theStream.encoding, 'utf8');
}
},
'with stream arguments': {
topic: function() {
remove(__dirname + '/test-rolling-file-stream');
return new RollingFileStream('test-rolling-file-stream', 1024, 5, { mode: 0666 });
},
'should pass them to the underlying stream': function(stream) {
assert.equal(stream.theStream.mode, 0666);
}
},
'without size': {
topic: function() {
try {
new RollingFileStream(__dirname + "/test-rolling-file-stream");
} catch (e) {
return e;
}
},
'should throw an error': function(err) {
assert.instanceOf(err, Error);
}
},
'without number of backups': {
topic: function() {
remove(__dirname + "/test-rolling-file-stream");
return new RollingFileStream(__dirname + "/test-rolling-file-stream", 1024);
},
'should default to 1 backup': function(stream) {
assert.equal(stream.backups, 1);
}
},
'writing less than the file size': {
topic: function() {
remove(__dirname + "/test-rolling-file-stream-write-less");
var that = this, stream = new RollingFileStream(__dirname + "/test-rolling-file-stream-write-less", 100);
stream.write("cheese", "utf8", function() {
stream.end();
fs.readFile(__dirname + "/test-rolling-file-stream-write-less", "utf8", that.callback);
});
},
'should write to the file': function(contents) {
assert.equal(contents, "cheese");
},
'the number of files': {
topic: function() {
fs.readdir(__dirname, this.callback);
},
'should be one': function(files) {
assert.equal(files.filter(function(file) { return file.indexOf('test-rolling-file-stream-write-less') > -1; }).length, 1);
}
}
},
'writing more than the file size': {
topic: function() {
remove(__dirname + "/test-rolling-file-stream-write-more");
remove(__dirname + "/test-rolling-file-stream-write-more.1");
var that = this, stream = new RollingFileStream(__dirname + "/test-rolling-file-stream-write-more", 45);
async.forEach([0, 1, 2, 3, 4, 5, 6], function(i, cb) {
stream.write(i +".cheese\n", "utf8", cb);
}, function() {
stream.end();
that.callback();
});
},
'the number of files': {
topic: function() {
fs.readdir(__dirname, this.callback);
},
'should be two': function(files) {
assert.equal(files.filter(
function(file) { return file.indexOf('test-rolling-file-stream-write-more') > -1; }
).length, 2);
}
},
'the first file': {
topic: function() {
fs.readFile(__dirname + "/test-rolling-file-stream-write-more", "utf8", this.callback);
},
'should contain the last two log messages': function(contents) {
assert.equal(contents, '5.cheese\n6.cheese\n');
}
},
'the second file': {
topic: function() {
fs.readFile(__dirname + '/test-rolling-file-stream-write-more.1', "utf8", this.callback);
},
'should contain the first five log messages': function(contents) {
assert.equal(contents, '0.cheese\n1.cheese\n2.cheese\n3.cheese\n4.cheese\n');
}
}
}
}).exportTo(module);
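The same goes for `RollingFileStream`; a sketch of the constructor and rollover behaviour the batch above checks (again assuming the installed-package require path):

```javascript
// Sketch: RollingFileStream(filename, maxSizeInBytes, backups[, streamOptions]).
// Omitting backups defaults to 1; omitting the size throws an Error.
var RollingFileStream = require('log4js/lib/streams').RollingFileStream;

var stream = new RollingFileStream('/tmp/app.log', 1024, 5, { mode: 0666 });

stream.write('a log line\n', 'utf8', function() {
  // once app.log exceeds 1024 bytes it is rolled: the existing content moves to
  // app.log.1 (up to the configured number of backups) and a new app.log is started
});
```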

test/with-dateFile.json Normal file

@@ -0,0 +1,17 @@
{
"appenders": [
{
"category": "tests",
"type": "dateFile",
"filename": "test/date-file-test.log",
"pattern": "-from-MM-dd",
"layout": {
"type": "messagePassThrough"
}
}
],
"levels": {
"tests": "WARN"
}
}
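This config can be fed straight to `log4js.configure`; a small sketch of what the settings above produce, assuming it is run from the repository root so the relative paths resolve:

```javascript
var log4js = require('log4js');
log4js.configure('test/with-dateFile.json');

var logger = log4js.getLogger('tests');
logger.warn('written to test/date-file-test.log, rolled with the "-from-MM-dd" pattern');
logger.info('dropped: the "tests" category level is capped at WARN');
```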


@@ -4,8 +4,7 @@
"type": "file",
"filename": "tmp-test.log",
"maxLogSize": 1024,
"backups": 3,
"pollInterval": 15
"backups": 3
}
]
}
}
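This hunk drops the `pollInterval` option, leaving size-based rolling driven by `maxLogSize` and `backups` alone. For comparison, the programmatic equivalent of the trimmed config (a sketch; only the fields visible in the hunk are shown):

```javascript
var log4js = require('log4js');

log4js.configure({
  appenders: [
    { type: 'file', filename: 'tmp-test.log', maxLogSize: 1024, backups: 3 }
  ]
});
```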


@@ -6,7 +6,7 @@
"level": "WARN",
"appender": {
"type": "file",
"filename": "tmp-tests-warnings.log",
"filename": "test/logLevelFilter-warnings.log",
"layout": {
"type": "messagePassThrough"
}
@@ -15,7 +15,7 @@
{
"category": "tests",
"type": "file",
"filename": "tmp-tests.log",
"filename": "test/logLevelFilter.log",
"layout": {
"type": "messagePassThrough"
}


@@ -1,43 +0,0 @@
require.paths.unshift("./spec/lib", "./lib");
require("jspec");
var sys = require("sys"), fs = require("fs");
quit = process.exit
print = sys.puts
readFile = function(path) {
var result;
try {
result = fs.readFileSync(path, "utf8");
} catch (e) {
throw e;
}
return result;
}
var specsFound = false;
if (process.ARGV[2]) {
specsFound = true;
JSpec.exec('spec/spec.' + process.ARGV[2] + '.js');
} else {
var files = fs.readdirSync('spec/');
files.filter(
function (file) {
return file.indexOf('spec.') === 0;
}
).forEach(
function(file) {
specsFound = true;
JSpec.exec('spec/'+file);
}
);
}
if (specsFound) {
JSpec.run({ reporter: JSpec.reporters.Terminal, failuresOnly: false });
JSpec.report();
} else {
print("No tests to run. This makes me sad.");
}