16 Commits

Author · SHA1 · Message · Date
brianc
be7af371d8 Add more versions to the travis test matrix 2016-05-03 13:07:50 -05:00
brianc
17697e98d7 Re-enable old tests 2016-05-03 13:07:29 -05:00
Chris Kinsman
bd4a87d3a0 Remove node 0.10 and add 0.12 2015-12-16 16:58:18 -08:00
Chris Kinsman
9d197a91e1 Add resume 2015-12-16 16:43:22 -08:00
Chris Kinsman
e1ce9a3948 Eliminate the detach/reattach strategy: it cannot differentiate between on/once listeners and inconsistently loses unshifted data depending on the Node version. Instead, split the stream and send it to both the copy stream and connection.stream at the same time; disconnecting the copy stream just means unpiping. Added handleCopyData to fulfill the query contract while ignoring the incoming data.
Add node 4.2.2 to Travis
Require at least PostgreSQL 9.2 so the tests can complete in Travis

Remove a test that is no longer needed since we no longer disconnect/reconnect listeners
2015-12-16 16:21:13 -08:00
Dan Robinson
d5b5c8c569 Merge pull request #33 from alexconlin/master
Change file format in example from .tdv to .tsv
2015-12-09 17:14:26 -08:00
Alex Conlin
6981ea6ac5 Change file format in example from .tdv to .tsv 2015-12-09 20:31:54 +00:00
brianc
d2b9677a02 Fix travis build 2015-03-09 08:11:33 -06:00
Brian M. Carlson
bfee86543f Remove database from .travis.yml - that is not the problem 2014-09-16 00:34:34 -04:00
Brian C
21f47220fc Merge pull request #20 from drob/error-message
Better test of error handling after initial response.
2014-09-16 00:33:21 -04:00
Dan
b1b613125f Better test of error handling after initial response. 2014-09-16 00:24:14 -04:00
Brian M. Carlson
4222053744 Last try before I abandon travis 2014-09-15 23:50:14 -04:00
Brian M. Carlson
040ed5f4da Mess with travis config 2014-09-15 23:48:14 -04:00
Brian M. Carlson
f9ee1c083a Merge branch 'master' of github.com:brianc/node-pg-copy-streams 2014-09-15 23:39:49 -04:00
Brian M. Carlson
dcfffd0670 Try new travis changes 2014-09-15 23:39:42 -04:00
Brian C
12e4ca33b0 Update README.md
dat travis badge
2014-09-15 21:10:42 -04:00
8 changed files with 49 additions and 61 deletions
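
The central change in this set is the strategy described in e1ce9a3948: rather than detaching the connection's listeners and reattaching them afterwards, the copy stream is simply piped off the same socket the driver is already reading. A minimal runnable sketch of that idea, using a PassThrough as a stand-in for connection.stream (none of this is the library's actual code):

```js
var stream = require('stream')

// Stand-in for connection.stream (the raw socket) so the sketch runs on its own.
var socket = new stream.PassThrough()
socket.on('data', function (chunk) {
  // The driver's own protocol parser keeps receiving every byte, untouched.
})

// The copy stream is just an additional pipe destination on the same socket.
var copyStream = new stream.PassThrough()
socket.pipe(copyStream)
copyStream.on('data', function (chunk) {
  // COPY data flows here at the same time.
})

socket.write('COPY protocol bytes')

// "Disconnecting copy stream just means unpiping."
socket.unpipe(copyStream)
socket.resume() // unpiping can drop the socket out of flowing mode (see _detach in the diff below)
```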

View File

@@ -1,6 +1,17 @@
language: node_js
node_js:
- "0.10"
- "0.11"
- "0.12"
- "4"
- "5"
- "6"
addons:
postgresql: "9.2"
services:
- postgresql
before_install:
- npm install npm --global
env:
- PGUSER=postgres
- PGUSER=postgres PGDATABASE=postgres

View File

@@ -1,5 +1,7 @@
## pg-copy-streams
[![Build Status](https://travis-ci.org/brianc/node-pg-copy-streams.svg)](https://travis-ci.org/brianc/node-pg-copy-streams)
COPY FROM / COPY TO for node-postgres. Stream from one database to another, and stuff.
## how? what? huh?
@@ -38,7 +40,7 @@ var copyFrom = require('pg-copy-streams').from;
pg.connect(function(err, client, done) {
var stream = client.query(copyFrom('COPY my_table FROM STDIN'));
var fileStream = fs.createReadStream('some_file.tdv')
var fileStream = fs.createReadStream('some_file.tsv')
fileStream.on('error', done);
fileStream.pipe(stream).on('finish', done).on('error', done);
});
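
For symmetry, the reverse direction looks roughly like this under the same era of the API (pg.connect callback style, and the module's `to` export shown further down in this diff); the table and file names are placeholders:

```js
var fs = require('fs')
var pg = require('pg')
var copyTo = require('pg-copy-streams').to

pg.connect(function (err, client, done) {
  if (err) return done(err)
  // Stream the table out of Postgres and into a local file.
  var stream = client.query(copyTo('COPY my_table TO STDOUT'))
  var fileStream = fs.createWriteStream('some_file.tsv')
  stream.on('error', done)
  fileStream.on('error', done)
  stream.pipe(fileStream).on('finish', done)
})
```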

View File

@@ -8,7 +8,6 @@ var util = require('util')
var CopyStreamQuery = function(text, options) {
Transform.call(this, options)
this.text = text
this._listeners = {}
this._copyOutResponse = null
this.rowCount = 0
}
@@ -20,11 +19,7 @@ var eventTypes = ['close', 'data', 'end', 'error']
CopyStreamQuery.prototype.submit = function(connection) {
connection.query(this.text)
this.connection = connection
var self = this
eventTypes.forEach(function(type) {
self._listeners[type] = connection.stream.listeners(type)
connection.stream.removeAllListeners(type)
})
this.connection.removeAllListeners('copyData')
connection.stream.pipe(this)
}
@@ -36,16 +31,12 @@ var code = {
}
CopyStreamQuery.prototype._detach = function() {
this.connection.stream.unpipe()
var self = this
eventTypes.forEach(function(type) {
self.connection.stream.removeAllListeners(type)
self._listeners[type].forEach(function(listener) {
self.connection.stream.on(type, listener)
})
})
this.connection.stream.unpipe(this)
// Unpipe can drop us out of flowing mode
this.connection.stream.resume()
}
CopyStreamQuery.prototype._transform = function(chunk, enc, cb) {
var offset = 0
if(this._remainder && chunk) {
@@ -55,7 +46,6 @@ CopyStreamQuery.prototype._transform = function(chunk, enc, cb) {
this._copyOutResponse = true
if(chunk[0] == code.E) {
this._detach()
this.connection.stream.unshift(chunk)
this.push(null)
return cb();
}
@@ -71,11 +61,6 @@ CopyStreamQuery.prototype._transform = function(chunk, enc, cb) {
//complete or error
if(messageCode == code.c || messageCode == code.E) {
this._detach()
if (messageCode == code.c) {
this.connection.stream.unshift(chunk.slice(offset + 5))
} else {
this.connection.stream.unshift(chunk.slice(offset))
}
this.push(null)
return cb();
}
@@ -108,6 +93,9 @@ CopyStreamQuery.prototype.handleError = function(e) {
this.emit('error', e)
}
CopyStreamQuery.prototype.handleCopyData = function(chunk) {
}
CopyStreamQuery.prototype.handleCommandComplete = function() {
}
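
The offsets in _transform above come from the PostgreSQL wire format: each backend message is one type byte ('d' for CopyData, 'c' for CopyDone, 'E' for ErrorResponse) followed by a 4-byte big-endian length that counts itself but not the type byte, so a CopyData payload starts at offset + 5. A standalone sketch of that framing loop (not the module's code, which also threads a _remainder across chunks):

```js
// Split a buffer of backend messages into CopyData payloads, the way the
// offsets above imply: 1 type byte + 4-byte length (which includes itself).
function splitCopyData(chunk) {
  var offset = 0
  var rows = []
  while (offset + 5 <= chunk.length) {
    var messageCode = chunk[offset]               // 'd' (0x64), 'c' (0x63), 'E' (0x45), ...
    var length = chunk.readUInt32BE(offset + 1)   // includes these 4 bytes, not the type byte
    if (offset + 1 + length > chunk.length) break // incomplete frame: keep as remainder
    if (messageCode === 0x64) {                   // CopyData
      rows.push(chunk.slice(offset + 5, offset + 1 + length))
    }
    offset += 1 + length
  }
  return { rows: rows, remainder: chunk.slice(offset) }
}

// e.g. splitCopyData(Buffer.concat([previousRemainder, newChunk]))
```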

View File

@@ -1,5 +1,4 @@
var CopyToQueryStream = require('./copy-to')
module.exports = {
to: function(txt, options) {
return new CopyToQueryStream(txt, options)
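
The hunk above only shows the `to` factory, but the tests below also use require('../').from, so the full export surface presumably looks something like the sketch below; the copy-from module name and class name are assumptions, not shown in this diff:

```js
var CopyToQueryStream = require('./copy-to')
var CopyFromQueryStream = require('./copy-from') // assumed filename

module.exports = {
  from: function (txt, options) {
    return new CopyFromQueryStream(txt, options)
  },
  to: function (txt, options) {
    return new CopyToQueryStream(txt, options)
  }
}
```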

View File

@@ -23,7 +23,7 @@
"url": "https://github.com/brianc/node-pg-copy-streams/issues"
},
"devDependencies": {
"pg.js": "~2.8.1",
"pg": "~4.4.3",
"concat-stream": "~1.1.0",
"gonna": "0.0.0",
"lodash": "~2.2.1",

View File

@@ -4,7 +4,7 @@ var gonna = require('gonna')
var async = require('async')
var concat = require('concat-stream')
var _ = require('lodash')
var pg = require('pg.js')
var pg = require('pg')
var from = require('../').from
var to = require('../').to

View File

@@ -3,7 +3,7 @@ var gonna = require('gonna')
var concat = require('concat-stream')
var _ = require('lodash')
var pg = require('pg.js')
var pg = require('pg')
var copy = require('../').from
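
The test hunks that follow build their connections through a client() helper (fromClient = client(), cancelClient = client()) that is not part of this diff. A minimal sketch of what it presumably does under pg ~4.4, with connection parameters coming from the PGUSER / PGDATABASE variables set in .travis.yml:

```js
var pg = require('pg')

// Hypothetical helper: each call opens a fresh client, relying on libpq-style
// environment variables (PGUSER, PGDATABASE) for its connection parameters.
var client = function () {
  var c = new pg.Client()
  c.connect()
  return c
}
```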

View File

@@ -4,7 +4,7 @@ var gonna = require('gonna')
var _ = require('lodash')
var async = require('async')
var concat = require('concat-stream')
var pg = require('pg.js')
var pg = require('pg')
var copy = require('../').to
@@ -25,6 +25,8 @@ testConstruction()
var testRange = function(top) {
var fromClient = client()
var txt = 'COPY (SELECT * from generate_series(0, ' + (top - 1) + ')) TO STDOUT'
var res;
var stream = fromClient.query(copy(txt))
var done = gonna('finish piping out', 1000, function() {
@@ -32,57 +34,43 @@ var testRange = function(top) {
})
stream.pipe(concat(function(buf) {
var res = buf.toString('utf8')
res = buf.toString('utf8')
}))
stream.on('end', function() {
var expected = _.range(0, top).join('\n') + '\n'
assert.equal(res, expected)
assert.equal(stream.rowCount, top, 'should have rowCount ' + top + ' but got ' + stream.rowCount)
done()
}))
});
}
testRange(10000)
var testLeak = function(rounds) {
var fromClient = client()
var txt = 'COPY (SELECT 10) TO STDOUT'
var runStream = function(num, callback) {
var stream = fromClient.query(copy(txt))
stream.on('data', function(data) {
// Just throw away the data.
})
stream.on('end', callback)
stream.on('error', callback)
}
async.timesSeries(rounds, runStream, function(err) {
assert.equal(err, null)
assert.equal(fromClient.connection.stream.listeners('close').length, 0)
assert.equal(fromClient.connection.stream.listeners('data').length, 1)
assert.equal(fromClient.connection.stream.listeners('end').length, 2)
assert.equal(fromClient.connection.stream.listeners('error').length, 1)
fromClient.end()
})
}
testLeak(5)
var testInternalPostgresError = function() {
var fromClient = client()
// This attempts to make an array that's too large, and should fail.
var txt = "COPY (SELECT asdlfsdf AS e) t) TO STDOUT"
var cancelClient = client()
var queryClient = client()
var runStream = function(callback) {
var stream = fromClient.query(copy(txt))
var txt = "COPY (SELECT pg_sleep(10)) TO STDOUT"
var stream = queryClient.query(copy(txt))
stream.on('data', function(data) {
// Just throw away the data.
})
stream.on('error', callback)
setTimeout(function() {
var cancelQuery = "SELECT pg_cancel_backend(pid) FROM pg_stat_activity WHERE query ~ 'pg_sleep' AND NOT query ~ 'pg_cancel_backend'"
cancelClient.query(cancelQuery)
}, 50)
}
runStream(function(err) {
assert.notEqual(err, null)
fromClient.end()
var expectedMessage = 'canceling statement due to user request'
assert.notEqual(err.toString().indexOf(expectedMessage), -1, 'Error message should mention reason for query failure.')
cancelClient.end()
queryClient.end()
})
}
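
Because the hunk above interleaves removed and added lines without markers, here is a consolidated reading of the rewritten error test, as far as it can be recovered from the diff: one client streams a deliberately slow COPY (pg_sleep), a second client cancels that backend, and the test asserts that the error surfaces with the cancellation message.

```js
var testInternalPostgresError = function () {
  var cancelClient = client()
  var queryClient = client()

  var runStream = function (callback) {
    var txt = "COPY (SELECT pg_sleep(10)) TO STDOUT"
    var stream = queryClient.query(copy(txt))
    stream.on('data', function (data) {
      // Just throw away the data.
    })
    stream.on('error', callback)

    // Cancel the COPY from the second connection once it is underway.
    setTimeout(function () {
      var cancelQuery = "SELECT pg_cancel_backend(pid) FROM pg_stat_activity WHERE query ~ 'pg_sleep' AND NOT query ~ 'pg_cancel_backend'"
      cancelClient.query(cancelQuery)
    }, 50)
  }

  runStream(function (err) {
    assert.notEqual(err, null)
    var expectedMessage = 'canceling statement due to user request'
    assert.notEqual(err.toString().indexOf(expectedMessage), -1, 'Error message should mention reason for query failure.')
    cancelClient.end()
    queryClient.end()
  })
}
```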