61 Commits

Author SHA1 Message Date
Daniel García Aubert
5ff2cf6746 Stubs next version 2018-11-21 18:13:03 +01:00
Daniel García Aubert
94c8d89aba Release v1.2.0-carto.3 2018-11-21 18:08:01 +01:00
Daniel G. Aubert
c19ab0746c Merge pull request #3 from CartoDB/nodejs-10
Support Node.js 10 LTS
2018-11-21 18:05:40 +01:00
Daniel García Aubert
a27bd1fd8f Prepare next release 2018-11-21 17:57:20 +01:00
Daniel García Aubert
01ab20fce2 Add note to README 2018-11-07 16:50:38 +01:00
Daniel García Aubert
d55dbe70d5 Do not use deprecated Buffer constructor 2018-11-07 15:06:38 +01:00
Daniel García Aubert
de47558628 Support 'package-lock.json' 2018-11-07 15:01:55 +01:00
Daniel García Aubert
38c78677e8 Drop support for old version of Node.js and support latest LTS releases 2018-11-07 15:01:24 +01:00
Daniel García Aubert
6f1d5cb4a5 Release v1.2.0-carto.2 2018-10-26 13:26:02 +02:00
Daniel G. Aubert
99d397956c Merge pull request #2 from CartoDB/use-strict
Use strict mode
2018-10-26 13:22:30 +02:00
Daniel García Aubert
c85f7d27b8 Update NEWS 2018-10-24 19:17:10 +02:00
Daniel García Aubert
8b355b6e72 Use strict 2018-10-24 18:52:57 +02:00
Rafa de la Torre
d7e5c1383f Update v1.2.0-carto.1 release date 2018-06-11 13:30:43 +02:00
Rafa de la Torre
cb2227d159 Merge pull request #1 from CartoDB/performance-tune-copy-to
Improve performance of COPY TO
2018-06-11 13:29:23 +02:00
Rafa de la Torre
7930d1b8dd Add entry to changelog 2018-06-11 13:27:44 +02:00
Rafa de la Torre
e94fefe902 Merge branch 'v1.2-carto' into performance-tune-copy-to 2018-06-11 13:24:09 +02:00
Rafa de la Torre
9293926047 Add a NEWS.carto.md with the changelog 2018-06-11 13:20:53 +02:00
Rafa de la Torre
fd3cc95573 Remove unused var buffer_sent 2018-06-11 12:17:39 +02:00
Rafa de la Torre
922627daaf Small refactor 2018-06-11 12:14:28 +02:00
Rafa de la Torre
61bc713e0c Improve performance of COPY TO #56
Under some circumstances, the COPY TO streaming can be CPU-bound,
particularly when PG holds the result set in memory buffers and the
rows are much smaller than the chunk size (64 KB on my Linux box).

This commit improves the situation by creating a buffer of `chunk`
size and fitting in as many rows as it can before pushing them. This
results in more balanced reads and writes (in terms of size, and in
bigger chunks) as well as more frequent calls to the callback, thus
freeing the main loop for other events to be processed and avoiding
starvation.
2018-06-08 15:04:42 +02:00
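The technique this commit describes, as a standalone sketch (a hedged illustration: `packRows`, `CHUNK_SIZE`, and every name here are made up for the example, not the module's actual internals):

```js
// Stage parsed rows in a chunk-sized buffer and push the buffer when it
// fills, so downstream sees few large writes instead of one push per row.
var CHUNK_SIZE = 64 * 1024 // typical socket chunk size; varies by platform

function packRows(rows) {
  var out = []
  var buffer = Buffer.alloc(CHUNK_SIZE)
  var offset = 0
  var flush = function () {
    if (offset > 0) {
      out.push(buffer.slice(0, offset))
      buffer = Buffer.alloc(CHUNK_SIZE)
      offset = 0
    }
  }
  rows.forEach(function (row) {
    if (row.length >= CHUNK_SIZE) { // oversized row: pass through as-is
      flush()
      out.push(row)
      return
    }
    if (offset + row.length > CHUNK_SIZE) flush()
    row.copy(buffer, offset)
    offset += row.length
  })
  flush()
  return out
}

// Two small rows come out as a single balanced write:
console.log(packRows([Buffer.from('1\thello\n'), Buffer.from('2\tworld\n')]))
```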
jeromew
e15feb199a README.md: copy-from error and vacuum #26 2016-08-23 12:10:25 +02:00
jeromew
191a4ec16a Fix documentation of copy-from completion 2016-08-23 11:32:10 +02:00
jeromew
399bff7ed7 Bump version 2016-08-22 16:27:04 +00:00
jeromew
8174e10fb5 Merge pull request #54 from jeromew/upstream-end
Test: `end` event should not be triggered 2 times on copy-from
2016-08-22 17:47:29 +02:00
jeromew
f29aef3bba Merge pull request #53 from jeromew/upstream-frontier
Bugfix - wrong tests on chunk frontiers
2016-08-22 17:47:01 +02:00
jeromew
b2e108571e Issue #54: We should probably delay the _flush cb() to CommandComplete 2016-07-30 00:08:54 +00:00
jeromew
7003f6070f Fix issue #54: end is being triggered 2 times 2016-07-29 23:51:41 +00:00
jeromew
ade7ab95a0 Test: end event should not be triggered 2 times on copy-from 2016-07-29 23:47:39 +00:00
jeromew
9ccda04036 Bugfix - Chunk frontiers were not correctly tested 2016-07-28 23:05:50 +00:00
jeromew
a5e532f20b Bugfix - wrong tests on chunk frontiers 2016-07-28 22:55:02 +00:00
jeromew
2a4db2920e Fix NoticeResponse test and handle other messages (#52) 2016-07-28 16:13:22 -05:00
jeromew
f155899570 Add a test for NoticeResponse handling (#51) 2016-07-28 13:15:59 -05:00
brianc
18be125596 Bump version 2016-07-26 14:40:01 -05:00
jeromew
ae5b344395 Refactor message format codes handling (#45) 2016-07-26 14:39:48 -05:00
jeromew
ee84aba89f Use end stream option instead of not calling _flush callback (#44) 2016-07-26 14:39:33 -05:00
brianc
e0aa7db324 Bump version 2016-05-24 17:20:26 -05:00
Jonathan Bergknoff
0f0ddf7ad4 Expose row count after COPY FROM command (#37)
* Expose row count after COPY FROM command

* correct conditional
2016-05-24 17:20:16 -05:00
brianc
ed57e131e9 Drop support for node@v0.10 2016-05-03 13:21:28 -05:00
Brian C
c4a0e6dd58 Fix compatibility with newer versions of node (#39)
* Eliminate the detach/reattach strategy, as it isn't able to differentiate between on/once and inconsistently loses unshifted data depending on the Node version. Instead, just split the stream and send it to the copy stream and connection.stream at the same time. Disconnecting the copy stream just means unpiping. Added handleCopyData to fulfill the query contract but ignore the incoming data.

Add node 4.2.2 to Travis
Minimum postgres 9.2 to allow tests to complete in Travis

Remove test that is no longer needed since we no longer disconnect/reconnect listeners

* Add resume

* Remove node 0.10 and add 0.12

* Re-enable old tests

* Add more versions to the travis test matrix
2016-05-03 13:20:04 -05:00
Dan Robinson
d5b5c8c569 Merge pull request #33 from alexconlin/master
Change file format in example from .tdv to .tsv
2015-12-09 17:14:26 -08:00
Alex Conlin
6981ea6ac5 Change file format in example from .tdv to .tsv 2015-12-09 20:31:54 +00:00
brianc
d2b9677a02 Fix travis build 2015-03-09 08:11:33 -06:00
Brian M. Carlson
bfee86543f Remove database from .travis.yml - that is not the problem 2014-09-16 00:34:34 -04:00
Brian C
21f47220fc Merge pull request #20 from drob/error-message
Better test of error handling after initial response.
2014-09-16 00:33:21 -04:00
Dan
b1b613125f Better test of error handling after initial response. 2014-09-16 00:24:14 -04:00
Brian M. Carlson
4222053744 Last try before I abandon travis 2014-09-15 23:50:14 -04:00
Brian M. Carlson
040ed5f4da Mess with travis config 2014-09-15 23:48:14 -04:00
Brian M. Carlson
f9ee1c083a Merge branch 'master' of github.com:brianc/node-pg-copy-streams 2014-09-15 23:39:49 -04:00
Brian M. Carlson
dcfffd0670 Try new travis changes 2014-09-15 23:39:42 -04:00
Brian C
12e4ca33b0 Update README.md
dat travis badge
2014-09-15 21:10:42 -04:00
Brian M. Carlson
8f2355e454 0.3.0 2014-09-15 20:56:39 -04:00
Brian M. Carlson
d6eab36b66 Make tests a bit more robusto 2014-09-15 20:56:34 -04:00
Brian M. Carlson
33f6ecc11b Add workflow boilerplate files 2014-09-15 20:49:09 -04:00
Brian C
36572a8b7b Merge pull request #16 from drob/fix-docs
Fixes pipe from a file to table example in README.md.
2014-09-15 20:48:23 -04:00
Brian C
0c5d08edae Merge pull request #19 from drob/transform-opts
Accept stream options in constructors, pass to internal transform streams.
2014-09-15 20:47:38 -04:00
Dan
b78a3eb845 Accept stream options in constructors, pass to internal transform streams.
Includes tests.
2014-09-15 15:01:39 -04:00
Dan
107f007249 Fixes pipe from a file to table example in README.md. 2014-08-10 13:33:33 -07:00
Brian C
25b8d6da5f Update README.md
Providing clarity for #6
2014-05-01 11:07:33 -05:00
Dan Robinson
1c0c8871c1 Bump version 2014-04-07 11:56:28 -07:00
Dan Robinson
beb54334e2 Merge pull request #12 from drob/error-handling
Adds handling for errors after initial response.
2014-04-07 11:55:36 -07:00
Dan Robinson
1db9b3ec3d Adds handling for errors after initial response.
Includes a test.
2014-04-06 22:04:20 -07:00
13 changed files with 543 additions and 127 deletions

.travis.yml (new file, 16 lines added)

@@ -0,0 +1,16 @@
language: node_js
node_js:
- "6"
- "8"
- "10"
addons:
postgresql: "9.2"
services:
- postgresql
before_install:
- npm install npm --global
env:
- PGUSER=postgres PGDATABASE=postgres

Makefile (new file, 14 lines added)

@@ -0,0 +1,14 @@
.PHONY: publish-patch test
test:
npm test
patch: test
npm version patch -m "Bump version"
git push origin master --tags
npm publish
minor: test
npm version minor -m "Bump version"
git push origin master --tags
npm publish

NEWS.carto.md (new file, 30 lines added)

@@ -0,0 +1,30 @@
# CARTO's Changelog
## v1.2.0-carto.4
Released 2018-mm-dd
## v1.2.0-carto.3
Released 2018-11-21
Features:
* Drop support for Node.js 0.12, 4, and 5.
* Add support for Node.js 8 and 10.
* Add package-lock.json.
* Do not use deprecated Buffer constructors.
## v1.2.0-carto.2
Released 2018-10-26
Bug fixes:
* Make all modules use strict mode semantics.
## v1.2.0-carto.1
Released 2018-06-11
Bug fixes:
* Improve performance of COPY TO by sending bigger chunks through the low-level `push()`. See https://github.com/CartoDB/node-pg-copy-streams/pull/1
## v1.2.0
Released 2016-08-22
Vanilla version v1.2.0 from upstream repository. See https://github.com/CartoDB/node-pg-copy-streams/releases/tag/v1.2.0

README.md

@@ -1,5 +1,11 @@
## Note
This is a fork of [brianc/node-pg-copy-streams](https://github.com/brianc/node-pg-copy-streams)
## pg-copy-streams
[![Build Status](https://travis-ci.org/brianc/node-pg-copy-streams.svg)](https://travis-ci.org/brianc/node-pg-copy-streams)
COPY FROM / COPY TO for node-postgres. Stream from one database to another, and stuff.
## how? what? huh?
@@ -38,13 +44,17 @@ var copyFrom = require('pg-copy-streams').from;
pg.connect(function(err, client, done) {
var stream = client.query(copyFrom('COPY my_table FROM STDIN'));
var fileStream = fs.createReadStream('some_file.tdv')
fileStream.pipe(stream);
fileStream.on('end', done);
var fileStream = fs.createReadStream('some_file.tsv')
fileStream.on('error', done);
stream.on('error', done);
stream.on('end', done);
fileStream.pipe(stream);
});
```
*Important*: Even if `pg-copy-streams.from` is used as a Writable (via `pipe`), you should not listen for the 'finish' event and expect that the COPY command has been acknowledged by the database at that point. Internally, a duplex stream is used to pipe the data into the database connection, and the COPY command should be considered complete only when the 'end' event is triggered.
## install
```sh
@@ -53,9 +63,14 @@ $ npm install pg-copy-streams
## notice
Before you set out on this magical piping journey, you _really_ should read this: http://www.postgresql.org/docs/9.3/static/sql-copy.html, and you might want to take a look at the [tests](https://github.com/brianc/node-pg-copy-streams/tree/master/test) to get an idea of how things work.
This module __only__ works with the pure JavaScript bindings. If you're using `require('pg').native` please make sure to use normal `require('pg')` or `require('pg.js')` when you're using copy streams.
## contributing
Before you set out on this magical piping journey, you _really_ should read this: http://www.postgresql.org/docs/current/static/sql-copy.html, and you might want to take a look at the [tests](https://github.com/brianc/node-pg-copy-streams/tree/master/test) to get an idea of how things work.
Take note of the following warning in the PostgreSQL documentation:
> COPY stops operation at the first error. This should not lead to problems in the event of a COPY TO, but the target table will already have received earlier rows in a COPY FROM. These rows will not be visible or accessible, but they still occupy disk space. This might amount to a considerable amount of wasted disk space if the failure happened well into a large copy operation. You might wish to invoke VACUUM to recover the wasted space.
## contributing
Instead of adding a bunch more code to the already bloated [node-postgres](https://github.com/brianc/node-postgres) I am trying to make the internals extensible and work on adding edge-case features as 3rd party modules.
This is one of those.
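A hedged usage sketch of the 'finish' vs 'end' distinction called out in the note above (the table and file names are placeholders; the callback-style `pg.connect` matches the README's own examples):

```js
var fs = require('fs')
var pg = require('pg')
var copyFrom = require('pg-copy-streams').from

pg.connect(function (err, client, done) {
  if (err) throw err
  var stream = client.query(copyFrom('COPY my_table FROM STDIN'))
  stream.on('finish', function () {
    // Only means our writable side stopped; the server may still fail the COPY.
  })
  stream.on('error', done)
  stream.on('end', function () {
    // CommandComplete has arrived: the COPY is now actually acknowledged.
    done()
  })
  fs.createReadStream('some_file.tsv').on('error', done).pipe(stream)
})
```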

copy-to.js

@@ -1,15 +1,17 @@
module.exports = function(txt) {
return new CopyStreamQuery(txt)
'use strict';
module.exports = function(txt, options) {
return new CopyStreamQuery(txt, options)
}
var Transform = require('stream').Transform
var util = require('util')
var code = require('./message-formats')
var CopyStreamQuery = function(text) {
Transform.call(this)
var CopyStreamQuery = function(text, options) {
Transform.call(this, options)
this.text = text
this._listeners = {}
this._copyOutResponse = null
this._gotCopyOutResponse = false
this.rowCount = 0
}
@@ -20,77 +22,93 @@ var eventTypes = ['close', 'data', 'end', 'error']
CopyStreamQuery.prototype.submit = function(connection) {
connection.query(this.text)
this.connection = connection
var self = this
eventTypes.forEach(function(type) {
self._listeners[type] = connection.stream.listeners(type)
connection.stream.removeAllListeners(type)
})
this.connection.removeAllListeners('copyData')
connection.stream.pipe(this)
}
var code = {
E: 69, //Error
H: 72, //CopyOutResponse
d: 0x64, //CopyData
c: 0x63 //CopyDone
}
CopyStreamQuery.prototype._detach = function() {
this.connection.stream.unpipe()
var self = this
eventTypes.forEach(function(type) {
self.connection.stream.removeAllListeners(type)
self._listeners[type].forEach(function(listener) {
self.connection.stream.on(type, listener)
})
})
this.connection.stream.unpipe(this)
// Unpipe can drop us out of flowing mode
this.connection.stream.resume()
}
CopyStreamQuery.prototype._transform = function(chunk, enc, cb) {
var offset = 0
var Byte1Len = 1;
var Int32Len = 4;
if(this._remainder && chunk) {
chunk = Buffer.concat([this._remainder, chunk])
}
if(!this._copyOutResponse) {
this._copyOutResponse = true
if(chunk[0] == code.E) {
this._detach()
this.connection.stream.unshift(chunk)
this.push(null)
return cb();
var length;
var messageCode;
var needPush = false;
var buffer = Buffer.alloc(chunk.length);
var buffer_offset = 0;
this.pushBufferIfneeded = function() {
if (needPush && buffer_offset > 0) {
this.push(buffer.slice(0, buffer_offset))
buffer_offset = 0;
}
if(chunk[0] != code.H) {
this.emit('error', new Error('Expected copy out response'))
}
var length = chunk.readUInt32BE(1)
offset = 1
offset += length
}
while((chunk.length - offset) > 5) {
while((chunk.length - offset) >= (Byte1Len + Int32Len)) {
var messageCode = chunk[offset]
//complete
if(messageCode == code.c) {
this._detach()
this.connection.stream.unshift(chunk.slice(offset + 5))
this.push(null)
return cb();
//console.log('PostgreSQL message ' + String.fromCharCode(messageCode))
switch(messageCode) {
// detect COPY start
case code.CopyOutResponse:
if (!this._gotCopyOutResponse) {
this._gotCopyOutResponse = true
} else {
this.emit('error', new Error('Unexpected CopyOutResponse message (H)'))
}
break;
// meaningful row
case code.CopyData:
needPush = true;
break;
// standard interspersed messages. discard
case code.ParameterStatus:
case code.NoticeResponse:
case code.NotificationResponse:
break;
case code.ErrorResponse:
case code.CopyDone:
this.pushBufferIfneeded();
this._detach()
this.push(null)
return cb();
break;
default:
this.emit('error', new Error('Unexpected PostgreSQL message ' + String.fromCharCode(messageCode)))
}
//something bad happened
if(messageCode != code.d) {
return this.emit('error', new Error('expected "d" (copydata message)'))
}
var length = chunk.readUInt32BE(offset + 1) - 4 //subtract length of UInt32
//can we read the next row?
if(chunk.length > (offset + length + 5)) {
offset += 5
var slice = chunk.slice(offset, offset + length)
offset += length
this.push(slice)
this.rowCount++
length = chunk.readUInt32BE(offset+Byte1Len)
if(chunk.length >= (offset + Byte1Len + length)) {
offset += Byte1Len + Int32Len
if (needPush) {
var row = chunk.slice(offset, offset + length - Int32Len)
this.rowCount++
row.copy(buffer, buffer_offset);
buffer_offset += row.length;
}
offset += (length - Int32Len)
} else {
// we need more chunks for a complete message
break;
}
}
this.pushBufferIfneeded();
if(chunk.length - offset) {
var slice = chunk.slice(offset)
this._remainder = slice
@@ -104,6 +122,9 @@ CopyStreamQuery.prototype.handleError = function(e) {
this.emit('error', e)
}
CopyStreamQuery.prototype.handleCopyData = function(chunk) {
}
CopyStreamQuery.prototype.handleCommandComplete = function() {
}
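The "Unpipe can drop us out of flowing mode" comment in `_detach` above refers to a general Node streams subtlety; a minimal self-contained demonstration with plain PassThrough streams (nothing here comes from this module):

```js
var PassThrough = require('stream').PassThrough

var source = new PassThrough()
var sink = new PassThrough()

source.pipe(sink)
source.unpipe(sink) // removing the last pipe destination pauses `source`...
source.write('this would sit in the internal buffer\n')

// ...so without an explicit resume() the write above stays queued. resume()
// switches back to flowing mode (discarding unconsumed data), which is what
// _detach wants for the connection's raw stream after the COPY is done.
source.resume()
```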

index.js

@@ -1,19 +1,21 @@
var CopyToQueryStream = require('./copy-to')
'use strict';
var CopyToQueryStream = require('./copy-to')
module.exports = {
to: function(txt) {
return new CopyToQueryStream(txt)
to: function(txt, options) {
return new CopyToQueryStream(txt, options)
},
from: function (txt) {
return new CopyStreamQuery(txt)
from: function (txt, options) {
return new CopyStreamQuery(txt, options)
}
}
var Transform = require('stream').Transform
var util = require('util')
var code = require('./message-formats')
var CopyStreamQuery = function(text) {
Transform.call(this)
var CopyStreamQuery = function(text, options) {
Transform.call(this, options)
this.text = text
this._listeners = null
this._copyOutResponse = null
@@ -27,28 +29,23 @@ CopyStreamQuery.prototype.submit = function(connection) {
connection.query(this.text)
}
var code = {
H: 72, //CopyOutResponse
d: 0x64, //CopyData
c: 0x63 //CopyDone
}
var copyDataBuffer = Buffer([code.d])
var copyDataBuffer = Buffer.from([code.CopyData])
CopyStreamQuery.prototype._transform = function(chunk, enc, cb) {
var Int32Len = 4;
this.push(copyDataBuffer)
var lenBuffer = Buffer(4)
lenBuffer.writeUInt32BE(chunk.length + 4, 0)
var lenBuffer = Buffer.alloc(Int32Len)
lenBuffer.writeUInt32BE(chunk.length + Int32Len, 0)
this.push(lenBuffer)
this.push(chunk)
this.rowCount++
cb()
}
CopyStreamQuery.prototype._flush = function(cb) {
var finBuffer = Buffer([code.c, 0, 0, 0, 4])
var Int32Len = 4;
var finBuffer = Buffer.from([code.CopyDone, 0, 0, 0, Int32Len])
this.push(finBuffer)
//never call this callback, do not close underlying stream
//cb()
this.cb_flush = cb
}
CopyStreamQuery.prototype.handleError = function(e) {
@@ -56,12 +53,24 @@ CopyStreamQuery.prototype.handleError = function(e) {
}
CopyStreamQuery.prototype.handleCopyInResponse = function(connection) {
this.pipe(connection.stream)
this.pipe(connection.stream, { end: false })
}
CopyStreamQuery.prototype.handleCommandComplete = function() {
this.unpipe()
this.emit('end')
CopyStreamQuery.prototype.handleCommandComplete = function(msg) {
// Parse affected row count as in
// https://github.com/brianc/node-postgres/blob/35e5567f86774f808c2a8518dd312b8aa3586693/lib/result.js#L37
var match = /COPY (\d+)/.exec((msg || {}).text)
if (match) {
this.rowCount = parseInt(match[1], 10)
}
// we delay the _flush cb so that the 'end' event is
// triggered after CommandComplete
this.cb_flush()
// unpipe from connection
this.unpipe(this.connection)
this.connection = null
}
CopyStreamQuery.prototype.handleReadyForQuery = function() {

message-formats.js (new file, 25 lines added)

@@ -0,0 +1,25 @@
/**
* The COPY feature uses the following protocol codes.
* The codes for the most recent protocol version are documented on
* https://www.postgresql.org/docs/current/static/protocol-message-formats.html
*
* The protocol flow itself is described on
* https://www.postgresql.org/docs/current/static/protocol-flow.html
*/
module.exports = {
ErrorResponse: 0x45, // E
CopyInResponse: 0x47, // G
CopyOutResponse: 0x48, // H
CopyBothResponse: 0x57, // W
CopyDone: 0x63, // c
CopyData: 0x64, // d
CopyFail: 0x66, // f
// It is possible for NoticeResponse and ParameterStatus messages to be interspersed between CopyData messages;
// frontends must handle these cases, and should be prepared for other asynchronous message types as well
// (see Section 50.2.6).
// Otherwise, any message type other than CopyData or CopyDone may be treated as terminating copy-out mode.
NotificationResponse: 0x41, // A
NoticeResponse: 0x4E, // N
ParameterStatus: 0x53 // S
}
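For illustration, a hedged sketch of the wire format these codes identify: every message is one code byte followed by an Int32 length that counts itself but not the code byte (`describeMessage` is a made-up helper, not part of the module, and it assumes `message-formats.js` sits alongside):

```js
var code = require('./message-formats')

function describeMessage(buf) {
  var messageCode = buf[0]
  var length = buf.readUInt32BE(1) // Int32 starts right after the code byte
  return {
    code: String.fromCharCode(messageCode),
    isCopyData: messageCode === code.CopyData,
    payloadLength: length - 4 // the Int32 includes its own 4 bytes
  }
}

// Build and decode a CopyData message carrying one row:
var row = Buffer.from('1\thello\n')
var header = Buffer.alloc(5)
header[0] = code.CopyData
header.writeUInt32BE(row.length + 4, 1)
console.log(describeMessage(Buffer.concat([header, row])))
// -> { code: 'd', isCopyData: true, payloadLength: 8 }
```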

package-lock.json (generated new file, 193 lines added)

@@ -0,0 +1,193 @@
{
"name": "pg-copy-streams",
"version": "1.2.0-carto.4",
"lockfileVersion": 1,
"requires": true,
"dependencies": {
"async": {
"version": "0.2.10",
"resolved": "http://registry.npmjs.org/async/-/async-0.2.10.tgz",
"integrity": "sha1-trvgsGdLnXGXCMo43owjfLUmw9E=",
"dev": true
},
"base64-js": {
"version": "0.0.2",
"resolved": "https://registry.npmjs.org/base64-js/-/base64-js-0.0.2.tgz",
"integrity": "sha1-Ak8Pcq+iW3X5wO5zzU9V7Bvtl4Q=",
"dev": true
},
"bops": {
"version": "0.0.6",
"resolved": "https://registry.npmjs.org/bops/-/bops-0.0.6.tgz",
"integrity": "sha1-CC0dVfoB5g29wuvC26N/ZZVUzzo=",
"dev": true,
"requires": {
"base64-js": "0.0.2",
"to-utf8": "0.0.1"
}
},
"buffer-writer": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/buffer-writer/-/buffer-writer-1.0.1.tgz",
"integrity": "sha1-Iqk2kB4wKa/NdUfrRIfOtpejvwg=",
"dev": true
},
"concat-stream": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-1.1.0.tgz",
"integrity": "sha1-hCae/YzGUCdeMi8wnfRIZ7xRxfM=",
"dev": true,
"requires": {
"bops": "0.0.6"
}
},
"generic-pool": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/generic-pool/-/generic-pool-2.1.1.tgz",
"integrity": "sha1-rwTcLDJc/Ll1Aj+lK/zpYXp0Nf0=",
"dev": true
},
"gonna": {
"version": "0.0.0",
"resolved": "https://registry.npmjs.org/gonna/-/gonna-0.0.0.tgz",
"integrity": "sha1-6k4ZsVJ6F4LhJQVeMCSabUvHmlk=",
"dev": true
},
"heroku-env": {
"version": "0.1.1",
"resolved": "https://registry.npmjs.org/heroku-env/-/heroku-env-0.1.1.tgz",
"integrity": "sha1-wGeRyUTpuHSOMXf1S/cBQyZ+Yxc=",
"dev": true,
"requires": {
"parse-database-url": "~0.2.0"
}
},
"lodash": {
"version": "2.2.1",
"resolved": "http://registry.npmjs.org/lodash/-/lodash-2.2.1.tgz",
"integrity": "sha1-ypNf0UqzwMhyq6zxmLnNpQFECGc=",
"dev": true
},
"packet-reader": {
"version": "0.2.0",
"resolved": "https://registry.npmjs.org/packet-reader/-/packet-reader-0.2.0.tgz",
"integrity": "sha1-gZ300BC4LV6lZx+KGjrPA5vNdwA=",
"dev": true
},
"parse-database-url": {
"version": "0.2.2",
"resolved": "https://registry.npmjs.org/parse-database-url/-/parse-database-url-0.2.2.tgz",
"integrity": "sha1-SGFa56fA/HfjKU0jVCpqUnPDVws=",
"dev": true
},
"pg": {
"version": "4.4.6",
"resolved": "http://registry.npmjs.org/pg/-/pg-4.4.6.tgz",
"integrity": "sha1-EZgiP7rva6QRqm9Q4X9OtaGTFVk=",
"dev": true,
"requires": {
"buffer-writer": "1.0.1",
"generic-pool": "2.1.1",
"packet-reader": "0.2.0",
"pg-connection-string": "0.1.3",
"pg-types": "1.*",
"pgpass": "0.0.3",
"semver": "^4.1.0"
}
},
"pg-connection-string": {
"version": "0.1.3",
"resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-0.1.3.tgz",
"integrity": "sha1-2hhHsglA5C7hSSvq9l1J2RskXfc=",
"dev": true
},
"pg-int8": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz",
"integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==",
"dev": true
},
"pg-types": {
"version": "1.13.0",
"resolved": "https://registry.npmjs.org/pg-types/-/pg-types-1.13.0.tgz",
"integrity": "sha512-lfKli0Gkl/+za/+b6lzENajczwZHc7D5kiUCZfgm914jipD2kIOIvEkAhZ8GrW3/TUoP9w8FHjwpPObBye5KQQ==",
"dev": true,
"requires": {
"pg-int8": "1.0.1",
"postgres-array": "~1.0.0",
"postgres-bytea": "~1.0.0",
"postgres-date": "~1.0.0",
"postgres-interval": "^1.1.0"
}
},
"pgpass": {
"version": "0.0.3",
"resolved": "https://registry.npmjs.org/pgpass/-/pgpass-0.0.3.tgz",
"integrity": "sha1-EuZ+NDsxicLzEgbrycwL7//PkUA=",
"dev": true,
"requires": {
"split": "~0.3"
}
},
"postgres-array": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-1.0.3.tgz",
"integrity": "sha512-5wClXrAP0+78mcsNX3/ithQ5exKvCyK5lr5NEEEeGwwM6NJdQgzIJBVxLvRW+huFpX92F2QnZ5CcokH0VhK2qQ==",
"dev": true
},
"postgres-bytea": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.0.tgz",
"integrity": "sha1-AntTPAqokOJtFy1Hz5zOzFIazTU=",
"dev": true
},
"postgres-date": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.3.tgz",
"integrity": "sha1-4tiXAu/bJY/52c7g/pG9BpdSV6g=",
"dev": true
},
"postgres-interval": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.1.2.tgz",
"integrity": "sha512-fC3xNHeTskCxL1dC8KOtxXt7YeFmlbTYtn7ul8MkVERuTmf7pI4DrkAxcw3kh1fQ9uz4wQmd03a1mRiXUZChfQ==",
"dev": true,
"requires": {
"xtend": "^4.0.0"
}
},
"semver": {
"version": "4.3.6",
"resolved": "http://registry.npmjs.org/semver/-/semver-4.3.6.tgz",
"integrity": "sha1-MAvG4OhjdPe6YQaLWx7NV/xlMto=",
"dev": true
},
"split": {
"version": "0.3.3",
"resolved": "http://registry.npmjs.org/split/-/split-0.3.3.tgz",
"integrity": "sha1-zQ7qXmOiEd//frDwkcQTPi0N0o8=",
"dev": true,
"requires": {
"through": "2"
}
},
"through": {
"version": "2.3.8",
"resolved": "http://registry.npmjs.org/through/-/through-2.3.8.tgz",
"integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=",
"dev": true
},
"to-utf8": {
"version": "0.0.1",
"resolved": "https://registry.npmjs.org/to-utf8/-/to-utf8-0.0.1.tgz",
"integrity": "sha1-0Xrqcv8vujm55DYBvns/9y4ImFI=",
"dev": true
},
"xtend": {
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz",
"integrity": "sha1-pcbVMr5lbiPbgg77lDofBJmNY68=",
"dev": true
}
}
}

package.json

@@ -1,6 +1,6 @@
{
"name": "pg-copy-streams",
"version": "0.2.3",
"version": "1.2.0-carto.4",
"description": "Low-Level COPY TO and COPY FROM streams for PostgreSQL in JavaScript using",
"main": "index.js",
"scripts": {
@@ -23,7 +23,7 @@
"url": "https://github.com/brianc/node-pg-copy-streams/issues"
},
"devDependencies": {
"pg.js": "~2.8.1",
"pg": "~4.4.3",
"concat-stream": "~1.1.0",
"gonna": "0.0.0",
"lodash": "~2.2.1",

test/binary.js

@@ -1,10 +1,12 @@
'use strict';
var assert = require('assert')
var gonna = require('gonna')
var async = require('async')
var concat = require('concat-stream')
var _ = require('lodash')
var pg = require('pg.js')
var pg = require('pg')
var from = require('../').from
var to = require('../').to
@@ -19,10 +21,12 @@ var testBinaryCopy = function() {
var fromClient = client()
var toClient = client()
queries = [
'CREATE TABLE data (num BIGINT, word TEXT)',
var queries = [
'DROP TABLE IF EXISTS data',
'CREATE TABLE IF NOT EXISTS data (num BIGINT, word TEXT)',
'INSERT INTO data (num, word) VALUES (1, \'hello\'), (2, \'other thing\'), (3, \'goodbye\')',
'CREATE TABLE data_copy (LIKE data INCLUDING ALL)'
'DROP TABLE IF EXISTS data_copy',
'CREATE TABLE IF NOT EXISTS data_copy (LIKE data INCLUDING ALL)'
]
async.eachSeries(queries, _.bind(fromClient.query, fromClient), function(err) {
@@ -31,7 +35,7 @@ var testBinaryCopy = function() {
var fromStream = fromClient.query(to('COPY (SELECT * FROM data) TO STDOUT BINARY'))
var toStream = toClient.query(from('COPY data_copy FROM STDIN BINARY'))
runStream = function(callback) {
var runStream = function(callback) {
fromStream.on('error', callback)
toStream.on('error', callback)
toStream.on('finish', callback)

test/copy-from.js

@@ -1,31 +1,39 @@
'use strict';
var assert = require('assert')
var gonna = require('gonna')
var concat = require('concat-stream')
var _ = require('lodash')
var pg = require('pg.js')
var pg = require('pg')
var copy = require('../').from
var client = function() {
var client = new pg.Client()
client.connect()
return client
}
var testConstruction = function() {
var highWaterMark = 10
var stream = copy('COPY numbers FROM STDIN', {highWaterMark: 10, objectMode: true})
for(var i = 0; i < highWaterMark * 1.5; i++) {
stream.write('1\t2\n')
}
assert(!stream.write('1\t2\n'), 'Should correctly set highWaterMark.')
}
testConstruction()
var testRange = function(top) {
var client = function() {
var client = new pg.Client()
client.connect()
client.query('CREATE TEMP TABLE numbers(num int, bigger_num int)')
return client
}
var fromClient = client()
var copy = require('../').from
fromClient.query('CREATE TEMP TABLE numbers(num int, bigger_num int)')
var txt = 'COPY numbers FROM STDIN'
var stream = fromClient.query(copy(txt))
var rowEmitCount = 0
stream.on('row', function() {
rowEmitCount++
})
for(var i = 0; i < top; i++) {
stream.write(Buffer('' + i + '\t' + i*10 + '\n'))
stream.write(Buffer.from('' + i + '\t' + i*10 + '\n'))
}
stream.end()
var countDone = gonna('have correct count')
@@ -33,7 +41,8 @@ var testRange = function(top) {
fromClient.query('SELECT COUNT(*) FROM numbers', function(err, res) {
assert.ifError(err)
assert.equal(res.rows[0].count, top, 'expected ' + top + ' rows but got ' + res.rows[0].count)
console.log('found ', res.rows.length, 'rows')
assert.equal(stream.rowCount, top, 'expected ' + top + ' rows but db count is ' + stream.rowCount)
//console.log('found ', res.rows.length, 'rows')
countDone()
var firstRowDone = gonna('have correct result')
assert.equal(stream.rowCount, top, 'should have rowCount ' + top + ' ')
@@ -48,3 +57,19 @@ var testRange = function(top) {
}
testRange(1000)
var testSingleEnd = function() {
var fromClient = client()
fromClient.query('CREATE TEMP TABLE numbers(num int)')
var txt = 'COPY numbers FROM STDIN';
var stream = fromClient.query(copy(txt))
var count = 0;
stream.on('end', function() {
count++;
assert(count==1, '`end` Event was triggered ' + count + ' times');
if (count == 1) fromClient.end();
})
stream.end(Buffer.from('1\n'))
}
testSingleEnd()

test/copy-to.js

@@ -1,12 +1,15 @@
'use strict';
var assert = require('assert')
var gonna = require('gonna')
var _ = require('lodash')
var async = require('async')
var concat = require('concat-stream')
var pg = require('pg.js')
var pg = require('pg')
var copy = require('../').to
var code = require('../message-formats')
var client = function() {
var client = new pg.Client()
@@ -14,9 +17,31 @@ var client = function() {
return client
}
var testConstruction = function() {
var txt = 'COPY (SELECT * FROM generate_series(0, 10)) TO STDOUT'
var stream = copy(txt, {highWaterMark: 10})
assert.equal(stream._readableState.highWaterMark, 10, 'Client should have been set with a correct highWaterMark.')
}
testConstruction()
var testComparators = function() {
var copy1 = copy();
copy1.pipe(concat(function(buf) {
assert(copy1._gotCopyOutResponse, 'should have received CopyOutResponse')
assert(!copy1._remainder, 'Message with no additional data (len=Int4Len+0) should not leave a remainder')
}))
copy1.end(Buffer.from([code.CopyOutResponse, 0x00, 0x00, 0x00, 0x04]));
}
testComparators();
var testRange = function(top) {
var fromClient = client()
var txt = 'COPY (SELECT * from generate_series(0, ' + (top - 1) + ')) TO STDOUT'
var res;
var stream = fromClient.query(copy(txt))
var done = gonna('finish piping out', 1000, function() {
@@ -24,37 +49,74 @@ var testRange = function(top) {
})
stream.pipe(concat(function(buf) {
var res = buf.toString('utf8')
res = buf.toString('utf8')
}))
stream.on('end', function() {
var expected = _.range(0, top).join('\n') + '\n'
assert.equal(res, expected)
assert.equal(stream.rowCount, top, 'should have rowCount ' + top + ' but got ' + stream.rowCount)
done()
}))
});
}
testRange(10000)
var testLeak = function(rounds) {
var fromClient = client()
var txt = 'COPY (SELECT 10) TO STDOUT'
var testInternalPostgresError = function() {
var cancelClient = client()
var queryClient = client()
var runStream = function(num, callback) {
var stream = fromClient.query(copy(txt))
var runStream = function(callback) {
var txt = "COPY (SELECT pg_sleep(10)) TO STDOUT"
var stream = queryClient.query(copy(txt))
stream.on('data', function(data) {
// Just throw away the data.
})
stream.on('end', callback)
stream.on('error', callback)
setTimeout(function() {
var cancelQuery = "SELECT pg_cancel_backend(pid) FROM pg_stat_activity WHERE query ~ 'pg_sleep' AND NOT query ~ 'pg_cancel_backend'"
cancelClient.query(cancelQuery)
}, 50)
}
async.timesSeries(rounds, runStream, function(err) {
assert.equal(err, null)
assert.equal(fromClient.connection.stream.listeners('close').length, 0)
assert.equal(fromClient.connection.stream.listeners('data').length, 1)
assert.equal(fromClient.connection.stream.listeners('end').length, 2)
assert.equal(fromClient.connection.stream.listeners('error').length, 1)
fromClient.end()
runStream(function(err) {
assert.notEqual(err, null)
var expectedMessage = 'canceling statement due to user request'
assert.notEqual(err.toString().indexOf(expectedMessage), -1, 'Error message should mention reason for query failure.')
cancelClient.end()
queryClient.end()
})
}
testInternalPostgresError()
var testNoticeResponse = function() {
// we use a special trick to generate a warning
// on the copy stream.
var queryClient = client()
var set = '';
set += 'SET SESSION client_min_messages = WARNING;'
set += 'SET SESSION standard_conforming_strings = off;'
set += 'SET SESSION escape_string_warning = on;'
queryClient.query(set, function(err, res) {
assert.equal(err, null, 'testNoticeResponse - could not SET parameters')
var runStream = function(callback) {
var txt = "COPY (SELECT '\\\n') TO STDOUT"
var stream = queryClient.query(copy(txt))
stream.on('data', function(data) {
})
stream.on('error', callback)
// make sure stream is pulled from
stream.pipe(concat(callback.bind(null,null)))
}
runStream(function(err) {
assert.equal(err, null, err)
queryClient.end()
})
})
}
testLeak(5)
testNoticeResponse();

test/index.js

@@ -1,3 +1,5 @@
'use strict';
require('./copy-from')
require('./copy-to')
require('./binary')