35 Commits

Author SHA1 Message Date
Daniel García Aubert
5ff2cf6746 Stubs next version 2018-11-21 18:13:03 +01:00
Daniel García Aubert
94c8d89aba Release v1.2.0-carto.3 2018-11-21 18:08:01 +01:00
Daniel G. Aubert
c19ab0746c Merge pull request #3 from CartoDB/nodejs-10
Support Node.js 10 LTS
2018-11-21 18:05:40 +01:00
Daniel García Aubert
a27bd1fd8f Prepare next release 2018-11-21 17:57:20 +01:00
Daniel García Aubert
01ab20fce2 Add note to README 2018-11-07 16:50:38 +01:00
Daniel García Aubert
d55dbe70d5 Do not use deprecated Buffer constructor 2018-11-07 15:06:38 +01:00
Daniel García Aubert
de47558628 Support 'package-lock.json' 2018-11-07 15:01:55 +01:00
Daniel García Aubert
38c78677e8 Drop support for old version of Node.js and support latest LTS releases 2018-11-07 15:01:24 +01:00
Daniel García Aubert
6f1d5cb4a5 Release v1.2.0-carto.2 2018-10-26 13:26:02 +02:00
Daniel G. Aubert
99d397956c Merge pull request #2 from CartoDB/use-strict
Use strict mode
2018-10-26 13:22:30 +02:00
Daniel García Aubert
c85f7d27b8 Update NEWS 2018-10-24 19:17:10 +02:00
Daniel García Aubert
8b355b6e72 Use strict 2018-10-24 18:52:57 +02:00
Rafa de la Torre
d7e5c1383f Update v1.2.0-carto.1 release date 2018-06-11 13:30:43 +02:00
Rafa de la Torre
cb2227d159 Merge pull request #1 from CartoDB/performance-tune-copy-to
Improve performance of COPY TO
2018-06-11 13:29:23 +02:00
Rafa de la Torre
7930d1b8dd Add entry to changelog 2018-06-11 13:27:44 +02:00
Rafa de la Torre
e94fefe902 Merge branch 'v1.2-carto' into performance-tune-copy-to 2018-06-11 13:24:09 +02:00
Rafa de la Torre
9293926047 Add a NEWS.carto.md with the changelog 2018-06-11 13:20:53 +02:00
Rafa de la Torre
fd3cc95573 Remove unused var buffer_sent 2018-06-11 12:17:39 +02:00
Rafa de la Torre
922627daaf Small refactor 2018-06-11 12:14:28 +02:00
Rafa de la Torre
61bc713e0c Improve performance of COPY TO #56
Under some circumstances, the COPY TO streaming can be CPU-bound,
particularly when PG holds the resultset in memory buffers and the rows
are much smaller than the chunk size (64 KB on my Linux box).

This commit improves the situation by creating a buffer of `chunk`
size and fitting in as many rows as it can before pushing them. This
results in more balanced reads and writes (bigger, more evenly sized
chunks) as well as more frequent calls to the callback, thus freeing the
main loop for other events to be processed and avoiding starvation.
(A sketch of this batching idea follows the commit list below.)
2018-06-08 15:04:42 +02:00
jeromew
e15feb199a README.md: copy-from error and vacuum #26 2016-08-23 12:10:25 +02:00
jeromew
191a4ec16a Fix documentation of copy-from completion 2016-08-23 11:32:10 +02:00
jeromew
399bff7ed7 Bump version 2016-08-22 16:27:04 +00:00
jeromew
8174e10fb5 Merge pull request #54 from jeromew/upstream-end
Test: `end` event should not be triggered 2 times on copy-from
2016-08-22 17:47:29 +02:00
jeromew
f29aef3bba Merge pull request #53 from jeromew/upstream-frontier
Bugfix - wrong tests on chunk frontiers
2016-08-22 17:47:01 +02:00
jeromew
b2e108571e Issue #54: We should probably delay the _flush cb() to CommandComplete 2016-07-30 00:08:54 +00:00
jeromew
7003f6070f Fix issue #54: end is being triggered 2 times 2016-07-29 23:51:41 +00:00
jeromew
ade7ab95a0 Test: end event should not be triggered 2 times on copy-from 2016-07-29 23:47:39 +00:00
jeromew
9ccda04036 Bugfix - Chunk frontiers were not correctly tested 2016-07-28 23:05:50 +00:00
jeromew
a5e532f20b Bugfix - wrong tests on chunk frontiers 2016-07-28 22:55:02 +00:00
jeromew
2a4db2920e Fix NoticeResponse test and handle other messages (#52) 2016-07-28 16:13:22 -05:00
jeromew
f155899570 Add a test for NoticeResponse handling (#51) 2016-07-28 13:15:59 -05:00
brianc
18be125596 Bump version 2016-07-26 14:40:01 -05:00
jeromew
ae5b344395 Refactor message format codes handling (#45) 2016-07-26 14:39:48 -05:00
jeromew
ee84aba89f Use end stream option instead of not calling _flush callback (#44) 2016-07-26 14:39:33 -05:00
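
For commit 61bc713e0c above, here is a minimal sketch of the row-batching idea: fill one chunk-sized buffer with as many rows as fit before pushing, instead of pushing one small buffer per row. It assumes each row fits within the chunk size; the helper name `pushRowsBatched` is illustrative, not the fork's actual code.

```js
'use strict';
var Readable = require('stream').Readable

// Batch many small rows into one chunk-sized buffer before pushing,
// so the consumer sees fewer, larger reads (assumes row.length <= chunkSize).
function pushRowsBatched(readable, rows, chunkSize) {
  var buffer = Buffer.alloc(chunkSize)
  var offset = 0
  rows.forEach(function(row) {
    if (offset + row.length > chunkSize) {
      readable.push(buffer.slice(0, offset)) // flush the filled part
      offset = 0
    }
    row.copy(buffer, offset)
    offset += row.length
  })
  if (offset > 0) readable.push(buffer.slice(0, offset))
}

// Demo with in-memory rows standing in for CopyData payloads.
var out = new Readable({ read: function() {} })
out.on('data', function(chunk) { console.log('pushed', chunk.length, 'bytes') })
var rows = []
for (var i = 0; i < 10000; i++) rows.push(Buffer.from(i + '\t' + i * 10 + '\n'))
pushRowsBatched(out, rows, 64 * 1024)
out.push(null)
```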
12 changed files with 423 additions and 63 deletions

.travis.yml

@@ -1,9 +1,8 @@
 language: node_js
 node_js:
-  - "0.12"
-  - "4"
-  - "5"
   - "6"
+  - "8"
+  - "10"
 addons:
   postgresql: "9.2"

NEWS.carto.md Normal file

@@ -0,0 +1,30 @@
# CARTO's Changelog
## v1.2.0-carto.4
Released 2018-mm-dd
## v1.2.0-carto.3
Released 2018-11-21
Features:
* Drop support for Node.js 0.12, 4, and 5.
* Add support for Node.js 8 and 10.
* Add package-lock.json
* Do not use deprecated Buffer constructors.
## v1.2.0-carto.2
Released 2018-10-26
Bug fixes:
* Make all modules use strict mode semantics.
## v1.2.0-carto.1
Released 2018-06-11
Bug fixes:
* Improve performance of COPY TO by sending bigger chunks through the low-level `push()`. See https://github.com/CartoDB/node-pg-copy-streams/pull/1
## v1.2.0
Released 2016-08-22
Vanilla version v1.2.0 from upstream repository. See https://github.com/CartoDB/node-pg-copy-streams/releases/tag/v1.2.0

README.md

@@ -1,3 +1,7 @@
+## Note
+
+This is a fork of [brianc/node-pg-copy-streams](https://github.com/brianc/node-pg-copy-streams).
+
 ## pg-copy-streams
 
 [![Build Status](https://travis-ci.org/brianc/node-pg-copy-streams.svg)](https://travis-ci.org/brianc/node-pg-copy-streams)
@@ -42,10 +46,15 @@ pg.connect(function(err, client, done) {
   var stream = client.query(copyFrom('COPY my_table FROM STDIN'));
   var fileStream = fs.createReadStream('some_file.tsv')
   fileStream.on('error', done);
-  fileStream.pipe(stream).on('finish', done).on('error', done);
+  stream.on('error', done);
+  stream.on('end', done);
+  fileStream.pipe(stream);
 });
 ```
+
+*Important*: Even if `pg-copy-streams.from` is used as a Writable (via `pipe`), you should not listen for the 'finish' event and expect that the COPY command has already been correctly acknowledged by the database. Internally, a duplex stream is used to pipe the data into the database connection and the COPY command should be considered complete only when the 'end' event is triggered.
## install
```sh
@@ -56,7 +65,10 @@ $ npm install pg-copy-streams
 
 This module __only__ works with the pure JavaScript bindings. If you're using `require('pg').native` please make sure to use normal `require('pg')` or `require('pg.js')` when you're using copy streams.
 
-Before you set out on this magical piping journey, you _really_ should read this: http://www.postgresql.org/docs/9.3/static/sql-copy.html, and you might want to take a look at the [tests](https://github.com/brianc/node-pg-copy-streams/tree/master/test) to get an idea of how things work.
+Before you set out on this magical piping journey, you _really_ should read this: http://www.postgresql.org/docs/current/static/sql-copy.html, and you might want to take a look at the [tests](https://github.com/brianc/node-pg-copy-streams/tree/master/test) to get an idea of how things work.
+
+Take note of the following warning in the PostgreSQL documentation:
+
+> COPY stops operation at the first error. This should not lead to problems in the event of a COPY TO, but the target table will already have received earlier rows in a COPY FROM. These rows will not be visible or accessible, but they still occupy disk space. This might amount to a considerable amount of wasted disk space if the failure happened well into a large copy operation. You might wish to invoke VACUUM to recover the wasted space.
## contributing

copy-to.js

@@ -1,14 +1,17 @@
+'use strict';
+
 module.exports = function(txt, options) {
   return new CopyStreamQuery(txt, options)
 }
 
 var Transform = require('stream').Transform
 var util = require('util')
+var code = require('./message-formats')
 
 var CopyStreamQuery = function(text, options) {
   Transform.call(this, options)
   this.text = text
-  this._copyOutResponse = null
+  this._gotCopyOutResponse = false
   this.rowCount = 0
 }
@@ -23,63 +26,89 @@ CopyStreamQuery.prototype.submit = function(connection) {
   connection.stream.pipe(this)
 }
 
-var code = {
-  E: 69, //Error
-  H: 72, //CopyOutResponse
-  d: 0x64, //CopyData
-  c: 0x63 //CopyDone
-}
-
 CopyStreamQuery.prototype._detach = function() {
   this.connection.stream.unpipe(this)
   // Unpipe can drop us out of flowing mode
   this.connection.stream.resume()
 }
 
 CopyStreamQuery.prototype._transform = function(chunk, enc, cb) {
   var offset = 0
+  var Byte1Len = 1;
+  var Int32Len = 4;
   if(this._remainder && chunk) {
     chunk = Buffer.concat([this._remainder, chunk])
   }
-  if(!this._copyOutResponse) {
-    this._copyOutResponse = true
-    if(chunk[0] == code.E) {
-      this._detach()
-      this.push(null)
-      return cb();
-    }
-    if(chunk[0] != code.H) {
-      this.emit('error', new Error('Expected copy out response'))
-    }
-    var length = chunk.readUInt32BE(1)
-    offset = 1
-    offset += length
-  }
-  while((chunk.length - offset) > 5) {
+
+  var length;
+  var messageCode;
+  var needPush = false;
+  var buffer = Buffer.alloc(chunk.length);
+  var buffer_offset = 0;
+  this.pushBufferIfneeded = function() {
+    if (needPush && buffer_offset > 0) {
+      this.push(buffer.slice(0, buffer_offset))
+      buffer_offset = 0;
+    }
+  }
+
+  while((chunk.length - offset) >= (Byte1Len + Int32Len)) {
     var messageCode = chunk[offset]
-    //complete or error
-    if(messageCode == code.c || messageCode == code.E) {
-      this._detach()
-      this.push(null)
-      return cb();
+
+    //console.log('PostgreSQL message ' + String.fromCharCode(messageCode))
+    switch(messageCode) {
+
+      // detect COPY start
+      case code.CopyOutResponse:
+        if (!this._gotCopyOutResponse) {
+          this._gotCopyOutResponse = true
+        } else {
+          this.emit('error', new Error('Unexpected CopyOutResponse message (H)'))
+        }
+        break;
+
+      // meaningful row
+      case code.CopyData:
+        needPush = true;
+        break;
+
+      // standard interspersed messages. discard
+      case code.ParameterStatus:
+      case code.NoticeResponse:
+      case code.NotificationResponse:
+        break;
+
+      case code.ErrorResponse:
+      case code.CopyDone:
+        this.pushBufferIfneeded();
+        this._detach()
+        this.push(null)
+        return cb();
+        break;
+
+      default:
+        this.emit('error', new Error('Unexpected PostgreSQL message ' + String.fromCharCode(messageCode)))
     }
-    //something bad happened
-    if(messageCode != code.d) {
-      return this.emit('error', new Error('expected "d" (copydata message)'))
-    }
-    var length = chunk.readUInt32BE(offset + 1) - 4 //subtract length of UInt32
-    //can we read the next row?
-    if(chunk.length > (offset + length + 5)) {
-      offset += 5
-      var slice = chunk.slice(offset, offset + length)
-      offset += length
-      this.push(slice)
-      this.rowCount++
+
+    length = chunk.readUInt32BE(offset+Byte1Len)
+    if(chunk.length >= (offset + Byte1Len + length)) {
+      offset += Byte1Len + Int32Len
+      if (needPush) {
+        var row = chunk.slice(offset, offset + length - Int32Len)
+        this.rowCount++
+        row.copy(buffer, buffer_offset);
+        buffer_offset += row.length;
+      }
+      offset += (length - Int32Len)
     } else {
+      // we need more chunks for a complete message
       break;
     }
   }
+
+  this.pushBufferIfneeded();
+
   if(chunk.length - offset) {
     var slice = chunk.slice(offset)
     this._remainder = slice
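
To follow the parsing loop above: each protocol message is a one-byte tag (Byte1) followed by a big-endian Int32 length that counts itself and the payload, but not the tag. A self-contained sketch of that framing arithmetic (the `parseFrame` helper is illustrative, not code from this repo):

```js
'use strict';
var Byte1Len = 1
var Int32Len = 4

// Parse one frame starting at `offset`; return null if more bytes are needed.
function parseFrame(chunk, offset) {
  if (chunk.length - offset < Byte1Len + Int32Len) return null // header incomplete
  var messageCode = chunk[offset]
  var length = chunk.readUInt32BE(offset + Byte1Len) // includes its own 4 bytes
  if (chunk.length < offset + Byte1Len + length) return null   // payload incomplete
  var payload = chunk.slice(offset + Byte1Len + Int32Len, offset + Byte1Len + length)
  return { code: String.fromCharCode(messageCode), payload: payload, next: offset + Byte1Len + length }
}

// Build a CopyData ('d') frame carrying the row "1\thello\n" and parse it back.
var row = Buffer.from('1\thello\n')
var frame = Buffer.concat([Buffer.from('d'), Buffer.alloc(Int32Len), row])
frame.writeUInt32BE(Int32Len + row.length, Byte1Len)
console.log(parseFrame(frame, 0)) // { code: 'd', payload: <Buffer ...>, next: 13 }
```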

index.js

@@ -1,3 +1,5 @@
+'use strict';
+
 var CopyToQueryStream = require('./copy-to')
 module.exports = {
   to: function(txt, options) {
@@ -10,6 +12,7 @@ module.exports = {
 
 var Transform = require('stream').Transform
 var util = require('util')
+var code = require('./message-formats')
 
 var CopyStreamQuery = function(text, options) {
   Transform.call(this, options)
@@ -26,27 +29,23 @@ CopyStreamQuery.prototype.submit = function(connection) {
   connection.query(this.text)
 }
 
-var code = {
-  H: 72, //CopyOutResponse
-  d: 0x64, //CopyData
-  c: 0x63 //CopyDone
-}
-
-var copyDataBuffer = Buffer([code.d])
+var copyDataBuffer = Buffer.from([code.CopyData])
 
 CopyStreamQuery.prototype._transform = function(chunk, enc, cb) {
+  var Int32Len = 4;
   this.push(copyDataBuffer)
-  var lenBuffer = Buffer(4)
-  lenBuffer.writeUInt32BE(chunk.length + 4, 0)
+  var lenBuffer = Buffer.alloc(Int32Len)
+  lenBuffer.writeUInt32BE(chunk.length + Int32Len, 0)
   this.push(lenBuffer)
   this.push(chunk)
   cb()
 }
 
 CopyStreamQuery.prototype._flush = function(cb) {
-  var finBuffer = Buffer([code.c, 0, 0, 0, 4])
+  var Int32Len = 4;
+  var finBuffer = Buffer.from([code.CopyDone, 0, 0, 0, Int32Len])
   this.push(finBuffer)
-  //never call this callback, do not close underlying stream
-  //cb()
+  this.cb_flush = cb
 }
 
 CopyStreamQuery.prototype.handleError = function(e) {
@@ -54,7 +53,7 @@ CopyStreamQuery.prototype.handleError = function(e) {
 }
 
 CopyStreamQuery.prototype.handleCopyInResponse = function(connection) {
-  this.pipe(connection.stream)
+  this.pipe(connection.stream, { end: false })
 }
 
 CopyStreamQuery.prototype.handleCommandComplete = function(msg) {
@@ -65,8 +64,13 @@ CopyStreamQuery.prototype.handleCommandComplete = function(msg) {
     this.rowCount = parseInt(match[1], 10)
   }
-  this.unpipe()
-  this.emit('end')
+
+  // we delay the _flush cb so that the 'end' event is
+  // triggered after CommandComplete
+  this.cb_flush()
+
+  // unpipe from connection
+  this.unpipe(this.connection)
+  this.connection = null
 }
 
 CopyStreamQuery.prototype.handleReadyForQuery = function() {
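
The practical consequence of delaying the `_flush` callback: on copy-from, 'end' now fires only after the database has sent CommandComplete, so callers should treat 'end' (not 'finish') as the acknowledgement. A usage sketch, assuming a reachable database and an existing `my_table`:

```js
'use strict';
var pg = require('pg')
var copyFrom = require('./').from

var client = new pg.Client()
client.connect(function(err) {
  if (err) throw err
  var stream = client.query(copyFrom('COPY my_table FROM STDIN'))
  stream.on('error', function(err) { throw err })
  stream.on('end', function() {
    // CommandComplete received: the COPY is acknowledged by the database.
    client.end()
  })
  stream.end(Buffer.from('1\thello\n'))
})
```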

message-formats.js Normal file

@@ -0,0 +1,25 @@
/**
* The COPY feature uses the following protocol codes.
* The codes for the most recent protocol version are documented on
* https://www.postgresql.org/docs/current/static/protocol-message-formats.html
*
* The protocol flow itself is described on
* https://www.postgresql.org/docs/current/static/protocol-flow.html
*/
module.exports = {
ErrorResponse: 0x45, // E
CopyInResponse: 0x47, // G
CopyOutResponse: 0x48, // H
CopyBothResponse: 0x57, // W
CopyDone: 0x63, // c
CopyData: 0x64, // d
CopyFail: 0x66, // f
// It is possible for NoticeResponse and ParameterStatus messages to be interspersed between CopyData messages;
// frontends must handle these cases, and should be prepared for other asynchronous message types as well
// (see Section 50.2.6).
// Otherwise, any message type other than CopyData or CopyDone may be treated as terminating copy-out mode.
NotificationResponse: 0x41, // A
NoticeResponse: 0x4E, // N
ParameterStatus: 0x53 // S
}
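
Each value above is just the ASCII code of the single tag byte that opens a message. A quick sketch of using the table, with a reverse lookup that can help when tracing the stream (illustrative, not part of the module):

```js
'use strict';
var code = require('./message-formats')

console.log(String.fromCharCode(code.CopyOutResponse)) // 'H'
console.log(String.fromCharCode(code.CopyData))        // 'd'
console.log(String.fromCharCode(code.CopyDone))        // 'c'

// Reverse lookup: tag byte -> message name.
var names = Object.keys(code).reduce(function(acc, name) {
  acc[code[name]] = name
  return acc
}, {})
console.log(names[0x45]) // 'ErrorResponse'
```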

package-lock.json generated Normal file

@@ -0,0 +1,193 @@
{
"name": "pg-copy-streams",
"version": "1.2.0-carto.4",
"lockfileVersion": 1,
"requires": true,
"dependencies": {
"async": {
"version": "0.2.10",
"resolved": "http://registry.npmjs.org/async/-/async-0.2.10.tgz",
"integrity": "sha1-trvgsGdLnXGXCMo43owjfLUmw9E=",
"dev": true
},
"base64-js": {
"version": "0.0.2",
"resolved": "https://registry.npmjs.org/base64-js/-/base64-js-0.0.2.tgz",
"integrity": "sha1-Ak8Pcq+iW3X5wO5zzU9V7Bvtl4Q=",
"dev": true
},
"bops": {
"version": "0.0.6",
"resolved": "https://registry.npmjs.org/bops/-/bops-0.0.6.tgz",
"integrity": "sha1-CC0dVfoB5g29wuvC26N/ZZVUzzo=",
"dev": true,
"requires": {
"base64-js": "0.0.2",
"to-utf8": "0.0.1"
}
},
"buffer-writer": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/buffer-writer/-/buffer-writer-1.0.1.tgz",
"integrity": "sha1-Iqk2kB4wKa/NdUfrRIfOtpejvwg=",
"dev": true
},
"concat-stream": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-1.1.0.tgz",
"integrity": "sha1-hCae/YzGUCdeMi8wnfRIZ7xRxfM=",
"dev": true,
"requires": {
"bops": "0.0.6"
}
},
"generic-pool": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/generic-pool/-/generic-pool-2.1.1.tgz",
"integrity": "sha1-rwTcLDJc/Ll1Aj+lK/zpYXp0Nf0=",
"dev": true
},
"gonna": {
"version": "0.0.0",
"resolved": "https://registry.npmjs.org/gonna/-/gonna-0.0.0.tgz",
"integrity": "sha1-6k4ZsVJ6F4LhJQVeMCSabUvHmlk=",
"dev": true
},
"heroku-env": {
"version": "0.1.1",
"resolved": "https://registry.npmjs.org/heroku-env/-/heroku-env-0.1.1.tgz",
"integrity": "sha1-wGeRyUTpuHSOMXf1S/cBQyZ+Yxc=",
"dev": true,
"requires": {
"parse-database-url": "~0.2.0"
}
},
"lodash": {
"version": "2.2.1",
"resolved": "http://registry.npmjs.org/lodash/-/lodash-2.2.1.tgz",
"integrity": "sha1-ypNf0UqzwMhyq6zxmLnNpQFECGc=",
"dev": true
},
"packet-reader": {
"version": "0.2.0",
"resolved": "https://registry.npmjs.org/packet-reader/-/packet-reader-0.2.0.tgz",
"integrity": "sha1-gZ300BC4LV6lZx+KGjrPA5vNdwA=",
"dev": true
},
"parse-database-url": {
"version": "0.2.2",
"resolved": "https://registry.npmjs.org/parse-database-url/-/parse-database-url-0.2.2.tgz",
"integrity": "sha1-SGFa56fA/HfjKU0jVCpqUnPDVws=",
"dev": true
},
"pg": {
"version": "4.4.6",
"resolved": "http://registry.npmjs.org/pg/-/pg-4.4.6.tgz",
"integrity": "sha1-EZgiP7rva6QRqm9Q4X9OtaGTFVk=",
"dev": true,
"requires": {
"buffer-writer": "1.0.1",
"generic-pool": "2.1.1",
"packet-reader": "0.2.0",
"pg-connection-string": "0.1.3",
"pg-types": "1.*",
"pgpass": "0.0.3",
"semver": "^4.1.0"
}
},
"pg-connection-string": {
"version": "0.1.3",
"resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-0.1.3.tgz",
"integrity": "sha1-2hhHsglA5C7hSSvq9l1J2RskXfc=",
"dev": true
},
"pg-int8": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz",
"integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==",
"dev": true
},
"pg-types": {
"version": "1.13.0",
"resolved": "https://registry.npmjs.org/pg-types/-/pg-types-1.13.0.tgz",
"integrity": "sha512-lfKli0Gkl/+za/+b6lzENajczwZHc7D5kiUCZfgm914jipD2kIOIvEkAhZ8GrW3/TUoP9w8FHjwpPObBye5KQQ==",
"dev": true,
"requires": {
"pg-int8": "1.0.1",
"postgres-array": "~1.0.0",
"postgres-bytea": "~1.0.0",
"postgres-date": "~1.0.0",
"postgres-interval": "^1.1.0"
}
},
"pgpass": {
"version": "0.0.3",
"resolved": "https://registry.npmjs.org/pgpass/-/pgpass-0.0.3.tgz",
"integrity": "sha1-EuZ+NDsxicLzEgbrycwL7//PkUA=",
"dev": true,
"requires": {
"split": "~0.3"
}
},
"postgres-array": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-1.0.3.tgz",
"integrity": "sha512-5wClXrAP0+78mcsNX3/ithQ5exKvCyK5lr5NEEEeGwwM6NJdQgzIJBVxLvRW+huFpX92F2QnZ5CcokH0VhK2qQ==",
"dev": true
},
"postgres-bytea": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.0.tgz",
"integrity": "sha1-AntTPAqokOJtFy1Hz5zOzFIazTU=",
"dev": true
},
"postgres-date": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.3.tgz",
"integrity": "sha1-4tiXAu/bJY/52c7g/pG9BpdSV6g=",
"dev": true
},
"postgres-interval": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.1.2.tgz",
"integrity": "sha512-fC3xNHeTskCxL1dC8KOtxXt7YeFmlbTYtn7ul8MkVERuTmf7pI4DrkAxcw3kh1fQ9uz4wQmd03a1mRiXUZChfQ==",
"dev": true,
"requires": {
"xtend": "^4.0.0"
}
},
"semver": {
"version": "4.3.6",
"resolved": "http://registry.npmjs.org/semver/-/semver-4.3.6.tgz",
"integrity": "sha1-MAvG4OhjdPe6YQaLWx7NV/xlMto=",
"dev": true
},
"split": {
"version": "0.3.3",
"resolved": "http://registry.npmjs.org/split/-/split-0.3.3.tgz",
"integrity": "sha1-zQ7qXmOiEd//frDwkcQTPi0N0o8=",
"dev": true,
"requires": {
"through": "2"
}
},
"through": {
"version": "2.3.8",
"resolved": "http://registry.npmjs.org/through/-/through-2.3.8.tgz",
"integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=",
"dev": true
},
"to-utf8": {
"version": "0.0.1",
"resolved": "https://registry.npmjs.org/to-utf8/-/to-utf8-0.0.1.tgz",
"integrity": "sha1-0Xrqcv8vujm55DYBvns/9y4ImFI=",
"dev": true
},
"xtend": {
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz",
"integrity": "sha1-pcbVMr5lbiPbgg77lDofBJmNY68=",
"dev": true
}
}
}

package.json

@@ -1,6 +1,6 @@
 {
   "name": "pg-copy-streams",
-  "version": "1.1.0",
+  "version": "1.2.0-carto.4",
   "description": "Low-Level COPY TO and COPY FROM streams for PostgreSQL in JavaScript using",
   "main": "index.js",
   "scripts": {

test/binary.js

@@ -1,3 +1,5 @@
+'use strict';
+
 var assert = require('assert')
 var gonna = require('gonna')
@@ -19,7 +21,7 @@ var testBinaryCopy = function() {
   var fromClient = client()
   var toClient = client()
 
-  queries = [
+  var queries = [
     'DROP TABLE IF EXISTS data',
     'CREATE TABLE IF NOT EXISTS data (num BIGINT, word TEXT)',
     'INSERT INTO data (num, word) VALUES (1, \'hello\'), (2, \'other thing\'), (3, \'goodbye\')',
@@ -33,7 +35,7 @@ var testBinaryCopy = function() {
   var fromStream = fromClient.query(to('COPY (SELECT * FROM data) TO STDOUT BINARY'))
   var toStream = toClient.query(from('COPY data_copy FROM STDIN BINARY'))
 
-  runStream = function(callback) {
+  var runStream = function(callback) {
     fromStream.on('error', callback)
     toStream.on('error', callback)
     toStream.on('finish', callback)

test/copy-from.js

@@ -1,3 +1,5 @@
+'use strict';
+
 var assert = require('assert')
 var gonna = require('gonna')
@@ -31,7 +33,7 @@ var testRange = function(top) {
   var txt = 'COPY numbers FROM STDIN'
   var stream = fromClient.query(copy(txt))
   for(var i = 0; i < top; i++) {
-    stream.write(Buffer('' + i + '\t' + i*10 + '\n'))
+    stream.write(Buffer.from('' + i + '\t' + i*10 + '\n'))
   }
   stream.end()
   var countDone = gonna('have correct count')
@@ -39,6 +41,7 @@ var testRange = function(top) {
   fromClient.query('SELECT COUNT(*) FROM numbers', function(err, res) {
     assert.ifError(err)
     assert.equal(res.rows[0].count, top, 'expected ' + top + ' rows but got ' + res.rows[0].count)
+    assert.equal(stream.rowCount, top, 'expected ' + top + ' rows but db count is ' + stream.rowCount)
     //console.log('found ', res.rows.length, 'rows')
     countDone()
     var firstRowDone = gonna('have correct result')
@@ -54,3 +57,19 @@ var testRange = function(top) {
 }
 
 testRange(1000)
+
+var testSingleEnd = function() {
+  var fromClient = client()
+  fromClient.query('CREATE TEMP TABLE numbers(num int)')
+  var txt = 'COPY numbers FROM STDIN';
+  var stream = fromClient.query(copy(txt))
+  var count = 0;
+  stream.on('end', function() {
+    count++;
+    assert(count==1, '`end` Event was triggered ' + count + ' times');
+    if (count == 1) fromClient.end();
+  })
+  stream.end(Buffer.from('1\n'))
+}
+testSingleEnd()

test/copy-to.js

@@ -1,3 +1,5 @@
+'use strict';
+
 var assert = require('assert')
 var gonna = require('gonna')
@@ -7,6 +9,7 @@ var concat = require('concat-stream')
 var pg = require('pg')
 var copy = require('../').to
+var code = require('../message-formats')
 
 var client = function() {
   var client = new pg.Client()
@@ -22,6 +25,18 @@ var testConstruction = function() {
 
 testConstruction()
 
+var testComparators = function() {
+  var copy1 = copy();
+  copy1.pipe(concat(function(buf) {
+    assert(copy1._gotCopyOutResponse, 'should have received CopyOutResponse')
+    assert(!copy1._remainder, 'Message with no additional data (len=Int4Len+0) should not leave a remainder')
+  }))
+  copy1.end(Buffer.from([code.CopyOutResponse, 0x00, 0x00, 0x00, 0x04]));
+}
+testComparators();
+
 var testRange = function(top) {
   var fromClient = client()
   var txt = 'COPY (SELECT * from generate_series(0, ' + (top - 1) + ')) TO STDOUT'
@@ -73,5 +88,35 @@ var testInternalPostgresError = function() {
     queryClient.end()
   })
 }
+
 testInternalPostgresError()
+
+var testNoticeResponse = function() {
+  // we use a special trick to generate a warning
+  // on the copy stream.
+  var queryClient = client()
+  var set = '';
+  set += 'SET SESSION client_min_messages = WARNING;'
+  set += 'SET SESSION standard_conforming_strings = off;'
+  set += 'SET SESSION escape_string_warning = on;'
+  queryClient.query(set, function(err, res) {
+    assert.equal(err, null, 'testNoticeResponse - could not SET parameters')
+    var runStream = function(callback) {
+      var txt = "COPY (SELECT '\\\n') TO STDOUT"
+      var stream = queryClient.query(copy(txt))
+      stream.on('data', function(data) {
+      })
+      stream.on('error', callback)
+
+      // make sure stream is pulled from
+      stream.pipe(concat(callback.bind(null,null)))
+    }
+
+    runStream(function(err) {
+      assert.equal(err, null, err)
+      queryClient.end()
+    })
+  })
+}
+testNoticeResponse();

test/index.js

@@ -1,3 +1,5 @@
+'use strict';
+
 require('./copy-from')
 require('./copy-to')
 require('./binary')