Compare commits

..

250 Commits
6.4.0 ... 7.0.0

Author SHA1 Message Date
Daniel García Aubert
ca7f0ad4a6 Release 7.0.0 2019-02-22 12:52:10 +01:00
Daniel G. Aubert
4fc4390173 Merge pull request #1076 from CartoDB/drop-support-nodejs-6
Drop support for Node.js 6, npm 3, yarn and redis 3
2019-02-22 12:17:05 +01:00
Daniel García Aubert
fff54f021c Improve doc 2019-02-22 12:11:14 +01:00
Daniel García Aubert
6fee16fe5e Update comments and config 2019-02-22 10:49:22 +01:00
Daniel García Aubert
5f43db2e36 Remove POSTGIS_VERSION env variable and run test using mvt renderer always 2019-02-22 08:31:22 +01:00
Daniel García Aubert
561bdb3938 Improve MD formatting 2019-02-22 07:53:18 +01:00
Daniel García Aubert
47576358a2 Remove hack of stripping PARALLEL labels for PG releases before 9.6 2019-02-22 07:52:39 +01:00
Daniel García Aubert
582947accd Update release date 2019-02-21 18:45:19 +01:00
Daniel García Aubert
30f4b58ced Add npm as dependency 2019-02-21 18:26:56 +01:00
Daniel García Aubert
227a271bea Typo 2019-02-21 18:21:37 +01:00
Daniel García Aubert
5455d2997f Remove items in requirements list 2019-02-21 18:11:27 +01:00
Daniel García Aubert
241bb511ea Drop support for Redis 3, Postgres 9.5 and PostGIS 2.2 2019-02-21 17:55:58 +01:00
Daniel García Aubert
85e19bf16c Drop support for Node.js 6, npm 3, yarn and redis 3 2019-02-21 17:34:29 +01:00
Rafa de la Torre
e93dd982b9 Merge pull request #1072 from CartoDB/upgrade-camshaft-0.63.4
Upgrade camshaft 0.63.4
2019-02-13 14:57:47 +01:00
Rafa de la Torre
535b52df55 Update package-lock.json (node 10) camshaft@0.63.4 2019-02-13 14:39:44 +01:00
Rafa de la Torre
a04782b63e Update yarn camshaft@0.63.4 2019-02-13 14:39:44 +01:00
Rafa de la Torre
feb1e53a4d Upgrade camshaft 0.63.4 2019-02-13 14:39:30 +01:00
Simon Martín
705c001681 Merge pull request #1070 from CartoDB/docker-new-node-version
Dynamic node version in Dockerfile
2019-02-13 10:55:47 +01:00
Rafa de la Torre
a973fc981a Merge pull request #1071 from CartoDB/upgrade-camshaft-0.63.3
Upgrade camshaft 0.63.3
2019-02-13 09:36:52 +01:00
Daniel G. Aubert
77cbe2b545 version 6 to 6.9.2 in travis
Co-Authored-By: oleurud <oleurud@pm.me>
2019-02-13 09:27:16 +01:00
Daniel G. Aubert
0b5ef6a5e1 version 6 to 6.9.2 in travis
Co-Authored-By: oleurud <oleurud@pm.me>
2019-02-13 09:27:08 +01:00
Simon Martín
97f813a777 extracting postgres start from Node.js installation file 2019-02-12 18:51:33 +01:00
Rafa de la Torre
a242588d95 Update package-lock.json (node 10) camshaft@0.63.3 2019-02-12 18:43:32 +01:00
Simon Martín
b2f5e72bf1 renaming var 2019-02-12 18:34:25 +01:00
Rafa de la Torre
0daf8dcd8d Update yarn camshaft@0.63.3 2019-02-12 18:34:17 +01:00
Rafa de la Torre
79e886cfb9 Update camshaft@0.63.3 2019-02-12 18:29:05 +01:00
Simon Martín
4f6d04a3ae docker bash with dynamic image 2019-02-12 18:21:00 +01:00
Simon Martín
0e26d5ba5a travis with dynamic image 2019-02-12 18:20:43 +01:00
Simon Martín
915e0f8483 running tests with dynamic image 2019-02-12 18:20:32 +01:00
Simon Martín
4058d9fbc7 typo 2019-02-12 18:15:32 +01:00
Simon Martín
6c6ff42879 renaming docker nodejs installation script 2019-02-12 18:13:49 +01:00
Simon Martín
450f74b387 dockerfile CMD sh file 2019-02-12 16:55:17 +01:00
Simon Martín
f684994868 new docker file with nvm 2019-02-12 16:54:50 +01:00
Alejandro Guirao
be4240623a Pin Mapnik version and set crankshaft minor version 2019-02-12 15:30:55 +01:00
Simon Martín
4b5e003f33 fix bad file name 2019-02-12 15:06:33 +01:00
Simon Martín
6c7c0eb7e7 making nodejs version dynamic and LTS as default 2019-02-12 15:02:59 +01:00
Simon Martín
1ba6480110 using nvm 2019-02-12 14:46:41 +01:00
Daniel G. Aubert
af31962b2d Merge pull request #1069 from CartoDB/refactor
Upgrade windshaft to version 4.13.1
2019-02-11 13:49:50 +01:00
Daniel García Aubert
f134bd459e Update yarn.lock 2019-02-11 13:35:53 +01:00
Daniel García Aubert
01d02f8c2e Update windshaft to version 4.13.1 2019-02-11 13:33:50 +01:00
Daniel García Aubert
092bed6d9d Do not use caret symbol 2019-02-11 13:25:13 +01:00
Daniel García Aubert
8d9b8aced2 Upgrade windshaft to version 4.13.1 2019-02-11 13:19:34 +01:00
Daniel García Aubert
2856703acc Update windshaft devel branch 2019-02-08 18:44:59 +01:00
Daniel García Aubert
14b8a72551 Update windshaft devel branch 2019-02-07 19:16:36 +01:00
Daniel García Aubert
0125dcdd1d Update windshaft devel branch 2019-02-07 18:32:15 +01:00
Daniel García Aubert
6a15d30579 Update windshaft devel branch 2019-02-06 17:05:33 +01:00
Daniel García Aubert
3dcefa3ea1 Update windshaft devel dep 2019-02-05 08:55:34 +01:00
Daniel García Aubert
b3a995f880 Merge branch 'master' into refactor 2019-02-05 08:49:59 +01:00
Daniel García Aubert
d40be45e9a Update windshaft devel dep 2019-02-04 18:53:38 +01:00
Daniel G. Aubert
ec952d88cc Merge pull request #1068 from CartoDB/dynamic-map-pool
Handle 'max waitingClients count exceeded' error as: "429 You are over platform's limits"
2019-02-04 14:09:52 +01:00
Daniel García Aubert
3983dfe004 Update windshaft to version 4.13.0 2019-02-04 13:54:41 +01:00
Daniel García Aubert
e1c4f8445c Update tilelive-mapnik to development version 2019-02-04 13:52:03 +01:00
Daniel García Aubert
2a68e0565e Update windshaft dep 2019-02-01 17:43:47 +01:00
Daniel García Aubert
f4b6173da9 Fix test 2019-01-30 16:54:19 +01:00
Daniel García Aubert
63c95df81c Lint 2019-01-30 16:36:08 +01:00
Daniel García Aubert
065f9c0a53 Add subtype to the limit error 2019-01-30 16:15:29 +01:00
Daniel García Aubert
abe28937ca Lint 2019-01-30 15:28:43 +01:00
Daniel García Aubert
77b7e03869 Update NEWS 2019-01-30 15:18:41 +01:00
Daniel García Aubert
d5af1bd9a2 Implement tests to check the limit error is the expected one 2019-01-30 15:15:07 +01:00
Daniel García Aubert
86f313ec52 Update windshaft 2019-01-30 10:49:57 +01:00
Daniel García Aubert
2e85e130c8 Be able to customize max waiting workers parameter 2019-01-30 10:41:39 +01:00
Daniel García Aubert
d70a87b299 Handle 'max waitingClients count exceeded' error as 429 Too many requests 2019-01-30 10:27:00 +01:00
Daniel García Aubert
c4576564e5 Update windshaft to a development version 2019-01-29 17:47:12 +01:00
Daniel G. Aubert
f82a7a148b Merge pull request #1067 from CartoDB/audit-dependencies
Audit dependencies and fix critical vulns
2019-01-24 13:26:38 +01:00
Daniel García Aubert
b4431d823c Update NEWS 2019-01-24 13:02:42 +01:00
Daniel García Aubert
0168aa3a61 Update windshaft to version 4.12.3 2019-01-24 13:02:09 +01:00
Daniel García Aubert
2064b14015 Update engine compatible versions 2019-01-24 12:14:32 +01:00
Daniel García Aubert
aeea83d5f5 Update NEWS 2019-01-23 19:03:21 +01:00
Daniel García Aubert
221bf0cefd Update windshaft to version 4.12.2 2019-01-23 19:01:51 +01:00
Daniel García Aubert
2899f62d95 Don't use carets 2019-01-17 17:19:42 +01:00
Daniel García Aubert
442ca34eb1 Update NEWS 2019-01-17 17:17:21 +01:00
Daniel García Aubert
0202a17138 Upgrade jshint to version 2.9.7 2019-01-17 16:08:01 +01:00
Daniel García Aubert
37645ec663 Update mocha to version 5.2.0 2019-01-17 15:53:03 +01:00
Alberto Romeu
56eb4a8dc3 Merge pull request #1063 from CartoDB/cors-authorization-headers
Adding Authorization to Access-Control-Allow-Headers
2019-01-11 12:35:36 +01:00
Simon Martín
7b029c890a NEWS 2019-01-10 16:57:30 +01:00
Simon Martín
644b4232ca adding Authorization to Access-Control-Allow-Headers 2019-01-10 16:56:07 +01:00
Daniel G. Aubert
ff27e6744e Merge pull request #1062 from CartoDB/gc-stats
Report fine-grained Garbage Collector stats
2019-01-03 16:10:54 +01:00
Daniel García Aubert
fb929e71fc Update docs 2019-01-03 12:27:12 +01:00
Daniel García Aubert
0bd7bd1621 Update yarn.lock 2019-01-03 11:50:17 +01:00
Daniel García Aubert
5cad6e40c4 Get GC stats from dedicated binding 2019-01-03 11:47:54 +01:00
Daniel G. Aubert
3a4984c1ce Merge pull request #1061 from CartoDB/update-doc-nodejs-npm-versions
Update docs
2019-01-02 13:04:20 +01:00
Daniel García Aubert
caa5a648df Add deprecation warning 2019-01-02 12:54:39 +01:00
Daniel García Aubert
8487e4fb52 Update docs 2019-01-02 11:33:23 +01:00
Daniel García Aubert
b508689b53 Stubs next version 2018-12-26 16:57:19 +01:00
Daniel García Aubert
3b4d1bf72a Release 6.5.1 2018-12-26 16:55:35 +01:00
Daniel García Aubert
0bbbdfa092 Update NEWS 2018-12-26 16:54:14 +01:00
Daniel G. Aubert
c1e769ade2 Merge pull request #1060 from CartoDB/fix-carto-package
Fix carto-package.json
2018-12-26 16:52:49 +01:00
Daniel García Aubert
a20ce69028 Fix carto-package.json 2018-12-26 16:48:51 +01:00
Daniel García Aubert
80519cb397 Stubs next version 2018-12-26 15:26:09 +01:00
Daniel García Aubert
7347263dfd Release 6.5.0 2018-12-26 15:23:28 +01:00
Rafa de la Torre
2a3b6b830b Merge pull request #1059 from CartoDB/update-cartodb-psql-0.13.1
Update cartodb-psql to 0.13.1 and related reverse dependencies (WIP)
2018-12-13 17:22:55 +01:00
Rafa de la Torre
11b299e116 Update NEWS.md 2018-12-13 16:28:37 +01:00
Rafa de la Torre
27eef4ce42 Update NEWS.md 2018-12-13 16:26:07 +01:00
Rafa de la Torre
59badc0137 Update yarn.lock deps 2018-12-13 15:25:57 +01:00
Rafa de la Torre
f49698efa1 Update package-lock.json deps 2018-12-13 15:23:11 +01:00
Rafa de la Torre
19d6cae10d Update cartodb-psql and rdeps (WIP) 2018-12-12 18:45:36 +01:00
Daniel G. Aubert
7057f5a5c2 Merge pull request #1056 from CartoDB/nodejs-10
Support Nodejs 10 LTS
2018-11-22 14:48:46 +01:00
Daniel García Aubert
3e336204df Update NEWS 2018-11-21 18:01:53 +01:00
Daniel García Aubert
3f4ecb195c Update NEWS 2018-11-21 17:51:56 +01:00
Daniel García Aubert
58b528d00a Update NEWS 2018-11-21 17:49:42 +01:00
Daniel García Aubert
2d2060088c Update yarn.lock 2018-11-21 17:43:50 +01:00
Daniel García Aubert
d1667fac73 Upgrade turbo-carto@0.21.0 2018-11-21 17:36:30 +01:00
Daniel García Aubert
98c0b1f9bd Upgrade cartodb-redis@2.1.0 2018-11-21 17:14:03 +01:00
Daniel García Aubert
1d2548a3e6 Upgrade cartodb-query-tables@0.4.0 2018-11-21 16:39:12 +01:00
Daniel García Aubert
3690959be4 Update camshaft@0.63.0 2018-11-21 15:42:12 +01:00
Daniel García Aubert
7de5fd1515 Update yarn.lock 2018-11-21 13:29:32 +01:00
Daniel García Aubert
f9d1e39b7b Update windshaft@4.12.0 2018-11-21 13:19:29 +01:00
Daniel García Aubert
16a0d9707b Update redis-mpool@0.7.0 2018-11-21 13:11:30 +01:00
Daniel García Aubert
6962abfd10 Update cartodb-psql@0.13.0 2018-11-21 13:09:18 +01:00
Daniel García Aubert
dc0d4f0011 Merge branch 'master' into nodejs-10 2018-11-20 11:20:20 +01:00
Daniel G. Aubert
6bcb535d3f Merge pull request #1057 from CartoDB/add-cpu-metrics
Add process CPU usage metrics
2018-11-15 17:59:24 +01:00
Daniel García Aubert
4ad9902601 Add process CPU usage metrics 2018-11-15 17:28:18 +01:00
Daniel García Aubert
2a933788fd Update yarn.lock 2018-11-06 13:27:30 +01:00
Daniel García Aubert
3febf3e357 Use 'npm ci' instead of 'npm install' to be able to install exactly what is in a lockfile 2018-11-06 12:58:51 +01:00
Daniel García Aubert
a8ca80c23c List dependency tree after install in docker tests 2018-11-06 12:35:10 +01:00
Daniel García Aubert
f3b1bb742a Ensure all pg connections are being refreshed 2018-11-06 12:17:39 +01:00
Daniel García Aubert
4d1ed0be27 Update package-lock.json 2018-11-05 19:47:21 +01:00
Daniel García Aubert
af4b9f57f5 Skip just torque database timeout limit 2018-11-05 19:10:08 +01:00
Daniel García Aubert
6e7bd2585f Skip database timeout test 2018-11-05 18:53:00 +01:00
Daniel García Aubert
40ccdfd9b3 Drain pool connection before 2018-11-05 18:37:20 +01:00
Daniel García Aubert
659b0ba889 Use pdql.end with callback 2018-11-05 18:14:26 +01:00
Daniel García Aubert
71b8699f47 Shut down the pool after setting the database timeout 2018-11-05 17:16:22 +01:00
Daniel García Aubert
24c5bbb182 Workaround to drain pg pool effectively 2018-11-05 16:50:52 +01:00
Daniel García Aubert
2eea20b161 Updated cartodb-psql 2018-11-05 16:03:43 +01:00
Daniel García Aubert
f2180576de Remove test filter 2018-11-02 13:57:56 +01:00
Daniel García Aubert
d9039569bd Output yarn version 2018-11-02 13:57:30 +01:00
Daniel García Aubert
4a82d18cc6 Fix docker command 2018-11-02 13:56:43 +01:00
Daniel García Aubert
f8fa78bb8b Output npm version 2018-11-02 13:30:24 +01:00
Daniel García Aubert
babfa9aae3 Revert "Skip test temporally"
This reverts commit ebf2f54cd5.
2018-11-02 13:30:04 +01:00
Daniel García Aubert
ebf2f54cd5 Skip test temporally 2018-11-02 12:39:34 +01:00
Daniel García Aubert
20c1e8ca05 Revert "Revert "Add cache-buster to hit database always""
This reverts commit 60724897cc.
2018-11-02 12:24:45 +01:00
Daniel García Aubert
2a35d51d45 Revert "Set DEBUG env variable to run test"
This reverts commit 7bd188dafb.
2018-11-02 12:24:01 +01:00
Daniel García Aubert
ebe2d2ddab Revert "Add log to debug server option"
This reverts commit b4e57438ed.
2018-11-02 12:23:48 +01:00
Daniel García Aubert
1ee30e9b53 Revert "Move log to debug server option"
This reverts commit 938e3b2b07.
2018-11-02 12:23:36 +01:00
Daniel García Aubert
7527003711 Revert "Move log to debug server option"
This reverts commit 6c3d8dbe64.
2018-11-02 12:23:05 +01:00
Daniel García Aubert
acd0bbc94f Revert "Add log"
This reverts commit 6cc746dc83.
2018-11-02 12:22:41 +01:00
Daniel García Aubert
00e3f331b4 Revert "Do not run test against node 6"
This reverts commit 7a6fbecac4.
2018-11-02 12:20:49 +01:00
Daniel García Aubert
7a6fbecac4 Do not run test against node 6 2018-11-02 12:17:28 +01:00
Daniel García Aubert
6cc746dc83 Add log 2018-11-02 12:01:59 +01:00
Daniel García Aubert
6c3d8dbe64 Move log to debug server option 2018-11-02 11:23:44 +01:00
Daniel García Aubert
938e3b2b07 Move log to debug server option 2018-11-02 11:12:32 +01:00
Daniel García Aubert
b4e57438ed Add log to debug server option 2018-11-02 10:58:17 +01:00
Daniel García Aubert
7bd188dafb Set DEBUG env variable to run test 2018-11-02 10:37:39 +01:00
Daniel García Aubert
60724897cc Revert "Add cache-buster to hit database always"
This reverts commit e03defc30f.
2018-11-02 10:29:19 +01:00
Daniel García Aubert
e03defc30f Add cache-buster to hit database always 2018-11-02 10:21:53 +01:00
Daniel García Aubert
3bb1f893af Test only 2018-11-01 15:05:38 +01:00
Daniel García Aubert
37a61f527c Do not set language settings for travis 2018-10-31 18:57:25 +01:00
Daniel García Aubert
5e3d546fb6 Add package-lock.json and link to development dependencies 2018-10-31 18:52:40 +01:00
Daniel García Aubert
b7b5f031f3 Update docker test scripts to support Node.js 6 and 10 buildings 2018-10-31 17:20:25 +01:00
Daniel García Aubert
e57e548c31 Run docker test against Node.js 10 2018-10-31 14:47:32 +01:00
Daniel G. Aubert
420c39337c Merge pull request #1052 from CartoDB/use-strict
Use strict mode
2018-10-26 11:21:37 +02:00
Javier Goizueta
214c796a4c Merge pull request #1054 from CartoDB/fix-time-dimension-iso-hours
Fix iso format for hours in time dimensions
2018-10-26 11:01:00 +02:00
Daniel García Aubert
8918d6bec0 Link dependencies to the released versions 2018-10-26 10:19:59 +02:00
Daniel García Aubert
ca7acb8339 Merge branch 'master' into use-strict 2018-10-25 11:11:30 +02:00
Daniel García Aubert
5083ccb605 Link development branches of related dependencies 2018-10-25 10:59:27 +02:00
Javier Goizueta
6908aa532c Fix iso format for hours in time dimensions
The HH specifies hour of day 01-12, we need HH24 for 00-23
2018-10-24 21:15:10 +02:00
Daniel García Aubert
a7daa077ac Update NEWS 2018-10-24 15:53:28 +02:00
Rafa de la Torre
9f0d4905b1 Merge pull request #1053 from CartoDB/fix-tests-undefined-server
Fix for non-deterministic test (undefined server)
2018-10-24 12:11:36 +02:00
Daniel García Aubert
89d10210be Fix undefined context error in test, it raised after forcing strict mode 2018-10-24 11:58:20 +02:00
Rafa de la Torre
545d387bb4 Fix for non-deterministic test (undefined server)
When running tests I got this error:

```
  1) multilayer error cases bogus sql raises 400 status code:
     TypeError: Cannot read property 'listen' of undefined
      at Function.assert.response (test/support/assert.js:93:26)
      at Function.requestLayergroup (test/acceptance/ported/support/test_client.js:79:20)
      at next (node_modules/step/lib/step.js:51:23)
      at Step (node_modules/step/lib/step.js:122:3)
      at Object.createLayergroup (test/acceptance/ported/support/test_client.js:75:5)
      at Context.<anonymous> (test/acceptance/ported/multilayer_error_cases.js:304:20)
```

The problem is that `server` is declared but its initialization
may depend on the order of execution of suites, which is basically
that of the filesystem/checkouts.

That is fixed by returning `getServer()`, which seems to be the
original intent: return a singleton of `CartodbServer` properly
initialized in case it is not overriden through options.
2018-10-23 18:55:31 +02:00
Daniel García Aubert
e2d27db828 Use strict mode for modules under test folder 2018-10-23 18:39:02 +02:00
Raul Ochoa
33bcac189f Merge pull request #1050 from CartoDB/prevent-layert-stats-undefined-access
Prevent "Cannot read property 'geom_type' of undefined" on layer stats
2018-10-23 18:37:00 +02:00
Daniel García Aubert
361e99006b Ignore line 2018-10-23 18:20:11 +02:00
Daniel García Aubert
7162ab1631 Add app.js to be linted 2018-10-23 18:19:55 +02:00
Daniel García Aubert
9374e0fe18 Add strict mode to main file (app.js) 2018-10-23 18:09:58 +02:00
Raul Ochoa
b13ae62d0f Do not assert the estimated count as it seems to change over pg versions 2018-10-23 16:02:37 +00:00
Raul Ochoa
49de289a9c Merge pull request #1051 from CartoDB/output-versions
Output PostgreSQL and PostGIS versions
2018-10-23 17:58:30 +02:00
Daniel García Aubert
b6dcf72268 Missing semicolon 2018-10-23 17:55:35 +02:00
Daniel García Aubert
76cfd185de Please jshint 2018-10-23 17:54:11 +02:00
Daniel García Aubert
79820a0f05 Add function as method of to be stric mode compilant 2018-10-23 17:52:19 +02:00
Daniel García Aubert
a0126f6a15 Use stric mode for resource-locator module 2018-10-23 17:48:53 +02:00
Raul Ochoa
abd378e5f6 Run tests based on PostGIS version 2018-10-23 15:47:48 +00:00
Daniel García Aubert
e7e3d612a1 Use strict mode in modules under lib folder (except lib/cartodb/models/resource-locator.js) 2018-10-23 17:45:42 +02:00
Raul Ochoa
208dbfd951 Output PostgreSQL and PostGIS versions 2018-10-23 14:34:17 +00:00
Raul Ochoa
26e4a05276 Going green: prevent TypeError for empty tables/results
This is the initial step to fix https://github.com/CartoDB/carto-vl/issues/1049.
2018-10-23 13:50:49 +00:00
Raul Ochoa
3e261fb353 Going red: fails with 400 due to "Cannot read property 'geom_type' of undefined" 2018-10-23 15:49:34 +02:00
Daniel G. Aubert
4775c73aee Merge pull request #1049 from CartoDB/upgrade-windshaft-4.11.4
Upgrade windshaft to version 4.11.4
2018-10-23 13:03:17 +02:00
Daniel García Aubert
1ece97d0a1 Update NEWS 2018-10-23 12:36:03 +02:00
Daniel García Aubert
87ef8d1977 Upgrade windshaft to version 4.11.4 2018-10-23 12:25:36 +02:00
Daniel G. Aubert
2ebb1728ee Merge pull request #1048 from CartoDB/fix-uncaught-error
Prevent from uncaught exception
2018-10-23 11:54:55 +02:00
Daniel García Aubert
621b11ebd6 Update NEWS 2018-10-23 10:05:53 +02:00
Daniel García Aubert
12d58f3af2 Prevent from uncaught exception: Range filter Error from camshaft when getting analysis query 2018-10-22 15:26:59 +02:00
Daniel G. Aubert
211e815d9c Merge pull request #1047 from CartoDB/upgrade-windshaft-4.11.3
Upgrade windshaft 4.11.3
2018-10-19 13:26:54 +02:00
Daniel García Aubert
465fd2ec0a Merge branch 'master' into upgrade-windshaft-4.11.3 2018-10-19 13:08:34 +02:00
Daniel García Aubert
d0c405ae46 Upgrade windshaft to version 4.11.3 2018-10-19 13:04:03 +02:00
Daniel G. Aubert
953d831d5f Merge pull request #1046 from CartoDB/fix-create-key
Do not use 'Object.assign' as '_.defautls' equivalent
2018-10-19 12:49:17 +02:00
Daniel García Aubert
5573db2bc1 Do not use Object.assign as _.defautls equivalent 2018-10-19 11:57:54 +02:00
Rafa de la Torre
195b23248b Merge pull request #1041 from CartoDB/config-postgis-for-mvts
Sample configs: use PostGIS to generate MVT's
2018-10-17 18:37:27 +02:00
Rafa de la Torre
83897293c6 Fix test by giving redis enough time to delete 2018-10-17 17:35:36 +02:00
Rafa de la Torre
f26ddef244 Make rate limit tests work in dual mode 2018-10-17 17:35:36 +02:00
Rafa de la Torre
d25e8e9798 Make the test work in dual mode (mapnik/pgis) 2018-10-17 17:35:36 +02:00
Rafa de la Torre
bfbd9a8f22 Fix another suite (compat mapnik/pgis) 2018-10-17 17:35:36 +02:00
Rafa de la Torre
bd17f9f5e1 A better implementation of mvt suites 2018-10-17 17:35:36 +02:00
Rafa de la Torre
8491b86c17 Extract test generation function 2018-10-17 17:35:36 +02:00
Rafa de la Torre
376a3743c1 Fix buffer size per format tests 2018-10-17 17:35:36 +02:00
Rafa de la Torre
a42af5e0d5 Do not run test if ST_AsMvt not avail. 2018-10-17 17:35:36 +02:00
Rafa de la Torre
e157649571 Use of postgis renderer based on availabilty 2018-10-17 17:35:36 +02:00
Rafa de la Torre
e50d1a10d0 Skip tests if they cannot be run
If configured with `mvt.usePostGIS` but with no postgis version
supporting it, they should be skipped.
2018-10-17 17:35:36 +02:00
Rafa de la Torre
d474d49ce8 Do not use point in world's border 2018-10-17 17:35:36 +02:00
Rafa de la Torre
4dba4ef641 Tweak the scale denominator for pg-mvt renderer
The scale denominator is calculated with float values and more
precision, resulting in different (but more accurate) values
2018-10-17 17:35:36 +02:00
Rafa de la Torre
be08fa3bfa Tweak id's to test against pg-mvt renderer
Actually, the ID's are not generated by ST_AsMVT. They appear as an
artifact of testing, when using toGeoJSONSync (implemented in mapnik).
2018-10-17 17:35:36 +02:00
Simon Martín
945b151712 using docker tag postgis-2.4.4.5 in travis 2018-10-17 17:35:36 +02:00
Simon Martín
2af6486f73 new docker tags 2018-10-17 17:35:36 +02:00
Rafa de la Torre
9cffc8781a Sample configs: use PostGIS to generate MVT's 2018-10-17 17:35:36 +02:00
Daniel García Aubert
b75c1f7f08 Update NEWS 2018-10-17 15:42:13 +02:00
Rafa de la Torre
c5d22bf9e3 Update package version 2018-10-17 15:29:07 +02:00
Rafa de la Torre
1baae5e709 Update dependencies (windshaft@4.11.2) 2018-10-17 15:28:56 +02:00
Rafa de la Torre
da3239cfa1 Merge pull request #1045 from CartoDB/update-windshaft-pg-mvt-trailing-semicolons
Update windshaft with a patch for pg-mvt renderer
2018-10-17 15:25:27 +02:00
Rafa de la Torre
ba0078c51c Update windshaft with a patch fo pg-mvt renderer
Update windshaft to version 4.11.2, which contains a patch that makes
the pg-mvt renderer accept a trailing semicolon in the input query.
2018-10-17 15:12:33 +02:00
Daniel G. Aubert
47f64401a7 Merge pull request #1044 from CartoDB/upgrade-windshaft-4.11.1
Upgrade windshaft to version 4.11.1
2018-10-16 17:28:22 +02:00
Daniel García Aubert
8bdbe7c9b7 Update NEWS 2018-10-16 17:18:21 +02:00
Daniel García Aubert
0637018cca Upgrade windshaft to version 4.11.1 2018-10-16 17:16:57 +02:00
Daniel G. Aubert
8a7bef673b Merge pull request #1043 from CartoDB/mvt-query-rewrite
Use overviews query rewriter for mvt-renderer
2018-10-15 16:02:39 +02:00
Daniel García Aubert
a0e71ac396 Upgrade windshaft to version 4.11.0 2018-10-15 15:50:48 +02:00
Daniel García Aubert
184a804367 Improve assertions 2018-10-15 15:25:23 +02:00
Daniel García Aubert
c234b4ea91 Organize params 2018-10-15 08:30:51 +02:00
Daniel García Aubert
db13f5e4f3 ES6 tweaks 2018-10-15 08:25:44 +02:00
Daniel García Aubert
f9a8b3c827 Add acceptance test 2018-10-15 08:23:39 +02:00
Daniel García Aubert
17886d0e43 Revert "Use early return pattern"
This reverts commit 297e56f4e1.
2018-10-11 18:17:16 +02:00
Daniel García Aubert
1f112d587f Revert "Rename function"
This reverts commit 12dc1626a7.
2018-10-11 18:17:03 +02:00
Daniel García Aubert
5c56ea6b22 Add test to validate that Postgis and Mapnik renderers are using overviews tables 2018-10-11 17:59:49 +02:00
Daniel García Aubert
3c76dfbbb3 Use devel branch of windshaft 2018-10-11 15:41:58 +02:00
Daniel García Aubert
e158e3e426 Pass overview-query-rewriter to mvt renderer 2018-10-11 15:41:38 +02:00
Daniel García Aubert
12dc1626a7 Rename function 2018-10-10 15:07:47 +02:00
Daniel García Aubert
297e56f4e1 Use early return pattern 2018-10-10 15:06:56 +02:00
Javier Goizueta
09f75441ba Merge pull request #1039 from CartoDB/time-dimensions
Time dimensions
2018-10-09 16:06:06 +02:00
Javier Goizueta
41bd69d050 Remove public docs for the time being
We might want to make changes to the API after initial test usage
2018-10-09 15:17:29 +02:00
Javier Goizueta
73b3402d85 Refactor stats collection 2018-10-09 13:24:08 +02:00
Javier Goizueta
d66a304b00 Merge branch 'master' into time-dimensions
# Conflicts:
#	NEWS.md
2018-10-08 19:35:38 +02:00
Javier Goizueta
ee63b247cd Slight refactor 2018-10-08 19:25:04 +02:00
Javier Goizueta
418e0e2aa3 Documentation corrections 2018-10-08 19:16:50 +02:00
Javier Goizueta
d4bd706fe2 Clarify some tests 2018-10-08 19:16:32 +02:00
Javier Goizueta
a4dfc09c71 Aggregation dimensions documentation 2018-10-07 23:12:41 +02:00
Javier Goizueta
9ed39f149b Time dimension tests 2018-10-07 22:46:02 +02:00
Javier Goizueta
0e85aa56da Fix test 2018-10-07 11:35:28 +02:00
Javier Goizueta
2f59919f84 Dimension metadata test 2018-10-07 00:29:12 +02:00
Javier Goizueta
10baf43ede Fix dimension metadata bug 2018-10-07 00:28:53 +02:00
Javier Goizueta
996d7fc90d Lint fixes 2018-10-06 18:26:43 +02:00
Javier Goizueta
c0febf2fd1 Rename time dimension parameters 2018-10-05 20:08:40 +02:00
Javier Goizueta
f841f65a1e Dimensions metadata 2018-10-04 19:50:14 +02:00
Javier Goizueta
c9786ee3f6 Catch aggregation query errors 2018-10-03 23:13:22 +02:00
Javier Goizueta
99b62edcbd Bug fixes 2018-10-03 23:12:58 +02:00
Javier Goizueta
c588d4139e Refactor time dimensions 2018-10-03 21:02:22 +02:00
Javier Goizueta
aff55351ad Unify parameter names 2018-10-03 19:07:47 +02:00
Javier Goizueta
96ba075698 Unify handling of cyclic time groupings
Remove generic cyclic grouping
2018-10-03 18:57:00 +02:00
Javier Goizueta
a7d5415f64 Remove offsets from time dimension computations 2018-10-03 17:12:01 +02:00
Javier Goizueta
dede22c915 Changes in time dimensions API
Use single `starting` epoch instead of various offsets.
Add ISO text representation.
Adopt ISO conventions for day of week and week of year.
Rename internal parameters for consistency with external API.
2018-10-03 17:05:58 +02:00
Javier Goizueta
fbf3fd9d8c Support old and new dimension definitions 2018-09-25 19:10:56 +02:00
Simon Martín
e70de80cdf Stub next version 2018-09-24 11:35:10 +02:00
Javier Goizueta
fbcfc7a582 WIP: time dimensions for aggregation 2018-09-20 21:12:54 +02:00
277 changed files with 7196 additions and 3614 deletions

View File

@@ -4,78 +4,5 @@ jobs:
services:
- docker
language: generic
before_install: docker pull carto/nodejs6-xenial-pg101
script: npm run docker-test
- dist: precise
addons:
postgresql: "9.5"
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- pkg-config
- libcairo2-dev
- libjpeg8-dev
- libgif-dev
- libpango1.0-dev
- g++-4.9
- wget
before_install:
# Add custom PPAs from cartodb
- sudo add-apt-repository -y ppa:cartodb/postgresql-9.5
- sudo add-apt-repository -y ppa:cartodb/gis
- sudo add-apt-repository -y ppa:cartodb/gis-testing
- sudo apt-get update
# Force installation of libgeos-3.5.0 (presumably needed because of existing version of postgis)
- sudo apt-get -y install libgeos-3.5.0=3.5.0-1cdb2
# Install postgres db and build deps
- sudo /etc/init.d/postgresql stop # stop travis default instance
- sudo apt-get -y remove --purge postgresql-9.1
- sudo apt-get -y remove --purge postgresql-9.2
- sudo apt-get -y remove --purge postgresql-9.3
- sudo apt-get -y remove --purge postgresql-9.4
- sudo apt-get -y remove --purge postgresql-9.5
- sudo apt-get -y remove --purge postgresql-9.6
- sudo rm -rf /var/lib/postgresql/
- sudo rm -rf /var/log/postgresql/
- sudo rm -rf /etc/postgresql/
- sudo apt-get -y remove --purge postgis-2.2
- sudo apt-get -y autoremove
- sudo apt-get -y install postgresql-9.5=9.5.2-3cdb3
- sudo apt-get -y install postgresql-server-dev-9.5=9.5.2-3cdb3
- sudo apt-get -y install postgresql-plpython-9.5=9.5.2-3cdb3
- sudo apt-get -y install postgresql-9.5-postgis-scripts=2.2.2.0-cdb2
- sudo apt-get -y install postgresql-9.5-postgis-2.2=2.2.2.0-cdb2
# configure it to accept local connections from postgres
- echo -e "# TYPE DATABASE USER ADDRESS METHOD \nlocal all postgres trust\nlocal all all trust\nhost all all 127.0.0.1/32 trust" \
| sudo tee /etc/postgresql/9.5/main/pg_hba.conf
- sudo /etc/init.d/postgresql restart 9.5
- createdb template_postgis
- createuser publicuser
- psql -c "CREATE EXTENSION postgis" template_postgis
# install yarn 0.27.5
- curl -o- -L https://yarnpkg.com/install.sh | bash -s -- --version 0.27.5
- export PATH="$HOME/.yarn/bin:$PATH"
# install redis 4
- wget http://download.redis.io/releases/redis-4.0.8.tar.gz
- tar xvzf redis-4.0.8.tar.gz
- cd redis-4.0.8
- make
- sudo make install
- cd ..
- rm redis-4.0.8.tar.gz
env:
- NPROCS=1 JOBS=1 PGUSER=postgres CXX=g++-4.9
language: node_js
node_js:
- "6"
before_install: docker pull carto/nodejs-xenial-pg101:latest
script: npm run docker-test -- 10.15.1 # Node.js version

View File

@@ -1,8 +1,8 @@
1. Test (make clean all check), fix if broken before proceeding
2. Ensure proper version in package.json
2. Ensure proper version in package.json and package-lock.json
3. Ensure NEWS section exists for the new version, review it, add release date
4. If there are modified dependencies in package.json, update them with `yarn upgrade {{package_name}}@{{version}}`
5. Commit package.json, yarn.lock, NEWS
4. If there are modified dependencies in package.json, update them with `npm upgrade {{package_name}}@{{version}}`
5. Commit package.json, package-lock.json, NEWS
6. git tag -a Major.Minor.Patch # use NEWS section as content
7. Stub NEWS/package for next version

View File

@@ -1,35 +1,23 @@
# Installing Windshaft-CartoDB #
# Installing Windshaft-CartoDB
## Requirements ##
Make sure that you have the requirements needed. These are
## Requirements
- Core
- Node.js >=6.9.x
- yarn >=0.27.5 <1.0.0
- PostgreSQL >8.3.x, PostGIS >1.5.x
- Redis >2.4.0 (http://www.redis.io)
- Mapnik >3.x. See [Installing Mapnik](https://github.com/CartoDB/Windshaft#installing-mapnik).
- Windshaft: check [Windshaft dependencies and installation notes](https://github.com/CartoDB/Windshaft#dependencies)
- libcairo2-dev, libpango1.0-dev, libjpeg8-dev and libgif-dev for server side canvas support
Make sure that you have the requirements needed. These are:
- For cache control (optional)
- CartoDB 0.9.5+ (for `CDB_QueryTables`)
- Varnish (http://www.varnish-cache.org)
- Node 10.x
- npm 6.x
- PostgreSQL >= 10.0
- PostGIS >= 2.4
- CARTO Postgres Extension >= 0.24.1
- Redis >= 4
- libcairo2-dev, libpango1.0-dev, libjpeg8-dev and libgif-dev for server side canvas support
- C++11 (to build internal dependencies if needed)
On Ubuntu 14.04 the dependencies can be installed with
### Optional
```shell
sudo apt-get update
sudo apt-get install -y make g++ pkg-config git-core \
libgif-dev libjpeg-dev libcairo2-dev \
libhiredis-dev redis-server \
nodejs nodejs-legacy npm \
postgresql-9.3-postgis-2.1 postgresql-plpython-9.3 postgresql-server-dev-9.3
```
- Varnish (http://www.varnish-cache.org)
On Ubuntu 12.04 the [cartodb/cairo PPA](https://launchpad.net/~cartodb/+archive/ubuntu/cairo) may be useful.
## PostGIS setup ##
## PostGIS setup
A `template_postgis` database is expected. One can be set up with
@@ -38,16 +26,16 @@ createdb --owner postgres --template template0 template_postgis
psql -d template_postgis -c 'CREATE EXTENSION postgis;'
```
## Build/install ##
## Build/install
To fetch and build all node-based dependencies, run:
```
yarn
```shell
npm install
```
Note that the ```yarn``` step will populate the node_modules/
Note that the ```npm``` step will populate the node_modules/
directory with modules, some of which being compiled on demand. If you
happen to have startup errors you may need to force rebuilding those
modules. At any time just wipe out the node_modules/ directory and run
```yarn``` again.
```npm``` again.

57
NEWS.md
View File

@@ -1,5 +1,62 @@
# Changelog
## 7.0.0
Released 2019-02-22
Breaking changes:
- Drop support for Node.js 6
- Drop support for npm 3
- Stop supporting `yarn.lock`
- Drop support for Postgres 9.5
- Drop support for PostGIS 2.2
- Drop support for Redis 3
Announcements:
- In configuration, set `clipByBox2d` to true by default
- Update docs: compatible Node.js and npm versions
- Report fine-grained Garbage Collector stats
- Adding Authorization to Access-Control-Allow-Headers (https://github.com/CartoDB/CartoDB-SQL-API/issues/534)
- Update deps:
- windshaft@4.13.1: Upgrade tilelive-mapnik to version 0.6.18-cdb18
- camshaft@0.63.4: Improve error message for exceeded batch SQL API payload size: add suggestions about what the user can do about it.
- Update dev deps:
- jshint@2.9.7
- mocha@5.2.0
- Be able to customize max waiting workers parameter
- Handle 'max waitingClients count exceeded' error as "429, You are over platform's limits"
## 6.5.1
Released 2018-12-26
Bug Fixes:
- Update carto-package.json
## 6.5.0
Released 2018-12-26
New features
- Support Node.js 10
- Configure travis to run docker tests against Node.js 6 & 10 versions
- Aggregation time dimensions
- Update sample configurations to use PostGIS to generate MVT's by default (as in production)
- Upgrades Windshaft to [4.12.1](https://github.com/CartoDB/Windshaft/blob/4.12.1/NEWS.md#version-4121)
- `pg-mvt`: Use `query-rewriter` to compose the query to render a MVT tile. If not defined, it will use a Default Query Rewriter.
- `pg-mvt`: Fix bug while building query and there is no columns defined for the layer.
- `pg-mvt`: Accept trailing semicolon in input queries.
- `Renderer Cache Entry`: Do not throw errors for integrity checks.
- Fix bug when releasing the renderer cache entry in some scenarios.
- Upgrade grainstore to [1.10.0](https://github.com/CartoDB/grainstore/releases/tag/1.10.0)
- Upgrade cartodb-redis to [2.1.0](https://github.com/CartoDB/node-cartodb-redis/releases/tag/2.1.0)
- Upgrade cartodb-query-tables to [0.4.0](https://github.com/CartoDB/node-cartodb-query-tables/releases/tag/0.4.0)
- Upgrade cartodb-psql to [0.13.1](https://github.com/CartoDB/node-cartodb-psql/releases/tag/0.13.1)
- Upgrade turbo-carto to [0.21.0](https://github.com/CartoDB/turbo-carto/releases/tag/0.21.0)
- Upgrade camshaft to [0.63.1](https://github.com/CartoDB/camshaft/releases/tag/0.63.1)
- Upgrade redis-mpool to [0.7.0](https://github.com/CartoDB/node-redis-mpool/releases/tag/0.7.0)
Bug Fixes:
- Prevent from uncaught exception: Range filter Error from camshaft when getting analysis query.
- Make all modules to use strict mode semantics.
## 6.4.0
Released 2018-09-24

View File

@@ -31,12 +31,10 @@ Upgrading
Checkout your commit/branch. If you need to reinstall dependencies (you can check [NEWS](NEWS.md)) do the following:
```sh
$ rm -rf node_modules
$ npm install
```
rm -rf node_modules; yarn
```
Run
---
```
node app.js <env>
@@ -71,12 +69,12 @@ See [CONTRIBUTING.md](CONTRIBUTING.md).
### Developing with a custom windshaft version
If you plan or want to use a custom / not released yet version of windshaft (or any other dependency) the best option is
to use `yarn link`. You can read more about it at [yarn-link: Symlink a package folder](https://yarnpkg.com/en/docs/cli/link).
to use `npm link`. You can read more about it at [npm-link: Symlink a package folder](https://docs.npmjs.com/cli/link.html).
**Quick start**:
```shell
~/windshaft-directory $ yarn
~/windshaft-directory $ yarn link
~/windshaft-cartodb-directory $ yarn link windshaft
~/windshaft-directory $ npm install
~/windshaft-directory $ npm link
~/windshaft-cartodb-directory $ npm link windshaft
```

77
app.js
View File

@@ -1,3 +1,5 @@
'use strict';
var http = require('http');
var https = require('https');
var path = require('path');
@@ -126,6 +128,40 @@ listener.on('listening', function() {
);
});
function getCPUUsage (oldUsage) {
    // Snapshot CPU usage, either since process start or relative to a
    // previous snapshot (when `oldUsage` carries a `_start` marker).
    const hasBaseline = !!(oldUsage && oldUsage._start);

    const usage = hasBaseline
        ? Object.assign({}, process.cpuUsage(oldUsage._start.cpuUsage))
        : Object.assign({}, process.cpuUsage());

    usage.time = hasBaseline
        ? Date.now() - oldUsage._start.time
        : process.uptime() * 1000; // s to ms

    // user/system come from process.cpuUsage() in microseconds, time is in
    // milliseconds: µs / (ms * 10) yields a percentage.
    usage.percent = (usage.system + usage.user) / (usage.time * 10);

    // Defined non-enumerable (defineProperty default) so callers iterating
    // Object.keys(usage) to report gauges do not emit `_start`.
    Object.defineProperty(usage, '_start', {
        value: {
            cpuUsage: process.cpuUsage(),
            time: Date.now()
        }
    });

    return usage;
}
let previousCPUUsage = getCPUUsage();
setInterval(function cpuUsageMetrics () {
const CPUUsage = getCPUUsage(previousCPUUsage);
Object.keys(CPUUsage).forEach(property => {
global.statsClient.gauge(`windshaft.cpu.${property}`, CPUUsage[property]);
});
previousCPUUsage = CPUUsage;
}, 5000);
setInterval(function() {
var memoryUsage = process.memoryUsage();
Object.keys(memoryUsage).forEach(function(k) {
@@ -152,9 +188,46 @@ if (global.gc) {
if (gcInterval > 0) {
setInterval(function gcForcedCycle() {
var start = Date.now();
global.gc();
global.statsClient.timing('windshaft.gc', Date.now() - start);
}, gcInterval);
}
}
const gcStats = require('gc-stats')();
gcStats.on('stats', function ({ pauseMS, gctype }) {
global.statsClient.timing('windshaft.gc', pauseMS);
global.statsClient.timing(`windshaft.gctype.${getGCTypeValue(gctype)}`, pauseMS);
});
function getGCTypeValue (type) {
    // Maps the gc-stats `gctype` flag to a human-readable metric label:
    // 1: Scavenge (minor GC)
    // 2: Mark/Sweep/Compact (major GC)
    // 4: Incremental marking
    // 8: Weak/Phantom callback processing
    // 15: All
    const GC_TYPE_NAMES = {
        1: 'Scavenge',
        2: 'MarkSweepCompact',
        4: 'IncrementalMarking',
        8: 'ProcessWeakCallbacks',
        15: 'All'
    };

    // Bug fix: the fallback label was misspelled 'Unkown', which emitted the
    // metric name `windshaft.gctype.Unkown` for unrecognized GC types.
    return GC_TYPE_NAMES[type] || 'Unknown';
}

View File

@@ -2,15 +2,16 @@
"name": "carto_windshaft",
"current_version": {
"requires": {
"node": ">=6.9.2 <10.0.0",
"yarn": ">=0.27.5 <1.0.0",
"mapnik": ">=3.0.15"
"node": "^10.15.1",
"npm": "^6.4.1",
"mapnik": "==3.0.15.9",
"crankshaft": "~0.8.1"
},
"works_with": {
"redis": ">=3.0.0",
"postgresql": ">=9.5.0",
"postgis": ">=2.2.0.0",
"carto_postgresql_ext": ">=0.19.0"
"redis": ">=4.0.0",
"postgresql": ">=10.0.0",
"postgis": ">=2.4.4.5",
"carto_postgresql_ext": ">=0.24.1"
}
}
}

View File

@@ -127,10 +127,9 @@ var config = {
cache_ttl: 60000,
statsInterval: 5000, // milliseconds between each report to statsd about number of renderers and mapnik pool status
mvt: {
//If enabled, MVTs will be generated with PostGIS directly, instead of using Mapnik,
//PostGIS 2.4 is required for this to work
//If disabled it will use Mapnik MVT generation
usePostGIS: false
//If enabled, MVTs will be generated with PostGIS directly
//If disabled, MVTs will be generated with Mapnik MVT
usePostGIS: true
},
mapnik: {
// The size of the pool of internal mapnik backend
@@ -139,6 +138,10 @@ var config = {
// Important: check the configuration of uv_threadpool_size to use suitable value
poolSize: 8,
// The maximum number of waiting clients of the pool of internal mapnik backend
// This maximum number is per mapnik renderer created in Windshaft's RendererFactory
poolMaxWaitingClients: 64,
// Whether grainstore will use a child process or not to transform CartoCSS into Mapnik XML.
// This will prevent blocking the main thread.
useCartocssWorkers: false,
@@ -182,7 +185,7 @@ var config = {
// SQL queries will be wrapped with ST_ClipByBox2D
// Returning the portion of a geometry falling within a rectangle
// It will only work if snapToGrid is enabled
clipByBox2d: false, // this requires postgis >=2.2 and geos >=3.5
clipByBox2d: true,
postgis: {
// Parameters to pass to datasource plugin of mapnik

View File

@@ -127,10 +127,9 @@ var config = {
cache_ttl: 60000,
statsInterval: 5000, // milliseconds between each report to statsd about number of renderers and mapnik pool status
mvt: {
//If enabled, MVTs will be generated with PostGIS directly, instead of using Mapnik,
//PostGIS 2.4 is required for this to work
//If disabled it will use Mapnik MVT generation
usePostGIS: false
//If enabled, MVTs will be generated with PostGIS directly
//If disabled, MVTs will be generated with Mapnik MVT
usePostGIS: true
},
mapnik: {
// The size of the pool of internal mapnik backend
@@ -139,6 +138,10 @@ var config = {
// Important: check the configuration of uv_threadpool_size to use suitable value
poolSize: 8,
// The maximum number of waiting clients of the pool of internal mapnik backend
// This maximum number is per mapnik renderer created in Windshaft's RendererFactory
poolMaxWaitingClients: 64,
// Whether grainstore will use a child process or not to transform CartoCSS into Mapnik XML.
// This will prevent blocking the main thread.
useCartocssWorkers: false,
@@ -182,7 +185,7 @@ var config = {
// SQL queries will be wrapped with ST_ClipByBox2D
// Returning the portion of a geometry falling within a rectangle
// It will only work if snapToGrid is enabled
clipByBox2d: false, // this requires postgis >=2.2 and geos >=3.5
clipByBox2d: true,
postgis: {
// Parameters to pass to datasource plugin of mapnik

View File

@@ -127,10 +127,9 @@ var config = {
cache_ttl: 60000,
statsInterval: 5000, // milliseconds between each report to statsd about number of renderers and mapnik pool status
mvt: {
//If enabled, MVTs will be generated with PostGIS directly, instead of using Mapnik,
//PostGIS 2.4 is required for this to work
//If disabled it will use Mapnik MVT generation
usePostGIS: false
//If enabled, MVTs will be generated with PostGIS directly
//If disabled, MVTs will be generated with Mapnik MVT
usePostGIS: true
},
mapnik: {
// The size of the pool of internal mapnik backend
@@ -139,6 +138,10 @@ var config = {
// Important: check the configuration of uv_threadpool_size to use suitable value
poolSize: 8,
// The maximum number of waiting clients of the pool of internal mapnik backend
// This maximum number is per mapnik renderer created in Windshaft's RendererFactory
poolMaxWaitingClients: 64,
// Whether grainstore will use a child process or not to transform CartoCSS into Mapnik XML.
// This will prevent blocking the main thread.
useCartocssWorkers: false,
@@ -182,7 +185,7 @@ var config = {
// SQL queries will be wrapped with ST_ClipByBox2D
// Returning the portion of a geometry falling within a rectangle
// It will only work if snapToGrid is enabled
clipByBox2d: false, // this requires postgis >=2.2 and geos >=3.5
clipByBox2d: true,
postgis: {
// Parameters to pass to datasource plugin of mapnik

View File

@@ -127,10 +127,9 @@ var config = {
cache_ttl: 60000,
statsInterval: 5000, // milliseconds between each report to statsd about number of renderers and mapnik pool status
mvt: {
//If enabled, MVTs will be generated with PostGIS directly, instead of using Mapnik,
//PostGIS 2.4 is required for this to work
//If disabled it will use Mapnik MVT generation
usePostGIS: false
//If enabled, MVTs will be generated with PostGIS directly
//If disabled, MVTs will be generated with Mapnik MVT
usePostGIS: true
},
mapnik: {
// The size of the pool of internal mapnik backend
@@ -139,6 +138,10 @@ var config = {
// Important: check the configuration of uv_threadpool_size to use suitable value
poolSize: 8,
// The maximum number of waiting clients of the pool of internal mapnik backend
// This maximum number is per mapnik renderer created in Windshaft's RendererFactory
poolMaxWaitingClients: 64,
// Whether grainstore will use a child process or not to transform CartoCSS into Mapnik XML.
// This will prevent blocking the main thread.
useCartocssWorkers: false,
@@ -182,7 +185,7 @@ var config = {
// SQL queries will be wrapped with ST_ClipByBox2D
// Returning the portion of a geometry falling within a rectangle
// It will only work if snapToGrid is enabled
clipByBox2d: false, // this requires postgis >=2.2 and geos >=3.5
clipByBox2d: true,
postgis: {
// Parameters to pass to datasource plugin of mapnik

13
docker-bash.sh Executable file
View File

@@ -0,0 +1,13 @@
#!/bin/bash
echo "*********************"
echo "To install Node.js, run:"
echo "/src/nodejs-install.sh"
echo "Use NODEJS_VERSION env var to select the Node.js version"
echo " "
echo "To start postgres, run:"
echo "/etc/init.d/postgresql start"
echo "*********************"
echo " "
docker run -it -v `pwd`:/srv carto/nodejs-xenial-pg101:latest bash

4
docker-test.sh Executable file
View File

@@ -0,0 +1,4 @@
#!/bin/bash
docker run -e "NODEJS_VERSION=${1}" -v `pwd`:/srv carto/nodejs-xenial-pg101:latest bash run_tests_docker.sh && \
docker ps --filter status=dead --filter status=exited -aq | xargs docker rm -v

View File

@@ -0,0 +1,92 @@
FROM ubuntu:xenial
# Use UTF8 to avoid encoding problems with pgsql
ENV LANG C.UTF-8
ENV NPROCS 1
ENV JOBS 1
ENV CXX g++-4.9
ENV PGUSER postgres
# Add external repos
RUN set -ex \
&& apt-get update \
&& apt-get install -y \
curl \
software-properties-common \
locales \
&& add-apt-repository -y ppa:ubuntu-toolchain-r/test \
&& add-apt-repository -y ppa:cartodb/postgresql-10 \
&& add-apt-repository -y ppa:cartodb/gis \
&& curl -o- https://raw.githubusercontent.com/creationix/nvm/v0.34.0/install.sh | bash \
&& . ~/.nvm/nvm.sh \
&& locale-gen en_US.UTF-8 \
&& update-locale LANG=en_US.UTF-8
RUN set -ex \
&& apt-get update \
&& apt-get install -y \
g++-4.9 \
gcc-4.9 \
git \
libcairo2-dev \
libgdal-dev \
libgdal1i \
libgdal20 \
libgeos-dev \
libgif-dev \
libjpeg8-dev \
libjson-c-dev \
libpango1.0-dev \
libpixman-1-dev \
libproj-dev \
libprotobuf-c-dev \
libxml2-dev \
gdal-bin \
make \
nodejs \
protobuf-c-compiler \
pkg-config \
wget \
zip \
postgresql-10 \
postgresql-10-plproxy \
postgis=2.4.4.6+carto-1 \
postgresql-10-postgis-2.4=2.4.4.6+carto-1 \
postgresql-10-postgis-2.4-scripts=2.4.4.6+carto-1 \
postgresql-10-postgis-scripts=2.4.4.6+carto-1 \
postgresql-client-10 \
postgresql-client-common \
postgresql-common \
postgresql-contrib \
postgresql-plpython-10 \
postgresql-server-dev-10 \
&& wget http://download.redis.io/releases/redis-4.0.8.tar.gz \
&& tar xvzf redis-4.0.8.tar.gz \
&& cd redis-4.0.8 \
&& make \
&& make install \
&& cd .. \
&& rm redis-4.0.8.tar.gz \
&& rm -R redis-4.0.8 \
&& apt-get purge -y wget protobuf-c-compiler \
&& apt-get autoremove -y
# Configure PostgreSQL
RUN set -ex \
&& echo "listen_addresses='*'" >> /etc/postgresql/10/main/postgresql.conf \
&& echo "local all all trust" > /etc/postgresql/10/main/pg_hba.conf \
&& echo "host all all 0.0.0.0/0 trust" >> /etc/postgresql/10/main/pg_hba.conf \
&& echo "host all all ::1/128 trust" >> /etc/postgresql/10/main/pg_hba.conf \
&& /etc/init.d/postgresql start \
&& createdb template_postgis \
&& createuser publicuser \
&& psql -c "CREATE EXTENSION postgis" template_postgis \
&& /etc/init.d/postgresql stop
WORKDIR /srv
EXPOSE 5858
COPY ./scripts/nodejs-install.sh /src/nodejs-install.sh
RUN chmod 777 /src/nodejs-install.sh
CMD /src/nodejs-install.sh

View File

@@ -0,0 +1,88 @@
FROM ubuntu:xenial
# Use UTF8 to avoid encoding problems with pgsql
ENV LANG C.UTF-8
ENV NPROCS 1
ENV JOBS 1
ENV CXX g++-4.9
ENV PGUSER postgres
# Add external repos
RUN set -ex \
&& apt-get update \
&& apt-get install -y \
curl \
software-properties-common \
locales \
&& add-apt-repository -y ppa:ubuntu-toolchain-r/test \
&& add-apt-repository -y ppa:cartodb/postgresql-10 \
&& add-apt-repository -y ppa:cartodb/gis \
&& curl -sL https://deb.nodesource.com/setup_10.x | bash \
&& locale-gen en_US.UTF-8 \
&& update-locale LANG=en_US.UTF-8
RUN set -ex \
&& apt-get update \
&& apt-get install -y \
g++-4.9 \
gcc-4.9 \
git \
libcairo2-dev \
libgdal-dev \
libgdal1i \
libgdal20 \
libgeos-dev \
libgif-dev \
libjpeg8-dev \
libjson-c-dev \
libpango1.0-dev \
libpixman-1-dev \
libproj-dev \
libprotobuf-c-dev \
libxml2-dev \
gdal-bin \
make \
nodejs \
protobuf-c-compiler \
pkg-config \
wget \
zip \
postgresql-10 \
postgresql-10-plproxy \
postgis=2.4.4.5+carto-1 \
postgresql-10-postgis-2.4=2.4.4.5+carto-1 \
postgresql-10-postgis-2.4-scripts=2.4.4.5+carto-1 \
postgresql-10-postgis-scripts=2.4.4.5+carto-1 \
postgresql-client-10 \
postgresql-client-common \
postgresql-common \
postgresql-contrib \
postgresql-plpython-10 \
postgresql-server-dev-10 \
&& wget http://download.redis.io/releases/redis-4.0.8.tar.gz \
&& tar xvzf redis-4.0.8.tar.gz \
&& cd redis-4.0.8 \
&& make \
&& make install \
&& cd .. \
&& rm redis-4.0.8.tar.gz \
&& rm -R redis-4.0.8 \
&& apt-get purge -y wget protobuf-c-compiler \
&& apt-get autoremove -y
# Configure PostgreSQL
RUN set -ex \
&& echo "listen_addresses='*'" >> /etc/postgresql/10/main/postgresql.conf \
&& echo "local all all trust" > /etc/postgresql/10/main/pg_hba.conf \
&& echo "host all all 0.0.0.0/0 trust" >> /etc/postgresql/10/main/pg_hba.conf \
&& echo "host all all ::1/128 trust" >> /etc/postgresql/10/main/pg_hba.conf \
&& /etc/init.d/postgresql start \
&& createdb template_postgis \
&& createuser publicuser \
&& psql -c "CREATE EXTENSION postgis" template_postgis \
&& /etc/init.d/postgresql stop
WORKDIR /srv
EXPOSE 5858
CMD /etc/init.d/postgresql start

View File

@@ -0,0 +1,89 @@
FROM ubuntu:xenial
# Use UTF8 to avoid encoding problems with pgsql
ENV LANG C.UTF-8
ENV NPROCS 1
ENV JOBS 1
ENV CXX g++-4.9
ENV PGUSER postgres
# Add external repos
RUN set -ex \
&& apt-get update \
&& apt-get install -y \
curl \
software-properties-common \
locales \
&& add-apt-repository -y ppa:ubuntu-toolchain-r/test \
&& add-apt-repository -y ppa:cartodb/postgresql-10 \
&& add-apt-repository -y ppa:cartodb/gis \
&& curl -sL https://deb.nodesource.com/setup_6.x | bash \
&& locale-gen en_US.UTF-8 \
&& update-locale LANG=en_US.UTF-8
# Install dependencies and PostGIS 2.4 from sources
RUN set -ex \
&& apt-get update \
&& apt-get install -y \
g++-4.9 \
gcc-4.9 \
git \
libcairo2-dev \
libgdal-dev \
libgdal1i \
libgdal20 \
libgeos-dev \
libgif-dev \
libjpeg8-dev \
libjson-c-dev \
libpango1.0-dev \
libpixman-1-dev \
libproj-dev \
libprotobuf-c-dev \
libxml2-dev \
gdal-bin \
make \
nodejs \
protobuf-c-compiler \
pkg-config \
wget \
zip \
postgresql-10 \
postgresql-10-plproxy \
postgresql-10-postgis-2.4 \
postgresql-10-postgis-2.4-scripts \
postgresql-10-postgis-scripts \
postgresql-client-10 \
postgresql-client-common \
postgresql-common \
postgresql-contrib \
postgresql-plpython-10 \
postgresql-server-dev-10 \
postgis \
&& wget http://download.redis.io/releases/redis-4.0.8.tar.gz \
&& tar xvzf redis-4.0.8.tar.gz \
&& cd redis-4.0.8 \
&& make \
&& make install \
&& cd .. \
&& rm redis-4.0.8.tar.gz \
&& rm -R redis-4.0.8 \
&& apt-get purge -y wget protobuf-c-compiler \
&& apt-get autoremove -y
# Configure PostgreSQL
RUN set -ex \
&& echo "listen_addresses='*'" >> /etc/postgresql/10/main/postgresql.conf \
&& echo "local all all trust" > /etc/postgresql/10/main/pg_hba.conf \
&& echo "host all all 0.0.0.0/0 trust" >> /etc/postgresql/10/main/pg_hba.conf \
&& echo "host all all ::1/128 trust" >> /etc/postgresql/10/main/pg_hba.conf \
&& /etc/init.d/postgresql start \
&& createdb template_postgis \
&& createuser publicuser \
&& psql -c "CREATE EXTENSION postgis" template_postgis \
&& /etc/init.d/postgresql stop
WORKDIR /srv
EXPOSE 5858
CMD /etc/init.d/postgresql start

View File

@@ -0,0 +1,88 @@
FROM ubuntu:xenial
# Use UTF8 to avoid encoding problems with pgsql
ENV LANG C.UTF-8
ENV NPROCS 1
ENV JOBS 1
ENV CXX g++-4.9
ENV PGUSER postgres
# Add external repos
RUN set -ex \
&& apt-get update \
&& apt-get install -y \
curl \
software-properties-common \
locales \
&& add-apt-repository -y ppa:ubuntu-toolchain-r/test \
&& add-apt-repository -y ppa:cartodb/postgresql-10 \
&& add-apt-repository -y ppa:cartodb/gis \
&& curl -sL https://deb.nodesource.com/setup_6.x | bash \
&& locale-gen en_US.UTF-8 \
&& update-locale LANG=en_US.UTF-8
RUN set -ex \
&& apt-get update \
&& apt-get install -y \
g++-4.9 \
gcc-4.9 \
git \
libcairo2-dev \
libgdal-dev \
libgdal1i \
libgdal20 \
libgeos-dev \
libgif-dev \
libjpeg8-dev \
libjson-c-dev \
libpango1.0-dev \
libpixman-1-dev \
libproj-dev \
libprotobuf-c-dev \
libxml2-dev \
gdal-bin \
make \
nodejs \
protobuf-c-compiler \
pkg-config \
wget \
zip \
postgresql-10 \
postgresql-10-plproxy \
postgis=2.4.4.5+carto-1 \
postgresql-10-postgis-2.4=2.4.4.5+carto-1 \
postgresql-10-postgis-2.4-scripts=2.4.4.5+carto-1 \
postgresql-10-postgis-scripts=2.4.4.5+carto-1 \
postgresql-client-10 \
postgresql-client-common \
postgresql-common \
postgresql-contrib \
postgresql-plpython-10 \
postgresql-server-dev-10 \
&& wget http://download.redis.io/releases/redis-4.0.8.tar.gz \
&& tar xvzf redis-4.0.8.tar.gz \
&& cd redis-4.0.8 \
&& make \
&& make install \
&& cd .. \
&& rm redis-4.0.8.tar.gz \
&& rm -R redis-4.0.8 \
&& apt-get purge -y wget protobuf-c-compiler \
&& apt-get autoremove -y
# Configure PostgreSQL
RUN set -ex \
&& echo "listen_addresses='*'" >> /etc/postgresql/10/main/postgresql.conf \
&& echo "local all all trust" > /etc/postgresql/10/main/pg_hba.conf \
&& echo "host all all 0.0.0.0/0 trust" >> /etc/postgresql/10/main/pg_hba.conf \
&& echo "host all all ::1/128 trust" >> /etc/postgresql/10/main/pg_hba.conf \
&& /etc/init.d/postgresql start \
&& createdb template_postgis \
&& createuser publicuser \
&& psql -c "CREATE EXTENSION postgis" template_postgis \
&& /etc/init.d/postgresql stop
WORKDIR /srv
EXPOSE 5858
CMD /etc/init.d/postgresql start

View File

@@ -11,13 +11,13 @@ https://hub.docker.com/r/carto/
## Update image
- Edit the docker image file with your desired changes
- Build image:
- Build image:
- `docker build -t carto/IMAGE -f docker/DOCKER_FILE docker/`
- Upload to docker hub:
- Login into docker hub:
- Login into docker hub:
- `docker login`
- Create tag:
- Create tag:
- `docker tag carto/IMAGE carto/IMAGE`
- Upload:
- Upload:
- `docker push carto/IMAGE`

View File

@@ -0,0 +1,13 @@
#!/bin/bash
export NVM_DIR="$HOME/.nvm"
[ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh"
if [ -z $NODEJS_VERSION ]; then
NODEJS_VERSION="10"
NODEJS_VERSION_OPTIONS="--lts"
fi
nvm install $NODEJS_VERSION $NODEJS_VERSION_OPTIONS
nvm alias default $NODEJS_VERSION
nvm use default

View File

@@ -134,6 +134,10 @@ of the original dataset applying three different aggregate functions.
> Note that you can use the original column names as names of the result, but all the result column names must be unique. In particular, the names `cartodb_id`, `the_geom`, `the_geom_webmercator` and `_cdb_feature_count` cannot be used for aggregated columns, as they correspond to columns always present in the result.
#### Limitations:
* The iso text format does not admit `starting` or `count` parameters
* Cyclic units (day of the week, etc.) don't admit `count` or `starting` either.
### `resolution`
Defines the cell-size of the spatial aggregation grid. This is equivalent to the [CartoCSS `-torque-resolution`](https://carto.com/docs/carto-engine/cartocss/properties-for-torque/#-torque-resolution-float) property of Torque maps.

View File

@@ -1,3 +1,5 @@
'use strict';
const { Router: router } = require('express');
const RedisPool = require('redis-mpool');

View File

@@ -1,3 +1,5 @@
'use strict';
const PSQL = require('cartodb-psql');
const cleanUpQueryParams = require('../middlewares/clean-up-query-params');
const credentials = require('../middlewares/credentials');

View File

@@ -1,3 +1,5 @@
'use strict';
const layergroupToken = require('../middlewares/layergroup-token');
const cleanUpQueryParams = require('../middlewares/clean-up-query-params');
const credentials = require('../middlewares/credentials');

View File

@@ -1,3 +1,5 @@
'use strict';
const windshaft = require('windshaft');
const MapConfig = windshaft.model.MapConfig;
const Datasource = windshaft.model.Datasource;
@@ -152,17 +154,17 @@ function prepareAdapterMapConfig (mapConfigAdapter) {
}
};
mapConfigAdapter.getMapConfig(user,
requestMapConfig,
params,
context,
mapConfigAdapter.getMapConfig(user,
requestMapConfig,
params,
context,
(err, requestMapConfig, stats = { overviewsAddedToMapconfig : false }) => {
req.profiler.done('anonymous.getMapConfig');
stats.mapType = 'anonymous';
req.profiler.add(stats);
if (err) {
return next(err);
}

View File

@@ -1,3 +1,5 @@
'use strict';
const layergroupToken = require('../middlewares/layergroup-token');
const cleanUpQueryParams = require('../middlewares/clean-up-query-params');
const credentials = require('../middlewares/credentials');

View File

@@ -1,3 +1,5 @@
'use strict';
const layergroupToken = require('../middlewares/layergroup-token');
const cleanUpQueryParams = require('../middlewares/clean-up-query-params');
const credentials = require('../middlewares/credentials');

View File

@@ -1,3 +1,5 @@
'use strict';
const { Router: router } = require('express');
const AnalysisLayergroupController = require('./analysis-layergroup-controller');

View File

@@ -1,3 +1,5 @@
'use strict';
const layergroupToken = require('../middlewares/layergroup-token');
const coordinates = require('../middlewares/coordinates');
const cleanUpQueryParams = require('../middlewares/clean-up-query-params');

View File

@@ -1,3 +1,5 @@
'use strict';
const cleanUpQueryParams = require('../middlewares/clean-up-query-params');
const credentials = require('../middlewares/credentials');
const dbConnSetup = require('../middlewares/db-conn-setup');

View File

@@ -1,3 +1,5 @@
'use strict';
const layergroupToken = require('../middlewares/layergroup-token');
const coordinates = require('../middlewares/coordinates');
const cleanUpQueryParams = require('../middlewares/clean-up-query-params');

View File

@@ -1,3 +1,5 @@
'use strict';
const _ = require('underscore');
module.exports = function augmentLayergroupData () {

View File

@@ -1,3 +1,5 @@
'use strict';
module.exports = function authorize (authBackend) {
return function authorizeMiddleware (req, res, next) {
authBackend.authorize(req, res, (err, authorized) => {

View File

@@ -1,3 +1,5 @@
'use strict';
module.exports = function setCacheChannelHeader () {
return function setCacheChannelHeaderMiddleware (req, res, next) {
if (req.method !== 'GET') {

View File

@@ -1,3 +1,5 @@
'use strict';
const ONE_YEAR_IN_SECONDS = 60 * 60 * 24 * 365;
module.exports = function setCacheControlHeader ({ ttl = ONE_YEAR_IN_SECONDS, revalidate = false } = {}) {

View File

@@ -1,3 +1,5 @@
'use strict';
module.exports = function checkJsonContentType () {
return function checkJsonContentTypeMiddleware(req, res, next) {
if (req.method === 'POST' && !req.is('application/json')) {

View File

@@ -1,3 +1,5 @@
'use strict';
const VALID_IMAGE_FORMATS = ['png', 'jpg'];
module.exports = function checkStaticImageFormat () {

View File

@@ -1,3 +1,5 @@
'use strict';
const _ = require('underscore');
// Whitelist query parameters and attach format

View File

@@ -1,3 +1,5 @@
'use strict';
const positiveIntegerNumberRegExp = /^\d+$/;
const integerNumberRegExp = /^-?\d+$/;
const invalidZoomMessage = function (zoom) {

View File

@@ -1,9 +1,12 @@
'use strict';
module.exports = function cors () {
return function corsMiddleware (req, res, next) {
const headers = [
'X-Requested-With',
'X-Prototype-Version',
'X-CSRF-Token'
'X-CSRF-Token',
'Authorization'
];
if (req.method === 'OPTIONS') {

View File

@@ -1,3 +1,5 @@
'use strict';
const basicAuth = require('basic-auth');
module.exports = function credentials () {

View File

@@ -1,3 +1,5 @@
'use strict';
const _ = require('underscore');
module.exports = function dbConnSetup (pgConnection) {

View File

@@ -1,3 +1,5 @@
'use strict';
const _ = require('underscore');
const debug = require('debug')('windshaft:cartodb:error-middleware');
@@ -7,7 +9,7 @@ module.exports = function errorMiddleware (/* options */) {
// jshint maxcomplexity:9
var allErrors = Array.isArray(err) ? err : [err];
allErrors = populateTimeoutErrors(allErrors);
allErrors = populateLimitErrors(allErrors);
const label = err.label || 'UNKNOWN';
err = allErrors[0] || new Error(label);
@@ -57,8 +59,22 @@ function getErrorTypes(error) {
};
}
function populateTimeoutErrors (errors) {
function isMaxWaitingClientsError (err) {
    // True when the error is the pool-exhaustion error raised once the
    // renderer pool's waiting-clients queue is full.
    const POOL_EXHAUSTED_MESSAGE = 'max waitingClients count exceeded';
    return err.message === POOL_EXHAUSTED_MESSAGE;
}
function populateLimitErrors (errors) {
return errors.map(function (error) {
if (isMaxWaitingClientsError(error)) {
error.message = 'You are over platform\'s limits: Max render capacity exceeded.' +
' Contact CARTO support for more details.';
error.type = 'limit';
error.subtype = 'render-capacity';
error.http_status = 429;
return error;
}
const errorTypes = getErrorTypes(error);
if (isTimeoutError(errorTypes)) {

View File

@@ -1,3 +1,5 @@
'use strict';
module.exports = function incrementMapViewCount (metadataBackend) {
return function incrementMapViewCountMiddleware(req, res, next) {
const { mapConfig, user } = res.locals;

View File

@@ -1,3 +1,5 @@
'use strict';
module.exports = function initProfiler (isTemplateInstantiation) {
const operation = isTemplateInstantiation ? 'instance_template' : 'createmap';

View File

@@ -1,3 +1,5 @@
'use strict';
module.exports = function initializeStatusCode () {
return function initializeStatusCodeMiddleware (req, res, next) {
if (req.method !== 'OPTIONS') {

View File

@@ -1,3 +1,5 @@
'use strict';
module.exports = function setLastModifiedHeader () {
return function setLastModifiedHeaderMiddleware(req, res, next) {
if (req.method !== 'GET') {

View File

@@ -1,3 +1,5 @@
'use strict';
module.exports = function setLastUpdatedTimeToLayergroup () {
return function setLastUpdatedTimeToLayergroupMiddleware (req, res, next) {
const { mapConfigProvider, analysesResults } = res.locals;

View File

@@ -1,3 +1,5 @@
'use strict';
module.exports = function setLayerStats (pgConnection, statsBackend) {
return function setLayerStatsMiddleware(req, res, next) {
const { user, mapConfig } = res.locals;

View File

@@ -1,3 +1,5 @@
'use strict';
module.exports = function setLayergroupIdHeader (templateMaps, useTemplateHash) {
return function setLayergroupIdHeaderMiddleware (req, res, next) {
const { user, template } = res.locals;

View File

@@ -1,3 +1,5 @@
'use strict';
module.exports = function setMetadataToLayergroup (layergroupMetadata, includeQuery) {
return function setMetadataToLayergroupMiddleware (req, res, next) {
const { user, mapConfig, analysesResults = [], context, api_key: userApiKey } = res.locals;

View File

@@ -1,3 +1,5 @@
'use strict';
const LayergroupToken = require('../../models/layergroup-token');
const authErrorMessageTemplate = function (signer, user) {
return `Cannot use map signature of user "${signer}" on db of user "${user}"`;

View File

@@ -1,3 +1,5 @@
'use strict';
module.exports = function logger (options) {
if (!global.log4js || !options.log_format) {
return function dummyLoggerMiddleware (req, res, next) {

View File

@@ -1,3 +1,5 @@
'use strict';
const LZMA = require('lzma').LZMA;
module.exports = function lzma () {

View File

@@ -1,3 +1,5 @@
'use strict';
module.exports = function mapError (options) {
const { addContext = false, label = 'MAPS CONTROLLER' } = options;

View File

@@ -1,3 +1,5 @@
'use strict';
const MapStoreMapConfigProvider = require('../../models/mapconfig/provider/map-store-provider');
module.exports = function createMapStoreMapConfigProvider (

View File

@@ -1,3 +1,5 @@
'use strict';
module.exports = function getNamedMapProvider ({ namedMapProviderCache, label, forcedFormat = null }) {
return function getNamedMapProviderMiddleware (req, res, next) {
const { user, token, cache_buster, api_key } = res.locals;

View File

@@ -1,3 +1,5 @@
'use strict';
module.exports = function noop () {
return function noopMiddleware (req, res, next) {
next();

View File

@@ -1,3 +1,5 @@
'use strict';
module.exports = function sendResponse () {
return function sendResponseMiddleware (req, res) {
req.profiler.done('res');

View File

@@ -1,3 +1,5 @@
'use strict';
const os = require('os');
module.exports = function servedByHostHeader () {

View File

@@ -1,3 +1,5 @@
'use strict';
const Profiler = require('../../stats/profiler_proxy');
const debug = require('debug')('windshaft:cartodb:stats');
const onHeaders = require('on-headers');

View File

@@ -1,3 +1,5 @@
'use strict';
const NamedMapsCacheEntry = require('../../cache/model/named_maps_entry');
const NamedMapMapConfigProvider = require('../../models/mapconfig/provider/named-map-provider');

View File

@@ -1,3 +1,5 @@
'use strict';
module.exports = function syntaxError () {
return function syntaxErrorMiddleware (err, req, res, next) {
if (err.name === 'SyntaxError') {

View File

@@ -1,3 +1,5 @@
'use strict';
const CdbRequest = require('../../models/cdb_request');
module.exports = function user () {

View File

@@ -1,3 +1,5 @@
'use strict';
const fs = require('fs');
const timeoutErrorVectorTile = fs.readFileSync(__dirname + '/../../../../assets/render-timeout-fallback.mvt');

View File

@@ -1,3 +1,5 @@
'use strict';
const { templateName } = require('../../backends/template_maps');
const credentials = require('../middlewares/credentials');
const rateLimit = require('../middlewares/rate-limit');

View File

@@ -1,3 +1,5 @@
'use strict';
const cleanUpQueryParams = require('../middlewares/clean-up-query-params');
const credentials = require('../middlewares/credentials');
const dbConnSetup = require('../middlewares/db-conn-setup');

View File

@@ -1,3 +1,5 @@
'use strict';
const { Router: router } = require('express');
const NamedMapController = require('./named-template-controller');

View File

@@ -1,3 +1,5 @@
'use strict';
const coordinates = require('../middlewares/coordinates');
const cleanUpQueryParams = require('../middlewares/clean-up-query-params');
const credentials = require('../middlewares/credentials');

View File

@@ -1,3 +1,5 @@
'use strict';
var PSQL = require('cartodb-psql');
function AnalysisStatusBackend() {

View File

@@ -1,3 +1,5 @@
'use strict';
/**
*
* @param {PgConnection} pgConnection

View File

@@ -1,3 +1,5 @@
'use strict';
var _ = require('underscore');
var PSQL = require('cartodb-psql');
var BBoxFilter = require('../models/filter/bbox');

View File

@@ -1,3 +1,5 @@
'use strict';
var _ = require('underscore');
var AnalysisFilter = require('../models/filter/analysis');

View File

@@ -1,3 +1,5 @@
'use strict';
function EmptyLayerStats(types) {
this._types = types || {};
}

View File

@@ -1,3 +1,5 @@
'use strict';
var LayerStats = require('./layer-stats');
var EmptyLayerStats = require('./empty-layer-stats');
var MapnikLayerStats = require('./mapnik-layer-stats');

View File

@@ -1,3 +1,5 @@
'use strict';
var queue = require('queue-async');
function LayerStats(layerStatsIterator) {

View File

@@ -1,5 +1,8 @@
'use strict';
const queryUtils = require('../../utils/query-utils');
const AggregationMapConfig = require('../../models/aggregation/aggregation-mapconfig');
const aggregationQuery = require('../../models/aggregation/aggregation-query');
function MapnikLayerStats () {
this._types = {
@@ -19,6 +22,9 @@ function columnAggregations(field) {
if (field.type === 'date') { // TODO other types too?
return ['min', 'max'];
}
if (field.type === 'timeDimension') {
return ['min', 'max'];
}
return [];
}
@@ -67,13 +73,13 @@ function _geometryType(ctx) {
const geometryColumn = AggregationMapConfig.getAggregationGeometryColumn();
const sqlQuery = _getSQL(ctx, sql => queryUtils.getQueryGeometryType(sql, geometryColumn));
return queryUtils.queryPromise(ctx.dbConnection, sqlQuery)
.then(res => ({ geometryType: res.rows[0].geom_type }));
.then(res => ({ geometryType: (res.rows[0] || {}).geom_type }));
}
return Promise.resolve();
}
function _columns(ctx) {
if (ctx.metaOptions.columns || ctx.metaOptions.columnStats) {
if (ctx.metaOptions.columns || ctx.metaOptions.columnStats || ctx.metaOptions.dimensions) {
// note: post-aggregation columns are in layer.options.columns when aggregation is present
return queryUtils.queryPromise(ctx.dbConnection, _getSQL(ctx, sql => queryUtils.getQueryLimited(sql, 0)))
.then(res => formatResultFields(ctx.dbConnection, res.fields));
@@ -137,51 +143,89 @@ function _sample(ctx, numRows) {
return Promise.resolve();
}
function _columnStats(ctx, columns) {
function _columnsMetadataRequired(options) {
// We need to determine the columns of a query
// if either column stats or dimension stats are required,
// since we'll ultimately use the same query to fetch both
return options.columnStats || options.dimensions;
}
function _columnStats(ctx, columns, dimensions) {
if (!columns) {
return Promise.resolve();
}
if (ctx.metaOptions.columnStats) {
if (_columnsMetadataRequired(ctx.metaOptions)) {
let queries = [];
let aggr = [];
queries.push(new Promise(resolve => resolve(columns))); // add columns as first result
Object.keys(columns).forEach(name => {
aggr = aggr.concat(
columnAggregations(columns[name])
.map(fn => `${fn}("${name}") AS "${name}_${fn}"`)
);
if (columns[name].type === 'string') {
const topN = ctx.metaOptions.columnStats.topCategories || 1024;
const includeNulls = ctx.metaOptions.columnStats.hasOwnProperty('includeNulls') ?
ctx.metaOptions.columnStats.includeNulls :
true;
// TODO: ctx.metaOptions.columnStats.maxCategories
// => use PG stats to dismiss columns with more distinct values
queries.push(
queryUtils.queryPromise(
ctx.dbConnection,
_getSQL(ctx, sql => queryUtils.getQueryTopCategories(sql, name, topN, includeNulls))
).then(res => ({ [name]: { categories: res.rows } }))
if (ctx.metaOptions.columnStats) {
queries.push(new Promise(resolve => resolve({ columns }))); // add columns as first result
Object.keys(columns).forEach(name => {
aggr = aggr.concat(
columnAggregations(columns[name])
.map(fn => `${fn}("${name}") AS "${name}_${fn}"`)
);
}
});
if (columns[name].type === 'string') {
const topN = ctx.metaOptions.columnStats.topCategories || 1024;
const includeNulls = ctx.metaOptions.columnStats.hasOwnProperty('includeNulls') ?
ctx.metaOptions.columnStats.includeNulls :
true;
// TODO: ctx.metaOptions.columnStats.maxCategories
// => use PG stats to dismiss columns with more distinct values
queries.push(
queryUtils.queryPromise(
ctx.dbConnection,
_getSQL(ctx, sql => queryUtils.getQueryTopCategories(sql, name, topN, includeNulls))
).then(res => ({ columns: { [name]: { categories: res.rows } } }))
);
}
});
}
const dimensionsStats = {};
let dimensionsInfo = {};
if (ctx.metaOptions.dimensions && dimensions) {
dimensionsInfo = aggregationQuery.infoForOptions({ dimensions });
Object.keys(dimensionsInfo).forEach(dimName => {
const info = dimensionsInfo[dimName];
if (info.type === 'timeDimension') {
dimensionsStats[dimName] = {
params: info.params
};
aggr = aggr.concat(
columnAggregations(info).map(fn => `${fn}(${info.sql}) AS "${dimName}_${fn}"`)
);
}
});
}
queries.push(
queryUtils.queryPromise(
ctx.dbConnection,
_getSQL(ctx, sql => `SELECT ${aggr.join(',')} FROM (${sql}) AS __cdb_query`)
).then(res => {
let stats = {};
let stats = { columns: {}, dimensions: {} };
Object.keys(columns).forEach(name => {
stats[name] = {};
stats.columns[name] = {};
columnAggregations(columns[name]).forEach(fn => {
stats[name][fn] = res.rows[0][`${name}_${fn}`];
stats.columns[name][fn] = res.rows[0][`${name}_${fn}`];
});
});
Object.keys(dimensionsInfo).forEach(name => {
stats.dimensions[name] = stats.dimensions[name] || Object.assign({}, dimensionsStats[name]);
let type = null;
columnAggregations(dimensionsInfo[name]).forEach(fn => {
type = type ||
fieldTypeSafe(ctx.dbConnection, res.fields.find(f => f.name === `${name}_${fn}`));
stats.dimensions[name][fn] = res.rows[0][`${name}_${fn}`];
});
stats.dimensions[name].type = type;
});
return stats;
})
);
return Promise.all(queries).then(results => ({ columns: mergeColumns(results) }));
return Promise.all(queries).then(results => ({
columns: mergeColumns(results.map(r => r.columns)),
dimensions: mergeColumns(results.map( r => r.dimensions))
}));
}
return Promise.resolve({ columns });
}
@@ -211,19 +255,17 @@ function fieldType(cname) {
return tname;
}
function fieldTypeSafe(dbConnection, field) {
const cname = dbConnection.typeName(field.dataTypeID);
return cname ? fieldType(cname) : `unknown(${field.dataTypeID})`;
}
// columns are returned as an object { columnName1: { type1: ...}, ..}
// for consistency with SQL API
function formatResultFields(dbConnection, fields = []) {
let nfields = {};
for (let field of fields) {
const cname = dbConnection.typeName(field.dataTypeID);
let tname;
if ( ! cname ) {
tname = 'unknown(' + field.dataTypeID + ')';
} else {
tname = fieldType(cname);
}
nfields[field.name] = { type: tname };
nfields[field.name] = { type: fieldTypeSafe(dbConnection, field) };
}
return nfields;
}
@@ -237,7 +279,7 @@ function (layer, dbConnection, callback) {
dbConnection,
preQuery,
aggrQuery,
metaOptions: layer.options.metadata || {}
metaOptions: layer.options.metadata || {},
};
// TODO: could save some queries if queryUtils.getAggregationMetadata() has been used and kept somewhere
@@ -248,6 +290,8 @@ function (layer, dbConnection, callback) {
// TODO: add support for sample.exclude option by, in that case, forcing the columns query and
// passing the results to the sample query function.
const dimensions = (layer.options.aggregation || {}).dimensions;
Promise.all([
_estimatedFeatureCount(ctx).then(
({ estimatedFeatureCount }) => _sample(ctx, estimatedFeatureCount)
@@ -256,9 +300,10 @@ function (layer, dbConnection, callback) {
_featureCount(ctx),
_aggrFeatureCount(ctx),
_geometryType(ctx),
_columns(ctx).then(columns => _columnStats(ctx, columns))
_columns(ctx).then(columns => _columnStats(ctx, columns, dimensions))
]).then(results => {
callback(null, mergeResults(results));
results = mergeResults(results);
callback(null, results);
}).catch(error => {
callback(error);
});

View File

@@ -1,3 +1,5 @@
'use strict';
function TorqueLayerStats() {
this._types = {
torque: true

View File

@@ -1,3 +1,5 @@
'use strict';
const queryUtils = require('../utils/query-utils');
function OverviewsMetadataBackend(pgQueryRunner) {

View File

@@ -1,3 +1,5 @@
'use strict';
var PSQL = require('cartodb-psql');
var _ = require('underscore');
const debug = require('debug')('cachechan');

View File

@@ -1,3 +1,5 @@
'use strict';
var PSQL = require('cartodb-psql');
function PgQueryRunner(pgConnection) {

View File

@@ -1,3 +1,5 @@
'use strict';
var layerStats = require('./layer-stats/factory');
function StatsBackend() {

View File

@@ -1,3 +1,5 @@
'use strict';
function TablesExtentBackend(pgQueryRunner) {
this.pgQueryRunner = pgQueryRunner;
}

View File

@@ -1,3 +1,5 @@
'use strict';
var crypto = require('crypto');
var debug = require('debug')('windshaft:templates');
var _ = require('underscore');

View File

@@ -1,3 +1,5 @@
'use strict';
/**
*
* @param metadataBackend

View File

@@ -1,3 +1,5 @@
'use strict';
var FastlyPurge = require('fastly-purge');
function FastlyCacheBackend(apiKey, serviceId) {

View File

@@ -1,3 +1,5 @@
'use strict';
var request = require('request');
function VarnishHttpCacheBackend(host, port) {

View File

@@ -1,3 +1,5 @@
'use strict';
var LruCache = require('lru-cache');
function LayergroupAffectedTables() {

View File

@@ -1,3 +1,5 @@
'use strict';
var crypto = require('crypto');
function NamedMaps(owner, name) {

View File

@@ -1,3 +1,5 @@
'use strict';
var _ = require('underscore');
var dot = require('dot');
var NamedMapMapConfigProvider = require('../models/mapconfig/provider/named-map-provider');

View File

@@ -1,3 +1,5 @@
'use strict';
var queue = require('queue-async');
/**

View File

@@ -1,3 +1,5 @@
'use strict';
const MapConfig = require('windshaft').model.MapConfig;
const aggregationQuery = require('./aggregation-query');
const aggregationValidator = require('./aggregation-validator');

View File

@@ -1,5 +1,10 @@
'use strict';
const timeDimension = require('./time-dimension');
const DEFAULT_PLACEMENT = 'point-sample';
/**
* Returns a template function (function that accepts template parameters and returns a string)
* to generate an aggregation query.
@@ -24,6 +29,16 @@ const templateForOptions = (options) => {
return templateFn;
};
function optionsToParams (options) {
return {
sourceQuery: options.query,
res: 256/options.resolution,
columns: options.columns,
dimensions: options.dimensions,
filters: options.filters
};
}
/**
* Generates an aggregation query given the aggregation options:
* - query
@@ -38,16 +53,23 @@ const templateForOptions = (options) => {
* When placement, columns or dimensions are specified, columns are aggregated as requested
* (by default only _cdb_feature_count) and with the_geom_webmercator as defined by placement.
*/
const queryForOptions = (options) => templateForOptions(options)({
sourceQuery: options.query,
res: 256/options.resolution,
columns: options.columns,
dimensions: options.dimensions,
filters: options.filters
});
const queryForOptions = (options) => templateForOptions(options)(optionsToParams(options));
module.exports = queryForOptions;
module.exports.infoForOptions = (options) => {
const params = optionsToParams(options);
const dimensions = {};
dimensionNamesAndExpressions(params).forEach(([dimensionName, info]) => {
dimensions[dimensionName] = {
sql: info.sql,
params: info.effectiveParams,
type: info.type
};
});
return dimensions;
};
const SUPPORTED_AGGREGATE_FUNCTIONS = {
'count': {
sql: (column_name, params) => `count(${params.aggregated_column || '*'})`
@@ -113,24 +135,56 @@ const aggregateColumnDefs = ctx => {
const aggregateDimensions = ctx => ctx.dimensions || {};
const dimensionNames = (ctx, table) => {
let dimensions = aggregateDimensions(ctx);
if (table) {
return sep(Object.keys(dimensions).map(
dimension_name => `${table}."${dimension_name}"`
));
const timeDimensionParameters = definition => {
// definition.column should correspond to a wrapped date column
const group = definition.group || {};
return {
time: `to_timestamp("${definition.column}")`,
timezone: group.timezone || 'utc',
units: group.units,
count: group.count || 1,
starting: group.starting,
format: definition.format
};
};
// Adapt old-style dimension definitions for backwards compatibility
const adaptDimensionDefinition = definition => {
if (typeof(definition) === 'string') {
return { column: definition };
}
return sep(Object.keys(dimensions).map(dimension_name => {
return `"${dimension_name}"`;
return definition;
};
const dimensionExpression = definition => {
if (definition.group) {
// Currently only time dimensions are supported with parameters
return Object.assign({ type: 'timeDimension' }, timeDimension(timeDimensionParameters(definition)));
} else {
return { sql: `"${definition.column}"` };
}
};
const dimensionNamesAndExpressions = (ctx) => {
let dimensions = aggregateDimensions(ctx);
return Object.keys(dimensions).map(dimensionName => {
const dimension = adaptDimensionDefinition(dimensions[dimensionName]);
const expression = dimensionExpression(dimension);
return [dimensionName, expression];
});
};
const dimensionNames = (ctx, table) => {
return sep(dimensionNamesAndExpressions(ctx).map(([dimensionName]) => {
return table ? `${table}."${dimensionName}"` : `"${dimensionName}"`;
}));
};
const dimensionDefs = ctx => {
let dimensions = aggregateDimensions(ctx);
return sep(Object.keys(dimensions).map(dimension_name => {
const expression = dimensions[dimension_name];
return `"${expression}" AS "${dimension_name}"`;
}));
return sep(
dimensionNamesAndExpressions(ctx)
.map(([dimensionName, expression]) => `${expression.sql} AS "${dimensionName}"`)
);
};
const aggregateFilters = ctx => ctx.filters || {};

View File

@@ -1,3 +1,5 @@
'use strict';
module.exports = function aggregationValidator (mapconfig) {
return function validateProperty (key, validator) {
for (let index = 0; index < mapconfig.getLayers().length; index++) {

View File

@@ -0,0 +1,267 @@
'use strict';
// timezones can be defined either by a numeric offset in seconds or by
// a valid (case-insensitive) tz/PG name;
// they include abbreviations defined by PG (which have precedence and
// are fixed offsets, not handling DST) or general names that can handle DST.

/**
 * Renders a time zone specification as a SQL fragment.
 * @param {number|string} tz - offset in seconds (any finite number) or a tz/PG zone name.
 * @returns {string} an INTERVAL literal for numeric offsets, or a quoted
 *   string literal for zone names.
 */
function timezone(tz) {
    if (isFinite(tz)) {
        return `INTERVAL '${tz} seconds'`;
    }
    // Zone names originate from user-provided map configs; double any single
    // quotes so the value cannot break out of the generated SQL literal.
    return `'${String(tz).replace(/'/g, "''")}'`;
}
// `t` is assumed to be a TIMESTAMP WITH TIME ZONE expression.
// A plain TIMESTAMP or TIME (no time zone) should first be converted
// with `timezone('utc', t)` to a type carrying a time zone.
// By default CARTO stores dates in timestamp-with-time-zone columns and
// VectorMapConfigAdapter converts them to epoch numbers, so when using
// this for aggregations on such UTC UNIX epoch numbers, wrap the
// (converted) column in `to_timestamp` first.

/**
 * Wraps a time expression in a timezone() conversion when a time zone is given.
 * @param {string} t - SQL time expression.
 * @param {number|string} [tz] - optional time zone (offset seconds or name).
 * @returns {string} SQL expression, converted to the given zone if any.
 */
function timeExpression(t, tz) {
    const hasTimezone = tz !== undefined;
    return hasTimezone ? `timezone(${timezone(tz)}, ${t})` : t;
}
/**
 * Normalizes a (possibly partial) ISO-like timestamp into a complete
 * 'YYYY-MM-DDTHH:MM:SS' literal, applying defaults to the missing parts:
 * YYYY=0001, MM=01, DD=01, HH=00, MM=00, SS=00.
 * Accepts both compact ('20180305T1030') and separated
 * ('2018-03-05T10:30:45') forms; unparseable input yields all defaults.
 * @param {string} [epoch] - partial timestamp; falsy values allowed.
 * @returns {string} complete timestamp literal without time zone.
 */
function epochWithDefaults(epoch) {
    // Separators are optional throughout. Fixed: the previous pattern had no
    // ':' allowed between hours and minutes, so 'THH:MM[:SS]' inputs failed
    // to match entirely and silently fell back to the defaults.
    const format = /^(\d\d\d\d)(?:-?(\d\d)(?:-?(\d\d)(?:[T\s]?(\d\d)(?::?(\d\d)(?::?(\d\d))?)?)?)?)?$/;
    const match = (epoch || '').match(format) || [];
    const [, year = '0001', month = '01', day = '01', hour = '00', minute = '00', second = '00'] = match;
    return `${year}-${month}-${day}T${hour}:${minute}:${second}`;
}
// Epoch should be an ISO timestamp literal without time zone
// (it is interpreted as in the defined timezone for the input time).
// It can be partial, e.g. 'YYYY', 'YYYY-MM', 'YYYY-MM-DDTHH', etc.
// Defaults are applied: YYYY=0001, MM=01, DD=01, HH=00, MM=00, S=00
// It returns a timestamp without time zone.

/**
 * Renders an epoch timestamp as a SQL TIMESTAMP literal (no time zone).
 * @param {string} epoch - ISO timestamp literal.
 * @returns {string} SQL TIMESTAMP literal.
 */
function epochExpression(epoch) {
    const literal = `TIMESTAMP '${epoch}'`;
    return literal;
}
// SQL fragment templates; $t and $epoch are placeholders substituted by
// serialSqlExpr with the time expression and the reference epoch literal.
const YEARSPAN = "(date_part('year', $t)-date_part('year', $epoch))";
// Note that SECONDSPAN is not a UTC epoch, but an epoch in the specified TZ,
// so we can use it to compute any multiple of seconds with it without using date_part or date_trunc
const SECONDSPAN = "(date_part('epoch', $t) - date_part('epoch', $epoch))";
// Serial (monotonically increasing) grouping units. `zeroBased` records
// whether the serial numbering starts at 0 or at 1, which selects the
// rounding function (FLOOR vs CEIL) when serialSqlExpr batches `count`
// consecutive units into a single bucket.
const serialParts = {
    second: {
        sql: `FLOOR(${SECONDSPAN})`,
        zeroBased: true
    },
    minute: {
        sql: `FLOOR(${SECONDSPAN}/60)`,
        zeroBased: true
    },
    hour: {
        sql: `FLOOR(${SECONDSPAN}/3600)`,
        zeroBased: true
    },
    day: {
        sql: `1 + FLOOR(${SECONDSPAN}/86400)`,
        zeroBased: false
    },
    week: {
        sql: `1 + FLOOR(${SECONDSPAN}/(7*86400))`,
        zeroBased: false
    },
    month: {
        sql: `1 + date_part('month', $t) - date_part('month', $epoch) + 12*${YEARSPAN}`,
        zeroBased: false
    },
    quarter: {
        sql: `1 + date_part('quarter', $t) - date_part('quarter', $epoch) + 4*${YEARSPAN}`,
        zeroBased: false
    },
    semester: {
        sql: `1 + FLOOR((date_part('month', $t) - date_part('month', $epoch))/6) + 2*${YEARSPAN}`,
        zeroBased: false
    },
    trimester: {
        sql: `1 + FLOOR((date_part('month', $t) - date_part('month', $epoch))/4) + 3*${YEARSPAN}`,
        zeroBased: false
    },
    year: {
        // for the default epoch this coincides with date_part('year', $t)
        sql: `1 + ${YEARSPAN}`,
        zeroBased: false
    },
    decade: {
        // for the default epoch this coincides with date_part('decade', $t)
        sql: `FLOOR((${YEARSPAN} + 1)/10)`,
        zeroBased: true
    },
    century: {
        // for the default epoch this coincides with date_part('century', $t)
        sql: `1 + FLOOR(${YEARSPAN}/100)`,
        zeroBased: false
    },
    millennium: {
        // for the default epoch this coincides with date_part('millennium', $t)
        sql: `1 + FLOOR(${YEARSPAN}/1000)`,
        zeroBased: false
    }
};
/**
 * Builds the SQL expression for a serial (monotonically increasing) time
 * grouping, optionally batching `count` consecutive units per bucket.
 * @param {object} params - validated parameters (units, time, timezone, starting, count).
 * @returns {string} SQL expression yielding an integer bucket number.
 */
function serialSqlExpr(params) {
    const part = serialParts[params.units];
    const timeCol = timeExpression(params.time, params.timezone);
    const epochLiteral = epochExpression(params.starting);
    const serial = part.sql
        .replace(/\$t/g, timeCol)
        .replace(/\$epoch/g, epochLiteral);
    if (params.count === 1) {
        return `(${serial})::int`;
    }
    // Batching direction depends on whether the serial numbering is
    // zero-based (FLOOR) or one-based (CEIL).
    const rounding = part.zeroBased ? 'FLOOR' : 'CEIL';
    return `${rounding}((${serial})/(${params.count}::double precision))::int`;
}
// to_char templates that label each grouping unit with ISO-like text;
// $t is substituted with the time expression by isoSqlExpr.
const isoParts = {
    second: `to_char($t, 'YYYY-MM-DD"T"HH24:MI:SS')`,
    minute: `to_char($t, 'YYYY-MM-DD"T"HH24:MI')`,
    hour: `to_char($t, 'YYYY-MM-DD"T"HH24')`,
    day: `to_char($t, 'YYYY-MM-DD')`,
    month: `to_char($t, 'YYYY-MM')`,
    year: `to_char($t, 'YYYY')`,
    week: `to_char($t, 'IYYY-"W"IW')`,
    quarter: `to_char($t, 'YYYY-"Q"Q')`,
    semester: `to_char($t, 'YYYY"S"') || to_char(CEIL(date_part('month', $t)/6), '9')`,
    trimester: `to_char($t, 'YYYY"t"') || to_char(CEIL(date_part('month', $t)/4), '9')`,
    decade: `to_char(date_part('decade', $t), '"D"999')`,
    century: `to_char($t, '"C"CC')`,
    millennium: `to_char(date_part('millennium', $t), '"M"999')`
};
/**
 * Builds the SQL expression that renders a time as an ISO-style text label.
 * Only single-unit groupings are supported.
 * @param {object} params - validated parameters (units, time, timezone, count).
 * @returns {string} SQL expression producing a text label.
 * @throws {Error} when params.count > 1 (multi-unit ISO not supported).
 */
function isoSqlExpr(params) {
    const timeCol = timeExpression(params.time, params.timezone);
    if (params.count > 1) {
        // TODO: it would be sensible to return the ISO of the first unit in the period
        throw new Error('Multiple time units not supported for ISO format');
    }
    return isoParts[params.units].replace(/\$t/g, timeCol);
}
// Cyclic (repeating) time parts; $t is substituted with the time expression
// by cyclicSqlExpr. The inline comments give each part's value range.
const cyclicParts = {
    dayOfWeek: `date_part('isodow', $t)`, // 1 = monday to 7 = sunday;
    dayOfMonth: `date_part('day', $t)`, // 1 to 31
    dayOfYear: `date_part('doy', $t)`, // 1 to 366
    hourOfDay: `date_part('hour', $t)`, // 0 to 23
    monthOfYear: `date_part('month', $t)`, // 1 to 12
    quarterOfYear: `date_part('quarter', $t)`, // 1 to 4
    semesterOfYear: `FLOOR((date_part('month', $t)-1)/6.0) + 1`, // 1 to 2
    trimesterOfYear: `FLOOR((date_part('month', $t)-1)/4.0) + 1`, // 1 to 3
    weekOfYear: `date_part('week', $t)`, // 1 to 53
    minuteOfHour: `date_part('minute', $t)` // 0 to 59
};
/**
 * Builds the SQL expression for a cyclic (repeating) time part,
 * e.g. day of week or hour of day.
 * @param {object} params - validated parameters (units, time, timezone).
 * @returns {string} SQL expression producing the cyclic part value.
 */
function cyclicSqlExpr(params) {
    const template = cyclicParts[params.units];
    return template.replace(/\$t/g, timeExpression(params.time, params.timezone));
}
// Parameter names a time dimension definition may (and must) contain.
const ACCEPTED_PARAMETERS = ['time', 'units', 'timezone', 'count', 'starting', 'format'];
const REQUIRED_PARAMETERS = ['time', 'units'];

/**
 * Validates the generic time-dimension parameters, then runs the
 * classifier-specific `checker`; all problems are aggregated into a
 * single Error.
 * @param {object} params - raw time-dimension parameters.
 * @param {function} checker - classifier check; returns { errors, params }.
 * @returns {object} the checker's normalized parameters.
 * @throws {Error} listing every invalid/missing/classifier-specific problem.
 */
function validateParameters(params, checker) {
    const names = Object.keys(params);
    const unknown = names.filter(name => !ACCEPTED_PARAMETERS.includes(name));
    const missing = REQUIRED_PARAMETERS.filter(name => !names.includes(name));
    const errors = [];
    if (unknown.length) {
        errors.push(`Invalid parameters: ${unknown.join(', ')}`);
    }
    if (missing.length) {
        errors.push(`Missing parameters: ${missing.join(', ')}`);
    }
    // The checker always runs so its errors are reported alongside the
    // generic ones in a single message.
    const checked = checker(params);
    errors.push(...checked.errors);
    if (errors.length) {
        throw new Error(`Invalid time dimension:\n${errors.join("\n")}`);
    }
    return checked.params;
}
// Valid unit names per classification mode, derived from the part tables.
const VALID_CYCLIC_UNITS = Object.keys(cyclicParts);
const VALID_SERIAL_UNITS = Object.keys(serialParts);
const VALID_ISO_UNITS = Object.keys(isoParts);

/**
 * Classifier-specific validation for cyclic groupings; multi-unit
 * counts are not supported for cyclic parts.
 * @returns {{errors: string[], params: object}}
 */
function cyclicCheckParams(params) {
    const errors = [];
    const unitsAreValid = VALID_CYCLIC_UNITS.includes(params.units);
    if (!unitsAreValid) {
        errors.push(`Invalid units "${params.units}"`);
    }
    if (params.count && params.count > 1) {
        errors.push(`Count ${params.count} not supported for cyclic ${params.units}`);
    }
    return { errors, params };
}
/**
 * Classifier-specific validation for serial groupings; normalizes the
 * `starting` epoch by applying defaults for its missing parts.
 * @returns {{errors: string[], params: object}}
 */
function serialCheckParams(params) {
    const errors = VALID_SERIAL_UNITS.includes(params.units)
        ? []
        : [`Invalid grouping units "${params.units}"`];
    const normalized = Object.assign({}, params, { starting: epochWithDefaults(params.starting) });
    return { errors, params: normalized };
}
/**
 * Classifier-specific validation for ISO-formatted groupings; a custom
 * `starting` epoch is not supported in this mode.
 * @returns {{errors: string[], params: object}}
 */
function isoCheckParams(params) {
    const errors = [];
    const unitsKnown = VALID_ISO_UNITS.includes(params.units);
    if (!unitsKnown) {
        errors.push(`Invalid units "${params.units}"`);
    }
    if (params.starting) {
        errors.push("Parameter 'starting' not supported for ISO format");
    }
    return { errors, params };
}
// Dispatch table: each classification mode pairs its SQL generator with
// its parameter checker (selected by classifierFor, driven by classificationSql).
const CLASSIFIERS = {
    cyclic: {
        sqlExpr: cyclicSqlExpr,
        checkParams: cyclicCheckParams
    },
    iso: {
        sqlExpr: isoSqlExpr,
        checkParams: isoCheckParams
    },
    serial: {
        sqlExpr: serialSqlExpr,
        checkParams: serialCheckParams
    }
};
/** True when `units` names a cyclic (repeating) time part. */
function isCyclic(units) {
    return VALID_CYCLIC_UNITS.includes(units);
}

/**
 * Picks the classifier (cyclic, iso or serial) matching the parameters.
 * Cyclic units take precedence over an explicit 'iso' format; serial is
 * the default.
 * @returns {{sqlExpr: function, checkParams: function}}
 */
function classifierFor(params) {
    if (params.units && isCyclic(params.units)) {
        return CLASSIFIERS.cyclic;
    }
    if (params.format === 'iso') {
        return CLASSIFIERS.iso;
    }
    return CLASSIFIERS.serial;
}
/**
 * Entry point: classifies and validates the time-dimension parameters,
 * then returns the grouping SQL expression together with the effective
 * (normalized) parameters used to build it.
 * @param {object} params - raw time-dimension parameters.
 * @returns {{sql: string, effectiveParams: object}}
 * @throws {Error} when the parameters are invalid for the chosen classifier.
 */
function classificationSql(params) {
    const classifier = classifierFor(params);
    const effectiveParams = validateParameters(params, classifier.checkParams);
    const sql = classifier.sqlExpr(effectiveParams);
    return { sql, effectiveParams };
}
module.exports = classificationSql;

View File

@@ -1,3 +1,5 @@
'use strict';
function CdbRequest() {
this.RE_USER_FROM_HOST = new RegExp(global.environment.user_from_host ||
'^([^\\.]+)\\.' // would extract "strk" from "strk.cartodb.com"

View File

@@ -1,3 +1,5 @@
'use strict';
const BaseDataview = require('./base');
const debug = require('debug')('windshaft:dataview:aggregation');

View File

@@ -1,3 +1,5 @@
'use strict';
const FLOAT_OIDS = {
700: true,
701: true,

View File

@@ -1,3 +1,5 @@
'use strict';
const dataviews = require('./');
module.exports = class DataviewFactory {

View File

@@ -1,3 +1,5 @@
'use strict';
const BaseDataview = require('./base');
const debug = require('debug')('windshaft:dataview:formula');
const utils = require('../../utils/query-utils');

Some files were not shown because too many files have changed in this diff Show More