Compare commits
250 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
ca7f0ad4a6 | ||
|
|
4fc4390173 | ||
|
|
fff54f021c | ||
|
|
6fee16fe5e | ||
|
|
5f43db2e36 | ||
|
|
561bdb3938 | ||
|
|
47576358a2 | ||
|
|
582947accd | ||
|
|
30f4b58ced | ||
|
|
227a271bea | ||
|
|
5455d2997f | ||
|
|
241bb511ea | ||
|
|
85e19bf16c | ||
|
|
e93dd982b9 | ||
|
|
535b52df55 | ||
|
|
a04782b63e | ||
|
|
feb1e53a4d | ||
|
|
705c001681 | ||
|
|
a973fc981a | ||
|
|
77cbe2b545 | ||
|
|
0b5ef6a5e1 | ||
|
|
97f813a777 | ||
|
|
a242588d95 | ||
|
|
b2f5e72bf1 | ||
|
|
0daf8dcd8d | ||
|
|
79e886cfb9 | ||
|
|
4f6d04a3ae | ||
|
|
0e26d5ba5a | ||
|
|
915e0f8483 | ||
|
|
4058d9fbc7 | ||
|
|
6c6ff42879 | ||
|
|
450f74b387 | ||
|
|
f684994868 | ||
|
|
be4240623a | ||
|
|
4b5e003f33 | ||
|
|
6c7c0eb7e7 | ||
|
|
1ba6480110 | ||
|
|
af31962b2d | ||
|
|
f134bd459e | ||
|
|
01d02f8c2e | ||
|
|
092bed6d9d | ||
|
|
8d9b8aced2 | ||
|
|
2856703acc | ||
|
|
14b8a72551 | ||
|
|
0125dcdd1d | ||
|
|
6a15d30579 | ||
|
|
3dcefa3ea1 | ||
|
|
b3a995f880 | ||
|
|
d40be45e9a | ||
|
|
ec952d88cc | ||
|
|
3983dfe004 | ||
|
|
e1c4f8445c | ||
|
|
2a68e0565e | ||
|
|
f4b6173da9 | ||
|
|
63c95df81c | ||
|
|
065f9c0a53 | ||
|
|
abe28937ca | ||
|
|
77b7e03869 | ||
|
|
d5af1bd9a2 | ||
|
|
86f313ec52 | ||
|
|
2e85e130c8 | ||
|
|
d70a87b299 | ||
|
|
c4576564e5 | ||
|
|
f82a7a148b | ||
|
|
b4431d823c | ||
|
|
0168aa3a61 | ||
|
|
2064b14015 | ||
|
|
aeea83d5f5 | ||
|
|
221bf0cefd | ||
|
|
2899f62d95 | ||
|
|
442ca34eb1 | ||
|
|
0202a17138 | ||
|
|
37645ec663 | ||
|
|
56eb4a8dc3 | ||
|
|
7b029c890a | ||
|
|
644b4232ca | ||
|
|
ff27e6744e | ||
|
|
fb929e71fc | ||
|
|
0bd7bd1621 | ||
|
|
5cad6e40c4 | ||
|
|
3a4984c1ce | ||
|
|
caa5a648df | ||
|
|
8487e4fb52 | ||
|
|
b508689b53 | ||
|
|
3b4d1bf72a | ||
|
|
0bbbdfa092 | ||
|
|
c1e769ade2 | ||
|
|
a20ce69028 | ||
|
|
80519cb397 | ||
|
|
7347263dfd | ||
|
|
2a3b6b830b | ||
|
|
11b299e116 | ||
|
|
27eef4ce42 | ||
|
|
59badc0137 | ||
|
|
f49698efa1 | ||
|
|
19d6cae10d | ||
|
|
7057f5a5c2 | ||
|
|
3e336204df | ||
|
|
3f4ecb195c | ||
|
|
58b528d00a | ||
|
|
2d2060088c | ||
|
|
d1667fac73 | ||
|
|
98c0b1f9bd | ||
|
|
1d2548a3e6 | ||
|
|
3690959be4 | ||
|
|
7de5fd1515 | ||
|
|
f9d1e39b7b | ||
|
|
16a0d9707b | ||
|
|
6962abfd10 | ||
|
|
dc0d4f0011 | ||
|
|
6bcb535d3f | ||
|
|
4ad9902601 | ||
|
|
2a933788fd | ||
|
|
3febf3e357 | ||
|
|
a8ca80c23c | ||
|
|
f3b1bb742a | ||
|
|
4d1ed0be27 | ||
|
|
af4b9f57f5 | ||
|
|
6e7bd2585f | ||
|
|
40ccdfd9b3 | ||
|
|
659b0ba889 | ||
|
|
71b8699f47 | ||
|
|
24c5bbb182 | ||
|
|
2eea20b161 | ||
|
|
f2180576de | ||
|
|
d9039569bd | ||
|
|
4a82d18cc6 | ||
|
|
f8fa78bb8b | ||
|
|
babfa9aae3 | ||
|
|
ebf2f54cd5 | ||
|
|
20c1e8ca05 | ||
|
|
2a35d51d45 | ||
|
|
ebe2d2ddab | ||
|
|
1ee30e9b53 | ||
|
|
7527003711 | ||
|
|
acd0bbc94f | ||
|
|
00e3f331b4 | ||
|
|
7a6fbecac4 | ||
|
|
6cc746dc83 | ||
|
|
6c3d8dbe64 | ||
|
|
938e3b2b07 | ||
|
|
b4e57438ed | ||
|
|
7bd188dafb | ||
|
|
60724897cc | ||
|
|
e03defc30f | ||
|
|
3bb1f893af | ||
|
|
37a61f527c | ||
|
|
5e3d546fb6 | ||
|
|
b7b5f031f3 | ||
|
|
e57e548c31 | ||
|
|
420c39337c | ||
|
|
214c796a4c | ||
|
|
8918d6bec0 | ||
|
|
ca7acb8339 | ||
|
|
5083ccb605 | ||
|
|
6908aa532c | ||
|
|
a7daa077ac | ||
|
|
9f0d4905b1 | ||
|
|
89d10210be | ||
|
|
545d387bb4 | ||
|
|
e2d27db828 | ||
|
|
33bcac189f | ||
|
|
361e99006b | ||
|
|
7162ab1631 | ||
|
|
9374e0fe18 | ||
|
|
b13ae62d0f | ||
|
|
49de289a9c | ||
|
|
b6dcf72268 | ||
|
|
76cfd185de | ||
|
|
79820a0f05 | ||
|
|
a0126f6a15 | ||
|
|
abd378e5f6 | ||
|
|
e7e3d612a1 | ||
|
|
208dbfd951 | ||
|
|
26e4a05276 | ||
|
|
3e261fb353 | ||
|
|
4775c73aee | ||
|
|
1ece97d0a1 | ||
|
|
87ef8d1977 | ||
|
|
2ebb1728ee | ||
|
|
621b11ebd6 | ||
|
|
12d58f3af2 | ||
|
|
211e815d9c | ||
|
|
465fd2ec0a | ||
|
|
d0c405ae46 | ||
|
|
953d831d5f | ||
|
|
5573db2bc1 | ||
|
|
195b23248b | ||
|
|
83897293c6 | ||
|
|
f26ddef244 | ||
|
|
d25e8e9798 | ||
|
|
bfbd9a8f22 | ||
|
|
bd17f9f5e1 | ||
|
|
8491b86c17 | ||
|
|
376a3743c1 | ||
|
|
a42af5e0d5 | ||
|
|
e157649571 | ||
|
|
e50d1a10d0 | ||
|
|
d474d49ce8 | ||
|
|
4dba4ef641 | ||
|
|
be08fa3bfa | ||
|
|
945b151712 | ||
|
|
2af6486f73 | ||
|
|
9cffc8781a | ||
|
|
b75c1f7f08 | ||
|
|
c5d22bf9e3 | ||
|
|
1baae5e709 | ||
|
|
da3239cfa1 | ||
|
|
ba0078c51c | ||
|
|
47f64401a7 | ||
|
|
8bdbe7c9b7 | ||
|
|
0637018cca | ||
|
|
8a7bef673b | ||
|
|
a0e71ac396 | ||
|
|
184a804367 | ||
|
|
c234b4ea91 | ||
|
|
db13f5e4f3 | ||
|
|
f9a8b3c827 | ||
|
|
17886d0e43 | ||
|
|
1f112d587f | ||
|
|
5c56ea6b22 | ||
|
|
3c76dfbbb3 | ||
|
|
e158e3e426 | ||
|
|
12dc1626a7 | ||
|
|
297e56f4e1 | ||
|
|
09f75441ba | ||
|
|
41bd69d050 | ||
|
|
73b3402d85 | ||
|
|
d66a304b00 | ||
|
|
ee63b247cd | ||
|
|
418e0e2aa3 | ||
|
|
d4bd706fe2 | ||
|
|
a4dfc09c71 | ||
|
|
9ed39f149b | ||
|
|
0e85aa56da | ||
|
|
2f59919f84 | ||
|
|
10baf43ede | ||
|
|
996d7fc90d | ||
|
|
c0febf2fd1 | ||
|
|
f841f65a1e | ||
|
|
c9786ee3f6 | ||
|
|
99b62edcbd | ||
|
|
c588d4139e | ||
|
|
aff55351ad | ||
|
|
96ba075698 | ||
|
|
a7d5415f64 | ||
|
|
dede22c915 | ||
|
|
fbf3fd9d8c | ||
|
|
e70de80cdf | ||
|
|
fbcfc7a582 |
77
.travis.yml
77
.travis.yml
@@ -4,78 +4,5 @@ jobs:
|
||||
services:
|
||||
- docker
|
||||
language: generic
|
||||
before_install: docker pull carto/nodejs6-xenial-pg101
|
||||
script: npm run docker-test
|
||||
- dist: precise
|
||||
addons:
|
||||
postgresql: "9.5"
|
||||
apt:
|
||||
sources:
|
||||
- ubuntu-toolchain-r-test
|
||||
packages:
|
||||
- pkg-config
|
||||
- libcairo2-dev
|
||||
- libjpeg8-dev
|
||||
- libgif-dev
|
||||
- libpango1.0-dev
|
||||
- g++-4.9
|
||||
- wget
|
||||
|
||||
before_install:
|
||||
# Add custom PPAs from cartodb
|
||||
- sudo add-apt-repository -y ppa:cartodb/postgresql-9.5
|
||||
- sudo add-apt-repository -y ppa:cartodb/gis
|
||||
- sudo add-apt-repository -y ppa:cartodb/gis-testing
|
||||
|
||||
- sudo apt-get update
|
||||
|
||||
# Force instalation of libgeos-3.5.0 (presumably needed because of existing version of postgis)
|
||||
- sudo apt-get -y install libgeos-3.5.0=3.5.0-1cdb2
|
||||
|
||||
# Install postgres db and build deps
|
||||
- sudo /etc/init.d/postgresql stop # stop travis default instance
|
||||
- sudo apt-get -y remove --purge postgresql-9.1
|
||||
- sudo apt-get -y remove --purge postgresql-9.2
|
||||
- sudo apt-get -y remove --purge postgresql-9.3
|
||||
- sudo apt-get -y remove --purge postgresql-9.4
|
||||
- sudo apt-get -y remove --purge postgresql-9.5
|
||||
- sudo apt-get -y remove --purge postgresql-9.6
|
||||
- sudo rm -rf /var/lib/postgresql/
|
||||
- sudo rm -rf /var/log/postgresql/
|
||||
- sudo rm -rf /etc/postgresql/
|
||||
- sudo apt-get -y remove --purge postgis-2.2
|
||||
- sudo apt-get -y autoremove
|
||||
- sudo apt-get -y install postgresql-9.5=9.5.2-3cdb3
|
||||
- sudo apt-get -y install postgresql-server-dev-9.5=9.5.2-3cdb3
|
||||
- sudo apt-get -y install postgresql-plpython-9.5=9.5.2-3cdb3
|
||||
- sudo apt-get -y install postgresql-9.5-postgis-scripts=2.2.2.0-cdb2
|
||||
- sudo apt-get -y install postgresql-9.5-postgis-2.2=2.2.2.0-cdb2
|
||||
|
||||
# configure it to accept local connections from postgres
|
||||
- echo -e "# TYPE DATABASE USER ADDRESS METHOD \nlocal all postgres trust\nlocal all all trust\nhost all all 127.0.0.1/32 trust" \
|
||||
| sudo tee /etc/postgresql/9.5/main/pg_hba.conf
|
||||
- sudo /etc/init.d/postgresql restart 9.5
|
||||
|
||||
- createdb template_postgis
|
||||
- createuser publicuser
|
||||
- psql -c "CREATE EXTENSION postgis" template_postgis
|
||||
|
||||
# install yarn 0.27.5
|
||||
- curl -o- -L https://yarnpkg.com/install.sh | bash -s -- --version 0.27.5
|
||||
- export PATH="$HOME/.yarn/bin:$PATH"
|
||||
|
||||
# instal redis 4
|
||||
- wget http://download.redis.io/releases/redis-4.0.8.tar.gz
|
||||
- tar xvzf redis-4.0.8.tar.gz
|
||||
- cd redis-4.0.8
|
||||
- make
|
||||
- sudo make install
|
||||
- cd ..
|
||||
- rm redis-4.0.8.tar.gz
|
||||
|
||||
env:
|
||||
- NPROCS=1 JOBS=1 PGUSER=postgres CXX=g++-4.9
|
||||
|
||||
language: node_js
|
||||
node_js:
|
||||
- "6"
|
||||
before_install: docker pull carto/nodejs-xenial-pg101:latest
|
||||
script: npm run docker-test -- 10.15.1 # Node.js version
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
1. Test (make clean all check), fix if broken before proceeding
|
||||
2. Ensure proper version in package.json
|
||||
2. Ensure proper version in package.json and package-lock.json
|
||||
3. Ensure NEWS section exists for the new version, review it, add release date
|
||||
4. If there are modified dependencies in package.json, update them with `yarn upgrade {{package_name}}@{{version}}`
|
||||
5. Commit package.json, yarn.lock, NEWS
|
||||
4. If there are modified dependencies in package.json, update them with `npm upgrade {{package_name}}@{{version}}`
|
||||
5. Commit package.json, package-lock.json, NEWS
|
||||
6. git tag -a Major.Minor.Patch # use NEWS section as content
|
||||
7. Stub NEWS/package for next version
|
||||
|
||||
|
||||
50
INSTALL.md
50
INSTALL.md
@@ -1,35 +1,23 @@
|
||||
# Installing Windshaft-CartoDB #
|
||||
# Installing Windshaft-CartoDB
|
||||
|
||||
## Requirements ##
|
||||
Make sure that you have the requirements needed. These are
|
||||
## Requirements
|
||||
|
||||
- Core
|
||||
- Node.js >=6.9.x
|
||||
- yarn >=0.27.5 <1.0.0
|
||||
- PostgreSQL >8.3.x, PostGIS >1.5.x
|
||||
- Redis >2.4.0 (http://www.redis.io)
|
||||
- Mapnik >3.x. See [Installing Mapnik](https://github.com/CartoDB/Windshaft#installing-mapnik).
|
||||
- Windshaft: check [Windshaft dependencies and installation notes](https://github.com/CartoDB/Windshaft#dependencies)
|
||||
- libcairo2-dev, libpango1.0-dev, libjpeg8-dev and libgif-dev for server side canvas support
|
||||
Make sure that you have the requirements needed. These are:
|
||||
|
||||
- For cache control (optional)
|
||||
- CartoDB 0.9.5+ (for `CDB_QueryTables`)
|
||||
- Varnish (http://www.varnish-cache.org)
|
||||
- Node 10.x
|
||||
- npm 6.x
|
||||
- PostgreSQL >= 10.0
|
||||
- PostGIS >= 2.4
|
||||
- CARTO Postgres Extension >= 0.24.1
|
||||
- Redis >= 4
|
||||
- libcairo2-dev, libpango1.0-dev, libjpeg8-dev and libgif-dev for server side canvas support
|
||||
- C++11 (to build internal dependencies if needed)
|
||||
|
||||
On Ubuntu 14.04 the dependencies can be installed with
|
||||
### Optional
|
||||
|
||||
```shell
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y make g++ pkg-config git-core \
|
||||
libgif-dev libjpeg-dev libcairo2-dev \
|
||||
libhiredis-dev redis-server \
|
||||
nodejs nodejs-legacy npm \
|
||||
postgresql-9.3-postgis-2.1 postgresql-plpython-9.3 postgresql-server-dev-9.3
|
||||
```
|
||||
- Varnish (http://www.varnish-cache.org)
|
||||
|
||||
On Ubuntu 12.04 the [cartodb/cairo PPA](https://launchpad.net/~cartodb/+archive/ubuntu/cairo) may be useful.
|
||||
|
||||
## PostGIS setup ##
|
||||
## PostGIS setup
|
||||
|
||||
A `template_postgis` database is expected. One can be set up with
|
||||
|
||||
@@ -38,16 +26,16 @@ createdb --owner postgres --template template0 template_postgis
|
||||
psql -d template_postgis -c 'CREATE EXTENSION postgis;'
|
||||
```
|
||||
|
||||
## Build/install ##
|
||||
## Build/install
|
||||
|
||||
To fetch and build all node-based dependencies, run:
|
||||
|
||||
```
|
||||
yarn
|
||||
```shell
|
||||
npm install
|
||||
```
|
||||
|
||||
Note that the ```yarn``` step will populate the node_modules/
|
||||
Note that the ```npm``` step will populate the node_modules/
|
||||
directory with modules, some of which being compiled on demand. If you
|
||||
happen to have startup errors you may need to force rebuilding those
|
||||
modules. At any time just wipe out the node_modules/ directory and run
|
||||
```yarn``` again.
|
||||
```npm``` again.
|
||||
|
||||
57
NEWS.md
57
NEWS.md
@@ -1,5 +1,62 @@
|
||||
# Changelog
|
||||
|
||||
## 7.0.0
|
||||
Released 2019-02-22
|
||||
|
||||
Breaking changes:
|
||||
- Drop support for Node.js 6
|
||||
- Drop support for npm 3
|
||||
- Stop supporting `yarn.lock`
|
||||
- Drop support for Postgres 9.5
|
||||
- Drop support for PosGIS 2.2
|
||||
- Drop support for Redis 3
|
||||
|
||||
Announcements:
|
||||
- In configuration, set `clipByBox2d` to true by default
|
||||
- Update docs: compatible Node.js and npm versions
|
||||
- Report fine-grained Garbage Collector stats
|
||||
- Adding Authorization to Access-Control-Allow-Headers (https://github.com/CartoDB/CartoDB-SQL-API/issues/534)
|
||||
- Update deps:
|
||||
- windshaft@4.13.1: Upgrade tilelive-mapnik to version 0.6.18-cdb18
|
||||
- camshaft@0.63.4: Improve error message for exceeded batch SQL API payload size: add suggestions about what the user can do about it.
|
||||
- Update dev deps:
|
||||
- jshint@2.9.7
|
||||
- mocha@5.2.0
|
||||
- Be able to customize max waiting workers parameter
|
||||
- Handle 'max waitingClients count exceeded' error as "429, You are over platform's limits"
|
||||
|
||||
## 6.5.1
|
||||
Released 2018-12-26
|
||||
|
||||
Bug Fixes:
|
||||
- Update carto-package.json
|
||||
|
||||
## 6.5.0
|
||||
Released 2018-12-26
|
||||
|
||||
New features
|
||||
- Suport Node.js 10
|
||||
- Configure travis to run docker tests against Node.js 6 & 10 versions
|
||||
- Aggregation time dimensions
|
||||
- Update sample configurations to use PostGIS to generate MVT's by default (as in production)
|
||||
- Upgrades Windshaft to [4.12.1](https://github.com/CartoDB/Windshaft/blob/4.12.1/NEWS.md#version-4121)
|
||||
- `pg-mvt`: Use `query-rewriter` to compose the query to render a MVT tile. If not defined, it will use a Default Query Rewriter.
|
||||
- `pg-mvt`: Fix bug while building query and there is no columns defined for the layer.
|
||||
- `pg-mvt`: Accept trailing semicolon in input queries.
|
||||
- `Renderer Cache Entry`: Do not throw errors for integrity checks.
|
||||
- Fix bug when releasing the renderer cache entry in some scenarios.
|
||||
- Upgrade grainstore to [1.10.0](https://github.com/CartoDB/grainstore/releases/tag/1.10.0)
|
||||
- Upgrade cartodb-redis to [2.1.0](https://github.com/CartoDB/node-cartodb-redis/releases/tag/2.1.0)
|
||||
- Upgrade cartodb-query-tables to [0.4.0](https://github.com/CartoDB/node-cartodb-query-tables/releases/tag/0.4.0)
|
||||
- Upgrade cartodb-psql to [0.13.1](https://github.com/CartoDB/node-cartodb-psql/releases/tag/0.13.1)
|
||||
- Upgrade turbo-carto to [0.21.0](https://github.com/CartoDB/turbo-carto/releases/tag/0.21.0)
|
||||
- Upgrade camshaft to [0.63.1](https://github.com/CartoDB/camshaft/releases/tag/0.63.1)
|
||||
- Upgrade redis-mpool to [0.7.0](https://github.com/CartoDB/node-redis-mpool/releases/tag/0.7.0)
|
||||
|
||||
Bug Fixes:
|
||||
- Prevent from uncaught exception: Range filter Error from camshaft when getting analysis query.
|
||||
- Make all modules to use strict mode semantics.
|
||||
|
||||
## 6.4.0
|
||||
Released 2018-09-24
|
||||
|
||||
|
||||
16
README.md
16
README.md
@@ -31,12 +31,10 @@ Upgrading
|
||||
|
||||
Checkout your commit/branch. If you need to reinstall dependencies (you can check [NEWS](NEWS.md)) do the following:
|
||||
|
||||
```sh
|
||||
$ rm -rf node_modules
|
||||
$ npm install
|
||||
```
|
||||
rm -rf node_modules; yarn
|
||||
```
|
||||
|
||||
Run
|
||||
---
|
||||
|
||||
```
|
||||
node app.js <env>
|
||||
@@ -71,12 +69,12 @@ See [CONTRIBUTING.md](CONTRIBUTING.md).
|
||||
### Developing with a custom windshaft version
|
||||
|
||||
If you plan or want to use a custom / not released yet version of windshaft (or any other dependency) the best option is
|
||||
to use `yarn link`. You can read more about it at [yarn-link: Symlink a package folder](https://yarnpkg.com/en/docs/cli/link).
|
||||
to use `npm link`. You can read more about it at [npm-link: Symlink a package folder](https://docs.npmjs.com/cli/link.html).
|
||||
|
||||
**Quick start**:
|
||||
|
||||
```shell
|
||||
~/windshaft-directory $ yarn
|
||||
~/windshaft-directory $ yarn link
|
||||
~/windshaft-cartodb-directory $ yarn link windshaft
|
||||
~/windshaft-directory $ npm install
|
||||
~/windshaft-directory $ npm link
|
||||
~/windshaft-cartodb-directory $ npm link windshaft
|
||||
```
|
||||
|
||||
77
app.js
77
app.js
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
var http = require('http');
|
||||
var https = require('https');
|
||||
var path = require('path');
|
||||
@@ -126,6 +128,40 @@ listener.on('listening', function() {
|
||||
);
|
||||
});
|
||||
|
||||
function getCPUUsage (oldUsage) {
|
||||
let usage;
|
||||
|
||||
if (oldUsage && oldUsage._start) {
|
||||
usage = Object.assign({}, process.cpuUsage(oldUsage._start.cpuUsage));
|
||||
usage.time = Date.now() - oldUsage._start.time;
|
||||
} else {
|
||||
usage = Object.assign({}, process.cpuUsage());
|
||||
usage.time = process.uptime() * 1000; // s to ms
|
||||
}
|
||||
|
||||
usage.percent = (usage.system + usage.user) / (usage.time * 10);
|
||||
|
||||
Object.defineProperty(usage, '_start', {
|
||||
value: {
|
||||
cpuUsage: process.cpuUsage(),
|
||||
time: Date.now()
|
||||
}
|
||||
});
|
||||
|
||||
return usage;
|
||||
}
|
||||
|
||||
let previousCPUUsage = getCPUUsage();
|
||||
setInterval(function cpuUsageMetrics () {
|
||||
const CPUUsage = getCPUUsage(previousCPUUsage);
|
||||
|
||||
Object.keys(CPUUsage).forEach(property => {
|
||||
global.statsClient.gauge(`windshaft.cpu.${property}`, CPUUsage[property]);
|
||||
});
|
||||
|
||||
previousCPUUsage = CPUUsage;
|
||||
}, 5000);
|
||||
|
||||
setInterval(function() {
|
||||
var memoryUsage = process.memoryUsage();
|
||||
Object.keys(memoryUsage).forEach(function(k) {
|
||||
@@ -152,9 +188,46 @@ if (global.gc) {
|
||||
|
||||
if (gcInterval > 0) {
|
||||
setInterval(function gcForcedCycle() {
|
||||
var start = Date.now();
|
||||
global.gc();
|
||||
global.statsClient.timing('windshaft.gc', Date.now() - start);
|
||||
}, gcInterval);
|
||||
}
|
||||
}
|
||||
|
||||
const gcStats = require('gc-stats')();
|
||||
|
||||
gcStats.on('stats', function ({ pauseMS, gctype }) {
|
||||
global.statsClient.timing('windshaft.gc', pauseMS);
|
||||
global.statsClient.timing(`windshaft.gctype.${getGCTypeValue(gctype)}`, pauseMS);
|
||||
});
|
||||
|
||||
function getGCTypeValue (type) {
|
||||
// 1: Scavenge (minor GC)
|
||||
// 2: Mark/Sweep/Compact (major GC)
|
||||
// 4: Incremental marking
|
||||
// 8: Weak/Phantom callback processing
|
||||
// 15: All
|
||||
let value;
|
||||
|
||||
switch (type) {
|
||||
case 1:
|
||||
value = 'Scavenge';
|
||||
break;
|
||||
case 2:
|
||||
value = 'MarkSweepCompact';
|
||||
break;
|
||||
case 4:
|
||||
value = 'IncrementalMarking';
|
||||
break;
|
||||
case 8:
|
||||
value = 'ProcessWeakCallbacks';
|
||||
break;
|
||||
case 15:
|
||||
value = 'All';
|
||||
break;
|
||||
default:
|
||||
value = 'Unkown';
|
||||
break;
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
@@ -2,15 +2,16 @@
|
||||
"name": "carto_windshaft",
|
||||
"current_version": {
|
||||
"requires": {
|
||||
"node": ">=6.9.2 <10.0.0",
|
||||
"yarn": ">=0.27.5 <1.0.0",
|
||||
"mapnik": ">=3.0.15"
|
||||
"node": "^10.15.1",
|
||||
"npm": "^6.4.1",
|
||||
"mapnik": "==3.0.15.9",
|
||||
"crankshaft": "~0.8.1"
|
||||
},
|
||||
"works_with": {
|
||||
"redis": ">=3.0.0",
|
||||
"postgresql": ">=9.5.0",
|
||||
"postgis": ">=2.2.0.0",
|
||||
"carto_postgresql_ext": ">=0.19.0"
|
||||
"redis": ">=4.0.0",
|
||||
"postgresql": ">=10.0.0",
|
||||
"postgis": ">=2.4.4.5",
|
||||
"carto_postgresql_ext": ">=0.24.1"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -127,10 +127,9 @@ var config = {
|
||||
cache_ttl: 60000,
|
||||
statsInterval: 5000, // milliseconds between each report to statsd about number of renderers and mapnik pool status
|
||||
mvt: {
|
||||
//If enabled, MVTs will be generated with PostGIS directly, instead of using Mapnik,
|
||||
//PostGIS 2.4 is required for this to work
|
||||
//If disabled it will use Mapnik MVT generation
|
||||
usePostGIS: false
|
||||
//If enabled, MVTs will be generated with PostGIS directly
|
||||
//If disabled, MVTs will be generated with Mapnik MVT
|
||||
usePostGIS: true
|
||||
},
|
||||
mapnik: {
|
||||
// The size of the pool of internal mapnik backend
|
||||
@@ -139,6 +138,10 @@ var config = {
|
||||
// Important: check the configuration of uv_threadpool_size to use suitable value
|
||||
poolSize: 8,
|
||||
|
||||
// The maximum number of waiting clients of the pool of internal mapnik backend
|
||||
// This maximum number is per mapnik renderer created in Windshaft's RendererFactory
|
||||
poolMaxWaitingClients: 64,
|
||||
|
||||
// Whether grainstore will use a child process or not to transform CartoCSS into Mapnik XML.
|
||||
// This will prevent blocking the main thread.
|
||||
useCartocssWorkers: false,
|
||||
@@ -182,7 +185,7 @@ var config = {
|
||||
// SQL queries will be wrapped with ST_ClipByBox2D
|
||||
// Returning the portion of a geometry falling within a rectangle
|
||||
// It will only work if snapToGrid is enabled
|
||||
clipByBox2d: false, // this requires postgis >=2.2 and geos >=3.5
|
||||
clipByBox2d: true,
|
||||
|
||||
postgis: {
|
||||
// Parameters to pass to datasource plugin of mapnik
|
||||
|
||||
@@ -127,10 +127,9 @@ var config = {
|
||||
cache_ttl: 60000,
|
||||
statsInterval: 5000, // milliseconds between each report to statsd about number of renderers and mapnik pool status
|
||||
mvt: {
|
||||
//If enabled, MVTs will be generated with PostGIS directly, instead of using Mapnik,
|
||||
//PostGIS 2.4 is required for this to work
|
||||
//If disabled it will use Mapnik MVT generation
|
||||
usePostGIS: false
|
||||
//If enabled, MVTs will be generated with PostGIS directly
|
||||
//If disabled, MVTs will be generated with Mapnik MVT
|
||||
usePostGIS: true
|
||||
},
|
||||
mapnik: {
|
||||
// The size of the pool of internal mapnik backend
|
||||
@@ -139,6 +138,10 @@ var config = {
|
||||
// Important: check the configuration of uv_threadpool_size to use suitable value
|
||||
poolSize: 8,
|
||||
|
||||
// The maximum number of waiting clients of the pool of internal mapnik backend
|
||||
// This maximum number is per mapnik renderer created in Windshaft's RendererFactory
|
||||
poolMaxWaitingClients: 64,
|
||||
|
||||
// Whether grainstore will use a child process or not to transform CartoCSS into Mapnik XML.
|
||||
// This will prevent blocking the main thread.
|
||||
useCartocssWorkers: false,
|
||||
@@ -182,7 +185,7 @@ var config = {
|
||||
// SQL queries will be wrapped with ST_ClipByBox2D
|
||||
// Returning the portion of a geometry falling within a rectangle
|
||||
// It will only work if snapToGrid is enabled
|
||||
clipByBox2d: false, // this requires postgis >=2.2 and geos >=3.5
|
||||
clipByBox2d: true,
|
||||
|
||||
postgis: {
|
||||
// Parameters to pass to datasource plugin of mapnik
|
||||
|
||||
@@ -127,10 +127,9 @@ var config = {
|
||||
cache_ttl: 60000,
|
||||
statsInterval: 5000, // milliseconds between each report to statsd about number of renderers and mapnik pool status
|
||||
mvt: {
|
||||
//If enabled, MVTs will be generated with PostGIS directly, instead of using Mapnik,
|
||||
//PostGIS 2.4 is required for this to work
|
||||
//If disabled it will use Mapnik MVT generation
|
||||
usePostGIS: false
|
||||
//If enabled, MVTs will be generated with PostGIS directly
|
||||
//If disabled, MVTs will be generated with Mapnik MVT
|
||||
usePostGIS: true
|
||||
},
|
||||
mapnik: {
|
||||
// The size of the pool of internal mapnik backend
|
||||
@@ -139,6 +138,10 @@ var config = {
|
||||
// Important: check the configuration of uv_threadpool_size to use suitable value
|
||||
poolSize: 8,
|
||||
|
||||
// The maximum number of waiting clients of the pool of internal mapnik backend
|
||||
// This maximum number is per mapnik renderer created in Windshaft's RendererFactory
|
||||
poolMaxWaitingClients: 64,
|
||||
|
||||
// Whether grainstore will use a child process or not to transform CartoCSS into Mapnik XML.
|
||||
// This will prevent blocking the main thread.
|
||||
useCartocssWorkers: false,
|
||||
@@ -182,7 +185,7 @@ var config = {
|
||||
// SQL queries will be wrapped with ST_ClipByBox2D
|
||||
// Returning the portion of a geometry falling within a rectangle
|
||||
// It will only work if snapToGrid is enabled
|
||||
clipByBox2d: false, // this requires postgis >=2.2 and geos >=3.5
|
||||
clipByBox2d: true,
|
||||
|
||||
postgis: {
|
||||
// Parameters to pass to datasource plugin of mapnik
|
||||
|
||||
@@ -127,10 +127,9 @@ var config = {
|
||||
cache_ttl: 60000,
|
||||
statsInterval: 5000, // milliseconds between each report to statsd about number of renderers and mapnik pool status
|
||||
mvt: {
|
||||
//If enabled, MVTs will be generated with PostGIS directly, instead of using Mapnik,
|
||||
//PostGIS 2.4 is required for this to work
|
||||
//If disabled it will use Mapnik MVT generation
|
||||
usePostGIS: false
|
||||
//If enabled, MVTs will be generated with PostGIS directly
|
||||
//If disabled, MVTs will be generated with Mapnik MVT
|
||||
usePostGIS: true
|
||||
},
|
||||
mapnik: {
|
||||
// The size of the pool of internal mapnik backend
|
||||
@@ -139,6 +138,10 @@ var config = {
|
||||
// Important: check the configuration of uv_threadpool_size to use suitable value
|
||||
poolSize: 8,
|
||||
|
||||
// The maximum number of waiting clients of the pool of internal mapnik backend
|
||||
// This maximum number is per mapnik renderer created in Windshaft's RendererFactory
|
||||
poolMaxWaitingClients: 64,
|
||||
|
||||
// Whether grainstore will use a child process or not to transform CartoCSS into Mapnik XML.
|
||||
// This will prevent blocking the main thread.
|
||||
useCartocssWorkers: false,
|
||||
@@ -182,7 +185,7 @@ var config = {
|
||||
// SQL queries will be wrapped with ST_ClipByBox2D
|
||||
// Returning the portion of a geometry falling within a rectangle
|
||||
// It will only work if snapToGrid is enabled
|
||||
clipByBox2d: false, // this requires postgis >=2.2 and geos >=3.5
|
||||
clipByBox2d: true,
|
||||
|
||||
postgis: {
|
||||
// Parameters to pass to datasource plugin of mapnik
|
||||
|
||||
13
docker-bash.sh
Executable file
13
docker-bash.sh
Executable file
@@ -0,0 +1,13 @@
|
||||
#!/bin/bash
|
||||
|
||||
echo "*********************"
|
||||
echo "To install Node.js, run:"
|
||||
echo "/src/nodejs-install.sh"
|
||||
echo "Use NODEJS_VERSION env var to select the Node.js version"
|
||||
echo " "
|
||||
echo "To start postgres, run:"
|
||||
echo "/etc/init.d/postgresql start"
|
||||
echo "*********************"
|
||||
echo " "
|
||||
|
||||
docker run -it -v `pwd`:/srv carto/nodejs-xenial-pg101:latest bash
|
||||
4
docker-test.sh
Executable file
4
docker-test.sh
Executable file
@@ -0,0 +1,4 @@
|
||||
#!/bin/bash
|
||||
|
||||
docker run -e "NODEJS_VERSION=${1}" -v `pwd`:/srv carto/nodejs-xenial-pg101:latest bash run_tests_docker.sh && \
|
||||
docker ps --filter status=dead --filter status=exited -aq | xargs docker rm -v
|
||||
92
docker/Dockerfile-nodejs-xenial-pg101:latest
Normal file
92
docker/Dockerfile-nodejs-xenial-pg101:latest
Normal file
@@ -0,0 +1,92 @@
|
||||
FROM ubuntu:xenial
|
||||
|
||||
# Use UTF8 to avoid encoding problems with pgsql
|
||||
ENV LANG C.UTF-8
|
||||
ENV NPROCS 1
|
||||
ENV JOBS 1
|
||||
ENV CXX g++-4.9
|
||||
ENV PGUSER postgres
|
||||
|
||||
# Add external repos
|
||||
RUN set -ex \
|
||||
&& apt-get update \
|
||||
&& apt-get install -y \
|
||||
curl \
|
||||
software-properties-common \
|
||||
locales \
|
||||
&& add-apt-repository -y ppa:ubuntu-toolchain-r/test \
|
||||
&& add-apt-repository -y ppa:cartodb/postgresql-10 \
|
||||
&& add-apt-repository -y ppa:cartodb/gis \
|
||||
&& curl -o- https://raw.githubusercontent.com/creationix/nvm/v0.34.0/install.sh | bash \
|
||||
&& . ~/.nvm/nvm.sh \
|
||||
&& locale-gen en_US.UTF-8 \
|
||||
&& update-locale LANG=en_US.UTF-8
|
||||
|
||||
RUN set -ex \
|
||||
&& apt-get update \
|
||||
&& apt-get install -y \
|
||||
g++-4.9 \
|
||||
gcc-4.9 \
|
||||
git \
|
||||
libcairo2-dev \
|
||||
libgdal-dev \
|
||||
libgdal1i \
|
||||
libgdal20 \
|
||||
libgeos-dev \
|
||||
libgif-dev \
|
||||
libjpeg8-dev \
|
||||
libjson-c-dev \
|
||||
libpango1.0-dev \
|
||||
libpixman-1-dev \
|
||||
libproj-dev \
|
||||
libprotobuf-c-dev \
|
||||
libxml2-dev \
|
||||
gdal-bin \
|
||||
make \
|
||||
nodejs \
|
||||
protobuf-c-compiler \
|
||||
pkg-config \
|
||||
wget \
|
||||
zip \
|
||||
postgresql-10 \
|
||||
postgresql-10-plproxy \
|
||||
postgis=2.4.4.6+carto-1 \
|
||||
postgresql-10-postgis-2.4=2.4.4.6+carto-1 \
|
||||
postgresql-10-postgis-2.4-scripts=2.4.4.6+carto-1 \
|
||||
postgresql-10-postgis-scripts=2.4.4.6+carto-1 \
|
||||
postgresql-client-10 \
|
||||
postgresql-client-common \
|
||||
postgresql-common \
|
||||
postgresql-contrib \
|
||||
postgresql-plpython-10 \
|
||||
postgresql-server-dev-10 \
|
||||
&& wget http://download.redis.io/releases/redis-4.0.8.tar.gz \
|
||||
&& tar xvzf redis-4.0.8.tar.gz \
|
||||
&& cd redis-4.0.8 \
|
||||
&& make \
|
||||
&& make install \
|
||||
&& cd .. \
|
||||
&& rm redis-4.0.8.tar.gz \
|
||||
&& rm -R redis-4.0.8 \
|
||||
&& apt-get purge -y wget protobuf-c-compiler \
|
||||
&& apt-get autoremove -y
|
||||
|
||||
# Configure PostgreSQL
|
||||
RUN set -ex \
|
||||
&& echo "listen_addresses='*'" >> /etc/postgresql/10/main/postgresql.conf \
|
||||
&& echo "local all all trust" > /etc/postgresql/10/main/pg_hba.conf \
|
||||
&& echo "host all all 0.0.0.0/0 trust" >> /etc/postgresql/10/main/pg_hba.conf \
|
||||
&& echo "host all all ::1/128 trust" >> /etc/postgresql/10/main/pg_hba.conf \
|
||||
&& /etc/init.d/postgresql start \
|
||||
&& createdb template_postgis \
|
||||
&& createuser publicuser \
|
||||
&& psql -c "CREATE EXTENSION postgis" template_postgis \
|
||||
&& /etc/init.d/postgresql stop
|
||||
|
||||
WORKDIR /srv
|
||||
EXPOSE 5858
|
||||
|
||||
COPY ./scripts/nodejs-install.sh /src/nodejs-install.sh
|
||||
RUN chmod 777 /src/nodejs-install.sh
|
||||
CMD /src/nodejs-install.sh
|
||||
|
||||
88
docker/Dockerfile-nodejs10-xenial-pg101:postgis-2.4.4.5
Normal file
88
docker/Dockerfile-nodejs10-xenial-pg101:postgis-2.4.4.5
Normal file
@@ -0,0 +1,88 @@
|
||||
FROM ubuntu:xenial
|
||||
|
||||
# Use UTF8 to avoid encoding problems with pgsql
|
||||
ENV LANG C.UTF-8
|
||||
ENV NPROCS 1
|
||||
ENV JOBS 1
|
||||
ENV CXX g++-4.9
|
||||
ENV PGUSER postgres
|
||||
|
||||
# Add external repos
|
||||
RUN set -ex \
|
||||
&& apt-get update \
|
||||
&& apt-get install -y \
|
||||
curl \
|
||||
software-properties-common \
|
||||
locales \
|
||||
&& add-apt-repository -y ppa:ubuntu-toolchain-r/test \
|
||||
&& add-apt-repository -y ppa:cartodb/postgresql-10 \
|
||||
&& add-apt-repository -y ppa:cartodb/gis \
|
||||
&& curl -sL https://deb.nodesource.com/setup_10.x | bash \
|
||||
&& locale-gen en_US.UTF-8 \
|
||||
&& update-locale LANG=en_US.UTF-8
|
||||
|
||||
RUN set -ex \
|
||||
&& apt-get update \
|
||||
&& apt-get install -y \
|
||||
g++-4.9 \
|
||||
gcc-4.9 \
|
||||
git \
|
||||
libcairo2-dev \
|
||||
libgdal-dev \
|
||||
libgdal1i \
|
||||
libgdal20 \
|
||||
libgeos-dev \
|
||||
libgif-dev \
|
||||
libjpeg8-dev \
|
||||
libjson-c-dev \
|
||||
libpango1.0-dev \
|
||||
libpixman-1-dev \
|
||||
libproj-dev \
|
||||
libprotobuf-c-dev \
|
||||
libxml2-dev \
|
||||
gdal-bin \
|
||||
make \
|
||||
nodejs \
|
||||
protobuf-c-compiler \
|
||||
pkg-config \
|
||||
wget \
|
||||
zip \
|
||||
postgresql-10 \
|
||||
postgresql-10-plproxy \
|
||||
postgis=2.4.4.5+carto-1 \
|
||||
postgresql-10-postgis-2.4=2.4.4.5+carto-1 \
|
||||
postgresql-10-postgis-2.4-scripts=2.4.4.5+carto-1 \
|
||||
postgresql-10-postgis-scripts=2.4.4.5+carto-1 \
|
||||
postgresql-client-10 \
|
||||
postgresql-client-common \
|
||||
postgresql-common \
|
||||
postgresql-contrib \
|
||||
postgresql-plpython-10 \
|
||||
postgresql-server-dev-10 \
|
||||
&& wget http://download.redis.io/releases/redis-4.0.8.tar.gz \
|
||||
&& tar xvzf redis-4.0.8.tar.gz \
|
||||
&& cd redis-4.0.8 \
|
||||
&& make \
|
||||
&& make install \
|
||||
&& cd .. \
|
||||
&& rm redis-4.0.8.tar.gz \
|
||||
&& rm -R redis-4.0.8 \
|
||||
&& apt-get purge -y wget protobuf-c-compiler \
|
||||
&& apt-get autoremove -y
|
||||
|
||||
# Configure PostgreSQL
|
||||
RUN set -ex \
|
||||
&& echo "listen_addresses='*'" >> /etc/postgresql/10/main/postgresql.conf \
|
||||
&& echo "local all all trust" > /etc/postgresql/10/main/pg_hba.conf \
|
||||
&& echo "host all all 0.0.0.0/0 trust" >> /etc/postgresql/10/main/pg_hba.conf \
|
||||
&& echo "host all all ::1/128 trust" >> /etc/postgresql/10/main/pg_hba.conf \
|
||||
&& /etc/init.d/postgresql start \
|
||||
&& createdb template_postgis \
|
||||
&& createuser publicuser \
|
||||
&& psql -c "CREATE EXTENSION postgis" template_postgis \
|
||||
&& /etc/init.d/postgresql stop
|
||||
|
||||
WORKDIR /srv
|
||||
EXPOSE 5858
|
||||
|
||||
CMD /etc/init.d/postgresql start
|
||||
89
docker/Dockerfile-nodejs6-xenial-pg101:postgis-2.4
Normal file
89
docker/Dockerfile-nodejs6-xenial-pg101:postgis-2.4
Normal file
@@ -0,0 +1,89 @@
|
||||
FROM ubuntu:xenial
|
||||
|
||||
# Use UTF8 to avoid encoding problems with pgsql
|
||||
ENV LANG C.UTF-8
|
||||
ENV NPROCS 1
|
||||
ENV JOBS 1
|
||||
ENV CXX g++-4.9
|
||||
ENV PGUSER postgres
|
||||
|
||||
# Add external repos
|
||||
RUN set -ex \
|
||||
&& apt-get update \
|
||||
&& apt-get install -y \
|
||||
curl \
|
||||
software-properties-common \
|
||||
locales \
|
||||
&& add-apt-repository -y ppa:ubuntu-toolchain-r/test \
|
||||
&& add-apt-repository -y ppa:cartodb/postgresql-10 \
|
||||
&& add-apt-repository -y ppa:cartodb/gis \
|
||||
&& curl -sL https://deb.nodesource.com/setup_6.x | bash \
|
||||
&& locale-gen en_US.UTF-8 \
|
||||
&& update-locale LANG=en_US.UTF-8
|
||||
|
||||
# Install dependencies and PostGIS 2.4 from sources
|
||||
RUN set -ex \
|
||||
&& apt-get update \
|
||||
&& apt-get install -y \
|
||||
g++-4.9 \
|
||||
gcc-4.9 \
|
||||
git \
|
||||
libcairo2-dev \
|
||||
libgdal-dev \
|
||||
libgdal1i \
|
||||
libgdal20 \
|
||||
libgeos-dev \
|
||||
libgif-dev \
|
||||
libjpeg8-dev \
|
||||
libjson-c-dev \
|
||||
libpango1.0-dev \
|
||||
libpixman-1-dev \
|
||||
libproj-dev \
|
||||
libprotobuf-c-dev \
|
||||
libxml2-dev \
|
||||
gdal-bin \
|
||||
make \
|
||||
nodejs \
|
||||
protobuf-c-compiler \
|
||||
pkg-config \
|
||||
wget \
|
||||
zip \
|
||||
postgresql-10 \
|
||||
postgresql-10-plproxy \
|
||||
postgresql-10-postgis-2.4 \
|
||||
postgresql-10-postgis-2.4-scripts \
|
||||
postgresql-10-postgis-scripts \
|
||||
postgresql-client-10 \
|
||||
postgresql-client-common \
|
||||
postgresql-common \
|
||||
postgresql-contrib \
|
||||
postgresql-plpython-10 \
|
||||
postgresql-server-dev-10 \
|
||||
postgis \
|
||||
&& wget http://download.redis.io/releases/redis-4.0.8.tar.gz \
|
||||
&& tar xvzf redis-4.0.8.tar.gz \
|
||||
&& cd redis-4.0.8 \
|
||||
&& make \
|
||||
&& make install \
|
||||
&& cd .. \
|
||||
&& rm redis-4.0.8.tar.gz \
|
||||
&& rm -R redis-4.0.8 \
|
||||
&& apt-get purge -y wget protobuf-c-compiler \
|
||||
&& apt-get autoremove -y
|
||||
|
||||
# Configure PostgreSQL
|
||||
RUN set -ex \
|
||||
&& echo "listen_addresses='*'" >> /etc/postgresql/10/main/postgresql.conf \
|
||||
&& echo "local all all trust" > /etc/postgresql/10/main/pg_hba.conf \
|
||||
&& echo "host all all 0.0.0.0/0 trust" >> /etc/postgresql/10/main/pg_hba.conf \
|
||||
&& echo "host all all ::1/128 trust" >> /etc/postgresql/10/main/pg_hba.conf \
|
||||
&& /etc/init.d/postgresql start \
|
||||
&& createdb template_postgis \
|
||||
&& createuser publicuser \
|
||||
&& psql -c "CREATE EXTENSION postgis" template_postgis \
|
||||
&& /etc/init.d/postgresql stop
|
||||
|
||||
WORKDIR /srv
|
||||
EXPOSE 5858
|
||||
|
||||
CMD /etc/init.d/postgresql start
|
||||
88
docker/Dockerfile-nodejs6-xenial-pg101:postgis-2.4.4.5
Normal file
88
docker/Dockerfile-nodejs6-xenial-pg101:postgis-2.4.4.5
Normal file
@@ -0,0 +1,88 @@
|
||||
FROM ubuntu:xenial
|
||||
|
||||
# Use UTF8 to avoid encoding problems with pgsql
|
||||
ENV LANG C.UTF-8
|
||||
ENV NPROCS 1
|
||||
ENV JOBS 1
|
||||
ENV CXX g++-4.9
|
||||
ENV PGUSER postgres
|
||||
|
||||
# Add external repos
|
||||
RUN set -ex \
|
||||
&& apt-get update \
|
||||
&& apt-get install -y \
|
||||
curl \
|
||||
software-properties-common \
|
||||
locales \
|
||||
&& add-apt-repository -y ppa:ubuntu-toolchain-r/test \
|
||||
&& add-apt-repository -y ppa:cartodb/postgresql-10 \
|
||||
&& add-apt-repository -y ppa:cartodb/gis \
|
||||
&& curl -sL https://deb.nodesource.com/setup_6.x | bash \
|
||||
&& locale-gen en_US.UTF-8 \
|
||||
&& update-locale LANG=en_US.UTF-8
|
||||
|
||||
RUN set -ex \
|
||||
&& apt-get update \
|
||||
&& apt-get install -y \
|
||||
g++-4.9 \
|
||||
gcc-4.9 \
|
||||
git \
|
||||
libcairo2-dev \
|
||||
libgdal-dev \
|
||||
libgdal1i \
|
||||
libgdal20 \
|
||||
libgeos-dev \
|
||||
libgif-dev \
|
||||
libjpeg8-dev \
|
||||
libjson-c-dev \
|
||||
libpango1.0-dev \
|
||||
libpixman-1-dev \
|
||||
libproj-dev \
|
||||
libprotobuf-c-dev \
|
||||
libxml2-dev \
|
||||
gdal-bin \
|
||||
make \
|
||||
nodejs \
|
||||
protobuf-c-compiler \
|
||||
pkg-config \
|
||||
wget \
|
||||
zip \
|
||||
postgresql-10 \
|
||||
postgresql-10-plproxy \
|
||||
postgis=2.4.4.5+carto-1 \
|
||||
postgresql-10-postgis-2.4=2.4.4.5+carto-1 \
|
||||
postgresql-10-postgis-2.4-scripts=2.4.4.5+carto-1 \
|
||||
postgresql-10-postgis-scripts=2.4.4.5+carto-1 \
|
||||
postgresql-client-10 \
|
||||
postgresql-client-common \
|
||||
postgresql-common \
|
||||
postgresql-contrib \
|
||||
postgresql-plpython-10 \
|
||||
postgresql-server-dev-10 \
|
||||
&& wget http://download.redis.io/releases/redis-4.0.8.tar.gz \
|
||||
&& tar xvzf redis-4.0.8.tar.gz \
|
||||
&& cd redis-4.0.8 \
|
||||
&& make \
|
||||
&& make install \
|
||||
&& cd .. \
|
||||
&& rm redis-4.0.8.tar.gz \
|
||||
&& rm -R redis-4.0.8 \
|
||||
&& apt-get purge -y wget protobuf-c-compiler \
|
||||
&& apt-get autoremove -y
|
||||
|
||||
# Configure PostgreSQL
|
||||
RUN set -ex \
|
||||
&& echo "listen_addresses='*'" >> /etc/postgresql/10/main/postgresql.conf \
|
||||
&& echo "local all all trust" > /etc/postgresql/10/main/pg_hba.conf \
|
||||
&& echo "host all all 0.0.0.0/0 trust" >> /etc/postgresql/10/main/pg_hba.conf \
|
||||
&& echo "host all all ::1/128 trust" >> /etc/postgresql/10/main/pg_hba.conf \
|
||||
&& /etc/init.d/postgresql start \
|
||||
&& createdb template_postgis \
|
||||
&& createuser publicuser \
|
||||
&& psql -c "CREATE EXTENSION postgis" template_postgis \
|
||||
&& /etc/init.d/postgresql stop
|
||||
|
||||
WORKDIR /srv
|
||||
EXPOSE 5858
|
||||
|
||||
CMD /etc/init.d/postgresql start
|
||||
@@ -11,13 +11,13 @@ https://hub.docker.com/r/carto/
|
||||
|
||||
## Update image
|
||||
- Edit the docker image file with your desired changes
|
||||
- Build image:
|
||||
- Build image:
|
||||
- `docker build -t carto/IMAGE -f docker/DOCKER_FILE docker/`
|
||||
|
||||
- Upload to docker hub:
|
||||
- Login into docker hub:
|
||||
- Login into docker hub:
|
||||
- `docker login`
|
||||
- Create tag:
|
||||
- Create tag:
|
||||
- `docker tag carto/IMAGE carto/IMAGE`
|
||||
- Upload:
|
||||
- Upload:
|
||||
- `docker push carto/IMAGE`
|
||||
|
||||
13
docker/scripts/nodejs-install.sh
Normal file
13
docker/scripts/nodejs-install.sh
Normal file
@@ -0,0 +1,13 @@
|
||||
#!/bin/bash
|
||||
|
||||
export NVM_DIR="$HOME/.nvm"
|
||||
[ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh"
|
||||
|
||||
if [ -z $NODEJS_VERSION ]; then
|
||||
NODEJS_VERSION="10"
|
||||
NODEJS_VERSION_OPTIONS="--lts"
|
||||
fi
|
||||
|
||||
nvm install $NODEJS_VERSION $NODEJS_VERSION_OPTIONS
|
||||
nvm alias default $NODEJS_VERSION
|
||||
nvm use default
|
||||
@@ -134,6 +134,10 @@ of the original dataset applying three different aggregate functions.
|
||||
|
||||
> Note that you can use the original column names as names of the result, but all the result column names must be unique. In particular, the names `cartodb_id`, `the_geom`, `the_geom_webmercator` and `_cdb_feature_count` cannot be used for aggregated columns, as they correspond to columns always present in the result.
|
||||
|
||||
#### Limitations:
|
||||
* The iso text format does not admit `starting` or `count` parameters
|
||||
* Cyclic units (day of the week, etc.) don't admit `count` or `starting` either.
|
||||
|
||||
### `resolution`
|
||||
|
||||
Defines the cell-size of the spatial aggregation grid. This is equivalent to the [CartoCSS `-torque-resolution`](https://carto.com/docs/carto-engine/cartocss/properties-for-torque/#-torque-resolution-float) property of Torque maps.
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
const { Router: router } = require('express');
|
||||
|
||||
const RedisPool = require('redis-mpool');
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
const PSQL = require('cartodb-psql');
|
||||
const cleanUpQueryParams = require('../middlewares/clean-up-query-params');
|
||||
const credentials = require('../middlewares/credentials');
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
const layergroupToken = require('../middlewares/layergroup-token');
|
||||
const cleanUpQueryParams = require('../middlewares/clean-up-query-params');
|
||||
const credentials = require('../middlewares/credentials');
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
const windshaft = require('windshaft');
|
||||
const MapConfig = windshaft.model.MapConfig;
|
||||
const Datasource = windshaft.model.Datasource;
|
||||
@@ -152,17 +154,17 @@ function prepareAdapterMapConfig (mapConfigAdapter) {
|
||||
}
|
||||
};
|
||||
|
||||
mapConfigAdapter.getMapConfig(user,
|
||||
requestMapConfig,
|
||||
params,
|
||||
context,
|
||||
mapConfigAdapter.getMapConfig(user,
|
||||
requestMapConfig,
|
||||
params,
|
||||
context,
|
||||
(err, requestMapConfig, stats = { overviewsAddedToMapconfig : false }) => {
|
||||
req.profiler.done('anonymous.getMapConfig');
|
||||
|
||||
|
||||
stats.mapType = 'anonymous';
|
||||
req.profiler.add(stats);
|
||||
|
||||
|
||||
|
||||
if (err) {
|
||||
return next(err);
|
||||
}
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
const layergroupToken = require('../middlewares/layergroup-token');
|
||||
const cleanUpQueryParams = require('../middlewares/clean-up-query-params');
|
||||
const credentials = require('../middlewares/credentials');
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
const layergroupToken = require('../middlewares/layergroup-token');
|
||||
const cleanUpQueryParams = require('../middlewares/clean-up-query-params');
|
||||
const credentials = require('../middlewares/credentials');
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
const { Router: router } = require('express');
|
||||
|
||||
const AnalysisLayergroupController = require('./analysis-layergroup-controller');
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
const layergroupToken = require('../middlewares/layergroup-token');
|
||||
const coordinates = require('../middlewares/coordinates');
|
||||
const cleanUpQueryParams = require('../middlewares/clean-up-query-params');
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
const cleanUpQueryParams = require('../middlewares/clean-up-query-params');
|
||||
const credentials = require('../middlewares/credentials');
|
||||
const dbConnSetup = require('../middlewares/db-conn-setup');
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
const layergroupToken = require('../middlewares/layergroup-token');
|
||||
const coordinates = require('../middlewares/coordinates');
|
||||
const cleanUpQueryParams = require('../middlewares/clean-up-query-params');
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
const _ = require('underscore');
|
||||
|
||||
module.exports = function augmentLayergroupData () {
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
module.exports = function authorize (authBackend) {
|
||||
return function authorizeMiddleware (req, res, next) {
|
||||
authBackend.authorize(req, res, (err, authorized) => {
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
module.exports = function setCacheChannelHeader () {
|
||||
return function setCacheChannelHeaderMiddleware (req, res, next) {
|
||||
if (req.method !== 'GET') {
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
const ONE_YEAR_IN_SECONDS = 60 * 60 * 24 * 365;
|
||||
|
||||
module.exports = function setCacheControlHeader ({ ttl = ONE_YEAR_IN_SECONDS, revalidate = false } = {}) {
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
module.exports = function checkJsonContentType () {
|
||||
return function checkJsonContentTypeMiddleware(req, res, next) {
|
||||
if (req.method === 'POST' && !req.is('application/json')) {
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
const VALID_IMAGE_FORMATS = ['png', 'jpg'];
|
||||
|
||||
module.exports = function checkStaticImageFormat () {
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
const _ = require('underscore');
|
||||
|
||||
// Whitelist query parameters and attach format
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
const positiveIntegerNumberRegExp = /^\d+$/;
|
||||
const integerNumberRegExp = /^-?\d+$/;
|
||||
const invalidZoomMessage = function (zoom) {
|
||||
|
||||
@@ -1,9 +1,12 @@
|
||||
'use strict';
|
||||
|
||||
module.exports = function cors () {
|
||||
return function corsMiddleware (req, res, next) {
|
||||
const headers = [
|
||||
'X-Requested-With',
|
||||
'X-Prototype-Version',
|
||||
'X-CSRF-Token'
|
||||
'X-CSRF-Token',
|
||||
'Authorization'
|
||||
];
|
||||
|
||||
if (req.method === 'OPTIONS') {
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
const basicAuth = require('basic-auth');
|
||||
|
||||
module.exports = function credentials () {
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
const _ = require('underscore');
|
||||
|
||||
module.exports = function dbConnSetup (pgConnection) {
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
const _ = require('underscore');
|
||||
const debug = require('debug')('windshaft:cartodb:error-middleware');
|
||||
|
||||
@@ -7,7 +9,7 @@ module.exports = function errorMiddleware (/* options */) {
|
||||
// jshint maxcomplexity:9
|
||||
var allErrors = Array.isArray(err) ? err : [err];
|
||||
|
||||
allErrors = populateTimeoutErrors(allErrors);
|
||||
allErrors = populateLimitErrors(allErrors);
|
||||
|
||||
const label = err.label || 'UNKNOWN';
|
||||
err = allErrors[0] || new Error(label);
|
||||
@@ -57,8 +59,22 @@ function getErrorTypes(error) {
|
||||
};
|
||||
}
|
||||
|
||||
function populateTimeoutErrors (errors) {
|
||||
function isMaxWaitingClientsError (err) {
|
||||
return err.message === 'max waitingClients count exceeded';
|
||||
}
|
||||
|
||||
function populateLimitErrors (errors) {
|
||||
return errors.map(function (error) {
|
||||
if (isMaxWaitingClientsError(error)) {
|
||||
error.message = 'You are over platform\'s limits: Max render capacity exceeded.' +
|
||||
' Contact CARTO support for more details.';
|
||||
error.type = 'limit';
|
||||
error.subtype = 'render-capacity';
|
||||
error.http_status = 429;
|
||||
|
||||
return error;
|
||||
}
|
||||
|
||||
const errorTypes = getErrorTypes(error);
|
||||
|
||||
if (isTimeoutError(errorTypes)) {
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
module.exports = function incrementMapViewCount (metadataBackend) {
|
||||
return function incrementMapViewCountMiddleware(req, res, next) {
|
||||
const { mapConfig, user } = res.locals;
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
module.exports = function initProfiler (isTemplateInstantiation) {
|
||||
const operation = isTemplateInstantiation ? 'instance_template' : 'createmap';
|
||||
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
module.exports = function initializeStatusCode () {
|
||||
return function initializeStatusCodeMiddleware (req, res, next) {
|
||||
if (req.method !== 'OPTIONS') {
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
module.exports = function setLastModifiedHeader () {
|
||||
return function setLastModifiedHeaderMiddleware(req, res, next) {
|
||||
if (req.method !== 'GET') {
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
module.exports = function setLastUpdatedTimeToLayergroup () {
|
||||
return function setLastUpdatedTimeToLayergroupMiddleware (req, res, next) {
|
||||
const { mapConfigProvider, analysesResults } = res.locals;
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
module.exports = function setLayerStats (pgConnection, statsBackend) {
|
||||
return function setLayerStatsMiddleware(req, res, next) {
|
||||
const { user, mapConfig } = res.locals;
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
module.exports = function setLayergroupIdHeader (templateMaps, useTemplateHash) {
|
||||
return function setLayergroupIdHeaderMiddleware (req, res, next) {
|
||||
const { user, template } = res.locals;
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
module.exports = function setMetadataToLayergroup (layergroupMetadata, includeQuery) {
|
||||
return function setMetadataToLayergroupMiddleware (req, res, next) {
|
||||
const { user, mapConfig, analysesResults = [], context, api_key: userApiKey } = res.locals;
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
const LayergroupToken = require('../../models/layergroup-token');
|
||||
const authErrorMessageTemplate = function (signer, user) {
|
||||
return `Cannot use map signature of user "${signer}" on db of user "${user}"`;
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
module.exports = function logger (options) {
|
||||
if (!global.log4js || !options.log_format) {
|
||||
return function dummyLoggerMiddleware (req, res, next) {
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
const LZMA = require('lzma').LZMA;
|
||||
|
||||
module.exports = function lzma () {
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
module.exports = function mapError (options) {
|
||||
const { addContext = false, label = 'MAPS CONTROLLER' } = options;
|
||||
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
const MapStoreMapConfigProvider = require('../../models/mapconfig/provider/map-store-provider');
|
||||
|
||||
module.exports = function createMapStoreMapConfigProvider (
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
module.exports = function getNamedMapProvider ({ namedMapProviderCache, label, forcedFormat = null }) {
|
||||
return function getNamedMapProviderMiddleware (req, res, next) {
|
||||
const { user, token, cache_buster, api_key } = res.locals;
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
module.exports = function noop () {
|
||||
return function noopMiddleware (req, res, next) {
|
||||
next();
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
module.exports = function sendResponse () {
|
||||
return function sendResponseMiddleware (req, res) {
|
||||
req.profiler.done('res');
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
const os = require('os');
|
||||
|
||||
module.exports = function servedByHostHeader () {
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
const Profiler = require('../../stats/profiler_proxy');
|
||||
const debug = require('debug')('windshaft:cartodb:stats');
|
||||
const onHeaders = require('on-headers');
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
const NamedMapsCacheEntry = require('../../cache/model/named_maps_entry');
|
||||
const NamedMapMapConfigProvider = require('../../models/mapconfig/provider/named-map-provider');
|
||||
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
module.exports = function syntaxError () {
|
||||
return function syntaxErrorMiddleware (err, req, res, next) {
|
||||
if (err.name === 'SyntaxError') {
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
const CdbRequest = require('../../models/cdb_request');
|
||||
|
||||
module.exports = function user () {
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
const fs = require('fs');
|
||||
const timeoutErrorVectorTile = fs.readFileSync(__dirname + '/../../../../assets/render-timeout-fallback.mvt');
|
||||
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
const { templateName } = require('../../backends/template_maps');
|
||||
const credentials = require('../middlewares/credentials');
|
||||
const rateLimit = require('../middlewares/rate-limit');
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
const cleanUpQueryParams = require('../middlewares/clean-up-query-params');
|
||||
const credentials = require('../middlewares/credentials');
|
||||
const dbConnSetup = require('../middlewares/db-conn-setup');
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
const { Router: router } = require('express');
|
||||
|
||||
const NamedMapController = require('./named-template-controller');
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
const coordinates = require('../middlewares/coordinates');
|
||||
const cleanUpQueryParams = require('../middlewares/clean-up-query-params');
|
||||
const credentials = require('../middlewares/credentials');
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
var PSQL = require('cartodb-psql');
|
||||
|
||||
function AnalysisStatusBackend() {
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {PgConnection} pgConnection
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
var _ = require('underscore');
|
||||
var PSQL = require('cartodb-psql');
|
||||
var BBoxFilter = require('../models/filter/bbox');
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
var _ = require('underscore');
|
||||
var AnalysisFilter = require('../models/filter/analysis');
|
||||
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
function EmptyLayerStats(types) {
|
||||
this._types = types || {};
|
||||
}
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
var LayerStats = require('./layer-stats');
|
||||
var EmptyLayerStats = require('./empty-layer-stats');
|
||||
var MapnikLayerStats = require('./mapnik-layer-stats');
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
var queue = require('queue-async');
|
||||
|
||||
function LayerStats(layerStatsIterator) {
|
||||
|
||||
@@ -1,5 +1,8 @@
|
||||
'use strict';
|
||||
|
||||
const queryUtils = require('../../utils/query-utils');
|
||||
const AggregationMapConfig = require('../../models/aggregation/aggregation-mapconfig');
|
||||
const aggregationQuery = require('../../models/aggregation/aggregation-query');
|
||||
|
||||
function MapnikLayerStats () {
|
||||
this._types = {
|
||||
@@ -19,6 +22,9 @@ function columnAggregations(field) {
|
||||
if (field.type === 'date') { // TODO other types too?
|
||||
return ['min', 'max'];
|
||||
}
|
||||
if (field.type === 'timeDimension') {
|
||||
return ['min', 'max'];
|
||||
}
|
||||
return [];
|
||||
}
|
||||
|
||||
@@ -67,13 +73,13 @@ function _geometryType(ctx) {
|
||||
const geometryColumn = AggregationMapConfig.getAggregationGeometryColumn();
|
||||
const sqlQuery = _getSQL(ctx, sql => queryUtils.getQueryGeometryType(sql, geometryColumn));
|
||||
return queryUtils.queryPromise(ctx.dbConnection, sqlQuery)
|
||||
.then(res => ({ geometryType: res.rows[0].geom_type }));
|
||||
.then(res => ({ geometryType: (res.rows[0] || {}).geom_type }));
|
||||
}
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
function _columns(ctx) {
|
||||
if (ctx.metaOptions.columns || ctx.metaOptions.columnStats) {
|
||||
if (ctx.metaOptions.columns || ctx.metaOptions.columnStats || ctx.metaOptions.dimensions) {
|
||||
// note: post-aggregation columns are in layer.options.columns when aggregation is present
|
||||
return queryUtils.queryPromise(ctx.dbConnection, _getSQL(ctx, sql => queryUtils.getQueryLimited(sql, 0)))
|
||||
.then(res => formatResultFields(ctx.dbConnection, res.fields));
|
||||
@@ -137,51 +143,89 @@ function _sample(ctx, numRows) {
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
function _columnStats(ctx, columns) {
|
||||
function _columnsMetadataRequired(options) {
|
||||
// We need determine the columns of a query
|
||||
// if either column stats or dimension stats are required,
|
||||
// since we'll ultimately use the same query to fetch both
|
||||
return options.columnStats || options.dimensions;
|
||||
}
|
||||
|
||||
function _columnStats(ctx, columns, dimensions) {
|
||||
if (!columns) {
|
||||
return Promise.resolve();
|
||||
}
|
||||
if (ctx.metaOptions.columnStats) {
|
||||
if (_columnsMetadataRequired(ctx.metaOptions)) {
|
||||
let queries = [];
|
||||
let aggr = [];
|
||||
queries.push(new Promise(resolve => resolve(columns))); // add columns as first result
|
||||
Object.keys(columns).forEach(name => {
|
||||
aggr = aggr.concat(
|
||||
columnAggregations(columns[name])
|
||||
.map(fn => `${fn}("${name}") AS "${name}_${fn}"`)
|
||||
);
|
||||
if (columns[name].type === 'string') {
|
||||
const topN = ctx.metaOptions.columnStats.topCategories || 1024;
|
||||
const includeNulls = ctx.metaOptions.columnStats.hasOwnProperty('includeNulls') ?
|
||||
ctx.metaOptions.columnStats.includeNulls :
|
||||
true;
|
||||
|
||||
// TODO: ctx.metaOptions.columnStats.maxCategories
|
||||
// => use PG stats to dismiss columns with more distinct values
|
||||
queries.push(
|
||||
queryUtils.queryPromise(
|
||||
ctx.dbConnection,
|
||||
_getSQL(ctx, sql => queryUtils.getQueryTopCategories(sql, name, topN, includeNulls))
|
||||
).then(res => ({ [name]: { categories: res.rows } }))
|
||||
if (ctx.metaOptions.columnStats) {
|
||||
queries.push(new Promise(resolve => resolve({ columns }))); // add columns as first result
|
||||
Object.keys(columns).forEach(name => {
|
||||
aggr = aggr.concat(
|
||||
columnAggregations(columns[name])
|
||||
.map(fn => `${fn}("${name}") AS "${name}_${fn}"`)
|
||||
);
|
||||
}
|
||||
});
|
||||
if (columns[name].type === 'string') {
|
||||
const topN = ctx.metaOptions.columnStats.topCategories || 1024;
|
||||
const includeNulls = ctx.metaOptions.columnStats.hasOwnProperty('includeNulls') ?
|
||||
ctx.metaOptions.columnStats.includeNulls :
|
||||
true;
|
||||
|
||||
// TODO: ctx.metaOptions.columnStats.maxCategories
|
||||
// => use PG stats to dismiss columns with more distinct values
|
||||
queries.push(
|
||||
queryUtils.queryPromise(
|
||||
ctx.dbConnection,
|
||||
_getSQL(ctx, sql => queryUtils.getQueryTopCategories(sql, name, topN, includeNulls))
|
||||
).then(res => ({ columns: { [name]: { categories: res.rows } } }))
|
||||
);
|
||||
}
|
||||
});
|
||||
}
|
||||
const dimensionsStats = {};
|
||||
let dimensionsInfo = {};
|
||||
if (ctx.metaOptions.dimensions && dimensions) {
|
||||
dimensionsInfo = aggregationQuery.infoForOptions({ dimensions });
|
||||
Object.keys(dimensionsInfo).forEach(dimName => {
|
||||
const info = dimensionsInfo[dimName];
|
||||
if (info.type === 'timeDimension') {
|
||||
dimensionsStats[dimName] = {
|
||||
params: info.params
|
||||
};
|
||||
aggr = aggr.concat(
|
||||
columnAggregations(info).map(fn => `${fn}(${info.sql}) AS "${dimName}_${fn}"`)
|
||||
);
|
||||
}
|
||||
});
|
||||
}
|
||||
queries.push(
|
||||
queryUtils.queryPromise(
|
||||
ctx.dbConnection,
|
||||
_getSQL(ctx, sql => `SELECT ${aggr.join(',')} FROM (${sql}) AS __cdb_query`)
|
||||
).then(res => {
|
||||
let stats = {};
|
||||
let stats = { columns: {}, dimensions: {} };
|
||||
Object.keys(columns).forEach(name => {
|
||||
stats[name] = {};
|
||||
stats.columns[name] = {};
|
||||
columnAggregations(columns[name]).forEach(fn => {
|
||||
stats[name][fn] = res.rows[0][`${name}_${fn}`];
|
||||
stats.columns[name][fn] = res.rows[0][`${name}_${fn}`];
|
||||
});
|
||||
});
|
||||
Object.keys(dimensionsInfo).forEach(name => {
|
||||
stats.dimensions[name] = stats.dimensions[name] || Object.assign({}, dimensionsStats[name]);
|
||||
let type = null;
|
||||
columnAggregations(dimensionsInfo[name]).forEach(fn => {
|
||||
type = type ||
|
||||
fieldTypeSafe(ctx.dbConnection, res.fields.find(f => f.name === `${name}_${fn}`));
|
||||
stats.dimensions[name][fn] = res.rows[0][`${name}_${fn}`];
|
||||
});
|
||||
stats.dimensions[name].type = type;
|
||||
});
|
||||
return stats;
|
||||
})
|
||||
);
|
||||
return Promise.all(queries).then(results => ({ columns: mergeColumns(results) }));
|
||||
return Promise.all(queries).then(results => ({
|
||||
columns: mergeColumns(results.map(r => r.columns)),
|
||||
dimensions: mergeColumns(results.map( r => r.dimensions))
|
||||
}));
|
||||
}
|
||||
return Promise.resolve({ columns });
|
||||
}
|
||||
@@ -211,19 +255,17 @@ function fieldType(cname) {
|
||||
return tname;
|
||||
}
|
||||
|
||||
function fieldTypeSafe(dbConnection, field) {
|
||||
const cname = dbConnection.typeName(field.dataTypeID);
|
||||
return cname ? fieldType(cname) : `unknown(${field.dataTypeID})`;
|
||||
}
|
||||
|
||||
// columns are returned as an object { columnName1: { type1: ...}, ..}
|
||||
// for consistency with SQL API
|
||||
function formatResultFields(dbConnection, fields = []) {
|
||||
let nfields = {};
|
||||
for (let field of fields) {
|
||||
const cname = dbConnection.typeName(field.dataTypeID);
|
||||
let tname;
|
||||
if ( ! cname ) {
|
||||
tname = 'unknown(' + field.dataTypeID + ')';
|
||||
} else {
|
||||
tname = fieldType(cname);
|
||||
}
|
||||
nfields[field.name] = { type: tname };
|
||||
nfields[field.name] = { type: fieldTypeSafe(dbConnection, field) };
|
||||
}
|
||||
return nfields;
|
||||
}
|
||||
@@ -237,7 +279,7 @@ function (layer, dbConnection, callback) {
|
||||
dbConnection,
|
||||
preQuery,
|
||||
aggrQuery,
|
||||
metaOptions: layer.options.metadata || {}
|
||||
metaOptions: layer.options.metadata || {},
|
||||
};
|
||||
|
||||
// TODO: could save some queries if queryUtils.getAggregationMetadata() has been used and kept somewhere
|
||||
@@ -248,6 +290,8 @@ function (layer, dbConnection, callback) {
|
||||
// TODO: add support for sample.exclude option by, in that case, forcing the columns query and
|
||||
// passing the results to the sample query function.
|
||||
|
||||
const dimensions = (layer.options.aggregation || {}).dimensions;
|
||||
|
||||
Promise.all([
|
||||
_estimatedFeatureCount(ctx).then(
|
||||
({ estimatedFeatureCount }) => _sample(ctx, estimatedFeatureCount)
|
||||
@@ -256,9 +300,10 @@ function (layer, dbConnection, callback) {
|
||||
_featureCount(ctx),
|
||||
_aggrFeatureCount(ctx),
|
||||
_geometryType(ctx),
|
||||
_columns(ctx).then(columns => _columnStats(ctx, columns))
|
||||
_columns(ctx).then(columns => _columnStats(ctx, columns, dimensions))
|
||||
]).then(results => {
|
||||
callback(null, mergeResults(results));
|
||||
results = mergeResults(results);
|
||||
callback(null, results);
|
||||
}).catch(error => {
|
||||
callback(error);
|
||||
});
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
function TorqueLayerStats() {
|
||||
this._types = {
|
||||
torque: true
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
const queryUtils = require('../utils/query-utils');
|
||||
|
||||
function OverviewsMetadataBackend(pgQueryRunner) {
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
var PSQL = require('cartodb-psql');
|
||||
var _ = require('underscore');
|
||||
const debug = require('debug')('cachechan');
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
var PSQL = require('cartodb-psql');
|
||||
|
||||
function PgQueryRunner(pgConnection) {
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
var layerStats = require('./layer-stats/factory');
|
||||
|
||||
function StatsBackend() {
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
function TablesExtentBackend(pgQueryRunner) {
|
||||
this.pgQueryRunner = pgQueryRunner;
|
||||
}
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
var crypto = require('crypto');
|
||||
var debug = require('debug')('windshaft:templates');
|
||||
var _ = require('underscore');
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
*
|
||||
* @param metadataBackend
|
||||
|
||||
2
lib/cartodb/cache/backend/fastly.js
vendored
2
lib/cartodb/cache/backend/fastly.js
vendored
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
var FastlyPurge = require('fastly-purge');
|
||||
|
||||
function FastlyCacheBackend(apiKey, serviceId) {
|
||||
|
||||
2
lib/cartodb/cache/backend/varnish_http.js
vendored
2
lib/cartodb/cache/backend/varnish_http.js
vendored
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
var request = require('request');
|
||||
|
||||
function VarnishHttpCacheBackend(host, port) {
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
var LruCache = require('lru-cache');
|
||||
|
||||
function LayergroupAffectedTables() {
|
||||
|
||||
2
lib/cartodb/cache/model/named_maps_entry.js
vendored
2
lib/cartodb/cache/model/named_maps_entry.js
vendored
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
var crypto = require('crypto');
|
||||
|
||||
function NamedMaps(owner, name) {
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
var _ = require('underscore');
|
||||
var dot = require('dot');
|
||||
var NamedMapMapConfigProvider = require('../models/mapconfig/provider/named-map-provider');
|
||||
|
||||
2
lib/cartodb/cache/surrogate_keys_cache.js
vendored
2
lib/cartodb/cache/surrogate_keys_cache.js
vendored
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
var queue = require('queue-async');
|
||||
|
||||
/**
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
const MapConfig = require('windshaft').model.MapConfig;
|
||||
const aggregationQuery = require('./aggregation-query');
|
||||
const aggregationValidator = require('./aggregation-validator');
|
||||
|
||||
@@ -1,5 +1,10 @@
|
||||
'use strict';
|
||||
|
||||
const timeDimension = require('./time-dimension');
|
||||
|
||||
const DEFAULT_PLACEMENT = 'point-sample';
|
||||
|
||||
|
||||
/**
|
||||
* Returns a template function (function that accepts template parameters and returns a string)
|
||||
* to generate an aggregation query.
|
||||
@@ -24,6 +29,16 @@ const templateForOptions = (options) => {
|
||||
return templateFn;
|
||||
};
|
||||
|
||||
function optionsToParams (options) {
|
||||
return {
|
||||
sourceQuery: options.query,
|
||||
res: 256/options.resolution,
|
||||
columns: options.columns,
|
||||
dimensions: options.dimensions,
|
||||
filters: options.filters
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates an aggregation query given the aggregation options:
|
||||
* - query
|
||||
@@ -38,16 +53,23 @@ const templateForOptions = (options) => {
|
||||
* When placement, columns or dimensions are specified, columns are aggregated as requested
|
||||
* (by default only _cdb_feature_count) and with the_geom_webmercator as defined by placement.
|
||||
*/
|
||||
const queryForOptions = (options) => templateForOptions(options)({
|
||||
sourceQuery: options.query,
|
||||
res: 256/options.resolution,
|
||||
columns: options.columns,
|
||||
dimensions: options.dimensions,
|
||||
filters: options.filters
|
||||
});
|
||||
const queryForOptions = (options) => templateForOptions(options)(optionsToParams(options));
|
||||
|
||||
module.exports = queryForOptions;
|
||||
|
||||
module.exports.infoForOptions = (options) => {
|
||||
const params = optionsToParams(options);
|
||||
const dimensions = {};
|
||||
dimensionNamesAndExpressions(params).forEach(([dimensionName, info]) => {
|
||||
dimensions[dimensionName] = {
|
||||
sql: info.sql,
|
||||
params: info.effectiveParams,
|
||||
type: info.type
|
||||
};
|
||||
});
|
||||
return dimensions;
|
||||
};
|
||||
|
||||
const SUPPORTED_AGGREGATE_FUNCTIONS = {
|
||||
'count': {
|
||||
sql: (column_name, params) => `count(${params.aggregated_column || '*'})`
|
||||
@@ -113,24 +135,56 @@ const aggregateColumnDefs = ctx => {
|
||||
|
||||
const aggregateDimensions = ctx => ctx.dimensions || {};
|
||||
|
||||
const dimensionNames = (ctx, table) => {
|
||||
let dimensions = aggregateDimensions(ctx);
|
||||
if (table) {
|
||||
return sep(Object.keys(dimensions).map(
|
||||
dimension_name => `${table}."${dimension_name}"`
|
||||
));
|
||||
const timeDimensionParameters = definition => {
|
||||
// definition.column should correspond to a wrapped date column
|
||||
const group = definition.group || {};
|
||||
return {
|
||||
time: `to_timestamp("${definition.column}")`,
|
||||
timezone: group.timezone || 'utc',
|
||||
units: group.units,
|
||||
count: group.count || 1,
|
||||
starting: group.starting,
|
||||
format: definition.format
|
||||
};
|
||||
};
|
||||
|
||||
// Adapt old-style dimension definitions for backwards compatibility
|
||||
const adaptDimensionDefinition = definition => {
|
||||
if (typeof(definition) === 'string') {
|
||||
return { column: definition };
|
||||
}
|
||||
return sep(Object.keys(dimensions).map(dimension_name => {
|
||||
return `"${dimension_name}"`;
|
||||
return definition;
|
||||
};
|
||||
|
||||
const dimensionExpression = definition => {
|
||||
if (definition.group) {
|
||||
// Currently only time dimensions are supported with parameters
|
||||
return Object.assign({ type: 'timeDimension' }, timeDimension(timeDimensionParameters(definition)));
|
||||
} else {
|
||||
return { sql: `"${definition.column}"` };
|
||||
}
|
||||
};
|
||||
|
||||
const dimensionNamesAndExpressions = (ctx) => {
|
||||
let dimensions = aggregateDimensions(ctx);
|
||||
return Object.keys(dimensions).map(dimensionName => {
|
||||
const dimension = adaptDimensionDefinition(dimensions[dimensionName]);
|
||||
const expression = dimensionExpression(dimension);
|
||||
return [dimensionName, expression];
|
||||
});
|
||||
};
|
||||
|
||||
const dimensionNames = (ctx, table) => {
|
||||
return sep(dimensionNamesAndExpressions(ctx).map(([dimensionName]) => {
|
||||
return table ? `${table}."${dimensionName}"` : `"${dimensionName}"`;
|
||||
}));
|
||||
};
|
||||
|
||||
const dimensionDefs = ctx => {
|
||||
let dimensions = aggregateDimensions(ctx);
|
||||
return sep(Object.keys(dimensions).map(dimension_name => {
|
||||
const expression = dimensions[dimension_name];
|
||||
return `"${expression}" AS "${dimension_name}"`;
|
||||
}));
|
||||
return sep(
|
||||
dimensionNamesAndExpressions(ctx)
|
||||
.map(([dimensionName, expression]) => `${expression.sql} AS "${dimensionName}"`)
|
||||
);
|
||||
};
|
||||
|
||||
const aggregateFilters = ctx => ctx.filters || {};
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
module.exports = function aggregationValidator (mapconfig) {
|
||||
return function validateProperty (key, validator) {
|
||||
for (let index = 0; index < mapconfig.getLayers().length; index++) {
|
||||
|
||||
267
lib/cartodb/models/aggregation/time-dimension.js
Normal file
267
lib/cartodb/models/aggregation/time-dimension.js
Normal file
@@ -0,0 +1,267 @@
|
||||
'use strict';
|
||||
|
||||
// Render a timezone specification as SQL.
// A finite number is taken as an offset in seconds; anything else is a
// (case-insensitive) tz/PG zone name. PG abbreviations are fixed offsets
// (no DST handling), while general zone names can handle DST.
function timezone(tz) {
    return isFinite(tz) ? `INTERVAL '${tz} seconds'` : `'${tz}'`;
}

// Wrap the time expression `t` (assumed TIMESTAMP WITH TIME ZONE) so it is
// evaluated in timezone `tz`; when `tz` is undefined, `t` is returned as-is.
// A zone-less TIMESTAMP or TIME should first be converted with
// `timezone('utc', t)`. CARTO uses timestamptz columns for dates and
// VectorMapConfigAdapter converts them to epoch numbers, so callers
// typically apply `to_timestamp` to the (converted) column before this.
function timeExpression(t, tz) {
    return tz === undefined ? t : `timezone(${timezone(tz)}, ${t})`;
}
|
||||
|
||||
// Normalize a (possibly partial) ISO timestamp literal into a full
// 'YYYY-MM-DDTHH:MM:SS' string, filling in missing parts with defaults:
// YYYY=0001, MM=01, DD=01, HH=00, MM=00, SS=00.
// Accepts compact ('20180301') and separated ('2018-03-01T10:30:00',
// '2018-03-01 10:30:00') forms; a missing or unparseable epoch yields the
// full default '0001-01-01T00:00:00'.
// Fix: the previous regex required minutes to follow the hour with NO
// colon ('T1030') and rejected the standard 'T10:30' form, silently
// discarding the whole input; the separators are now optional in both
// positions, so both compact and colon-separated times parse.
function epochWithDefaults(epoch) {
    const format = /^(\d\d\d\d)(?:\-?(\d\d)(?:\-?(\d\d)(?:[T\s]?(\d\d)(?:\:?(\d\d)(?:\:?(\d\d))?)?)?)?)?$/;
    const match = (epoch || '').match(format) || [];
    const [, year = '0001', month = '01', day = '01', hour = '00', minute = '00', second = '00'] = match;
    return `${year}-${month}-${day}T${hour}:${minute}:${second}`;
}
|
||||
|
||||
// Turn an epoch literal (an ISO timestamp WITHOUT time zone, normalized by
// epochWithDefaults — it is interpreted in the input time's defined
// timezone) into a SQL TIMESTAMP (without time zone) literal.
function epochExpression(epoch) {
    return `TIMESTAMP '${epoch}'`;
}
|
||||
|
||||
// SQL fragments with $t/$epoch placeholders, substituted by serialSqlExpr.
const YEARSPAN = "(date_part('year', $t)-date_part('year', $epoch))";
// Note that SECONDSPAN is not a UTC epoch, but an epoch in the specified TZ,
// so we can use it to compute any multiple of seconds with it without using
// date_part or date_trunc.
const SECONDSPAN = "(date_part('epoch', $t) - date_part('epoch', $epoch))";

// Small constructor for a serial-unit entry.
const serialPart = (sql, zeroBased) => ({ sql, zeroBased });

// One entry per serial unit: the SQL that numbers periods of that unit
// since $epoch, and whether the series starts at 0 (zeroBased) or 1.
const serialParts = {
    second: serialPart(`FLOOR(${SECONDSPAN})`, true),
    minute: serialPart(`FLOOR(${SECONDSPAN}/60)`, true),
    hour: serialPart(`FLOOR(${SECONDSPAN}/3600)`, true),
    day: serialPart(`1 + FLOOR(${SECONDSPAN}/86400)`, false),
    week: serialPart(`1 + FLOOR(${SECONDSPAN}/(7*86400))`, false),
    month: serialPart(`1 + date_part('month', $t) - date_part('month', $epoch) + 12*${YEARSPAN}`, false),
    quarter: serialPart(`1 + date_part('quarter', $t) - date_part('quarter', $epoch) + 4*${YEARSPAN}`, false),
    semester: serialPart(`1 + FLOOR((date_part('month', $t) - date_part('month', $epoch))/6) + 2*${YEARSPAN}`, false),
    trimester: serialPart(`1 + FLOOR((date_part('month', $t) - date_part('month', $epoch))/4) + 3*${YEARSPAN}`, false),
    // for the default epoch, year coincides with date_part('year', $t)
    year: serialPart(`1 + ${YEARSPAN}`, false),
    // for the default epoch, decade coincides with date_part('decade', $t)
    decade: serialPart(`FLOOR((${YEARSPAN} + 1)/10)`, true),
    // for the default epoch, century coincides with date_part('century', $t)
    century: serialPart(`1 + FLOOR(${YEARSPAN}/100)`, false),
    // for the default epoch, millennium coincides with date_part('millennium', $t)
    millennium: serialPart(`1 + FLOOR(${YEARSPAN}/1000)`, false)
};
|
||||
|
||||
// Build the SQL expression that classifies time `params.time` into serial
// (monotonically increasing) periods of `params.units`, counted from the
// `params.starting` epoch literal, evaluated in `params.timezone`, and
// optionally grouped `params.count` units at a time.
function serialSqlExpr(params) {
    const { sql, zeroBased } = serialParts[params.units];
    const column = timeExpression(params.time, params.timezone);
    const epoch = epochExpression(params.starting);
    const serial = sql.replace(/\$t/g, column).replace(/\$epoch/g, epoch);
    if (params.count === 1) {
        return `(${serial})::int`;
    }
    // Multi-unit grouping: zero-based series floor-divide, one-based ceil.
    const rounding = zeroBased ? 'FLOOR' : 'CEIL';
    return `${rounding}((${serial})/(${params.count}::double precision))::int`;
}
|
||||
|
||||
// ISO-style label formats per unit; $t is substituted by isoSqlExpr.
const isoParts = {
    second: `to_char($t, 'YYYY-MM-DD"T"HH24:MI:SS')`,
    minute: `to_char($t, 'YYYY-MM-DD"T"HH24:MI')`,
    hour: `to_char($t, 'YYYY-MM-DD"T"HH24')`,
    day: `to_char($t, 'YYYY-MM-DD')`,
    month: `to_char($t, 'YYYY-MM')`,
    year: `to_char($t, 'YYYY')`,
    week: `to_char($t, 'IYYY-"W"IW')`,
    quarter: `to_char($t, 'YYYY-"Q"Q')`,
    semester: `to_char($t, 'YYYY"S"') || to_char(CEIL(date_part('month', $t)/6), '9')`,
    trimester: `to_char($t, 'YYYY"t"') || to_char(CEIL(date_part('month', $t)/4), '9')`,
    decade: `to_char(date_part('decade', $t), '"D"999')`,
    century: `to_char($t, '"C"CC')`,
    millennium: `to_char(date_part('millennium', $t), '"M"999')`
};
|
||||
|
||||
// Build the SQL expression that formats `params.time` as an ISO-style
// label for `params.units`. Grouped (count > 1) periods are not supported.
function isoSqlExpr(params) {
    const column = timeExpression(params.time, params.timezone);
    if (params.count > 1) {
        // TODO: it would be sensible to return the ISO of the first unit in the period
        throw new Error('Multiple time units not supported for ISO format');
    }
    return isoParts[params.units].replace(/\$t/g, column);
}
|
||||
|
||||
// Cyclic (repeating) time parts; $t is substituted by cyclicSqlExpr.
// The comments give each part's value range.
const cyclicParts = {
    dayOfWeek: `date_part('isodow', $t)`, // 1 = monday to 7 = sunday;
    dayOfMonth: `date_part('day', $t)`, // 1 to 31
    dayOfYear: `date_part('doy', $t)`, // 1 to 366
    hourOfDay: `date_part('hour', $t)`, // 0 to 23
    monthOfYear: `date_part('month', $t)`, // 1 to 12
    quarterOfYear: `date_part('quarter', $t)`, // 1 to 4
    semesterOfYear: `FLOOR((date_part('month', $t)-1)/6.0) + 1`, // 1 to 2
    trimesterOfYear: `FLOOR((date_part('month', $t)-1)/4.0) + 1`, // 1 to 3
    weekOfYear: `date_part('week', $t)`, // 1 to 53
    minuteOfHour: `date_part('minute', $t)` // 0 to 59
};
|
||||
|
||||
// Build the SQL expression for a cyclic classification of `params.time`
// (e.g. day of week), evaluated in `params.timezone`.
function cyclicSqlExpr(params) {
    const t = timeExpression(params.time, params.timezone);
    return cyclicParts[params.units].replace(/\$t/g, t);
}
|
||||
|
||||
const ACCEPTED_PARAMETERS = ['time', 'units', 'timezone', 'count', 'starting', 'format'];
const REQUIRED_PARAMETERS = ['time', 'units'];

// Validate the generic time-dimension parameters, then delegate to the
// classifier-specific `checker` (which returns { errors, params }).
// Returns the checker's (possibly normalized) params; throws a single
// Error aggregating every problem found.
function validateParameters(params, checker) {
    const present = Object.keys(params);
    const errors = [];
    const unknown = present.filter(name => !ACCEPTED_PARAMETERS.includes(name));
    if (unknown.length > 0) {
        errors.push(`Invalid parameters: ${unknown.join(', ')}`);
    }
    const missing = REQUIRED_PARAMETERS.filter(name => !present.includes(name));
    if (missing.length > 0) {
        errors.push(`Missing parameters: ${missing.join(', ')}`);
    }
    const checked = checker(params);
    errors.push(...checked.errors);
    if (errors.length > 0) {
        throw new Error(`Invalid time dimension:\n${errors.join("\n")}`);
    }
    return checked.params;
}
|
||||
|
||||
const VALID_CYCLIC_UNITS = Object.keys(cyclicParts);
const VALID_SERIAL_UNITS = Object.keys(serialParts);
const VALID_ISO_UNITS = Object.keys(isoParts);

// Classifier-specific checks for cyclic dimensions: units must name a
// known cyclic part, and grouped counts (> 1) are not supported.
function cyclicCheckParams(params) {
    const errors = [];
    if (!VALID_CYCLIC_UNITS.includes(params.units)) {
        errors.push(`Invalid units "${params.units}"`);
    }
    if (params.count && params.count > 1) {
        errors.push(`Count ${params.count} not supported for cyclic ${params.units}`);
    }
    return { errors, params };
}
|
||||
|
||||
// Classifier-specific checks for serial dimensions; also normalizes the
// `starting` epoch by filling in its missing parts (see epochWithDefaults).
function serialCheckParams(params) {
    const errors = VALID_SERIAL_UNITS.includes(params.units)
        ? []
        : [`Invalid grouping units "${params.units}"`];
    const normalized = Object.assign({}, params, { starting: epochWithDefaults(params.starting) });
    return { errors, params: normalized };
}
|
||||
|
||||
// Classifier-specific checks for ISO-formatted dimensions: units must be
// a known ISO part, and a custom 'starting' epoch is not supported.
function isoCheckParams(params) {
    const errors = [];
    if (!VALID_ISO_UNITS.includes(params.units)) {
        errors.push(`Invalid units "${params.units}"`);
    }
    if (params.starting) {
        errors.push("Parameter 'starting' not supported for ISO format");
    }
    return { errors, params };
}
|
||||
|
||||
// Dispatch table: each classification strategy pairs its SQL generator
// with its parameter checker.
const CLASSIFIERS = {
    cyclic: { sqlExpr: cyclicSqlExpr, checkParams: cyclicCheckParams },
    iso: { sqlExpr: isoSqlExpr, checkParams: isoCheckParams },
    serial: { sqlExpr: serialSqlExpr, checkParams: serialCheckParams }
};
|
||||
|
||||
// True when `units` names a cyclic (repeating) time part.
function isCyclic(units) {
    return VALID_CYCLIC_UNITS.includes(units);
}

// Pick the classification strategy for the given parameters:
// cyclic units take precedence, then an explicit 'iso' format,
// and everything else is treated as serial.
function classifierFor(params) {
    if (params.units && isCyclic(params.units)) {
        return CLASSIFIERS.cyclic;
    }
    if (params.format === 'iso') {
        return CLASSIFIERS.iso;
    }
    return CLASSIFIERS.serial;
}
|
||||
|
||||
// Entry point: validate `params`, then return the SQL expression that
// classifies a time value into the requested periods along with the
// effective (validated/normalized) parameters.
function classificationSql(params) {
    const classifier = classifierFor(params);
    const effectiveParams = validateParameters(params, classifier.checkParams);
    return { sql: classifier.sqlExpr(effectiveParams), effectiveParams };
}

module.exports = classificationSql;
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
function CdbRequest() {
|
||||
this.RE_USER_FROM_HOST = new RegExp(global.environment.user_from_host ||
|
||||
'^([^\\.]+)\\.' // would extract "strk" from "strk.cartodb.com"
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
const BaseDataview = require('./base');
|
||||
const debug = require('debug')('windshaft:dataview:aggregation');
|
||||
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
const FLOAT_OIDS = {
|
||||
700: true,
|
||||
701: true,
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
const dataviews = require('./');
|
||||
|
||||
module.exports = class DataviewFactory {
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
const BaseDataview = require('./base');
|
||||
const debug = require('debug')('windshaft:dataview:formula');
|
||||
const utils = require('../../utils/query-utils');
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user