Compare commits
390 Commits
Commit SHAs:

c792421687 15135b475c fd9f935676 71f5886a4d bc8c9f973c ec40614f4b
5ed1a3a2d1 0aa5f394e2 2e1a3c7fb1 27eb00223d 8d46780006 6ffd2c090e
3995787c02 ddb1b0c0d8 a03d268260 5c491a25cf 92be27e700 6b61f5e168
d79f1b41d0 e039204638 dc1becd15c a121fd75ab f85417a886 8ad72ff2ce
4dd6bc466a c119c92de6 a3f7acb213 0f14ed55db 528395103b 288cd9584f
cf82e1954e 3b00cffc3b 95bf39cada f9ad3c8acf 28f70f6877 d5c5d07507
b646f71394 38fe2169aa a749d4fb43 b9198b59a1 3102d895f2 b60a69e7d2
3937b8c271 b32a073ac3 afd4ad500f cb17bba3f5 5b7341c0e9 d65565c091
360b98254b 43a603922d 74116523b4 6cddec562a 22086ba914 a68618c336
578f543c01 49735308de 2444b4c008 bf250e592a f6c8796c8a 649f8d701e
568e428a58 ff00fed43e 561bc8aef0 e49ecda321 18525a60cd b8d3971c8a
23839f5b4a f235dcdeda 9c21194c68 7acbfc1e9b 6f9580bae2 3583e064be
9e14185990 a5c83edef6 04d0f2e530 e206a1bca3 b115bca07e 07b9decb03
02c8e28494 d28744a5e3 a19e9a79b8 4d7eb555a8 6f9f53dd03 63bc8f75b9
adeffd2018 b2da00900f 0c6d5a1e18 6945cfc93c 7b53b7c30a d073f7e3dd
210f5b01ec 1dda183a31 0eadfe6ee9 c37e3f173d 107a97aa9e 219d2c9044
1e89821d97 29c6505252 7d8d05b865 afeb91dc86 b7b3392bdd b60116410a
ffe19827fd 48c28aea0b 62d66f2dbc e644201756 481a5928c4 163c546236
656bc9344b b79a8587fa 17337974a2 6bcf477532 bf7e8a6ec6 f31e8b43b6
0090811510 b97aeda53c f82232194c aff5c9a614 ddefb1a6ca 4d06fee1e2
8febd81ed2 e575f01bef f25f507945 bdbb529ea8 0aac942aa1 8cc24bc665
478ea66678 4dfc898587 05e77b2aed 24863b6393 3cf17c8bab 8c38ecf808
a196a26ab4 8d73571f5b d5348dd9d4 7e31b956bf dbc5d65d90 c91d78fe51
798d010776 70f0b6ea50 4e3ef96374 c88a14bf43 7f5ed58a79 89e349146d
c5cb2ea4cb fe9610abe9 1bbde4f5e3 e90c196598 6a2333be64 7d6a64d383
42dc2915ea 3cec6b5a90 c31e3d6e3f 6e4c8a6639 809c267419 5ac27d1002
7237fb04a8 d1696425fd a614fb1ef6 aa38dd3b59 2ac050501b 03abe187ce
a83d0cf7af 8bb4fbec12 a8fb51ba25 24efc37737 c25678cc28 44970b78a1
a3bdbf6202 f583a4240a 4054c6923f 7a1d84a3fb 58ed7c0093 f56e79ed1f
45c423bbaf 78f47e5873 21d1a56953 69a02bcee0 d2c0f553fc 3967aecfdc
7b8cc0a8b8 28c4e89ab5 8c42ac9053 86987f9e69 33a8267d2c 779a8a8927
1888302cee 34c446909e 583765a298 4b1f0b5775 8f81c810e0 970be73052
e85469cc3c 4a41ee8f75 9591a5a2b0 8f510f401e 92678c3dae 9f2d1f90d0
23e331610d 59cb6f9c9c 98325495ea 576518b2c8 0631bafbbf d9b6284914
111b927033 d63337f06f 7012e6a66a 726e1a2268 6e455a1205 da07d550d2
1829a634e9 95f66b8c4b ea1f43bec7 c877d0b964 caf09ac644 17f151cd5a
0940158d01 e6bbe8351d 031bae2564 b8d790caab 267557eb90 b2af93dfec
7e81618769 eeac5ce998 fcf2fd1455 fb9dce0386 4c09a70647 eee59abfa1
c7effbccb4 2912e4fea6 2d09a214ae a88c085278 5dcca3e088 413a1685aa
7081a7ec3c 33143ea28e f8c86f3b72 ae53cc736b eca75d1365 ef201e6fcf
38a556b7d6 c071746768 57512ba48b dcf765efda 525d41e63c 7d7ca0de4a
11e5726ea9 d3f0c52474 8523f835dc 63ccfac599 283baa4a3f c7bd132e2f
3c92e186d6 67d8919f8a 06c0b28d37 dfedb45254 b373965510 52d887f3b4
a6ca480210 16e80424e0 6c72d3adbe bbc9c9fb9b 42d0c4c040 8f99886d62
60c01e583f f21f89f561 5f900a3b3c 60db55b122 d9c05a9333 ab66ad83fd
3498fceb6a e841774978 f297044203 c7e803a94c ac198d5b5a 6eb66de94e
f545b4d002 eee3e8b63c 69afee61e0 724f67d381 8d69af4445 3c301ce742
f87c432744 d446ba9c1b dc669f5cd4 d4719d5707 f9082dad94 a8d421c9cc
7b13c12ab4 97f4adbc1a 602ab44375 77e6fb8225 2c8a030ecb ccd01e6da5
df10cfe641 09d3e8aabb f17411916f 75583f67c5 bb745b0318 3834aeb73f
aa09c079f6 3c586caba4 b05740048c 2b5ed21207 acecb88efb 734c373f3d
e49cb524a8 cc24228511 27106fea57 990aaadc16 0c572b5947 3e7c294989
8a02156ac0 c4a75de0d8 db03bcdf8f dd5825c770 8fbe8f9f2a 3bc3d19f40
575fe8e350 d5218a86f6 080f93f6de df931d95a3 d5406d5b50 f7e877ce60
ad4a1ada45 da0d0d21e3 7a1d2ca205 d89e785440 2423b5a4c4 1bee877b24
4d70ac0894 593d9e40f6 9fd1a3c663 8a781d241c be4d610de1 736d3460d9
f844d70275 0c9cfefcd0 8ed187b0f5 e5bada81dc 655f817033 ebff2ac9f2
5a7ffcf499 f8e117a7b7 c4054f0ac9 f7707141d6 c40c42fc10 6cad976078
c82f17e5d2 1054bde7fd 9e23b91f3f ea6e064e42 cf0858f5b9 69b11a8412
55aad4254c 73e1659378 98f3e8159e e8cff194fc f1de1b3b91 a134ab3012
5a84d7233b 8fe0112568 3acaac5403 7dbac5a565 8fb4f4063f 808718fb26
6dc8de315a afb9b08925 2bed034e64 2328bb6261 06357fa3f9 83f58288f9
b1d5f0f9e8 7142e4db37 281a079a62 0d638e6bad 43a63feaca 4aa6ffe28c
2ce688ee2a 4e967980a3 93edf07da8 a684bead92 dd06de2632 975f07df99
5fe6845d7c 4aa844946d 3220e3de31 26bba3c5f5 c82a5c38df 9cfaf6eefc
.eslintrc.js (new file, 22 lines)
@@ -0,0 +1,22 @@
module.exports = {
    env: {
        commonjs: true,
        es6: true,
        node: true,
        mocha: true
    },
    extends: [
        'standard'
    ],
    globals: {
        Atomics: 'readonly',
        SharedArrayBuffer: 'readonly'
    },
    parserOptions: {
        ecmaVersion: 2018
    },
    rules: {
        "indent": ["error", 4],
        "semi": ["error", "always"]
    }
}
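The config above extends `eslint-config-standard` but overrides two of its defaults: 4-space indentation and mandatory semicolons. A minimal sketch of code written in the enforced style, purely illustrative and not part of the repository:

```js
'use strict';

// Hypothetical module, only meant to illustrate the lint rules above:
// 4-space indentation and a trailing semicolon on every statement.
function formatTileUrl (user, mapId, z, x, y) {
    if (!user || !mapId) {
        throw new Error('user and mapId are required');
    }

    return `/user/${user}/api/v1/map/${mapId}/${z}/${x}/${y}.png`;
}

module.exports = { formatTileUrl };
```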
.github/workflows/main.yml (new file, vendored, 58 lines)
@@ -0,0 +1,58 @@
name: continuous integration
on:
  pull_request:
    paths-ignore:
      - 'LICENSE'
      - 'README**'
      - 'HOW_TO_RELEASE**'
      - 'LOGGING**'

env:
  GCLOUD_VERSION: '306.0.0'
  ARTIFACTS_PROJECT_ID: cartodb-on-gcp-main-artifacts

jobs:
  build-test-docker:
    runs-on: ubuntu-18.04
    timeout-minutes: 10

    steps:
      - uses: actions/checkout@v2
        with:
          submodules: true
          token: ${{ secrets.CARTOFANTE_PERSONAL_TOKEN }}

      - name: Build image
        # we tag with "latest" but we don't push it on purpose; we use it as a base for the testing image
        run: |
          echo ${GITHUB_SHA::7}
          echo ${GITHUB_REF##*/}
          docker build -f private/Dockerfile -t gcr.io/$ARTIFACTS_PROJECT_ID/windshaft:latest -t gcr.io/$ARTIFACTS_PROJECT_ID/windshaft:${GITHUB_REF##*/} -t gcr.io/$ARTIFACTS_PROJECT_ID/windshaft:${GITHUB_SHA::7} -t gcr.io/$ARTIFACTS_PROJECT_ID/windshaft:${GITHUB_REF##*/}--${GITHUB_SHA::7} .

      - name: Build testing image
        # uses the latest image from the previous step and adds the needed parts on top
        run: |
          docker build -t gcr.io/$ARTIFACTS_PROJECT_ID/windshaft-test:latest -f private/Dockerfile.test .

      - name: Setup gcloud authentication
        uses: google-github-actions/setup-gcloud@master
        with:
          version: ${{env.GCLOUD_VERSION}}
          service_account_key: ${{ secrets.ARTIFACTS_GCLOUD_ACCOUNT_BASE64 }}

      - name: Configure docker and pull images
        # we pull images manually; doing it in the next step via docker-compose fails because of missing openssl
        run: |
          gcloud auth configure-docker
          docker pull gcr.io/cartodb-on-gcp-main-artifacts/postgres:latest
          docker pull gcr.io/cartodb-on-gcp-main-artifacts/redis:latest

      - name: Run tests inside container
        run: docker-compose -f private/ci/docker-compose.yml run windshaft-tests

      - name: Upload image
        run: |
          docker push gcr.io/$ARTIFACTS_PROJECT_ID/windshaft:${GITHUB_REF##*/}
          docker push gcr.io/$ARTIFACTS_PROJECT_ID/windshaft:${GITHUB_SHA::7}
          docker push gcr.io/$ARTIFACTS_PROJECT_ID/windshaft:${GITHUB_REF##*/}--${GITHUB_SHA::7}
.github/workflows/master.yml (new file, vendored, 47 lines)
@@ -0,0 +1,47 @@
# in this workflow we don't run the tests; we only build the image, tag it (also as latest) and upload it. The tests are not run because they run
# on each pull request, and branch protection forces the branch to be up to date before merging, so tests are always run
# with the latest code

name: master build image
on:
  push:
    branches:
      - master

env:
  GCLOUD_VERSION: '306.0.0'
  ARTIFACTS_PROJECT_ID: cartodb-on-gcp-main-artifacts

jobs:
  build-master:
    runs-on: ubuntu-18.04
    timeout-minutes: 5

    steps:
      - uses: actions/checkout@v2
        with:
          submodules: true
          token: ${{ secrets.CARTOFANTE_PERSONAL_TOKEN }}

      - name: Build image
        run: |
          echo ${GITHUB_SHA::7}
          echo ${GITHUB_REF##*/}
          docker build -f private/Dockerfile -t gcr.io/$ARTIFACTS_PROJECT_ID/windshaft:latest -t gcr.io/$ARTIFACTS_PROJECT_ID/windshaft:${GITHUB_REF##*/} -t gcr.io/$ARTIFACTS_PROJECT_ID/windshaft:${GITHUB_SHA::7} .

      - name: Setup gcloud authentication
        uses: google-github-actions/setup-gcloud@master
        with:
          version: ${{env.GCLOUD_VERSION}}
          service_account_key: ${{ secrets.ARTIFACTS_GCLOUD_ACCOUNT_BASE64 }}

      - name: Configure docker
        run: |
          gcloud auth configure-docker

      - name: Upload image
        run: |
          docker push gcr.io/$ARTIFACTS_PROJECT_ID/windshaft:${GITHUB_REF##*/}
          docker push gcr.io/$ARTIFACTS_PROJECT_ID/windshaft:${GITHUB_SHA::7}
          docker push gcr.io/$ARTIFACTS_PROJECT_ID/windshaft:latest
.gitignore (vendored, 3 changes)
@@ -11,3 +11,6 @@ redis.pid
*.log
coverage/
.DS_Store
.nyc_output
build_resources/
.dockerignore
.gitmodules (new file, vendored, 4 lines)
@@ -0,0 +1,4 @@
[submodule "private"]
    path = private
    url = git@github.com:CartoDB/Windshaft-cartodb-private.git
    branch = master
.jshintrc (deleted, 95 lines)
@@ -1,95 +0,0 @@
{
    // // JSHint Default Configuration File (as on JSHint website)
    // // See http://jshint.com/docs/ for more details
    //
    // "maxerr" : 50, // {int} Maximum error before stopping
    //
    // // Enforcing
    // "bitwise" : true, // true: Prohibit bitwise operators (&, |, ^, etc.)
    // "camelcase" : false, // true: Identifiers must be in camelCase
    "curly" : true, // true: Require {} for every new block or scope
    "eqeqeq" : true, // true: Require triple equals (===) for comparison
    "forin" : true, // true: Require filtering for..in loops with obj.hasOwnProperty()
    "freeze" : true, // true: prohibits overwriting prototypes of native objects such as Array, Date etc.
    "immed" : true, // true: Require immediate invocations to be wrapped in parens e.g. `(function () { } ());`
    // "indent" : 4, // {int} Number of spaces to use for indentation
    // "latedef" : false, // true: Require variables/functions to be defined before being used
    "newcap" : true, // true: Require capitalization of all constructor functions e.g. `new F()`
    "noarg" : true, // true: Prohibit use of `arguments.caller` and `arguments.callee`
    // "noempty" : true, // true: Prohibit use of empty blocks
    "nonbsp" : true, // true: Prohibit "non-breaking whitespace" characters.
    "nonew" : true, // true: Prohibit use of constructors for side-effects (without assignment)
    // "plusplus" : false, // true: Prohibit use of `++` & `--`
    // "quotmark" : false, // Quotation mark consistency:
    // // false : do nothing (default)
    // // true : ensure whatever is used is consistent
    // // "single" : require single quotes
    // // "double" : require double quotes
    "undef" : true, // true: Require all non-global variables to be declared (prevents global leaks)
    "unused" : true, // true: Require all defined variables be used
    // "strict" : true, // true: Requires all functions run in ES5 Strict Mode
    // "maxparams" : false, // {int} Max number of formal params allowed per function
    // "maxdepth" : false, // {int} Max depth of nested blocks (within functions)
    // "maxstatements" : false, // {int} Max number statements per function
    "maxcomplexity" : 6, // {int} Max cyclomatic complexity per function
    "maxlen" : 120, // {int} Max number of characters per line
    //
    // // Relaxing
    // "asi" : false, // true: Tolerate Automatic Semicolon Insertion (no semicolons)
    // "boss" : false, // true: Tolerate assignments where comparisons would be expected
    "debug" : false, // true: Allow debugger statements e.g. browser breakpoints.
    // "eqnull" : false, // true: Tolerate use of `== null`
    // "es5" : false, // true: Allow ES5 syntax (ex: getters and setters)
    "esnext" : true, // true: Allow ES.next (ES6) syntax (ex: `const`)
    // "moz" : false, // true: Allow Mozilla specific syntax (extends and overrides esnext features)
    // // (ex: `for each`, multiple try/catch, function expression…)
    // "evil" : false, // true: Tolerate use of `eval` and `new Function()`
    // "expr" : false, // true: Tolerate `ExpressionStatement` as Programs
    // "funcscope" : false, // true: Tolerate defining variables inside control statements
    // "globalstrict" : false, // true: Allow global "use strict" (also enables 'strict')
    // "iterator" : false, // true: Tolerate using the `__iterator__` property
    // "lastsemic" : false, // true: Tolerate omitting a semicolon for the last statement of a 1-line block
    // "laxbreak" : false, // true: Tolerate possibly unsafe line breakings
    // "laxcomma" : false, // true: Tolerate comma-first style coding
    // "loopfunc" : false, // true: Tolerate functions being defined in loops
    // "multistr" : false, // true: Tolerate multi-line strings
    // "noyield" : false, // true: Tolerate generator functions with no yield statement in them.
    // "notypeof" : false, // true: Tolerate invalid typeof operator values
    // "proto" : false, // true: Tolerate using the `__proto__` property
    // "scripturl" : false, // true: Tolerate script-targeted URLs
    // "shadow" : false, // true: Allows re-define variables later in code e.g. `var x=1; x=2;`
    // "sub" : false, // true: Tolerate using `[]` notation when it can still be expressed in dot notation
    // "supernew" : false, // true: Tolerate `new function () { ... };` and `new Object;`
    // "validthis" : false, // true: Tolerate using this in a non-constructor function
    //
    // // Environments
    // "browser" : true, // Web Browser (window, document, etc)
    // "browserify" : false, // Browserify (node.js code in the browser)
    // "couch" : false, // CouchDB
    // "devel" : true, // Development/debugging (alert, confirm, etc)
    // "dojo" : false, // Dojo Toolkit
    // "jasmine" : false, // Jasmine
    // "jquery" : false, // jQuery
    // "mocha" : true, // Mocha
    // "mootools" : false, // MooTools
    "node" : true, // Node.js
    // "nonstandard" : false, // Widely adopted globals (escape, unescape, etc)
    // "prototypejs" : false, // Prototype and Scriptaculous
    // "qunit" : false, // QUnit
    // "rhino" : false, // Rhino
    // "shelljs" : false, // ShellJS
    // "worker" : false, // Web Workers
    // "wsh" : false, // Windows Scripting Host
    // "yui" : false, // Yahoo User Interface

    // Custom predefined global variables
    "predef": [
        "-console", // disallows console, use debug
        "beforeEach",
        "afterEach",
        "before",
        "after",
        "describe",
        "it"
    ]
}
.travis.yml (deleted, 12 lines)
@@ -1,12 +0,0 @@
language: generic
sudo: required
env:
  matrix:
    - NODE_VERSION=10.15.1
      DOCKER_IMAGE=carto/nodejs-xenial-pg101:latest
    - NODE_VERSION=10.15.1
      DOCKER_IMAGE=carto/nodejs-xenial-pg1121:latest
services:
  - docker
before_install: docker pull ${DOCKER_IMAGE}
script: npm run docker-test -- ${DOCKER_IMAGE} ${NODE_VERSION}
@@ -1,11 +0,0 @@
Contributing
---

The issue tracker is at [github.com/CartoDB/Windshaft-cartodb](https://github.com/CartoDB/Windshaft-cartodb).

We love pull requests from everyone, see [Contributing to Open Source on GitHub](https://guides.github.com/activities/contributing-to-open-source/#contributing).


## Submitting Contributions

* You will need to sign a Contributor License Agreement (CLA) before making a submission. [Learn more here](https://carto.com/contributions).
@@ -1,18 +0,0 @@
1. Test (make clean all check), fix if broken before proceeding
2. Ensure proper version in package.json and package-lock.json
3. Ensure NEWS section exists for the new version, review it, add release date
4. If there are modified dependencies in package.json, update them with `npm upgrade {{package_name}}@{{version}}`
5. Commit package.json, package-lock.json, NEWS
6. git tag -a Major.Minor.Patch # use NEWS section as content
7. Stub NEWS/package for next version

Versions:

Bugfix releases increment Patch component of version.
Feature releases increment Minor and set Patch to zero.
If backward compatibility is broken, increment Major and
set to zero Minor and Patch.

Branches named 'b<Major>.<Minor>' are kept for any critical
fix that might need to be shipped before next feature release
is ready.
HOW_TO_RELEASE.md (new file, 16 lines)
@@ -0,0 +1,16 @@
# How to release

1. Test (npm test), fix if broken before proceeding.
2. Ensure proper version in `package.json` and `package-lock.json`.
3. Ensure NEWS section exists for the new version, review it, add release date.
4. If there are modified dependencies in `package.json`, update them with `npm upgrade {{package_name}}@{{version}}`.
5. Commit `package.json`, `package-lock.json`, NEWS.
6. Run `git tag -a Major.Minor.Patch`. Use NEWS section as content.
7. Stub NEWS/package for next version.

## Version:

* Bugfix releases increment Patch component of version.
* Feature releases increment Minor and set Patch to zero.
* If backward compatibility is broken, increment Major and set to zero Minor and Patch.
* Branches named 'b<Major>.<Minor>' are kept for any critical fix that might need to be shipped before next feature release is ready.
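Since the release steps revolve around semantic versioning, the bump rules can be expressed with the `semver` package (already required by `app.js` in this changeset); a small illustrative sketch, not part of the actual release tooling:

```js
'use strict';

const semver = require('semver');

// The three bump rules listed above, applied to an example version.
const current = '9.0.0';

console.log(semver.inc(current, 'patch')); // 9.0.1  -> bugfix release
console.log(semver.inc(current, 'minor')); // 9.1.0  -> feature release
console.log(semver.inc(current, 'major')); // 10.0.0 -> backward-incompatible release
```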
INSTALL.md (deleted, 41 lines)
@@ -1,41 +0,0 @@
# Installing Windshaft-CartoDB

## Requirements

Make sure that you have the requirements needed. These are:

- Node 10.x
- npm 6.x
- PostgreSQL >= 10.0
- PostGIS >= 2.4
- CARTO Postgres Extension >= 0.24.1
- Redis >= 4
- libcairo2-dev, libpango1.0-dev, libjpeg8-dev and libgif-dev for server side canvas support
- C++11 (to build internal dependencies if needed)

### Optional

- Varnish (http://www.varnish-cache.org)

## PostGIS setup

A `template_postgis` database is expected. One can be set up with

```shell
createdb --owner postgres --template template0 template_postgis
psql -d template_postgis -c 'CREATE EXTENSION postgis;'
```

## Build/install

To fetch and build all node-based dependencies, run:

```shell
npm install
```

Note that the ```npm``` step will populate the node_modules/
directory with modules, some of which being compiled on demand. If you
happen to have startup errors you may need to force rebuilding those
modules. At any time just wipe out the node_modules/ directory and run
```npm``` again.
LOGGING.md (new file, 21 lines)
@@ -0,0 +1,21 @@
# Logging structured traces

In order to have meaningful and useful log traces, you should follow
some general guidelines described in the [Project Guidelines](http://doc-internal.cartodb.net/platform/guidelines.html#structured-logging).

In this project there is a specific logger in place that takes care of
format and context of the traces for you. Take a look at [logger.js](https://github.com/CartoDB/Windshaft-cartodb/blob/cf82e1954e2244861e47fce0c2223ee466a5cd64/lib/utils/logger.js)
(NOTE: that file will be moved soon to a common module).

The logger is instantiated as part of the [app startup process](https://github.com/CartoDB/Windshaft-cartodb/blob/cf82e1954e2244861e47fce0c2223ee466a5cd64/app.js#L53),
then passed to middlewares and other client classes.

There are many examples of how to use the logger to generate traces
throughout the code. Here are a few of them:

```js
lib/api/middlewares/logger.js: res.locals.logger.info({ client_request: req }, 'Incoming request');
lib/api/middlewares/logger.js: res.on('finish', () => res.locals.logger.info({ server_response: res, status: res.statusCode }, 'Response sent'));
lib/api/middlewares/profiler.js: logger.info({ stats, duration: stats.response / 1000, duration_ms: stats.response }, 'Request profiling stats');
lib/api/middlewares/tag.js: res.on('finish', () => logger.info({ tags: res.locals.tags }, 'Request tagged'));
```
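As a rough sketch of the pattern those traces follow, the middleware below attaches a request-scoped child logger and emits structured entries; it assumes a pino-style API (suggested by the `pino-pretty` reference in the changelog) and is not the project's actual `lib/utils/logger.js`:

```js
'use strict';

const express = require('express');
const pino = require('pino');

const logger = pino({ level: process.env.LOG_LEVEL || 'info' });
const app = express();

app.use((req, res, next) => {
    // Reuse X-Request-Id when the client sends it so entries can be correlated;
    // the fallback id here is a stand-in for the uuid the project generates.
    const requestId = req.get('X-Request-Id') || `req-${Date.now()}-${process.pid}`;
    res.locals.logger = logger.child({ request_id: requestId });

    res.locals.logger.info({ client_request: { method: req.method, url: req.url } }, 'Incoming request');
    res.on('finish', () => res.locals.logger.info({ status: res.statusCode }, 'Response sent'));

    next();
});

app.get('/healthcheck', (req, res) => res.status(200).json({ ok: true }));

app.listen(8181);
```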
Makefile (deleted, 53 lines)
@@ -1,53 +0,0 @@
SHELL=/bin/bash

pre-install:
	@$(SHELL) ./scripts/check-node-canvas.sh

all:
	@$(SHELL) ./scripts/install.sh

clean:
	rm -rf node_modules/

distclean: clean
	rm config.status*

config.status--test:
	./configure --environment=test

config/environments/test.js: config.status--test
	./config.status--test

TEST_SUITE := $(shell find test/{acceptance,integration,unit} -name "*.js")
TEST_SUITE_UNIT := $(shell find test/unit -name "*.js")
TEST_SUITE_INTEGRATION := $(shell find test/integration -name "*.js")
TEST_SUITE_ACCEPTANCE := $(shell find test/acceptance -name "*.js")

test: config/environments/test.js
	@echo "***tests***"
	@$(SHELL) ./run_tests.sh ${RUNTESTFLAGS} $(TEST_SUITE)

test-unit: config/environments/test.js
	@echo "***tests***"
	@$(SHELL) ./run_tests.sh ${RUNTESTFLAGS} $(TEST_SUITE_UNIT)

test-integration: config/environments/test.js
	@echo "***tests***"
	@$(SHELL) ./run_tests.sh ${RUNTESTFLAGS} $(TEST_SUITE_INTEGRATION)

test-acceptance: config/environments/test.js
	@echo "***tests***"
	@$(SHELL) ./run_tests.sh ${RUNTESTFLAGS} $(TEST_SUITE_ACCEPTANCE)

jshint:
	@echo "***jshint***"
	@./node_modules/.bin/jshint lib/ test/ app.js

test-all: test jshint

coverage:
	@RUNTESTFLAGS=--with-coverage make test

check: test

.PHONY: pre-install test jshint coverage
NEWS.md (96 changes)
@@ -1,5 +1,101 @@
# Changelog

## 10.0.0
Released 2020-mm-dd

Breaking changes:
- Log system revamp:
- Logs to stdout, disabled while testing
- Upgrade `camshaft` to version [`0.67.2`](https://github.com/CartoDB/camshaft/releases/tag/0.67.2)
- Use header `X-Request-Id`, or create a new `uuid` when not present, to identify log entries
- Be able to set log level from env variable `LOG_LEVEL`, useful while testing: `LOG_LEVEL=info npm test`; even more human-readable: `LOG_LEVEL=info npm t | ./node_modules/.bin/pino-pretty`
- Stop responding with `X-Tiler-Errors` header. Now errors are properly logged and will end up in ELK as usual.
- Stop responding with `X-Tiler-Profiler` header. Now profiling stats are properly logged and will end up in ELK as usual.
- Be able to reduce the footprint in the final log file depending on the environment
- Be able to pass the logger to the analysis creation (camshaft) while instantiating a named map with analysis.
- Be able to tag requests with labels as an easier way to provide business metrics
- Metro: Add log-collector utility (`metro`); it will be moved to its own repository. Attaching it here for development purposes. Try it with the following command: `LOG_LEVEL=info npm t | node metro`
- Metro: Creates `metrics-collector.js`, a stream that updates Prometheus' counters and histograms and exposes them via Express' app (`:9145/metrics`). Use the ones defined in `grok_exporter`

Bug Fixes:
- While instantiating a map, set the `cache buster` equal to `0` when there are no affected tables in the MapConfig. Thus `layergroupid` always has the same structure (illustrated in the sketch below):
  - `${map_id}:${cache_buster}` for anonymous map
  - `${user}@${template_hash}@${map_id}:${cache_buster}` for named map
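A small illustration of the two `layergroupid` layouts described above; the helper and its inputs are made up for the example and are not the tiler's actual API:

```js
'use strict';

// Builds the token in the two shapes listed above. With no affected tables
// the cache buster is always 0, so the structure stays stable.
function buildLayergroupId ({ user, templateHash, mapId, cacheBuster = 0 }) {
    return templateHash
        ? `${user}@${templateHash}@${mapId}:${cacheBuster}` // named map
        : `${mapId}:${cacheBuster}`; // anonymous map
}

console.log(buildLayergroupId({ mapId: 'a0b1c2d3', cacheBuster: 0 }));
// -> a0b1c2d3:0
console.log(buildLayergroupId({ user: 'alice', templateHash: 'f00dcafe', mapId: 'a0b1c2d3' }));
// -> alice@f00dcafe@a0b1c2d3:0
```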
## 9.0.0
Released 2020-06-05

Breaking changes:
- Remove `/version` endpoint
- Drop support for Node.js < 12

Announcements:
- Support Node.js 12
- Upgrade `windshaft` to version [`7.0.1`](https://github.com/CartoDB/Windshaft/releases/tag/7.0.1)
- Upgrade `camshaft` to version [`0.65.3`](https://github.com/CartoDB/camshaft/blob/0.65.3/CHANGELOG.md#0653):
  - Fix noisy message logs while checking analyses' limits
  - Fix CI setup, explicit use of PGPORT while creating the PostgreSQL cluster
- Upgrade `cartodb-redis` to version [`3.0.0`](https://github.com/CartoDB/node-cartodb-redis/releases/tag/3.0.0)
- Fix test where `http-fallback-image` renderer was failing quietly
- Fix stat `named map providers` cache count
- Use new signature for `onTileErrorStrategy`. Required by `windshaft@6.0.0`
- Extract `onTileErrorStrategy` to a module
- In tests, stop using mapnik module exposed by windshaft and require it from development dependencies
- Stop using `MapStore` from `windshaft` while testing and create a custom one instead
- Rename NamedMapProviderReporter to NamedMapProviderCacheReporter
- Remove `bootstrapFonts` at process startup (now done in `windshaft@6.0.0`)
- Stop checking the installed version of some dependencies while testing
- Send metrics about `map views` (#1162)
- Add custom headers in responses so other components can get insights about user activity
- Update dependencies to avoid security vulnerabilities

Bug Fixes:
- Parsing date column in numeric histograms (#1160)
- Use `Array.prototype.sort()`'s callback properly while testing. It should return a number, not a boolean.

## 8.1.1
Released 2020-02-17

Announcements:
- Upgrade camshaft to [`0.65.2`](https://github.com/CartoDB/camshaft/blob/69c9447c9fccf00a70a67d713d1ce777775a17ff/CHANGELOG.md#0652): Fixes uncaught errors problem (#1117)

## 8.1.0
Released 2020-01-27

Announcements:
- Removed `jshint` as linter in favour of `eslint` to check syntax, find problems, and enforce code style.
- Upgrade `camshaft` to [`0.65.1`](https://github.com/CartoDB/camshaft/blob/a2836c15fd2830f8364a222eeafdb4dc2f41b580/CHANGELOG.md#0651): Use quoted identifiers for column names and enforce the usage of the cartodb schema when using cartodb extension functions and tables.
- Stop using two different tools for package management, testing, and any other developer workflow:
  - Removes Makefile and related bash scripts
  - Use npm scripts as the only tool for testing, CI and linting.
- Simplified CI configuration.
- Improved documentation:
  - Centralized several documents into README.md
  - Remove outdated sections
  - Update old sections
  - Added missing sections.
- Remove deprecated coverage tool istanbul, using nyc instead.
- Removed unused dockerfiles
- Use cartodb schema when using cartodb extension functions and tables.
- Implemented circle and polygon dataview filters.

## 8.0.0
Released 2019-11-13

Breaking changes:
- Schema change for "routes" in configuration file, each "router" is now an array instead of an object. See [`dd06de2`](https://github.com/CartoDB/Windshaft-cartodb/pull/1126/commits/dd06de2632661e19d64c9fbc2be0ba1a8059f54c) for more details.

Announcements:
- Added validation to only allow "count" and "sum" aggregations in dataview overview.
- Added mechanism to inject custom middlewares through configuration.
- Stop requiring unused config properties: "base_url", "base_url_mapconfig", and "base_url_templated".
- Upgraded cartodb-query-tables to version [0.7.0](https://github.com/CartoDB/node-cartodb-query-tables/blob/0.7.0/NEWS.md#version-0.7.0).
- Be able to set a coherent TTL in Cache-Control header to expire all resources belonging to a map simultaneously.
- When `cache buster` in request path is `0`, set header `Last-Modified` to now; it avoids stale content in 3rd party cache providers when they add `If-Modified-Since` header into the request.
- Adding a logger to MapStore (#1134)
- Qualify calls to cartodb extension so having it in the search_path isn't necessary.
- Fix multiple DB login issues.

## 7.2.0
Released 2019-09-30
README.md (188 changes)
@@ -1,80 +1,146 @@
Windshaft-CartoDB
==================
# Windshaft-CartoDB [](https://travis-ci.org/CartoDB/Windshaft-cartodb)

[](https://travis-ci.org/CartoDB/Windshaft-cartodb)
The [`CARTO Maps API`](https://carto.com/developers/maps-api/) tiler. It extends [`Windshaft`](https://github.com/CartoDB/Windshaft) and exposes a web service with extra functionality:

This is the [CartoDB Maps API](http://docs.cartodb.com/cartodb-platform/maps-api.html) tiler. It extends
[Windshaft](https://github.com/CartoDB/Windshaft) with some extra functionality and custom filters for authentication.
* Instantiate [`Anonymous Maps`](https://github.com/CartoDB/Windshaft-cartodb/blob/master/docs/guides/03-anonymous-maps.md) through CARTO's map configuration ([`MapConfig`](https://github.com/CartoDB/Windshaft/blob/master/doc/MapConfig-specification.md)).
* Create [`Named Maps`](https://github.com/CartoDB/Windshaft-cartodb/blob/master/docs/guides/04-named-maps.md) based on customizable templates.
* Get map previews through [`Static Maps`](https://github.com/CartoDB/Windshaft-cartodb/blob/master/docs/guides/05-static-maps-API.md) API.
* Render maps with a large amount of data faster using [`Tile Aggregation`](https://github.com/CartoDB/Windshaft-cartodb/blob/master/docs/guides/06-tile-aggregation.md).
* Build advanced maps with enriched data through [`Analyses Extension`](https://github.com/CartoDB/Windshaft-cartodb/blob/master/docs/guides/09-MapConfig-analyses-extension.md).
* Fetch tabular data from analysis nodes with [`Dataviews`](https://github.com/CartoDB/Windshaft-cartodb/blob/master/docs/guides/10-MapConfig-dataviews-extension.md)

* reads dbname from subdomain and cartodb redis for pretty tile urls
* configures windshaft to publish `cartodb_id` as the interactivity layer
* gets the default geometry type from the cartodb redis store
* allows tiles to be styled individually
* provides a link to varnish high speed cache
* provides a [template maps API](https://github.com/CartoDB/Windshaft-cartodb/blob/master/docs/Template-maps.md)
## Build

Install
-------
See [INSTALL.md](INSTALL.md) for detailed installation instructions.
Requirements:

Configure
---------
* [`Node 12.x `](https://nodejs.org/dist/latest-v10.x/)
* [`PostgreSQL >= 11.0`](https://www.postgresql.org/download/)
* [`PostGIS >= 2.4`](https://postgis.net/install/)
* [`CARTO Postgres Extension >= 0.24.1`](https://github.com/CartoDB/cartodb-postgresql)
* [`Redis >= 4`](https://redis.io/download)
* `libcairo2-dev`, `libpango1.0-dev`, `libjpeg8-dev` and `libgif-dev` for server side canvas support
* `C++11` to build internal dependencies. When there's no pre-built binaries for your OS/architecture distribution.

Create the config/environments/<env>.js files (there are .example files
to start from). You can optionally use the ./configure script for this,
see ```./configure --help``` to see available options.
Optional:

Look at lib/cartodb/server_options.js for more on config
* [`Varnish`](http://www.varnish-cache.org)
* [`Statsd`](https://github.com/statsd/statsd)

Upgrading
---------
### PostGIS setup

Checkout your commit/branch. If you need to reinstall dependencies (you can check [NEWS](NEWS.md)) do the following:
A `template_postgis` database is expected. One can be set up with

```sh
$ rm -rf node_modules
```shell
$ createdb --owner postgres --template template0 template_postgis
$ psql -d template_postgis -c 'CREATE EXTENSION postgis;'
```

### Install

To fetch and build all node-based dependencies, run:

```shell
$ npm install
```

```
node app.js <env>
```
### Run

Where <env> is the name of a configuration file under config/environments/.
You can inject the configuration through environment variables at run time. Check the file `./config/environments/config.js` to see the ones you have available.

Note that caches are kept in redis. If you're not seeing what you expect
there may be out-of-sync records in there.
Take a look: http://redis.io/commands
During the migration to the new environment based configuration, you can still use the old method of copying a config file. To enable the one with environment variables you need to pass `CARTO_WINDSHAFT_ENV_BASED_CONF=true`. You can use the docker image to run it.


Documentation
-------------

The [docs directory](https://github.com/CartoDB/Windshaft-cartodb/tree/master/docs) contains different documentation
resources, from higher level to more detailed ones:
The [Maps API](https://github.com/CartoDB/Windshaft-cartodb/blob/master/docs/Map-API.md) defined the endpoints and their
expected parameters and outputs.


Examples
--------

[CartoDB's Map Gallery](http://cartodb.com/gallery/) showcases several examples of visualisations built on top of this.

Contributing
---

See [CONTRIBUTING.md](CONTRIBUTING.md).

### Developing with a custom windshaft version

If you plan or want to use a custom / not released yet version of windshaft (or any other dependency) the best option is
to use `npm link`. You can read more about it at [npm-link: Symlink a package folder](https://docs.npmjs.com/cli/link.html).

**Quick start**:
Old way:

```shell
~/windshaft-directory $ npm install
~/windshaft-directory $ npm link
~/windshaft-cartodb-directory $ npm link windshaft
$ node app.js <env>
```

Where `<env>` is the name of a configuration file under `./config/environments/`.

### Test

You can easily run the tests against the dependencies from the `dev-env`. To do so, you need to build the test docker image:

```shell
$ docker-compose build
```

Then you can run the tests like:

```shell
$ docker-compose run windshaft-tests
```

It will mount your code inside a volume. In case you want to play and run `npm test` or something else you can do:

```shell
$ docker-compose run --entrypoint bash windshaft-tests
```

So you will have a bash shell inside the test container, with the code from your host.

### Coverage

```shell
$ npm run cover
```

Open `./coverage/lcov-report/index.html`.

### Docker support

We provide docker images just for testing and continuous integration purposes:

* [`nodejs-xenial-pg1121`](https://hub.docker.com/r/carto/nodejs-xenial-pg1121/tags)
* [`nodejs-xenial-pg101`](https://hub.docker.com/r/carto/nodejs-xenial-pg101/tags)

You can find instructions to install Docker, download, and update images [here](https://github.com/CartoDB/Windshaft-cartodb/blob/master/docker/reference.md).

### Useful `npm` scripts

Run test in a docker image with a specific Node.js version:

```shell
$ DOCKER_IMAGE=<docker-image-tag> NODE_VERSION=<nodejs-version> npm run test:docker
```

Where:

* `<docker-image-tag>`: the tag of required docker image, e.g. `carto/nodejs-xenial-pg1121:latest`
* `<nodejs-version>`: the Node.js version, e.g. `10.15.1`

In case you need to debug:

```shell
$ DOCKER_IMAGE=<docker-image-tag> npm run docker:bash
```

## Documentation

You can find an overview, guides, full reference, and support in [`CARTO's developer center`](https://carto.com/developers/maps-api/). The [docs directory](https://github.com/CartoDB/Windshaft-cartodb/tree/master/docs) contains different documentation resources, from a higher level to more detailed ones.

## Contributing

* The issue tracker: [`Github`](https://github.com/CartoDB/Windshaft-cartodb/issues).
* We love Pull Requests from everyone, see [contributing to Open Source on GitHub](https://guides.github.com/activities/contributing-to-open-source/#contributing).
* You'll need to sign a Contributor License Agreement (CLA) before submitting a Pull Request. [Learn more here](https://carto.com/contributions).

## Developing with a custom `Windshaft` version

If you plan or want to use a custom / not released yet version of windshaft (or any other dependency), the best option is to use `npm link`. You can read more about it at `npm-link`: [symlink a package folder](https://docs.npmjs.com/cli/link.html).

```shell
$ cd /path/to/Windshaft
$ npm install
$ npm link
$ cd /path/to/Windshaft-cartodb
$ npm link windshaft
```

## Versioning

We follow [`SemVer`](http://semver.org/) for versioning. For available versions, see the [tags on this repository](https://github.com/CartoDB/Windshaft-cartodb/tags).

## License

This project is licensed under the BSD 3-clause "New" or "Revised" License. See the [LICENSE](LICENSE) file for details.
app.js (256 changes)
@@ -1,132 +1,93 @@
'use strict';

var http = require('http');
var https = require('https');
var path = require('path');
var fs = require('fs');
var _ = require('underscore');
var semver = require('semver');
const setICUEnvVariable = require('./lib/cartodb/utils/icu_data_env_setter');
const http = require('http');
const https = require('https');
const path = require('path');
const semver = require('semver');

// jshint undef:false
var log = console.log.bind(console);
var logError = console.error.bind(console);
// jshint undef:true

var nodejsVersion = process.versions.node;
const { engines } = require('./package.json');
if (!semver.satisfies(nodejsVersion, engines.node)) {
logError(`Node version ${nodejsVersion} is not supported, please use Node.js ${engines.node}.`);
process.exit(1);
}
// TODO: research it it's still needed
const setICUEnvVariable = require('./lib/utils/icu-data-env-setter');

// This function should be called before the require('yargs').
setICUEnvVariable();

var argv = require('yargs')
.usage('Usage: $0 <environment> [options]')
const argv = require('yargs')
.usage('Usage: node $0 <environment> [options]')
.help('h')
.example(
'$0 production -c /etc/sql-api/config.js',
'start server in production environment with /etc/sql-api/config.js as config file'
)
'node $0 production -c /etc/windshaft-cartodb/config.js',
'start server in production environment with /etc/windshaft-cartodb/config.js as config file'
)
.alias('h', 'help')
.alias('c', 'config')
.nargs('c', 1)
.describe('c', 'Load configuration from path')
.argv;

var environmentArg = argv._[0] || process.env.NODE_ENV || 'development';
var configurationFile = path.resolve(argv.config || './config/environments/' + environmentArg + '.js');
if (!fs.existsSync(configurationFile)) {
logError('Configuration file "%s" does not exist', configurationFile);
process.exit(1);
const environmentArg = argv._[0] || process.env.NODE_ENV || 'development';
let configFileName = environmentArg;
if (process.env.CARTO_WINDSHAFT_ENV_BASED_CONF) {
// we override the file with the one with env vars
configFileName = 'config';
}
const configurationFile = path.resolve(argv.config || `./config/environments/${configFileName}.js`);

global.environment = require(configurationFile);
var ENVIRONMENT = argv._[0] || process.env.NODE_ENV || global.environment.environment;
process.env.NODE_ENV = ENVIRONMENT;
process.env.NODE_ENV = argv._[0] || process.env.NODE_ENV || global.environment.environment;

var availableEnvironments = {
production: true,
staging: true,
development: true
};

// sanity check
if (!availableEnvironments[ENVIRONMENT]){
logError('node app.js [environment]');
logError('environments: %s', Object.keys(availableEnvironments).join(', '));
process.exit(1);
}

process.env.NODE_ENV = ENVIRONMENT;
if (global.environment.uv_threadpool_size) {
process.env.UV_THREADPOOL_SIZE = global.environment.uv_threadpool_size;
}

// set global HTTP and HTTPS agent default configurations
// ref https://nodejs.org/api/http.html#http_new_agent_options
var agentOptions = _.defaults(global.environment.httpAgent || {}, {
const agentOptions = Object.assign({
keepAlive: false,
keepAliveMsecs: 1000,
maxSockets: Infinity,
maxFreeSockets: 256
});
}, global.environment.httpAgent || {});

http.globalAgent = new http.Agent(agentOptions);
https.globalAgent = new https.Agent(agentOptions);


global.log4js = require('log4js');
var log4jsConfig = {
appenders: [],
replaceConsole: true
};

if ( global.environment.log_filename ) {
var logFilename = path.resolve(global.environment.log_filename);
var logDirectory = path.dirname(logFilename);
if (!fs.existsSync(logDirectory)) {
logError("Log filename directory does not exist: " + logDirectory);
process.exit(1);
}
log("Logs will be written to " + logFilename);
log4jsConfig.appenders.push(
{ type: "file", absolute: true, filename: logFilename }
);
} else {
log4jsConfig.appenders.push(
{ type: "console", layout: { type:'basic' } }
);
}

global.log4js.configure(log4jsConfig);
global.logger = global.log4js.getLogger();

// Include cartodb_windshaft only _after_ the "global" variable is set
// See https://github.com/Vizzuality/Windshaft-cartodb/issues/28
var cartodbWindshaft = require('./lib/cartodb/server');
var serverOptions = require('./lib/cartodb/server_options');
const createServer = require('./lib/server');
const serverOptions = require('./lib/server-options');
const { logger } = serverOptions;

var server = cartodbWindshaft(serverOptions);
const availableEnvironments = {
production: true,
staging: true,
development: true
};

// Maximum number of connections for one process
// 128 is a good number if you have up to 1024 filedescriptors
// 4 is good if you have max 32 filedescriptors
// 1 is good if you have max 16 filedescriptors
var backlog = global.environment.maxConnections || 128;
if (!availableEnvironments[process.env.NODE_ENV]) {
logger.fatal(new Error(`Invalid environment ${process.env.NODE_ENV} argument, valid ones: ${Object.keys(availableEnvironments).join(', ')}`));
process.exit(1);
}

var listener = server.listen(serverOptions.bind.port, serverOptions.bind.host, backlog);
const { engines } = require('./package.json');
if (!semver.satisfies(process.versions.node, engines.node)) {
logger.fatal(new Error(`Node version ${process.versions.node} is not supported, please use Node.js ${engines.node}.`));
process.exit(1);
}

var version = require("./package").version;
const server = createServer(serverOptions);

listener.on('listening', function() {
log("Using Node.js %s", process.version);
log('Using configuration file "%s"', configurationFile);
log(
"Windshaft tileserver %s started on %s:%s PID=%d (%s)",
version, serverOptions.bind.host, serverOptions.bind.port, process.pid, ENVIRONMENT
);
// Specify the maximum length of the queue of pending connections for the HTTP server.
// The actual length will be determined by the OS through sysctl settings such as tcp_max_syn_backlog and somaxconn on Linux.
// The default value of this parameter is 511 (not 512).
// See: https://nodejs.org/docs/latest/api/net.html#net_server_listen
const backlog = global.environment.maxConnections || 128;

const listener = server.listen(serverOptions.bind.port, serverOptions.bind.host, backlog);
const { version, name } = require('./package');

listener.on('listening', function () {
const { address, port } = listener.address();
logger.info({ 'Node.js': process.version, pid: process.pid, environment: process.env.NODE_ENV, [name]: version, address, port, config: configurationFile }, `${name} initialized successfully`);
});

function getCPUUsage (oldUsage) {

@@ -161,32 +122,24 @@ setInterval(function cpuUsageMetrics () {
});

previousCPUUsage = CPUUsage;
}, 5000);
}, 5000).unref();

setInterval(function() {
setInterval(function () {
var memoryUsage = process.memoryUsage();
Object.keys(memoryUsage).forEach(function(k) {
Object.keys(memoryUsage).forEach(function (k) {
global.statsClient.gauge('windshaft.memory.' + k, memoryUsage[k]);
});
}, 5000);

process.on('SIGHUP', function() {
global.log4js.clearAndShutdownAppenders(function() {
global.log4js.configure(log4jsConfig);
global.logger = global.log4js.getLogger();
log('Log files reloaded');
});
});
}, 5000).unref();

if (global.gc) {
var gcInterval = Number.isFinite(global.environment.gc_interval) ?
global.environment.gc_interval :
10000;
var gcInterval = Number.isFinite(global.environment.gc_interval)
? global.environment.gc_interval
: 10000;

if (gcInterval > 0) {
setInterval(function gcForcedCycle() {
setInterval(function gcForcedCycle () {
global.gc();
}, gcInterval);
}, gcInterval).unref();
}
}

@@ -206,64 +159,59 @@ function getGCTypeValue (type) {
let value;

switch (type) {
case 1:
value = 'Scavenge';
break;
case 2:
value = 'MarkSweepCompact';
break;
case 4:
value = 'IncrementalMarking';
break;
case 8:
value = 'ProcessWeakCallbacks';
break;
case 15:
value = 'All';
break;
default:
value = 'Unkown';
break;
case 1:
value = 'Scavenge';
break;
case 2:
value = 'MarkSweepCompact';
break;
case 4:
value = 'IncrementalMarking';
break;
case 8:
value = 'ProcessWeakCallbacks';
break;
case 15:
value = 'All';
break;
default:
value = 'Unkown';
break;
}

return value;
}

addHandlers(listener, global.logger, 45000);
const exitProcess = logger.finish((err, finalLogger, listener, signal, killTimeout) => {
scheduleForcedExit(killTimeout, finalLogger);

function addHandlers(listener, logger, killTimeout) {
process.on('uncaughtException', exitProcess(listener, logger, killTimeout));
process.on('unhandledRejection', exitProcess(listener, logger, killTimeout));
process.on('ENOMEM', exitProcess(listener, logger, killTimeout));
process.on('SIGINT', exitProcess(listener, logger, killTimeout));
process.on('SIGTERM', exitProcess(listener, logger, killTimeout));
finalLogger.info(`Process has received signal: ${signal}`);

let code = 0;

if (err) {
code = 1;
finalLogger.fatal(err);
}

finalLogger.info(`Process is going to exit with code: ${code}`);
listener.close(() => process.exit(code));
});

function addHandlers (listener, killTimeout) {
process.on('uncaughtException', (err) => exitProcess(err, listener, 'uncaughtException', killTimeout));
process.on('unhandledRejection', (err) => exitProcess(err, listener, 'unhandledRejection', killTimeout));
process.on('ENOMEM', (err) => exitProcess(err, listener, 'ENOMEM', killTimeout));
process.on('SIGINT', () => exitProcess(null, listener, 'SIGINT', killTimeout));
process.on('SIGTERM', () => exitProcess(null, listener, 'SIGTERM', killTimeout));
}

function exitProcess (listener, logger, killTimeout) {
return function exitProcessFn (signal) {
scheduleForcedExit(killTimeout, logger);
addHandlers(listener, 45000);

let code = 0;

if (!['SIGINT', 'SIGTERM'].includes(signal)) {
const err = signal instanceof Error ? signal : new Error(signal);
signal = undefined;
code = 1;

logger.fatal(err);
} else {
logger.info(`Process has received signal: ${signal}`);
}

logger.info(`Process is going to exit with code: ${code}`);
listener.close(() => global.log4js.shutdown(() => process.exit(code)));
};
}

function scheduleForcedExit (killTimeout, logger) {
function scheduleForcedExit (killTimeout, finalLogger) {
// Schedule exit if there is still ongoing work to deal with
const killTimer = setTimeout(() => {
logger.info('Process didn\'t close on time. Force exit');
finalLogger.info('Process didn\'t close on time. Force exit');
process.exit(1);
}, killTimeout);
@@ -2,8 +2,8 @@
"name": "carto_windshaft",
"current_version": {
"requires": {
"node": "^10.15.1",
"npm": "^6.4.1",
"node": "^12.16.3",
"npm": "^6.14.4",
"mapnik": "==3.0.15.16",
"crankshaft": "~0.8.1"
},
@@ -11,7 +11,7 @@
"redis": ">=4.0.0",
"postgresql": ">=10.0.0",
"postgis": ">=2.4.4.5",
"carto_postgresql_ext": ">=0.24.1"
"carto_postgresql_ext": ">=0.35.0"
}
}
}
411
config/environments/config.js
Normal file
411
config/environments/config.js
Normal file
@@ -0,0 +1,411 @@
|
||||
var config = {
|
||||
environment: process.env.CARTO_WINDSHAFT_NODE_ENV,
|
||||
port: 8181,
|
||||
host: null, // null on purpouse so it listens to whatever address docker assigns
|
||||
// Size of the threadpool which can be used to run user code and get notified in the loop thread
|
||||
// Its default size is 4, but it can be changed at startup time (the absolute maximum is 128).
|
||||
// See http://docs.libuv.org/en/latest/threadpool.html
|
||||
uv_threadpool_size: undefined,
|
||||
// Time in milliseconds to force GC cycle.
|
||||
// Disable by using <=0 value.
|
||||
gc_interval: 10000,
|
||||
// Regular expression pattern to extract username
|
||||
// from hostname. Must have a single grabbing block.
|
||||
user_from_host: process.env.CARTO_WINDSHAFT_USER_FROM_HOST || '^(.*)\\.cartodb\\.com$',
|
||||
|
||||
// Base URLs for the APIs
|
||||
//
|
||||
// See https://github.com/CartoDB/Windshaft-cartodb/wiki/Unified-Map-API
|
||||
//
|
||||
// Note: each entry corresponds to an Express router.
|
||||
// You must define at least one path. However, middlewares are optional.
|
||||
routes: {
|
||||
api: [{
|
||||
paths: [
|
||||
'/api/v1',
|
||||
'/user/:user/api/v1'
|
||||
],
|
||||
// Optional: attach middlewares at the beginning of the router
|
||||
// to perform custom operations.
|
||||
middlewares: [
|
||||
function noop () {
|
||||
return function noopMiddleware (req, res, next) {
|
||||
next();
|
||||
};
|
||||
}
|
||||
],
|
||||
// Base url for the Detached Maps API
|
||||
// "/api/v1/map" is the new API,
|
||||
map: [{
|
||||
paths: [
|
||||
'/map'
|
||||
],
|
||||
middlewares: [] // Optional
|
||||
}],
|
||||
// Base url for the Templated Maps API
|
||||
// "/api/v1/map/named" is the new API,
|
||||
template: [{
|
||||
paths: [
|
||||
'/map/named'
|
||||
],
|
||||
middlewares: [] // Optional
|
||||
}]
|
||||
}]
|
||||
},
|
||||
|
||||
// Resource URLs expose endpoints to request/retrieve metadata associated to Maps: dataviews, analysis node status.
|
||||
//
|
||||
// These URLs depend on how `routes` and `user_from_host` are configured: the application can be
|
||||
// configured to accept requests with the {user} in the Host header or in the request path.
|
||||
// It also might depend on the configured cdn_url via `serverMetadata.cdn_url`.
|
||||
//
|
||||
// This template makes endpoint generation more flexible; it exposes the following params:
|
||||
// 1. {{=it.cdn_url}}: will be used when `serverMetadata.cdn_url` exists.
|
||||
// 2. {{=it.user}}: will use the username as extracted from `user_from_host` or `routes`.
|
||||
// 3. {{=it.port}}: will use the `port` from this very same configuration file.
|
||||
resources_url_templates: {
|
||||
http: process.env.CARTO_WINDSHAFT_RESOURCE_URL_TEMPLATE_HTTP || 'http://{{=it.cdn_url}}/{{=it.user}}/api/v1/map',
|
||||
https: process.env.CARTO_WINDSHAFT_RESOURCE_URL_TEMPLATE_HTTPS || 'https://{{=it.cdn_url}}/{{=it.user}}/api/v1/map'
|
||||
},
|
||||
// Specify the maximum length of the queue of pending connections for the HTTP server.
|
||||
// The actual length will be determined by the OS through sysctl settings such as tcp_max_syn_backlog and somaxconn on Linux.
|
||||
// The default value of this parameter is 511 (not 512).
|
||||
// See: https://nodejs.org/docs/latest/api/net.html#net_server_listen
|
||||
maxConnections: 128,
|
||||
// Maximum number of templates per user. Unlimited by default.
|
||||
maxUserTemplates: 1024,
|
||||
// Seconds since "last creation" before a detached
|
||||
// or template instance map expires. Or: how long do you want
|
||||
// to be able to navigate the map without a reload ?
|
||||
// Defaults to 7200 (2 hours)
|
||||
mapConfigTTL: 7200,
|
||||
// idle socket timeout, in milliseconds
|
||||
socket_timeout: 600000,
|
||||
enable_cors: true,
|
||||
cache_enabled: true,
|
||||
// Templated database username for authorized user
|
||||
// Supported labels: 'user_id' (read from redis)
|
||||
postgres_auth_user: process.env.CARTO_WINDSHAFT_DB_USER || 'cartodb_user_<%= user_id %>',
|
||||
// Templated database password for authorized user
|
||||
// Supported labels: 'user_id', 'user_password' (both read from redis)
|
||||
postgres_auth_pass: '<%= user_password %>',
|
||||
postgres: {
|
||||
user: 'publicuser',
|
||||
password: 'public',
|
||||
host: process.env.CARTO_WINDSHAFT_POSTGRES_HOST || 'localhost',
|
||||
port: process.env.CARTO_WINDSHAFT_POSTGRES_PORT || 5432,
|
||||
pool: {
|
||||
// maximum number of resources to create at any given time
|
||||
size: 16,
|
||||
// max milliseconds a resource can go unused before it should be destroyed
|
||||
idleTimeout: 3000,
|
||||
// frequency to check for idle resources
|
||||
reapInterval: 1000
|
||||
}
|
||||
},
|
||||
mapnik_version: undefined,
|
||||
mapnik_tile_format: 'png8:m=h',
|
||||
statsd: {
|
||||
host: process.env.CARTO_WINDSHAFT_STATSD_HOST || 'localhost',
|
||||
port: 8125,
|
||||
prefix: process.env.CARTO_WINDSHAFT_STATSD_PREFIX || ':host.', // could be hostname, better not containing dots
|
||||
cacheDns: true
|
||||
// support all allowed node-statsd options
|
||||
},
|
||||
renderer: {
|
||||
// Milliseconds since last access before renderer cache item expires
|
||||
cache_ttl: 60000,
|
||||
statsInterval: 5000, // milliseconds between each report to statsd about number of renderers and mapnik pool status
|
||||
mvt: {
|
||||
// If enabled, MVTs will be generated with PostGIS directly
|
||||
// If disabled, MVTs will be generated with Mapnik MVT
|
||||
usePostGIS: true
|
||||
},
|
||||
mapnik: {
|
||||
// The size of the pool of internal mapnik backend
|
||||
// This pool size is per mapnik renderer created in Windshaft's RendererFactory
|
||||
// See https://github.com/CartoDB/Windshaft/blob/master/lib/windshaft/renderers/renderer_factory.js
|
||||
// Important: check the configuration of uv_threadpool_size to use suitable value
|
||||
poolSize: 8,
|
||||
|
||||
// The maximum number of waiting clients of the pool of internal mapnik backend
|
||||
// This maximum number is per mapnik renderer created in Windshaft's RendererFactory
|
||||
poolMaxWaitingClients: 64,
|
||||
|
||||
// Whether grainstore will use a child process or not to transform CartoCSS into Mapnik XML.
|
||||
// This will prevent blocking the main thread.
|
||||
useCartocssWorkers: false,
|
||||
|
||||
// Metatile is the number of tiles-per-side that are going
|
||||
// to be rendered at once. If all of them will be requested
|
||||
// we'd have saved time. If only one will be used, we'd have
|
||||
// wasted time.
|
||||
metatile: 2,
|
||||
|
||||
// tilelive-mapnik uses an internal cache to store tiles/grids
|
||||
// generated when using metatile. These options allow tuning
|
||||
// the behaviour for that internal cache.
|
||||
metatileCache: {
|
||||
// Time an object must stay in the cache until it is removed
|
||||
ttl: 0,
|
||||
// Whether an object must be removed after the first hit
|
||||
// Usually you want to use `true` here when ttl>0.
|
||||
deleteOnHit: false
|
||||
},
|
||||
|
||||
// Override metatile behaviour depending on the format
|
||||
formatMetatile: {
|
||||
png: 2,
|
||||
'grid.json': 1
|
||||
},
|
||||
|
||||
// Buffer size is the thickness in pixels of a buffer
|
||||
// around the rendered (meta?)tile.
|
||||
//
|
||||
// This is important for labels and other markers that overlap tile boundaries.
|
||||
// Setting to 128 ensures no render artifacts.
|
||||
// 64 may have artifacts but is faster.
|
||||
// Less important if we can turn metatiling on.
|
||||
bufferSize: 64,
|
||||
|
||||
// SQL queries will be wrapped with ST_SnapToGrid
|
||||
// Snapping all points of the geometry to a regular grid
|
||||
snapToGrid: false,
|
||||
|
||||
// SQL queries will be wrapped with ST_ClipByBox2D
|
||||
// Returning the portion of a geometry falling within a rectangle
|
||||
// It will only work if snapToGrid is enabled
|
||||
clipByBox2d: true,
|
||||
|
||||
postgis: {
|
||||
// Parameters to pass to datasource plugin of mapnik
|
||||
// See http://github.com/mapnik/mapnik/wiki/PostGIS
|
||||
user: 'publicuser',
|
||||
password: 'public',
|
||||
host: process.env.CARTO_WINDSHAFT_POSTGRES_HOST || '127.0.0.1',
|
||||
port: process.env.CARTO_WINDSHAFT_POSTGRES_PORT || 5432,
|
||||
extent: '-20037508.3,-20037508.3,20037508.3,20037508.3',
|
||||
// max number of rows to return when querying data, 0 means no limit
|
||||
row_limit: 65535,
|
||||
/*
|
||||
* Set persist_connection to false if you want
|
||||
* database connections to be closed on renderer
|
||||
* expiration (1 minute after last use).
|
||||
* Setting to true (the default) would never
|
||||
* close any connection for the server's lifetime
|
||||
*/
|
||||
persist_connection: false,
|
||||
simplify_geometries: true,
|
||||
use_overviews: true, // use overviews to retrieve raster
|
||||
max_size: 500,
|
||||
twkb_encoding: true
|
||||
},
|
||||
|
||||
limits: {
|
||||
// Time in milliseconds a render request can take before it fails, some notes:
|
||||
// - 0 means no render limit
|
||||
// - it considers metatiling, naive implementation: (render timeout) * (number of tiles in metatile)
|
||||
render: 0,
|
||||
// As the render request will finish even if timed out, whether it should be placed in the internal
|
||||
// cache or it should be fully discarded. When placed in the internal cache another attempt to retrieve
|
||||
// the same tile will result in an immediate response; however, that will use a lot more application
|
||||
// memory. If we want to enforce this behaviour we have to implement a cache eviction policy for the
|
||||
// internal cache.
|
||||
cacheOnTimeout: true
|
||||
},
|
||||
|
||||
// If enabled Mapnik will reuse the features retrieved from the database
|
||||
// instead of requesting them once per style inside a layer
|
||||
'cache-features': true,
|
||||
|
||||
// Whether to request metrics from the renderer
|
||||
metrics: false,
|
||||
|
||||
// Options for markers attributes, ellipses and images caches
|
||||
markers_symbolizer_caches: {
|
||||
disabled: false
|
||||
}
|
||||
},
|
||||
http: {
|
||||
timeout: 2000, // the timeout in ms for a http tile request
|
||||
proxy: undefined, // the url for a proxy server
|
||||
whitelist: [ // the whitelist of urlTemplates that can be used
|
||||
'.*', // will enable any URL
|
||||
'http://{s}.example.com/{z}/{x}/{y}.png'
|
||||
],
|
||||
// image to use as placeholder when urlTemplate is not in the whitelist
|
||||
// if provided, the http renderer will use it instead of throwing an error
|
||||
fallbackImage: {
|
||||
type: 'fs', // 'fs' and 'url' supported
|
||||
src: __dirname + '/../../assets/default-placeholder.png'
|
||||
}
|
||||
},
|
||||
torque: {}
|
||||
},
|
||||
// anything analyses related
|
||||
analysis: {
|
||||
// batch configuration
|
||||
batch: {
|
||||
// Inline execution avoids using the SQL API as the batch endpoint
|
||||
// When set to true it will run all analysis queries in series, with a direct connection to the DB
|
||||
// This might be useful for:
|
||||
// - testing
|
||||
// - running a standalone server without any dependency on external services
|
||||
inlineExecution: false,
|
||||
// where the SQL API is running, it will use a custom Host header to specify the username.
|
||||
endpoint: 'http://127.0.0.1:8080/api/v2/sql/job',
|
||||
// the template to use for adding the host header in the batch api requests
|
||||
hostHeaderTemplate: '{{=it.username}}.localhost.lan'
|
||||
},
|
||||
// Define max execution time in ms for analyses or tags
|
||||
// If the analysis or tag is not found in redis, these values will be used as defaults.
|
||||
limits: {
|
||||
moran: { timeout: 120000, maxNumberOfRows: 1e5 },
|
||||
cpu2x: { timeout: 60000 }
|
||||
}
|
||||
},
|
||||
millstone: {
|
||||
// Needs to be writable by server user
|
||||
cache_basedir: process.env.CARTO_WINDSHAFT_TILE_CACHE || '/home/ubuntu/tile_assets/'
|
||||
},
|
||||
redis: {
|
||||
host: process.env.CARTO_WINDSHAFT_REDIS_HOST || '127.0.0.1',
|
||||
port: process.env.CARTO_WINDSHAFT_REDIS_PORT || 6379,
|
||||
// Max number of connections in each pool.
|
||||
// Users will be put on a queue when the limit is hit.
|
||||
// Set to maxConnection to have no possible queues.
|
||||
// There are currently 2 pools involved in serving
|
||||
// windshaft-cartodb requests so multiply this number
|
||||
// by 2 to know how many possible connections will be
|
||||
// kept open by the server. The default is 50.
|
||||
max: 50,
|
||||
returnToHead: true, // defines the behaviour of the pool: false => queue, true => stack
|
||||
idleTimeoutMillis: 30000, // idle time before dropping connection
|
||||
reapIntervalMillis: 1000, // time between cleanups
|
||||
slowQueries: {
|
||||
log: true,
|
||||
elapsedThreshold: 200
|
||||
},
|
||||
slowPool: {
|
||||
log: true, // whether a slow acquire must be logged or not
|
||||
elapsedThreshold: 25 // the threshold to determine whether a slow acquire must be reported or not
|
||||
},
|
||||
emitter: {
|
||||
statusInterval: 5000 // time, in ms, between status reports emitted from the pool; status is sent to statsd
|
||||
},
|
||||
unwatchOnRelease: false, // Send unwatch on release, see http://github.com/CartoDB/Windshaft-cartodb/issues/161
|
||||
noReadyCheck: true // Check `no_ready_check` at https://github.com/mranney/node_redis/tree/v0.12.1#overloading
|
||||
},
|
||||
// For more details about this options check https://nodejs.org/api/http.html#http_new_agent_options
|
||||
httpAgent: {
|
||||
keepAlive: true,
|
||||
keepAliveMsecs: 1000,
|
||||
maxSockets: 25,
|
||||
maxFreeSockets: 256
|
||||
},
|
||||
varnish: {
|
||||
host: process.env.CARTO_WINDSHAFT_VARNISH_PORT || 'localhost',
|
||||
port: process.env.CARTO_WINDSHAFT_VARNISH_PORT || 6082, // the port for the telnet interface where varnish is listening
|
||||
http_port: 6081, // the port for the HTTP interface where varnish is listening to
|
||||
purge_enabled: process.env.CARTO_WINDSHAFT_VARNISH_PURGE_ENABLED === 'true' || false, // whether the purge/invalidation mechanism is enabled in varnish or not
|
||||
secret: 'xxx',
|
||||
ttl: 86400,
|
||||
fallbackTtl: 300,
|
||||
layergroupTtl: 86400 // the max-age for cache-control header in layergroup responses
|
||||
},
|
||||
// this [OPTIONAL] configuration enables invalidating by surrogate key in fastly
|
||||
fastly: {
|
||||
// whether the invalidation is enabled or not
|
||||
enabled: false,
|
||||
// the fastly api key
|
||||
apiKey: 'wadus_api_key',
|
||||
// the service that will get surrogate key invalidation
|
||||
serviceId: 'wadus_service_id'
|
||||
},
|
||||
// If useProfiler is true every response will be served with an
|
||||
// X-Tiler-Profile header containing elapsed timing for various
|
||||
// steps taken for producing the response.
|
||||
useProfiler: false,
|
||||
serverMetadata: {
|
||||
cdn_url: {
|
||||
http: process.env.CARTO_WINDSHAFT_SERVER_CDN_URL_HTTP === 'undefined' ? undefined : process.env.CARTO_WINDSHAFT_SERVER_CDN_URL_HTTP || 'api.cartocdn.com',
|
||||
https: process.env.CARTO_WINDSHAFT_SERVER_CDN_URL_HTTPS === 'undefined' ? undefined : process.env.CARTO_WINDSHAFT_SERVER_CDN_URL_HTTPS || 'cartocdn.global.ssl.fastly.net'
|
||||
}
|
||||
},
|
||||
// Settings for the health check available at /health
|
||||
health: {
|
||||
enabled: process.env.CARTO_WINDSHAFT_HEALTH_ENABLED === 'true' || false,
|
||||
username: 'localhost',
|
||||
z: 0,
|
||||
x: 0,
|
||||
y: 0
|
||||
},
|
||||
disabled_file: 'pids/disabled',
|
||||
|
||||
// Use this as a feature flags enabling/disabling mechanism
|
||||
enabledFeatures: {
|
||||
// whether it should intercept tile render errors and act based on them; enabled by default.
|
||||
onTileErrorStrategy: false,
|
||||
// whether the affected tables for a given SQL query should be looked up directly in postgresql or through the SQL API
|
||||
cdbQueryTablesFromPostgres: true,
|
||||
// whether stats & metadata for each layer are available in the mapconfig
|
||||
layerStats: process.env.CARTO_WINDSHAFT_LAYERSTATS_ENABLED === 'true' || false,
|
||||
// whether it should rate limit endpoints (global configuration)
|
||||
rateLimitsEnabled: false,
|
||||
// whether it should rate limit one or more endpoints (only if rateLimitsEnabled = true)
|
||||
rateLimitsByEndpoint: {
|
||||
anonymous: false,
|
||||
static: false,
|
||||
static_named: false,
|
||||
dataview: false,
|
||||
dataview_search: false,
|
||||
analysis: false,
|
||||
analysis_catalog: false,
|
||||
tile: false,
|
||||
attributes: false,
|
||||
named_list: false,
|
||||
named_create: false,
|
||||
named_get: false,
|
||||
named: false,
|
||||
named_update: false,
|
||||
named_delete: false,
|
||||
named_tiles: false
|
||||
}
|
||||
},
|
||||
pubSubMetrics: {
|
||||
enabled: process.env.CARTO_WINDSHAFT_METRICS_ENABLED === 'true' || false,
|
||||
project_id: process.env.CARTO_WINDSHAFT_METRICS_PROJECT_ID || 'avid-wavelet-844',
|
||||
credentials: '',
|
||||
topic: process.env.CARTO_WINDSHAFT_METRICS_PROJECT_ID || 'raw-metric-events'
|
||||
}
|
||||
};
|
||||
|
||||
// override some defaults for tests
|
||||
if (process.env.NODE_ENV === 'test') {
|
||||
config.user_from_host = '(.*)';
|
||||
config.postgres_auth_pass = 'test_windshaft_cartodb_user_<%= user_id %>_pass';
|
||||
config.millstone.cache_basedir = '/tmp/tile_assets';
|
||||
config.postgres.user = 'test_windshaft_publicuser';
|
||||
config.resources_url_templates = {
|
||||
http: 'http://{{=it.user}}.localhost.lan:{{=it.port}}/api/v1/map',
|
||||
https: 'https://{{=it.user}}.localhost.lan:{{=it.port}}/api/v1/map'
|
||||
};
|
||||
config.cache_enabled = false;
|
||||
config.postgres_auth_user = 'test_windshaft_cartodb_user_<%= user_id %>';
|
||||
config.renderer.mapnik.postgis.twkb_encoding = false;
|
||||
config.renderer.mapnik['cache-features'] = false;
|
||||
config.renderer.http.whitelist = [ // the whitelist of urlTemplates that can be used
|
||||
'.*', // will enable any URL
|
||||
'http://{s}.example.com/{z}/{x}/{y}.png',
|
||||
// for testing purposes
|
||||
'http://{s}.basemaps.cartocdn.com/dark_nolabels/{z}/{x}/{y}.png'
|
||||
];
|
||||
config.analysis.batch.inlineExecution = true;
|
||||
config.redis.idleTimeoutMillis = 1;
|
||||
config.redis.reapIntervalMillis = 1;
|
||||
config.varnish.purge_enabled = false;
|
||||
config.health.enabled = false;
|
||||
config.enabledFeatures.layerStats = true;
|
||||
}
|
||||
|
||||
module.exports = config;
|
||||
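The `resources_url_templates` values above use doT-style `{{=it.*}}` placeholders, fed with the `cdn_url`, `user` and `port` parameters described in the comments. A hedged sketch of how such a template resolves, assuming the doT engine (the renderer actually used by the application is not shown in this diff):

```js
const doT = require('dot'); // assumption: any {{=it.x}}-compatible engine behaves the same way

const render = doT.template('https://{{=it.cdn_url}}/{{=it.user}}/api/v1/map');

// With serverMetadata.cdn_url present and a username extracted via user_from_host:
console.log(render({ cdn_url: 'cartocdn.global.ssl.fastly.net', user: 'alice' }));
// -> https://cartocdn.global.ssl.fastly.net/alice/api/v1/map
```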
@@ -16,47 +16,41 @@ var config = {
|
||||
// Base URLs for the APIs
|
||||
//
|
||||
// See https://github.com/CartoDB/Windshaft-cartodb/wiki/Unified-Map-API
|
||||
//
|
||||
// Note: each entry corresponds with an express' router.
|
||||
// You must define at least one path. However, middlewares are optional.
|
||||
,routes: {
|
||||
v1: {
|
||||
api: [{
|
||||
paths: [
|
||||
'/api/v1',
|
||||
'/user/:user/api/v1',
|
||||
],
|
||||
// Base url for the Detached Maps API
|
||||
// "/api/v1/map" is the new API,
|
||||
map: {
|
||||
paths: [
|
||||
'/map',
|
||||
]
|
||||
},
|
||||
// Base url for the Templated Maps API
|
||||
// "/api/v1/map/named" is the new API,
|
||||
template: {
|
||||
paths: [
|
||||
'/map/named'
|
||||
]
|
||||
}
|
||||
},
|
||||
// For compatibility with versions up to 1.6.x
|
||||
v0: {
|
||||
paths: [
|
||||
'/tiles'
|
||||
// Optional: attach middlewares at the beginning of the router
|
||||
// to perform custom operations.
|
||||
middlewares: [
|
||||
function noop () {
|
||||
return function noopMiddleware (req, res, next) {
|
||||
next();
|
||||
}
|
||||
}
|
||||
],
|
||||
// Base url for the Detached Maps API
|
||||
// "/tiles/layergroup" is for compatibility with versions up to 1.6.x
|
||||
map: {
|
||||
// "/api/v1/map" is the new API,
|
||||
map: [{
|
||||
paths: [
|
||||
'/layergroup'
|
||||
]
|
||||
},
|
||||
'/map',
|
||||
],
|
||||
middlewares: [] // Optional
|
||||
}],
|
||||
// Base url for the Templated Maps API
|
||||
// "/tiles/template" is for compatibility with versions up to 1.6.x
|
||||
template: {
|
||||
// "/api/v1/map/named" is the new API,
|
||||
template: [{
|
||||
paths: [
|
||||
'/template'
|
||||
]
|
||||
}
|
||||
}
|
||||
'/map/named'
|
||||
],
|
||||
middlewares: [] // Optional
|
||||
}]
|
||||
}]
|
||||
}
|
||||
|
||||
// Resource URLs expose endpoints to request/retrieve metadata associated to Maps: dataviews, analysis node status.
|
||||
@@ -73,9 +67,10 @@ var config = {
|
||||
http: 'http://{{=it.user}}.localhost.lan:{{=it.port}}/api/v1/map',
|
||||
https: 'http://localhost.lan:{{=it.port}}/user/{{=it.user}}/api/v1/map'
|
||||
}
|
||||
|
||||
// Maximum number of connections for one process
|
||||
// 128 is a good value with a limit of 1024 open file descriptors
|
||||
// Specify the maximum length of the queue of pending connections for the HTTP server.
|
||||
// The actual length will be determined by the OS through sysctl settings such as tcp_max_syn_backlog and somaxconn on Linux.
|
||||
// The default value of this parameter is 511 (not 512).
|
||||
// See: https://nodejs.org/docs/latest/api/net.html#net_server_listen
|
||||
,maxConnections:128
|
||||
// Maximum number of templates per user. Unlimited by default.
|
||||
,maxUserTemplates:1024
|
||||
@@ -88,11 +83,6 @@ var config = {
|
||||
,socket_timeout: 600000
|
||||
,enable_cors: true
|
||||
,cache_enabled: true
|
||||
,log_format: ':req[X-Real-IP] :method :req[Host]:url :status :response-time ms -> :res[Content-Type] (:res[X-Tiler-Profiler]) (:res[X-Tiler-Errors])'
|
||||
// If log_filename is given logs will be written
|
||||
// there, in append mode. Otherwise stdout is used (default).
|
||||
// Log file will be re-opened on receiving the HUP signal
|
||||
,log_filename: 'logs/node-windshaft.log'
|
||||
// Templated database username for authorized user
|
||||
// Supported labels: 'user_id' (read from redis)
|
||||
,postgres_auth_user: 'development_cartodb_user_<%= user_id %>'
|
||||
@@ -267,12 +257,6 @@ var config = {
|
||||
// the template to use for adding the host header in the batch api requests
|
||||
hostHeaderTemplate: '{{=it.username}}.localhost.lan'
|
||||
},
|
||||
logger: {
|
||||
// If filename is given, logs coming from the analysis client will be written
|
||||
// there, in append mode. Otherwise 'log_filename' is used. Otherwise stdout is used (default).
|
||||
// Log file will be re-opened on receiving the HUP signal
|
||||
filename: 'logs/node-windshaft-analysis.log'
|
||||
},
|
||||
// Define max execution time in ms for analyses or tags
|
||||
// If the analysis or tag is not found in redis, these values will be used as defaults.
|
||||
limits: {
|
||||
@@ -388,6 +372,12 @@ var config = {
|
||||
named_tiles: false
|
||||
}
|
||||
}
|
||||
,pubSubMetrics: {
|
||||
enabled: false,
|
||||
project_id: '',
|
||||
credentials: '',
|
||||
topic: ''
|
||||
}
|
||||
};
|
||||
|
||||
module.exports = config;
|
||||
|
||||
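The hunks above replace the old object-style `map`/`template` routes with arrays of `{ paths, middlewares }` entries. A minimal sketch of how an entry of that shape can be mounted with Express (illustrative only; the real mounting code lives in the server factory, which is not part of this diff):

```js
const express = require('express');

// Same shape as the new-style route entries introduced in the hunk above.
const entry = { paths: ['/map', '/layergroup'], middlewares: [] };

const app = express();
const router = express.Router();

// Each configured middleware is a factory returning an express middleware (see the noop example above).
entry.middlewares.forEach((middlewareFactory) => router.use(middlewareFactory()));
router.get('/:token', (req, res) => res.json({ token: req.params.token })); // placeholder handler

// The same router is mounted once per configured path.
entry.paths.forEach((path) => app.use(path, router));

app.listen(8181);
```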
@@ -16,47 +16,41 @@ var config = {
|
||||
// Base URLs for the APIs
|
||||
//
|
||||
// See https://github.com/CartoDB/Windshaft-cartodb/wiki/Unified-Map-API
|
||||
//
|
||||
// Note: each entry corresponds with an express' router.
|
||||
// You must define at least one path. However, middlewares are optional.
|
||||
,routes: {
|
||||
v1: {
|
||||
api: [{
|
||||
paths: [
|
||||
'/api/v1',
|
||||
'/user/:user/api/v1',
|
||||
],
|
||||
// Base url for the Detached Maps API
|
||||
// "/api/v1/map" is the new API,
|
||||
map: {
|
||||
paths: [
|
||||
'/map',
|
||||
]
|
||||
},
|
||||
// Base url for the Templated Maps API
|
||||
// "/api/v1/map/named" is the new API,
|
||||
template: {
|
||||
paths: [
|
||||
'/map/named'
|
||||
]
|
||||
}
|
||||
},
|
||||
// For compatibility with versions up to 1.6.x
|
||||
v0: {
|
||||
paths: [
|
||||
'/tiles'
|
||||
// Optional: attach middlewares at the beginning of the router
|
||||
// to perform custom operations.
|
||||
middlewares: [
|
||||
function noop () {
|
||||
return function noopMiddleware (req, res, next) {
|
||||
next();
|
||||
}
|
||||
}
|
||||
],
|
||||
// Base url for the Detached Maps API
|
||||
// "/tiles/layergroup" is for compatibility with versions up to 1.6.x
|
||||
map: {
|
||||
// "/api/v1/map" is the new API,
|
||||
map: [{
|
||||
paths: [
|
||||
'/layergroup'
|
||||
]
|
||||
},
|
||||
'/map',
|
||||
],
|
||||
middlewares: [] // Optional
|
||||
}],
|
||||
// Base url for the Templated Maps API
|
||||
// "/tiles/template" is for compatibility with versions up to 1.6.x
|
||||
template: {
|
||||
// "/api/v1/map/named" is the new API,
|
||||
template: [{
|
||||
paths: [
|
||||
'/template'
|
||||
]
|
||||
}
|
||||
}
|
||||
'/map/named'
|
||||
],
|
||||
middlewares: [] // Optional
|
||||
}]
|
||||
}]
|
||||
}
|
||||
|
||||
// Resource URLs expose endpoints to request/retrieve metadata associated to Maps: dataviews, analysis node status.
|
||||
@@ -73,9 +67,10 @@ var config = {
|
||||
http: 'http://{{=it.cdn_url}}/{{=it.user}}/api/v1/map',
|
||||
https: 'https://{{=it.cdn_url}}/{{=it.user}}/api/v1/map'
|
||||
}
|
||||
|
||||
// Maximum number of connections for one process
|
||||
// 128 is a good value with a limit of 1024 open file descriptors
|
||||
// Specify the maximum length of the queue of pending connections for the HTTP server.
|
||||
// The actual length will be determined by the OS through sysctl settings such as tcp_max_syn_backlog and somaxconn on Linux.
|
||||
// The default value of this parameter is 511 (not 512).
|
||||
// See: https://nodejs.org/docs/latest/api/net.html#net_server_listen
|
||||
,maxConnections:128
|
||||
// Maximum number of templates per user. Unlimited by default.
|
||||
,maxUserTemplates:1024
|
||||
@@ -88,11 +83,6 @@ var config = {
|
||||
,socket_timeout: 600000
|
||||
,enable_cors: true
|
||||
,cache_enabled: true
|
||||
,log_format: ':req[X-Real-IP] :method :req[Host]:url :status :response-time ms -> :res[Content-Type] (:res[X-Tiler-Profiler]) (:res[X-Tiler-Errors])'
|
||||
// If log_filename is given logs will be written
|
||||
// there, in append mode. Otherwise stdout is used (default).
|
||||
// Log file will be re-opened on receiving the HUP signal
|
||||
,log_filename: 'logs/node-windshaft.log'
|
||||
// Templated database username for authorized user
|
||||
// Supported labels: 'user_id' (read from redis)
|
||||
,postgres_auth_user: 'cartodb_user_<%= user_id %>'
|
||||
@@ -267,12 +257,6 @@ var config = {
|
||||
// the template to use for adding the host header in the batch api requests
|
||||
hostHeaderTemplate: '{{=it.username}}.localhost.lan'
|
||||
},
|
||||
logger: {
|
||||
// If filename is given, logs coming from the analysis client will be written
|
||||
// there, in append mode. Otherwise 'log_filename' is used. Otherwise stdout is used (default).
|
||||
// Log file will be re-opened on receiving the HUP signal
|
||||
filename: 'logs/node-windshaft-analysis.log'
|
||||
},
|
||||
// Define max execution time in ms for analyses or tags
|
||||
// If the analysis or tag is not found in redis, these values will be used as defaults.
|
||||
limits: {
|
||||
@@ -388,6 +372,12 @@ var config = {
|
||||
named_tiles: false
|
||||
}
|
||||
}
|
||||
,pubSubMetrics: {
|
||||
enabled: true,
|
||||
project_id: 'avid-wavelet-844',
|
||||
credentials: '',
|
||||
topic: 'raw-metric-events'
|
||||
}
|
||||
};
|
||||
|
||||
module.exports = config;
|
||||
|
||||
@@ -16,47 +16,41 @@ var config = {
|
||||
// Base URLs for the APIs
|
||||
//
|
||||
// See https://github.com/CartoDB/Windshaft-cartodb/wiki/Unified-Map-API
|
||||
//
|
||||
// Note: each entry corresponds with an express' router.
|
||||
// You must define at least one path. However, middlewares are optional.
|
||||
,routes: {
|
||||
v1: {
|
||||
api: [{
|
||||
paths: [
|
||||
'/api/v1',
|
||||
'/user/:user/api/v1',
|
||||
],
|
||||
// Base url for the Detached Maps API
|
||||
// "/api/v1/map" is the new API,
|
||||
map: {
|
||||
paths: [
|
||||
'/map',
|
||||
]
|
||||
},
|
||||
// Base url for the Templated Maps API
|
||||
// "/api/v1/map/named" is the new API,
|
||||
template: {
|
||||
paths: [
|
||||
'/map/named'
|
||||
]
|
||||
}
|
||||
},
|
||||
// For compatibility with versions up to 1.6.x
|
||||
v0: {
|
||||
paths: [
|
||||
'/tiles'
|
||||
// Optional: attach middlewares at the beginning of the router
|
||||
// to perform custom operations.
|
||||
middlewares: [
|
||||
function noop () {
|
||||
return function noopMiddleware (req, res, next) {
|
||||
next();
|
||||
}
|
||||
}
|
||||
],
|
||||
// Base url for the Detached Maps API
|
||||
// "/tiles/layergroup" is for compatibility with versions up to 1.6.x
|
||||
map: {
|
||||
// "/api/v1/map" is the new API,
|
||||
map: [{
|
||||
paths: [
|
||||
'/layergroup'
|
||||
]
|
||||
},
|
||||
'/map',
|
||||
],
|
||||
middlewares: [] // Optional
|
||||
}],
|
||||
// Base url for the Templated Maps API
|
||||
// "/tiles/template" is for compatibility with versions up to 1.6.x
|
||||
template: {
|
||||
// "/api/v1/map/named" is the new API,
|
||||
template: [{
|
||||
paths: [
|
||||
'/template'
|
||||
]
|
||||
}
|
||||
}
|
||||
'/map/named'
|
||||
],
|
||||
middlewares: [] // Optional
|
||||
}]
|
||||
}]
|
||||
}
|
||||
|
||||
// Resource URLs expose endpoints to request/retrieve metadata associated to Maps: dataviews, analysis node status.
|
||||
@@ -73,9 +67,9 @@ var config = {
|
||||
http: 'http://{{=it.cdn_url}}/{{=it.user}}/api/v1/map',
|
||||
https: 'https://{{=it.cdn_url}}/{{=it.user}}/api/v1/map'
|
||||
}
|
||||
|
||||
// Maximum number of connections for one process
|
||||
// 128 is a good value with a limit of 1024 open file descriptors
|
||||
// Specify the maximum length of the queue of pending connections for the HTTP server.
|
||||
// The actual length will be determined by the OS through sysctl settings such as tcp_max_syn_backlog and somaxconn on Linux.
|
||||
// The default value of this parameter is 511 (not 512).
|
||||
,maxConnections:128
|
||||
// Maximum number of templates per user. Unlimited by default.
|
||||
,maxUserTemplates:1024
|
||||
@@ -88,11 +82,6 @@ var config = {
|
||||
,socket_timeout: 600000
|
||||
,enable_cors: true
|
||||
,cache_enabled: true
|
||||
,log_format: ':req[X-Real-IP] :method :req[Host]:url :status :response-time ms -> :res[Content-Type] (:res[X-Tiler-Profiler]) (:res[X-Tiler-Errors])'
|
||||
// If log_filename is given logs will be written
|
||||
// there, in append mode. Otherwise stdout is used (default).
|
||||
// Log file will be re-opened on receiving the HUP signal
|
||||
,log_filename: 'logs/node-windshaft.log'
|
||||
// Templated database username for authorized user
|
||||
// Supported labels: 'user_id' (read from redis)
|
||||
,postgres_auth_user: 'cartodb_staging_user_<%= user_id %>'
|
||||
@@ -267,12 +256,6 @@ var config = {
|
||||
// the template to use for adding the host header in the batch api requests
|
||||
hostHeaderTemplate: '{{=it.username}}.localhost.lan'
|
||||
},
|
||||
logger: {
|
||||
// If filename is given, logs coming from the analysis client will be written
|
||||
// there, in append mode. Otherwise 'log_filename' is used. Otherwise stdout is used (default).
|
||||
// Log file will be re-opened on receiving the HUP signal
|
||||
filename: 'logs/node-windshaft-analysis.log'
|
||||
},
|
||||
// Define max execution time in ms for analyses or tags
|
||||
// If the analysis or tag is not found in redis, these values will be used as defaults.
|
||||
limits: {
|
||||
@@ -388,6 +371,12 @@ var config = {
|
||||
named_tiles: false
|
||||
}
|
||||
}
|
||||
,pubSubMetrics: {
|
||||
enabled: true,
|
||||
project_id: '',
|
||||
credentials: '',
|
||||
topic: 'raw-metric-events'
|
||||
}
|
||||
};
|
||||
|
||||
module.exports = config;
|
||||
|
||||
@@ -16,47 +16,41 @@ var config = {
|
||||
// Base URLs for the APIs
|
||||
//
|
||||
// See https://github.com/CartoDB/Windshaft-cartodb/wiki/Unified-Map-API
|
||||
//
|
||||
// Note: each entry corresponds with an express' router.
|
||||
// You must define at least one path. However, middlewares are optional.
|
||||
,routes: {
|
||||
v1: {
|
||||
api: [{
|
||||
paths: [
|
||||
'/api/v1',
|
||||
'/user/:user/api/v1',
|
||||
],
|
||||
// Base url for the Detached Maps API
|
||||
// "/api/v1/map" is the new API,
|
||||
map: {
|
||||
paths: [
|
||||
'/map',
|
||||
]
|
||||
},
|
||||
// Base url for the Templated Maps API
|
||||
// "/api/v1/map/named" is the new API,
|
||||
template: {
|
||||
paths: [
|
||||
'/map/named'
|
||||
]
|
||||
}
|
||||
},
|
||||
// For compatibility with versions up to 1.6.x
|
||||
v0: {
|
||||
paths: [
|
||||
'/tiles'
|
||||
// Optional: attach middlewares at the begining of the router
|
||||
// to perform custom operations.
|
||||
middlewares: [
|
||||
function noop () {
|
||||
return function noopMiddleware (req, res, next) {
|
||||
next();
|
||||
}
|
||||
}
|
||||
],
|
||||
// Base url for the Detached Maps API
|
||||
// "/tiles/layergroup" is for compatibility with versions up to 1.6.x
|
||||
map: {
|
||||
// "/api/v1/map" is the new API,
|
||||
map: [{
|
||||
paths: [
|
||||
'/layergroup'
|
||||
]
|
||||
},
|
||||
'/map',
|
||||
],
|
||||
middlewares: [] // Optional
|
||||
}],
|
||||
// Base url for the Templated Maps API
|
||||
// "/tiles/template" is for compatibility with versions up to 1.6.x
|
||||
template: {
|
||||
// "/api/v1/map/named" is the new API,
|
||||
template: [{
|
||||
paths: [
|
||||
'/template'
|
||||
]
|
||||
}
|
||||
}
|
||||
'/map/named'
|
||||
],
|
||||
middlewares: [] // Optional
|
||||
}]
|
||||
}]
|
||||
}
|
||||
|
||||
// Resource URLs expose endpoints to request/retrieve metadata associated to Maps: dataviews, analysis node status.
|
||||
@@ -73,9 +67,10 @@ var config = {
|
||||
http: 'http://{{=it.user}}.localhost.lan:{{=it.port}}/api/v1/map',
|
||||
https: 'https://{{=it.user}}.localhost.lan:{{=it.port}}/api/v1/map'
|
||||
}
|
||||
|
||||
// Maximum number of connections for one process
|
||||
// 128 is a good value with a limit of 1024 open file descriptors
|
||||
// Specify the maximum length of the queue of pending connections for the HTTP server.
|
||||
// The actual length will be determined by the OS through sysctl settings such as tcp_max_syn_backlog and somaxconn on Linux.
|
||||
// The default value of this parameter is 511 (not 512).
|
||||
// See: https://nodejs.org/docs/latest/api/net.html#net_server_listen
|
||||
,maxConnections:128
|
||||
// Maximum number of templates per user. Unlimited by default.
|
||||
,maxUserTemplates:1024
|
||||
@@ -88,11 +83,6 @@ var config = {
|
||||
,socket_timeout: 600000
|
||||
,enable_cors: true
|
||||
,cache_enabled: false
|
||||
,log_format: ':req[X-Real-IP] :method :req[Host]:url :status :response-time ms -> :res[Content-Type] (:res[X-Tiler-Profiler]) (:res[X-Tiler-Errors])'
|
||||
// If log_filename is given logs will be written
|
||||
// there, in append mode. Otherwise stdout is used (default).
|
||||
// Log file will be re-opened on receiving the HUP signal
|
||||
,log_filename: '/tmp/node-windshaft.log'
|
||||
// Templated database username for authorized user
|
||||
// Supported labels: 'user_id' (read from redis)
|
||||
,postgres_auth_user: 'test_windshaft_cartodb_user_<%= user_id %>'
|
||||
@@ -269,12 +259,6 @@ var config = {
|
||||
// the template to use for adding the host header in the batch api requests
|
||||
hostHeaderTemplate: '{{=it.username}}.localhost.lan'
|
||||
},
|
||||
logger: {
|
||||
// If filename is given, logs coming from the analysis client will be written
|
||||
// there, in append mode. Otherwise 'log_filename' is used. Otherwise stdout is used (default).
|
||||
// Log file will be re-opened on receiving the HUP signal
|
||||
filename: '/tmp/node-windshaft-analysis.log'
|
||||
},
|
||||
// Define max execution time in ms for analyses or tags
|
||||
// If the analysis or tag is not found in redis, these values will be used as defaults.
|
||||
limits: {
|
||||
@@ -390,6 +374,12 @@ var config = {
|
||||
named_tiles: false
|
||||
}
|
||||
}
|
||||
,pubSubMetrics: {
|
||||
enabled: false,
|
||||
project_id: '',
|
||||
credentials: '',
|
||||
topic: ''
|
||||
}
|
||||
};
|
||||
|
||||
module.exports = config;
|
||||
|
||||
configure (81 lines, vendored, deleted)
@@ -1,81 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
#
|
||||
# This script creates config/environments/*.js files using
|
||||
# config/environments/*.js.example files as input and performing
|
||||
# settings substitutions.
|
||||
#
|
||||
# It relies on a known format of the .js.example files which haven't
|
||||
# been made easier to parse to still let humans copy them manually and
|
||||
# do further editing or leave them as such to get the same setup as before
|
||||
# the introduction of this script.
|
||||
#
|
||||
# The script is a work in progress. Available switches are printed
|
||||
# by invoking with the --help switch. More switches will be added
|
||||
# as the need/request for them arises.
|
||||
#
|
||||
# --strk(2012-07-23)
|
||||
#
|
||||
|
||||
ENVDIR=config/environments
|
||||
|
||||
PGPORT=
|
||||
MAPNIK_VERSION=
|
||||
ENVIRONMENT=development
|
||||
|
||||
STATUS="$0 $*"
|
||||
|
||||
usage() {
|
||||
echo "Usage: $0 [OPTION]"
|
||||
echo
|
||||
echo "Configuration:"
|
||||
echo " --help display this help and exit"
|
||||
echo " --with-pgport=NUM access PostgreSQL server on TCP port NUM [$PGPORT]"
|
||||
echo " --with-mapnik-version=STRING set mapnik version string [$MAPNIK_VERSION]"
|
||||
echo " --environment=STRING set output environment name [$ENVIRONMENT]"
|
||||
}
|
||||
|
||||
while test -n "$1"; do
|
||||
case "$1" in
|
||||
--help|-h)
|
||||
usage
|
||||
exit 0
|
||||
;;
|
||||
--with-pgport=*)
|
||||
PGPORT=`echo "$1" | cut -d= -f2`
|
||||
;;
|
||||
--with-mapnik-version=*)
|
||||
MAPNIK_VERSION=`echo "$1" | cut -d= -f2`
|
||||
;;
|
||||
--environment=*)
|
||||
ENVIRONMENT=`echo "$1" | cut -d= -f2`
|
||||
;;
|
||||
*)
|
||||
echo "Unused option '$1'" >&2
|
||||
;;
|
||||
esac
|
||||
shift
|
||||
done
|
||||
|
||||
ENVEX=./${ENVDIR}/${ENVIRONMENT}.js.example
|
||||
|
||||
if [ -z "$PGPORT" ]; then
|
||||
PGPORT=`node -e "console.log(require('${ENVEX}').postgres.port)"`
|
||||
fi
|
||||
|
||||
echo "PGPORT: $PGPORT"
|
||||
echo "MAPNIK_VERSION: $MAPNIK_VERSION"
|
||||
echo "ENVIRONMENT: $ENVIRONMENT"
|
||||
|
||||
o=`dirname "${ENVEX}"`/`basename "${ENVEX}" .example`
|
||||
echo "Writing $o"
|
||||
|
||||
# See http://austinmatzko.com/2008/04/26/sed-multi-line-search-and-replace/
|
||||
sed -n "1h;1!H;\${;g;s/\(,postgres: {[^}]*port: *'\?\)[^',]*\('\?,\)/\1$PGPORT\2/;p;}" < "${ENVEX}" \
|
||||
| sed "s/mapnik_version:.*/mapnik_version: '$MAPNIK_VERSION'/" \
|
||||
> "$o"
|
||||
|
||||
STATUSFILE=config.status--${ENVIRONMENT}
|
||||
echo "Writing ${STATUSFILE}"
|
||||
echo ${STATUS} > ${STATUSFILE} && chmod +x ${STATUSFILE}
|
||||
|
||||
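For reference, the deleted script was invoked as, for example, `./configure --with-pgport=5433 --environment=staging` (values illustrative), which rewrote `config/environments/staging.js` from the matching `.js.example` file; this changeset drops it, apparently in favour of the environment-variable driven `config/environments/config.js` added above.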
@@ -1,13 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
echo "*********************"
|
||||
echo "To install Node.js, run:"
|
||||
echo "/src/nodejs-install.sh"
|
||||
echo "Use NODEJS_VERSION env var to select the Node.js version"
|
||||
echo " "
|
||||
echo "To start postgres, run:"
|
||||
echo "/etc/init.d/postgresql start"
|
||||
echo "*********************"
|
||||
echo " "
|
||||
|
||||
docker run -it -v `pwd`:/srv carto/nodejs-xenial-pg101:latest bash
|
||||
@@ -1,4 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
docker run -e "NODEJS_VERSION=${2}" -v `pwd`:/srv ${1} bash run_tests_docker.sh && \
|
||||
docker ps --filter status=dead --filter status=exited -aq | xargs docker rm -v
|
||||
@@ -1,92 +0,0 @@
|
||||
FROM ubuntu:xenial
|
||||
|
||||
# Use UTF8 to avoid encoding problems with pgsql
|
||||
ENV LANG C.UTF-8
|
||||
ENV NPROCS 1
|
||||
ENV JOBS 1
|
||||
ENV CXX g++-4.9
|
||||
ENV PGUSER postgres
|
||||
|
||||
# Add external repos
|
||||
RUN set -ex \
|
||||
&& apt-get update \
|
||||
&& apt-get install -y \
|
||||
curl \
|
||||
software-properties-common \
|
||||
locales \
|
||||
&& add-apt-repository -y ppa:ubuntu-toolchain-r/test \
|
||||
&& add-apt-repository -y ppa:cartodb/postgresql-10 \
|
||||
&& add-apt-repository -y ppa:cartodb/gis \
|
||||
&& curl -o- https://raw.githubusercontent.com/creationix/nvm/v0.34.0/install.sh | bash \
|
||||
&& . ~/.nvm/nvm.sh \
|
||||
&& locale-gen en_US.UTF-8 \
|
||||
&& update-locale LANG=en_US.UTF-8
|
||||
|
||||
RUN set -ex \
|
||||
&& apt-get update \
|
||||
&& apt-get install -y \
|
||||
g++-4.9 \
|
||||
gcc-4.9 \
|
||||
git \
|
||||
libcairo2-dev \
|
||||
libgdal-dev \
|
||||
libgdal1i \
|
||||
libgdal20 \
|
||||
libgeos-dev \
|
||||
libgif-dev \
|
||||
libjpeg8-dev \
|
||||
libjson-c-dev \
|
||||
libpango1.0-dev \
|
||||
libpixman-1-dev \
|
||||
libproj-dev \
|
||||
libprotobuf-c-dev \
|
||||
libxml2-dev \
|
||||
gdal-bin \
|
||||
make \
|
||||
nodejs \
|
||||
protobuf-c-compiler \
|
||||
pkg-config \
|
||||
wget \
|
||||
zip \
|
||||
postgresql-10 \
|
||||
postgresql-10-plproxy \
|
||||
postgis=2.4.4.6+carto-1 \
|
||||
postgresql-10-postgis-2.4=2.4.4.6+carto-1 \
|
||||
postgresql-10-postgis-2.4-scripts=2.4.4.6+carto-1 \
|
||||
postgresql-10-postgis-scripts=2.4.4.6+carto-1 \
|
||||
postgresql-client-10 \
|
||||
postgresql-client-common \
|
||||
postgresql-common \
|
||||
postgresql-contrib \
|
||||
postgresql-plpython-10 \
|
||||
postgresql-server-dev-10 \
|
||||
&& wget http://download.redis.io/releases/redis-4.0.8.tar.gz \
|
||||
&& tar xvzf redis-4.0.8.tar.gz \
|
||||
&& cd redis-4.0.8 \
|
||||
&& make \
|
||||
&& make install \
|
||||
&& cd .. \
|
||||
&& rm redis-4.0.8.tar.gz \
|
||||
&& rm -R redis-4.0.8 \
|
||||
&& apt-get purge -y wget protobuf-c-compiler \
|
||||
&& apt-get autoremove -y
|
||||
|
||||
# Configure PostgreSQL
|
||||
RUN set -ex \
|
||||
&& echo "listen_addresses='*'" >> /etc/postgresql/10/main/postgresql.conf \
|
||||
&& echo "local all all trust" > /etc/postgresql/10/main/pg_hba.conf \
|
||||
&& echo "host all all 0.0.0.0/0 trust" >> /etc/postgresql/10/main/pg_hba.conf \
|
||||
&& echo "host all all ::1/128 trust" >> /etc/postgresql/10/main/pg_hba.conf \
|
||||
&& /etc/init.d/postgresql start \
|
||||
&& createdb template_postgis \
|
||||
&& createuser publicuser \
|
||||
&& psql -c "CREATE EXTENSION postgis" template_postgis \
|
||||
&& /etc/init.d/postgresql stop
|
||||
|
||||
WORKDIR /srv
|
||||
EXPOSE 5858
|
||||
|
||||
COPY ./scripts/nodejs-install.sh /src/nodejs-install.sh
|
||||
RUN chmod 777 /src/nodejs-install.sh
|
||||
CMD /src/nodejs-install.sh
|
||||
|
||||
@@ -1,85 +0,0 @@
|
||||
FROM ubuntu:xenial
|
||||
|
||||
# Use UTF8 to avoid encoding problems with pgsql
|
||||
ENV LANG C.UTF-8
|
||||
ENV NPROCS 1
|
||||
ENV JOBS 1
|
||||
ENV CXX g++-4.9
|
||||
ENV PGUSER postgres
|
||||
|
||||
# Add external repos
|
||||
RUN set -ex \
|
||||
&& apt-get update \
|
||||
&& apt-get install -y \
|
||||
curl \
|
||||
software-properties-common \
|
||||
locales \
|
||||
&& add-apt-repository -y ppa:ubuntu-toolchain-r/test \
|
||||
&& add-apt-repository -y ppa:cartodb/postgresql-11 \
|
||||
&& add-apt-repository -y ppa:cartodb/redis-next \
|
||||
&& curl -o- https://raw.githubusercontent.com/creationix/nvm/v0.34.0/install.sh | bash \
|
||||
&& . ~/.nvm/nvm.sh \
|
||||
&& locale-gen en_US.UTF-8 \
|
||||
&& update-locale LANG=en_US.UTF-8
|
||||
|
||||
RUN set -ex \
|
||||
&& apt-get update \
|
||||
&& apt-get install -y \
|
||||
g++-4.9 \
|
||||
gcc-4.9 \
|
||||
git \
|
||||
libcairo2-dev \
|
||||
libgdal-dev=2.3.2+dfsg-2build2~carto1 \
|
||||
libgdal20=2.3.2+dfsg-2build2~carto1 \
|
||||
libgeos-dev=3.7.1~carto1 \
|
||||
libgif-dev \
|
||||
libjpeg8-dev \
|
||||
libjson-c-dev \
|
||||
libpango1.0-dev \
|
||||
libpixman-1-dev \
|
||||
libproj-dev \
|
||||
libprotobuf-c-dev \
|
||||
libxml2-dev \
|
||||
gdal-bin=2.3.2+dfsg-2build2~carto1 \
|
||||
make \
|
||||
nodejs \
|
||||
protobuf-c-compiler \
|
||||
pkg-config \
|
||||
wget \
|
||||
zip \
|
||||
libopenscenegraph100v5 \
|
||||
libsfcgal1 \
|
||||
liblwgeom-2.5.0=2.5.1.4+carto-1 \
|
||||
postgresql-11 \
|
||||
postgresql-11-plproxy \
|
||||
postgis=2.5.1.4+carto-1 \
|
||||
postgresql-11-postgis-2.5=2.5.1.4+carto-1 \
|
||||
postgresql-11-postgis-2.5-scripts=2.5.1.4+carto-1 \
|
||||
postgresql-client-11 \
|
||||
postgresql-client-common \
|
||||
postgresql-common \
|
||||
postgresql-contrib \
|
||||
postgresql-plpython-11 \
|
||||
postgresql-server-dev-11 \
|
||||
redis=5:4.0.9-1carto1~xenial1 \
|
||||
&& apt-get purge -y wget protobuf-c-compiler \
|
||||
&& apt-get autoremove -y
|
||||
|
||||
# Configure PostgreSQL
|
||||
RUN set -ex \
|
||||
&& echo "listen_addresses='*'" >> /etc/postgresql/11/main/postgresql.conf \
|
||||
&& echo "local all all trust" > /etc/postgresql/11/main/pg_hba.conf \
|
||||
&& echo "host all all 0.0.0.0/0 trust" >> /etc/postgresql/11/main/pg_hba.conf \
|
||||
&& echo "host all all ::1/128 trust" >> /etc/postgresql/11/main/pg_hba.conf \
|
||||
&& /etc/init.d/postgresql start \
|
||||
&& createdb template_postgis \
|
||||
&& createuser publicuser \
|
||||
&& psql -c "CREATE EXTENSION postgis" template_postgis \
|
||||
&& /etc/init.d/postgresql stop
|
||||
|
||||
WORKDIR /srv
|
||||
EXPOSE 5858
|
||||
|
||||
COPY ./scripts/nodejs-install.sh /src/nodejs-install.sh
|
||||
RUN chmod 777 /src/nodejs-install.sh
|
||||
CMD /src/nodejs-install.sh
|
||||
@@ -1,88 +0,0 @@
|
||||
FROM ubuntu:xenial
|
||||
|
||||
# Use UTF8 to avoid encoding problems with pgsql
|
||||
ENV LANG C.UTF-8
|
||||
ENV NPROCS 1
|
||||
ENV JOBS 1
|
||||
ENV CXX g++-4.9
|
||||
ENV PGUSER postgres
|
||||
|
||||
# Add external repos
|
||||
RUN set -ex \
|
||||
&& apt-get update \
|
||||
&& apt-get install -y \
|
||||
curl \
|
||||
software-properties-common \
|
||||
locales \
|
||||
&& add-apt-repository -y ppa:ubuntu-toolchain-r/test \
|
||||
&& add-apt-repository -y ppa:cartodb/postgresql-10 \
|
||||
&& add-apt-repository -y ppa:cartodb/gis \
|
||||
&& curl -sL https://deb.nodesource.com/setup_10.x | bash \
|
||||
&& locale-gen en_US.UTF-8 \
|
||||
&& update-locale LANG=en_US.UTF-8
|
||||
|
||||
RUN set -ex \
|
||||
&& apt-get update \
|
||||
&& apt-get install -y \
|
||||
g++-4.9 \
|
||||
gcc-4.9 \
|
||||
git \
|
||||
libcairo2-dev \
|
||||
libgdal-dev \
|
||||
libgdal1i \
|
||||
libgdal20 \
|
||||
libgeos-dev \
|
||||
libgif-dev \
|
||||
libjpeg8-dev \
|
||||
libjson-c-dev \
|
||||
libpango1.0-dev \
|
||||
libpixman-1-dev \
|
||||
libproj-dev \
|
||||
libprotobuf-c-dev \
|
||||
libxml2-dev \
|
||||
gdal-bin \
|
||||
make \
|
||||
nodejs \
|
||||
protobuf-c-compiler \
|
||||
pkg-config \
|
||||
wget \
|
||||
zip \
|
||||
postgresql-10 \
|
||||
postgresql-10-plproxy \
|
||||
postgis=2.4.4.5+carto-1 \
|
||||
postgresql-10-postgis-2.4=2.4.4.5+carto-1 \
|
||||
postgresql-10-postgis-2.4-scripts=2.4.4.5+carto-1 \
|
||||
postgresql-10-postgis-scripts=2.4.4.5+carto-1 \
|
||||
postgresql-client-10 \
|
||||
postgresql-client-common \
|
||||
postgresql-common \
|
||||
postgresql-contrib \
|
||||
postgresql-plpython-10 \
|
||||
postgresql-server-dev-10 \
|
||||
&& wget http://download.redis.io/releases/redis-4.0.8.tar.gz \
|
||||
&& tar xvzf redis-4.0.8.tar.gz \
|
||||
&& cd redis-4.0.8 \
|
||||
&& make \
|
||||
&& make install \
|
||||
&& cd .. \
|
||||
&& rm redis-4.0.8.tar.gz \
|
||||
&& rm -R redis-4.0.8 \
|
||||
&& apt-get purge -y wget protobuf-c-compiler \
|
||||
&& apt-get autoremove -y
|
||||
|
||||
# Configure PostgreSQL
|
||||
RUN set -ex \
|
||||
&& echo "listen_addresses='*'" >> /etc/postgresql/10/main/postgresql.conf \
|
||||
&& echo "local all all trust" > /etc/postgresql/10/main/pg_hba.conf \
|
||||
&& echo "host all all 0.0.0.0/0 trust" >> /etc/postgresql/10/main/pg_hba.conf \
|
||||
&& echo "host all all ::1/128 trust" >> /etc/postgresql/10/main/pg_hba.conf \
|
||||
&& /etc/init.d/postgresql start \
|
||||
&& createdb template_postgis \
|
||||
&& createuser publicuser \
|
||||
&& psql -c "CREATE EXTENSION postgis" template_postgis \
|
||||
&& /etc/init.d/postgresql stop
|
||||
|
||||
WORKDIR /srv
|
||||
EXPOSE 5858
|
||||
|
||||
CMD /etc/init.d/postgresql start
|
||||
@@ -1,89 +0,0 @@
|
||||
FROM ubuntu:xenial
|
||||
|
||||
# Use UTF8 to avoid encoding problems with pgsql
|
||||
ENV LANG C.UTF-8
|
||||
ENV NPROCS 1
|
||||
ENV JOBS 1
|
||||
ENV CXX g++-4.9
|
||||
ENV PGUSER postgres
|
||||
|
||||
# Add external repos
|
||||
RUN set -ex \
|
||||
&& apt-get update \
|
||||
&& apt-get install -y \
|
||||
curl \
|
||||
software-properties-common \
|
||||
locales \
|
||||
&& add-apt-repository -y ppa:ubuntu-toolchain-r/test \
|
||||
&& add-apt-repository -y ppa:cartodb/postgresql-10 \
|
||||
&& add-apt-repository -y ppa:cartodb/gis \
|
||||
&& curl -sL https://deb.nodesource.com/setup_6.x | bash \
|
||||
&& locale-gen en_US.UTF-8 \
|
||||
&& update-locale LANG=en_US.UTF-8
|
||||
|
||||
# Install dependencies and PostGIS 2.4 from sources
|
||||
RUN set -ex \
|
||||
&& apt-get update \
|
||||
&& apt-get install -y \
|
||||
g++-4.9 \
|
||||
gcc-4.9 \
|
||||
git \
|
||||
libcairo2-dev \
|
||||
libgdal-dev \
|
||||
libgdal1i \
|
||||
libgdal20 \
|
||||
libgeos-dev \
|
||||
libgif-dev \
|
||||
libjpeg8-dev \
|
||||
libjson-c-dev \
|
||||
libpango1.0-dev \
|
||||
libpixman-1-dev \
|
||||
libproj-dev \
|
||||
libprotobuf-c-dev \
|
||||
libxml2-dev \
|
||||
gdal-bin \
|
||||
make \
|
||||
nodejs \
|
||||
protobuf-c-compiler \
|
||||
pkg-config \
|
||||
wget \
|
||||
zip \
|
||||
postgresql-10 \
|
||||
postgresql-10-plproxy \
|
||||
postgresql-10-postgis-2.4 \
|
||||
postgresql-10-postgis-2.4-scripts \
|
||||
postgresql-10-postgis-scripts \
|
||||
postgresql-client-10 \
|
||||
postgresql-client-common \
|
||||
postgresql-common \
|
||||
postgresql-contrib \
|
||||
postgresql-plpython-10 \
|
||||
postgresql-server-dev-10 \
|
||||
postgis \
|
||||
&& wget http://download.redis.io/releases/redis-4.0.8.tar.gz \
|
||||
&& tar xvzf redis-4.0.8.tar.gz \
|
||||
&& cd redis-4.0.8 \
|
||||
&& make \
|
||||
&& make install \
|
||||
&& cd .. \
|
||||
&& rm redis-4.0.8.tar.gz \
|
||||
&& rm -R redis-4.0.8 \
|
||||
&& apt-get purge -y wget protobuf-c-compiler \
|
||||
&& apt-get autoremove -y
|
||||
|
||||
# Configure PostgreSQL
|
||||
RUN set -ex \
|
||||
&& echo "listen_addresses='*'" >> /etc/postgresql/10/main/postgresql.conf \
|
||||
&& echo "local all all trust" > /etc/postgresql/10/main/pg_hba.conf \
|
||||
&& echo "host all all 0.0.0.0/0 trust" >> /etc/postgresql/10/main/pg_hba.conf \
|
||||
&& echo "host all all ::1/128 trust" >> /etc/postgresql/10/main/pg_hba.conf \
|
||||
&& /etc/init.d/postgresql start \
|
||||
&& createdb template_postgis \
|
||||
&& createuser publicuser \
|
||||
&& psql -c "CREATE EXTENSION postgis" template_postgis \
|
||||
&& /etc/init.d/postgresql stop
|
||||
|
||||
WORKDIR /srv
|
||||
EXPOSE 5858
|
||||
|
||||
CMD /etc/init.d/postgresql start
|
||||
@@ -1,89 +0,0 @@
|
||||
FROM ubuntu:xenial
|
||||
|
||||
# Use UTF8 to avoid encoding problems with pgsql
|
||||
ENV LANG C.UTF-8
|
||||
ENV NPROCS 1
|
||||
ENV JOBS 1
|
||||
ENV CXX g++-4.9
|
||||
ENV PGUSER postgres
|
||||
|
||||
# Add external repos
|
||||
RUN set -ex \
|
||||
&& apt-get update \
|
||||
&& apt-get install -y \
|
||||
curl \
|
||||
software-properties-common \
|
||||
locales \
|
||||
&& add-apt-repository -y ppa:ubuntu-toolchain-r/test \
|
||||
&& add-apt-repository -y ppa:cartodb/postgresql-10 \
|
||||
&& add-apt-repository -y ppa:cartodb/gis \
|
||||
&& curl -sL https://deb.nodesource.com/setup_6.x | bash \
|
||||
&& locale-gen en_US.UTF-8 \
|
||||
&& update-locale LANG=en_US.UTF-8
|
||||
|
||||
# Install dependencies and PostGIS 2.4 from sources
|
||||
RUN set -ex \
|
||||
&& apt-get update \
|
||||
&& apt-get install -y \
|
||||
g++-4.9 \
|
||||
gcc-4.9 \
|
||||
git \
|
||||
libcairo2-dev \
|
||||
libgdal-dev \
|
||||
libgdal1i \
|
||||
libgdal20 \
|
||||
libgeos-dev \
|
||||
libgif-dev \
|
||||
libjpeg8-dev \
|
||||
libjson-c-dev \
|
||||
libpango1.0-dev \
|
||||
libpixman-1-dev \
|
||||
libproj-dev \
|
||||
libprotobuf-c-dev \
|
||||
libxml2-dev \
|
||||
gdal-bin \
|
||||
make \
|
||||
nodejs \
|
||||
protobuf-c-compiler \
|
||||
pkg-config \
|
||||
wget \
|
||||
zip \
|
||||
postgresql-10 \
|
||||
postgresql-10-plproxy \
|
||||
postgresql-10-postgis-2.4 \
|
||||
postgresql-10-postgis-2.4-scripts \
|
||||
postgresql-10-postgis-scripts \
|
||||
postgresql-client-10 \
|
||||
postgresql-client-common \
|
||||
postgresql-common \
|
||||
postgresql-contrib \
|
||||
postgresql-plpython-10 \
|
||||
postgresql-server-dev-10 \
|
||||
postgis \
|
||||
&& wget http://download.redis.io/releases/redis-4.0.8.tar.gz \
|
||||
&& tar xvzf redis-4.0.8.tar.gz \
|
||||
&& cd redis-4.0.8 \
|
||||
&& make \
|
||||
&& make install \
|
||||
&& cd .. \
|
||||
&& rm redis-4.0.8.tar.gz \
|
||||
&& rm -R redis-4.0.8 \
|
||||
&& apt-get purge -y wget protobuf-c-compiler \
|
||||
&& apt-get autoremove -y
|
||||
|
||||
# Configure PostgreSQL
|
||||
RUN set -ex \
|
||||
&& echo "listen_addresses='*'" >> /etc/postgresql/10/main/postgresql.conf \
|
||||
&& echo "local all all trust" > /etc/postgresql/10/main/pg_hba.conf \
|
||||
&& echo "host all all 0.0.0.0/0 trust" >> /etc/postgresql/10/main/pg_hba.conf \
|
||||
&& echo "host all all ::1/128 trust" >> /etc/postgresql/10/main/pg_hba.conf \
|
||||
&& /etc/init.d/postgresql start \
|
||||
&& createdb template_postgis \
|
||||
&& createuser publicuser \
|
||||
&& psql -c "CREATE EXTENSION postgis" template_postgis \
|
||||
&& /etc/init.d/postgresql stop
|
||||
|
||||
WORKDIR /srv
|
||||
EXPOSE 5858
|
||||
|
||||
CMD /etc/init.d/postgresql start
|
||||
@@ -1,88 +0,0 @@
|
||||
FROM ubuntu:xenial
|
||||
|
||||
# Use UTF8 to avoid encoding problems with pgsql
|
||||
ENV LANG C.UTF-8
|
||||
ENV NPROCS 1
|
||||
ENV JOBS 1
|
||||
ENV CXX g++-4.9
|
||||
ENV PGUSER postgres
|
||||
|
||||
# Add external repos
|
||||
RUN set -ex \
|
||||
&& apt-get update \
|
||||
&& apt-get install -y \
|
||||
curl \
|
||||
software-properties-common \
|
||||
locales \
|
||||
&& add-apt-repository -y ppa:ubuntu-toolchain-r/test \
|
||||
&& add-apt-repository -y ppa:cartodb/postgresql-10 \
|
||||
&& add-apt-repository -y ppa:cartodb/gis \
|
||||
&& curl -sL https://deb.nodesource.com/setup_6.x | bash \
|
||||
&& locale-gen en_US.UTF-8 \
|
||||
&& update-locale LANG=en_US.UTF-8
|
||||
|
||||
RUN set -ex \
|
||||
&& apt-get update \
|
||||
&& apt-get install -y \
|
||||
g++-4.9 \
|
||||
gcc-4.9 \
|
||||
git \
|
||||
libcairo2-dev \
|
||||
libgdal-dev \
|
||||
libgdal1i \
|
||||
libgdal20 \
|
||||
libgeos-dev \
|
||||
libgif-dev \
|
||||
libjpeg8-dev \
|
||||
libjson-c-dev \
|
||||
libpango1.0-dev \
|
||||
libpixman-1-dev \
|
||||
libproj-dev \
|
||||
libprotobuf-c-dev \
|
||||
libxml2-dev \
|
||||
gdal-bin \
|
||||
make \
|
||||
nodejs \
|
||||
protobuf-c-compiler \
|
||||
pkg-config \
|
||||
wget \
|
||||
zip \
|
||||
postgresql-10 \
|
||||
postgresql-10-plproxy \
|
||||
postgis=2.4.4.5+carto-1 \
|
||||
postgresql-10-postgis-2.4=2.4.4.5+carto-1 \
|
||||
postgresql-10-postgis-2.4-scripts=2.4.4.5+carto-1 \
|
||||
postgresql-10-postgis-scripts=2.4.4.5+carto-1 \
|
||||
postgresql-client-10 \
|
||||
postgresql-client-common \
|
||||
postgresql-common \
|
||||
postgresql-contrib \
|
||||
postgresql-plpython-10 \
|
||||
postgresql-server-dev-10 \
|
||||
&& wget http://download.redis.io/releases/redis-4.0.8.tar.gz \
|
||||
&& tar xvzf redis-4.0.8.tar.gz \
|
||||
&& cd redis-4.0.8 \
|
||||
&& make \
|
||||
&& make install \
|
||||
&& cd .. \
|
||||
&& rm redis-4.0.8.tar.gz \
|
||||
&& rm -R redis-4.0.8 \
|
||||
&& apt-get purge -y wget protobuf-c-compiler \
|
||||
&& apt-get autoremove -y
|
||||
|
||||
# Configure PostgreSQL
|
||||
RUN set -ex \
|
||||
&& echo "listen_addresses='*'" >> /etc/postgresql/10/main/postgresql.conf \
|
||||
&& echo "local all all trust" > /etc/postgresql/10/main/pg_hba.conf \
|
||||
&& echo "host all all 0.0.0.0/0 trust" >> /etc/postgresql/10/main/pg_hba.conf \
|
||||
&& echo "host all all ::1/128 trust" >> /etc/postgresql/10/main/pg_hba.conf \
|
||||
&& /etc/init.d/postgresql start \
|
||||
&& createdb template_postgis \
|
||||
&& createuser publicuser \
|
||||
&& psql -c "CREATE EXTENSION postgis" template_postgis \
|
||||
&& /etc/init.d/postgresql stop
|
||||
|
||||
WORKDIR /srv
|
||||
EXPOSE 5858
|
||||
|
||||
CMD /etc/init.d/postgresql start
|
||||
@@ -1,23 +0,0 @@
|
||||
To run the tests with Docker, you will need Docker installed and the docker image downloaded.
|
||||
|
||||
## Install docker
|
||||
`sudo apt install docker.io && sudo usermod -aG docker $(whoami)`
|
||||
|
||||
## Download image
|
||||
`docker pull carto/IMAGE`
|
||||
|
||||
## Carto account
|
||||
https://hub.docker.com/r/carto/
|
||||
|
||||
## Update image
|
||||
- Edit the docker image file with your desired changes
|
||||
- Build image:
|
||||
- `docker build -t carto/IMAGE -f docker/DOCKER_FILE docker/`
|
||||
|
||||
- Upload to docker hub:
|
||||
- Login into docker hub:
|
||||
- `docker login`
|
||||
- Create tag:
|
||||
- `docker tag carto/IMAGE carto/IMAGE`
|
||||
- Upload:
|
||||
- `docker push carto/IMAGE`
|
||||
@@ -1,13 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
export NVM_DIR="$HOME/.nvm"
|
||||
[ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh"
|
||||
|
||||
if [ -z $NODEJS_VERSION ]; then
|
||||
NODEJS_VERSION="10"
|
||||
NODEJS_VERSION_OPTIONS="--lts"
|
||||
fi
|
||||
|
||||
nvm install $NODEJS_VERSION $NODEJS_VERSION_OPTIONS
|
||||
nvm alias default $NODEJS_VERSION
|
||||
nvm use default
|
||||
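The deleted helper honoured the `NODEJS_VERSION` variable mentioned in the container banner above, e.g. `NODEJS_VERSION=12 /src/nodejs-install.sh` installed and activated Node 12 via nvm, falling back to Node 10 LTS when the variable was unset.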
@@ -5,14 +5,14 @@ const { Router: router } = require('express');
|
||||
const RedisPool = require('redis-mpool');
|
||||
const cartodbRedis = require('cartodb-redis');
|
||||
|
||||
const windshaft = require('windshaft');
|
||||
const { factory: windshaftFactory } = require('windshaft');
|
||||
|
||||
const PgConnection = require('../backends/pg_connection');
|
||||
const PgConnection = require('../backends/pg-connection');
|
||||
const AnalysisBackend = require('../backends/analysis');
|
||||
const AnalysisStatusBackend = require('../backends/analysis-status');
|
||||
const DataviewBackend = require('../backends/dataview');
|
||||
const TemplateMaps = require('../backends/template_maps.js');
|
||||
const PgQueryRunner = require('../backends/pg_query_runner');
|
||||
const TemplateMaps = require('../backends/template-maps');
|
||||
const PgQueryRunner = require('../backends/pg-query-runner');
|
||||
const StatsBackend = require('../backends/stats');
|
||||
const AuthBackend = require('../backends/auth');
|
||||
|
||||
@@ -20,16 +20,16 @@ const UserLimitsBackend = require('../backends/user-limits');
|
||||
const OverviewsMetadataBackend = require('../backends/overviews-metadata');
|
||||
const FilterStatsApi = require('../backends/filter-stats');
|
||||
const TablesExtentBackend = require('../backends/tables-extent');
|
||||
|
||||
const ClusterBackend = require('../backends/cluster');
|
||||
const PubSubMetricsBackend = require('../backends/metrics');
|
||||
|
||||
const LayergroupAffectedTablesCache = require('../cache/layergroup_affected_tables');
|
||||
const SurrogateKeysCache = require('../cache/surrogate_keys_cache');
|
||||
const VarnishHttpCacheBackend = require('../cache/backend/varnish_http');
|
||||
const LayergroupAffectedTablesCache = require('../cache/layergroup-affected-tables');
|
||||
const SurrogateKeysCache = require('../cache/surrogate-keys-cache');
|
||||
const VarnishHttpCacheBackend = require('../cache/backend/varnish-http');
|
||||
const FastlyCacheBackend = require('../cache/backend/fastly');
|
||||
const NamedMapProviderCache = require('../cache/named_map_provider_cache');
|
||||
const NamedMapsCacheEntry = require('../cache/model/named_maps_entry');
|
||||
const NamedMapProviderReporter = require('../stats/reporter/named-map-provider');
|
||||
const NamedMapProviderCache = require('../cache/named-map-provider-cache');
|
||||
const NamedMapsCacheEntry = require('../cache/model/named-maps-entry');
|
||||
const NamedMapProviderCacheReporter = require('../stats/reporter/named-map-provider-cache');
|
||||
|
||||
const SqlWrapMapConfigAdapter = require('../models/mapconfig/adapter/sql-wrap-mapconfig-adapter');
|
||||
const MapConfigNamedLayersAdapter = require('../models/mapconfig/adapter/mapconfig-named-layers-adapter');
|
||||
@@ -47,20 +47,23 @@ const LayergroupMetadata = require('../utils/layergroup-metadata');
|
||||
const RendererStatsReporter = require('../stats/reporter/renderer');
|
||||
|
||||
const initializeStatusCode = require('./middlewares/initialize-status-code');
|
||||
const logger = require('./middlewares/logger');
|
||||
const initLogger = require('./middlewares/logger');
|
||||
const bodyParser = require('body-parser');
|
||||
const servedByHostHeader = require('./middlewares/served-by-host-header');
|
||||
const stats = require('./middlewares/stats');
|
||||
const profiler = require('./middlewares/profiler');
|
||||
const lzmaMiddleware = require('./middlewares/lzma');
|
||||
const cors = require('./middlewares/cors');
|
||||
const user = require('./middlewares/user');
|
||||
const sendResponse = require('./middlewares/send-response');
|
||||
const syntaxError = require('./middlewares/syntax-error');
|
||||
const errorMiddleware = require('./middlewares/error-middleware');
|
||||
const clientHeader = require('./middlewares/client-header');
|
||||
|
||||
const MapRouter = require('./map/map-router');
|
||||
const TemplateRouter = require('./template/template-router');
|
||||
|
||||
const getOnTileErrorStrategy = require('../utils/on-tile-error-strategy');
|
||||
|
||||
module.exports = class ApiRouter {
|
||||
constructor ({ serverOptions, environmentOptions }) {
|
||||
this.serverOptions = serverOptions;
|
||||
@@ -80,36 +83,22 @@ module.exports = class ApiRouter {
|
||||
global.statsClient.gauge(keyPrefix + 'waiting', status.waiting);
|
||||
});
|
||||
|
||||
const { rendererCache, tileBackend, attributesBackend, previewBackend, mapBackend, mapStore } = windshaftFactory({
|
||||
rendererOptions: serverOptions,
|
||||
redisPool,
|
||||
onTileErrorStrategy: getOnTileErrorStrategy({ enabled: environmentOptions.enabledFeatures.onTileErrorStrategy }),
|
||||
logger: this.serverOptions.logger
|
||||
});
|
||||
|
||||
const rendererStatsReporter = new RendererStatsReporter(rendererCache, serverOptions.renderCache.statsInterval);
|
||||
rendererStatsReporter.start();
|
||||
|
||||
const metadataBackend = cartodbRedis({ pool: redisPool });
|
||||
const pgConnection = new PgConnection(metadataBackend);
|
||||
|
||||
const mapStore = new windshaft.storage.MapStore({
|
||||
pool: redisPool,
|
||||
expire_time: serverOptions.grainstore.default_layergroup_ttl
|
||||
});
|
||||
|
||||
const rendererFactory = createRendererFactory({ redisPool, serverOptions, environmentOptions });
|
||||
|
||||
const rendererCacheOpts = Object.assign({
|
||||
ttl: 60000, // 60 seconds TTL by default
|
||||
statsInterval: 60000 // reports stats every milliseconds defined here
|
||||
}, serverOptions.renderCache || {});
|
||||
|
||||
const rendererCache = new windshaft.cache.RendererCache(rendererFactory, rendererCacheOpts);
|
||||
const rendererStatsReporter = new RendererStatsReporter(rendererCache, rendererCacheOpts.statsInterval);
|
||||
rendererStatsReporter.start();
|
||||
|
||||
const tileBackend = new windshaft.backend.Tile(rendererCache);
|
||||
const attributesBackend = new windshaft.backend.Attributes();
|
||||
const concurrency = serverOptions.renderer.mapnik.poolSize +
|
||||
serverOptions.renderer.mapnik.poolMaxWaitingClients;
|
||||
const previewBackend = new windshaft.backend.Preview(rendererCache, { concurrency });
|
||||
const mapValidatorBackend = new windshaft.backend.MapValidator(tileBackend, attributesBackend);
|
||||
const mapBackend = new windshaft.backend.Map(rendererCache, mapStore, mapValidatorBackend);
|
||||
|
||||
const surrogateKeysCacheBackends = createSurrogateKeysCacheBackends(serverOptions);
|
||||
const surrogateKeysCache = new SurrogateKeysCache(surrogateKeysCacheBackends);
|
||||
const templateMaps = createTemplateMaps({ redisPool, surrogateKeysCache });
|
||||
const templateMaps = createTemplateMaps({ redisPool, surrogateKeysCache, logger: this.serverOptions.logger });
|
||||
|
||||
const analysisStatusBackend = new AnalysisStatusBackend();
|
||||
const analysisBackend = new AnalysisBackend(metadataBackend, serverOptions.analysis);
|
||||
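Because the removed and added lines of this hunk are interleaved above without +/- markers, here is the new wiring condensed from the added lines only: the windshaft factory now builds the renderer cache, map store and backends that ApiRouter previously constructed one by one. This is a reconstruction of what the diff already shows, not additional API surface; option values are whatever the surrounding constructor has in scope.

```js
const { factory: windshaftFactory } = require('windshaft');

// Condensed from the added lines above.
const {
    rendererCache,
    tileBackend,
    attributesBackend,
    previewBackend,
    mapBackend,
    mapStore
} = windshaftFactory({
    rendererOptions: serverOptions,
    redisPool,
    onTileErrorStrategy: getOnTileErrorStrategy({ enabled: environmentOptions.enabledFeatures.onTileErrorStrategy }),
    logger: this.serverOptions.logger
});
```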
@@ -162,14 +151,16 @@ module.exports = class ApiRouter {
|
||||
layergroupAffectedTablesCache
|
||||
);
|
||||
|
||||
const namedMapProviderReporter = new NamedMapProviderReporter({
|
||||
const namedMapProviderCacheReporter = new NamedMapProviderCacheReporter({
|
||||
namedMapProviderCache,
|
||||
intervalInMilliseconds: rendererCacheOpts.statsInterval
|
||||
intervalInMilliseconds: serverOptions.renderCache.statsInterval
|
||||
});
|
||||
namedMapProviderCacheReporter.start();
|
||||
|
||||
namedMapProviderReporter.start();
|
||||
const metricsBackend = new PubSubMetricsBackend(serverOptions.pubSubMetrics);
|
||||
|
||||
const collaborators = {
|
||||
config: serverOptions,
|
||||
analysisStatusBackend,
|
||||
attributesBackend,
|
||||
dataviewBackend,
|
||||
@@ -189,82 +180,78 @@ module.exports = class ApiRouter {
|
||||
layergroupMetadata,
|
||||
namedMapProviderCache,
|
||||
tablesExtentBackend,
|
||||
clusterBackend
|
||||
clusterBackend,
|
||||
metricsBackend
|
||||
};
|
||||
|
||||
this.metadataBackend = metadataBackend;
|
||||
this.mapRouter = new MapRouter({ collaborators });
|
||||
this.templateRouter = new TemplateRouter({ collaborators });
|
||||
}
|
||||
|
||||
register (app) {
|
||||
route (app, routes) {
|
||||
// FIXME: we need a better way to reset cache while running tests
|
||||
if (process.env.NODE_ENV === 'test') {
|
||||
app.layergroupAffectedTablesCache = this.layergroupAffectedTablesCache;
|
||||
}
|
||||
|
||||
Object.keys(this.serverOptions.routes).forEach(apiVersion => {
|
||||
const routes = this.serverOptions.routes[apiVersion];
|
||||
|
||||
routes.forEach(route => {
|
||||
const apiRouter = router({ mergeParams: true });
|
||||
const { paths, middlewares = [] } = route;
|
||||
|
||||
apiRouter.use(logger(this.serverOptions));
|
||||
apiRouter.use(initializeStatusCode());
|
||||
apiRouter.use(bodyParser.json());
|
||||
apiRouter.use(servedByHostHeader());
|
||||
apiRouter.use(stats({
|
||||
apiRouter.use(initLogger({ logger: this.serverOptions.logger }));
|
||||
apiRouter.use(user(this.metadataBackend));
|
||||
apiRouter.use(profiler({
|
||||
enabled: this.serverOptions.useProfiler,
|
||||
statsClient: global.statsClient
|
||||
}));
|
||||
|
||||
middlewares.forEach(middleware => apiRouter.use(middleware()));
|
||||
|
||||
apiRouter.use(initializeStatusCode());
|
||||
apiRouter.use(bodyParser.json());
|
||||
apiRouter.use(servedByHostHeader());
|
||||
apiRouter.use(clientHeader());
|
||||
apiRouter.use(lzmaMiddleware());
|
||||
apiRouter.use(cors());
|
||||
apiRouter.use(user());
|
||||
|
||||
this.templateRouter.register(apiRouter, routes.template.paths);
|
||||
this.mapRouter.register(apiRouter, routes.map.paths);
|
||||
this.templateRouter.route(apiRouter, route.template);
|
||||
this.mapRouter.route(apiRouter, route.map);
|
||||
|
||||
apiRouter.use(sendResponse());
|
||||
apiRouter.use(syntaxError());
|
||||
apiRouter.use(errorMiddleware());
|
||||
|
||||
const apiPaths = routes.paths;
|
||||
|
||||
apiPaths.forEach(path => app.use(path, apiRouter));
|
||||
paths.forEach(path => app.use(path, apiRouter));
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
function createTemplateMaps ({ redisPool, surrogateKeysCache }) {
|
||||
function createTemplateMaps ({ redisPool, surrogateKeysCache, logger }) {
|
||||
const templateMaps = new TemplateMaps(redisPool, {
|
||||
max_user_templates: global.environment.maxUserTemplates
|
||||
});
|
||||
|
||||
function invalidateNamedMap (owner, templateName) {
|
||||
var startTime = Date.now();
|
||||
surrogateKeysCache.invalidate(new NamedMapsCacheEntry(owner, templateName), function(err) {
|
||||
var logMessage = JSON.stringify({
|
||||
username: owner,
|
||||
type: 'named_map_invalidation',
|
||||
elapsed: Date.now() - startTime,
|
||||
error: !!err ? JSON.stringify(err.message) : undefined
|
||||
});
|
||||
function invalidateNamedMap (user, templateName) {
|
||||
const startTime = Date.now();
|
||||
surrogateKeysCache.invalidate(new NamedMapsCacheEntry(user, templateName), (err) => {
|
||||
if (err) {
|
||||
global.logger.warn(logMessage);
|
||||
} else {
|
||||
global.logger.info(logMessage);
|
||||
return logger.error({ exception: err, 'cdb-user': user, template_id: templateName }, 'Named map invalidation failed');
|
||||
}
|
||||
|
||||
const elapsed = Date.now() - startTime;
|
||||
logger.info({ 'cdb-user': user, template_id: templateName, duration: elapsed / 1000, duration_ms: elapsed }, 'Named map invalidation success');
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
['update', 'delete'].forEach(function(eventType) {
|
||||
['update', 'delete'].forEach(function (eventType) {
|
||||
templateMaps.on(eventType, invalidateNamedMap);
|
||||
});
|
||||
|
||||
return templateMaps;
|
||||
}
|
||||
|
||||
function createSurrogateKeysCacheBackends(serverOptions) {
|
||||
function createSurrogateKeysCacheBackends (serverOptions) {
|
||||
var cacheBackends = [];
|
||||
|
||||
if (serverOptions.varnish_purge_enabled) {
|
||||
@@ -282,52 +269,3 @@ function createSurrogateKeysCacheBackends(serverOptions) {
|
||||
|
||||
return cacheBackends;
|
||||
}
|
||||
|
||||
const timeoutErrorTilePath = __dirname + '/../../../assets/render-timeout-fallback.png';
|
||||
const timeoutErrorTile = require('fs').readFileSync(timeoutErrorTilePath, {encoding: null});
|
||||
|
||||
function createRendererFactory ({ redisPool, serverOptions, environmentOptions }) {
|
||||
var onTileErrorStrategy;
|
||||
if (environmentOptions.enabledFeatures.onTileErrorStrategy !== false) {
|
||||
onTileErrorStrategy = function onTileErrorStrategy$TimeoutTile(err, tile, headers, stats, format, callback) {
|
||||
|
||||
function isRenderTimeoutError (err) {
|
||||
return err.message === 'Render timed out';
|
||||
}
|
||||
|
||||
function isDatasourceTimeoutError (err) {
|
||||
return err.message && err.message.match(/canceling statement due to statement timeout/i);
|
||||
}
|
||||
|
||||
function isTimeoutError (err) {
|
||||
return isRenderTimeoutError(err) || isDatasourceTimeoutError(err);
|
||||
}
|
||||
|
||||
function isRasterFormat (format) {
|
||||
return format === 'png' || format === 'jpg';
|
||||
}
|
||||
|
||||
if (isTimeoutError(err) && isRasterFormat(format)) {
|
||||
return callback(null, timeoutErrorTile, {
|
||||
'Content-Type': 'image/png',
|
||||
}, {});
|
||||
} else {
|
||||
return callback(err, tile, headers, stats);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
const rendererFactory = new windshaft.renderer.Factory({
|
||||
onTileErrorStrategy: onTileErrorStrategy,
|
||||
mapnik: {
|
||||
redisPool: redisPool,
|
||||
grainstore: serverOptions.grainstore,
|
||||
mapnik: serverOptions.renderer.mapnik
|
||||
},
|
||||
http: serverOptions.renderer.http,
|
||||
mvt: serverOptions.renderer.mvt,
|
||||
torque: serverOptions.renderer.torque
|
||||
});
|
||||
|
||||
return rendererFactory;
|
||||
}
|
||||
@@ -1,6 +1,7 @@
|
||||
'use strict';
|
||||
|
||||
const PSQL = require('cartodb-psql');
|
||||
const tag = require('../middlewares/tag');
|
||||
const cleanUpQueryParams = require('../middlewares/clean-up-query-params');
|
||||
const credentials = require('../middlewares/credentials');
|
||||
const authorize = require('../middlewares/authorize');
|
||||
@@ -17,12 +18,13 @@ module.exports = class AnalysesController {
|
||||
this.userLimitsBackend = userLimitsBackend;
|
||||
}
|
||||
|
||||
register (mapRouter) {
|
||||
route (mapRouter) {
|
||||
mapRouter.get('/analyses/catalog', this.middlewares());
|
||||
}
|
||||
|
||||
middlewares () {
|
||||
return [
|
||||
tag({ tags: ['analysis', 'catalog'] }),
|
||||
credentials(),
|
||||
authorize(this.authBackend),
|
||||
dbConnSetup(this.pgConnection),
|
||||
@@ -48,10 +50,10 @@ function createPGClient () {
|
||||
};
|
||||
}
|
||||
|
||||
function getDataFromQuery({ queryTemplate, key }) {
|
||||
function getDataFromQuery ({ queryTemplate, key }) {
|
||||
const readOnlyTransactionOn = true;
|
||||
|
||||
return function getCatalogMiddleware(req, res, next) {
|
||||
return function getCatalogMiddleware (req, res, next) {
|
||||
const { pg, user } = res.locals;
|
||||
const sql = queryTemplate({ _username: user });
|
||||
|
||||
@@ -82,27 +84,27 @@ function prepareResponse () {
|
||||
}, {});
|
||||
|
||||
const analysisCatalog = catalog.map(analysis => {
|
||||
if (analysisIdToTable.hasOwnProperty(analysis.node_id)) {
|
||||
if (Object.prototype.hasOwnProperty.call(analysisIdToTable, analysis.node_id)) {
|
||||
analysis.table = analysisIdToTable[analysis.node_id];
|
||||
}
|
||||
|
||||
return analysis;
|
||||
})
|
||||
.sort((analysisA, analysisB) => {
|
||||
if (!!analysisA.table && !!analysisB.table) {
|
||||
return analysisB.table.size - analysisA.table.size;
|
||||
}
|
||||
.sort((analysisA, analysisB) => {
|
||||
if (!!analysisA.table && !!analysisB.table) {
|
||||
return analysisB.table.size - analysisA.table.size;
|
||||
}
|
||||
|
||||
if (analysisA.table) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (analysisB.table) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (!!analysisA.table) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (!!analysisB.table) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
return -1;
|
||||
});
|
||||
});
|
||||
|
||||
res.statusCode = 200;
|
||||
res.body = { catalog: analysisCatalog };
|
||||
@@ -112,7 +114,7 @@ function prepareResponse () {
|
||||
}
|
||||
|
||||
function unauthorizedError () {
|
||||
return function unathorizedErrorMiddleware(err, req, res, next) {
|
||||
return function unathorizedErrorMiddleware (err, req, res, next) {
|
||||
if (err.message.match(/permission\sdenied/)) {
|
||||
err = new Error('Unauthorized');
|
||||
err.http_status = 401;
|
||||
@@ -123,7 +125,7 @@ function unauthorizedError () {
|
||||
}
|
||||
|
||||
const catalogQueryTpl = ctx => `
|
||||
SELECT analysis_def->>'type' as type, * FROM cdb_analysis_catalog WHERE username = '${ctx._username}'
|
||||
SELECT analysis_def->>'type' as type, * FROM cartodb.cdb_analysis_catalog WHERE username = '${ctx._username}'
|
||||
`;
|
||||
|
||||
var tablesQueryTpl = ctx => `
|
||||
@@ -1,5 +1,6 @@
|
||||
'use strict';
|
||||
|
||||
const tag = require('../middlewares/tag');
|
||||
const layergroupToken = require('../middlewares/layergroup-token');
|
||||
const cleanUpQueryParams = require('../middlewares/clean-up-query-params');
|
||||
const credentials = require('../middlewares/credentials');
|
||||
@@ -17,12 +18,13 @@ module.exports = class AnalysisLayergroupController {
|
||||
this.authBackend = authBackend;
|
||||
}
|
||||
|
||||
register (mapRouter) {
|
||||
route (mapRouter) {
|
||||
mapRouter.get('/:token/analysis/node/:nodeId', this.middlewares());
|
||||
}
|
||||
|
||||
middlewares () {
|
||||
return [
|
||||
tag({ tags: ['analysis', 'node'] }),
|
||||
layergroupToken(),
|
||||
credentials(),
|
||||
authorize(this.authBackend),
|
||||
@@ -35,7 +37,7 @@ module.exports = class AnalysisLayergroupController {
|
||||
};
|
||||
|
||||
function analysisNodeStatus (analysisStatusBackend) {
|
||||
return function analysisNodeStatusMiddleware(req, res, next) {
|
||||
return function analysisNodeStatusMiddleware (req, res, next) {
|
||||
const { nodeId } = req.params;
|
||||
const dbParams = dbParamsFromResLocals(res.locals);
|
||||
|
||||
@@ -3,11 +3,11 @@
|
||||
const windshaft = require('windshaft');
|
||||
const MapConfig = windshaft.model.MapConfig;
|
||||
const Datasource = windshaft.model.Datasource;
|
||||
const tag = require('../middlewares/tag');
|
||||
const cleanUpQueryParams = require('../middlewares/clean-up-query-params');
|
||||
const credentials = require('../middlewares/credentials');
|
||||
const dbConnSetup = require('../middlewares/db-conn-setup');
|
||||
const authorize = require('../middlewares/authorize');
|
||||
const initProfiler = require('../middlewares/init-profiler');
|
||||
const checkJsonContentType = require('../middlewares/check-json-content-type');
|
||||
const incrementMapViewCount = require('../middlewares/increment-map-view-count');
|
||||
const augmentLayergroupData = require('../middlewares/augment-layergroup-data');
|
||||
@@ -23,6 +23,7 @@ const mapError = require('../middlewares/map-error');
|
||||
const CreateLayergroupMapConfigProvider = require('../../models/mapconfig/provider/create-layergroup-provider');
|
||||
const rateLimit = require('../middlewares/rate-limit');
|
||||
const { RATE_LIMIT_ENDPOINTS_GROUPS } = rateLimit;
|
||||
const metrics = require('../middlewares/metrics');
|
||||
|
||||
module.exports = class AnonymousMapController {
|
||||
/**
|
||||
@@ -39,6 +40,7 @@ module.exports = class AnonymousMapController {
|
||||
* @constructor
|
||||
*/
|
||||
constructor (
|
||||
config,
|
||||
pgConnection,
|
||||
templateMaps,
|
||||
mapBackend,
|
||||
@@ -49,8 +51,10 @@ module.exports = class AnonymousMapController {
|
||||
mapConfigAdapter,
|
||||
statsBackend,
|
||||
authBackend,
|
||||
layergroupMetadata
|
||||
layergroupMetadata,
|
||||
metricsBackend
|
||||
) {
|
||||
this.config = config;
|
||||
this.pgConnection = pgConnection;
|
||||
this.templateMaps = templateMaps;
|
||||
this.mapBackend = mapBackend;
|
||||
@@ -62,32 +66,46 @@ module.exports = class AnonymousMapController {
|
||||
this.statsBackend = statsBackend;
|
||||
this.authBackend = authBackend;
|
||||
this.layergroupMetadata = layergroupMetadata;
|
||||
this.metricsBackend = metricsBackend;
|
||||
}
|
||||
|
||||
register (mapRouter) {
|
||||
route (mapRouter) {
|
||||
mapRouter.options('/');
|
||||
mapRouter.get('/', this.middlewares());
|
||||
mapRouter.post('/', this.middlewares());
|
||||
}
|
||||
|
||||
middlewares () {
|
||||
const isTemplateInstantiation = false;
|
||||
const useTemplateHash = false;
|
||||
const includeQuery = true;
|
||||
const label = 'ANONYMOUS LAYERGROUP';
|
||||
const addContext = true;
|
||||
const metricsTags = {
|
||||
event: 'map_view',
|
||||
attributes: { map_type: 'anonymous' },
|
||||
from: {
|
||||
req: {
|
||||
query: { client: 'client' }
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
return [
|
||||
tag({ tags: ['map', 'anonymous'] }),
|
||||
metrics({
|
||||
enabled: this.config.pubSubMetrics.enabled,
|
||||
metricsBackend: this.metricsBackend,
|
||||
tags: metricsTags
|
||||
}),
|
||||
credentials(),
|
||||
authorize(this.authBackend),
|
||||
dbConnSetup(this.pgConnection),
|
||||
rateLimit(this.userLimitsBackend, RATE_LIMIT_ENDPOINTS_GROUPS.ANONYMOUS),
|
||||
cleanUpQueryParams(['aggregation']),
|
||||
initProfiler(isTemplateInstantiation),
|
||||
checkJsonContentType(),
|
||||
checkCreateLayergroup(),
|
||||
prepareAdapterMapConfig(this.mapConfigAdapter),
|
||||
createLayergroup (
|
||||
createLayergroup(
|
||||
this.mapBackend,
|
||||
this.userLimitsBackend,
|
||||
this.pgConnection,
|
||||
@@ -124,22 +142,23 @@ function checkCreateLayergroup () {
|
||||
}
|
||||
}
|
||||
|
||||
req.profiler.done('checkCreateLayergroup');
|
||||
return next();
|
||||
};
|
||||
}
|
||||
|
||||
function prepareAdapterMapConfig (mapConfigAdapter) {
|
||||
return function prepareAdapterMapConfigMiddleware(req, res, next) {
|
||||
return function prepareAdapterMapConfigMiddleware (req, res, next) {
|
||||
const requestMapConfig = req.body;
|
||||
|
||||
const { user, api_key } = res.locals;
|
||||
const { logger } = res.locals;
|
||||
const { user, api_key: apiKey } = res.locals;
|
||||
const { dbuser, dbname, dbpassword, dbhost, dbport } = res.locals;
|
||||
const params = Object.assign({ dbuser, dbname, dbpassword, dbhost, dbport }, req.query);
|
||||
|
||||
const context = {
|
||||
analysisConfiguration: {
|
||||
user,
|
||||
logger,
|
||||
db: {
|
||||
host: dbhost,
|
||||
port: dbport,
|
||||
@@ -149,31 +168,25 @@ function prepareAdapterMapConfig (mapConfigAdapter) {
|
||||
},
|
||||
batch: {
|
||||
username: user,
|
||||
apiKey: api_key
|
||||
apiKey
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
mapConfigAdapter.getMapConfig(user,
|
||||
requestMapConfig,
|
||||
params,
|
||||
context,
|
||||
(err, requestMapConfig, stats = { overviewsAddedToMapconfig : false }) => {
|
||||
req.profiler.done('anonymous.getMapConfig');
|
||||
requestMapConfig,
|
||||
params,
|
||||
context,
|
||||
(err, requestMapConfig) => {
|
||||
if (err) {
|
||||
return next(err);
|
||||
}
|
||||
|
||||
stats.mapType = 'anonymous';
|
||||
req.profiler.add(stats);
|
||||
req.body = requestMapConfig;
|
||||
res.locals.context = context;
|
||||
|
||||
|
||||
if (err) {
|
||||
return next(err);
|
||||
}
|
||||
|
||||
req.body = requestMapConfig;
|
||||
res.locals.context = context;
|
||||
|
||||
next();
|
||||
});
|
||||
next();
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
@@ -182,12 +195,17 @@ function createLayergroup (mapBackend, userLimitsBackend, pgConnection, affected
|
||||
const requestMapConfig = req.body;
|
||||
|
||||
const { context } = res.locals;
|
||||
const { user, cache_buster, api_key } = res.locals;
|
||||
const { user, cache_buster: cacheBuster, api_key: apiKey } = res.locals;
|
||||
const { dbuser, dbname, dbpassword, dbhost, dbport } = res.locals;
|
||||
|
||||
const params = {
|
||||
cache_buster, api_key,
|
||||
dbuser, dbname, dbpassword, dbhost, dbport
|
||||
cache_buster: cacheBuster,
|
||||
api_key: apiKey,
|
||||
dbuser,
|
||||
dbname,
|
||||
dbpassword,
|
||||
dbhost,
|
||||
dbport
|
||||
};
|
||||
|
||||
const datasource = context.datasource || Datasource.EmptyDatasource();
|
||||
@@ -203,6 +221,7 @@ function createLayergroup (mapBackend, userLimitsBackend, pgConnection, affected
|
||||
);
|
||||
|
||||
res.locals.mapConfig = mapConfig;
|
||||
res.locals.mapConfigProvider = mapConfigProvider;
|
||||
res.locals.analysesResults = context.analysesResults;
|
||||
|
||||
const mapParams = { dbuser, dbname, dbpassword, dbhost, dbport };
|
||||
@@ -216,7 +235,6 @@ function createLayergroup (mapBackend, userLimitsBackend, pgConnection, affected
|
||||
|
||||
res.statusCode = 200;
|
||||
res.body = layergroup;
|
||||
res.locals.mapConfigProvider = mapConfigProvider;
|
||||
|
||||
next();
|
||||
});
|
||||
@@ -1,5 +1,6 @@
|
||||
'use strict';
|
||||
|
||||
const tag = require('../middlewares/tag');
|
||||
const layergroupToken = require('../middlewares/layergroup-token');
|
||||
const cleanUpQueryParams = require('../middlewares/clean-up-query-params');
|
||||
const credentials = require('../middlewares/credentials');
|
||||
@@ -32,12 +33,13 @@ module.exports = class AttributesLayergroupController {
|
||||
this.surrogateKeysCache = surrogateKeysCache;
|
||||
}
|
||||
|
||||
register (mapRouter) {
|
||||
route (mapRouter) {
|
||||
mapRouter.get('/:token/:layer/attributes/:fid', this.middlewares());
|
||||
}
|
||||
|
||||
middlewares () {
|
||||
return [
|
||||
tag({ tags: ['attributes'] }),
|
||||
layergroupToken(),
|
||||
credentials(),
|
||||
authorize(this.authBackend),
|
||||
@@ -61,8 +63,6 @@ module.exports = class AttributesLayergroupController {
|
||||
|
||||
function getFeatureAttributes (attributesBackend) {
|
||||
return function getFeatureAttributesMiddleware (req, res, next) {
|
||||
req.profiler.start('windshaft.maplayer_attribute');
|
||||
|
||||
const { mapConfigProvider } = res.locals;
|
||||
const { token } = res.locals;
|
||||
const { dbuser, dbname, dbpassword, dbhost, dbport } = res.locals;
|
||||
@@ -70,8 +70,13 @@ function getFeatureAttributes (attributesBackend) {
|
||||
|
||||
const params = {
|
||||
token,
|
||||
dbuser, dbname, dbpassword, dbhost, dbport,
|
||||
layer, fid
|
||||
dbuser,
|
||||
dbname,
|
||||
dbpassword,
|
||||
dbhost,
|
||||
dbport,
|
||||
layer,
|
||||
fid
|
||||
};
|
||||
|
||||
attributesBackend.getFeatureAttributes(mapConfigProvider, params, false, (err, tile, stats = {}) => {
|
||||
@@ -1,5 +1,6 @@
|
||||
'use strict';
|
||||
|
||||
const tag = require('../middlewares/tag');
|
||||
const layergroupToken = require('../middlewares/layergroup-token');
|
||||
const cleanUpQueryParams = require('../middlewares/clean-up-query-params');
|
||||
const credentials = require('../middlewares/credentials');
|
||||
@@ -32,19 +33,20 @@ module.exports = class AggregatedFeaturesLayergroupController {
|
||||
this.surrogateKeysCache = surrogateKeysCache;
|
||||
}
|
||||
|
||||
register (mapRouter) {
|
||||
route (mapRouter) {
|
||||
mapRouter.get('/:token/:layer/:z/cluster/:clusterId', this.middlewares());
|
||||
}
|
||||
|
||||
middlewares () {
|
||||
return [
|
||||
tag({ tags: ['cluster'] }),
|
||||
layergroupToken(),
|
||||
credentials(),
|
||||
authorize(this.authBackend),
|
||||
dbConnSetup(this.pgConnection),
|
||||
// TODO: create its rate limit
|
||||
rateLimit(this.userLimitsBackend, RATE_LIMIT_ENDPOINTS_GROUPS.ATTRIBUTES),
|
||||
cleanUpQueryParams([ 'aggregation' ]),
|
||||
cleanUpQueryParams(['aggregation']),
|
||||
createMapStoreMapConfigProvider(
|
||||
this.mapStore,
|
||||
this.userLimitsBackend,
|
||||
@@ -62,8 +64,6 @@ module.exports = class AggregatedFeaturesLayergroupController {
|
||||
|
||||
function getClusteredFeatures (clusterBackend) {
|
||||
return function getFeatureAttributesMiddleware (req, res, next) {
|
||||
req.profiler.start('windshaft.maplayer_cluster_features');
|
||||
|
||||
const { mapConfigProvider } = res.locals;
|
||||
const { user, token } = res.locals;
|
||||
const { dbuser, dbname, dbpassword, dbhost, dbport } = res.locals;
|
||||
@@ -71,9 +71,16 @@ function getClusteredFeatures (clusterBackend) {
|
||||
const { aggregation } = req.query;
|
||||
|
||||
const params = {
|
||||
user, token,
|
||||
dbuser, dbname, dbpassword, dbhost, dbport,
|
||||
layer, zoom, clusterId,
|
||||
user,
|
||||
token,
|
||||
dbuser,
|
||||
dbname,
|
||||
dbpassword,
|
||||
dbhost,
|
||||
dbport,
|
||||
layer,
|
||||
zoom,
|
||||
clusterId,
|
||||
aggregation
|
||||
};
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
'use strict';
|
||||
|
||||
const tag = require('../middlewares/tag');
|
||||
const layergroupToken = require('../middlewares/layergroup-token');
|
||||
const cleanUpQueryParams = require('../middlewares/clean-up-query-params');
|
||||
const credentials = require('../middlewares/credentials');
|
||||
@@ -18,14 +19,16 @@ const ALLOWED_DATAVIEW_QUERY_PARAMS = [
|
||||
'own_filter', // 0, 1
|
||||
'no_filters', // 0, 1
|
||||
'bbox', // w,s,e,n
|
||||
'circle', // json
|
||||
'polygon', // json
|
||||
'start', // number
|
||||
'end', // number
|
||||
'column_type', // string
|
||||
'bins', // number
|
||||
'aggregation', //string
|
||||
'aggregation', // string
|
||||
'offset', // number
|
||||
'q', // widgets search
|
||||
'categories', // number
|
||||
'categories' // number
|
||||
];
|
||||
|
||||
module.exports = class DataviewLayergroupController {
|
||||
@@ -47,7 +50,7 @@ module.exports = class DataviewLayergroupController {
|
||||
this.surrogateKeysCache = surrogateKeysCache;
|
||||
}
|
||||
|
||||
register (mapRouter) {
|
||||
route (mapRouter) {
|
||||
// Undocumented/non-supported API endpoint methods.
|
||||
// Use at your own peril.
|
||||
|
||||
@@ -74,6 +77,7 @@ module.exports = class DataviewLayergroupController {
|
||||
|
||||
middlewares ({ action, rateLimitGroup }) {
|
||||
return [
|
||||
tag({ tags: ['dataview', action] }),
|
||||
layergroupToken(),
|
||||
credentials(),
|
||||
authorize(this.authBackend),
|
||||
@@ -15,6 +15,7 @@ const ClusteredFeaturesLayergroupController = require('./clustered-features-laye
|
||||
module.exports = class MapRouter {
|
||||
constructor ({ collaborators }) {
|
||||
const {
|
||||
config,
|
||||
analysisStatusBackend,
|
||||
attributesBackend,
|
||||
dataviewBackend,
|
||||
@@ -34,7 +35,8 @@ module.exports = class MapRouter {
|
||||
layergroupMetadata,
|
||||
namedMapProviderCache,
|
||||
tablesExtentBackend,
|
||||
clusterBackend
|
||||
clusterBackend,
|
||||
metricsBackend
|
||||
} = collaborators;
|
||||
|
||||
this.analysisLayergroupController = new AnalysisLayergroupController(
|
||||
@@ -85,6 +87,7 @@ module.exports = class MapRouter {
|
||||
);
|
||||
|
||||
this.anonymousMapController = new AnonymousMapController(
|
||||
config,
|
||||
pgConnection,
|
||||
templateMaps,
|
||||
mapBackend,
|
||||
@@ -95,10 +98,12 @@ module.exports = class MapRouter {
|
||||
mapConfigAdapter,
|
||||
statsBackend,
|
||||
authBackend,
|
||||
layergroupMetadata
|
||||
layergroupMetadata,
|
||||
metricsBackend
|
||||
);
|
||||
|
||||
this.previewTemplateController = new PreviewTemplateController(
|
||||
config,
|
||||
namedMapProviderCache,
|
||||
previewBackend,
|
||||
surrogateKeysCache,
|
||||
@@ -106,7 +111,8 @@ module.exports = class MapRouter {
|
||||
metadataBackend,
|
||||
pgConnection,
|
||||
authBackend,
|
||||
userLimitsBackend
|
||||
userLimitsBackend,
|
||||
metricsBackend
|
||||
);
|
||||
|
||||
this.analysesController = new AnalysesCatalogController(
|
||||
@@ -126,19 +132,25 @@ module.exports = class MapRouter {
|
||||
);
|
||||
}
|
||||
|
||||
register (apiRouter, mapPaths) {
|
||||
route (apiRouter, routes) {
|
||||
const mapRouter = router({ mergeParams: true });
|
||||
|
||||
this.analysisLayergroupController.register(mapRouter);
|
||||
this.attributesLayergroupController.register(mapRouter);
|
||||
this.dataviewLayergroupController.register(mapRouter);
|
||||
this.previewLayergroupController.register(mapRouter);
|
||||
this.tileLayergroupController.register(mapRouter);
|
||||
this.anonymousMapController.register(mapRouter);
|
||||
this.previewTemplateController.register(mapRouter);
|
||||
this.analysesController.register(mapRouter);
|
||||
this.clusteredFeaturesLayergroupController.register(mapRouter);
|
||||
routes.forEach(route => {
|
||||
const { paths, middlewares = [] } = route;
|
||||
|
||||
mapPaths.forEach(path => apiRouter.use(path, mapRouter));
|
||||
middlewares.forEach(middleware => mapRouter.use(middleware()));
|
||||
|
||||
this.analysisLayergroupController.route(mapRouter);
|
||||
this.attributesLayergroupController.route(mapRouter);
|
||||
this.dataviewLayergroupController.route(mapRouter);
|
||||
this.previewLayergroupController.route(mapRouter);
|
||||
this.tileLayergroupController.route(mapRouter);
|
||||
this.anonymousMapController.route(mapRouter);
|
||||
this.previewTemplateController.route(mapRouter);
|
||||
this.analysesController.route(mapRouter);
|
||||
this.clusteredFeaturesLayergroupController.route(mapRouter);
|
||||
|
||||
paths.forEach(path => apiRouter.use(path, mapRouter));
|
||||
});
|
||||
}
|
||||
};
|
||||
@@ -1,5 +1,6 @@
|
||||
'use strict';
|
||||
|
||||
const tag = require('../middlewares/tag');
|
||||
const layergroupToken = require('../middlewares/layergroup-token');
|
||||
const coordinates = require('../middlewares/coordinates');
|
||||
const cleanUpQueryParams = require('../middlewares/clean-up-query-params');
|
||||
@@ -35,7 +36,7 @@ module.exports = class PreviewLayergroupController {
|
||||
this.surrogateKeysCache = surrogateKeysCache;
|
||||
}
|
||||
|
||||
register (mapRouter) {
|
||||
route (mapRouter) {
|
||||
mapRouter.get('/static/center/:token/:z/:lat/:lng/:width/:height.:format', this.middlewares({
|
||||
validateZoom: true,
|
||||
previewType: 'centered'
|
||||
@@ -61,6 +62,7 @@ module.exports = class PreviewLayergroupController {
|
||||
}
|
||||
|
||||
return [
|
||||
tag({ tags: ['static', 'tile'] }),
|
||||
layergroupToken(),
|
||||
validateZoom ? coordinates({ z: true, x: false, y: false }) : noop(),
|
||||
credentials(),
|
||||
@@ -100,7 +102,6 @@ function getPreviewImageByCenter (previewBackend) {
|
||||
const options = { mapConfigProvider, format, width, height, zoom, center };
|
||||
|
||||
previewBackend.getImage(options, (err, image, stats = {}) => {
|
||||
req.profiler.done(`render-${format}`);
|
||||
req.profiler.add(stats);
|
||||
|
||||
if (err) {
|
||||
@@ -133,7 +134,6 @@ function getPreviewImageByBoundingBox (previewBackend) {
|
||||
const options = { mapConfigProvider, format, width, height, bbox };
|
||||
|
||||
previewBackend.getImage(options, (err, image, stats = {}) => {
|
||||
req.profiler.done(`render-${format}`);
|
||||
req.profiler.add(stats);
|
||||
|
||||
if (err) {
|
||||
@@ -1,5 +1,6 @@
|
||||
'use strict';
|
||||
|
||||
const tag = require('../middlewares/tag');
|
||||
const cleanUpQueryParams = require('../middlewares/clean-up-query-params');
|
||||
const credentials = require('../middlewares/credentials');
|
||||
const dbConnSetup = require('../middlewares/db-conn-setup');
|
||||
@@ -12,6 +13,7 @@ const lastModifiedHeader = require('../middlewares/last-modified-header');
|
||||
const checkStaticImageFormat = require('../middlewares/check-static-image-format');
|
||||
const rateLimit = require('../middlewares/rate-limit');
|
||||
const { RATE_LIMIT_ENDPOINTS_GROUPS } = rateLimit;
|
||||
const metrics = require('../middlewares/metrics');
|
||||
|
||||
const DEFAULT_ZOOM_CENTER = {
|
||||
zoom: 1,
|
||||
@@ -21,12 +23,13 @@ const DEFAULT_ZOOM_CENTER = {
|
||||
}
|
||||
};
|
||||
|
||||
function numMapper(n) {
|
||||
function numMapper (n) {
|
||||
return +n;
|
||||
}
|
||||
|
||||
module.exports = class PreviewTemplateController {
|
||||
constructor (
|
||||
config,
|
||||
namedMapProviderCache,
|
||||
previewBackend,
|
||||
surrogateKeysCache,
|
||||
@@ -34,8 +37,10 @@ module.exports = class PreviewTemplateController {
|
||||
metadataBackend,
|
||||
pgConnection,
|
||||
authBackend,
|
||||
userLimitsBackend
|
||||
userLimitsBackend,
|
||||
metricsBackend
|
||||
) {
|
||||
this.config = config;
|
||||
this.namedMapProviderCache = namedMapProviderCache;
|
||||
this.previewBackend = previewBackend;
|
||||
this.surrogateKeysCache = surrogateKeysCache;
|
||||
@@ -44,14 +49,31 @@ module.exports = class PreviewTemplateController {
|
||||
this.pgConnection = pgConnection;
|
||||
this.authBackend = authBackend;
|
||||
this.userLimitsBackend = userLimitsBackend;
|
||||
this.metricsBackend = metricsBackend;
|
||||
}
|
||||
|
||||
register (mapRouter) {
|
||||
route (mapRouter) {
|
||||
mapRouter.get('/static/named/:template_id/:width/:height.:format', this.middlewares());
|
||||
}
|
||||
|
||||
middlewares () {
|
||||
const metricsTags = {
|
||||
event: 'map_view',
|
||||
attributes: { map_type: 'static' },
|
||||
from: {
|
||||
req: {
|
||||
query: { client: 'client' }
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
return [
|
||||
tag({ tags: ['named', 'static', 'tile'] }),
|
||||
metrics({
|
||||
enabled: this.config.pubSubMetrics.enabled,
|
||||
metricsBackend: this.metricsBackend,
|
||||
tags: metricsTags
|
||||
}),
|
||||
credentials(),
|
||||
authorize(this.authBackend),
|
||||
dbConnSetup(this.pgConnection),
|
||||
@@ -60,7 +82,8 @@ module.exports = class PreviewTemplateController {
|
||||
checkStaticImageFormat(),
|
||||
namedMapProvider({
|
||||
namedMapProviderCache: this.namedMapProviderCache,
|
||||
label: 'STATIC_VIZ_MAP', forcedFormat: 'png'
|
||||
label: 'STATIC_VIZ_MAP',
|
||||
forcedFormat: 'png'
|
||||
}),
|
||||
getTemplate({ label: 'STATIC_VIZ_MAP' }),
|
||||
prepareLayerFilterFromPreviewLayers({
|
||||
@@ -99,7 +122,7 @@ function getTemplate ({ label }) {
|
||||
function prepareLayerFilterFromPreviewLayers ({ namedMapProviderCache, label }) {
|
||||
return function prepareLayerFilterFromPreviewLayersMiddleware (req, res, next) {
|
||||
const { template } = res.locals;
|
||||
const { config, auth_token } = req.query;
|
||||
const { config, auth_token: authToken } = req.query;
|
||||
|
||||
if (!template || !template.view || !template.view.preview_layers) {
|
||||
return next();
|
||||
@@ -109,8 +132,8 @@ function prepareLayerFilterFromPreviewLayers ({ namedMapProviderCache, label })
|
||||
var layerVisibilityFilter = [];
|
||||
|
||||
template.layergroup.layers.forEach((layer, index) => {
|
||||
if (previewLayers[''+index] !== false && previewLayers[layer.id] !== false) {
|
||||
layerVisibilityFilter.push(''+index);
|
||||
if (previewLayers['' + index] !== false && previewLayers[layer.id] !== false) {
|
||||
layerVisibilityFilter.push('' + index);
|
||||
}
|
||||
});
|
||||
|
||||
@@ -118,21 +141,29 @@ function prepareLayerFilterFromPreviewLayers ({ namedMapProviderCache, label })
|
||||
return next();
|
||||
}
|
||||
|
||||
const { user, token, cache_buster, api_key } = res.locals;
|
||||
const { user, token, cache_buster: cacheBuster, api_key: apiKey } = res.locals;
|
||||
const { dbuser, dbname, dbpassword, dbhost, dbport } = res.locals;
|
||||
const { template_id, format } = req.params;
|
||||
const { template_id: templateId, format } = req.params;
|
||||
|
||||
const params = {
|
||||
user, token, cache_buster, api_key,
|
||||
dbuser, dbname, dbpassword, dbhost, dbport,
|
||||
template_id, format
|
||||
user,
|
||||
token,
|
||||
cache_buster: cacheBuster,
|
||||
api_key: apiKey,
|
||||
dbuser,
|
||||
dbname,
|
||||
dbpassword,
|
||||
dbhost,
|
||||
dbport,
|
||||
template_id: templateId,
|
||||
format
|
||||
};
|
||||
|
||||
// overwrites 'all' default filter
|
||||
params.layer = layerVisibilityFilter.join(',');
|
||||
|
||||
// recreates the provider
|
||||
namedMapProviderCache.get(user, template_id, config, auth_token, params, (err, provider) => {
|
||||
namedMapProviderCache.get(user, templateId, config, authToken, params, (err, provider) => {
|
||||
if (err) {
|
||||
err.label = label;
|
||||
return next(err);
|
||||
@@ -146,7 +177,7 @@ function prepareLayerFilterFromPreviewLayers ({ namedMapProviderCache, label })
|
||||
}
|
||||
|
||||
function getStaticImageOptions ({ tablesExtentBackend }) {
|
||||
return function getStaticImageOptionsMiddleware(req, res, next) {
|
||||
return function getStaticImageOptionsMiddleware (req, res, next) {
|
||||
const { user, mapConfigProvider, template } = res.locals;
|
||||
const { zoom, lon, lat, bbox } = req.query;
|
||||
const params = { zoom, lon, lat, bbox };
|
||||
@@ -248,7 +279,7 @@ function getImageOptionsFromBoundingBox (bbox = '') {
|
||||
}
|
||||
}
|
||||
|
||||
function getImage({ previewBackend, label }) {
|
||||
function getImage ({ previewBackend, label }) {
|
||||
return function getImageMiddleware (req, res, next) {
|
||||
const { imageOpts, mapConfigProvider } = res.locals;
|
||||
const { zoom, center, bbox } = imageOpts;
|
||||
@@ -263,7 +294,7 @@ function getImage({ previewBackend, label }) {
|
||||
if (zoom !== undefined && center) {
|
||||
const options = { mapConfigProvider, format, width, height, zoom, center };
|
||||
|
||||
return previewBackend.getImage(options, (err, image, stats) => {
|
||||
return previewBackend.getImage(options, (err, image, stats = {}) => {
|
||||
req.profiler.add(stats);
|
||||
|
||||
if (err) {
|
||||
@@ -280,9 +311,8 @@ function getImage({ previewBackend, label }) {
|
||||
|
||||
const options = { mapConfigProvider, format, width, height, bbox };
|
||||
|
||||
previewBackend.getImage(options, (err, image, stats) => {
|
||||
previewBackend.getImage(options, (err, image, stats = {}) => {
|
||||
req.profiler.add(stats);
|
||||
req.profiler.done('render-' + format);
|
||||
|
||||
if (err) {
|
||||
err.label = label;
|
||||
@@ -298,7 +328,7 @@ function getImage({ previewBackend, label }) {
|
||||
}
|
||||
|
||||
function setContentTypeHeader () {
|
||||
return function setContentTypeHeaderMiddleware(req, res, next) {
|
||||
return function setContentTypeHeaderMiddleware (req, res, next) {
|
||||
const format = req.params.format === 'jpg' ? 'jpeg' : 'png';
|
||||
|
||||
res.set('Content-Type', `image/${format}`);
|
||||
@@ -307,25 +337,23 @@ function setContentTypeHeader () {
|
||||
};
|
||||
}
|
||||
|
||||
function incrementMapViewsError (ctx) {
|
||||
return `ERROR: failed to increment mapview count for user '${ctx.user}': ${ctx.err}`;
|
||||
}
|
||||
|
||||
function incrementMapViews ({ metadataBackend }) {
|
||||
return function incrementMapViewsMiddleware(req, res, next) {
|
||||
const { user, mapConfigProvider } = res.locals;
|
||||
return function incrementMapViewsMiddleware (req, res, next) {
|
||||
const { user, mapConfigProvider, logger } = res.locals;
|
||||
|
||||
mapConfigProvider.getMapConfig((err, mapConfig) => {
|
||||
if (err) {
|
||||
global.logger.log(incrementMapViewsError({ user, err }));
|
||||
logger.warn({ exception: err }, 'Failed to increment mapview count');
|
||||
return next();
|
||||
}
|
||||
|
||||
res.locals.mapConfig = mapConfig;
|
||||
|
||||
const statTag = mapConfig.obj().stat_tag;
|
||||
|
||||
metadataBackend.incMapviewCount(user, statTag, (err) => {
|
||||
if (err) {
|
||||
global.logger.log(incrementMapViewsError({ user, err }));
|
||||
logger.warn({ exception: err }, 'Failed to increment mapview count');
|
||||
}
|
||||
|
||||
next();
|
||||
@@ -334,7 +362,7 @@ function incrementMapViews ({ metadataBackend }) {
|
||||
};
|
||||
}
|
||||
|
||||
function templateZoomCenter(view) {
|
||||
function templateZoomCenter (view) {
|
||||
if (view.zoom !== undefined && view.center) {
|
||||
return {
|
||||
zoom: view.zoom,
|
||||
@@ -344,7 +372,7 @@ function templateZoomCenter(view) {
|
||||
return false;
|
||||
}
|
||||
|
||||
function templateBounds(view) {
|
||||
function templateBounds (view) {
|
||||
if (view.bounds) {
|
||||
var hasAllBounds = ['west', 'south', 'east', 'north'].every(prop => Number.isFinite(view.bounds[prop]));
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
'use strict';
|
||||
|
||||
const tag = require('../middlewares/tag');
|
||||
const layergroupToken = require('../middlewares/layergroup-token');
|
||||
const coordinates = require('../middlewares/coordinates');
|
||||
const cleanUpQueryParams = require('../middlewares/clean-up-query-params');
|
||||
@@ -43,22 +44,23 @@ module.exports = class TileLayergroupController {
|
||||
this.surrogateKeysCache = surrogateKeysCache;
|
||||
}
|
||||
|
||||
register (mapRouter) {
|
||||
route (mapRouter) {
|
||||
// REGEXP: doesn't match with `val`
|
||||
const not = (val) => `(?!${val})([^\/]+?)`;
|
||||
const not = (val) => `(?!${val})([^\/]+?)`; // eslint-disable-line no-useless-escape
|
||||
|
||||
// Sadly, the path that matches 1 also matches 2, so we need to tell Express
// to run only the middlewares of the first path that matches;
// to do that we group all the paths in one array.
|
||||
mapRouter.get([
|
||||
`/:token/:z/:x/:y@:scale_factor?x.:format`, // 1
|
||||
`/:token/:z/:x/:y.:format`, // 2
|
||||
'/:token/:z/:x/:y@:scale_factor?x.:format', // 1
|
||||
'/:token/:z/:x/:y.:format', // 2
|
||||
`/:token${not('static')}/:layer/:z/:x/:y.(:format)`
|
||||
], this.middlewares());
|
||||
}
|
||||
|
||||
middlewares () {
|
||||
return [
|
||||
tag({ tags: ['tile'] }),
|
||||
layergroupToken(),
|
||||
coordinates(),
|
||||
credentials(),
|
||||
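A rough illustration of the grouped tile paths above; the example URLs are invented, and the not('static') guard appears to exist so that the /static/... preview endpoints seen elsewhere in this changeset are not captured as a layergroup token by the per-layer tile route:

```js
// Illustrative only: tokens, coordinates and formats below are made up.
// '/:token/:z/:x/:y@:scale_factor?x.:format'   ->  /1234abcd/3/2/1@2x.png   (retina raster tile)
// '/:token/:z/:x/:y.:format'                   ->  /1234abcd/3/2/1.mvt      (layergroup tile)
// `/:token${not('static')}/:layer/:z/:x/:y.(:format)`
//                                              ->  /1234abcd/0/3/2/1.png    (single-layer tile;
//                                                  'static' is excluded so preview URLs are not
//                                                  mistaken for a tile request here)
```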
@@ -90,14 +92,12 @@ function parseFormat (format = '') {
|
||||
return SUPPORTED_FORMATS[prettyFormat] ? prettyFormat : 'invalid';
|
||||
}
|
||||
|
||||
function getStatusCode(tile, format){
|
||||
function getStatusCode (tile, format) {
|
||||
return tile.length === 0 && format === 'mvt' ? 204 : 200;
|
||||
}
|
||||
|
||||
function getTile (tileBackend) {
|
||||
return function getTileMiddleware (req, res, next) {
|
||||
req.profiler.start(`windshaft.${req.params.layer ? 'maplayer_tile' : 'map_tile'}`);
|
||||
|
||||
const { mapConfigProvider } = res.locals;
|
||||
const { token } = res.locals;
|
||||
const { layer, z, x, y, format } = req.params;
|
||||
@@ -149,9 +149,8 @@ function incrementErrorMetrics (statsClient) {
|
||||
|
||||
function tileError () {
|
||||
return function tileErrorMiddleware (err, req, res, next) {
|
||||
|
||||
// See https://github.com/Vizzuality/Windshaft-cartodb/issues/68
|
||||
let errMsg = err.message ? ( '' + err.message ) : ( '' + err );
|
||||
let errMsg = err.message ? ('' + err.message) : ('' + err);
|
||||
|
||||
// Rewrite mapnik parsing errors to start with layer number
|
||||
const matches = errMsg.match("(.*) in style 'layer([0-9]+)'");
|
||||
@@ -3,14 +3,12 @@
|
||||
module.exports = function authorize (authBackend) {
|
||||
return function authorizeMiddleware (req, res, next) {
|
||||
authBackend.authorize(req, res, (err, authorized) => {
|
||||
req.profiler.done('authorize');
|
||||
|
||||
if (err) {
|
||||
return next(err);
|
||||
}
|
||||
|
||||
if(!authorized) {
|
||||
err = new Error("Sorry, you are unauthorized (permission denied)");
|
||||
if (!authorized) {
|
||||
err = new Error('Sorry, you are unauthorized (permission denied)');
|
||||
err.http_status = 403;
|
||||
return next(err);
|
||||
}
|
||||
@@ -6,11 +6,11 @@ module.exports = function setCacheChannelHeader () {
|
||||
return next();
|
||||
}
|
||||
|
||||
const { mapConfigProvider } = res.locals;
|
||||
const { mapConfigProvider, logger } = res.locals;
|
||||
|
||||
mapConfigProvider.getAffectedTables((err, affectedTables) => {
|
||||
if (err) {
|
||||
global.logger.warn('ERROR generating Cache Channel Header:', err);
|
||||
logger.warn({ exception: err }, 'Error generating Cache Channel Header');
|
||||
return next();
|
||||
}
|
||||
|
||||
lib/api/middlewares/cache-control-header.js (new file, 85 lines)
@@ -0,0 +1,85 @@
'use strict';

const ONE_MINUTE_IN_SECONDS = 60;
const THREE_MINUTE_IN_SECONDS = 60 * 3;
const FIVE_MINUTES_IN_SECONDS = ONE_MINUTE_IN_SECONDS * 5;
const TEN_MINUTES_IN_SECONDS = ONE_MINUTE_IN_SECONDS * 10;
const FIFTEEN_MINUTES_IN_SECONDS = ONE_MINUTE_IN_SECONDS * 15;
const THIRTY_MINUTES_IN_SECONDS = ONE_MINUTE_IN_SECONDS * 30;
const ONE_HOUR_IN_SECONDS = ONE_MINUTE_IN_SECONDS * 60;
const ONE_YEAR_IN_SECONDS = ONE_HOUR_IN_SECONDS * 24 * 365;

const FALLBACK_TTL = global.environment.varnish.fallbackTtl || FIVE_MINUTES_IN_SECONDS;

const validFallbackTTL = [
    ONE_MINUTE_IN_SECONDS,
    THREE_MINUTE_IN_SECONDS,
    FIVE_MINUTES_IN_SECONDS,
    TEN_MINUTES_IN_SECONDS,
    FIFTEEN_MINUTES_IN_SECONDS,
    THIRTY_MINUTES_IN_SECONDS,
    ONE_HOUR_IN_SECONDS
];

module.exports = function setCacheControlHeader ({
    ttl = ONE_YEAR_IN_SECONDS,
    fallbackTtl = FALLBACK_TTL,
    revalidate = false
} = {}) {
    if (!validFallbackTTL.includes(fallbackTtl)) {
        const message = [
            'Invalid fallback TTL value for Cache-Control header.',
            `Got ${fallbackTtl}, expected ${validFallbackTTL.join(', ')}`
        ].join(' ');

        throw new Error(message);
    }

    return function setCacheControlHeaderMiddleware (req, res, next) {
        if (req.method !== 'GET') {
            return next();
        }

        const { mapConfigProvider = { getAffectedTables: callback => callback() }, logger } = res.locals;

        mapConfigProvider.getAffectedTables((err, affectedTables) => {
            if (err) {
                logger.warn({ exception: err }, 'Error generating Cache Control Header');
                return next();
            }

            const directives = ['public'];

            if (everyAffectedTableCanBeInvalidated(affectedTables)) {
                directives.push(`max-age=${ttl}`);
            } else {
                directives.push(`max-age=${computeNextTTL({ ttlInSeconds: fallbackTtl })}`);
            }

            if (revalidate) {
                directives.push('must-revalidate');
            }

            res.set('Cache-Control', directives.join(','));

            next();
        });
    };
};

function everyAffectedTableCanBeInvalidated (affectedTables) {
    const skipNotUpdatedAtTables = false;
    const skipAnalysisCachedTables = true;

    return affectedTables &&
        affectedTables.getTables(skipNotUpdatedAtTables, skipAnalysisCachedTables)
            .every(table => table.updated_at !== null);
}

function computeNextTTL ({ ttlInSeconds } = {}) {
    const nowInSeconds = Math.ceil(Date.now() / 1000);
    const secondsAfterPreviousTTLStep = nowInSeconds % ttlInSeconds;
    const secondsToReachTheNextTTLStep = ttlInSeconds - secondsAfterPreviousTTLStep;

    return secondsToReachTheNextTTLStep;
}
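A quick worked example of the fallback branch above: when some affected table has no updated_at, max-age is not the fallback TTL itself but the time left until the next TTL-aligned boundary, so responses cached within the same window all expire together. The numbers below are illustrative:

```js
// Illustrative numbers only.
const ttlInSeconds = 300;                // fallbackTtl = FIVE_MINUTES_IN_SECONDS
const nowInSeconds = 1000000020;         // Math.ceil(Date.now() / 1000) at request time

const secondsAfterPreviousTTLStep = nowInSeconds % ttlInSeconds;   // 120
const maxAge = ttlInSeconds - secondsAfterPreviousTTLStep;         // 180

// Resulting header: 'Cache-Control: public,max-age=180'
// A response computed later in the same 5-minute window gets a smaller
// max-age, so all of them expire at the same wall-clock boundary.
```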
@@ -1,13 +1,11 @@
|
||||
'use strict';
|
||||
|
||||
module.exports = function checkJsonContentType () {
|
||||
return function checkJsonContentTypeMiddleware(req, res, next) {
|
||||
return function checkJsonContentTypeMiddleware (req, res, next) {
|
||||
if (req.method === 'POST' && !req.is('application/json')) {
|
||||
return next(new Error('POST data must be of type application/json'));
|
||||
}
|
||||
|
||||
req.profiler.done('checkJsonContentTypeMiddleware');
|
||||
|
||||
next();
|
||||
};
|
||||
};
|
||||
@@ -4,7 +4,7 @@ const VALID_IMAGE_FORMATS = ['png', 'jpg'];
|
||||
|
||||
module.exports = function checkStaticImageFormat () {
|
||||
return function checkStaticImageFormatMiddleware (req, res, next) {
|
||||
if(!VALID_IMAGE_FORMATS.includes(req.params.format)) {
|
||||
if (!VALID_IMAGE_FORMATS.includes(req.params.format)) {
|
||||
return next(new Error(`Unsupported image format "${req.params.format}"`));
|
||||
}
|
||||
|
||||
lib/api/middlewares/client-header.js (new file, 13 lines)
@@ -0,0 +1,13 @@
'use strict';

module.exports = function clientHeader () {
    return function clientHeaderMiddleware (req, res, next) {
        const { client } = req.query;

        if (client) {
            res.set('Carto-Client', client);
        }

        return next();
    };
};
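A hypothetical request/response pair for the middleware above; the URL shape is made up, only the client query parameter and the Carto-Client header come from the code:

```js
// GET /maps/1234abcd/3/2/1.png?client=my-dashboard
//
//   HTTP/1.1 200 OK
//   Carto-Client: my-dashboard
//
// Requests without ?client= simply omit the header.
```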
@@ -6,15 +6,18 @@ module.exports = function cors () {
|
||||
'X-Requested-With',
|
||||
'X-Prototype-Version',
|
||||
'X-CSRF-Token',
|
||||
'Authorization'
|
||||
'Authorization',
|
||||
'Carto-Event',
|
||||
'Carto-Event-Source',
|
||||
'Carto-Event-Group-Id'
|
||||
];
|
||||
|
||||
if (req.method === 'OPTIONS') {
|
||||
headers.push('Content-Type');
|
||||
}
|
||||
|
||||
res.set("Access-Control-Allow-Origin", "*");
|
||||
res.set("Access-Control-Allow-Headers", headers.join(', '));
|
||||
res.set('Access-Control-Allow-Origin', '*');
|
||||
res.set('Access-Control-Allow-Headers', headers.join(', '));
|
||||
|
||||
next();
|
||||
};
|
||||
@@ -3,24 +3,24 @@
|
||||
const basicAuth = require('basic-auth');
|
||||
|
||||
module.exports = function credentials () {
|
||||
return function credentialsMiddleware(req, res, next) {
|
||||
return function credentialsMiddleware (req, res, next) {
|
||||
const apikeyCredentials = getApikeyCredentialsFromRequest(req);
|
||||
|
||||
res.locals.api_key = apikeyCredentials.token;
|
||||
res.locals.basicAuthUsername = apikeyCredentials.username;
|
||||
res.set('vary', 'Authorization'); //Honor Authorization header when caching.
|
||||
res.set('vary', 'Authorization'); // Honor Authorization header when caching.
|
||||
|
||||
return next();
|
||||
};
|
||||
};
|
||||
|
||||
function getApikeyCredentialsFromRequest(req) {
|
||||
function getApikeyCredentialsFromRequest (req) {
|
||||
let apikeyCredentials = {
|
||||
token: null,
|
||||
username: null,
|
||||
username: null
|
||||
};
|
||||
|
||||
for (let getter of apikeyGetters) {
|
||||
for (const getter of apikeyGetters) {
|
||||
apikeyCredentials = getter(req);
|
||||
if (apikeyTokenFound(apikeyCredentials)) {
|
||||
break;
|
||||
@@ -33,10 +33,10 @@ function getApikeyCredentialsFromRequest(req) {
|
||||
const apikeyGetters = [
|
||||
getApikeyTokenFromHeaderAuthorization,
|
||||
getApikeyTokenFromRequestQueryString,
|
||||
getApikeyTokenFromRequestBody,
|
||||
getApikeyTokenFromRequestBody
|
||||
];
|
||||
|
||||
function getApikeyTokenFromHeaderAuthorization(req) {
|
||||
function getApikeyTokenFromHeaderAuthorization (req) {
|
||||
const credentials = basicAuth(req);
|
||||
|
||||
if (credentials) {
|
||||
@@ -47,12 +47,12 @@ function getApikeyTokenFromHeaderAuthorization(req) {
|
||||
} else {
|
||||
return {
|
||||
username: null,
|
||||
token: null,
|
||||
token: null
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
function getApikeyTokenFromRequestQueryString(req) {
|
||||
function getApikeyTokenFromRequestQueryString (req) {
|
||||
let token = null;
|
||||
|
||||
if (req.query && req.query.api_key) {
|
||||
@@ -63,11 +63,11 @@ function getApikeyTokenFromRequestQueryString(req) {
|
||||
|
||||
return {
|
||||
username: null,
|
||||
token: token,
|
||||
token: token
|
||||
};
|
||||
}
|
||||
|
||||
function getApikeyTokenFromRequestBody(req) {
|
||||
function getApikeyTokenFromRequestBody (req) {
|
||||
let token = null;
|
||||
|
||||
if (req.body && req.body.api_key) {
|
||||
@@ -78,10 +78,10 @@ function getApikeyTokenFromRequestBody(req) {
|
||||
|
||||
return {
|
||||
username: null,
|
||||
token: token,
|
||||
token: token
|
||||
};
|
||||
}
|
||||
|
||||
function apikeyTokenFound(apikey) {
|
||||
function apikeyTokenFound (apikey) {
|
||||
return !!apikey && !!apikey.token;
|
||||
}
|
||||
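To summarize the getters above, the API key is looked up in three places, in this order; the URLs below are invented and only the parameter names come from the code:

```js
// 1. HTTP Basic auth (the only getter shown here that can also carry a username):
//      curl -u "alice:THE_API_KEY" https://example.com/maps
// 2. Query string:
//      GET /maps?api_key=THE_API_KEY
// 3. JSON body:
//      POST /maps   { "api_key": "THE_API_KEY", ... }
//
// The first getter that finds a token wins (apikeyTokenFound stops the loop);
// the query-string and body getters always return { username: null, token }.
```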
@@ -7,10 +7,8 @@ module.exports = function dbConnSetup (pgConnection) {
|
||||
const { user } = res.locals;
|
||||
|
||||
pgConnection.setDBConn(user, res.locals, (err) => {
|
||||
req.profiler.done('dbConnSetup');
|
||||
|
||||
if (err) {
|
||||
if (err.message && -1 !== err.message.indexOf('name not found')) {
|
||||
if (err.message && err.message.indexOf('name not found') !== -1) {
|
||||
err.http_status = 404;
|
||||
}
|
||||
|
||||
@@ -1,42 +1,31 @@
|
||||
'use strict';
|
||||
|
||||
const _ = require('underscore');
|
||||
const debug = require('debug')('windshaft:cartodb:error-middleware');
|
||||
const setCommonHeaders = require('../../utils/common-headers');
|
||||
|
||||
module.exports = function errorMiddleware (/* options */) {
|
||||
return function error (err, req, res, next) {
|
||||
// jshint unused:false
|
||||
// jshint maxcomplexity:9
|
||||
var allErrors = Array.isArray(err) ? err : [err];
|
||||
const { logger } = res.locals;
|
||||
const errors = populateLimitErrors(Array.isArray(err) ? err : [err]);
|
||||
|
||||
allErrors = populateLimitErrors(allErrors);
|
||||
errors.forEach((err) => logger.error({ exception: err }, 'Error while handling the request'));
|
||||
|
||||
const label = err.label || 'UNKNOWN';
|
||||
err = allErrors[0] || new Error(label);
|
||||
allErrors[0] = err;
|
||||
setCommonHeaders(req, res, () => {
|
||||
const errorResponseBody = {
|
||||
errors: errors.map(errorMessage),
|
||||
errors_with_context: errors.map(errorMessageWithContext)
|
||||
};
|
||||
|
||||
var statusCode = findStatusCode(err);
|
||||
// If a callback was requested, force status to 200
|
||||
res.status(req.query.callback ? 200 : findStatusCode(errors[0]));
|
||||
|
||||
setErrorHeader(allErrors, statusCode, res);
|
||||
debug('[%s ERROR] -- %d: %s, %s', label, statusCode, err, err.stack);
|
||||
if (req.query && req.query.callback) {
|
||||
res.jsonp(errorResponseBody);
|
||||
} else {
|
||||
res.json(errorResponseBody);
|
||||
}
|
||||
|
||||
// If a callback was requested, force status to 200
|
||||
if (req.query && req.query.callback) {
|
||||
statusCode = 200;
|
||||
}
|
||||
|
||||
var errorResponseBody = {
|
||||
errors: allErrors.map(errorMessage),
|
||||
errors_with_context: allErrors.map(errorMessageWithContext)
|
||||
};
|
||||
|
||||
res.status(statusCode);
|
||||
|
||||
if (req.query && req.query.callback) {
|
||||
res.jsonp(errorResponseBody);
|
||||
} else {
|
||||
res.json(errorResponseBody);
|
||||
}
|
||||
return next();
|
||||
});
|
||||
};
|
||||
};
|
||||
|
||||
@@ -52,10 +41,10 @@ function isTimeoutError (errorTypes) {
|
||||
return errorTypes.renderTimeoutError || errorTypes.datasourceTimeoutError;
|
||||
}
|
||||
|
||||
function getErrorTypes(error) {
|
||||
function getErrorTypes (error) {
|
||||
return {
|
||||
renderTimeoutError: isRenderTimeoutError(error),
|
||||
datasourceTimeoutError: isDatasourceTimeoutError(error),
|
||||
datasourceTimeoutError: isDatasourceTimeoutError(error)
|
||||
};
|
||||
}
|
||||
|
||||
@@ -99,9 +88,9 @@ function populateLimitErrors (errors) {
|
||||
});
|
||||
}
|
||||
|
||||
function findStatusCode(err) {
|
||||
function findStatusCode (err) {
|
||||
var statusCode;
|
||||
if ( err.http_status ) {
|
||||
if (err.http_status) {
|
||||
statusCode = err.http_status;
|
||||
} else {
|
||||
statusCode = statusFromErrorMessage('' + err);
|
||||
@@ -111,43 +100,38 @@ function findStatusCode(err) {
|
||||
|
||||
module.exports.findStatusCode = findStatusCode;
|
||||
|
||||
function statusFromErrorMessage(errMsg) {
|
||||
function statusFromErrorMessage (errMsg) {
|
||||
// Find an appropriate statusCode based on message
|
||||
// jshint maxcomplexity:7
|
||||
var statusCode = 400;
|
||||
if ( -1 !== errMsg.indexOf('permission denied') ) {
|
||||
if (errMsg.indexOf('permission denied') !== -1) {
|
||||
statusCode = 403;
|
||||
}
|
||||
else if ( -1 !== errMsg.indexOf('authentication failed') ) {
|
||||
} else if (errMsg.indexOf('authentication failed') !== -1) {
|
||||
statusCode = 403;
|
||||
}
|
||||
else if (errMsg.match(/Postgis Plugin.*[\s|\n].*column.*does not exist/)) {
|
||||
} else if (errMsg.match(/Postgis Plugin.*[\s|\n].*column.*does not exist/)) {
|
||||
statusCode = 400;
|
||||
}
|
||||
else if ( -1 !== errMsg.indexOf('does not exist') ) {
|
||||
if ( -1 !== errMsg.indexOf(' role ') ) {
|
||||
} else if (errMsg.indexOf('does not exist') !== -1) {
|
||||
if (errMsg.indexOf(' role ') !== -1) {
|
||||
statusCode = 403; // role 'xxx' does not exist
|
||||
} else if ( errMsg.match(/function .* does not exist/) ) {
|
||||
} else if (errMsg.match(/function .* does not exist/)) {
|
||||
statusCode = 400; // invalid SQL (SQL function does not exist)
|
||||
} else {
|
||||
statusCode = 404;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
return statusCode;
|
||||
}
|
||||
|
||||
function errorMessage(err) {
|
||||
function errorMessage (err) {
|
||||
// See https://github.com/Vizzuality/Windshaft-cartodb/issues/68
|
||||
var message = (_.isString(err) ? err : err.message) || 'Unknown error';
|
||||
var message = (typeof err === 'string' ? err : err.message) || 'Unknown error';
|
||||
|
||||
return stripConnectionInfo(message);
|
||||
}
|
||||
|
||||
module.exports.errorMessage = errorMessage;
|
||||
|
||||
function stripConnectionInfo(message) {
|
||||
function stripConnectionInfo (message) {
|
||||
// Strip connection info, if any
|
||||
return message
|
||||
// See https://github.com/CartoDB/Windshaft/issues/173
|
||||
@@ -168,71 +152,21 @@ function shouldBeExposed (prop) {
|
||||
return !!ERROR_INFO_TO_EXPOSE[prop];
|
||||
}
|
||||
|
||||
function errorMessageWithContext(err) {
|
||||
function errorMessageWithContext (err) {
|
||||
// See https://github.com/Vizzuality/Windshaft-cartodb/issues/68
|
||||
var message = (_.isString(err) ? err : err.message) || 'Unknown error';
|
||||
var message = (typeof err === 'string' ? err : err.message) || 'Unknown error';
|
||||
|
||||
var error = {
|
||||
type: err.type || 'unknown',
|
||||
message: stripConnectionInfo(message),
|
||||
message: stripConnectionInfo(message)
|
||||
};
|
||||
|
||||
for (var prop in err) {
|
||||
// type & message are properties from Error's prototype and will be skipped
|
||||
if (err.hasOwnProperty(prop) && shouldBeExposed(prop)) {
|
||||
if (Object.prototype.hasOwnProperty.call(err, prop) && shouldBeExposed(prop)) {
|
||||
error[prop] = err[prop];
|
||||
}
|
||||
}
|
||||
|
||||
return error;
|
||||
}
|
||||
|
||||
function setErrorHeader(errors, statusCode, res) {
|
||||
let errorsCopy = errors.slice(0);
|
||||
const mainError = errorsCopy.shift();
|
||||
|
||||
let errorsLog = {
|
||||
mainError: {
|
||||
statusCode: statusCode || 200,
|
||||
message: mainError.message,
|
||||
name: mainError.name,
|
||||
label: mainError.label,
|
||||
type: mainError.type,
|
||||
subtype: mainError.subtype
|
||||
}
|
||||
};
|
||||
|
||||
errorsLog.moreErrors = errorsCopy.map(error => {
|
||||
return {
|
||||
message: error.message,
|
||||
name: error.name,
|
||||
label: error.label,
|
||||
type: error.type,
|
||||
subtype: error.subtype
|
||||
};
|
||||
});
|
||||
|
||||
res.set('X-Tiler-Errors', stringifyForLogs(errorsLog));
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove problematic nested characters
|
||||
* from object for logs RegEx
|
||||
*
|
||||
* @param {Object} object
|
||||
*/
|
||||
function stringifyForLogs(object) {
|
||||
Object.keys(object).map(key => {
|
||||
if(typeof object[key] === 'string') {
|
||||
object[key] = object[key].replace(/[^a-zA-Z0-9]/g, ' ');
|
||||
} else if (typeof object[key] === 'object') {
|
||||
stringifyForLogs(object[key]);
|
||||
} else if (object[key] instanceof Array) {
|
||||
for (let element of object[key]) {
|
||||
stringifyForLogs(element);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return JSON.stringify(object);
|
||||
}
|
||||
lib/api/middlewares/increment-map-view-count.js (new file, +16 lines)
@@ -0,0 +1,16 @@
'use strict';

module.exports = function incrementMapViewCount (metadataBackend) {
    return function incrementMapViewCountMiddleware (req, res, next) {
        const { mapConfig, user, logger } = res.locals;
        const statTag = mapConfig.obj().stat_tag;

        metadataBackend.incMapviewCount(user, statTag, (err) => {
            if (err) {
                logger.warn({ exception: err }, 'Failed to increment mapview count');
            }

            next();
        });
    };
};
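Note: a minimal sketch of how a factory-style middleware like the one above is typically mounted on an Express router. The route path and the stubbed metadataBackend below are illustrative assumptions, not part of this changeset; the only signature taken from the diff is incMapviewCount(user, statTag, callback).

// Illustrative wiring only; everything except incrementMapViewCount is a placeholder.
const express = require('express');
const incrementMapViewCount = require('./lib/api/middlewares/increment-map-view-count');

// Stub backend exposing the incMapviewCount signature the middleware relies on.
const metadataBackend = {
    incMapviewCount (user, statTag, callback) { callback(null); }
};

const router = express.Router();
// Hypothetical map route; earlier middlewares are assumed to have set
// res.locals.mapConfig, res.locals.user and res.locals.logger.
router.get('/map/:token', incrementMapViewCount(metadataBackend));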
@@ -1,16 +1,18 @@
|
||||
'use strict';
|
||||
|
||||
module.exports = function setLastModifiedHeader () {
|
||||
return function setLastModifiedHeaderMiddleware(req, res, next) {
|
||||
return function setLastModifiedHeaderMiddleware (req, res, next) {
|
||||
if (req.method !== 'GET') {
|
||||
return next();
|
||||
}
|
||||
|
||||
const { mapConfigProvider, cache_buster } = res.locals;
|
||||
const { mapConfigProvider, cache_buster: cacheBuster, logger } = res.locals;
|
||||
|
||||
if (cache_buster) {
|
||||
const cacheBuster = parseInt(cache_buster, 10);
|
||||
const lastModifiedDate = Number.isFinite(cacheBuster) ? new Date(cacheBuster) : new Date();
|
||||
if (cacheBuster) {
|
||||
const cacheBusterTimestamp = parseInt(cacheBuster, 10);
|
||||
const lastModifiedDate = Number.isFinite(cacheBusterTimestamp) && cacheBusterTimestamp !== 0
|
||||
? new Date(cacheBusterTimestamp)
|
||||
: new Date();
|
||||
|
||||
res.set('Last-Modified', lastModifiedDate.toUTCString());
|
||||
|
||||
@@ -19,7 +21,7 @@ module.exports = function setLastModifiedHeader () {
|
||||
|
||||
mapConfigProvider.getAffectedTables((err, affectedTables) => {
|
||||
if (err) {
|
||||
global.logger.warn('ERROR generating Last Modified Header:', err);
|
||||
logger.warn({ exception: err }, 'Error generating Last Modified Header');
|
||||
return next();
|
||||
}
|
||||
|
||||
@@ -34,6 +36,8 @@ module.exports = function setLastModifiedHeader () {
|
||||
|
||||
res.set('Last-Modified', lastModifiedDate.toUTCString());
|
||||
|
||||
res.locals.cache_buster = lastUpdatedAt;
|
||||
|
||||
next();
|
||||
});
|
||||
};
|
||||
@@ -11,6 +11,10 @@ module.exports = function setLastUpdatedTimeToLayergroup () {
|
||||
}
|
||||
|
||||
if (!affectedTables) {
|
||||
res.locals.cache_buster = 0;
|
||||
layergroup.layergroupid = `${layergroup.layergroupid}:${res.locals.cache_buster}`;
|
||||
layergroup.last_updated = new Date(res.locals.cache_buster).toISOString();
|
||||
|
||||
return next();
|
||||
}
|
||||
|
||||
@@ -22,17 +26,19 @@ module.exports = function setLastUpdatedTimeToLayergroup () {
|
||||
layergroup.layergroupid = layergroup.layergroupid + ':' + lastUpdateTime;
|
||||
layergroup.last_updated = new Date(lastUpdateTime).toISOString();
|
||||
|
||||
res.locals.cache_buster = lastUpdateTime;
|
||||
|
||||
next();
|
||||
});
|
||||
};
|
||||
};
|
||||
|
||||
function getLastUpdatedTime(analysesResults, lastUpdateTime) {
|
||||
function getLastUpdatedTime (analysesResults, lastUpdateTime) {
|
||||
if (!Array.isArray(analysesResults)) {
|
||||
return lastUpdateTime;
|
||||
}
|
||||
return analysesResults.reduce(function(lastUpdateTime, analysis) {
|
||||
return analysis.getNodes().reduce(function(lastNodeUpdatedAtTime, node) {
|
||||
return analysesResults.reduce(function (lastUpdateTime, analysis) {
|
||||
return analysis.getNodes().reduce(function (lastNodeUpdatedAtTime, node) {
|
||||
var nodeUpdatedAtDate = node.getUpdatedAt();
|
||||
var nodeUpdatedTimeAt = (nodeUpdatedAtDate && nodeUpdatedAtDate.getTime()) || 0;
|
||||
return nodeUpdatedTimeAt > lastNodeUpdatedAtTime ? nodeUpdatedTimeAt : lastNodeUpdatedAtTime;
|
||||
@@ -1,7 +1,7 @@
|
||||
'use strict';
|
||||
|
||||
module.exports = function setLayerStats (pgConnection, statsBackend) {
|
||||
return function setLayerStatsMiddleware(req, res, next) {
|
||||
return function setLayerStatsMiddleware (req, res, next) {
|
||||
const { user, mapConfig } = res.locals;
|
||||
const layergroup = res.body;
|
||||
|
||||
@@ -10,7 +10,7 @@ module.exports = function setLayerStats (pgConnection, statsBackend) {
|
||||
return next(err);
|
||||
}
|
||||
|
||||
statsBackend.getStats(mapConfig, connection, function(err, layersStats) {
|
||||
statsBackend.getStats(mapConfig, connection, function (err, layersStats) {
|
||||
if (err) {
|
||||
return next(err);
|
||||
}
|
||||
@@ -6,8 +6,9 @@ module.exports = function setLayergroupIdHeader (templateMaps, useTemplateHash)
|
||||
const layergroup = res.body;
|
||||
|
||||
if (useTemplateHash) {
|
||||
var templateHash = templateMaps.fingerPrint(template).substring(0, 8);
|
||||
const templateHash = templateMaps.fingerPrint(template).substring(0, 8);
|
||||
layergroup.layergroupid = `${user}@${templateHash}@${layergroup.layergroupid}`;
|
||||
res.locals.templateHash = templateHash;
|
||||
}
|
||||
|
||||
res.set('X-Layergroup-Id', layergroup.layergroupid);
|
||||
@@ -9,7 +9,7 @@ module.exports = function setMetadataToLayergroup (layergroupMetadata, includeQu
|
||||
layergroupMetadata.addAnalysesMetadata(user, layergroup, analysesResults, includeQuery);
|
||||
layergroupMetadata.addTurboCartoContextMetadata(layergroup, mapConfig.obj(), context);
|
||||
layergroupMetadata.addAggregationContextMetadata(layergroup, mapConfig.obj(), context);
|
||||
layergroupMetadata.addDateWrappingMetadata (layergroup, mapConfig.obj());
|
||||
layergroupMetadata.addDateWrappingMetadata(layergroup, mapConfig.obj());
|
||||
layergroupMetadata.addTileJsonMetadata(layergroup, user, mapConfig, userApiKey);
|
||||
|
||||
next();
|
||||
@@ -13,13 +13,17 @@ module.exports = function layergroupToken () {
|
||||
res.locals.token = layergroupToken.token;
|
||||
res.locals.cache_buster = layergroupToken.cacheBuster;
|
||||
|
||||
if (layergroupToken.templateHash) {
|
||||
res.locals.templateHash = layergroupToken.templateHash;
|
||||
}
|
||||
|
||||
if (layergroupToken.signer) {
|
||||
res.locals.signer = layergroupToken.signer;
|
||||
|
||||
if (res.locals.signer !== user) {
|
||||
const err = new Error(authErrorMessageTemplate(res.locals.signer, user));
|
||||
err.type = 'auth';
|
||||
err.http_status = (req.query && req.query.callback) ? 200: 403;
|
||||
err.http_status = (req.query && req.query.callback) ? 200 : 403;
|
||||
|
||||
return next(err);
|
||||
}
|
||||
lib/api/middlewares/logger.js (new file, +12 lines)
@@ -0,0 +1,12 @@
'use strict';

const uuid = require('uuid');

module.exports = function initLogger ({ logger }) {
    return function initLoggerMiddleware (req, res, next) {
        res.locals.logger = logger.child({ request_id: req.get('X-Request-Id') || uuid.v4(), 'cdb-user': res.locals.user });
        res.locals.logger.info({ client_request: req }, 'Incoming request');
        res.on('finish', () => res.locals.logger.info({ server_response: res, status: res.statusCode }, 'Response sent'));
        next();
    };
};
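Note: any middleware running after initLogger can pick up the per-request child logger from res.locals. A short sketch, assuming a pino/bunyan-style logger (which matches the .child() and .info(obj, msg) calls above); the downstream middleware name is hypothetical.

// Sketch only; not part of the diff.
module.exports = function exampleDownstream () {
    return function exampleDownstreamMiddleware (req, res, next) {
        const { logger } = res.locals; // child logger created by initLogger, carries request_id and cdb-user
        logger.info({ path: req.path }, 'Handling request');
        next();
    };
};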
@@ -6,26 +6,24 @@ module.exports = function lzma () {
|
||||
const lzmaWorker = new LZMA();
|
||||
|
||||
return function lzmaMiddleware (req, res, next) {
|
||||
if (!req.query.hasOwnProperty('lzma')) {
|
||||
if (!Object.prototype.hasOwnProperty.call(req.query, 'lzma')) {
|
||||
return next();
|
||||
}
|
||||
|
||||
// Decode (from base64)
|
||||
var lzma = new Buffer(req.query.lzma, 'base64')
|
||||
var lzma = Buffer.from(req.query.lzma, 'base64')
|
||||
.toString('binary')
|
||||
.split('')
|
||||
.map(function(c) {
|
||||
.map(function (c) {
|
||||
return c.charCodeAt(0) - 128;
|
||||
});
|
||||
|
||||
// Decompress
|
||||
lzmaWorker.decompress(lzma, function(result) {
|
||||
lzmaWorker.decompress(lzma, function (result) {
|
||||
try {
|
||||
delete req.query.lzma;
|
||||
Object.assign(req.query, JSON.parse(result));
|
||||
|
||||
req.profiler.done('lzma');
|
||||
|
||||
next();
|
||||
} catch (err) {
|
||||
next(new Error('Error parsing lzma as JSON: ' + err));
|
||||
@@ -4,7 +4,6 @@ module.exports = function mapError (options) {
|
||||
const { addContext = false, label = 'MAPS CONTROLLER' } = options;
|
||||
|
||||
return function mapErrorMiddleware (err, req, res, next) {
|
||||
req.profiler.done('error');
|
||||
const { mapConfig } = res.locals;
|
||||
|
||||
if (addContext) {
|
||||
@@ -17,7 +16,7 @@ module.exports = function mapError (options) {
|
||||
};
|
||||
};
|
||||
|
||||
function populateError(err, mapConfig) {
|
||||
function populateError (err, mapConfig) {
|
||||
var error = new Error(err.message);
|
||||
error.http_status = err.http_status;
|
||||
|
||||
@@ -10,15 +10,27 @@ module.exports = function createMapStoreMapConfigProvider (
|
||||
forcedFormat = null
|
||||
) {
|
||||
return function createMapStoreMapConfigProviderMiddleware (req, res, next) {
|
||||
const { user, token, cache_buster, api_key } = res.locals;
|
||||
const { user, token, cache_buster: cacheBuster, api_key: apiKey } = res.locals;
|
||||
const { dbuser, dbname, dbpassword, dbhost, dbport } = res.locals;
|
||||
const { layer: layerFromParams, z, x, y, scale_factor, format } = req.params;
|
||||
const { layer: layerFromParams, z, x, y, scale_factor: scaleFactor, format } = req.params;
|
||||
const { layer: layerFromQuery } = req.query;
|
||||
|
||||
const params = {
|
||||
user, token, cache_buster, api_key,
|
||||
dbuser, dbname, dbpassword, dbhost, dbport,
|
||||
layer: (layerFromQuery || layerFromParams), z, x, y, scale_factor, format
|
||||
user,
|
||||
token,
|
||||
cache_buster: cacheBuster,
|
||||
api_key: apiKey,
|
||||
dbuser,
|
||||
dbname,
|
||||
dbpassword,
|
||||
dbhost,
|
||||
dbport,
|
||||
layer: (layerFromQuery || layerFromParams),
|
||||
z,
|
||||
x,
|
||||
y,
|
||||
scale_factor: scaleFactor,
|
||||
format
|
||||
};
|
||||
|
||||
if (forcedFormat) {
|
||||
lib/api/middlewares/metrics.js (new file, +181 lines)
@@ -0,0 +1,181 @@
|
||||
'use strict';
|
||||
|
||||
const EVENT_VERSION = '1';
|
||||
const MAX_LENGTH = 100;
|
||||
|
||||
module.exports = function metrics ({ enabled, tags, metricsBackend }) {
|
||||
if (!enabled) {
|
||||
return function metricsDisabledMiddleware (req, res, next) {
|
||||
next();
|
||||
};
|
||||
}
|
||||
|
||||
if (!tags || !tags.event) {
|
||||
throw new Error('Missing required "event" parameter to report metrics');
|
||||
}
|
||||
|
||||
return function metricsMiddleware (req, res, next) {
|
||||
// FIXME: use parent logger as we don't want bind the error to the request
|
||||
// but we still want to know if an error is thrown
|
||||
const { logger } = res.locals;
|
||||
|
||||
res.on('finish', () => {
|
||||
const { event, attributes } = getEventData(req, res, tags);
|
||||
|
||||
metricsBackend.send(event, attributes)
|
||||
.catch((err) => logger.error({ exception: err, event }, 'Failed to publish event'));
|
||||
});
|
||||
|
||||
return next();
|
||||
};
|
||||
};
|
||||
|
||||
function getEventData (req, res, tags) {
|
||||
const event = tags.event;
|
||||
const extra = {};
|
||||
if (tags.from) {
|
||||
if (tags.from.req) {
|
||||
Object.assign(extra, getFromReq(req, tags.from.req));
|
||||
}
|
||||
|
||||
if (tags.from.res) {
|
||||
Object.assign(extra, getFromRes(res, tags.from.res));
|
||||
}
|
||||
}
|
||||
|
||||
const attributes = Object.assign({}, {
|
||||
client_event: normalizedField(req.get('Carto-Event')),
|
||||
client_event_group_id: normalizedField(req.get('Carto-Event-Group-Id')),
|
||||
event_source: normalizedField(req.get('Carto-Event-Source')),
|
||||
event_time: new Date().toISOString(),
|
||||
user_id: res.locals.userId,
|
||||
user_agent: req.get('User-Agent'),
|
||||
map_id: getLayergroupid({ res }),
|
||||
cache_buster: getCacheBuster({ res }),
|
||||
template_hash: getTemplateHash({ res }),
|
||||
stat_tag: getStatTag({ res }),
|
||||
response_code: res.statusCode.toString(),
|
||||
response_time: getResponseTime(req),
|
||||
source_domain: req.hostname,
|
||||
event_version: EVENT_VERSION
|
||||
}, tags.attributes, extra);
|
||||
|
||||
// remove undefined properties
|
||||
Object.keys(attributes).forEach(key => attributes[key] === undefined && delete attributes[key]);
|
||||
|
||||
return { event, attributes };
|
||||
}
|
||||
|
||||
function normalizedField (field) {
|
||||
if (!field) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
return field.toString().trim().substr(0, MAX_LENGTH);
|
||||
}
|
||||
|
||||
function getLayergroupid ({ res }) {
|
||||
if (res.locals.token) {
|
||||
return res.locals.token;
|
||||
}
|
||||
|
||||
if (res.locals.mapConfig) {
|
||||
return res.locals.mapConfig.id();
|
||||
}
|
||||
|
||||
if (res.locals.mapConfigProvider && res.locals.mapConfigProvider.mapConfig) {
|
||||
return res.locals.mapConfigProvider.mapConfig.id();
|
||||
}
|
||||
}
|
||||
|
||||
function getCacheBuster ({ res }) {
|
||||
if (res.locals.cache_buster !== undefined) {
|
||||
return `${res.locals.cache_buster}`;
|
||||
}
|
||||
|
||||
if (res.locals.mapConfigProvider) {
|
||||
return `${res.locals.mapConfigProvider.getCacheBuster()}`;
|
||||
}
|
||||
}
|
||||
|
||||
function getTemplateHash ({ res }) {
|
||||
if (res.locals.templateHash) {
|
||||
return res.locals.templateHash;
|
||||
}
|
||||
|
||||
if (res.locals.mapConfigProvider && res.locals.mapConfigProvider.getTemplateHash) {
|
||||
let templateHash;
|
||||
|
||||
try {
|
||||
templateHash = res.locals.mapConfigProvider.getTemplateHash().substring(0, 8);
|
||||
} catch (e) {}
|
||||
|
||||
return templateHash;
|
||||
}
|
||||
}
|
||||
|
||||
function getStatTag ({ res }) {
|
||||
if (res.locals.mapConfig) {
|
||||
return res.locals.mapConfig.obj().stat_tag;
|
||||
}
|
||||
|
||||
// FIXME: don't expect that mapConfig is already set
|
||||
if (res.locals.mapConfigProvider && res.locals.mapConfigProvider.mapConfig) {
|
||||
return res.locals.mapConfigProvider.mapConfig.obj().stat_tag;
|
||||
}
|
||||
}
|
||||
|
||||
// FIXME: 'Profiler' might not be accurate enough
|
||||
function getResponseTime (req) {
|
||||
let stats;
|
||||
|
||||
try {
|
||||
stats = req.profiler.toJSON();
|
||||
} catch (e) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
return stats && stats.total ? stats.total.toString() : undefined;
|
||||
}
|
||||
|
||||
function getFromReq (req, { query = {}, body = {}, params = {}, headers = {} } = {}) {
|
||||
const extra = {};
|
||||
|
||||
for (const [queryParam, eventName] of Object.entries(query)) {
|
||||
extra[eventName] = req.query[queryParam];
|
||||
}
|
||||
|
||||
for (const [bodyParam, eventName] of Object.entries(body)) {
|
||||
extra[eventName] = req.body[bodyParam];
|
||||
}
|
||||
|
||||
for (const [pathParam, eventName] of Object.entries(params)) {
|
||||
extra[eventName] = req.params[pathParam];
|
||||
}
|
||||
|
||||
for (const [header, eventName] of Object.entries(headers)) {
|
||||
extra[eventName] = req.get(header);
|
||||
}
|
||||
|
||||
return extra;
|
||||
}
|
||||
|
||||
function getFromRes (res, { body = {}, headers = {}, locals = {} } = {}) {
|
||||
const extra = {};
|
||||
|
||||
if (res.body) {
|
||||
for (const [bodyParam, eventName] of Object.entries(body)) {
|
||||
extra[eventName] = res.body[bodyParam];
|
||||
}
|
||||
}
|
||||
|
||||
for (const [header, eventName] of Object.entries(headers)) {
|
||||
extra[eventName] = res.get(header);
|
||||
}
|
||||
|
||||
for (const [localParam, eventName] of Object.entries(locals)) {
|
||||
extra[eventName] = res.locals[localParam];
|
||||
}
|
||||
|
||||
return extra;
|
||||
}
|
||||
lib/api/middlewares/named-map-provider.js (new file, +46 lines)
@@ -0,0 +1,46 @@
|
||||
'use strict';
|
||||
|
||||
module.exports = function getNamedMapProvider ({ namedMapProviderCache, label, forcedFormat = null }) {
|
||||
return function getNamedMapProviderMiddleware (req, res, next) {
|
||||
const { user, token, cache_buster: cacheBuster, api_key: apiKey } = res.locals;
|
||||
const { dbuser, dbname, dbpassword, dbhost, dbport } = res.locals;
|
||||
const { template_id: templateId, layer: layerFromParams, z, x, y, format } = req.params;
|
||||
const { layer: layerFromQuery } = req.query;
|
||||
|
||||
const params = {
|
||||
user,
|
||||
token,
|
||||
cache_buster: cacheBuster,
|
||||
api_key: apiKey,
|
||||
dbuser,
|
||||
dbname,
|
||||
dbpassword,
|
||||
dbhost,
|
||||
dbport,
|
||||
template_id: templateId,
|
||||
layer: (layerFromQuery || layerFromParams),
|
||||
z,
|
||||
x,
|
||||
y,
|
||||
format
|
||||
};
|
||||
|
||||
if (forcedFormat) {
|
||||
params.format = forcedFormat;
|
||||
params.layer = params.layer || 'all';
|
||||
}
|
||||
|
||||
const { config, auth_token: authToken } = req.query;
|
||||
|
||||
namedMapProviderCache.get(user, templateId, config, authToken, params, (err, namedMapProvider) => {
|
||||
if (err) {
|
||||
err.label = label;
|
||||
return next(err);
|
||||
}
|
||||
|
||||
res.locals.mapConfigProvider = namedMapProvider;
|
||||
|
||||
next();
|
||||
});
|
||||
};
|
||||
};
|
||||
lib/api/middlewares/profiler.js (new file, +37 lines)
@@ -0,0 +1,37 @@
|
||||
'use strict';
|
||||
|
||||
const Profiler = require('../../stats/profiler-proxy');
|
||||
const debug = require('debug')('windshaft:cartodb:stats');
|
||||
const { name: prefix } = require('../../../package.json');
|
||||
|
||||
module.exports = function profiler (options) {
|
||||
const { enabled = true, statsClient } = options;
|
||||
|
||||
return function profilerMiddleware (req, res, next) {
|
||||
const { logger } = res.locals;
|
||||
|
||||
// TODO: stop using profiler and log stats instead of adding them to the profiler
|
||||
req.profiler = new Profiler({
|
||||
statsd_client: statsClient,
|
||||
profile: enabled
|
||||
});
|
||||
|
||||
req.profiler.start(prefix);
|
||||
|
||||
res.on('finish', () => {
|
||||
req.profiler.done('response');
|
||||
req.profiler.end();
|
||||
const stats = req.profiler.toJSON();
|
||||
logger.info({ stats, duration: stats.response / 1000, duration_ms: stats.response }, 'Request profiling stats');
|
||||
|
||||
try {
|
||||
// May throw due to dns, see: http://github.com/CartoDB/Windshaft/issues/166
|
||||
req.profiler.sendStats();
|
||||
} catch (err) {
|
||||
debug('error sending profiling stats: ' + err);
|
||||
}
|
||||
});
|
||||
|
||||
next();
|
||||
};
|
||||
};
|
||||
@@ -19,12 +19,12 @@ const RATE_LIMIT_ENDPOINTS_GROUPS = {
|
||||
NAMED_TILES: 'named_tiles'
|
||||
};
|
||||
|
||||
function rateLimit(userLimitsBackend, endpointGroup = null) {
|
||||
function rateLimit (userLimitsBackend, endpointGroup = null) {
|
||||
if (!isRateLimitEnabled(endpointGroup)) {
|
||||
return function rateLimitDisabledMiddleware(req, res, next) { next(); };
|
||||
return function rateLimitDisabledMiddleware (req, res, next) { next(); };
|
||||
}
|
||||
|
||||
return function rateLimitMiddleware(req, res, next) {
|
||||
return function rateLimitMiddleware (req, res, next) {
|
||||
userLimitsBackend.getRateLimit(res.locals.user, endpointGroup, function (err, userRateLimit) {
|
||||
if (err) {
|
||||
return next(err);
|
||||
@@ -46,7 +46,7 @@ function rateLimit(userLimitsBackend, endpointGroup = null) {
|
||||
// retry is floor rounded in seconds by redis-cell
|
||||
res.set('Retry-After', retry + 1);
|
||||
|
||||
let rateLimitError = new Error(
|
||||
const rateLimitError = new Error(
|
||||
'You are over platform\'s limits: too many requests.' +
|
||||
' Please contact us to know more details'
|
||||
);
|
||||
@@ -61,8 +61,7 @@ function rateLimit(userLimitsBackend, endpointGroup = null) {
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
function isRateLimitEnabled(endpointGroup) {
|
||||
function isRateLimitEnabled (endpointGroup) {
|
||||
return global.environment.enabledFeatures.rateLimitsEnabled &&
|
||||
endpointGroup &&
|
||||
global.environment.enabledFeatures.rateLimitsByEndpoint[endpointGroup];
|
||||
lib/api/middlewares/send-response.js (new file, +24 lines)
@@ -0,0 +1,24 @@
'use strict';

const setCommonHeaders = require('../../utils/common-headers');

module.exports = function sendResponse () {
    return function sendResponseMiddleware (req, res, next) {
        setCommonHeaders(req, res, () => {
            res.status(res.statusCode);

            if (Buffer.isBuffer(res.body)) {
                res.send(res.body);
                return next();
            }

            if (req.query.callback) {
                res.jsonp(res.body);
                return next();
            }

            res.json(res.body);
            return next();
        });
    };
};
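Note: this middleware assumes earlier middlewares leave the payload in res.body and that sendResponse() sits at the end of the chain. A hedged sketch of that pattern; the route and the body-producing middleware are hypothetical, not taken from the diff.

// Hypothetical chain: one middleware stores its result in res.body, sendResponse() serializes it.
const express = require('express');
const sendResponse = require('./lib/api/middlewares/send-response');

function buildLayergroupBody () {
    return function buildLayergroupBodyMiddleware (req, res, next) {
        res.body = { layergroupid: 'example' }; // placeholder payload
        next();
    };
}

const router = express.Router();
router.get('/layergroup', buildLayergroupBody(), sendResponse());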
@@ -1,11 +1,11 @@
|
||||
'use strict';
|
||||
|
||||
const NamedMapsCacheEntry = require('../../cache/model/named_maps_entry');
|
||||
const NamedMapsCacheEntry = require('../../cache/model/named-maps-entry');
|
||||
const NamedMapMapConfigProvider = require('../../models/mapconfig/provider/named-map-provider');
|
||||
|
||||
module.exports = function setSurrogateKeyHeader ({ surrogateKeysCache }) {
|
||||
return function setSurrogateKeyHeaderMiddleware(req, res, next) {
|
||||
const { user, mapConfigProvider } = res.locals;
|
||||
return function setSurrogateKeyHeaderMiddleware (req, res, next) {
|
||||
const { user, mapConfigProvider, logger } = res.locals;
|
||||
|
||||
if (mapConfigProvider instanceof NamedMapMapConfigProvider) {
|
||||
surrogateKeysCache.tag(res, new NamedMapsCacheEntry(user, mapConfigProvider.getTemplateName()));
|
||||
@@ -17,7 +17,7 @@ module.exports = function setSurrogateKeyHeader ({ surrogateKeysCache }) {
|
||||
|
||||
mapConfigProvider.getAffectedTables((err, affectedTables) => {
|
||||
if (err) {
|
||||
global.logger.warn('ERROR generating Surrogate Key Header:', err);
|
||||
logger.warn({ exception: err }, 'Error generating Surrogate Key Header');
|
||||
return next();
|
||||
}
|
||||
|
||||
lib/api/middlewares/tag.js (new file, +15 lines)
@@ -0,0 +1,15 @@
'use strict';

module.exports = function tag ({ tags }) {
    if (!Array.isArray(tags) || !tags.every((tag) => typeof tag === 'string')) {
        throw new Error('Required "tags" option must be a valid Array: [string, string, ...]');
    }

    return function tagMiddleware (req, res, next) {
        const { logger } = res.locals;
        res.locals.tags = tags;
        res.on('finish', () => logger.info({ tags: res.locals.tags }, 'Request tagged'));

        next();
    };
};
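Note: because the 'finish' hook reads res.locals.tags when the response ends, downstream middlewares can append extra tags before they are logged. A minimal sketch; the markCacheHit middleware and the 'cache-hit' tag are illustrative assumptions, while the tag list mirrors the controllers in this changeset.

// Sketch only; not part of the diff.
const tag = require('./lib/api/middlewares/tag');

const middlewares = [
    tag({ tags: ['map', 'named'] }),
    function markCacheHit (req, res, next) { // hypothetical downstream middleware
        res.locals.tags.push('cache-hit');
        next();
    }
];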
lib/api/middlewares/user.js (new file, +29 lines)
@@ -0,0 +1,29 @@
'use strict';

const CdbRequest = require('../../models/cdb-request');

module.exports = function user (metadataBackend) {
    const cdbRequest = new CdbRequest();

    return function userMiddleware (req, res, next) {
        try {
            res.locals.user = getUserNameFromRequest(req, cdbRequest);
        } catch (err) {
            return next(err);
        }

        metadataBackend.getUserId(res.locals.user, (err, userId) => {
            if (err || !userId) {
                return next();
            }

            res.locals.userId = userId;

            return next();
        });
    };
};

function getUserNameFromRequest (req, cdbRequest) {
    return cdbRequest.userByReq(req);
}
@@ -1,12 +1,12 @@
|
||||
'use strict';
|
||||
|
||||
const fs = require('fs');
|
||||
const timeoutErrorVectorTile = fs.readFileSync(__dirname + '/../../../../assets/render-timeout-fallback.mvt');
|
||||
|
||||
module.exports = function vectorError() {
|
||||
return function vectorErrorMiddleware(err, req, res, next) {
|
||||
if(req.params.format === 'mvt') {
|
||||
const path = require('path');
|
||||
const timeoutErrorVectorTile = fs.readFileSync(path.join(__dirname, '/../../../assets/render-timeout-fallback.mvt'));
|
||||
|
||||
module.exports = function vectorError () {
|
||||
return function vectorErrorMiddleware (err, req, res, next) {
|
||||
if (req.params.format === 'mvt') {
|
||||
if (isTimeoutError(err) || isRateLimitError(err)) {
|
||||
res.set('Content-Type', 'application/x-protobuf');
|
||||
return res.status(429).send(timeoutErrorVectorTile);
|
||||
@@ -17,7 +17,6 @@ module.exports = function vectorError() {
|
||||
};
|
||||
};
|
||||
|
||||
|
||||
function isRenderTimeoutError (err) {
|
||||
return err.message === 'Render timed out';
|
||||
}
|
||||
@@ -1,6 +1,7 @@
|
||||
'use strict';
|
||||
|
||||
const { templateName } = require('../../backends/template_maps');
|
||||
const { templateName } = require('../../backends/template-maps');
|
||||
const tag = require('../middlewares/tag');
|
||||
const credentials = require('../middlewares/credentials');
|
||||
const rateLimit = require('../middlewares/rate-limit');
|
||||
const { RATE_LIMIT_ENDPOINTS_GROUPS } = rateLimit;
|
||||
@@ -18,8 +19,8 @@ module.exports = class AdminTemplateController {
|
||||
this.userLimitsBackend = userLimitsBackend;
|
||||
}
|
||||
|
||||
register (templateRouter) {
|
||||
templateRouter.options(`/:template_id`);
|
||||
route (templateRouter) {
|
||||
templateRouter.options('/:template_id');
|
||||
|
||||
templateRouter.post('/', this.middlewares({
|
||||
action: 'create',
|
||||
@@ -76,6 +77,7 @@ module.exports = class AdminTemplateController {
|
||||
}
|
||||
|
||||
return [
|
||||
tag({ tags: ['named', 'admin', action] }),
|
||||
credentials(),
|
||||
authorizedByAPIKey({ authBackend: this.authBackend, action, label }),
|
||||
rateLimit(this.userLimitsBackend, rateLimitGroup),
|
||||
@@ -166,8 +168,6 @@ function updateTemplate ({ templateMaps }) {
|
||||
|
||||
function retrieveTemplate ({ templateMaps }) {
|
||||
return function retrieveTemplateMiddleware (req, res, next) {
|
||||
req.profiler.start('windshaft-cartodb.get_template');
|
||||
|
||||
const { user } = res.locals;
|
||||
const templateId = templateName(req.params.template_id);
|
||||
|
||||
@@ -195,8 +195,6 @@ function retrieveTemplate ({ templateMaps }) {
|
||||
|
||||
function destroyTemplate ({ templateMaps }) {
|
||||
return function destroyTemplateMiddleware (req, res, next) {
|
||||
req.profiler.start('windshaft-cartodb.delete_template');
|
||||
|
||||
const { user } = res.locals;
|
||||
const templateId = templateName(req.params.template_id);
|
||||
|
||||
@@ -215,8 +213,6 @@ function destroyTemplate ({ templateMaps }) {
|
||||
|
||||
function listTemplates ({ templateMaps }) {
|
||||
return function listTemplatesMiddleware (req, res, next) {
|
||||
req.profiler.start('windshaft-cartodb.get_template_list');
|
||||
|
||||
const { user } = res.locals;
|
||||
|
||||
templateMaps.listTemplates(user, (err, templateIds) => {
|
||||
@@ -1,10 +1,10 @@
|
||||
'use strict';
|
||||
|
||||
const tag = require('../middlewares/tag');
|
||||
const cleanUpQueryParams = require('../middlewares/clean-up-query-params');
|
||||
const credentials = require('../middlewares/credentials');
|
||||
const dbConnSetup = require('../middlewares/db-conn-setup');
|
||||
const authorize = require('../middlewares/authorize');
|
||||
const initProfiler = require('../middlewares/init-profiler');
|
||||
const checkJsonContentType = require('../middlewares/check-json-content-type');
|
||||
const incrementMapViewCount = require('../middlewares/increment-map-view-count');
|
||||
const augmentLayergroupData = require('../middlewares/augment-layergroup-data');
|
||||
@@ -21,6 +21,7 @@ const NamedMapMapConfigProvider = require('../../models/mapconfig/provider/named
|
||||
const CreateLayergroupMapConfigProvider = require('../../models/mapconfig/provider/create-layergroup-provider');
|
||||
const rateLimit = require('../middlewares/rate-limit');
|
||||
const { RATE_LIMIT_ENDPOINTS_GROUPS } = rateLimit;
|
||||
const metrics = require('../middlewares/metrics');
|
||||
|
||||
module.exports = class NamedMapController {
|
||||
/**
|
||||
@@ -38,6 +39,7 @@ module.exports = class NamedMapController {
|
||||
* @constructor
|
||||
*/
|
||||
constructor (
|
||||
config,
|
||||
pgConnection,
|
||||
templateMaps,
|
||||
mapBackend,
|
||||
@@ -48,8 +50,10 @@ module.exports = class NamedMapController {
|
||||
mapConfigAdapter,
|
||||
statsBackend,
|
||||
authBackend,
|
||||
layergroupMetadata
|
||||
layergroupMetadata,
|
||||
metricsBackend
|
||||
) {
|
||||
this.config = config;
|
||||
this.pgConnection = pgConnection;
|
||||
this.templateMaps = templateMaps;
|
||||
this.mapBackend = mapBackend;
|
||||
@@ -61,27 +65,41 @@ module.exports = class NamedMapController {
|
||||
this.statsBackend = statsBackend;
|
||||
this.authBackend = authBackend;
|
||||
this.layergroupMetadata = layergroupMetadata;
|
||||
this.metricsBackend = metricsBackend;
|
||||
}
|
||||
|
||||
register (templateRouter) {
|
||||
route (templateRouter) {
|
||||
templateRouter.get('/:template_id/jsonp', this.middlewares());
|
||||
templateRouter.post('/:template_id', this.middlewares());
|
||||
}
|
||||
|
||||
middlewares () {
|
||||
const isTemplateInstantiation = true;
|
||||
const useTemplateHash = true;
|
||||
const includeQuery = false;
|
||||
const label = 'NAMED MAP LAYERGROUP';
|
||||
const addContext = false;
|
||||
const metricsTags = {
|
||||
event: 'map_view',
|
||||
attributes: { map_type: 'named' },
|
||||
from: {
|
||||
req: {
|
||||
query: { client: 'client' }
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
return [
|
||||
tag({ tags: ['map', 'named'] }),
|
||||
metrics({
|
||||
enabled: this.config.pubSubMetrics.enabled,
|
||||
metricsBackend: this.metricsBackend,
|
||||
tags: metricsTags
|
||||
}),
|
||||
credentials(),
|
||||
authorize(this.authBackend),
|
||||
dbConnSetup(this.pgConnection),
|
||||
rateLimit(this.userLimitsBackend, RATE_LIMIT_ENDPOINTS_GROUPS.NAMED),
|
||||
cleanUpQueryParams(['aggregation']),
|
||||
initProfiler(isTemplateInstantiation),
|
||||
checkJsonContentType(),
|
||||
checkInstantiteLayergroup(),
|
||||
getTemplate(
|
||||
@@ -106,7 +124,7 @@ module.exports = class NamedMapController {
|
||||
lastModifiedHeader(),
|
||||
lastUpdatedTimeLayergroup(),
|
||||
layerStats(this.pgConnection, this.statsBackend),
|
||||
layergroupIdHeader(this.templateMaps ,useTemplateHash),
|
||||
layergroupIdHeader(this.templateMaps, useTemplateHash),
|
||||
layergroupMetadata(this.layergroupMetadata, includeQuery),
|
||||
mapError({ label, addContext })
|
||||
];
|
||||
@@ -114,7 +132,7 @@ module.exports = class NamedMapController {
|
||||
};
|
||||
|
||||
function checkInstantiteLayergroup () {
|
||||
return function checkInstantiteLayergroupMiddleware(req, res, next) {
|
||||
return function checkInstantiteLayergroupMiddleware (req, res, next) {
|
||||
if (req.method === 'GET') {
|
||||
const { callback, config } = req.query;
|
||||
|
||||
@@ -125,14 +143,12 @@ function checkInstantiteLayergroup () {
|
||||
if (config) {
|
||||
try {
|
||||
req.body = JSON.parse(config);
|
||||
} catch(e) {
|
||||
} catch (e) {
|
||||
return next(new Error('Invalid config parameter, should be a valid JSON'));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
req.profiler.done('checkInstantiteLayergroup');
|
||||
|
||||
return next();
|
||||
};
|
||||
}
|
||||
@@ -148,8 +164,8 @@ function getTemplate (
|
||||
return function getTemplateMiddleware (req, res, next) {
|
||||
const templateParams = req.body;
|
||||
const { user, dbuser, dbname, dbpassword, dbhost, dbport } = res.locals;
|
||||
const { template_id } = req.params;
|
||||
const { auth_token } = req.query;
|
||||
const { template_id: templateId } = req.params;
|
||||
const { auth_token: authToken } = req.query;
|
||||
|
||||
const params = Object.assign({ dbuser, dbname, dbpassword, dbhost, dbport }, req.query);
|
||||
|
||||
@@ -161,16 +177,15 @@ function getTemplate (
|
||||
mapConfigAdapter,
|
||||
affectedTablesCache,
|
||||
user,
|
||||
template_id,
|
||||
templateId,
|
||||
templateParams,
|
||||
auth_token,
|
||||
authToken,
|
||||
params
|
||||
);
|
||||
|
||||
mapConfigProvider.getMapConfig((err, mapConfig, rendererParams, context, stats = {}) => {
|
||||
req.profiler.done('named.getMapConfig');
|
||||
mapConfigProvider.logger = res.locals.logger;
|
||||
|
||||
stats.mapType = 'named';
|
||||
mapConfigProvider.getMapConfig((err, mapConfig, rendererParams, context, stats = {}) => {
|
||||
req.profiler.add(stats);
|
||||
|
||||
if (err) {
|
||||
@@ -9,6 +9,7 @@ const TileTemplateController = require('./tile-template-controller');
|
||||
module.exports = class TemplateRouter {
|
||||
constructor ({ collaborators }) {
|
||||
const {
|
||||
config,
|
||||
pgConnection,
|
||||
templateMaps,
|
||||
mapBackend,
|
||||
@@ -22,9 +23,11 @@ module.exports = class TemplateRouter {
|
||||
layergroupMetadata,
|
||||
namedMapProviderCache,
|
||||
tileBackend,
|
||||
metricsBackend
|
||||
} = collaborators;
|
||||
|
||||
this.namedMapController = new NamedMapController(
|
||||
config,
|
||||
pgConnection,
|
||||
templateMaps,
|
||||
mapBackend,
|
||||
@@ -35,7 +38,8 @@ module.exports = class TemplateRouter {
|
||||
mapConfigAdapter,
|
||||
statsBackend,
|
||||
authBackend,
|
||||
layergroupMetadata
|
||||
layergroupMetadata,
|
||||
metricsBackend
|
||||
);
|
||||
|
||||
this.tileTemplateController = new TileTemplateController(
|
||||
@@ -54,13 +58,19 @@ module.exports = class TemplateRouter {
|
||||
);
|
||||
}
|
||||
|
||||
register (apiRouter, templatePaths) {
|
||||
route (apiRouter, routes) {
|
||||
const templateRouter = router({ mergeParams: true });
|
||||
|
||||
this.namedMapController.register(templateRouter);
|
||||
this.tileTemplateController.register(templateRouter);
|
||||
this.adminTemplateController.register(templateRouter);
|
||||
routes.forEach(route => {
|
||||
const { paths, middlewares = [] } = route;
|
||||
|
||||
templatePaths.forEach(path => apiRouter.use(path, templateRouter));
|
||||
middlewares.forEach(middleware => templateRouter.use(middleware()));
|
||||
|
||||
this.namedMapController.route(templateRouter);
|
||||
this.tileTemplateController.route(templateRouter);
|
||||
this.adminTemplateController.route(templateRouter);
|
||||
|
||||
paths.forEach(path => apiRouter.use(path, templateRouter));
|
||||
});
|
||||
}
|
||||
};
|
||||
@@ -1,5 +1,6 @@
|
||||
'use strict';
|
||||
|
||||
const tag = require('../middlewares/tag');
|
||||
const coordinates = require('../middlewares/coordinates');
|
||||
const cleanUpQueryParams = require('../middlewares/clean-up-query-params');
|
||||
const credentials = require('../middlewares/credentials');
|
||||
@@ -31,12 +32,13 @@ module.exports = class TileTemplateController {
|
||||
this.userLimitsBackend = userLimitsBackend;
|
||||
}
|
||||
|
||||
register (templateRouter) {
|
||||
route (templateRouter) {
|
||||
templateRouter.get('/:template_id/:layer/:z/:x/:y.(:format)', this.middlewares());
|
||||
}
|
||||
|
||||
middlewares () {
|
||||
return [
|
||||
tag({ tags: ['tile', 'named'] }),
|
||||
coordinates(),
|
||||
credentials(),
|
||||
authorize(this.authBackend),
|
||||
@@ -67,9 +69,8 @@ function getTile ({ tileBackend, label }) {
|
||||
const { layer, z, x, y, format } = req.params;
|
||||
const params = { layer, z, x, y, format };
|
||||
|
||||
tileBackend.getTile(mapConfigProvider, params, (err, tile, headers, stats) => {
|
||||
tileBackend.getTile(mapConfigProvider, params, (err, tile, headers, stats = {}) => {
|
||||
req.profiler.add(stats);
|
||||
req.profiler.done('render-' + format);
|
||||
|
||||
if (err) {
|
||||
err.label = label;
|
||||
@@ -89,7 +90,7 @@ function getTile ({ tileBackend, label }) {
|
||||
}
|
||||
|
||||
function setContentTypeHeader () {
|
||||
return function setContentTypeHeaderMiddleware(req, res, next) {
|
||||
return function setContentTypeHeaderMiddleware (req, res, next) {
|
||||
res.set('Content-Type', res.get('content-type') || res.get('Content-Type') || 'image/png');
|
||||
|
||||
next();
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
var PSQL = require('cartodb-psql');
|
||||
|
||||
function AnalysisStatusBackend() {
|
||||
function AnalysisStatusBackend () {
|
||||
}
|
||||
|
||||
module.exports = AnalysisStatusBackend;
|
||||
@@ -10,12 +10,12 @@ module.exports = AnalysisStatusBackend;
|
||||
AnalysisStatusBackend.prototype.getNodeStatus = function (nodeId, dbParams, callback) {
|
||||
var statusQuery = [
|
||||
'SELECT node_id, status, updated_at, last_error_message as error_message',
|
||||
'FROM cdb_analysis_catalog where node_id = \'' + nodeId + '\''
|
||||
'FROM cartodb.cdb_analysis_catalog where node_id = \'' + nodeId + '\''
|
||||
].join(' ');
|
||||
|
||||
var pg = new PSQL(dbParams);
|
||||
|
||||
pg.query(statusQuery, function(err, result) {
|
||||
pg.query(statusQuery, function (err, result) {
|
||||
if (err) {
|
||||
return callback(err, result);
|
||||
}
|
||||
@@ -2,7 +2,6 @@
|
||||
|
||||
var _ = require('underscore');
|
||||
var camshaft = require('camshaft');
|
||||
var fs = require('fs');
|
||||
|
||||
var REDIS_LIMITS = {
|
||||
DB: 5,
|
||||
@@ -14,7 +13,6 @@ function AnalysisBackend (metadataBackend, options) {
|
||||
this.options = options || {};
|
||||
this.options.limits = this.options.limits || {};
|
||||
this.setBatchConfig(this.options.batch);
|
||||
this.setLoggerConfig(this.options.logger);
|
||||
}
|
||||
|
||||
module.exports = AnalysisBackend;
|
||||
@@ -27,38 +25,19 @@ AnalysisBackend.prototype.setBatchConfig = function (options) {
|
||||
this.batchConfig = batchConfig;
|
||||
};
|
||||
|
||||
AnalysisBackend.prototype.setLoggerConfig = function (options) {
|
||||
this.loggerConfig = options || {};
|
||||
|
||||
if (this.loggerConfig.filename) {
|
||||
this.stream = fs.createWriteStream(this.loggerConfig.filename, { flags: 'a', encoding: 'utf8' });
|
||||
|
||||
process.on('SIGHUP', function () {
|
||||
if (this.stream) {
|
||||
this.stream.destroy();
|
||||
}
|
||||
|
||||
this.stream = fs.createWriteStream(this.loggerConfig.filename, { flags: 'a', encoding: 'utf8' });
|
||||
}.bind(this));
|
||||
}
|
||||
};
|
||||
|
||||
AnalysisBackend.prototype.create = function(analysisConfiguration, analysisDefinition, callback) {
|
||||
AnalysisBackend.prototype.create = function (analysisConfiguration, analysisDefinition, callback) {
|
||||
analysisConfiguration.batch.endpoint = this.batchConfig.endpoint;
|
||||
analysisConfiguration.batch.inlineExecution = this.batchConfig.inlineExecution;
|
||||
analysisConfiguration.batch.hostHeaderTemplate = this.batchConfig.hostHeaderTemplate;
|
||||
|
||||
analysisConfiguration.logger = {
|
||||
stream: this.stream ? this.stream : process.stdout
|
||||
};
|
||||
|
||||
this.getAnalysesLimits(analysisConfiguration.user, function(err, limits) {
|
||||
this.getAnalysesLimits(analysisConfiguration.user, function (err, limits) {
|
||||
if (err) {}
|
||||
analysisConfiguration.limits = limits || {};
|
||||
camshaft.create(analysisConfiguration, analysisDefinition, callback);
|
||||
});
|
||||
};
|
||||
|
||||
AnalysisBackend.prototype.getAnalysesLimits = function(username, callback) {
|
||||
AnalysisBackend.prototype.getAnalysesLimits = function (username, callback) {
|
||||
var self = this;
|
||||
|
||||
var analysesLimits = {
|
||||
@@ -70,16 +49,17 @@ AnalysisBackend.prototype.getAnalysesLimits = function(username, callback) {
|
||||
}
|
||||
};
|
||||
|
||||
Object.keys(self.options.limits).forEach(function(analysisTypeOrTag) {
|
||||
Object.keys(self.options.limits).forEach(function (analysisTypeOrTag) {
|
||||
analysesLimits.analyses[analysisTypeOrTag] = _.extend({}, self.options.limits[analysisTypeOrTag]);
|
||||
});
|
||||
|
||||
var analysesLimitsKey = REDIS_LIMITS.PREFIX + username;
|
||||
this.metadataBackend.redisCmd(REDIS_LIMITS.DB, 'HGETALL', [analysesLimitsKey], function(err, analysesTimeouts) {
|
||||
this.metadataBackend.redisCmd(REDIS_LIMITS.DB, 'HGETALL', [analysesLimitsKey], function (err, analysesTimeouts) {
|
||||
if (err) {}
|
||||
// analysesTimeouts wil be something like: { moran: 3000, intersection: 5000 }
|
||||
analysesTimeouts = analysesTimeouts || {};
|
||||
|
||||
Object.keys(analysesTimeouts).forEach(function(analysisType) {
|
||||
Object.keys(analysesTimeouts).forEach(function (analysisType) {
|
||||
analysesLimits.analyses[analysisType] = _.defaults(
|
||||
{
|
||||
timeout: Number.isFinite(+analysesTimeouts[analysisType]) ? +analysesTimeouts[analysisType] : 0
|
||||
@@ -9,7 +9,7 @@
|
||||
* @constructor
|
||||
* @type {AuthBackend}
|
||||
*/
|
||||
function AuthBackend(pgConnection, metadataBackend, mapStore, templateMaps) {
|
||||
function AuthBackend (pgConnection, metadataBackend, mapStore, templateMaps) {
|
||||
this.pgConnection = pgConnection;
|
||||
this.metadataBackend = metadataBackend;
|
||||
this.mapStore = mapStore;
|
||||
@@ -25,28 +25,28 @@ module.exports = AuthBackend;
|
||||
// null if the request is not signed by anyone
|
||||
// or will be a string cartodb username otherwise.
|
||||
//
|
||||
AuthBackend.prototype.authorizedBySigner = function(req, res, callback) {
|
||||
if ( ! res.locals.token || ! res.locals.signer ) {
|
||||
AuthBackend.prototype.authorizedBySigner = function (req, res, callback) {
|
||||
if (!res.locals.token || !res.locals.signer) {
|
||||
return callback(null, false); // no signer requested
|
||||
}
|
||||
|
||||
var self = this;
|
||||
|
||||
var layergroup_id = res.locals.token;
|
||||
var auth_token = req.query.auth_token;
|
||||
var layergroupId = res.locals.token;
|
||||
var authToken = req.query.auth_token;
|
||||
|
||||
this.mapStore.load(layergroup_id, function(err, mapConfig) {
|
||||
this.mapStore.load(layergroupId, function (err, mapConfig) {
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
|
||||
var authorized = self.templateMaps.isAuthorized(mapConfig.obj().template, auth_token);
|
||||
var authorized = self.templateMaps.isAuthorized(mapConfig.obj().template, authToken);
|
||||
|
||||
return callback(null, authorized);
|
||||
});
|
||||
};
|
||||
|
||||
function isValidApiKey(apikey) {
|
||||
function isValidApiKey (apikey) {
|
||||
return apikey.type &&
|
||||
apikey.user &&
|
||||
apikey.databasePassword &&
|
||||
@@ -60,11 +60,11 @@ function isValidApiKey(apikey) {
|
||||
// @param callback function(err, authorized)
|
||||
// NOTE: authorized is expected to be 0 or 1 (integer)
|
||||
//
|
||||
AuthBackend.prototype.authorizedByAPIKey = function(user, res, callback) {
|
||||
AuthBackend.prototype.authorizedByAPIKey = function (user, res, callback) {
|
||||
const apikeyToken = res.locals.api_key;
|
||||
const basicAuthUsername = res.locals.basicAuthUsername;
|
||||
|
||||
if ( ! apikeyToken ) {
|
||||
if (!apikeyToken) {
|
||||
return callback(null, false); // no api key, no authorization...
|
||||
}
|
||||
|
||||
@@ -77,7 +77,7 @@ AuthBackend.prototype.authorizedByAPIKey = function(user, res, callback) {
|
||||
return callback(err);
|
||||
}
|
||||
|
||||
if ( !isValidApiKey(apikey)) {
|
||||
if (!isValidApiKey(apikey)) {
|
||||
const error = new Error('Unauthorized');
|
||||
error.type = 'auth';
|
||||
error.subtype = 'api-key-not-found';
|
||||
@@ -109,7 +109,7 @@ AuthBackend.prototype.authorizedByAPIKey = function(user, res, callback) {
|
||||
};
|
||||
|
||||
function isNameNotFoundError (err) {
|
||||
return err.message && -1 !== err.message.indexOf('name not found');
|
||||
return err.message && err.message.indexOf('name not found') !== -1;
|
||||
}
|
||||
|
||||
function usernameMatches (basicAuthUsername, requestUsername) {
|
||||
@@ -123,7 +123,7 @@ function usernameMatches (basicAuthUsername, requestUsername) {
|
||||
* @param res - standard res object. Contains the auth parameters in locals
|
||||
* @param callback function(err, allowed) is access allowed not?
|
||||
*/
|
||||
AuthBackend.prototype.authorize = function(req, res, callback) {
|
||||
AuthBackend.prototype.authorize = function (req, res, callback) {
|
||||
var user = res.locals.user;
|
||||
|
||||
this.authorizedByAPIKey(user, res, (err, isAuthorizedByApikey) => {
|
||||
@@ -133,8 +133,6 @@ AuthBackend.prototype.authorize = function(req, res, callback) {
|
||||
|
||||
if (isAuthorizedByApikey) {
|
||||
return this.pgConnection.setDBAuth(user, res.locals, 'regular', function (err) {
|
||||
req.profiler.done('setDBAuth');
|
||||
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
@@ -150,8 +148,6 @@ AuthBackend.prototype.authorize = function(req, res, callback) {
|
||||
|
||||
if (isAuthorizedBySigner) {
|
||||
return this.pgConnection.setDBAuth(user, res.locals, 'master', function (err) {
|
||||
req.profiler.done('setDBAuth');
|
||||
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
@@ -163,8 +159,6 @@ AuthBackend.prototype.authorize = function(req, res, callback) {
|
||||
// if no signer name was given, use default api key
|
||||
if (!res.locals.signer) {
|
||||
return this.pgConnection.setDBAuth(user, res.locals, 'default', function (err) {
|
||||
req.profiler.done('setDBAuth');
|
||||
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
@@ -7,6 +7,7 @@ const AggregationMapConfig = require('../models/aggregation/aggregation-mapconfi
|
||||
|
||||
const WebMercatorHelper = require('cartodb-query-tables').utils.webMercatorHelper;
|
||||
const webmercator = new WebMercatorHelper();
|
||||
const queryUtils = require('../../lib/utils/query-utils');
|
||||
|
||||
module.exports = class ClusterBackend {
|
||||
getClusterFeatures (mapConfigProvider, params, callback) {
|
||||
@@ -71,8 +72,8 @@ function getFeatures (pg, layer, params, callback) {
|
||||
}
|
||||
|
||||
const SKIP_COLUMNS = {
|
||||
'the_geom': true,
|
||||
'the_geom_webmercator': true
|
||||
the_geom: true,
|
||||
the_geom_webmercator: true
|
||||
};
|
||||
|
||||
function getColumnsName (pg, query, callback) {
|
||||
@@ -89,7 +90,7 @@ function getColumnsName (pg, query, callback) {
|
||||
|
||||
const fields = resultSet.fields || [];
|
||||
const columnNames = fields.map(field => field.name)
|
||||
.filter(columnName => !SKIP_COLUMNS[columnName]);
|
||||
.filter(columnName => !SKIP_COLUMNS[queryUtils.stripQuotes(columnName)]);
|
||||
|
||||
return callback(null, columnNames);
|
||||
}, true);
|
||||
@@ -100,7 +101,7 @@ function getClusterFeatures (pg, zoom, clusterId, columns, query, resolution, ag
|
||||
zoom: zoom,
|
||||
id: clusterId,
|
||||
query: query,
|
||||
res: 256/resolution,
|
||||
res: 256 / resolution,
|
||||
columns: columns
|
||||
});
|
||||
|
||||
@@ -127,7 +128,7 @@ function getClusterFeatures (pg, zoom, clusterId, columns, query, resolution, ag
|
||||
}
|
||||
|
||||
return callback(null, data);
|
||||
} , true); // use read-only transaction
|
||||
}, true); // use read-only transaction
|
||||
}
|
||||
|
||||
const schemaQuery = ctx => `SELECT * FROM (${ctx.query}) __cdb_cluster_schema LIMIT 0`;
|
||||
@@ -159,8 +160,8 @@ const clusterFeaturesQuery = ctx => `
|
||||
`;
|
||||
|
||||
const gridResolution = ctx => {
|
||||
const zoomResolution = webmercator.getResolution({ z : Math.min(38, ctx.zoom) });
|
||||
return `${256/ctx.res} * (${zoomResolution})::double precision`;
|
||||
const zoomResolution = webmercator.getResolution({ z: Math.min(38, ctx.zoom) });
|
||||
return `${256 / ctx.res} * (${zoomResolution})::double precision`;
|
||||
};
|
||||
|
||||
const aggregationQuery = ctx => `
|
||||
@@ -194,9 +195,8 @@ function parseAggregation (aggregation) {
|
||||
try {
|
||||
aggregation = JSON.parse(aggregation);
|
||||
} catch (err) {
|
||||
throw new Error(`Invalid aggregation input, should be a a valid JSON`);
|
||||
throw new Error('Invalid aggregation input, should be a a valid JSON');
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
return aggregation;
|
||||
@@ -207,7 +207,7 @@ function validateAggregation (aggregation) {
|
||||
const { columns, expressions } = aggregation;
|
||||
|
||||
if (!hasColumns(columns)) {
|
||||
throw new Error(`Invalid aggregation input, columns should be and array of column names`);
|
||||
throw new Error('Invalid aggregation input, columns should be and array of column names');
|
||||
}
|
||||
|
||||
validateExpressions(expressions);
|
||||
@@ -221,16 +221,16 @@ function hasColumns (columns) {
|
||||
function validateExpressions (expressions) {
|
||||
if (expressions !== undefined) {
|
||||
if (!isValidExpression(expressions)) {
|
||||
throw new Error(`Invalid aggregation input, expressions should be and object with valid functions`);
|
||||
throw new Error('Invalid aggregation input, expressions should be and object with valid functions');
|
||||
}
|
||||
|
||||
for (const { aggregate_function, aggregated_column } of Object.values(expressions)) {
|
||||
if (typeof aggregated_column !== 'string') {
|
||||
throw new Error(`Invalid aggregation input, aggregated column should be an string`);
|
||||
for (const { aggregate_function: aggregateFunction, aggregated_column: aggregatedColumn } of Object.values(expressions)) {
|
||||
if (typeof aggregatedColumn !== 'string') {
|
||||
throw new Error('Invalid aggregation input, aggregated column should be an string');
|
||||
}
|
||||
|
||||
if (typeof aggregate_function !== 'string') {
|
||||
throw new Error(`Invalid aggregation input, aggregate function should be an string`);
|
||||
if (typeof aggregateFunction !== 'string') {
|
||||
throw new Error('Invalid aggregation input, aggregate function should be an string');
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -3,18 +3,20 @@
var _ = require('underscore');
var PSQL = require('cartodb-psql');
var BBoxFilter = require('../models/filter/bbox');
const CircleFilter = require('../models/filter/circle');
const PolygonFilter = require('../models/filter/polygon');
var DataviewFactory = require('../models/dataview/factory');
var DataviewFactoryWithOverviews = require('../models/dataview/overviews/factory');
const dbParamsFromReqParams = require('../utils/database-params');
var OverviewsQueryRewriter = require('../utils/overviews_query_rewriter');
var OverviewsQueryRewriter = require('../utils/overviews-query-rewriter');
var overviewsQueryRewriter = new OverviewsQueryRewriter({
zoom_level: 'CDB_ZoomFromScale(!scale_denominator!)'
zoom_level: 'cartodb.CDB_ZoomFromScale(!scale_denominator!)'
});

var dot = require('dot');
dot.templateSettings.strip = false;

function DataviewBackend(analysisBackend) {
function DataviewBackend (analysisBackend) {
this.analysisBackend = analysisBackend;
}

@@ -84,14 +86,20 @@ function getQueryWithFilters (dataviewDefinition, params) {
var query = getDataviewQuery(dataviewDefinition, ownFilter, noFilters);

if (params.bbox) {
var bboxFilter = new BBoxFilter({column: 'the_geom_webmercator', srid: 3857}, {bbox: params.bbox});
var bboxFilter = new BBoxFilter({ column: 'the_geom_webmercator', srid: 3857 }, { bbox: params.bbox });
query = bboxFilter.sql(query);
} else if (params.circle) {
const circleFilter = new CircleFilter({ column: 'the_geom_webmercator', srid: 3857 }, { circle: params.circle });
query = circleFilter.sql(query);
} else if (params.polygon) {
const polygonFilter = new PolygonFilter({ column: 'the_geom_webmercator', srid: 3857 }, { polygon: params.polygon });
query = polygonFilter.sql(query);
}

return query;
}

function getDataviewQuery(dataviewDefinition, ownFilter, noFilters) {
function getDataviewQuery (dataviewDefinition, ownFilter, noFilters) {
if (noFilters) {
return dataviewDefinition.sql.no_filters;
} else if (ownFilter === 1) {
@@ -101,9 +109,9 @@ function getDataviewQuery(dataviewDefinition, ownFilter, noFilters) {
}
}
function getQueryRewriteData(mapConfig, dataviewDefinition, params) {
function getQueryRewriteData (mapConfig, dataviewDefinition, params) {
var sourceId = dataviewDefinition.source.id; // node.id
var layer = _.find(mapConfig.obj().layers, function(l) {
var layer = _.find(mapConfig.obj().layers, function (l) {
return l.options.source && (l.options.source.id === sourceId);
});
var queryRewriteData = layer && layer.options.query_rewrite_data;
@@ -115,7 +123,7 @@ function getQueryRewriteData(mapConfig, dataviewDefinition, params) {
}

if (params.bbox && queryRewriteData) {
var bbox_filter_definition = {
var bboxFilterDefinition = {
type: 'bbox',
options: {
column: 'the_geom_webmercator',
@@ -125,22 +133,22 @@ function getQueryRewriteData(mapConfig, dataviewDefinition, params) {
bbox: params.bbox
}
};
queryRewriteData = _.extend(queryRewriteData, { bbox_filter: bbox_filter_definition });
queryRewriteData = _.extend(queryRewriteData, { bbox_filter: bboxFilterDefinition });
}

return queryRewriteData;
}

function getOverrideParams(params, ownFilter) {
function getOverrideParams (params, ownFilter) {
var overrideParams = _.reduce(_.pick(params, 'start', 'end', 'bins', 'offset', 'categories'),
function castNumbers(overrides, val, k) {
function castNumbers (overrides, val, k) {
if (!Number.isFinite(+val)) {
throw new Error('Invalid number format for parameter \'' + k + '\'');
}
overrides[k] = +val;
return overrides;
},
{ownFilter: ownFilter}
{ ownFilter: ownFilter }
);

// validation will be delegated to the proper dataview
@@ -197,12 +205,18 @@ function getQueryWithOwnFilters (dataviewDefinition, params) {
if (params.bbox) {
var bboxFilter = new BBoxFilter({ column: 'the_geom', srid: 4326 }, { bbox: params.bbox });
query = bboxFilter.sql(query);
} else if (params.circle) {
const circleFilter = new CircleFilter({ column: 'the_geom', srid: 4326 }, { circle: params.circle });
query = circleFilter.sql(query);
} else if (params.polygon) {
const polygonFilter = new PolygonFilter({ column: 'the_geom', srid: 4326 }, { polygon: params.polygon });
query = polygonFilter.sql(query);
}

return query;
}

function getDataviewDefinition(mapConfig, dataviewName) {
function getDataviewDefinition (mapConfig, dataviewName) {
var dataviews = mapConfig.dataviews || {};
return dataviews[dataviewName];
}
@@ -3,32 +3,32 @@
var _ = require('underscore');
var AnalysisFilter = require('../models/filter/analysis');

function FilterStatsBackends(pgQueryRunner) {
function FilterStatsBackends (pgQueryRunner) {
this.pgQueryRunner = pgQueryRunner;
}

module.exports = FilterStatsBackends;

function getEstimatedRows(pgQueryRunner, username, query, callback) {
pgQueryRunner.run(username, "EXPLAIN (FORMAT JSON)"+query, function(err, result_rows) {
if (err){
function getEstimatedRows (pgQueryRunner, username, query, callback) {
pgQueryRunner.run(username, 'EXPLAIN (FORMAT JSON)' + query, function (err, resultRows) {
if (err) {
callback(err);
return;
}
var rows;
if ( result_rows[0] && result_rows[0]['QUERY PLAN'] &&
result_rows[0]['QUERY PLAN'][0] && result_rows[0]['QUERY PLAN'][0].Plan ) {
rows = result_rows[0]['QUERY PLAN'][0].Plan['Plan Rows'];
if (resultRows[0] && resultRows[0]['QUERY PLAN'] &&
resultRows[0]['QUERY PLAN'][0] && resultRows[0]['QUERY PLAN'][0].Plan) {
rows = resultRows[0]['QUERY PLAN'][0].Plan['Plan Rows'];
}
return callback(null, rows);
});
}

FilterStatsBackends.prototype.getFilterStats = function (username, unfiltered_query, filters, callback) {
FilterStatsBackends.prototype.getFilterStats = function (username, unfilteredQuery, filters, callback) {
var stats = {};

getEstimatedRows(this.pgQueryRunner, username, unfiltered_query, (err, rows) => {
if (err){
getEstimatedRows(this.pgQueryRunner, username, unfilteredQuery, (err, rows) => {
if (err) {
return callback(err);
}

@@ -39,10 +39,10 @@ FilterStatsBackends.prototype.getFilterStats = function (username, unfiltered_qu
}

var analysisFilter = new AnalysisFilter(filters);
var query = analysisFilter.sql(unfiltered_query);
var query = analysisFilter.sql(unfilteredQuery);

getEstimatedRows(this.pgQueryRunner, username, query, (err, rows) => {
if (err){
if (err) {
return callback(err);
}
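getEstimatedRows above leans on the structure of PostgreSQL's EXPLAIN (FORMAT JSON) output; a sketch of the shape it walks, with made-up values:

// Illustrative sketch of the rows returned for `EXPLAIN (FORMAT JSON) <query>`,
// which getEstimatedRows() inspects to read the planner's row estimate.
const resultRows = [{
    'QUERY PLAN': [{
        Plan: { 'Node Type': 'Seq Scan', 'Plan Rows': 12345 }
    }]
}];
const estimatedRows = resultRows[0]['QUERY PLAN'][0].Plan['Plan Rows']; // 12345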
@@ -1,6 +1,6 @@
'use strict';

function EmptyLayerStats(types) {
function EmptyLayerStats (types) {
this._types = types || {};
}

@@ -10,7 +10,7 @@ EmptyLayerStats.prototype.is = function (type) {

EmptyLayerStats.prototype.getStats =
function (layer, dbConnection, callback) {
setImmediate(function() {
setImmediate(function () {
callback(null, {});
});
};
@@ -5,7 +5,7 @@ var EmptyLayerStats = require('./empty-layer-stats');
var MapnikLayerStats = require('./mapnik-layer-stats');
var TorqueLayerStats = require('./torque-layer-stats');

module.exports = function LayerStatsFactory(type) {
module.exports = function LayerStatsFactory (type) {
var layerStatsIterator = [];
var selectedType = type || 'ALL';
@@ -2,7 +2,7 @@

var queue = require('queue-async');

function LayerStats(layerStatsIterator) {
function LayerStats (layerStatsIterator) {
this.layerStatsIterator = layerStatsIterator;
}

@@ -41,7 +41,6 @@ LayerStats.prototype.getStats = function (mapConfig, dbConnection, callback) {

return callback(err, stats);
});

};

module.exports = LayerStats;
@@ -15,7 +15,7 @@ MapnikLayerStats.prototype.is = function (type) {
return this._types[type] ? this._types[type] : false;
};

function columnAggregations(field) {
function columnAggregations (field) {
if (field.type === 'number') {
return ['min', 'max', 'avg', 'sum'];
}
@@ -28,25 +28,24 @@ function columnAggregations(field) {
return [];
}

function _getSQL(ctx, query, type='pre', zoom=0) {
function _getSQL (ctx, query, type = 'pre', zoom = 0) {
let sql;
if (type === 'pre') {
sql = ctx.preQuery;
}
else {
} else {
sql = ctx.aggrQuery;
}
sql = queryUtils.substituteTokensForZoom(sql, zoom || 0);
return query(sql);
}

function _estimatedFeatureCount(ctx) {
function _estimatedFeatureCount (ctx) {
return queryUtils.queryPromise(ctx.dbConnection, _getSQL(ctx, queryUtils.getQueryRowEstimation))
.then(res => ({ estimatedFeatureCount: res.rows[0].rows }))
.catch(() => ({ estimatedFeatureCount: -1 }));
}

function _featureCount(ctx) {
function _featureCount (ctx) {
if (ctx.metaOptions.featureCount) {
// TODO: if ctx.metaOptions.columnStats we can combine this with column stats query
return queryUtils.queryPromise(ctx.dbConnection, _getSQL(ctx, queryUtils.getQueryActualRowCount))
@@ -55,20 +54,20 @@ function _featureCount(ctx) {
return Promise.resolve();
}
function _aggrFeatureCount(ctx) {
if (ctx.metaOptions.hasOwnProperty('aggrFeatureCount')) {
function _aggrFeatureCount (ctx) {
if (Object.prototype.hasOwnProperty.call(ctx.metaOptions, 'aggrFeatureCount')) {
// We expect as zoom level as the value of aggrFeatureCount
// TODO: it'd be nice to admit an array of zoom levels to
// return metadata for multiple levels.
return queryUtils.queryPromise(
ctx.dbConnection,
_getSQL(ctx, queryUtils.getQueryActualRowCount, 'post', ctx.metaOptions.aggrFeatureCount)
_getSQL(ctx, queryUtils.getQueryActualRowCount, 'post', ctx.metaOptions.aggrFeatureCount)
).then(res => ({ aggrFeatureCount: res.rows[0].rows }));
}
return Promise.resolve();
}

function _geometryType(ctx) {
function _geometryType (ctx) {
if (ctx.metaOptions.geometryType) {
const geometryColumn = AggregationMapConfig.getAggregationGeometryColumn();
const sqlQuery = _getSQL(ctx, sql => queryUtils.getQueryGeometryType(sql, geometryColumn));
@@ -78,7 +77,7 @@ function _geometryType(ctx) {
return Promise.resolve();
}

function _columns(ctx) {
function _columns (ctx) {
if (ctx.metaOptions.columns || ctx.metaOptions.columnStats || ctx.metaOptions.dimensions) {
// note: post-aggregation columns are in layer.options.columns when aggregation is present
return queryUtils.queryPromise(ctx.dbConnection, _getSQL(ctx, sql => queryUtils.getQueryLimited(sql, 0)))
@@ -89,7 +88,7 @@ function _columns(ctx) {

// combine a list of results merging the properties of all the objects
// undefined results are admitted and ignored
function mergeResults(results) {
function mergeResults (results) {
if (results) {
if (results.length === 0) {
return {};
@@ -108,15 +107,15 @@ function mergeResults(results) {

// deeper (1 level) combination of a list of objects:
// mergeColumns([{ col1: { a: 1 }, col2: { a: 2 } }, { col1: { b: 3 } }]) => { col1: { a: 1, b: 3 }, col2: { a: 2 } }
function mergeColumns(results) {
function mergeColumns (results) {
if (results) {
if (results.length === 0) {
return {};
}
return results.reduce((a, b) => {
let c = Object.assign({}, b || {}, a || {});
const c = Object.assign({}, b || {}, a || {});
Object.keys(c).forEach(key => {
if (b.hasOwnProperty(key)) {
if (Object.prototype.hasOwnProperty.call(b, key)) {
c[key] = Object.assign(c[key], b[key]);
}
});
@@ -127,7 +126,7 @@ function mergeColumns(results) {

const DEFAULT_SAMPLE_ROWS = 100;

function _sample(ctx) {
function _sample (ctx) {
if (!ctx.metaOptions.sample) {
return Promise.resolve();
}
@@ -164,32 +163,32 @@ function _getSampleValuesFromRange (min, span, limit) {
return Array.from(sample);
}

function _columnsMetadataRequired(options) {
function _columnsMetadataRequired (options) {
// We need determine the columns of a query
// if either column stats or dimension stats are required,
// since we'll ultimately use the same query to fetch both
return options.columnStats || options.dimensions;
}

function _columnStats(ctx, columns, dimensions) {
function _columnStats (ctx, columns, dimensions) {
if (!columns) {
return Promise.resolve();
}
if (_columnsMetadataRequired(ctx.metaOptions)) {
let queries = [];
const queries = [];
let aggr = [];
if (ctx.metaOptions.columnStats) {
queries.push(new Promise(resolve => resolve({ columns }))); // add columns as first result
Object.keys(columns).forEach(name => {
aggr = aggr.concat(
columnAggregations(columns[name])
.map(fn => `${fn}("${name}") AS "${name}_${fn}"`)
.map(fn => `${fn}("${name}") AS "${name}_${fn}"`)
);
if (columns[name].type === 'string') {
const topN = ctx.metaOptions.columnStats.topCategories || 1024;
const includeNulls = ctx.metaOptions.columnStats.hasOwnProperty('includeNulls') ?
ctx.metaOptions.columnStats.includeNulls :
true;
const includeNulls = Object.prototype.hasOwnProperty.call(ctx.metaOptions.columnStats, 'includeNulls')
? ctx.metaOptions.columnStats.includeNulls
: true;

// TODO: ctx.metaOptions.columnStats.maxCategories
// => use PG stats to dismiss columns with more distinct values
@@ -223,7 +222,7 @@ function _columnStats(ctx, columns, dimensions) {
ctx.dbConnection,
_getSQL(ctx, sql => `SELECT ${aggr.join(',')} FROM (${sql}) AS __cdb_query`)
).then(res => {
let stats = { columns: {}, dimensions: {} };
const stats = { columns: {}, dimensions: {} };
Object.keys(columns).forEach(name => {
stats.columns[name] = {};
columnAggregations(columns[name]).forEach(fn => {
@@ -245,62 +244,62 @@ function _columnStats(ctx, columns, dimensions) {
);
return Promise.all(queries).then(results => ({
columns: mergeColumns(results.map(r => r.columns)),
dimensions: mergeColumns(results.map( r => r.dimensions))
dimensions: mergeColumns(results.map(r => r.dimensions))
}));
}
return Promise.resolve({ columns });
}
// This is adapted from SQL API:
function fieldType(cname) {
function fieldType (cname) {
let tname;
switch (true) {
case /bool/.test(cname):
tname = 'boolean';
break;
case /int|float|numeric/.test(cname):
tname = 'number';
break;
case /text|char|unknown/.test(cname):
tname = 'string';
break;
case /date|time/.test(cname):
tname = 'date';
break;
default:
tname = cname;
case /bool/.test(cname):
tname = 'boolean';
break;
case /int|float|numeric/.test(cname):
tname = 'number';
break;
case /text|char|unknown/.test(cname):
tname = 'string';
break;
case /date|time/.test(cname):
tname = 'date';
break;
default:
tname = cname;
}
if ( tname && cname.match(/^_/) ) {
if (tname && cname.match(/^_/)) {
tname += '[]';
}
return tname;
}
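The reindented switch above maps PostgreSQL type names onto the coarse types reported by the API; a few illustrative calls (hypothetical inputs, not part of the diff):

// Illustrative only: how fieldType() classifies some PostgreSQL type names.
fieldType('int4');        // => 'number'
fieldType('timestamptz'); // => 'date'
fieldType('_text');       // leading underscore marks an array type => 'string[]'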
function fieldTypeSafe(dbConnection, field) {
function fieldTypeSafe (dbConnection, field) {
const cname = dbConnection.typeName(field.dataTypeID);
return cname ? fieldType(cname) : `unknown(${field.dataTypeID})`;
}

// columns are returned as an object { columnName1: { type1: ...}, ..}
// for consistency with SQL API
function formatResultFields(dbConnection, fields = []) {
let nfields = {};
for (let field of fields) {
nfields[field.name] = { type: fieldTypeSafe(dbConnection, field) };
function formatResultFields (dbConnection, fields = []) {
const nfields = {};
for (const field of fields) {
nfields[field.name] = { type: fieldTypeSafe(dbConnection, field) };
}
return nfields;
}
MapnikLayerStats.prototype.getStats =
function (layer, dbConnection, callback) {
let aggrQuery = layer.options.sql;
let preQuery = layer.options.sql_raw || aggrQuery;
const aggrQuery = layer.options.sql;
const preQuery = layer.options.sql_raw || aggrQuery;

let ctx = {
const ctx = {
dbConnection,
preQuery,
aggrQuery,
metaOptions: layer.options.metadata || {},
metaOptions: layer.options.metadata || {}
};

// TODO: could save some queries if queryUtils.getAggregationMetadata() has been used and kept somewhere
@@ -316,7 +315,7 @@ function (layer, dbConnection, callback) {
Promise.all([
_estimatedFeatureCount(ctx).then(
({ estimatedFeatureCount }) => _sample(ctx)
.then(sampleResults => mergeResults([ sampleResults, { estimatedFeatureCount }] ))
.then(sampleResults => mergeResults([sampleResults, { estimatedFeatureCount }]))
),
_featureCount(ctx),
_aggrFeatureCount(ctx),
@@ -1,6 +1,6 @@
'use strict';

function TorqueLayerStats() {
function TorqueLayerStats () {
this._types = {
torque: true
};
lib/backends/metrics.js (new file, 17 lines added)
@@ -0,0 +1,17 @@
'use strict';

const { PubSub } = require('@google-cloud/pubsub');

module.exports = class MetricsBackend {
constructor (options = {}) {
const { project_id: projectId, credentials: keyFilename, topic } = options;

this._metricsClient = new PubSub({ projectId, keyFilename });
this._topicName = topic;
}

send (event, attributes) {
const data = Buffer.from(event);
return this._metricsClient.topic(this._topicName).publish(data, attributes);
}
};
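A rough usage sketch for this new backend; the configuration values and the caller are hypothetical, the actual wiring happens elsewhere in the server:

// Hypothetical wiring, not part of the diff: build the backend from config
// and publish a single event. send() returns the Pub/Sub publish promise.
const MetricsBackend = require('./lib/backends/metrics');

const metricsBackend = new MetricsBackend({
    project_id: 'my-gcp-project',                 // hypothetical project id
    credentials: '/path/to/service-account.json', // hypothetical key file
    topic: 'raw-metric-events'                    // hypothetical topic name
});

metricsBackend.send('map_created', { user: 'alice' })
    .catch(err => console.error('Could not publish metric event', err));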
@@ -2,7 +2,7 @@

const queryUtils = require('../utils/query-utils');

function OverviewsMetadataBackend(pgQueryRunner) {
function OverviewsMetadataBackend (pgQueryRunner) {
this.pgQueryRunner = pgQueryRunner;
}

@@ -12,20 +12,20 @@ OverviewsMetadataBackend.prototype.getOverviewsMetadata = function (username, sq
// FIXME: Currently using internal function _cdb_schema_name
// CDB_Overviews should provide the schema information directly.
const query = `
SELECT *, _cdb_schema_name(base_table)
FROM CDB_Overviews(
CDB_QueryTablesText($windshaft$${queryUtils.substituteDummyTokens(sql)}$windshaft$)
SELECT *, cartodb._cdb_schema_name(base_table)
FROM cartodb.CDB_Overviews(
cartodb.CDB_QueryTablesText($windshaft$${queryUtils.substituteDummyTokens(sql)}$windshaft$)
);
`;
this.pgQueryRunner.run(username, query, function handleOverviewsRows(err, rows) {
if (err){
this.pgQueryRunner.run(username, query, function handleOverviewsRows (err, rows) {
if (err) {
callback(err);
return;
}
var metadata = rows.reduce(function(metadata, row){
var metadata = rows.reduce(function (metadata, row) {
var table = row.base_table;
var schema = row._cdb_schema_name;
if ( !metadata[table] ) {
if (!metadata[table]) {
metadata[table] = {};
}
metadata[table][row.z] = { table: row.overview_table };
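The reduce above builds a per-base-table map of overview tables keyed by zoom level, roughly of this shape (illustrative, with hypothetical table and overview names):

// Illustrative result of reducing the CDB_Overviews rows:
const overviewsMetadata = {
    my_table: {                            // hypothetical base table
        1: { table: '_vovw_1_my_table' },  // hypothetical overview tables per zoom
        2: { table: '_vovw_2_my_table' }
    }
};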
@@ -3,14 +3,14 @@
var PSQL = require('cartodb-psql');
var _ = require('underscore');
const debug = require('debug')('cachechan');
const dbParamsFromReqParams = require('../utils/database-params');

function PgConnection(metadataBackend) {
function PgConnection (metadataBackend) {
this.metadataBackend = metadataBackend;
}

module.exports = PgConnection;


// Set db authentication parameters to those of the given username
//
// @param username the cartodb username, mapped to a database username
@@ -21,7 +21,7 @@ module.exports = PgConnection;
//
// @param callback function(err)
//
PgConnection.prototype.setDBAuth = function(username, params, apikeyType, callback) {
PgConnection.prototype.setDBAuth = function (username, params, apikeyType, callback) {
if (apikeyType === 'master') {
this.metadataBackend.getMasterApikey(username, (err, apikey) => {
if (err) {
@@ -36,7 +36,7 @@ PgConnection.prototype.setDBAuth = function(username, params, apikeyType, callba

return callback();
});
} else if (apikeyType === 'regular') { //Actually it can be any type of api key
} else if (apikeyType === 'regular') { // Actually it can be any type of api key
this.metadataBackend.getApikey(username, params.api_key, (err, apikey) => {
if (err) {
if (isNameNotFoundError(err)) {
@@ -70,10 +70,9 @@ PgConnection.prototype.setDBAuth = function(username, params, apikeyType, callba
};

function isNameNotFoundError (err) {
return err.message && -1 !== err.message.indexOf('name not found');
return err.message && err.message.indexOf('name not found') !== -1;
}
// Set db connection parameters to those for the given username
//
// @param dbowner cartodb username of database owner,
@@ -85,7 +84,7 @@ function isNameNotFoundError (err) {
//
// @param callback function(err)
//
PgConnection.prototype.setDBConn = function(dbowner, params, callback) {
PgConnection.prototype.setDBConn = function (dbowner, params, callback) {
_.defaults(params, {
// dbuser: global.environment.postgres.user,
// dbpassword: global.environment.postgres.password,
@@ -117,25 +116,18 @@ PgConnection.prototype.setDBConn = function(dbowner, params, callback) {
* @param {Function} callback function({Error}, {PSQL})
*/

PgConnection.prototype.getConnection = function(username, callback) {
debug("getConn1");
PgConnection.prototype.getConnection = function (username, callback) {
debug('getConn1');

this.getDatabaseParams(username, (err, databaseParams) => {
if (err) {
return callback(err);
}
return callback(err, new PSQL({
user: databaseParams.dbuser,
pass: databaseParams.dbpass,
host: databaseParams.dbhost,
port: databaseParams.dbport,
dbname: databaseParams.dbname
}));

return callback(err, new PSQL(dbParamsFromReqParams(databaseParams)));
});
};

PgConnection.prototype.getDatabaseParams = function(username, callback) {
PgConnection.prototype.getDatabaseParams = function (username, callback) {
const databaseParams = {};

this.setDBAuth(username, databaseParams, 'master', err => {
Some files were not shown because too many files have changed in this diff.