Compare commits
193 Commits
SHA1:

8f510f401e, 92678c3dae, 9f2d1f90d0, 23e331610d, 576518b2c8, 0631bafbbf, d9b6284914, 111b927033,
d63337f06f, 7012e6a66a, 726e1a2268, 6e455a1205, da07d550d2, 1829a634e9, 95f66b8c4b, ea1f43bec7,
c877d0b964, caf09ac644, 17f151cd5a, 0940158d01, e6bbe8351d, 031bae2564, b8d790caab, 267557eb90,
b2af93dfec, 7e81618769, eeac5ce998, fcf2fd1455, fb9dce0386, 4c09a70647, eee59abfa1, c7effbccb4,
2912e4fea6, 2d09a214ae, a88c085278, 5dcca3e088, 413a1685aa, 7081a7ec3c, 33143ea28e, f8c86f3b72,
ae53cc736b, eca75d1365, ef201e6fcf, 38a556b7d6, c071746768, 57512ba48b, dcf765efda, 525d41e63c,
7d7ca0de4a, 11e5726ea9, d3f0c52474, 8523f835dc, 63ccfac599, 283baa4a3f, c7bd132e2f, 3c92e186d6,
67d8919f8a, 06c0b28d37, dfedb45254, b373965510, 52d887f3b4, a6ca480210, 16e80424e0, 6c72d3adbe,
bbc9c9fb9b, 42d0c4c040, 8f99886d62, 60c01e583f, f21f89f561, 5f900a3b3c, 60db55b122, d9c05a9333,
ab66ad83fd, 3498fceb6a, e841774978, f297044203, c7e803a94c, ac198d5b5a, 6eb66de94e, f545b4d002,
eee3e8b63c, 69afee61e0, 724f67d381, 8d69af4445, 3c301ce742, f87c432744, d446ba9c1b, dc669f5cd4,
d4719d5707, f9082dad94, a8d421c9cc, 7b13c12ab4, 97f4adbc1a, 602ab44375, 77e6fb8225, 2c8a030ecb,
ccd01e6da5, df10cfe641, 09d3e8aabb, f17411916f, 75583f67c5, bb745b0318, 3834aeb73f, aa09c079f6,
3c586caba4, b05740048c, 2b5ed21207, acecb88efb, 734c373f3d, e49cb524a8, cc24228511, 27106fea57,
990aaadc16, 0c572b5947, 3e7c294989, 8a02156ac0, c4a75de0d8, db03bcdf8f, dd5825c770, 8fbe8f9f2a,
3bc3d19f40, 575fe8e350, d5218a86f6, 080f93f6de, df931d95a3, d5406d5b50, f7e877ce60, ad4a1ada45,
da0d0d21e3, 7a1d2ca205, d89e785440, 2423b5a4c4, 1bee877b24, 4d70ac0894, 593d9e40f6, 9fd1a3c663,
8a781d241c, be4d610de1, 736d3460d9, f844d70275, 0c9cfefcd0, 8ed187b0f5, e5bada81dc, 655f817033,
ebff2ac9f2, 5a7ffcf499, f8e117a7b7, c4054f0ac9, f7707141d6, c40c42fc10, 6cad976078, c82f17e5d2,
1054bde7fd, 9e23b91f3f, ea6e064e42, cf0858f5b9, 69b11a8412, 55aad4254c, 73e1659378, 98f3e8159e,
e8cff194fc, f1de1b3b91, a134ab3012, 5a84d7233b, 8fe0112568, 3acaac5403, 7dbac5a565, 8fb4f4063f,
808718fb26, 6dc8de315a, afb9b08925, 2bed034e64, 2328bb6261, 06357fa3f9, 83f58288f9, b1d5f0f9e8,
7142e4db37, 281a079a62, 0d638e6bad, 43a63feaca, 4aa6ffe28c, 2ce688ee2a, 4e967980a3, 93edf07da8,
a684bead92, dd06de2632, 975f07df99, 5fe6845d7c, 4aa844946d, 3220e3de31, 26bba3c5f5, c82a5c38df,
9cfaf6eefc
.eslintrc.js (new file, 22 lines)

@@ -0,0 +1,22 @@
module.exports = {
    env: {
        commonjs: true,
        es6: true,
        node: true,
        mocha: true
    },
    extends: [
        'standard'
    ],
    globals: {
        Atomics: 'readonly',
        SharedArrayBuffer: 'readonly'
    },
    parserOptions: {
        ecmaVersion: 2018
    },
    rules: {
        "indent": ["error", 4],
        "semi": ["error", "always"]
    }
}
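The two entries in `rules` override the `standard` preset, which by itself expects two-space indentation and no semicolons; here four-space indentation and mandatory semicolons are enforced instead. A minimal sketch of code that lints cleanly under this configuration (the function is illustrative only and not part of the changeset):

```js
'use strict';

// Four-space indentation satisfies "indent": ["error", 4];
// the trailing semicolons satisfy "semi": ["error", "always"].
function buildGreeting (name) {
    const parts = ['Hello', name];

    return parts.join(', ');
}

module.exports = { buildGreeting };
```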
.gitignore (vendored, 1 line added)

@@ -11,3 +11,4 @@ redis.pid
*.log
coverage/
.DS_Store
.nyc_output
.jshintrc (deleted, 95 lines)
@@ -1,95 +0,0 @@
|
||||
{
|
||||
// // JSHint Default Configuration File (as on JSHint website)
|
||||
// // See http://jshint.com/docs/ for more details
|
||||
//
|
||||
// "maxerr" : 50, // {int} Maximum error before stopping
|
||||
//
|
||||
// // Enforcing
|
||||
// "bitwise" : true, // true: Prohibit bitwise operators (&, |, ^, etc.)
|
||||
// "camelcase" : false, // true: Identifiers must be in camelCase
|
||||
"curly" : true, // true: Require {} for every new block or scope
|
||||
"eqeqeq" : true, // true: Require triple equals (===) for comparison
|
||||
"forin" : true, // true: Require filtering for..in loops with obj.hasOwnProperty()
|
||||
"freeze" : true, // true: prohibits overwriting prototypes of native objects such as Array, Date etc.
|
||||
"immed" : true, // true: Require immediate invocations to be wrapped in parens e.g. `(function () { } ());`
|
||||
// "indent" : 4, // {int} Number of spaces to use for indentation
|
||||
// "latedef" : false, // true: Require variables/functions to be defined before being used
|
||||
"newcap" : true, // true: Require capitalization of all constructor functions e.g. `new F()`
|
||||
"noarg" : true, // true: Prohibit use of `arguments.caller` and `arguments.callee`
|
||||
// "noempty" : true, // true: Prohibit use of empty blocks
|
||||
"nonbsp" : true, // true: Prohibit "non-breaking whitespace" characters.
|
||||
"nonew" : true, // true: Prohibit use of constructors for side-effects (without assignment)
|
||||
// "plusplus" : false, // true: Prohibit use of `++` & `--`
|
||||
// "quotmark" : false, // Quotation mark consistency:
|
||||
// // false : do nothing (default)
|
||||
// // true : ensure whatever is used is consistent
|
||||
// // "single" : require single quotes
|
||||
// // "double" : require double quotes
|
||||
"undef" : true, // true: Require all non-global variables to be declared (prevents global leaks)
|
||||
"unused" : true, // true: Require all defined variables be used
|
||||
// "strict" : true, // true: Requires all functions run in ES5 Strict Mode
|
||||
// "maxparams" : false, // {int} Max number of formal params allowed per function
|
||||
// "maxdepth" : false, // {int} Max depth of nested blocks (within functions)
|
||||
// "maxstatements" : false, // {int} Max number statements per function
|
||||
"maxcomplexity" : 6, // {int} Max cyclomatic complexity per function
|
||||
"maxlen" : 120, // {int} Max number of characters per line
|
||||
//
|
||||
// // Relaxing
|
||||
// "asi" : false, // true: Tolerate Automatic Semicolon Insertion (no semicolons)
|
||||
// "boss" : false, // true: Tolerate assignments where comparisons would be expected
|
||||
"debug" : false, // true: Allow debugger statements e.g. browser breakpoints.
|
||||
// "eqnull" : false, // true: Tolerate use of `== null`
|
||||
// "es5" : false, // true: Allow ES5 syntax (ex: getters and setters)
|
||||
"esnext" : true, // true: Allow ES.next (ES6) syntax (ex: `const`)
|
||||
// "moz" : false, // true: Allow Mozilla specific syntax (extends and overrides esnext features)
|
||||
// // (ex: `for each`, multiple try/catch, function expression…)
|
||||
// "evil" : false, // true: Tolerate use of `eval` and `new Function()`
|
||||
// "expr" : false, // true: Tolerate `ExpressionStatement` as Programs
|
||||
// "funcscope" : false, // true: Tolerate defining variables inside control statements
|
||||
// "globalstrict" : false, // true: Allow global "use strict" (also enables 'strict')
|
||||
// "iterator" : false, // true: Tolerate using the `__iterator__` property
|
||||
// "lastsemic" : false, // true: Tolerate omitting a semicolon for the last statement of a 1-line block
|
||||
// "laxbreak" : false, // true: Tolerate possibly unsafe line breakings
|
||||
// "laxcomma" : false, // true: Tolerate comma-first style coding
|
||||
// "loopfunc" : false, // true: Tolerate functions being defined in loops
|
||||
// "multistr" : false, // true: Tolerate multi-line strings
|
||||
// "noyield" : false, // true: Tolerate generator functions with no yield statement in them.
|
||||
// "notypeof" : false, // true: Tolerate invalid typeof operator values
|
||||
// "proto" : false, // true: Tolerate using the `__proto__` property
|
||||
// "scripturl" : false, // true: Tolerate script-targeted URLs
|
||||
// "shadow" : false, // true: Allows re-define variables later in code e.g. `var x=1; x=2;`
|
||||
// "sub" : false, // true: Tolerate using `[]` notation when it can still be expressed in dot notation
|
||||
// "supernew" : false, // true: Tolerate `new function () { ... };` and `new Object;`
|
||||
// "validthis" : false, // true: Tolerate using this in a non-constructor function
|
||||
//
|
||||
// // Environments
|
||||
// "browser" : true, // Web Browser (window, document, etc)
|
||||
// "browserify" : false, // Browserify (node.js code in the browser)
|
||||
// "couch" : false, // CouchDB
|
||||
// "devel" : true, // Development/debugging (alert, confirm, etc)
|
||||
// "dojo" : false, // Dojo Toolkit
|
||||
// "jasmine" : false, // Jasmine
|
||||
// "jquery" : false, // jQuery
|
||||
// "mocha" : true, // Mocha
|
||||
// "mootools" : false, // MooTools
|
||||
"node" : true, // Node.js
|
||||
// "nonstandard" : false, // Widely adopted globals (escape, unescape, etc)
|
||||
// "prototypejs" : false, // Prototype and Scriptaculous
|
||||
// "qunit" : false, // QUnit
|
||||
// "rhino" : false, // Rhino
|
||||
// "shelljs" : false, // ShellJS
|
||||
// "worker" : false, // Web Workers
|
||||
// "wsh" : false, // Windows Scripting Host
|
||||
// "yui" : false, // Yahoo User Interface
|
||||
|
||||
// Custom predefined global variables
|
||||
"predef": [
|
||||
"-console", // disallows console, use debug
|
||||
"beforeEach",
|
||||
"afterEach",
|
||||
"before",
|
||||
"after",
|
||||
"describe",
|
||||
"it"
|
||||
]
|
||||
}
|
||||
@@ -9,4 +9,4 @@ env:
services:
  - docker
before_install: docker pull ${DOCKER_IMAGE}
script: npm run docker-test -- ${DOCKER_IMAGE} ${NODE_VERSION}
script: npm run test:docker
@@ -1,11 +0,0 @@
Contributing
---

The issue tracker is at [github.com/CartoDB/Windshaft-cartodb](https://github.com/CartoDB/Windshaft-cartodb).

We love pull requests from everyone, see [Contributing to Open Source on GitHub](https://guides.github.com/activities/contributing-to-open-source/#contributing).


## Submitting Contributions

* You will need to sign a Contributor License Agreement (CLA) before making a submission. [Learn more here](https://carto.com/contributions).
@@ -1,18 +0,0 @@
1. Test (make clean all check), fix if broken before proceeding
2. Ensure proper version in package.json and package-lock.json
3. Ensure NEWS section exists for the new version, review it, add release date
4. If there are modified dependencies in package.json, update them with `npm upgrade {{package_name}}@{{version}}`
5. Commit package.json, package-lock.json, NEWS
6. git tag -a Major.Minor.Patch # use NEWS section as content
7. Stub NEWS/package for next version

Versions:

Bugfix releases increment Patch component of version.
Feature releases increment Minor and set Patch to zero.
If backward compatibility is broken, increment Major and
set to zero Minor and Patch.

Branches named 'b<Major>.<Minor>' are kept for any critical
fix that might need to be shipped before next feature release
is ready.
HOW_TO_RELEASE.md (new file, 16 lines)

@@ -0,0 +1,16 @@
# How to release

1. Test (npm test), fix if broken before proceeding.
2. Ensure proper version in `package.json` and `package-lock.json`.
3. Ensure NEWS section exists for the new version, review it, add release date.
4. If there are modified dependencies in `package.json`, update them with `npm upgrade {{package_name}}@{{version}}`.
5. Commit `package.json`, `package-lock.json`, NEWS.
6. Run `git tag -a Major.Minor.Patch`. Use NEWS section as content.
7. Stub NEWS/package for next version.

## Version:

* Bugfix releases increment Patch component of version.
* Feature releases increment Minor and set Patch to zero.
* If backward compatibility is broken, increment Major and set to zero Minor and Patch.
* Branches named 'b<Major>.<Minor>' are kept for any critical fix that might need to be shipped before next feature release is ready.
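Step 2 asks for the version to agree across `package.json` and `package-lock.json`. A small sketch of a check that could be run from the repository root before tagging (the script itself is illustrative and not part of this changeset):

```js
// check-version.js (illustrative): verify package.json and package-lock.json agree.
'use strict';

const assert = require('assert');

const pkg = require('./package.json');
const lock = require('./package-lock.json');

// npm writes the top-level "version" field into both files; they must match before `git tag`.
assert.strictEqual(pkg.version, lock.version,
    `package.json (${pkg.version}) and package-lock.json (${lock.version}) disagree`);

console.log(`Version ${pkg.version} is consistent, ready to tag.`);
```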
INSTALL.md (deleted, 41 lines)
@@ -1,41 +0,0 @@
|
||||
# Installing Windshaft-CartoDB
|
||||
|
||||
## Requirements
|
||||
|
||||
Make sure that you have the requirements needed. These are:
|
||||
|
||||
- Node 10.x
|
||||
- npm 6.x
|
||||
- PostgreSQL >= 10.0
|
||||
- PostGIS >= 2.4
|
||||
- CARTO Postgres Extension >= 0.24.1
|
||||
- Redis >= 4
|
||||
- libcairo2-dev, libpango1.0-dev, libjpeg8-dev and libgif-dev for server side canvas support
|
||||
- C++11 (to build internal dependencies if needed)
|
||||
|
||||
### Optional
|
||||
|
||||
- Varnish (http://www.varnish-cache.org)
|
||||
|
||||
## PostGIS setup
|
||||
|
||||
A `template_postgis` database is expected. One can be set up with
|
||||
|
||||
```shell
|
||||
createdb --owner postgres --template template0 template_postgis
|
||||
psql -d template_postgis -c 'CREATE EXTENSION postgis;'
|
||||
```
|
||||
|
||||
## Build/install
|
||||
|
||||
To fetch and build all node-based dependencies, run:
|
||||
|
||||
```shell
|
||||
npm install
|
||||
```
|
||||
|
||||
Note that the ```npm``` step will populate the node_modules/
|
||||
directory with modules, some of which being compiled on demand. If you
|
||||
happen to have startup errors you may need to force rebuilding those
|
||||
modules. At any time just wipe out the node_modules/ directory and run
|
||||
```npm``` again.
|
||||
Makefile (deleted, 53 lines)
@@ -1,53 +0,0 @@
|
||||
SHELL=/bin/bash
|
||||
|
||||
pre-install:
|
||||
@$(SHELL) ./scripts/check-node-canvas.sh
|
||||
|
||||
all:
|
||||
@$(SHELL) ./scripts/install.sh
|
||||
|
||||
clean:
|
||||
rm -rf node_modules/
|
||||
|
||||
distclean: clean
|
||||
rm config.status*
|
||||
|
||||
config.status--test:
|
||||
./configure --environment=test
|
||||
|
||||
config/environments/test.js: config.status--test
|
||||
./config.status--test
|
||||
|
||||
TEST_SUITE := $(shell find test/{acceptance,integration,unit} -name "*.js")
|
||||
TEST_SUITE_UNIT := $(shell find test/unit -name "*.js")
|
||||
TEST_SUITE_INTEGRATION := $(shell find test/integration -name "*.js")
|
||||
TEST_SUITE_ACCEPTANCE := $(shell find test/acceptance -name "*.js")
|
||||
|
||||
test: config/environments/test.js
|
||||
@echo "***tests***"
|
||||
@$(SHELL) ./run_tests.sh ${RUNTESTFLAGS} $(TEST_SUITE)
|
||||
|
||||
test-unit: config/environments/test.js
|
||||
@echo "***tests***"
|
||||
@$(SHELL) ./run_tests.sh ${RUNTESTFLAGS} $(TEST_SUITE_UNIT)
|
||||
|
||||
test-integration: config/environments/test.js
|
||||
@echo "***tests***"
|
||||
@$(SHELL) ./run_tests.sh ${RUNTESTFLAGS} $(TEST_SUITE_INTEGRATION)
|
||||
|
||||
test-acceptance: config/environments/test.js
|
||||
@echo "***tests***"
|
||||
@$(SHELL) ./run_tests.sh ${RUNTESTFLAGS} $(TEST_SUITE_ACCEPTANCE)
|
||||
|
||||
jshint:
|
||||
@echo "***jshint***"
|
||||
@./node_modules/.bin/jshint lib/ test/ app.js
|
||||
|
||||
test-all: test jshint
|
||||
|
||||
coverage:
|
||||
@RUNTESTFLAGS=--with-coverage make test
|
||||
|
||||
check: test
|
||||
|
||||
.PHONY: pre-install test jshint coverage
|
||||
NEWS.md (43 changed lines)
@@ -1,5 +1,48 @@
# Changelog

## 8.1.1
Released 2020-02-17

Announcements:
- Upgrade camshaft to [`0.65.2`](https://github.com/CartoDB/camshaft/blob/69c9447c9fccf00a70a67d713d1ce777775a17ff/CHANGELOG.md#0652): Fixes uncatched errors problem (#1117)

## 8.1.0
Released 2020-01-27

Announcements:
- Removed `jshint` as linter in favour of `eslint` to check syntax, find problems, and enforce code style.
- Upgrade `camshaft` to [`0.65.1`](https://github.com/CartoDB/camshaft/blob/a2836c15fd2830f8364a222eeafdb4dc2f41b580/CHANGELOG.md#0651): Use quoted identifiers for column names and enforce the usage of the cartodb schema when using cartodb extension functions and tables.
- Stop using two different tools for package management, testing, and any other developer workflow.
- Removes Makefile and related bash scripts
- Use npm scripts as the only tool for testing, CI and linting.
- Simplified CI configuration.
- Improved documentation:
- Centralized several documents into README.md
- Remove outdated sections
- Update old sections
- Added missing sections.
- Remove deprecated coverage tool istanbul, using nyc instead.
- Removed unused dockerfiles
- Use cartodb schema when using cartodb extension functions and tables.
- Implemented circle and polygon dataview filters.

## 8.0.0
Released 2019-11-13

Breaking changes:
- Schema change for "routes" in configuration file, each "router" is now an array instead of an object. See [`dd06de2`](https://github.com/CartoDB/Windshaft-cartodb/pull/1126/commits/dd06de2632661e19d64c9fbc2be0ba1a8059f54c) for more details.

Announcements:
- Added validation to only allow "count" and "sum" aggregations in dataview overview.
- Added mechanism to inject custom middlewares through configuration.
- Stop requiring unused config properties: "base_url", "base_url_mapconfig", and "base_url_templated".
- Upgraded cartodb-query-tables to version [0.7.0](https://github.com/CartoDB/node-cartodb-query-tables/blob/0.7.0/NEWS.md#version-0.7.0).
- Be able to set a coherent TTL in Cache-Control header to expire all resources belonging to a map simultaneously.
- When `cache buster` in request path is `0` set header `Last-Modified` to now, it avoids stalled content in 3rd party cache providers when they add `If-Modified-Since` header into the request.
- Adding a logger to MapStore (#1134)
- Qualify calls to cartodb extension so having it in the search_path isn't necessary.
- Fix multiple DB login issues.

## 7.2.0
Released 2019-09-30

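The 8.0.0 breaking change above turns each "router" entry of the `routes` configuration into an array of objects with `paths` and optional `middlewares` (the mechanism used to inject custom middlewares). A minimal sketch of that array-based shape, reconstructed from the `config/environments/*.js.example` diffs further down; the exact nesting and the `logRequests` middleware are illustrative only:

```js
// Sketch only: the array-based routes schema introduced in 8.0.0.
const routes = {
    api: [{
        paths: ['/api/v1', '/user/:user/api/v1'],
        // Optional middlewares attached at the beginning of the router.
        middlewares: [
            function logRequests () {
                return function logRequestsMiddleware (req, res, next) {
                    console.log(`${req.method} ${req.originalUrl}`);
                    next();
                };
            }
        ],
        map: [{ paths: ['/map'], middlewares: [] }],
        template: [{ paths: ['/map/named'], middlewares: [] }]
    }]
};

module.exports = { routes };
```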
README.md (172 changed lines)
@@ -1,80 +1,126 @@
|
||||
Windshaft-CartoDB
|
||||
==================
|
||||
# Windshaft-CartoDB [](https://travis-ci.org/CartoDB/Windshaft-cartodb)
|
||||
|
||||
[](https://travis-ci.org/CartoDB/Windshaft-cartodb)
|
||||
The [`CARTO Maps API`](https://carto.com/developers/maps-api/) tiler. It extends [`Windshaft`](https://github.com/CartoDB/Windshaft) and exposes a web service with extra functionality:
|
||||
|
||||
This is the [CartoDB Maps API](http://docs.cartodb.com/cartodb-platform/maps-api.html) tiler. It extends
|
||||
[Windshaft](https://github.com/CartoDB/Windshaft) with some extra functionality and custom filters for authentication.
|
||||
* Instantiate [`Anonymous Maps`](https://github.com/CartoDB/Windshaft-cartodb/blob/master/docs/guides/03-anonymous-maps.md) through CARTO's map configuration ([`MapConfig`](https://github.com/CartoDB/Windshaft/blob/master/doc/MapConfig-specification.md)).
|
||||
* Create [`Named Maps`](https://github.com/CartoDB/Windshaft-cartodb/blob/master/docs/guides/04-named-maps.md) based on customizable templates.
|
||||
* Get map previews through [`Static Maps`](https://github.com/CartoDB/Windshaft-cartodb/blob/master/docs/guides/05-static-maps-API.md) API.
|
||||
* Render maps with a large amount of data faster using [`Tile Aggregation`](https://github.com/CartoDB/Windshaft-cartodb/blob/master/docs/guides/06-tile-aggregation.md).
|
||||
* Build advanced maps with enriched data through [`Analyses Extension`](https://github.com/CartoDB/Windshaft-cartodb/blob/master/docs/guides/09-MapConfig-analyses-extension.md).
|
||||
* Fetch tabular data from analysis nodes with [`Dataviews`](https://github.com/CartoDB/Windshaft-cartodb/blob/master/docs/guides/10-MapConfig-dataviews-extension.md)
|
||||
|
||||
* reads dbname from subdomain and cartodb redis for pretty tile urls
|
||||
* configures windshaft to publish `cartodb_id` as the interactivity layer
|
||||
* gets the default geometry type from the cartodb redis store
|
||||
* allows tiles to be styled individually
|
||||
* provides a link to varnish high speed cache
|
||||
* provides a [template maps API](https://github.com/CartoDB/Windshaft-cartodb/blob/master/docs/Template-maps.md)
|
||||
## Build
|
||||
|
||||
Install
|
||||
-------
|
||||
See [INSTALL.md](INSTALL.md) for detailed installation instructions.
|
||||
Requirements:
|
||||
|
||||
Configure
|
||||
---------
|
||||
* [`Node 10.x (npm 6.x)`](https://nodejs.org/dist/latest-v10.x/)
|
||||
* [`PostgreSQL >= 10.0`](https://www.postgresql.org/download/)
|
||||
* [`PostGIS >= 2.4`](https://postgis.net/install/)
|
||||
* [`CARTO Postgres Extension >= 0.24.1`](https://github.com/CartoDB/cartodb-postgresql)
|
||||
* [`Redis >= 4`](https://redis.io/download)
|
||||
* `libcairo2-dev`, `libpango1.0-dev`, `libjpeg8-dev` and `libgif-dev` for server side canvas support
|
||||
* `C++11` to build internal dependencies. When there's no pre-built binaries for your OS/architecture distribution.
|
||||
|
||||
Create the config/environments/<env>.js files (there are .example files
|
||||
to start from). You can optionally use the ./configure script for this,
|
||||
see ```./configure --help``` to see available options.
|
||||
Optional:
|
||||
|
||||
Look at lib/cartodb/server_options.js for more on config
|
||||
* [`Varnish`](http://www.varnish-cache.org)
|
||||
* [`Statsd`](https://github.com/statsd/statsd)
|
||||
|
||||
Upgrading
|
||||
---------
|
||||
### PostGIS setup
|
||||
|
||||
Checkout your commit/branch. If you need to reinstall dependencies (you can check [NEWS](NEWS.md)) do the following:
|
||||
A `template_postgis` database is expected. One can be set up with
|
||||
|
||||
```sh
|
||||
$ rm -rf node_modules
|
||||
```shell
|
||||
$ createdb --owner postgres --template template0 template_postgis
|
||||
$ psql -d template_postgis -c 'CREATE EXTENSION postgis;'
|
||||
```
|
||||
|
||||
### Install
|
||||
|
||||
To fetch and build all node-based dependencies, run:
|
||||
|
||||
```shell
|
||||
$ npm install
|
||||
```
|
||||
|
||||
```
|
||||
node app.js <env>
|
||||
```
|
||||
### Run
|
||||
|
||||
Where <env> is the name of a configuration file under config/environments/.
|
||||
|
||||
Note that caches are kept in redis. If you're not seeing what you expect
|
||||
there may be out-of-sync records in there.
|
||||
Take a look: http://redis.io/commands
|
||||
|
||||
|
||||
Documentation
|
||||
-------------
|
||||
|
||||
The [docs directory](https://github.com/CartoDB/Windshaft-cartodb/tree/master/docs) contains different documentation
|
||||
resources, from higher level to more detailed ones:
|
||||
The [Maps API](https://github.com/CartoDB/Windshaft-cartodb/blob/master/docs/Map-API.md) defined the endpoints and their
|
||||
expected parameters and outputs.
|
||||
|
||||
|
||||
Examples
|
||||
--------
|
||||
|
||||
[CartoDB's Map Gallery](http://cartodb.com/gallery/) showcases several examples of visualisations built on top of this.
|
||||
|
||||
Contributing
|
||||
---
|
||||
|
||||
See [CONTRIBUTING.md](CONTRIBUTING.md).
|
||||
|
||||
### Developing with a custom windshaft version
|
||||
|
||||
If you plan or want to use a custom / not released yet version of windshaft (or any other dependency) the best option is
|
||||
to use `npm link`. You can read more about it at [npm-link: Symlink a package folder](https://docs.npmjs.com/cli/link.html).
|
||||
|
||||
**Quick start**:
|
||||
Create the `./config/environments/<env>.js` file (there are `.example` files to start from). Look at `./lib/server-options.js` for more on config.
|
||||
|
||||
```shell
|
||||
~/windshaft-directory $ npm install
|
||||
~/windshaft-directory $ npm link
|
||||
~/windshaft-cartodb-directory $ npm link windshaft
|
||||
$ node app.js <env>
|
||||
```
|
||||
|
||||
Where `<env>` is the name of a configuration file under `./config/environments/`.
|
||||
|
||||
### Test
|
||||
|
||||
```shell
|
||||
$ npm test
|
||||
```
|
||||
|
||||
### Coverage
|
||||
|
||||
```shell
|
||||
$ npm run cover
|
||||
```
|
||||
|
||||
Open `./coverage/lcov-report/index.html`.
|
||||
|
||||
### Docker support
|
||||
|
||||
We provide docker images just for testing and continuous integration purposes:
|
||||
|
||||
* [`nodejs-xenial-pg1121`](https://hub.docker.com/r/carto/nodejs-xenial-pg1121/tags)
|
||||
* [`nodejs-xenial-pg101`](https://hub.docker.com/r/carto/nodejs-xenial-pg101/tags)
|
||||
|
||||
You can find instructions to install Docker, download, and update images [here](https://github.com/CartoDB/Windshaft-cartodb/blob/master/docker/reference.md).
|
||||
|
||||
### Useful `npm` scripts
|
||||
|
||||
Run test in a docker image with a specific Node.js version:
|
||||
|
||||
```shell
|
||||
$ DOCKER_IMAGE=<docker-image-tag> NODE_VERSION=<nodejs-version> npm run test:docker
|
||||
```
|
||||
|
||||
Where:
|
||||
|
||||
* `<docker-image-tag>`: the tag of required docker image, e.g. `carto/nodejs-xenial-pg1121:latest`
|
||||
* `<nodejs-version>`: the Node.js version, e.g. `10.15.1`
|
||||
|
||||
In case you need to debug:
|
||||
|
||||
```shell
|
||||
$ DOCKER_IMAGE=<docker-image-tag> npm run docker:bash
|
||||
```
|
||||
|
||||
## Documentation
|
||||
|
||||
You can find an overview, guides, full reference, and support in [`CARTO's developer center`](https://carto.com/developers/maps-api/). The [docs directory](https://github.com/CartoDB/Windshaft-cartodb/tree/master/docs) contains different documentation resources, from a higher level to more detailed ones.
|
||||
|
||||
## Contributing
|
||||
|
||||
* The issue tracker: [`Github`](https://github.com/CartoDB/Windshaft-cartodb/issues).
|
||||
* We love Pull Requests from everyone, see [contributing to Open Source on GitHub](https://guides.github.com/activities/contributing-to-open-source/#contributing).
|
||||
* You'll need to sign a Contributor License Agreement (CLA) before submitting a Pull Request. [Learn more here](https://carto.com/contributions).
|
||||
|
||||
## Developing with a custom `Windshaft` version
|
||||
|
||||
If you plan or want to use a custom / not released yet version of windshaft (or any other dependency), the best option is to use `npm link`. You can read more about it at `npm-link`: [symlink a package folder](https://docs.npmjs.com/cli/link.html).
|
||||
|
||||
```shell
|
||||
$ cd /path/to/Windshaft
|
||||
$ npm install
|
||||
$ npm link
|
||||
$ cd /path/to/Windshaft-cartodb
|
||||
$ npm link windshaft
|
||||
```
|
||||
|
||||
## Versioning
|
||||
|
||||
We follow [`SemVer`](http://semver.org/) for versioning. For available versions, see the [tags on this repository](https://github.com/CartoDB/Windshaft-cartodb/tags).
|
||||
|
||||
## License
|
||||
|
||||
This project is licensed under the BSD 3-clause "New" or "Revised" License. See the [LICENSE](LICENSE) file for details.
|
||||
|
||||
app.js (89 changed lines)
@@ -6,7 +6,7 @@ var path = require('path');
|
||||
var fs = require('fs');
|
||||
var _ = require('underscore');
|
||||
var semver = require('semver');
|
||||
const setICUEnvVariable = require('./lib/cartodb/utils/icu_data_env_setter');
|
||||
const setICUEnvVariable = require('./lib/utils/icu-data-env-setter');
|
||||
|
||||
// jshint undef:false
|
||||
var log = console.log.bind(console);
|
||||
@@ -24,12 +24,12 @@ if (!semver.satisfies(nodejsVersion, engines.node)) {
|
||||
setICUEnvVariable();
|
||||
|
||||
var argv = require('yargs')
|
||||
.usage('Usage: $0 <environment> [options]')
|
||||
.usage('Usage: node $0 <environment> [options]')
|
||||
.help('h')
|
||||
.example(
|
||||
'$0 production -c /etc/sql-api/config.js',
|
||||
'start server in production environment with /etc/sql-api/config.js as config file'
|
||||
)
|
||||
'node $0 production -c /etc/windshaft-cartodb/config.js',
|
||||
'start server in production environment with /etc/windshaft-cartodb/config.js as config file'
|
||||
)
|
||||
.alias('h', 'help')
|
||||
.alias('c', 'config')
|
||||
.nargs('c', 1)
|
||||
@@ -54,7 +54,7 @@ var availableEnvironments = {
|
||||
};
|
||||
|
||||
// sanity check
|
||||
if (!availableEnvironments[ENVIRONMENT]){
|
||||
if (!availableEnvironments[ENVIRONMENT]) {
|
||||
logError('node app.js [environment]');
|
||||
logError('environments: %s', Object.keys(availableEnvironments).join(', '));
|
||||
process.exit(1);
|
||||
@@ -76,27 +76,26 @@ var agentOptions = _.defaults(global.environment.httpAgent || {}, {
|
||||
http.globalAgent = new http.Agent(agentOptions);
|
||||
https.globalAgent = new https.Agent(agentOptions);
|
||||
|
||||
|
||||
global.log4js = require('log4js');
|
||||
var log4jsConfig = {
|
||||
appenders: [],
|
||||
replaceConsole: true
|
||||
};
|
||||
|
||||
if ( global.environment.log_filename ) {
|
||||
if (global.environment.log_filename) {
|
||||
var logFilename = path.resolve(global.environment.log_filename);
|
||||
var logDirectory = path.dirname(logFilename);
|
||||
if (!fs.existsSync(logDirectory)) {
|
||||
logError("Log filename directory does not exist: " + logDirectory);
|
||||
logError('Log filename directory does not exist: ' + logDirectory);
|
||||
process.exit(1);
|
||||
}
|
||||
log("Logs will be written to " + logFilename);
|
||||
log('Logs will be written to ' + logFilename);
|
||||
log4jsConfig.appenders.push(
|
||||
{ type: "file", absolute: true, filename: logFilename }
|
||||
{ type: 'file', absolute: true, filename: logFilename }
|
||||
);
|
||||
} else {
|
||||
log4jsConfig.appenders.push(
|
||||
{ type: "console", layout: { type:'basic' } }
|
||||
{ type: 'console', layout: { type: 'basic' } }
|
||||
);
|
||||
}
|
||||
|
||||
@@ -105,8 +104,8 @@ global.logger = global.log4js.getLogger();
|
||||
|
||||
// Include cartodb_windshaft only _after_ the "global" variable is set
|
||||
// See https://github.com/Vizzuality/Windshaft-cartodb/issues/28
|
||||
var cartodbWindshaft = require('./lib/cartodb/server');
|
||||
var serverOptions = require('./lib/cartodb/server_options');
|
||||
var cartodbWindshaft = require('./lib/server');
|
||||
var serverOptions = require('./lib/server-options');
|
||||
|
||||
var server = cartodbWindshaft(serverOptions);
|
||||
|
||||
@@ -118,13 +117,13 @@ var backlog = global.environment.maxConnections || 128;
|
||||
|
||||
var listener = server.listen(serverOptions.bind.port, serverOptions.bind.host, backlog);
|
||||
|
||||
var version = require("./package").version;
|
||||
var version = require('./package').version;
|
||||
|
||||
listener.on('listening', function() {
|
||||
log("Using Node.js %s", process.version);
|
||||
listener.on('listening', function () {
|
||||
log('Using Node.js %s', process.version);
|
||||
log('Using configuration file "%s"', configurationFile);
|
||||
log(
|
||||
"Windshaft tileserver %s started on %s:%s PID=%d (%s)",
|
||||
'Windshaft tileserver %s started on %s:%s PID=%d (%s)',
|
||||
version, serverOptions.bind.host, serverOptions.bind.port, process.pid, ENVIRONMENT
|
||||
);
|
||||
});
|
||||
@@ -163,15 +162,15 @@ setInterval(function cpuUsageMetrics () {
|
||||
previousCPUUsage = CPUUsage;
|
||||
}, 5000);
|
||||
|
||||
setInterval(function() {
|
||||
setInterval(function () {
|
||||
var memoryUsage = process.memoryUsage();
|
||||
Object.keys(memoryUsage).forEach(function(k) {
|
||||
Object.keys(memoryUsage).forEach(function (k) {
|
||||
global.statsClient.gauge('windshaft.memory.' + k, memoryUsage[k]);
|
||||
});
|
||||
}, 5000);
|
||||
|
||||
process.on('SIGHUP', function() {
|
||||
global.log4js.clearAndShutdownAppenders(function() {
|
||||
process.on('SIGHUP', function () {
|
||||
global.log4js.clearAndShutdownAppenders(function () {
|
||||
global.log4js.configure(log4jsConfig);
|
||||
global.logger = global.log4js.getLogger();
|
||||
log('Log files reloaded');
|
||||
@@ -179,12 +178,12 @@ process.on('SIGHUP', function() {
|
||||
});
|
||||
|
||||
if (global.gc) {
|
||||
var gcInterval = Number.isFinite(global.environment.gc_interval) ?
|
||||
global.environment.gc_interval :
|
||||
10000;
|
||||
var gcInterval = Number.isFinite(global.environment.gc_interval)
|
||||
? global.environment.gc_interval
|
||||
: 10000;
|
||||
|
||||
if (gcInterval > 0) {
|
||||
setInterval(function gcForcedCycle() {
|
||||
setInterval(function gcForcedCycle () {
|
||||
global.gc();
|
||||
}, gcInterval);
|
||||
}
|
||||
@@ -206,24 +205,24 @@ function getGCTypeValue (type) {
|
||||
let value;
|
||||
|
||||
switch (type) {
|
||||
case 1:
|
||||
value = 'Scavenge';
|
||||
break;
|
||||
case 2:
|
||||
value = 'MarkSweepCompact';
|
||||
break;
|
||||
case 4:
|
||||
value = 'IncrementalMarking';
|
||||
break;
|
||||
case 8:
|
||||
value = 'ProcessWeakCallbacks';
|
||||
break;
|
||||
case 15:
|
||||
value = 'All';
|
||||
break;
|
||||
default:
|
||||
value = 'Unkown';
|
||||
break;
|
||||
case 1:
|
||||
value = 'Scavenge';
|
||||
break;
|
||||
case 2:
|
||||
value = 'MarkSweepCompact';
|
||||
break;
|
||||
case 4:
|
||||
value = 'IncrementalMarking';
|
||||
break;
|
||||
case 8:
|
||||
value = 'ProcessWeakCallbacks';
|
||||
break;
|
||||
case 15:
|
||||
value = 'All';
|
||||
break;
|
||||
default:
|
||||
value = 'Unkown';
|
||||
break;
|
||||
}
|
||||
|
||||
return value;
|
||||
@@ -231,7 +230,7 @@ function getGCTypeValue (type) {
|
||||
|
||||
addHandlers(listener, global.logger, 45000);
|
||||
|
||||
function addHandlers(listener, logger, killTimeout) {
|
||||
function addHandlers (listener, logger, killTimeout) {
|
||||
process.on('uncaughtException', exitProcess(listener, logger, killTimeout));
|
||||
process.on('unhandledRejection', exitProcess(listener, logger, killTimeout));
|
||||
process.on('ENOMEM', exitProcess(listener, logger, killTimeout));
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
"redis": ">=4.0.0",
|
||||
"postgresql": ">=10.0.0",
|
||||
"postgis": ">=2.4.4.5",
|
||||
"carto_postgresql_ext": ">=0.24.1"
|
||||
"carto_postgresql_ext": ">=0.35.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -16,47 +16,41 @@ var config = {
|
||||
// Base URLs for the APIs
|
||||
//
|
||||
// See https://github.com/CartoDB/Windshaft-cartodb/wiki/Unified-Map-API
|
||||
//
|
||||
// Note: each entry corresponds with an express' router.
|
||||
// You must define at least one path. However, middlewares are optional.
|
||||
,routes: {
|
||||
v1: {
|
||||
api: [{
|
||||
paths: [
|
||||
'/api/v1',
|
||||
'/user/:user/api/v1',
|
||||
],
|
||||
// Base url for the Detached Maps API
|
||||
// "/api/v1/map" is the new API,
|
||||
map: {
|
||||
paths: [
|
||||
'/map',
|
||||
]
|
||||
},
|
||||
// Base url for the Templated Maps API
|
||||
// "/api/v1/map/named" is the new API,
|
||||
template: {
|
||||
paths: [
|
||||
'/map/named'
|
||||
]
|
||||
}
|
||||
},
|
||||
// For compatibility with versions up to 1.6.x
|
||||
v0: {
|
||||
paths: [
|
||||
'/tiles'
|
||||
// Optional: attach middlewares at the begining of the router
|
||||
// to perform custom operations.
|
||||
middlewares: [
|
||||
function noop () {
|
||||
return function noopMiddleware (req, res, next) {
|
||||
next();
|
||||
}
|
||||
}
|
||||
],
|
||||
// Base url for the Detached Maps API
|
||||
// "/tiles/layergroup" is for compatibility with versions up to 1.6.x
|
||||
map: {
|
||||
// "/api/v1/map" is the new API,
|
||||
map: [{
|
||||
paths: [
|
||||
'/layergroup'
|
||||
]
|
||||
},
|
||||
'/map',
|
||||
],
|
||||
middlewares: [] // Optional
|
||||
}],
|
||||
// Base url for the Templated Maps API
|
||||
// "/tiles/template" is for compatibility with versions up to 1.6.x
|
||||
template: {
|
||||
// "/api/v1/map/named" is the new API,
|
||||
template: [{
|
||||
paths: [
|
||||
'/template'
|
||||
]
|
||||
}
|
||||
}
|
||||
'/map/named'
|
||||
],
|
||||
middlewares: [] // Optional
|
||||
}]
|
||||
}]
|
||||
}
|
||||
|
||||
// Resource URLs expose endpoints to request/retrieve metadata associated to Maps: dataviews, analysis node status.
|
||||
@@ -92,7 +86,8 @@ var config = {
|
||||
// If log_filename is given logs will be written
|
||||
// there, in append mode. Otherwise stdout is used (default).
|
||||
// Log file will be re-opened on receiving the HUP signal
|
||||
,log_filename: 'logs/node-windshaft.log'
|
||||
,log_filename: undefined
|
||||
,log_windshaft: true
|
||||
// Templated database username for authorized user
|
||||
// Supported labels: 'user_id' (read from redis)
|
||||
,postgres_auth_user: 'development_cartodb_user_<%= user_id %>'
|
||||
@@ -271,7 +266,7 @@ var config = {
|
||||
// If filename is given logs comming from analysis client will be written
|
||||
// there, in append mode. Otherwise 'log_filename' is used. Otherwise stdout is used (default).
|
||||
// Log file will be re-opened on receiving the HUP signal
|
||||
filename: 'logs/node-windshaft-analysis.log'
|
||||
filename: undefined
|
||||
},
|
||||
// Define max execution time in ms for analyses or tags
|
||||
// If analysis or tag are not found in redis this values will be used as default.
|
||||
|
||||
@@ -16,47 +16,41 @@ var config = {
|
||||
// Base URLs for the APIs
|
||||
//
|
||||
// See https://github.com/CartoDB/Windshaft-cartodb/wiki/Unified-Map-API
|
||||
//
|
||||
// Note: each entry corresponds with an express' router.
|
||||
// You must define at least one path. However, middlewares are optional.
|
||||
,routes: {
|
||||
v1: {
|
||||
api: [{
|
||||
paths: [
|
||||
'/api/v1',
|
||||
'/user/:user/api/v1',
|
||||
],
|
||||
// Base url for the Detached Maps API
|
||||
// "/api/v1/map" is the new API,
|
||||
map: {
|
||||
paths: [
|
||||
'/map',
|
||||
]
|
||||
},
|
||||
// Base url for the Templated Maps API
|
||||
// "/api/v1/map/named" is the new API,
|
||||
template: {
|
||||
paths: [
|
||||
'/map/named'
|
||||
]
|
||||
}
|
||||
},
|
||||
// For compatibility with versions up to 1.6.x
|
||||
v0: {
|
||||
paths: [
|
||||
'/tiles'
|
||||
// Optional: attach middlewares at the begining of the router
|
||||
// to perform custom operations.
|
||||
middlewares: [
|
||||
function noop () {
|
||||
return function noopMiddleware (req, res, next) {
|
||||
next();
|
||||
}
|
||||
}
|
||||
],
|
||||
// Base url for the Detached Maps API
|
||||
// "/tiles/layergroup" is for compatibility with versions up to 1.6.x
|
||||
map: {
|
||||
// "/api/v1/map" is the new API,
|
||||
map: [{
|
||||
paths: [
|
||||
'/layergroup'
|
||||
]
|
||||
},
|
||||
'/map',
|
||||
],
|
||||
middlewares: [] // Optional
|
||||
}],
|
||||
// Base url for the Templated Maps API
|
||||
// "/tiles/template" is for compatibility with versions up to 1.6.x
|
||||
template: {
|
||||
// "/api/v1/map/named" is the new API,
|
||||
template: [{
|
||||
paths: [
|
||||
'/template'
|
||||
]
|
||||
}
|
||||
}
|
||||
'/map/named'
|
||||
],
|
||||
middlewares: [] // Optional
|
||||
}]
|
||||
}]
|
||||
}
|
||||
|
||||
// Resource URLs expose endpoints to request/retrieve metadata associated to Maps: dataviews, analysis node status.
|
||||
@@ -93,6 +87,7 @@ var config = {
|
||||
// there, in append mode. Otherwise stdout is used (default).
|
||||
// Log file will be re-opened on receiving the HUP signal
|
||||
,log_filename: 'logs/node-windshaft.log'
|
||||
,log_windshaft: true
|
||||
// Templated database username for authorized user
|
||||
// Supported labels: 'user_id' (read from redis)
|
||||
,postgres_auth_user: 'cartodb_user_<%= user_id %>'
|
||||
|
||||
@@ -16,47 +16,41 @@ var config = {
|
||||
// Base URLs for the APIs
|
||||
//
|
||||
// See https://github.com/CartoDB/Windshaft-cartodb/wiki/Unified-Map-API
|
||||
//
|
||||
// Note: each entry corresponds with an express' router.
|
||||
// You must define at least one path. However, middlewares are optional.
|
||||
,routes: {
|
||||
v1: {
|
||||
api: [{
|
||||
paths: [
|
||||
'/api/v1',
|
||||
'/user/:user/api/v1',
|
||||
],
|
||||
// Base url for the Detached Maps API
|
||||
// "/api/v1/map" is the new API,
|
||||
map: {
|
||||
paths: [
|
||||
'/map',
|
||||
]
|
||||
},
|
||||
// Base url for the Templated Maps API
|
||||
// "/api/v1/map/named" is the new API,
|
||||
template: {
|
||||
paths: [
|
||||
'/map/named'
|
||||
]
|
||||
}
|
||||
},
|
||||
// For compatibility with versions up to 1.6.x
|
||||
v0: {
|
||||
paths: [
|
||||
'/tiles'
|
||||
// Optional: attach middlewares at the begining of the router
|
||||
// to perform custom operations.
|
||||
middlewares: [
|
||||
function noop () {
|
||||
return function noopMiddleware (req, res, next) {
|
||||
next();
|
||||
}
|
||||
}
|
||||
],
|
||||
// Base url for the Detached Maps API
|
||||
// "/tiles/layergroup" is for compatibility with versions up to 1.6.x
|
||||
map: {
|
||||
// "/api/v1/map" is the new API,
|
||||
map: [{
|
||||
paths: [
|
||||
'/layergroup'
|
||||
]
|
||||
},
|
||||
'/map',
|
||||
],
|
||||
middlewares: [] // Optional
|
||||
}],
|
||||
// Base url for the Templated Maps API
|
||||
// "/tiles/template" is for compatibility with versions up to 1.6.x
|
||||
template: {
|
||||
// "/api/v1/map/named" is the new API,
|
||||
template: [{
|
||||
paths: [
|
||||
'/template'
|
||||
]
|
||||
}
|
||||
}
|
||||
'/map/named'
|
||||
],
|
||||
middlewares: [] // Optional
|
||||
}]
|
||||
}]
|
||||
}
|
||||
|
||||
// Resource URLs expose endpoints to request/retrieve metadata associated to Maps: dataviews, analysis node status.
|
||||
@@ -93,6 +87,7 @@ var config = {
|
||||
// there, in append mode. Otherwise stdout is used (default).
|
||||
// Log file will be re-opened on receiving the HUP signal
|
||||
,log_filename: 'logs/node-windshaft.log'
|
||||
,log_windshaft: true
|
||||
// Templated database username for authorized user
|
||||
// Supported labels: 'user_id' (read from redis)
|
||||
,postgres_auth_user: 'cartodb_staging_user_<%= user_id %>'
|
||||
|
||||
@@ -16,47 +16,41 @@ var config = {
|
||||
// Base URLs for the APIs
|
||||
//
|
||||
// See https://github.com/CartoDB/Windshaft-cartodb/wiki/Unified-Map-API
|
||||
//
|
||||
// Note: each entry corresponds with an express' router.
|
||||
// You must define at least one path. However, middlewares are optional.
|
||||
,routes: {
|
||||
v1: {
|
||||
api: [{
|
||||
paths: [
|
||||
'/api/v1',
|
||||
'/user/:user/api/v1',
|
||||
],
|
||||
// Base url for the Detached Maps API
|
||||
// "/api/v1/map" is the new API,
|
||||
map: {
|
||||
paths: [
|
||||
'/map',
|
||||
]
|
||||
},
|
||||
// Base url for the Templated Maps API
|
||||
// "/api/v1/map/named" is the new API,
|
||||
template: {
|
||||
paths: [
|
||||
'/map/named'
|
||||
]
|
||||
}
|
||||
},
|
||||
// For compatibility with versions up to 1.6.x
|
||||
v0: {
|
||||
paths: [
|
||||
'/tiles'
|
||||
// Optional: attach middlewares at the begining of the router
|
||||
// to perform custom operations.
|
||||
middlewares: [
|
||||
function noop () {
|
||||
return function noopMiddleware (req, res, next) {
|
||||
next();
|
||||
}
|
||||
}
|
||||
],
|
||||
// Base url for the Detached Maps API
|
||||
// "/tiles/layergroup" is for compatibility with versions up to 1.6.x
|
||||
map: {
|
||||
// "/api/v1/map" is the new API,
|
||||
map: [{
|
||||
paths: [
|
||||
'/layergroup'
|
||||
]
|
||||
},
|
||||
'/map',
|
||||
],
|
||||
middlewares: [] // Optional
|
||||
}],
|
||||
// Base url for the Templated Maps API
|
||||
// "/tiles/template" is for compatibility with versions up to 1.6.x
|
||||
template: {
|
||||
// "/api/v1/map/named" is the new API,
|
||||
template: [{
|
||||
paths: [
|
||||
'/template'
|
||||
]
|
||||
}
|
||||
}
|
||||
'/map/named'
|
||||
],
|
||||
middlewares: [] // Optional
|
||||
}]
|
||||
}]
|
||||
}
|
||||
|
||||
// Resource URLs expose endpoints to request/retrieve metadata associated to Maps: dataviews, analysis node status.
|
||||
@@ -93,6 +87,7 @@ var config = {
|
||||
// there, in append mode. Otherwise stdout is used (default).
|
||||
// Log file will be re-opened on receiving the HUP signal
|
||||
,log_filename: '/tmp/node-windshaft.log'
|
||||
,log_windshaft: true
|
||||
// Templated database username for authorized user
|
||||
// Supported labels: 'user_id' (read from redis)
|
||||
,postgres_auth_user: 'test_windshaft_cartodb_user_<%= user_id %>'
|
||||
|
||||
configure (vendored, deleted, 81 lines)
@@ -1,81 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
#
|
||||
# This script creates config/environments/*.js files using
|
||||
# config/environments/*.js.example files as input and performing
|
||||
# settings substitutions.
|
||||
#
|
||||
# It relies on a known format of the .js.example files which haven't
|
||||
# been made easier to parse to still let humans copy them manually and
|
||||
# do further editing or leave them as such to get the same setup as before
|
||||
# the introduction of this script.
|
||||
#
|
||||
# The script is a work in progress. Available switches are printed
|
||||
# by invoking with the --help switch. More switches will be added
|
||||
# as the need/request for them arises.
|
||||
#
|
||||
# --strk(2012-07-23)
|
||||
#
|
||||
|
||||
ENVDIR=config/environments
|
||||
|
||||
PGPORT=
|
||||
MAPNIK_VERSION=
|
||||
ENVIRONMENT=development
|
||||
|
||||
STATUS="$0 $*"
|
||||
|
||||
usage() {
|
||||
echo "Usage: $0 [OPTION]"
|
||||
echo
|
||||
echo "Configuration:"
|
||||
echo " --help display this help and exit"
|
||||
echo " --with-pgport=NUM access PostgreSQL server on TCP port NUM [$PGPORT]"
|
||||
echo " --with-mapnik-version=STRING set mapnik version string [$MAPNIK_VERSION]"
|
||||
echo " --environment=STRING set output environment name [$ENVIRONMENT]"
|
||||
}
|
||||
|
||||
while test -n "$1"; do
|
||||
case "$1" in
|
||||
--help|-h)
|
||||
usage
|
||||
exit 0
|
||||
;;
|
||||
--with-pgport=*)
|
||||
PGPORT=`echo "$1" | cut -d= -f2`
|
||||
;;
|
||||
--with-mapnik-version=*)
|
||||
MAPNIK_VERSION=`echo "$1" | cut -d= -f2`
|
||||
;;
|
||||
--environment=*)
|
||||
ENVIRONMENT=`echo "$1" | cut -d= -f2`
|
||||
;;
|
||||
*)
|
||||
echo "Unused option '$1'" >&2
|
||||
;;
|
||||
esac
|
||||
shift
|
||||
done
|
||||
|
||||
ENVEX=./${ENVDIR}/${ENVIRONMENT}.js.example
|
||||
|
||||
if [ -z "$PGPORT" ]; then
|
||||
PGPORT=`node -e "console.log(require('${ENVEX}').postgres.port)"`
|
||||
fi
|
||||
|
||||
echo "PGPORT: $PGPORT"
|
||||
echo "MAPNIK_VERSION: $MAPNIK_VERSION"
|
||||
echo "ENVIRONMENT: $ENVIRONMENT"
|
||||
|
||||
o=`dirname "${ENVEX}"`/`basename "${ENVEX}" .example`
|
||||
echo "Writing $o"
|
||||
|
||||
# See http://austinmatzko.com/2008/04/26/sed-multi-line-search-and-replace/
|
||||
sed -n "1h;1!H;\${;g;s/\(,postgres: {[^}]*port: *'\?\)[^',]*\('\?,\)/\1$PGPORT\2/;p;}" < "${ENVEX}" \
|
||||
| sed "s/mapnik_version:.*/mapnik_version: '$MAPNIK_VERSION'/" \
|
||||
> "$o"
|
||||
|
||||
STATUSFILE=config.status--${ENVIRONMENT}
|
||||
echo "Writing ${STATUSFILE}"
|
||||
echo ${STATUS} > ${STATUSFILE} && chmod +x ${STATUSFILE}
|
||||
|
||||
@@ -1,13 +0,0 @@
#!/bin/bash

echo "*********************"
echo "To install Node.js, run:"
echo "/src/nodejs-install.sh"
echo "Use NODEJS_VERSION env var to select the Node.js version"
echo " "
echo "To start postgres, run:"
echo "/etc/init.d/postgresql start"
echo "*********************"
echo " "

docker run -it -v `pwd`:/srv carto/nodejs-xenial-pg101:latest bash

@@ -1,4 +0,0 @@
#!/bin/bash

docker run -e "NODEJS_VERSION=${2}" -v `pwd`:/srv ${1} bash run_tests_docker.sh && \
docker ps --filter status=dead --filter status=exited -aq | xargs docker rm -v
@@ -1,88 +0,0 @@
|
||||
FROM ubuntu:xenial
|
||||
|
||||
# Use UTF8 to avoid encoding problems with pgsql
|
||||
ENV LANG C.UTF-8
|
||||
ENV NPROCS 1
|
||||
ENV JOBS 1
|
||||
ENV CXX g++-4.9
|
||||
ENV PGUSER postgres
|
||||
|
||||
# Add external repos
|
||||
RUN set -ex \
|
||||
&& apt-get update \
|
||||
&& apt-get install -y \
|
||||
curl \
|
||||
software-properties-common \
|
||||
locales \
|
||||
&& add-apt-repository -y ppa:ubuntu-toolchain-r/test \
|
||||
&& add-apt-repository -y ppa:cartodb/postgresql-10 \
|
||||
&& add-apt-repository -y ppa:cartodb/gis \
|
||||
&& curl -sL https://deb.nodesource.com/setup_10.x | bash \
|
||||
&& locale-gen en_US.UTF-8 \
|
||||
&& update-locale LANG=en_US.UTF-8
|
||||
|
||||
RUN set -ex \
|
||||
&& apt-get update \
|
||||
&& apt-get install -y \
|
||||
g++-4.9 \
|
||||
gcc-4.9 \
|
||||
git \
|
||||
libcairo2-dev \
|
||||
libgdal-dev \
|
||||
libgdal1i \
|
||||
libgdal20 \
|
||||
libgeos-dev \
|
||||
libgif-dev \
|
||||
libjpeg8-dev \
|
||||
libjson-c-dev \
|
||||
libpango1.0-dev \
|
||||
libpixman-1-dev \
|
||||
libproj-dev \
|
||||
libprotobuf-c-dev \
|
||||
libxml2-dev \
|
||||
gdal-bin \
|
||||
make \
|
||||
nodejs \
|
||||
protobuf-c-compiler \
|
||||
pkg-config \
|
||||
wget \
|
||||
zip \
|
||||
postgresql-10 \
|
||||
postgresql-10-plproxy \
|
||||
postgis=2.4.4.5+carto-1 \
|
||||
postgresql-10-postgis-2.4=2.4.4.5+carto-1 \
|
||||
postgresql-10-postgis-2.4-scripts=2.4.4.5+carto-1 \
|
||||
postgresql-10-postgis-scripts=2.4.4.5+carto-1 \
|
||||
postgresql-client-10 \
|
||||
postgresql-client-common \
|
||||
postgresql-common \
|
||||
postgresql-contrib \
|
||||
postgresql-plpython-10 \
|
||||
postgresql-server-dev-10 \
|
||||
&& wget http://download.redis.io/releases/redis-4.0.8.tar.gz \
|
||||
&& tar xvzf redis-4.0.8.tar.gz \
|
||||
&& cd redis-4.0.8 \
|
||||
&& make \
|
||||
&& make install \
|
||||
&& cd .. \
|
||||
&& rm redis-4.0.8.tar.gz \
|
||||
&& rm -R redis-4.0.8 \
|
||||
&& apt-get purge -y wget protobuf-c-compiler \
|
||||
&& apt-get autoremove -y
|
||||
|
||||
# Configure PostgreSQL
|
||||
RUN set -ex \
|
||||
&& echo "listen_addresses='*'" >> /etc/postgresql/10/main/postgresql.conf \
|
||||
&& echo "local all all trust" > /etc/postgresql/10/main/pg_hba.conf \
|
||||
&& echo "host all all 0.0.0.0/0 trust" >> /etc/postgresql/10/main/pg_hba.conf \
|
||||
&& echo "host all all ::1/128 trust" >> /etc/postgresql/10/main/pg_hba.conf \
|
||||
&& /etc/init.d/postgresql start \
|
||||
&& createdb template_postgis \
|
||||
&& createuser publicuser \
|
||||
&& psql -c "CREATE EXTENSION postgis" template_postgis \
|
||||
&& /etc/init.d/postgresql stop
|
||||
|
||||
WORKDIR /srv
|
||||
EXPOSE 5858
|
||||
|
||||
CMD /etc/init.d/postgresql start
|
||||
@@ -1,89 +0,0 @@
|
||||
FROM ubuntu:xenial
|
||||
|
||||
# Use UTF8 to avoid encoding problems with pgsql
|
||||
ENV LANG C.UTF-8
|
||||
ENV NPROCS 1
|
||||
ENV JOBS 1
|
||||
ENV CXX g++-4.9
|
||||
ENV PGUSER postgres
|
||||
|
||||
# Add external repos
|
||||
RUN set -ex \
|
||||
&& apt-get update \
|
||||
&& apt-get install -y \
|
||||
curl \
|
||||
software-properties-common \
|
||||
locales \
|
||||
&& add-apt-repository -y ppa:ubuntu-toolchain-r/test \
|
||||
&& add-apt-repository -y ppa:cartodb/postgresql-10 \
|
||||
&& add-apt-repository -y ppa:cartodb/gis \
|
||||
&& curl -sL https://deb.nodesource.com/setup_6.x | bash \
|
||||
&& locale-gen en_US.UTF-8 \
|
||||
&& update-locale LANG=en_US.UTF-8
|
||||
|
||||
# Install dependencies and PostGIS 2.4 from sources
|
||||
RUN set -ex \
|
||||
&& apt-get update \
|
||||
&& apt-get install -y \
|
||||
g++-4.9 \
|
||||
gcc-4.9 \
|
||||
git \
|
||||
libcairo2-dev \
|
||||
libgdal-dev \
|
||||
libgdal1i \
|
||||
libgdal20 \
|
||||
libgeos-dev \
|
||||
libgif-dev \
|
||||
libjpeg8-dev \
|
||||
libjson-c-dev \
|
||||
libpango1.0-dev \
|
||||
libpixman-1-dev \
|
||||
libproj-dev \
|
||||
libprotobuf-c-dev \
|
||||
libxml2-dev \
|
||||
gdal-bin \
|
||||
make \
|
||||
nodejs \
|
||||
protobuf-c-compiler \
|
||||
pkg-config \
|
||||
wget \
|
||||
zip \
|
||||
postgresql-10 \
|
||||
postgresql-10-plproxy \
|
||||
postgresql-10-postgis-2.4 \
|
||||
postgresql-10-postgis-2.4-scripts \
|
||||
postgresql-10-postgis-scripts \
|
||||
postgresql-client-10 \
|
||||
postgresql-client-common \
|
||||
postgresql-common \
|
||||
postgresql-contrib \
|
||||
postgresql-plpython-10 \
|
||||
postgresql-server-dev-10 \
|
||||
postgis \
|
||||
&& wget http://download.redis.io/releases/redis-4.0.8.tar.gz \
|
||||
&& tar xvzf redis-4.0.8.tar.gz \
|
||||
&& cd redis-4.0.8 \
|
||||
&& make \
|
||||
&& make install \
|
||||
&& cd .. \
|
||||
&& rm redis-4.0.8.tar.gz \
|
||||
&& rm -R redis-4.0.8 \
|
||||
&& apt-get purge -y wget protobuf-c-compiler \
|
||||
&& apt-get autoremove -y
|
||||
|
||||
# Configure PostgreSQL
|
||||
RUN set -ex \
|
||||
&& echo "listen_addresses='*'" >> /etc/postgresql/10/main/postgresql.conf \
|
||||
&& echo "local all all trust" > /etc/postgresql/10/main/pg_hba.conf \
|
||||
&& echo "host all all 0.0.0.0/0 trust" >> /etc/postgresql/10/main/pg_hba.conf \
|
||||
&& echo "host all all ::1/128 trust" >> /etc/postgresql/10/main/pg_hba.conf \
|
||||
&& /etc/init.d/postgresql start \
|
||||
&& createdb template_postgis \
|
||||
&& createuser publicuser \
|
||||
&& psql -c "CREATE EXTENSION postgis" template_postgis \
|
||||
&& /etc/init.d/postgresql stop
|
||||
|
||||
WORKDIR /srv
|
||||
EXPOSE 5858
|
||||
|
||||
CMD /etc/init.d/postgresql start
|
||||
@@ -1,89 +0,0 @@
|
||||
FROM ubuntu:xenial
|
||||
|
||||
# Use UTF8 to avoid encoding problems with pgsql
|
||||
ENV LANG C.UTF-8
|
||||
ENV NPROCS 1
|
||||
ENV JOBS 1
|
||||
ENV CXX g++-4.9
|
||||
ENV PGUSER postgres
|
||||
|
||||
# Add external repos
|
||||
RUN set -ex \
|
||||
&& apt-get update \
|
||||
&& apt-get install -y \
|
||||
curl \
|
||||
software-properties-common \
|
||||
locales \
|
||||
&& add-apt-repository -y ppa:ubuntu-toolchain-r/test \
|
||||
&& add-apt-repository -y ppa:cartodb/postgresql-10 \
|
||||
&& add-apt-repository -y ppa:cartodb/gis \
|
||||
&& curl -sL https://deb.nodesource.com/setup_6.x | bash \
|
||||
&& locale-gen en_US.UTF-8 \
|
||||
&& update-locale LANG=en_US.UTF-8
|
||||
|
||||
# Install dependencies and PostGIS 2.4 from sources
|
||||
RUN set -ex \
|
||||
&& apt-get update \
|
||||
&& apt-get install -y \
|
||||
g++-4.9 \
|
||||
gcc-4.9 \
|
||||
git \
|
||||
libcairo2-dev \
|
||||
libgdal-dev \
|
||||
libgdal1i \
|
||||
libgdal20 \
|
||||
libgeos-dev \
|
||||
libgif-dev \
|
||||
libjpeg8-dev \
|
||||
libjson-c-dev \
|
||||
libpango1.0-dev \
|
||||
libpixman-1-dev \
|
||||
libproj-dev \
|
||||
libprotobuf-c-dev \
|
||||
libxml2-dev \
|
||||
gdal-bin \
|
||||
make \
|
||||
nodejs \
|
||||
protobuf-c-compiler \
|
||||
pkg-config \
|
||||
wget \
|
||||
zip \
|
||||
postgresql-10 \
|
||||
postgresql-10-plproxy \
|
||||
postgresql-10-postgis-2.4 \
|
||||
postgresql-10-postgis-2.4-scripts \
|
||||
postgresql-10-postgis-scripts \
|
||||
postgresql-client-10 \
|
||||
postgresql-client-common \
|
||||
postgresql-common \
|
||||
postgresql-contrib \
|
||||
postgresql-plpython-10 \
|
||||
postgresql-server-dev-10 \
|
||||
postgis \
|
||||
&& wget http://download.redis.io/releases/redis-4.0.8.tar.gz \
|
||||
&& tar xvzf redis-4.0.8.tar.gz \
|
||||
&& cd redis-4.0.8 \
|
||||
&& make \
|
||||
&& make install \
|
||||
&& cd .. \
|
||||
&& rm redis-4.0.8.tar.gz \
|
||||
&& rm -R redis-4.0.8 \
|
||||
&& apt-get purge -y wget protobuf-c-compiler \
|
||||
&& apt-get autoremove -y
|
||||
|
||||
# Configure PostgreSQL
|
||||
RUN set -ex \
|
||||
&& echo "listen_addresses='*'" >> /etc/postgresql/10/main/postgresql.conf \
|
||||
&& echo "local all all trust" > /etc/postgresql/10/main/pg_hba.conf \
|
||||
&& echo "host all all 0.0.0.0/0 trust" >> /etc/postgresql/10/main/pg_hba.conf \
|
||||
&& echo "host all all ::1/128 trust" >> /etc/postgresql/10/main/pg_hba.conf \
|
||||
&& /etc/init.d/postgresql start \
|
||||
&& createdb template_postgis \
|
||||
&& createuser publicuser \
|
||||
&& psql -c "CREATE EXTENSION postgis" template_postgis \
|
||||
&& /etc/init.d/postgresql stop
|
||||
|
||||
WORKDIR /srv
|
||||
EXPOSE 5858
|
||||
|
||||
CMD /etc/init.d/postgresql start
|
||||
@@ -1,88 +0,0 @@
FROM ubuntu:xenial

# Use UTF8 to avoid encoding problems with pgsql
ENV LANG C.UTF-8
ENV NPROCS 1
ENV JOBS 1
ENV CXX g++-4.9
ENV PGUSER postgres

# Add external repos
RUN set -ex \
&& apt-get update \
&& apt-get install -y \
curl \
software-properties-common \
locales \
&& add-apt-repository -y ppa:ubuntu-toolchain-r/test \
&& add-apt-repository -y ppa:cartodb/postgresql-10 \
&& add-apt-repository -y ppa:cartodb/gis \
&& curl -sL https://deb.nodesource.com/setup_6.x | bash \
&& locale-gen en_US.UTF-8 \
&& update-locale LANG=en_US.UTF-8

RUN set -ex \
&& apt-get update \
&& apt-get install -y \
g++-4.9 \
gcc-4.9 \
git \
libcairo2-dev \
libgdal-dev \
libgdal1i \
libgdal20 \
libgeos-dev \
libgif-dev \
libjpeg8-dev \
libjson-c-dev \
libpango1.0-dev \
libpixman-1-dev \
libproj-dev \
libprotobuf-c-dev \
libxml2-dev \
gdal-bin \
make \
nodejs \
protobuf-c-compiler \
pkg-config \
wget \
zip \
postgresql-10 \
postgresql-10-plproxy \
postgis=2.4.4.5+carto-1 \
postgresql-10-postgis-2.4=2.4.4.5+carto-1 \
postgresql-10-postgis-2.4-scripts=2.4.4.5+carto-1 \
postgresql-10-postgis-scripts=2.4.4.5+carto-1 \
postgresql-client-10 \
postgresql-client-common \
postgresql-common \
postgresql-contrib \
postgresql-plpython-10 \
postgresql-server-dev-10 \
&& wget http://download.redis.io/releases/redis-4.0.8.tar.gz \
&& tar xvzf redis-4.0.8.tar.gz \
&& cd redis-4.0.8 \
&& make \
&& make install \
&& cd .. \
&& rm redis-4.0.8.tar.gz \
&& rm -R redis-4.0.8 \
&& apt-get purge -y wget protobuf-c-compiler \
&& apt-get autoremove -y

# Configure PostgreSQL
RUN set -ex \
&& echo "listen_addresses='*'" >> /etc/postgresql/10/main/postgresql.conf \
&& echo "local all all trust" > /etc/postgresql/10/main/pg_hba.conf \
&& echo "host all all 0.0.0.0/0 trust" >> /etc/postgresql/10/main/pg_hba.conf \
&& echo "host all all ::1/128 trust" >> /etc/postgresql/10/main/pg_hba.conf \
&& /etc/init.d/postgresql start \
&& createdb template_postgis \
&& createuser publicuser \
&& psql -c "CREATE EXTENSION postgis" template_postgis \
&& /etc/init.d/postgresql stop

WORKDIR /srv
EXPOSE 5858

CMD /etc/init.d/postgresql start
@@ -1,23 +1,33 @@
After running the tests with docker, you will need Docker installed and the docker image downloaded.
# Testing with Docker

Before running the tests with docker, you'll need Docker installed and the docker image downloaded.

## Install docker
`sudo apt install docker.io && sudo usermod -aG docker $(whoami)`

```shell
$ sudo apt install docker.io && sudo usermod -aG docker $(whoami)
```

## Download image
`docker pull carto/IMAGE`

```shell
docker pull carto/IMAGE
```

## Carto account
https://hub.docker.com/r/carto/

* `https://hub.docker.com/r/carto/`

## Update image
- Edit the docker image file with your desired changes
- Build image:
- `docker build -t carto/IMAGE -f docker/DOCKER_FILE docker/`

- Upload to docker hub:
- Login into docker hub:
- `docker login`
- Create tag:
- `docker tag carto/IMAGE carto/IMAGE`
- Upload:
- `docker push carto/IMAGE`
* Edit the docker image file
* Build image:
* `docker build -t carto/IMAGE -f docker/DOCKER_FILE docker/`

* Upload to docker hub:
* Login into docker hub:
* `docker login`
* Create tag:
* `docker tag carto/IMAGE carto/IMAGE`
* Upload:
* `docker push carto/IMAGE`

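The update steps above amount to one short session; a minimal sketch that keeps the docs' `IMAGE` and `DOCKER_FILE` placeholders (substitute the real image name and Dockerfile):

```shell
# Rebuild the image from the Dockerfile under docker/, then publish it.
docker build -t carto/IMAGE -f docker/DOCKER_FILE docker/

# docker tag is only needed when the build tag differs from the tag being pushed.
docker login
docker push carto/IMAGE
```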
@@ -8,15 +8,7 @@ source /src/nodejs-install.sh
git clone https://github.com/CartoDB/cartodb-postgresql.git
cd cartodb-postgresql && make && make install && cd ..

echo "Node.js version: "
node -v
cp config/environments/test.js.example config/environments/test.js

echo "npm version: "
npm -v

echo "Clean install: "
npm ci
npm ls

# run tests
npm test
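The script in this hunk runs inside the image from the previous section; a minimal sketch of invoking it from a checkout, keeping the `IMAGE` placeholder and assuming a hypothetical script path `docker/test.sh` (the real path is not shown in this hunk):

```shell
# Mount the checkout at /srv (the image's WORKDIR) and run the test script inside the container.
docker run --rm -v "$(pwd)":/srv -w /srv carto/IMAGE bash docker/test.sh
```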
@@ -1,5 +1,7 @@
|
||||
'use strict';
|
||||
|
||||
const path = require('path');
|
||||
|
||||
const { Router: router } = require('express');
|
||||
|
||||
const RedisPool = require('redis-mpool');
|
||||
@@ -7,12 +9,12 @@ const cartodbRedis = require('cartodb-redis');
|
||||
|
||||
const windshaft = require('windshaft');
|
||||
|
||||
const PgConnection = require('../backends/pg_connection');
|
||||
const PgConnection = require('../backends/pg-connection');
|
||||
const AnalysisBackend = require('../backends/analysis');
|
||||
const AnalysisStatusBackend = require('../backends/analysis-status');
|
||||
const DataviewBackend = require('../backends/dataview');
|
||||
const TemplateMaps = require('../backends/template_maps.js');
|
||||
const PgQueryRunner = require('../backends/pg_query_runner');
|
||||
const TemplateMaps = require('../backends/template-maps');
|
||||
const PgQueryRunner = require('../backends/pg-query-runner');
|
||||
const StatsBackend = require('../backends/stats');
|
||||
const AuthBackend = require('../backends/auth');
|
||||
|
||||
@@ -23,12 +25,12 @@ const TablesExtentBackend = require('../backends/tables-extent');
|
||||
|
||||
const ClusterBackend = require('../backends/cluster');
|
||||
|
||||
const LayergroupAffectedTablesCache = require('../cache/layergroup_affected_tables');
|
||||
const SurrogateKeysCache = require('../cache/surrogate_keys_cache');
|
||||
const VarnishHttpCacheBackend = require('../cache/backend/varnish_http');
|
||||
const LayergroupAffectedTablesCache = require('../cache/layergroup-affected-tables');
|
||||
const SurrogateKeysCache = require('../cache/surrogate-keys-cache');
|
||||
const VarnishHttpCacheBackend = require('../cache/backend/varnish-http');
|
||||
const FastlyCacheBackend = require('../cache/backend/fastly');
|
||||
const NamedMapProviderCache = require('../cache/named_map_provider_cache');
|
||||
const NamedMapsCacheEntry = require('../cache/model/named_maps_entry');
|
||||
const NamedMapProviderCache = require('../cache/named-map-provider-cache');
|
||||
const NamedMapsCacheEntry = require('../cache/model/named-maps-entry');
|
||||
const NamedMapProviderReporter = require('../stats/reporter/named-map-provider');
|
||||
|
||||
const SqlWrapMapConfigAdapter = require('../models/mapconfig/adapter/sql-wrap-mapconfig-adapter');
|
||||
@@ -83,9 +85,13 @@ module.exports = class ApiRouter {
|
||||
const metadataBackend = cartodbRedis({ pool: redisPool });
|
||||
const pgConnection = new PgConnection(metadataBackend);
|
||||
|
||||
const windshaftLogger = environmentOptions.log_windshaft && global.log4js
|
||||
? global.log4js.getLogger('[windshaft]')
|
||||
: null;
|
||||
const mapStore = new windshaft.storage.MapStore({
|
||||
pool: redisPool,
|
||||
expire_time: serverOptions.grainstore.default_layergroup_ttl
|
||||
expire_time: serverOptions.grainstore.default_layergroup_ttl,
|
||||
logger: windshaftLogger
|
||||
});
|
||||
|
||||
const rendererFactory = createRendererFactory({ redisPool, serverOptions, environmentOptions });
|
||||
@@ -196,16 +202,17 @@ module.exports = class ApiRouter {
|
||||
this.templateRouter = new TemplateRouter({ collaborators });
|
||||
}
|
||||
|
||||
register (app) {
|
||||
route (app, routes) {
|
||||
// FIXME: we need a better way to reset cache while running tests
|
||||
if (process.env.NODE_ENV === 'test') {
|
||||
app.layergroupAffectedTablesCache = this.layergroupAffectedTablesCache;
|
||||
}
|
||||
|
||||
Object.keys(this.serverOptions.routes).forEach(apiVersion => {
|
||||
const routes = this.serverOptions.routes[apiVersion];
|
||||
|
||||
routes.forEach(route => {
|
||||
const apiRouter = router({ mergeParams: true });
|
||||
const { paths, middlewares = [] } = route;
|
||||
|
||||
middlewares.forEach(middleware => apiRouter.use(middleware()));
|
||||
|
||||
apiRouter.use(logger(this.serverOptions));
|
||||
apiRouter.use(initializeStatusCode());
|
||||
@@ -219,21 +226,18 @@ module.exports = class ApiRouter {
|
||||
apiRouter.use(cors());
|
||||
apiRouter.use(user());
|
||||
|
||||
this.templateRouter.register(apiRouter, routes.template.paths);
|
||||
this.mapRouter.register(apiRouter, routes.map.paths);
|
||||
this.templateRouter.route(apiRouter, route.template);
|
||||
this.mapRouter.route(apiRouter, route.map);
|
||||
|
||||
apiRouter.use(sendResponse());
|
||||
apiRouter.use(syntaxError());
|
||||
apiRouter.use(errorMiddleware());
|
||||
|
||||
const apiPaths = routes.paths;
|
||||
|
||||
apiPaths.forEach(path => app.use(path, apiRouter));
|
||||
paths.forEach(path => app.use(path, apiRouter));
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
function createTemplateMaps ({ redisPool, surrogateKeysCache }) {
|
||||
const templateMaps = new TemplateMaps(redisPool, {
|
||||
max_user_templates: global.environment.maxUserTemplates
|
||||
@@ -241,12 +245,12 @@ function createTemplateMaps ({ redisPool, surrogateKeysCache }) {
|
||||
|
||||
function invalidateNamedMap (owner, templateName) {
|
||||
var startTime = Date.now();
|
||||
surrogateKeysCache.invalidate(new NamedMapsCacheEntry(owner, templateName), function(err) {
|
||||
surrogateKeysCache.invalidate(new NamedMapsCacheEntry(owner, templateName), function (err) {
|
||||
var logMessage = JSON.stringify({
|
||||
username: owner,
|
||||
type: 'named_map_invalidation',
|
||||
elapsed: Date.now() - startTime,
|
||||
error: !!err ? JSON.stringify(err.message) : undefined
|
||||
error: err ? JSON.stringify(err.message) : undefined
|
||||
});
|
||||
if (err) {
|
||||
global.logger.warn(logMessage);
|
||||
@@ -256,15 +260,14 @@ function createTemplateMaps ({ redisPool, surrogateKeysCache }) {
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
['update', 'delete'].forEach(function(eventType) {
|
||||
['update', 'delete'].forEach(function (eventType) {
|
||||
templateMaps.on(eventType, invalidateNamedMap);
|
||||
});
|
||||
|
||||
return templateMaps;
|
||||
}
|
||||
|
||||
function createSurrogateKeysCacheBackends(serverOptions) {
|
||||
function createSurrogateKeysCacheBackends (serverOptions) {
|
||||
var cacheBackends = [];
|
||||
|
||||
if (serverOptions.varnish_purge_enabled) {
|
||||
@@ -283,14 +286,13 @@ function createSurrogateKeysCacheBackends(serverOptions) {
|
||||
return cacheBackends;
|
||||
}
|
||||
|
||||
const timeoutErrorTilePath = __dirname + '/../../../assets/render-timeout-fallback.png';
|
||||
const timeoutErrorTile = require('fs').readFileSync(timeoutErrorTilePath, {encoding: null});
|
||||
const timeoutErrorTilePath = path.join(__dirname, '/../../assets/render-timeout-fallback.png');
|
||||
const timeoutErrorTile = require('fs').readFileSync(timeoutErrorTilePath, { encoding: null });
|
||||
|
||||
function createRendererFactory ({ redisPool, serverOptions, environmentOptions }) {
|
||||
var onTileErrorStrategy;
|
||||
if (environmentOptions.enabledFeatures.onTileErrorStrategy !== false) {
|
||||
onTileErrorStrategy = function onTileErrorStrategy$TimeoutTile(err, tile, headers, stats, format, callback) {
|
||||
|
||||
onTileErrorStrategy = function onTileErrorStrategy$TimeoutTile (err, tile, headers, stats, format, callback) {
|
||||
function isRenderTimeoutError (err) {
|
||||
return err.message === 'Render timed out';
|
||||
}
|
||||
@@ -309,7 +311,7 @@ function createRendererFactory ({ redisPool, serverOptions, environmentOptions }
|
||||
|
||||
if (isTimeoutError(err) && isRasterFormat(format)) {
|
||||
return callback(null, timeoutErrorTile, {
|
||||
'Content-Type': 'image/png',
|
||||
'Content-Type': 'image/png'
|
||||
}, {});
|
||||
} else {
|
||||
return callback(err, tile, headers, stats);
|
||||
@@ -17,7 +17,7 @@ module.exports = class AnalysesController {
|
||||
this.userLimitsBackend = userLimitsBackend;
|
||||
}
|
||||
|
||||
register (mapRouter) {
|
||||
route (mapRouter) {
|
||||
mapRouter.get('/analyses/catalog', this.middlewares());
|
||||
}
|
||||
|
||||
@@ -48,10 +48,10 @@ function createPGClient () {
|
||||
};
|
||||
}
|
||||
|
||||
function getDataFromQuery({ queryTemplate, key }) {
|
||||
function getDataFromQuery ({ queryTemplate, key }) {
|
||||
const readOnlyTransactionOn = true;
|
||||
|
||||
return function getCatalogMiddleware(req, res, next) {
|
||||
return function getCatalogMiddleware (req, res, next) {
|
||||
const { pg, user } = res.locals;
|
||||
const sql = queryTemplate({ _username: user });
|
||||
|
||||
@@ -82,27 +82,27 @@ function prepareResponse () {
|
||||
}, {});
|
||||
|
||||
const analysisCatalog = catalog.map(analysis => {
|
||||
if (analysisIdToTable.hasOwnProperty(analysis.node_id)) {
|
||||
if (Object.prototype.hasOwnProperty.call(analysisIdToTable, analysis.node_id)) {
|
||||
analysis.table = analysisIdToTable[analysis.node_id];
|
||||
}
|
||||
|
||||
return analysis;
|
||||
})
|
||||
.sort((analysisA, analysisB) => {
|
||||
if (!!analysisA.table && !!analysisB.table) {
|
||||
return analysisB.table.size - analysisA.table.size;
|
||||
}
|
||||
.sort((analysisA, analysisB) => {
|
||||
if (!!analysisA.table && !!analysisB.table) {
|
||||
return analysisB.table.size - analysisA.table.size;
|
||||
}
|
||||
|
||||
if (analysisA.table) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (analysisB.table) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (!!analysisA.table) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (!!analysisB.table) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
return -1;
|
||||
});
|
||||
});
|
||||
|
||||
res.statusCode = 200;
|
||||
res.body = { catalog: analysisCatalog };
|
||||
@@ -112,7 +112,7 @@ function prepareResponse () {
|
||||
}
|
||||
|
||||
function unauthorizedError () {
|
||||
return function unathorizedErrorMiddleware(err, req, res, next) {
|
||||
return function unathorizedErrorMiddleware (err, req, res, next) {
|
||||
if (err.message.match(/permission\sdenied/)) {
|
||||
err = new Error('Unauthorized');
|
||||
err.http_status = 401;
|
||||
@@ -123,7 +123,7 @@ function unauthorizedError () {
|
||||
}
|
||||
|
||||
const catalogQueryTpl = ctx => `
|
||||
SELECT analysis_def->>'type' as type, * FROM cdb_analysis_catalog WHERE username = '${ctx._username}'
|
||||
SELECT analysis_def->>'type' as type, * FROM cartodb.cdb_analysis_catalog WHERE username = '${ctx._username}'
|
||||
`;
|
||||
|
||||
var tablesQueryTpl = ctx => `
|
||||
@@ -17,7 +17,7 @@ module.exports = class AnalysisLayergroupController {
|
||||
this.authBackend = authBackend;
|
||||
}
|
||||
|
||||
register (mapRouter) {
|
||||
route (mapRouter) {
|
||||
mapRouter.get('/:token/analysis/node/:nodeId', this.middlewares());
|
||||
}
|
||||
|
||||
@@ -35,7 +35,7 @@ module.exports = class AnalysisLayergroupController {
|
||||
};
|
||||
|
||||
function analysisNodeStatus (analysisStatusBackend) {
|
||||
return function analysisNodeStatusMiddleware(req, res, next) {
|
||||
return function analysisNodeStatusMiddleware (req, res, next) {
|
||||
const { nodeId } = req.params;
|
||||
const dbParams = dbParamsFromResLocals(res.locals);
|
||||
|
||||
@@ -64,7 +64,7 @@ module.exports = class AnonymousMapController {
|
||||
this.layergroupMetadata = layergroupMetadata;
|
||||
}
|
||||
|
||||
register (mapRouter) {
|
||||
route (mapRouter) {
|
||||
mapRouter.options('/');
|
||||
mapRouter.get('/', this.middlewares());
|
||||
mapRouter.post('/', this.middlewares());
|
||||
@@ -87,7 +87,7 @@ module.exports = class AnonymousMapController {
|
||||
checkJsonContentType(),
|
||||
checkCreateLayergroup(),
|
||||
prepareAdapterMapConfig(this.mapConfigAdapter),
|
||||
createLayergroup (
|
||||
createLayergroup(
|
||||
this.mapBackend,
|
||||
this.userLimitsBackend,
|
||||
this.pgConnection,
|
||||
@@ -130,10 +130,10 @@ function checkCreateLayergroup () {
|
||||
}
|
||||
|
||||
function prepareAdapterMapConfig (mapConfigAdapter) {
|
||||
return function prepareAdapterMapConfigMiddleware(req, res, next) {
|
||||
return function prepareAdapterMapConfigMiddleware (req, res, next) {
|
||||
const requestMapConfig = req.body;
|
||||
|
||||
const { user, api_key } = res.locals;
|
||||
const { user, api_key: apiKey } = res.locals;
|
||||
const { dbuser, dbname, dbpassword, dbhost, dbport } = res.locals;
|
||||
const params = Object.assign({ dbuser, dbname, dbpassword, dbhost, dbport }, req.query);
|
||||
|
||||
@@ -149,31 +149,30 @@ function prepareAdapterMapConfig (mapConfigAdapter) {
|
||||
},
|
||||
batch: {
|
||||
username: user,
|
||||
apiKey: api_key
|
||||
apiKey
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
mapConfigAdapter.getMapConfig(user,
|
||||
requestMapConfig,
|
||||
params,
|
||||
context,
|
||||
(err, requestMapConfig, stats = { overviewsAddedToMapconfig : false }) => {
|
||||
req.profiler.done('anonymous.getMapConfig');
|
||||
requestMapConfig,
|
||||
params,
|
||||
context,
|
||||
(err, requestMapConfig, stats = { overviewsAddedToMapconfig: false }) => {
|
||||
req.profiler.done('anonymous.getMapConfig');
|
||||
|
||||
stats.mapType = 'anonymous';
|
||||
req.profiler.add(stats);
|
||||
stats.mapType = 'anonymous';
|
||||
req.profiler.add(stats);
|
||||
|
||||
if (err) {
|
||||
return next(err);
|
||||
}
|
||||
|
||||
if (err) {
|
||||
return next(err);
|
||||
}
|
||||
req.body = requestMapConfig;
|
||||
res.locals.context = context;
|
||||
|
||||
req.body = requestMapConfig;
|
||||
res.locals.context = context;
|
||||
|
||||
next();
|
||||
});
|
||||
next();
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
@@ -182,12 +181,17 @@ function createLayergroup (mapBackend, userLimitsBackend, pgConnection, affected
|
||||
const requestMapConfig = req.body;
|
||||
|
||||
const { context } = res.locals;
|
||||
const { user, cache_buster, api_key } = res.locals;
|
||||
const { user, cache_buster: cacheBuster, api_key: apiKey } = res.locals;
|
||||
const { dbuser, dbname, dbpassword, dbhost, dbport } = res.locals;
|
||||
|
||||
const params = {
|
||||
cache_buster, api_key,
|
||||
dbuser, dbname, dbpassword, dbhost, dbport
|
||||
cache_buster: cacheBuster,
|
||||
api_key: apiKey,
|
||||
dbuser,
|
||||
dbname,
|
||||
dbpassword,
|
||||
dbhost,
|
||||
dbport
|
||||
};
|
||||
|
||||
const datasource = context.datasource || Datasource.EmptyDatasource();
|
||||
@@ -32,7 +32,7 @@ module.exports = class AttributesLayergroupController {
|
||||
this.surrogateKeysCache = surrogateKeysCache;
|
||||
}
|
||||
|
||||
register (mapRouter) {
|
||||
route (mapRouter) {
|
||||
mapRouter.get('/:token/:layer/attributes/:fid', this.middlewares());
|
||||
}
|
||||
|
||||
@@ -70,8 +70,13 @@ function getFeatureAttributes (attributesBackend) {
|
||||
|
||||
const params = {
|
||||
token,
|
||||
dbuser, dbname, dbpassword, dbhost, dbport,
|
||||
layer, fid
|
||||
dbuser,
|
||||
dbname,
|
||||
dbpassword,
|
||||
dbhost,
|
||||
dbport,
|
||||
layer,
|
||||
fid
|
||||
};
|
||||
|
||||
attributesBackend.getFeatureAttributes(mapConfigProvider, params, false, (err, tile, stats = {}) => {
|
||||
@@ -32,7 +32,7 @@ module.exports = class AggregatedFeaturesLayergroupController {
|
||||
this.surrogateKeysCache = surrogateKeysCache;
|
||||
}
|
||||
|
||||
register (mapRouter) {
|
||||
route (mapRouter) {
|
||||
mapRouter.get('/:token/:layer/:z/cluster/:clusterId', this.middlewares());
|
||||
}
|
||||
|
||||
@@ -44,7 +44,7 @@ module.exports = class AggregatedFeaturesLayergroupController {
|
||||
dbConnSetup(this.pgConnection),
|
||||
// TODO: create its rate limit
|
||||
rateLimit(this.userLimitsBackend, RATE_LIMIT_ENDPOINTS_GROUPS.ATTRIBUTES),
|
||||
cleanUpQueryParams([ 'aggregation' ]),
|
||||
cleanUpQueryParams(['aggregation']),
|
||||
createMapStoreMapConfigProvider(
|
||||
this.mapStore,
|
||||
this.userLimitsBackend,
|
||||
@@ -71,9 +71,16 @@ function getClusteredFeatures (clusterBackend) {
|
||||
const { aggregation } = req.query;
|
||||
|
||||
const params = {
|
||||
user, token,
|
||||
dbuser, dbname, dbpassword, dbhost, dbport,
|
||||
layer, zoom, clusterId,
|
||||
user,
|
||||
token,
|
||||
dbuser,
|
||||
dbname,
|
||||
dbpassword,
|
||||
dbhost,
|
||||
dbport,
|
||||
layer,
|
||||
zoom,
|
||||
clusterId,
|
||||
aggregation
|
||||
};
|
||||
|
||||
@@ -18,14 +18,16 @@ const ALLOWED_DATAVIEW_QUERY_PARAMS = [
|
||||
'own_filter', // 0, 1
|
||||
'no_filters', // 0, 1
|
||||
'bbox', // w,s,e,n
|
||||
'circle', // json
|
||||
'polygon', // json
|
||||
'start', // number
|
||||
'end', // number
|
||||
'column_type', // string
|
||||
'bins', // number
|
||||
'aggregation', //string
|
||||
'aggregation', // string
|
||||
'offset', // number
|
||||
'q', // widgets search
|
||||
'categories', // number
|
||||
'categories' // number
|
||||
];
|
||||
|
||||
module.exports = class DataviewLayergroupController {
|
||||
@@ -47,7 +49,7 @@ module.exports = class DataviewLayergroupController {
|
||||
this.surrogateKeysCache = surrogateKeysCache;
|
||||
}
|
||||
|
||||
register (mapRouter) {
|
||||
route (mapRouter) {
|
||||
// Undocumented/non-supported API endpoint methods.
|
||||
// Use at your own peril.
|
||||
|
||||
@@ -126,19 +126,25 @@ module.exports = class MapRouter {
|
||||
);
|
||||
}
|
||||
|
||||
register (apiRouter, mapPaths) {
|
||||
route (apiRouter, routes) {
|
||||
const mapRouter = router({ mergeParams: true });
|
||||
|
||||
this.analysisLayergroupController.register(mapRouter);
|
||||
this.attributesLayergroupController.register(mapRouter);
|
||||
this.dataviewLayergroupController.register(mapRouter);
|
||||
this.previewLayergroupController.register(mapRouter);
|
||||
this.tileLayergroupController.register(mapRouter);
|
||||
this.anonymousMapController.register(mapRouter);
|
||||
this.previewTemplateController.register(mapRouter);
|
||||
this.analysesController.register(mapRouter);
|
||||
this.clusteredFeaturesLayergroupController.register(mapRouter);
|
||||
routes.forEach(route => {
|
||||
const { paths, middlewares = [] } = route;
|
||||
|
||||
mapPaths.forEach(path => apiRouter.use(path, mapRouter));
|
||||
middlewares.forEach(middleware => mapRouter.use(middleware()));
|
||||
|
||||
this.analysisLayergroupController.route(mapRouter);
|
||||
this.attributesLayergroupController.route(mapRouter);
|
||||
this.dataviewLayergroupController.route(mapRouter);
|
||||
this.previewLayergroupController.route(mapRouter);
|
||||
this.tileLayergroupController.route(mapRouter);
|
||||
this.anonymousMapController.route(mapRouter);
|
||||
this.previewTemplateController.route(mapRouter);
|
||||
this.analysesController.route(mapRouter);
|
||||
this.clusteredFeaturesLayergroupController.route(mapRouter);
|
||||
|
||||
paths.forEach(path => apiRouter.use(path, mapRouter));
|
||||
});
|
||||
}
|
||||
};
|
||||
@@ -35,7 +35,7 @@ module.exports = class PreviewLayergroupController {
|
||||
this.surrogateKeysCache = surrogateKeysCache;
|
||||
}
|
||||
|
||||
register (mapRouter) {
|
||||
route (mapRouter) {
|
||||
mapRouter.get('/static/center/:token/:z/:lat/:lng/:width/:height.:format', this.middlewares({
|
||||
validateZoom: true,
|
||||
previewType: 'centered'
|
||||
@@ -21,7 +21,7 @@ const DEFAULT_ZOOM_CENTER = {
|
||||
}
|
||||
};
|
||||
|
||||
function numMapper(n) {
|
||||
function numMapper (n) {
|
||||
return +n;
|
||||
}
|
||||
|
||||
@@ -46,7 +46,7 @@ module.exports = class PreviewTemplateController {
|
||||
this.userLimitsBackend = userLimitsBackend;
|
||||
}
|
||||
|
||||
register (mapRouter) {
|
||||
route (mapRouter) {
|
||||
mapRouter.get('/static/named/:template_id/:width/:height.:format', this.middlewares());
|
||||
}
|
||||
|
||||
@@ -60,7 +60,8 @@ module.exports = class PreviewTemplateController {
|
||||
checkStaticImageFormat(),
|
||||
namedMapProvider({
|
||||
namedMapProviderCache: this.namedMapProviderCache,
|
||||
label: 'STATIC_VIZ_MAP', forcedFormat: 'png'
|
||||
label: 'STATIC_VIZ_MAP',
|
||||
forcedFormat: 'png'
|
||||
}),
|
||||
getTemplate({ label: 'STATIC_VIZ_MAP' }),
|
||||
prepareLayerFilterFromPreviewLayers({
|
||||
@@ -99,7 +100,7 @@ function getTemplate ({ label }) {
|
||||
function prepareLayerFilterFromPreviewLayers ({ namedMapProviderCache, label }) {
|
||||
return function prepareLayerFilterFromPreviewLayersMiddleware (req, res, next) {
|
||||
const { template } = res.locals;
|
||||
const { config, auth_token } = req.query;
|
||||
const { config, auth_token: authToken } = req.query;
|
||||
|
||||
if (!template || !template.view || !template.view.preview_layers) {
|
||||
return next();
|
||||
@@ -109,8 +110,8 @@ function prepareLayerFilterFromPreviewLayers ({ namedMapProviderCache, label })
|
||||
var layerVisibilityFilter = [];
|
||||
|
||||
template.layergroup.layers.forEach((layer, index) => {
|
||||
if (previewLayers[''+index] !== false && previewLayers[layer.id] !== false) {
|
||||
layerVisibilityFilter.push(''+index);
|
||||
if (previewLayers['' + index] !== false && previewLayers[layer.id] !== false) {
|
||||
layerVisibilityFilter.push('' + index);
|
||||
}
|
||||
});
|
||||
|
||||
@@ -118,21 +119,29 @@ function prepareLayerFilterFromPreviewLayers ({ namedMapProviderCache, label })
|
||||
return next();
|
||||
}
|
||||
|
||||
const { user, token, cache_buster, api_key } = res.locals;
|
||||
const { user, token, cache_buster: cacheBuster, api_key: apiKey } = res.locals;
|
||||
const { dbuser, dbname, dbpassword, dbhost, dbport } = res.locals;
|
||||
const { template_id, format } = req.params;
|
||||
const { template_id: templateId, format } = req.params;
|
||||
|
||||
const params = {
|
||||
user, token, cache_buster, api_key,
|
||||
dbuser, dbname, dbpassword, dbhost, dbport,
|
||||
template_id, format
|
||||
user,
|
||||
token,
|
||||
cache_buster: cacheBuster,
|
||||
api_key: apiKey,
|
||||
dbuser,
|
||||
dbname,
|
||||
dbpassword,
|
||||
dbhost,
|
||||
dbport,
|
||||
template_id: templateId,
|
||||
format
|
||||
};
|
||||
|
||||
// overwrites 'all' default filter
|
||||
params.layer = layerVisibilityFilter.join(',');
|
||||
|
||||
// recreates the provider
|
||||
namedMapProviderCache.get(user, template_id, config, auth_token, params, (err, provider) => {
|
||||
namedMapProviderCache.get(user, templateId, config, authToken, params, (err, provider) => {
|
||||
if (err) {
|
||||
err.label = label;
|
||||
return next(err);
|
||||
@@ -146,7 +155,7 @@ function prepareLayerFilterFromPreviewLayers ({ namedMapProviderCache, label })
|
||||
}
|
||||
|
||||
function getStaticImageOptions ({ tablesExtentBackend }) {
|
||||
return function getStaticImageOptionsMiddleware(req, res, next) {
|
||||
return function getStaticImageOptionsMiddleware (req, res, next) {
|
||||
const { user, mapConfigProvider, template } = res.locals;
|
||||
const { zoom, lon, lat, bbox } = req.query;
|
||||
const params = { zoom, lon, lat, bbox };
|
||||
@@ -248,7 +257,7 @@ function getImageOptionsFromBoundingBox (bbox = '') {
|
||||
}
|
||||
}
|
||||
|
||||
function getImage({ previewBackend, label }) {
|
||||
function getImage ({ previewBackend, label }) {
|
||||
return function getImageMiddleware (req, res, next) {
|
||||
const { imageOpts, mapConfigProvider } = res.locals;
|
||||
const { zoom, center, bbox } = imageOpts;
|
||||
@@ -298,7 +307,7 @@ function getImage({ previewBackend, label }) {
|
||||
}
|
||||
|
||||
function setContentTypeHeader () {
|
||||
return function setContentTypeHeaderMiddleware(req, res, next) {
|
||||
return function setContentTypeHeaderMiddleware (req, res, next) {
|
||||
const format = req.params.format === 'jpg' ? 'jpeg' : 'png';
|
||||
|
||||
res.set('Content-Type', `image/${format}`);
|
||||
@@ -312,7 +321,7 @@ function incrementMapViewsError (ctx) {
|
||||
}
|
||||
|
||||
function incrementMapViews ({ metadataBackend }) {
|
||||
return function incrementMapViewsMiddleware(req, res, next) {
|
||||
return function incrementMapViewsMiddleware (req, res, next) {
|
||||
const { user, mapConfigProvider } = res.locals;
|
||||
|
||||
mapConfigProvider.getMapConfig((err, mapConfig) => {
|
||||
@@ -334,7 +343,7 @@ function incrementMapViews ({ metadataBackend }) {
|
||||
};
|
||||
}
|
||||
|
||||
function templateZoomCenter(view) {
|
||||
function templateZoomCenter (view) {
|
||||
if (view.zoom !== undefined && view.center) {
|
||||
return {
|
||||
zoom: view.zoom,
|
||||
@@ -344,7 +353,7 @@ function templateZoomCenter(view) {
|
||||
return false;
|
||||
}
|
||||
|
||||
function templateBounds(view) {
|
||||
function templateBounds (view) {
|
||||
if (view.bounds) {
|
||||
var hasAllBounds = ['west', 'south', 'east', 'north'].every(prop => Number.isFinite(view.bounds[prop]));
|
||||
|
||||
@@ -43,16 +43,16 @@ module.exports = class TileLayergroupController {
this.surrogateKeysCache = surrogateKeysCache;
}

register (mapRouter) {
route (mapRouter) {
// REGEXP: doesn't match with `val`
const not = (val) => `(?!${val})([^\/]+?)`;
const not = (val) => `(?!${val})([^\/]+?)`; // eslint-disable-line no-useless-escape

// Sadly the path that matches 1 also matches with 2 so we need to tell to express
// that performs only the middlewares of the first path that matches
// for that we use one array to group all paths.
mapRouter.get([
`/:token/:z/:x/:y@:scale_factor?x.:format`, // 1
`/:token/:z/:x/:y.:format`, // 2
'/:token/:z/:x/:y@:scale_factor?x.:format', // 1
'/:token/:z/:x/:y.:format', // 2
`/:token${not('static')}/:layer/:z/:x/:y.(:format)`
], this.middlewares());
}
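The comment above explains why the three tile paths are grouped into one array: Express runs a single middleware chain for whichever pattern matches first. A minimal standalone sketch of the same idea, assuming Express 4-style path matching as this project uses (the port and handler are illustrative):

```js
'use strict';

const express = require('express'); // assumes Express 4, whose path syntax the diff relies on
const app = express();

// Same trick as the diff: a path segment that must NOT equal `val`.
const not = (val) => `(?!${val})([^/]+?)`;

// One array, one handler: only the first matching pattern runs the shared middleware chain.
app.get([
    '/:token/:z/:x/:y@:scale_factor?x.:format',
    '/:token/:z/:x/:y.:format',
    `/:token${not('static')}/:layer/:z/:x/:y.(:format)`
], (req, res) => res.json(req.params));

app.listen(3000);
```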
@@ -90,7 +90,7 @@ function parseFormat (format = '') {
|
||||
return SUPPORTED_FORMATS[prettyFormat] ? prettyFormat : 'invalid';
|
||||
}
|
||||
|
||||
function getStatusCode(tile, format){
|
||||
function getStatusCode (tile, format) {
|
||||
return tile.length === 0 && format === 'mvt' ? 204 : 200;
|
||||
}
|
||||
|
||||
@@ -149,9 +149,8 @@ function incrementErrorMetrics (statsClient) {
|
||||
|
||||
function tileError () {
|
||||
return function tileErrorMiddleware (err, req, res, next) {
|
||||
|
||||
// See https://github.com/Vizzuality/Windshaft-cartodb/issues/68
|
||||
let errMsg = err.message ? ( '' + err.message ) : ( '' + err );
|
||||
let errMsg = err.message ? ('' + err.message) : ('' + err);
|
||||
|
||||
// Rewrite mapnik parsing errors to start with layer number
|
||||
const matches = errMsg.match("(.*) in style 'layer([0-9]+)'");
|
||||
@@ -9,8 +9,8 @@ module.exports = function authorize (authBackend) {
|
||||
return next(err);
|
||||
}
|
||||
|
||||
if(!authorized) {
|
||||
err = new Error("Sorry, you are unauthorized (permission denied)");
|
||||
if (!authorized) {
|
||||
err = new Error('Sorry, you are unauthorized (permission denied)');
|
||||
err.http_status = 403;
|
||||
return next(err);
|
||||
}
|
||||
@@ -1,14 +1,40 @@
'use strict';

const ONE_YEAR_IN_SECONDS = 60 * 60 * 24 * 365;
const FIVE_MINUTES_IN_SECONDS = 60 * 5;
const ONE_MINUTE_IN_SECONDS = 60;
const THREE_MINUTE_IN_SECONDS = 60 * 3;
const FIVE_MINUTES_IN_SECONDS = ONE_MINUTE_IN_SECONDS * 5;
const TEN_MINUTES_IN_SECONDS = ONE_MINUTE_IN_SECONDS * 10;
const FIFTEEN_MINUTES_IN_SECONDS = ONE_MINUTE_IN_SECONDS * 15;
const THIRTY_MINUTES_IN_SECONDS = ONE_MINUTE_IN_SECONDS * 30;
const ONE_HOUR_IN_SECONDS = ONE_MINUTE_IN_SECONDS * 60;
const ONE_YEAR_IN_SECONDS = ONE_HOUR_IN_SECONDS * 24 * 365;

const FALLBACK_TTL = global.environment.varnish.fallbackTtl || FIVE_MINUTES_IN_SECONDS;

const validFallbackTTL = [
ONE_MINUTE_IN_SECONDS,
THREE_MINUTE_IN_SECONDS,
FIVE_MINUTES_IN_SECONDS,
TEN_MINUTES_IN_SECONDS,
FIFTEEN_MINUTES_IN_SECONDS,
THIRTY_MINUTES_IN_SECONDS,
ONE_HOUR_IN_SECONDS
];

module.exports = function setCacheControlHeader ({
ttl = ONE_YEAR_IN_SECONDS,
fallbackTtl = FALLBACK_TTL,
revalidate = false
} = {}) {
if (!validFallbackTTL.includes(fallbackTtl)) {
const message = [
'Invalid fallback TTL value for Cache-Control header.',
`Got ${fallbackTtl}, expected ${validFallbackTTL.join(', ')}`
].join(' ');

throw new Error(message);
}

return function setCacheControlHeaderMiddleware (req, res, next) {
if (req.method !== 'GET') {
return next();
@@ -22,12 +48,12 @@ module.exports = function setCacheControlHeader ({
return next();
}

const directives = [ 'public' ];
const directives = ['public'];

if (everyAffectedTableCanBeInvalidated(affectedTables)) {
directives.push(`max-age=${ttl}`);
} else {
directives.push(`max-age=${fallbackTtl}`);
directives.push(`max-age=${computeNextTTL({ ttlInSeconds: fallbackTtl })}`);
}

if (revalidate) {
@@ -49,3 +75,11 @@ function everyAffectedTableCanBeInvalidated (affectedTables) {
affectedTables.getTables(skipNotUpdatedAtTables, skipAnalysisCachedTables)
.every(table => table.updated_at !== null);
}

function computeNextTTL ({ ttlInSeconds } = {}) {
const nowInSeconds = Math.ceil(Date.now() / 1000);
const secondsAfterPreviousTTLStep = nowInSeconds % ttlInSeconds;
const secondsToReachTheNextTTLStep = ttlInSeconds - secondsAfterPreviousTTLStep;

return secondsToReachTheNextTTLStep;
}
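The new `computeNextTTL` helper returns the seconds left until the next multiple of `ttlInSeconds` on the wall clock, so fallback `max-age` values shrink as that boundary approaches and cached copies of the same resource lapse together rather than a full `fallbackTtl` after each individual request. A rough illustration of the arithmetic (not part of the changeset):

```js
'use strict';

// Same arithmetic as the helper above, reproduced so the snippet runs standalone.
function computeNextTTL ({ ttlInSeconds } = {}) {
    const nowInSeconds = Math.ceil(Date.now() / 1000);
    return ttlInSeconds - (nowInSeconds % ttlInSeconds);
}

// With the five-minute fallback TTL (300 s): a request 40 s into the current window
// gets max-age of roughly 260, one 290 s in gets roughly 10, so both expire at the same boundary.
console.log(`max-age=${computeNextTTL({ ttlInSeconds: 300 })}`);
```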
@@ -1,7 +1,7 @@
|
||||
'use strict';
|
||||
|
||||
module.exports = function checkJsonContentType () {
|
||||
return function checkJsonContentTypeMiddleware(req, res, next) {
|
||||
return function checkJsonContentTypeMiddleware (req, res, next) {
|
||||
if (req.method === 'POST' && !req.is('application/json')) {
|
||||
return next(new Error('POST data must be of type application/json'));
|
||||
}
|
||||
@@ -4,7 +4,7 @@ const VALID_IMAGE_FORMATS = ['png', 'jpg'];
|
||||
|
||||
module.exports = function checkStaticImageFormat () {
|
||||
return function checkStaticImageFormatMiddleware (req, res, next) {
|
||||
if(!VALID_IMAGE_FORMATS.includes(req.params.format)) {
|
||||
if (!VALID_IMAGE_FORMATS.includes(req.params.format)) {
|
||||
return next(new Error(`Unsupported image format "${req.params.format}"`));
|
||||
}
|
||||
|
||||
@@ -13,8 +13,8 @@ module.exports = function cors () {
|
||||
headers.push('Content-Type');
|
||||
}
|
||||
|
||||
res.set("Access-Control-Allow-Origin", "*");
|
||||
res.set("Access-Control-Allow-Headers", headers.join(', '));
|
||||
res.set('Access-Control-Allow-Origin', '*');
|
||||
res.set('Access-Control-Allow-Headers', headers.join(', '));
|
||||
|
||||
next();
|
||||
};
|
||||
@@ -3,24 +3,24 @@
|
||||
const basicAuth = require('basic-auth');
|
||||
|
||||
module.exports = function credentials () {
|
||||
return function credentialsMiddleware(req, res, next) {
|
||||
return function credentialsMiddleware (req, res, next) {
|
||||
const apikeyCredentials = getApikeyCredentialsFromRequest(req);
|
||||
|
||||
res.locals.api_key = apikeyCredentials.token;
|
||||
res.locals.basicAuthUsername = apikeyCredentials.username;
|
||||
res.set('vary', 'Authorization'); //Honor Authorization header when caching.
|
||||
res.set('vary', 'Authorization'); // Honor Authorization header when caching.
|
||||
|
||||
return next();
|
||||
};
|
||||
};
|
||||
|
||||
function getApikeyCredentialsFromRequest(req) {
|
||||
function getApikeyCredentialsFromRequest (req) {
|
||||
let apikeyCredentials = {
|
||||
token: null,
|
||||
username: null,
|
||||
username: null
|
||||
};
|
||||
|
||||
for (let getter of apikeyGetters) {
|
||||
for (const getter of apikeyGetters) {
|
||||
apikeyCredentials = getter(req);
|
||||
if (apikeyTokenFound(apikeyCredentials)) {
|
||||
break;
|
||||
@@ -33,10 +33,10 @@ function getApikeyCredentialsFromRequest(req) {
|
||||
const apikeyGetters = [
|
||||
getApikeyTokenFromHeaderAuthorization,
|
||||
getApikeyTokenFromRequestQueryString,
|
||||
getApikeyTokenFromRequestBody,
|
||||
getApikeyTokenFromRequestBody
|
||||
];
|
||||
|
||||
function getApikeyTokenFromHeaderAuthorization(req) {
|
||||
function getApikeyTokenFromHeaderAuthorization (req) {
|
||||
const credentials = basicAuth(req);
|
||||
|
||||
if (credentials) {
|
||||
@@ -47,12 +47,12 @@ function getApikeyTokenFromHeaderAuthorization(req) {
|
||||
} else {
|
||||
return {
|
||||
username: null,
|
||||
token: null,
|
||||
token: null
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
function getApikeyTokenFromRequestQueryString(req) {
|
||||
function getApikeyTokenFromRequestQueryString (req) {
|
||||
let token = null;
|
||||
|
||||
if (req.query && req.query.api_key) {
|
||||
@@ -63,11 +63,11 @@ function getApikeyTokenFromRequestQueryString(req) {
|
||||
|
||||
return {
|
||||
username: null,
|
||||
token: token,
|
||||
token: token
|
||||
};
|
||||
}
|
||||
|
||||
function getApikeyTokenFromRequestBody(req) {
|
||||
function getApikeyTokenFromRequestBody (req) {
|
||||
let token = null;
|
||||
|
||||
if (req.body && req.body.api_key) {
|
||||
@@ -78,10 +78,10 @@ function getApikeyTokenFromRequestBody(req) {
|
||||
|
||||
return {
|
||||
username: null,
|
||||
token: token,
|
||||
token: token
|
||||
};
|
||||
}
|
||||
|
||||
function apikeyTokenFound(apikey) {
|
||||
function apikeyTokenFound (apikey) {
|
||||
return !!apikey && !!apikey.token;
|
||||
}
|
||||
@@ -10,7 +10,7 @@ module.exports = function dbConnSetup (pgConnection) {
|
||||
req.profiler.done('dbConnSetup');
|
||||
|
||||
if (err) {
|
||||
if (err.message && -1 !== err.message.indexOf('name not found')) {
|
||||
if (err.message && err.message.indexOf('name not found') !== -1) {
|
||||
err.http_status = 404;
|
||||
}
|
||||
|
||||
@@ -52,10 +52,10 @@ function isTimeoutError (errorTypes) {
|
||||
return errorTypes.renderTimeoutError || errorTypes.datasourceTimeoutError;
|
||||
}
|
||||
|
||||
function getErrorTypes(error) {
|
||||
function getErrorTypes (error) {
|
||||
return {
|
||||
renderTimeoutError: isRenderTimeoutError(error),
|
||||
datasourceTimeoutError: isDatasourceTimeoutError(error),
|
||||
datasourceTimeoutError: isDatasourceTimeoutError(error)
|
||||
};
|
||||
}
|
||||
|
||||
@@ -99,9 +99,9 @@ function populateLimitErrors (errors) {
|
||||
});
|
||||
}
|
||||
|
||||
function findStatusCode(err) {
|
||||
function findStatusCode (err) {
|
||||
var statusCode;
|
||||
if ( err.http_status ) {
|
||||
if (err.http_status) {
|
||||
statusCode = err.http_status;
|
||||
} else {
|
||||
statusCode = statusFromErrorMessage('' + err);
|
||||
@@ -111,34 +111,30 @@ function findStatusCode(err) {
|
||||
|
||||
module.exports.findStatusCode = findStatusCode;
|
||||
|
||||
function statusFromErrorMessage(errMsg) {
|
||||
function statusFromErrorMessage (errMsg) {
|
||||
// Find an appropriate statusCode based on message
|
||||
// jshint maxcomplexity:7
|
||||
var statusCode = 400;
|
||||
if ( -1 !== errMsg.indexOf('permission denied') ) {
|
||||
if (errMsg.indexOf('permission denied') !== -1) {
|
||||
statusCode = 403;
|
||||
}
|
||||
else if ( -1 !== errMsg.indexOf('authentication failed') ) {
|
||||
} else if (errMsg.indexOf('authentication failed') !== -1) {
|
||||
statusCode = 403;
|
||||
}
|
||||
else if (errMsg.match(/Postgis Plugin.*[\s|\n].*column.*does not exist/)) {
|
||||
} else if (errMsg.match(/Postgis Plugin.*[\s|\n].*column.*does not exist/)) {
|
||||
statusCode = 400;
|
||||
}
|
||||
else if ( -1 !== errMsg.indexOf('does not exist') ) {
|
||||
if ( -1 !== errMsg.indexOf(' role ') ) {
|
||||
} else if (errMsg.indexOf('does not exist') !== -1) {
|
||||
if (errMsg.indexOf(' role ') !== -1) {
|
||||
statusCode = 403; // role 'xxx' does not exist
|
||||
} else if ( errMsg.match(/function .* does not exist/) ) {
|
||||
} else if (errMsg.match(/function .* does not exist/)) {
|
||||
statusCode = 400; // invalid SQL (SQL function does not exist)
|
||||
} else {
|
||||
statusCode = 404;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
return statusCode;
|
||||
}
|
||||
|
||||
function errorMessage(err) {
|
||||
function errorMessage (err) {
|
||||
// See https://github.com/Vizzuality/Windshaft-cartodb/issues/68
|
||||
var message = (_.isString(err) ? err : err.message) || 'Unknown error';
|
||||
|
||||
@@ -147,7 +143,7 @@ function errorMessage(err) {
|
||||
|
||||
module.exports.errorMessage = errorMessage;
|
||||
|
||||
function stripConnectionInfo(message) {
|
||||
function stripConnectionInfo (message) {
|
||||
// Strip connection info, if any
|
||||
return message
|
||||
// See https://github.com/CartoDB/Windshaft/issues/173
|
||||
@@ -168,18 +164,18 @@ function shouldBeExposed (prop) {
|
||||
return !!ERROR_INFO_TO_EXPOSE[prop];
|
||||
}
|
||||
|
||||
function errorMessageWithContext(err) {
|
||||
function errorMessageWithContext (err) {
|
||||
// See https://github.com/Vizzuality/Windshaft-cartodb/issues/68
|
||||
var message = (_.isString(err) ? err : err.message) || 'Unknown error';
|
||||
|
||||
var error = {
|
||||
type: err.type || 'unknown',
|
||||
message: stripConnectionInfo(message),
|
||||
message: stripConnectionInfo(message)
|
||||
};
|
||||
|
||||
for (var prop in err) {
|
||||
// type & message are properties from Error's prototype and will be skipped
|
||||
if (err.hasOwnProperty(prop) && shouldBeExposed(prop)) {
|
||||
if (Object.prototype.hasOwnProperty.call(err, prop) && shouldBeExposed(prop)) {
|
||||
error[prop] = err[prop];
|
||||
}
|
||||
}
|
||||
@@ -187,27 +183,27 @@ function errorMessageWithContext(err) {
|
||||
return error;
|
||||
}
|
||||
|
||||
function setErrorHeader(errors, statusCode, res) {
|
||||
let errorsCopy = errors.slice(0);
|
||||
function setErrorHeader (errors, statusCode, res) {
|
||||
const errorsCopy = errors.slice(0);
|
||||
const mainError = errorsCopy.shift();
|
||||
|
||||
let errorsLog = {
|
||||
const errorsLog = {
|
||||
mainError: {
|
||||
statusCode: statusCode || 200,
|
||||
message: mainError.message,
|
||||
name: mainError.name,
|
||||
label: mainError.label,
|
||||
type: mainError.type,
|
||||
subtype: mainError.subtype
|
||||
message: mainError.message,
|
||||
name: mainError.name,
|
||||
label: mainError.label,
|
||||
type: mainError.type,
|
||||
subtype: mainError.subtype
|
||||
}
|
||||
};
|
||||
|
||||
errorsLog.moreErrors = errorsCopy.map(error => {
|
||||
return {
|
||||
message: error.message,
|
||||
name: error.name,
|
||||
label: error.label,
|
||||
type: error.type,
|
||||
name: error.name,
|
||||
label: error.label,
|
||||
type: error.type,
|
||||
subtype: error.subtype
|
||||
};
|
||||
});
|
||||
@@ -221,14 +217,14 @@ function setErrorHeader(errors, statusCode, res) {
|
||||
*
|
||||
* @param {Object} object
|
||||
*/
|
||||
function stringifyForLogs(object) {
|
||||
function stringifyForLogs (object) {
|
||||
Object.keys(object).map(key => {
|
||||
if(typeof object[key] === 'string') {
|
||||
if (typeof object[key] === 'string') {
|
||||
object[key] = object[key].replace(/[^a-zA-Z0-9]/g, ' ');
|
||||
} else if (typeof object[key] === 'object') {
|
||||
stringifyForLogs(object[key]);
|
||||
} else if (object[key] instanceof Array) {
|
||||
for (let element of object[key]) {
|
||||
for (const element of object[key]) {
|
||||
stringifyForLogs(element);
|
||||
}
|
||||
}
|
||||
@@ -1,7 +1,7 @@
|
||||
'use strict';
|
||||
|
||||
module.exports = function incrementMapViewCount (metadataBackend) {
|
||||
return function incrementMapViewCountMiddleware(req, res, next) {
|
||||
return function incrementMapViewCountMiddleware (req, res, next) {
|
||||
const { mapConfig, user } = res.locals;
|
||||
|
||||
// Error won't blow up, just be logged.
|
||||
@@ -1,16 +1,18 @@
|
||||
'use strict';
|
||||
|
||||
module.exports = function setLastModifiedHeader () {
|
||||
return function setLastModifiedHeaderMiddleware(req, res, next) {
|
||||
return function setLastModifiedHeaderMiddleware (req, res, next) {
|
||||
if (req.method !== 'GET') {
|
||||
return next();
|
||||
}
|
||||
|
||||
const { mapConfigProvider, cache_buster } = res.locals;
|
||||
const { mapConfigProvider, cache_buster: cacheBuster } = res.locals;
|
||||
|
||||
if (cache_buster) {
|
||||
const cacheBuster = parseInt(cache_buster, 10);
|
||||
const lastModifiedDate = Number.isFinite(cacheBuster) ? new Date(cacheBuster) : new Date();
|
||||
if (cacheBuster) {
|
||||
const cacheBusterTimestamp = parseInt(cacheBuster, 10);
|
||||
const lastModifiedDate = Number.isFinite(cacheBusterTimestamp) && cacheBusterTimestamp !== 0
|
||||
? new Date(cacheBusterTimestamp)
|
||||
: new Date();
|
||||
|
||||
res.set('Last-Modified', lastModifiedDate.toUTCString());
|
||||
|
||||
@@ -27,12 +27,12 @@ module.exports = function setLastUpdatedTimeToLayergroup () {
|
||||
};
|
||||
};
|
||||
|
||||
function getLastUpdatedTime(analysesResults, lastUpdateTime) {
|
||||
function getLastUpdatedTime (analysesResults, lastUpdateTime) {
|
||||
if (!Array.isArray(analysesResults)) {
|
||||
return lastUpdateTime;
|
||||
}
|
||||
return analysesResults.reduce(function(lastUpdateTime, analysis) {
|
||||
return analysis.getNodes().reduce(function(lastNodeUpdatedAtTime, node) {
|
||||
return analysesResults.reduce(function (lastUpdateTime, analysis) {
|
||||
return analysis.getNodes().reduce(function (lastNodeUpdatedAtTime, node) {
|
||||
var nodeUpdatedAtDate = node.getUpdatedAt();
|
||||
var nodeUpdatedTimeAt = (nodeUpdatedAtDate && nodeUpdatedAtDate.getTime()) || 0;
|
||||
return nodeUpdatedTimeAt > lastNodeUpdatedAtTime ? nodeUpdatedTimeAt : lastNodeUpdatedAtTime;
|
||||
@@ -1,7 +1,7 @@
|
||||
'use strict';
|
||||
|
||||
module.exports = function setLayerStats (pgConnection, statsBackend) {
|
||||
return function setLayerStatsMiddleware(req, res, next) {
|
||||
return function setLayerStatsMiddleware (req, res, next) {
|
||||
const { user, mapConfig } = res.locals;
|
||||
const layergroup = res.body;
|
||||
|
||||
@@ -10,7 +10,7 @@ module.exports = function setLayerStats (pgConnection, statsBackend) {
|
||||
return next(err);
|
||||
}
|
||||
|
||||
statsBackend.getStats(mapConfig, connection, function(err, layersStats) {
|
||||
statsBackend.getStats(mapConfig, connection, function (err, layersStats) {
|
||||
if (err) {
|
||||
return next(err);
|
||||
}
|
||||
@@ -9,7 +9,7 @@ module.exports = function setMetadataToLayergroup (layergroupMetadata, includeQu
|
||||
layergroupMetadata.addAnalysesMetadata(user, layergroup, analysesResults, includeQuery);
|
||||
layergroupMetadata.addTurboCartoContextMetadata(layergroup, mapConfig.obj(), context);
|
||||
layergroupMetadata.addAggregationContextMetadata(layergroup, mapConfig.obj(), context);
|
||||
layergroupMetadata.addDateWrappingMetadata (layergroup, mapConfig.obj());
|
||||
layergroupMetadata.addDateWrappingMetadata(layergroup, mapConfig.obj());
|
||||
layergroupMetadata.addTileJsonMetadata(layergroup, user, mapConfig, userApiKey);
|
||||
|
||||
next();
|
||||
@@ -19,7 +19,7 @@ module.exports = function layergroupToken () {
|
||||
if (res.locals.signer !== user) {
|
||||
const err = new Error(authErrorMessageTemplate(res.locals.signer, user));
|
||||
err.type = 'auth';
|
||||
err.http_status = (req.query && req.query.callback) ? 200: 403;
|
||||
err.http_status = (req.query && req.query.callback) ? 200 : 403;
|
||||
|
||||
return next(err);
|
||||
}
|
||||
@@ -6,20 +6,20 @@ module.exports = function lzma () {
|
||||
const lzmaWorker = new LZMA();
|
||||
|
||||
return function lzmaMiddleware (req, res, next) {
|
||||
if (!req.query.hasOwnProperty('lzma')) {
|
||||
if (!Object.prototype.hasOwnProperty.call(req.query, 'lzma')) {
|
||||
return next();
|
||||
}
|
||||
|
||||
// Decode (from base64)
|
||||
var lzma = new Buffer(req.query.lzma, 'base64')
|
||||
var lzma = Buffer.from(req.query.lzma, 'base64')
|
||||
.toString('binary')
|
||||
.split('')
|
||||
.map(function(c) {
|
||||
.map(function (c) {
|
||||
return c.charCodeAt(0) - 128;
|
||||
});
|
||||
|
||||
// Decompress
|
||||
lzmaWorker.decompress(lzma, function(result) {
|
||||
lzmaWorker.decompress(lzma, function (result) {
|
||||
try {
|
||||
delete req.query.lzma;
|
||||
Object.assign(req.query, JSON.parse(result));
|
||||
@@ -17,7 +17,7 @@ module.exports = function mapError (options) {
|
||||
};
|
||||
};
|
||||
|
||||
function populateError(err, mapConfig) {
|
||||
function populateError (err, mapConfig) {
|
||||
var error = new Error(err.message);
|
||||
error.http_status = err.http_status;
|
||||
|
||||
@@ -10,15 +10,27 @@ module.exports = function createMapStoreMapConfigProvider (
|
||||
forcedFormat = null
|
||||
) {
|
||||
return function createMapStoreMapConfigProviderMiddleware (req, res, next) {
|
||||
const { user, token, cache_buster, api_key } = res.locals;
|
||||
const { user, token, cache_buster: cacheBuster, api_key: apiKey } = res.locals;
|
||||
const { dbuser, dbname, dbpassword, dbhost, dbport } = res.locals;
|
||||
const { layer: layerFromParams, z, x, y, scale_factor, format } = req.params;
|
||||
const { layer: layerFromParams, z, x, y, scale_factor: scaleFactor, format } = req.params;
|
||||
const { layer: layerFromQuery } = req.query;
|
||||
|
||||
const params = {
|
||||
user, token, cache_buster, api_key,
|
||||
dbuser, dbname, dbpassword, dbhost, dbport,
|
||||
layer: (layerFromQuery || layerFromParams), z, x, y, scale_factor, format
|
||||
user,
|
||||
token,
|
||||
cache_buster: cacheBuster,
|
||||
api_key: apiKey,
|
||||
dbuser,
|
||||
dbname,
|
||||
dbpassword,
|
||||
dbhost,
|
||||
dbport,
|
||||
layer: (layerFromQuery || layerFromParams),
|
||||
z,
|
||||
x,
|
||||
y,
|
||||
scale_factor: scaleFactor,
|
||||
format
|
||||
};
|
||||
|
||||
if (forcedFormat) {
|
||||
46
lib/api/middlewares/named-map-provider.js
Normal file
@@ -0,0 +1,46 @@
'use strict';

module.exports = function getNamedMapProvider ({ namedMapProviderCache, label, forcedFormat = null }) {
return function getNamedMapProviderMiddleware (req, res, next) {
const { user, token, cache_buster: cacheBuster, api_key: apiKey } = res.locals;
const { dbuser, dbname, dbpassword, dbhost, dbport } = res.locals;
const { template_id: templateId, layer: layerFromParams, z, x, y, format } = req.params;
const { layer: layerFromQuery } = req.query;

const params = {
user,
token,
cache_buster: cacheBuster,
api_key: apiKey,
dbuser,
dbname,
dbpassword,
dbhost,
dbport,
template_id: templateId,
layer: (layerFromQuery || layerFromParams),
z,
x,
y,
format
};

if (forcedFormat) {
params.format = forcedFormat;
params.layer = params.layer || 'all';
}

const { config, auth_token: authToken } = req.query;

namedMapProviderCache.get(user, templateId, config, authToken, params, (err, namedMapProvider) => {
if (err) {
err.label = label;
return next(err);
}

res.locals.mapConfigProvider = namedMapProvider;

next();
});
};
};
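The new `named-map-provider.js` middleware extracts the provider lookup that the preview template controller previously did inline. A minimal sketch of mounting it on a router, modeled on the `STATIC_VIZ_MAP` usage earlier in this changeset; the cache instance and the `NAMED_MAP_TILE` label are assumptions, not taken from the diff:

```js
'use strict';

const { Router: router } = require('express');
const namedMapProvider = require('../middlewares/named-map-provider');

// Assumed: a NamedMapProviderCache instance is built by the caller, as elsewhere in this changeset.
module.exports = function buildTileTemplateRouter (namedMapProviderCache) {
    const templateRouter = router({ mergeParams: true });

    templateRouter.get('/:template_id/:layer/:z/:x/:y.(:format)',
        namedMapProvider({ namedMapProviderCache, label: 'NAMED_MAP_TILE' }),
        (req, res) => {
            // The middleware leaves the provider in res.locals for later middlewares to use.
            res.status(200).json({ hasProvider: !!res.locals.mapConfigProvider });
        }
    );

    return templateRouter;
};
```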
@@ -19,12 +19,12 @@ const RATE_LIMIT_ENDPOINTS_GROUPS = {
|
||||
NAMED_TILES: 'named_tiles'
|
||||
};
|
||||
|
||||
function rateLimit(userLimitsBackend, endpointGroup = null) {
|
||||
function rateLimit (userLimitsBackend, endpointGroup = null) {
|
||||
if (!isRateLimitEnabled(endpointGroup)) {
|
||||
return function rateLimitDisabledMiddleware(req, res, next) { next(); };
|
||||
return function rateLimitDisabledMiddleware (req, res, next) { next(); };
|
||||
}
|
||||
|
||||
return function rateLimitMiddleware(req, res, next) {
|
||||
return function rateLimitMiddleware (req, res, next) {
|
||||
userLimitsBackend.getRateLimit(res.locals.user, endpointGroup, function (err, userRateLimit) {
|
||||
if (err) {
|
||||
return next(err);
|
||||
@@ -46,7 +46,7 @@ function rateLimit(userLimitsBackend, endpointGroup = null) {
|
||||
// retry is floor rounded in seconds by redis-cell
|
||||
res.set('Retry-After', retry + 1);
|
||||
|
||||
let rateLimitError = new Error(
|
||||
const rateLimitError = new Error(
|
||||
'You are over platform\'s limits: too many requests.' +
|
||||
' Please contact us to know more details'
|
||||
);
|
||||
@@ -61,8 +61,7 @@ function rateLimit(userLimitsBackend, endpointGroup = null) {
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
function isRateLimitEnabled(endpointGroup) {
|
||||
function isRateLimitEnabled (endpointGroup) {
|
||||
return global.environment.enabledFeatures.rateLimitsEnabled &&
|
||||
endpointGroup &&
|
||||
global.environment.enabledFeatures.rateLimitsByEndpoint[endpointGroup];
|
||||
@@ -1,6 +1,6 @@
|
||||
'use strict';
|
||||
|
||||
const Profiler = require('../../stats/profiler_proxy');
|
||||
const Profiler = require('../../stats/profiler-proxy');
|
||||
const debug = require('debug')('windshaft:cartodb:stats');
|
||||
const onHeaders = require('on-headers');
|
||||
|
||||
@@ -20,7 +20,7 @@ module.exports = function stats (options) {
|
||||
// May throw due to dns, see: http://github.com/CartoDB/Windshaft/issues/166
|
||||
req.profiler.sendStats();
|
||||
} catch (err) {
|
||||
debug("error sending profiling stats: " + err);
|
||||
debug('error sending profiling stats: ' + err);
|
||||
}
|
||||
});
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
'use strict';
|
||||
|
||||
const NamedMapsCacheEntry = require('../../cache/model/named_maps_entry');
|
||||
const NamedMapsCacheEntry = require('../../cache/model/named-maps-entry');
|
||||
const NamedMapMapConfigProvider = require('../../models/mapconfig/provider/named-map-provider');
|
||||
|
||||
module.exports = function setSurrogateKeyHeader ({ surrogateKeysCache }) {
|
||||
return function setSurrogateKeyHeaderMiddleware(req, res, next) {
|
||||
return function setSurrogateKeyHeaderMiddleware (req, res, next) {
|
||||
const { user, mapConfigProvider } = res.locals;
|
||||
|
||||
if (mapConfigProvider instanceof NamedMapMapConfigProvider) {
|
||||
@@ -1,11 +1,11 @@
|
||||
'use strict';
|
||||
|
||||
const CdbRequest = require('../../models/cdb_request');
|
||||
const CdbRequest = require('../../models/cdb-request');
|
||||
|
||||
module.exports = function user () {
|
||||
const cdbRequest = new CdbRequest();
|
||||
|
||||
return function userMiddleware(req, res, next) {
|
||||
return function userMiddleware (req, res, next) {
|
||||
res.locals.user = cdbRequest.userByReq(req);
|
||||
|
||||
next();
|
||||
@@ -1,12 +1,12 @@
|
||||
'use strict';
|
||||
|
||||
const fs = require('fs');
|
||||
const timeoutErrorVectorTile = fs.readFileSync(__dirname + '/../../../../assets/render-timeout-fallback.mvt');
|
||||
|
||||
module.exports = function vectorError() {
|
||||
return function vectorErrorMiddleware(err, req, res, next) {
|
||||
if(req.params.format === 'mvt') {
|
||||
const path = require('path');
|
||||
const timeoutErrorVectorTile = fs.readFileSync(path.join(__dirname, '/../../../assets/render-timeout-fallback.mvt'));
|
||||
|
||||
module.exports = function vectorError () {
|
||||
return function vectorErrorMiddleware (err, req, res, next) {
|
||||
if (req.params.format === 'mvt') {
|
||||
if (isTimeoutError(err) || isRateLimitError(err)) {
|
||||
res.set('Content-Type', 'application/x-protobuf');
|
||||
return res.status(429).send(timeoutErrorVectorTile);
|
||||
@@ -17,7 +17,6 @@ module.exports = function vectorError() {
|
||||
};
|
||||
};
|
||||
|
||||
|
||||
function isRenderTimeoutError (err) {
|
||||
return err.message === 'Render timed out';
|
||||
}
|
||||
@@ -1,6 +1,6 @@
|
||||
'use strict';
|
||||
|
||||
const { templateName } = require('../../backends/template_maps');
|
||||
const { templateName } = require('../../backends/template-maps');
|
||||
const credentials = require('../middlewares/credentials');
|
||||
const rateLimit = require('../middlewares/rate-limit');
|
||||
const { RATE_LIMIT_ENDPOINTS_GROUPS } = rateLimit;
|
||||
@@ -18,8 +18,8 @@ module.exports = class AdminTemplateController {
this.userLimitsBackend = userLimitsBackend;
}

register (templateRouter) {
templateRouter.options(`/:template_id`);
route (templateRouter) {
templateRouter.options('/:template_id');

templateRouter.post('/', this.middlewares({
action: 'create',
@@ -63,7 +63,7 @@ module.exports = class NamedMapController {
this.layergroupMetadata = layergroupMetadata;
}

register (templateRouter) {
route (templateRouter) {
templateRouter.get('/:template_id/jsonp', this.middlewares());
templateRouter.post('/:template_id', this.middlewares());
}
@@ -106,7 +106,7 @@ module.exports = class NamedMapController {
lastModifiedHeader(),
lastUpdatedTimeLayergroup(),
layerStats(this.pgConnection, this.statsBackend),
layergroupIdHeader(this.templateMaps ,useTemplateHash),
layergroupIdHeader(this.templateMaps, useTemplateHash),
layergroupMetadata(this.layergroupMetadata, includeQuery),
mapError({ label, addContext })
];
@@ -114,7 +114,7 @@ module.exports = class NamedMapController {
};

function checkInstantiteLayergroup () {
return function checkInstantiteLayergroupMiddleware(req, res, next) {
return function checkInstantiteLayergroupMiddleware (req, res, next) {
if (req.method === 'GET') {
const { callback, config } = req.query;

@@ -125,7 +125,7 @@ function checkInstantiteLayergroup () {
if (config) {
try {
req.body = JSON.parse(config);
} catch(e) {
} catch (e) {
return next(new Error('Invalid config parameter, should be a valid JSON'));
}
}
@@ -148,8 +148,8 @@ function getTemplate (
return function getTemplateMiddleware (req, res, next) {
const templateParams = req.body;
const { user, dbuser, dbname, dbpassword, dbhost, dbport } = res.locals;
const { template_id } = req.params;
const { auth_token } = req.query;
const { template_id: templateId } = req.params;
const { auth_token: authToken } = req.query;

const params = Object.assign({ dbuser, dbname, dbpassword, dbhost, dbport }, req.query);

@@ -161,9 +161,9 @@ function getTemplate (
mapConfigAdapter,
affectedTablesCache,
user,
template_id,
templateId,
templateParams,
auth_token,
authToken,
params
);

@@ -21,7 +21,7 @@ module.exports = class TemplateRouter {
authBackend,
layergroupMetadata,
namedMapProviderCache,
tileBackend,
tileBackend
} = collaborators;

this.namedMapController = new NamedMapController(
@@ -54,13 +54,19 @@ module.exports = class TemplateRouter {
);
}

register (apiRouter, templatePaths) {
route (apiRouter, routes) {
const templateRouter = router({ mergeParams: true });

this.namedMapController.register(templateRouter);
this.tileTemplateController.register(templateRouter);
this.adminTemplateController.register(templateRouter);
routes.forEach(route => {
const { paths, middlewares = [] } = route;

templatePaths.forEach(path => apiRouter.use(path, templateRouter));
middlewares.forEach(middleware => templateRouter.use(middleware()));

this.namedMapController.route(templateRouter);
this.tileTemplateController.route(templateRouter);
this.adminTemplateController.route(templateRouter);

paths.forEach(path => apiRouter.use(path, templateRouter));
});
}
};
@@ -31,7 +31,7 @@ module.exports = class TileTemplateController {
this.userLimitsBackend = userLimitsBackend;
}

register (templateRouter) {
route (templateRouter) {
templateRouter.get('/:template_id/:layer/:z/:x/:y.(:format)', this.middlewares());
}

@@ -89,7 +89,7 @@ function getTile ({ tileBackend, label }) {
}

function setContentTypeHeader () {
return function setContentTypeHeaderMiddleware(req, res, next) {
return function setContentTypeHeaderMiddleware (req, res, next) {
res.set('Content-Type', res.get('content-type') || res.get('Content-Type') || 'image/png');

next();
@@ -2,7 +2,7 @@

var PSQL = require('cartodb-psql');

function AnalysisStatusBackend() {
function AnalysisStatusBackend () {
}

module.exports = AnalysisStatusBackend;
@@ -10,12 +10,12 @@ module.exports = AnalysisStatusBackend;
AnalysisStatusBackend.prototype.getNodeStatus = function (nodeId, dbParams, callback) {
var statusQuery = [
'SELECT node_id, status, updated_at, last_error_message as error_message',
'FROM cdb_analysis_catalog where node_id = \'' + nodeId + '\''
'FROM cartodb.cdb_analysis_catalog where node_id = \'' + nodeId + '\''
].join(' ');

var pg = new PSQL(dbParams);

pg.query(statusQuery, function(err, result) {
pg.query(statusQuery, function (err, result) {
if (err) {
return callback(err, result);
}
@@ -43,7 +43,7 @@ AnalysisBackend.prototype.setLoggerConfig = function (options) {
}
};

AnalysisBackend.prototype.create = function(analysisConfiguration, analysisDefinition, callback) {
AnalysisBackend.prototype.create = function (analysisConfiguration, analysisDefinition, callback) {
analysisConfiguration.batch.endpoint = this.batchConfig.endpoint;
analysisConfiguration.batch.inlineExecution = this.batchConfig.inlineExecution;
analysisConfiguration.batch.hostHeaderTemplate = this.batchConfig.hostHeaderTemplate;
@@ -52,13 +52,14 @@ AnalysisBackend.prototype.create = function(analysisConfiguration, analysisDefin
stream: this.stream ? this.stream : process.stdout
};

this.getAnalysesLimits(analysisConfiguration.user, function(err, limits) {
this.getAnalysesLimits(analysisConfiguration.user, function (err, limits) {
if (err) {}
analysisConfiguration.limits = limits || {};
camshaft.create(analysisConfiguration, analysisDefinition, callback);
});
};

AnalysisBackend.prototype.getAnalysesLimits = function(username, callback) {
AnalysisBackend.prototype.getAnalysesLimits = function (username, callback) {
var self = this;

var analysesLimits = {
@@ -70,16 +71,17 @@ AnalysisBackend.prototype.getAnalysesLimits = function(username, callback) {
}
};

Object.keys(self.options.limits).forEach(function(analysisTypeOrTag) {
Object.keys(self.options.limits).forEach(function (analysisTypeOrTag) {
analysesLimits.analyses[analysisTypeOrTag] = _.extend({}, self.options.limits[analysisTypeOrTag]);
});

var analysesLimitsKey = REDIS_LIMITS.PREFIX + username;
this.metadataBackend.redisCmd(REDIS_LIMITS.DB, 'HGETALL', [analysesLimitsKey], function(err, analysesTimeouts) {
this.metadataBackend.redisCmd(REDIS_LIMITS.DB, 'HGETALL', [analysesLimitsKey], function (err, analysesTimeouts) {
if (err) {}
// analysesTimeouts wil be something like: { moran: 3000, intersection: 5000 }
analysesTimeouts = analysesTimeouts || {};

Object.keys(analysesTimeouts).forEach(function(analysisType) {
Object.keys(analysesTimeouts).forEach(function (analysisType) {
analysesLimits.analyses[analysisType] = _.defaults(
{
timeout: Number.isFinite(+analysesTimeouts[analysisType]) ? +analysesTimeouts[analysisType] : 0
@@ -9,7 +9,7 @@
* @constructor
* @type {AuthBackend}
*/
function AuthBackend(pgConnection, metadataBackend, mapStore, templateMaps) {
function AuthBackend (pgConnection, metadataBackend, mapStore, templateMaps) {
this.pgConnection = pgConnection;
this.metadataBackend = metadataBackend;
this.mapStore = mapStore;
@@ -25,28 +25,28 @@ module.exports = AuthBackend;
// null if the request is not signed by anyone
// or will be a string cartodb username otherwise.
//
AuthBackend.prototype.authorizedBySigner = function(req, res, callback) {
if ( ! res.locals.token || ! res.locals.signer ) {
AuthBackend.prototype.authorizedBySigner = function (req, res, callback) {
if (!res.locals.token || !res.locals.signer) {
return callback(null, false); // no signer requested
}

var self = this;

var layergroup_id = res.locals.token;
var auth_token = req.query.auth_token;
var layergroupId = res.locals.token;
var authToken = req.query.auth_token;

this.mapStore.load(layergroup_id, function(err, mapConfig) {
this.mapStore.load(layergroupId, function (err, mapConfig) {
if (err) {
return callback(err);
}

var authorized = self.templateMaps.isAuthorized(mapConfig.obj().template, auth_token);
var authorized = self.templateMaps.isAuthorized(mapConfig.obj().template, authToken);

return callback(null, authorized);
});
};

function isValidApiKey(apikey) {
function isValidApiKey (apikey) {
return apikey.type &&
apikey.user &&
apikey.databasePassword &&
@@ -60,11 +60,11 @@ function isValidApiKey(apikey) {
// @param callback function(err, authorized)
// NOTE: authorized is expected to be 0 or 1 (integer)
//
AuthBackend.prototype.authorizedByAPIKey = function(user, res, callback) {
AuthBackend.prototype.authorizedByAPIKey = function (user, res, callback) {
const apikeyToken = res.locals.api_key;
const basicAuthUsername = res.locals.basicAuthUsername;

if ( ! apikeyToken ) {
if (!apikeyToken) {
return callback(null, false); // no api key, no authorization...
}

@@ -77,7 +77,7 @@ AuthBackend.prototype.authorizedByAPIKey = function(user, res, callback) {
return callback(err);
}

if ( !isValidApiKey(apikey)) {
if (!isValidApiKey(apikey)) {
const error = new Error('Unauthorized');
error.type = 'auth';
error.subtype = 'api-key-not-found';
@@ -109,7 +109,7 @@ AuthBackend.prototype.authorizedByAPIKey = function(user, res, callback) {
};

function isNameNotFoundError (err) {
return err.message && -1 !== err.message.indexOf('name not found');
return err.message && err.message.indexOf('name not found') !== -1;
}

function usernameMatches (basicAuthUsername, requestUsername) {
@@ -123,7 +123,7 @@ function usernameMatches (basicAuthUsername, requestUsername) {
* @param res - standard res object. Contains the auth parameters in locals
* @param callback function(err, allowed) is access allowed not?
*/
AuthBackend.prototype.authorize = function(req, res, callback) {
AuthBackend.prototype.authorize = function (req, res, callback) {
var user = res.locals.user;

this.authorizedByAPIKey(user, res, (err, isAuthorizedByApikey) => {
@@ -7,6 +7,7 @@ const AggregationMapConfig = require('../models/aggregation/aggregation-mapconfi

const WebMercatorHelper = require('cartodb-query-tables').utils.webMercatorHelper;
const webmercator = new WebMercatorHelper();
const queryUtils = require('../../lib/utils/query-utils');

module.exports = class ClusterBackend {
getClusterFeatures (mapConfigProvider, params, callback) {
@@ -71,8 +72,8 @@ function getFeatures (pg, layer, params, callback) {
}

const SKIP_COLUMNS = {
'the_geom': true,
'the_geom_webmercator': true
the_geom: true,
the_geom_webmercator: true
};

function getColumnsName (pg, query, callback) {
@@ -89,7 +90,7 @@ function getColumnsName (pg, query, callback) {

const fields = resultSet.fields || [];
const columnNames = fields.map(field => field.name)
.filter(columnName => !SKIP_COLUMNS[columnName]);
.filter(columnName => !SKIP_COLUMNS[queryUtils.stripQuotes(columnName)]);

return callback(null, columnNames);
}, true);
@@ -100,7 +101,7 @@ function getClusterFeatures (pg, zoom, clusterId, columns, query, resolution, ag
zoom: zoom,
id: clusterId,
query: query,
res: 256/resolution,
res: 256 / resolution,
columns: columns
});

@@ -127,7 +128,7 @@ function getClusterFeatures (pg, zoom, clusterId, columns, query, resolution, ag
}

return callback(null, data);
} , true); // use read-only transaction
}, true); // use read-only transaction
}

const schemaQuery = ctx => `SELECT * FROM (${ctx.query}) __cdb_cluster_schema LIMIT 0`;
@@ -159,8 +160,8 @@ const clusterFeaturesQuery = ctx => `
`;

const gridResolution = ctx => {
const zoomResolution = webmercator.getResolution({ z : Math.min(38, ctx.zoom) });
return `${256/ctx.res} * (${zoomResolution})::double precision`;
const zoomResolution = webmercator.getResolution({ z: Math.min(38, ctx.zoom) });
return `${256 / ctx.res} * (${zoomResolution})::double precision`;
};

const aggregationQuery = ctx => `
@@ -194,9 +195,8 @@ function parseAggregation (aggregation) {
try {
aggregation = JSON.parse(aggregation);
} catch (err) {
throw new Error(`Invalid aggregation input, should be a a valid JSON`);
throw new Error('Invalid aggregation input, should be a a valid JSON');
}

}

return aggregation;
@@ -207,7 +207,7 @@ function validateAggregation (aggregation) {
const { columns, expressions } = aggregation;

if (!hasColumns(columns)) {
throw new Error(`Invalid aggregation input, columns should be and array of column names`);
throw new Error('Invalid aggregation input, columns should be and array of column names');
}

validateExpressions(expressions);
@@ -221,16 +221,16 @@ function hasColumns (columns) {
function validateExpressions (expressions) {
if (expressions !== undefined) {
if (!isValidExpression(expressions)) {
throw new Error(`Invalid aggregation input, expressions should be and object with valid functions`);
throw new Error('Invalid aggregation input, expressions should be and object with valid functions');
}

for (const { aggregate_function, aggregated_column } of Object.values(expressions)) {
if (typeof aggregated_column !== 'string') {
throw new Error(`Invalid aggregation input, aggregated column should be an string`);
for (const { aggregate_function: aggregateFunction, aggregated_column: aggregatedColumn } of Object.values(expressions)) {
if (typeof aggregatedColumn !== 'string') {
throw new Error('Invalid aggregation input, aggregated column should be an string');
}

if (typeof aggregate_function !== 'string') {
throw new Error(`Invalid aggregation input, aggregate function should be an string`);
if (typeof aggregateFunction !== 'string') {
throw new Error('Invalid aggregation input, aggregate function should be an string');
}
}
}
@@ -3,18 +3,20 @@
|
||||
var _ = require('underscore');
|
||||
var PSQL = require('cartodb-psql');
|
||||
var BBoxFilter = require('../models/filter/bbox');
|
||||
const CircleFilter = require('../models/filter/circle');
|
||||
const PolygonFilter = require('../models/filter/polygon');
|
||||
var DataviewFactory = require('../models/dataview/factory');
|
||||
var DataviewFactoryWithOverviews = require('../models/dataview/overviews/factory');
|
||||
const dbParamsFromReqParams = require('../utils/database-params');
|
||||
var OverviewsQueryRewriter = require('../utils/overviews_query_rewriter');
|
||||
var OverviewsQueryRewriter = require('../utils/overviews-query-rewriter');
|
||||
var overviewsQueryRewriter = new OverviewsQueryRewriter({
|
||||
zoom_level: 'CDB_ZoomFromScale(!scale_denominator!)'
|
||||
zoom_level: 'cartodb.CDB_ZoomFromScale(!scale_denominator!)'
|
||||
});
|
||||
|
||||
var dot = require('dot');
|
||||
dot.templateSettings.strip = false;
|
||||
|
||||
function DataviewBackend(analysisBackend) {
|
||||
function DataviewBackend (analysisBackend) {
|
||||
this.analysisBackend = analysisBackend;
|
||||
}
|
||||
|
||||
@@ -84,14 +86,20 @@ function getQueryWithFilters (dataviewDefinition, params) {
|
||||
var query = getDataviewQuery(dataviewDefinition, ownFilter, noFilters);
|
||||
|
||||
if (params.bbox) {
|
||||
var bboxFilter = new BBoxFilter({column: 'the_geom_webmercator', srid: 3857}, {bbox: params.bbox});
|
||||
var bboxFilter = new BBoxFilter({ column: 'the_geom_webmercator', srid: 3857 }, { bbox: params.bbox });
|
||||
query = bboxFilter.sql(query);
|
||||
} else if (params.circle) {
|
||||
const circleFilter = new CircleFilter({ column: 'the_geom_webmercator', srid: 3857 }, { circle: params.circle });
|
||||
query = circleFilter.sql(query);
|
||||
} else if (params.polygon) {
|
||||
const polygonFilter = new PolygonFilter({ column: 'the_geom_webmercator', srid: 3857 }, { polygon: params.polygon });
|
||||
query = polygonFilter.sql(query);
|
||||
}
|
||||
|
||||
return query;
|
||||
}
|
||||
|
||||
function getDataviewQuery(dataviewDefinition, ownFilter, noFilters) {
|
||||
function getDataviewQuery (dataviewDefinition, ownFilter, noFilters) {
|
||||
if (noFilters) {
|
||||
return dataviewDefinition.sql.no_filters;
|
||||
} else if (ownFilter === 1) {
|
||||
@@ -101,9 +109,9 @@ function getDataviewQuery(dataviewDefinition, ownFilter, noFilters) {
|
||||
}
|
||||
}
|
||||
|
||||
function getQueryRewriteData(mapConfig, dataviewDefinition, params) {
|
||||
function getQueryRewriteData (mapConfig, dataviewDefinition, params) {
|
||||
var sourceId = dataviewDefinition.source.id; // node.id
|
||||
var layer = _.find(mapConfig.obj().layers, function(l) {
|
||||
var layer = _.find(mapConfig.obj().layers, function (l) {
|
||||
return l.options.source && (l.options.source.id === sourceId);
|
||||
});
|
||||
var queryRewriteData = layer && layer.options.query_rewrite_data;
|
||||
@@ -115,7 +123,7 @@ function getQueryRewriteData(mapConfig, dataviewDefinition, params) {
|
||||
}
|
||||
|
||||
if (params.bbox && queryRewriteData) {
|
||||
var bbox_filter_definition = {
|
||||
var bboxFilterDefinition = {
|
||||
type: 'bbox',
|
||||
options: {
|
||||
column: 'the_geom_webmercator',
|
||||
@@ -125,22 +133,22 @@ function getQueryRewriteData(mapConfig, dataviewDefinition, params) {
|
||||
bbox: params.bbox
|
||||
}
|
||||
};
|
||||
queryRewriteData = _.extend(queryRewriteData, { bbox_filter: bbox_filter_definition });
|
||||
queryRewriteData = _.extend(queryRewriteData, { bbox_filter: bboxFilterDefinition });
|
||||
}
|
||||
|
||||
return queryRewriteData;
|
||||
}
|
||||
|
||||
function getOverrideParams(params, ownFilter) {
|
||||
function getOverrideParams (params, ownFilter) {
|
||||
var overrideParams = _.reduce(_.pick(params, 'start', 'end', 'bins', 'offset', 'categories'),
|
||||
function castNumbers(overrides, val, k) {
|
||||
function castNumbers (overrides, val, k) {
|
||||
if (!Number.isFinite(+val)) {
|
||||
throw new Error('Invalid number format for parameter \'' + k + '\'');
|
||||
}
|
||||
overrides[k] = +val;
|
||||
return overrides;
|
||||
},
|
||||
{ownFilter: ownFilter}
|
||||
{ ownFilter: ownFilter }
|
||||
);
|
||||
|
||||
// validation will be delegated to the proper dataview
|
||||
@@ -197,12 +205,18 @@ function getQueryWithOwnFilters (dataviewDefinition, params) {
|
||||
if (params.bbox) {
|
||||
var bboxFilter = new BBoxFilter({ column: 'the_geom', srid: 4326 }, { bbox: params.bbox });
|
||||
query = bboxFilter.sql(query);
|
||||
} else if (params.circle) {
|
||||
const circleFilter = new CircleFilter({ column: 'the_geom', srid: 4326 }, { circle: params.circle });
|
||||
query = circleFilter.sql(query);
|
||||
} else if (params.polygon) {
|
||||
const polygonFilter = new PolygonFilter({ column: 'the_geom', srid: 4326 }, { polygon: params.polygon });
|
||||
query = polygonFilter.sql(query);
|
||||
}
|
||||
|
||||
return query;
|
||||
}
|
||||
|
||||
function getDataviewDefinition(mapConfig, dataviewName) {
|
||||
function getDataviewDefinition (mapConfig, dataviewName) {
|
||||
var dataviews = mapConfig.dataviews || {};
|
||||
return dataviews[dataviewName];
|
||||
}
|
||||
@@ -3,32 +3,32 @@
|
||||
var _ = require('underscore');
|
||||
var AnalysisFilter = require('../models/filter/analysis');
|
||||
|
||||
function FilterStatsBackends(pgQueryRunner) {
|
||||
function FilterStatsBackends (pgQueryRunner) {
|
||||
this.pgQueryRunner = pgQueryRunner;
|
||||
}
|
||||
|
||||
module.exports = FilterStatsBackends;
|
||||
|
||||
function getEstimatedRows(pgQueryRunner, username, query, callback) {
|
||||
pgQueryRunner.run(username, "EXPLAIN (FORMAT JSON)"+query, function(err, result_rows) {
|
||||
if (err){
|
||||
function getEstimatedRows (pgQueryRunner, username, query, callback) {
|
||||
pgQueryRunner.run(username, 'EXPLAIN (FORMAT JSON)' + query, function (err, resultRows) {
|
||||
if (err) {
|
||||
callback(err);
|
||||
return;
|
||||
}
|
||||
var rows;
|
||||
if ( result_rows[0] && result_rows[0]['QUERY PLAN'] &&
|
||||
result_rows[0]['QUERY PLAN'][0] && result_rows[0]['QUERY PLAN'][0].Plan ) {
|
||||
rows = result_rows[0]['QUERY PLAN'][0].Plan['Plan Rows'];
|
||||
if (resultRows[0] && resultRows[0]['QUERY PLAN'] &&
|
||||
resultRows[0]['QUERY PLAN'][0] && resultRows[0]['QUERY PLAN'][0].Plan) {
|
||||
rows = resultRows[0]['QUERY PLAN'][0].Plan['Plan Rows'];
|
||||
}
|
||||
return callback(null, rows);
|
||||
});
|
||||
}
|
||||
|
||||
FilterStatsBackends.prototype.getFilterStats = function (username, unfiltered_query, filters, callback) {
|
||||
FilterStatsBackends.prototype.getFilterStats = function (username, unfilteredQuery, filters, callback) {
|
||||
var stats = {};
|
||||
|
||||
getEstimatedRows(this.pgQueryRunner, username, unfiltered_query, (err, rows) => {
|
||||
if (err){
|
||||
getEstimatedRows(this.pgQueryRunner, username, unfilteredQuery, (err, rows) => {
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
|
||||
@@ -39,10 +39,10 @@ FilterStatsBackends.prototype.getFilterStats = function (username, unfiltered_qu
|
||||
}
|
||||
|
||||
var analysisFilter = new AnalysisFilter(filters);
|
||||
var query = analysisFilter.sql(unfiltered_query);
|
||||
var query = analysisFilter.sql(unfilteredQuery);
|
||||
|
||||
getEstimatedRows(this.pgQueryRunner, username, query, (err, rows) => {
|
||||
if (err){
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
'use strict';
|
||||
|
||||
function EmptyLayerStats(types) {
|
||||
function EmptyLayerStats (types) {
|
||||
this._types = types || {};
|
||||
}
|
||||
|
||||
@@ -10,7 +10,7 @@ EmptyLayerStats.prototype.is = function (type) {
|
||||
|
||||
EmptyLayerStats.prototype.getStats =
|
||||
function (layer, dbConnection, callback) {
|
||||
setImmediate(function() {
|
||||
setImmediate(function () {
|
||||
callback(null, {});
|
||||
});
|
||||
};
|
||||
@@ -5,7 +5,7 @@ var EmptyLayerStats = require('./empty-layer-stats');
|
||||
var MapnikLayerStats = require('./mapnik-layer-stats');
|
||||
var TorqueLayerStats = require('./torque-layer-stats');
|
||||
|
||||
module.exports = function LayerStatsFactory(type) {
|
||||
module.exports = function LayerStatsFactory (type) {
|
||||
var layerStatsIterator = [];
|
||||
var selectedType = type || 'ALL';
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
var queue = require('queue-async');
|
||||
|
||||
function LayerStats(layerStatsIterator) {
|
||||
function LayerStats (layerStatsIterator) {
|
||||
this.layerStatsIterator = layerStatsIterator;
|
||||
}
|
||||
|
||||
@@ -41,7 +41,6 @@ LayerStats.prototype.getStats = function (mapConfig, dbConnection, callback) {
|
||||
|
||||
return callback(err, stats);
|
||||
});
|
||||
|
||||
};
|
||||
|
||||
module.exports = LayerStats;
|
||||
@@ -15,7 +15,7 @@ MapnikLayerStats.prototype.is = function (type) {
|
||||
return this._types[type] ? this._types[type] : false;
|
||||
};
|
||||
|
||||
function columnAggregations(field) {
|
||||
function columnAggregations (field) {
|
||||
if (field.type === 'number') {
|
||||
return ['min', 'max', 'avg', 'sum'];
|
||||
}
|
||||
@@ -28,25 +28,24 @@ function columnAggregations(field) {
|
||||
return [];
|
||||
}
|
||||
|
||||
function _getSQL(ctx, query, type='pre', zoom=0) {
|
||||
function _getSQL (ctx, query, type = 'pre', zoom = 0) {
|
||||
let sql;
|
||||
if (type === 'pre') {
|
||||
sql = ctx.preQuery;
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
sql = ctx.aggrQuery;
|
||||
}
|
||||
sql = queryUtils.substituteTokensForZoom(sql, zoom || 0);
|
||||
return query(sql);
|
||||
}
|
||||
|
||||
function _estimatedFeatureCount(ctx) {
|
||||
function _estimatedFeatureCount (ctx) {
|
||||
return queryUtils.queryPromise(ctx.dbConnection, _getSQL(ctx, queryUtils.getQueryRowEstimation))
|
||||
.then(res => ({ estimatedFeatureCount: res.rows[0].rows }))
|
||||
.catch(() => ({ estimatedFeatureCount: -1 }));
|
||||
}
|
||||
|
||||
function _featureCount(ctx) {
|
||||
function _featureCount (ctx) {
|
||||
if (ctx.metaOptions.featureCount) {
|
||||
// TODO: if ctx.metaOptions.columnStats we can combine this with column stats query
|
||||
return queryUtils.queryPromise(ctx.dbConnection, _getSQL(ctx, queryUtils.getQueryActualRowCount))
|
||||
@@ -55,20 +54,20 @@ function _featureCount(ctx) {
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
function _aggrFeatureCount(ctx) {
|
||||
if (ctx.metaOptions.hasOwnProperty('aggrFeatureCount')) {
|
||||
function _aggrFeatureCount (ctx) {
|
||||
if (Object.prototype.hasOwnProperty.call(ctx.metaOptions, 'aggrFeatureCount')) {
|
||||
// We expect as zoom level as the value of aggrFeatureCount
|
||||
// TODO: it'd be nice to admit an array of zoom levels to
|
||||
// return metadata for multiple levels.
|
||||
return queryUtils.queryPromise(
|
||||
ctx.dbConnection,
|
||||
_getSQL(ctx, queryUtils.getQueryActualRowCount, 'post', ctx.metaOptions.aggrFeatureCount)
|
||||
_getSQL(ctx, queryUtils.getQueryActualRowCount, 'post', ctx.metaOptions.aggrFeatureCount)
|
||||
).then(res => ({ aggrFeatureCount: res.rows[0].rows }));
|
||||
}
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
function _geometryType(ctx) {
|
||||
function _geometryType (ctx) {
|
||||
if (ctx.metaOptions.geometryType) {
|
||||
const geometryColumn = AggregationMapConfig.getAggregationGeometryColumn();
|
||||
const sqlQuery = _getSQL(ctx, sql => queryUtils.getQueryGeometryType(sql, geometryColumn));
|
||||
@@ -78,7 +77,7 @@ function _geometryType(ctx) {
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
function _columns(ctx) {
|
||||
function _columns (ctx) {
|
||||
if (ctx.metaOptions.columns || ctx.metaOptions.columnStats || ctx.metaOptions.dimensions) {
|
||||
// note: post-aggregation columns are in layer.options.columns when aggregation is present
|
||||
return queryUtils.queryPromise(ctx.dbConnection, _getSQL(ctx, sql => queryUtils.getQueryLimited(sql, 0)))
|
||||
@@ -89,7 +88,7 @@ function _columns(ctx) {
|
||||
|
||||
// combine a list of results merging the properties of all the objects
|
||||
// undefined results are admitted and ignored
|
||||
function mergeResults(results) {
|
||||
function mergeResults (results) {
|
||||
if (results) {
|
||||
if (results.length === 0) {
|
||||
return {};
|
||||
@@ -108,15 +107,15 @@ function mergeResults(results) {
|
||||
|
||||
// deeper (1 level) combination of a list of objects:
|
||||
// mergeColumns([{ col1: { a: 1 }, col2: { a: 2 } }, { col1: { b: 3 } }]) => { col1: { a: 1, b: 3 }, col2: { a: 2 } }
|
||||
function mergeColumns(results) {
|
||||
function mergeColumns (results) {
|
||||
if (results) {
|
||||
if (results.length === 0) {
|
||||
return {};
|
||||
}
|
||||
return results.reduce((a, b) => {
|
||||
let c = Object.assign({}, b || {}, a || {});
|
||||
const c = Object.assign({}, b || {}, a || {});
|
||||
Object.keys(c).forEach(key => {
|
||||
if (b.hasOwnProperty(key)) {
|
||||
if (Object.prototype.hasOwnProperty.call(b, key)) {
|
||||
c[key] = Object.assign(c[key], b[key]);
|
||||
}
|
||||
});
|
||||
@@ -127,7 +126,7 @@ function mergeColumns(results) {
|
||||
|
||||
const DEFAULT_SAMPLE_ROWS = 100;
|
||||
|
||||
function _sample(ctx) {
|
||||
function _sample (ctx) {
|
||||
if (!ctx.metaOptions.sample) {
|
||||
return Promise.resolve();
|
||||
}
|
||||
@@ -164,32 +163,32 @@ function _getSampleValuesFromRange (min, span, limit) {
|
||||
return Array.from(sample);
|
||||
}
|
||||
|
||||
function _columnsMetadataRequired(options) {
|
||||
function _columnsMetadataRequired (options) {
|
||||
// We need determine the columns of a query
|
||||
// if either column stats or dimension stats are required,
|
||||
// since we'll ultimately use the same query to fetch both
|
||||
return options.columnStats || options.dimensions;
|
||||
}
|
||||
|
||||
function _columnStats(ctx, columns, dimensions) {
|
||||
function _columnStats (ctx, columns, dimensions) {
|
||||
if (!columns) {
|
||||
return Promise.resolve();
|
||||
}
|
||||
if (_columnsMetadataRequired(ctx.metaOptions)) {
|
||||
let queries = [];
|
||||
const queries = [];
|
||||
let aggr = [];
|
||||
if (ctx.metaOptions.columnStats) {
|
||||
queries.push(new Promise(resolve => resolve({ columns }))); // add columns as first result
|
||||
Object.keys(columns).forEach(name => {
|
||||
aggr = aggr.concat(
|
||||
columnAggregations(columns[name])
|
||||
.map(fn => `${fn}("${name}") AS "${name}_${fn}"`)
|
||||
.map(fn => `${fn}("${name}") AS "${name}_${fn}"`)
|
||||
);
|
||||
if (columns[name].type === 'string') {
|
||||
const topN = ctx.metaOptions.columnStats.topCategories || 1024;
|
||||
const includeNulls = ctx.metaOptions.columnStats.hasOwnProperty('includeNulls') ?
|
||||
ctx.metaOptions.columnStats.includeNulls :
|
||||
true;
|
||||
const includeNulls = Object.prototype.hasOwnProperty.call(ctx.metaOptions.columnStats, 'includeNulls')
|
||||
? ctx.metaOptions.columnStats.includeNulls
|
||||
: true;
|
||||
|
||||
// TODO: ctx.metaOptions.columnStats.maxCategories
|
||||
// => use PG stats to dismiss columns with more distinct values
|
||||
@@ -223,7 +222,7 @@ function _columnStats(ctx, columns, dimensions) {
|
||||
ctx.dbConnection,
|
||||
_getSQL(ctx, sql => `SELECT ${aggr.join(',')} FROM (${sql}) AS __cdb_query`)
|
||||
).then(res => {
|
||||
let stats = { columns: {}, dimensions: {} };
|
||||
const stats = { columns: {}, dimensions: {} };
|
||||
Object.keys(columns).forEach(name => {
|
||||
stats.columns[name] = {};
|
||||
columnAggregations(columns[name]).forEach(fn => {
|
||||
@@ -245,62 +244,62 @@ function _columnStats(ctx, columns, dimensions) {
|
||||
);
|
||||
return Promise.all(queries).then(results => ({
|
||||
columns: mergeColumns(results.map(r => r.columns)),
|
||||
dimensions: mergeColumns(results.map( r => r.dimensions))
|
||||
dimensions: mergeColumns(results.map(r => r.dimensions))
|
||||
}));
|
||||
}
|
||||
return Promise.resolve({ columns });
|
||||
}
|
||||
|
||||
// This is adapted from SQL API:
|
||||
function fieldType(cname) {
|
||||
function fieldType (cname) {
|
||||
let tname;
|
||||
switch (true) {
|
||||
case /bool/.test(cname):
|
||||
tname = 'boolean';
|
||||
break;
|
||||
case /int|float|numeric/.test(cname):
|
||||
tname = 'number';
|
||||
break;
|
||||
case /text|char|unknown/.test(cname):
|
||||
tname = 'string';
|
||||
break;
|
||||
case /date|time/.test(cname):
|
||||
tname = 'date';
|
||||
break;
|
||||
default:
|
||||
tname = cname;
|
||||
case /bool/.test(cname):
|
||||
tname = 'boolean';
|
||||
break;
|
||||
case /int|float|numeric/.test(cname):
|
||||
tname = 'number';
|
||||
break;
|
||||
case /text|char|unknown/.test(cname):
|
||||
tname = 'string';
|
||||
break;
|
||||
case /date|time/.test(cname):
|
||||
tname = 'date';
|
||||
break;
|
||||
default:
|
||||
tname = cname;
|
||||
}
|
||||
if ( tname && cname.match(/^_/) ) {
|
||||
if (tname && cname.match(/^_/)) {
|
||||
tname += '[]';
|
||||
}
|
||||
return tname;
|
||||
}
|
||||
|
||||
function fieldTypeSafe(dbConnection, field) {
|
||||
function fieldTypeSafe (dbConnection, field) {
|
||||
const cname = dbConnection.typeName(field.dataTypeID);
|
||||
return cname ? fieldType(cname) : `unknown(${field.dataTypeID})`;
|
||||
}
|
||||
|
||||
// columns are returned as an object { columnName1: { type1: ...}, ..}
|
||||
// for consistency with SQL API
|
||||
function formatResultFields(dbConnection, fields = []) {
|
||||
let nfields = {};
|
||||
for (let field of fields) {
|
||||
nfields[field.name] = { type: fieldTypeSafe(dbConnection, field) };
|
||||
function formatResultFields (dbConnection, fields = []) {
|
||||
const nfields = {};
|
||||
for (const field of fields) {
|
||||
nfields[field.name] = { type: fieldTypeSafe(dbConnection, field) };
|
||||
}
|
||||
return nfields;
|
||||
}
|
||||
|
||||
MapnikLayerStats.prototype.getStats =
|
||||
function (layer, dbConnection, callback) {
|
||||
let aggrQuery = layer.options.sql;
|
||||
let preQuery = layer.options.sql_raw || aggrQuery;
|
||||
const aggrQuery = layer.options.sql;
|
||||
const preQuery = layer.options.sql_raw || aggrQuery;
|
||||
|
||||
let ctx = {
|
||||
const ctx = {
|
||||
dbConnection,
|
||||
preQuery,
|
||||
aggrQuery,
|
||||
metaOptions: layer.options.metadata || {},
|
||||
metaOptions: layer.options.metadata || {}
|
||||
};
|
||||
|
||||
// TODO: could save some queries if queryUtils.getAggregationMetadata() has been used and kept somewhere
|
||||
@@ -316,7 +315,7 @@ function (layer, dbConnection, callback) {
|
||||
Promise.all([
|
||||
_estimatedFeatureCount(ctx).then(
|
||||
({ estimatedFeatureCount }) => _sample(ctx)
|
||||
.then(sampleResults => mergeResults([ sampleResults, { estimatedFeatureCount }] ))
|
||||
.then(sampleResults => mergeResults([sampleResults, { estimatedFeatureCount }]))
|
||||
),
|
||||
_featureCount(ctx),
|
||||
_aggrFeatureCount(ctx),
|
||||
@@ -1,6 +1,6 @@
|
||||
'use strict';
|
||||
|
||||
function TorqueLayerStats() {
|
||||
function TorqueLayerStats () {
|
||||
this._types = {
|
||||
torque: true
|
||||
};
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
const queryUtils = require('../utils/query-utils');
|
||||
|
||||
function OverviewsMetadataBackend(pgQueryRunner) {
|
||||
function OverviewsMetadataBackend (pgQueryRunner) {
|
||||
this.pgQueryRunner = pgQueryRunner;
|
||||
}
|
||||
|
||||
@@ -12,20 +12,20 @@ OverviewsMetadataBackend.prototype.getOverviewsMetadata = function (username, sq
|
||||
// FIXME: Currently using internal function _cdb_schema_name
|
||||
// CDB_Overviews should provide the schema information directly.
|
||||
const query = `
|
||||
SELECT *, _cdb_schema_name(base_table)
|
||||
FROM CDB_Overviews(
|
||||
CDB_QueryTablesText($windshaft$${queryUtils.substituteDummyTokens(sql)}$windshaft$)
|
||||
SELECT *, cartodb._cdb_schema_name(base_table)
|
||||
FROM cartodb.CDB_Overviews(
|
||||
cartodb.CDB_QueryTablesText($windshaft$${queryUtils.substituteDummyTokens(sql)}$windshaft$)
|
||||
);
|
||||
`;
|
||||
this.pgQueryRunner.run(username, query, function handleOverviewsRows(err, rows) {
|
||||
if (err){
|
||||
this.pgQueryRunner.run(username, query, function handleOverviewsRows (err, rows) {
|
||||
if (err) {
|
||||
callback(err);
|
||||
return;
|
||||
}
|
||||
var metadata = rows.reduce(function(metadata, row){
|
||||
var metadata = rows.reduce(function (metadata, row) {
|
||||
var table = row.base_table;
|
||||
var schema = row._cdb_schema_name;
|
||||
if ( !metadata[table] ) {
|
||||
if (!metadata[table]) {
|
||||
metadata[table] = {};
|
||||
}
|
||||
metadata[table][row.z] = { table: row.overview_table };
|
||||
@@ -3,14 +3,14 @@
|
||||
var PSQL = require('cartodb-psql');
|
||||
var _ = require('underscore');
|
||||
const debug = require('debug')('cachechan');
|
||||
const dbParamsFromReqParams = require('../utils/database-params');
|
||||
|
||||
function PgConnection(metadataBackend) {
|
||||
function PgConnection (metadataBackend) {
|
||||
this.metadataBackend = metadataBackend;
|
||||
}
|
||||
|
||||
module.exports = PgConnection;
|
||||
|
||||
|
||||
// Set db authentication parameters to those of the given username
|
||||
//
|
||||
// @param username the cartodb username, mapped to a database username
|
||||
@@ -21,7 +21,7 @@ module.exports = PgConnection;
|
||||
//
|
||||
// @param callback function(err)
|
||||
//
|
||||
PgConnection.prototype.setDBAuth = function(username, params, apikeyType, callback) {
|
||||
PgConnection.prototype.setDBAuth = function (username, params, apikeyType, callback) {
|
||||
if (apikeyType === 'master') {
|
||||
this.metadataBackend.getMasterApikey(username, (err, apikey) => {
|
||||
if (err) {
|
||||
@@ -36,7 +36,7 @@ PgConnection.prototype.setDBAuth = function(username, params, apikeyType, callba
|
||||
|
||||
return callback();
|
||||
});
|
||||
} else if (apikeyType === 'regular') { //Actually it can be any type of api key
|
||||
} else if (apikeyType === 'regular') { // Actually it can be any type of api key
|
||||
this.metadataBackend.getApikey(username, params.api_key, (err, apikey) => {
|
||||
if (err) {
|
||||
if (isNameNotFoundError(err)) {
|
||||
@@ -70,10 +70,9 @@ PgConnection.prototype.setDBAuth = function(username, params, apikeyType, callba
|
||||
};
|
||||
|
||||
function isNameNotFoundError (err) {
|
||||
return err.message && -1 !== err.message.indexOf('name not found');
|
||||
return err.message && err.message.indexOf('name not found') !== -1;
|
||||
}
|
||||
|
||||
|
||||
// Set db connection parameters to those for the given username
|
||||
//
|
||||
// @param dbowner cartodb username of database owner,
|
||||
@@ -85,7 +84,7 @@ function isNameNotFoundError (err) {
|
||||
//
|
||||
// @param callback function(err)
|
||||
//
|
||||
PgConnection.prototype.setDBConn = function(dbowner, params, callback) {
|
||||
PgConnection.prototype.setDBConn = function (dbowner, params, callback) {
|
||||
_.defaults(params, {
|
||||
// dbuser: global.environment.postgres.user,
|
||||
// dbpassword: global.environment.postgres.password,
|
||||
@@ -117,25 +116,18 @@ PgConnection.prototype.setDBConn = function(dbowner, params, callback) {
|
||||
* @param {Function} callback function({Error}, {PSQL})
|
||||
*/
|
||||
|
||||
PgConnection.prototype.getConnection = function(username, callback) {
|
||||
debug("getConn1");
|
||||
PgConnection.prototype.getConnection = function (username, callback) {
|
||||
debug('getConn1');
|
||||
|
||||
this.getDatabaseParams(username, (err, databaseParams) => {
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
return callback(err, new PSQL({
|
||||
user: databaseParams.dbuser,
|
||||
pass: databaseParams.dbpass,
|
||||
host: databaseParams.dbhost,
|
||||
port: databaseParams.dbport,
|
||||
dbname: databaseParams.dbname
|
||||
}));
|
||||
|
||||
return callback(err, new PSQL(dbParamsFromReqParams(databaseParams)));
|
||||
});
|
||||
};
|
||||
|
||||
PgConnection.prototype.getDatabaseParams = function(username, callback) {
|
||||
PgConnection.prototype.getDatabaseParams = function (username, callback) {
|
||||
const databaseParams = {};
|
||||
|
||||
this.setDBAuth(username, databaseParams, 'master', err => {
|
||||
@@ -1,8 +1,9 @@
|
||||
'use strict';
|
||||
|
||||
var PSQL = require('cartodb-psql');
|
||||
const dbParamsFromReqParams = require('../utils/database-params');
|
||||
|
||||
function PgQueryRunner(pgConnection) {
|
||||
function PgQueryRunner (pgConnection) {
|
||||
this.pgConnection = pgConnection;
|
||||
}
|
||||
|
||||
@@ -15,20 +16,13 @@ module.exports = PgQueryRunner;
|
||||
* @param {String} query
|
||||
* @param {Function} callback function({Error}, {Array}) second argument is guaranteed to be an array
|
||||
*/
|
||||
PgQueryRunner.prototype.run = function(username, query, callback) {
|
||||
|
||||
PgQueryRunner.prototype.run = function (username, query, callback) {
|
||||
this.pgConnection.getDatabaseParams(username, (err, databaseParams) => {
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
|
||||
const psql = new PSQL({
|
||||
user: databaseParams.dbuser,
|
||||
pass: databaseParams.dbpass,
|
||||
host: databaseParams.dbhost,
|
||||
port: databaseParams.dbport,
|
||||
dbname: databaseParams.dbname
|
||||
});
|
||||
const psql = new PSQL(dbParamsFromReqParams(databaseParams));
|
||||
|
||||
psql.query(query, function (err, resultSet) {
|
||||
resultSet = resultSet || {};
|
||||
@@ -2,14 +2,14 @@
|
||||
|
||||
var layerStats = require('./layer-stats/factory');
|
||||
|
||||
function StatsBackend() {
|
||||
function StatsBackend () {
|
||||
}
|
||||
|
||||
module.exports = StatsBackend;
|
||||
|
||||
StatsBackend.prototype.getStats = function(mapConfig, dbConnection, callback) {
|
||||
StatsBackend.prototype.getStats = function (mapConfig, dbConnection, callback) {
|
||||
var enabledFeatures = global.environment.enabledFeatures;
|
||||
var layerStatsEnabled = enabledFeatures ? enabledFeatures.layerStats: false;
|
||||
var layerStatsEnabled = enabledFeatures ? enabledFeatures.layerStats : false;
|
||||
if (layerStatsEnabled) {
|
||||
layerStats().getStats(mapConfig, dbConnection, callback);
|
||||
} else {
|
||||
@@ -1,6 +1,6 @@
|
||||
'use strict';
|
||||
|
||||
function TablesExtentBackend(pgQueryRunner) {
|
||||
function TablesExtentBackend (pgQueryRunner) {
|
||||
this.pgQueryRunner = pgQueryRunner;
|
||||
}
|
||||
|
||||
@@ -16,21 +16,21 @@ module.exports = TablesExtentBackend;
|
||||
* @param {Function} callback function(err, result) {Object} result with `west`, `south`, `east`, `north`
|
||||
*/
|
||||
TablesExtentBackend.prototype.getBounds = function (username, tables, callback) {
|
||||
var estimatedExtentSQLs = tables.map(function(table) {
|
||||
var estimatedExtentSQLs = tables.map(function (table) {
|
||||
return "ST_EstimatedExtent('" + table.schema_name + "', '" + table.table_name + "', 'the_geom_webmercator')";
|
||||
});
|
||||
|
||||
var query = [
|
||||
"WITH ext as (" +
|
||||
"SELECT ST_Transform(ST_SetSRID(ST_Extent(ST_Union(ARRAY[",
|
||||
estimatedExtentSQLs.join(','),
|
||||
"])), 3857), 4326) geom)",
|
||||
"SELECT",
|
||||
"ST_XMin(geom) west,",
|
||||
"ST_YMin(geom) south,",
|
||||
"ST_XMax(geom) east,",
|
||||
"ST_YMax(geom) north",
|
||||
"FROM ext"
|
||||
'WITH ext as (' +
|
||||
'SELECT ST_Transform(ST_SetSRID(ST_Extent(ST_Union(ARRAY[',
|
||||
estimatedExtentSQLs.join(','),
|
||||
'])), 3857), 4326) geom)',
|
||||
'SELECT',
|
||||
'ST_XMin(geom) west,',
|
||||
'ST_YMin(geom) south,',
|
||||
'ST_XMax(geom) east,',
|
||||
'ST_YMax(geom) north',
|
||||
'FROM ext'
|
||||
].join(' ');
|
||||
|
||||
this.pgQueryRunner.run(username, query, function handleBoundsResult (err, rows) {
|
||||
@@ -5,16 +5,14 @@ var debug = require('debug')('windshaft:templates');
|
||||
var _ = require('underscore');
|
||||
var dot = require('dot');
|
||||
|
||||
|
||||
var EventEmitter = require('events').EventEmitter;
|
||||
var util = require('util');
|
||||
|
||||
|
||||
// Class handling map templates
|
||||
//
|
||||
// See http://github.com/CartoDB/Windshaft-cartodb/wiki/Template-maps
|
||||
//
|
||||
// @param redis_pool an instance of a "redis-mpool"
|
||||
// @param redisPool an instance of a "redis-mpool"
|
||||
// See https://github.com/CartoDB/node-redis-mpool
|
||||
// Needs version 0.x.x of the API.
|
||||
//
|
||||
@@ -22,43 +20,42 @@ var util = require('util');
|
||||
// 'max_user_templates' limit on the number of per-user
|
||||
//
|
||||
//
|
||||
function TemplateMaps(redis_pool, opts) {
|
||||
if (!(this instanceof TemplateMaps)) {
|
||||
return new TemplateMaps();
|
||||
}
|
||||
function TemplateMaps (redisPool, opts) {
|
||||
if (!(this instanceof TemplateMaps)) {
|
||||
return new TemplateMaps();
|
||||
}
|
||||
|
||||
EventEmitter.call(this);
|
||||
EventEmitter.call(this);
|
||||
|
||||
this.redis_pool = redis_pool;
|
||||
this.opts = opts || {};
|
||||
this.redisPool = redisPool;
|
||||
this.opts = opts || {};
|
||||
|
||||
// Database containing templates
|
||||
// TODO: allow configuring ?
|
||||
// NOTE: currently it is the same as
|
||||
// the one containing layergroups
|
||||
this.db_signatures = 0;
|
||||
// Database containing templates
|
||||
// TODO: allow configuring ?
|
||||
// NOTE: currently it is the same as
|
||||
// the one containing layergroups
|
||||
this.db_signatures = 0;
|
||||
|
||||
//
|
||||
// Map templates are owned by a user that specifies access permissions
|
||||
// for their instances.
|
||||
//
|
||||
// We have the following datastores:
|
||||
//
|
||||
// 1. User templates: set of per-user map templates
|
||||
//
|
||||
// Map templates are owned by a user that specifies access permissions
|
||||
// for their instances.
|
||||
//
|
||||
// We have the following datastores:
|
||||
//
|
||||
// 1. User templates: set of per-user map templates
|
||||
|
||||
// User templates (HASH:tpl_id->tpl_val)
|
||||
this.key_usr_tpl = dot.template("map_tpl|{{=it.owner}}");
|
||||
// User templates (HASH:tplId->tpl_val)
|
||||
this.key_usr_tpl = dot.template('map_tpl|{{=it.owner}}');
|
||||
}
|
||||
|
||||
util.inherits(TemplateMaps, EventEmitter);
|
||||
|
||||
module.exports = TemplateMaps;
|
||||
|
||||
// --------------- PRIVATE METHODS --------------------------------
|
||||
|
||||
//--------------- PRIVATE METHODS --------------------------------
|
||||
|
||||
TemplateMaps.prototype._userTemplateLimit = function() {
|
||||
return this.opts.max_user_templates || 0;
|
||||
TemplateMaps.prototype._userTemplateLimit = function () {
|
||||
return this.opts.max_user_templates || 0;
|
||||
};
|
||||
|
||||
/**
|
||||
@@ -68,14 +65,14 @@ TemplateMaps.prototype._userTemplateLimit = function() {
|
||||
* @param redisArgs - the arguments for the redis function in an array
|
||||
* @param callback - function to pass results too.
|
||||
*/
|
||||
TemplateMaps.prototype._redisCmd = function(redisFunc, redisArgs, callback) {
|
||||
this.redis_pool.acquire(this.db_signatures, (err, redisClient) => {
|
||||
TemplateMaps.prototype._redisCmd = function (redisFunc, redisArgs, callback) {
|
||||
this.redisPool.acquire(this.db_signatures, (err, redisClient) => {
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
|
||||
redisClient[redisFunc.toUpperCase()](...redisArgs, (err, data) => {
|
||||
this.redis_pool.release(this.db_signatures, redisClient);
|
||||
this.redisPool.release(this.db_signatures, redisClient);
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
@@ -84,64 +81,64 @@ TemplateMaps.prototype._redisCmd = function(redisFunc, redisArgs, callback) {
|
||||
});
|
||||
};
|
||||
|
||||
var _reValidNameIdentifier = /^[a-z0-9][0-9a-z_\-]*$/i;
|
||||
var _reValidNameIdentifier = /^[a-z0-9][0-9a-z_-]*$/i;
|
||||
var _reValidPlaceholderIdentifier = /^[a-z][0-9a-z_]*$/i;
|
||||
// jshint maxcomplexity:15
|
||||
TemplateMaps.prototype._checkInvalidTemplate = function(template) {
|
||||
if ( template.version !== '0.0.1' ) {
|
||||
return new Error("Unsupported template version " + template.version);
|
||||
}
|
||||
var tplname = template.name;
|
||||
if ( ! tplname ) {
|
||||
return new Error("Missing template name");
|
||||
}
|
||||
if ( ! tplname.match(_reValidNameIdentifier) ) {
|
||||
return new Error("Invalid characters in template name '" + tplname + "'");
|
||||
}
|
||||
TemplateMaps.prototype._checkInvalidTemplate = function (template) {
|
||||
if (template.version !== '0.0.1') {
|
||||
return new Error('Unsupported template version ' + template.version);
|
||||
}
|
||||
var tplname = template.name;
|
||||
if (!tplname) {
|
||||
return new Error('Missing template name');
|
||||
}
|
||||
if (!tplname.match(_reValidNameIdentifier)) {
|
||||
return new Error("Invalid characters in template name '" + tplname + "'");
|
||||
}
|
||||
|
||||
var invalidError = isInvalidLayergroup(template.layergroup);
|
||||
if (invalidError) {
|
||||
return invalidError;
|
||||
}
|
||||
var invalidError = isInvalidLayergroup(template.layergroup);
|
||||
if (invalidError) {
|
||||
return invalidError;
|
||||
}
|
||||
|
||||
var placeholders = template.placeholders || {};
|
||||
var placeholders = template.placeholders || {};
|
||||
|
||||
var placeholderKeys = Object.keys(placeholders);
|
||||
for (var i = 0, len = placeholderKeys.length; i < len; i++) {
|
||||
var placeholderKey = placeholderKeys[i];
|
||||
var placeholderKeys = Object.keys(placeholders);
|
||||
for (var i = 0, len = placeholderKeys.length; i < len; i++) {
|
||||
var placeholderKey = placeholderKeys[i];
|
||||
|
||||
if (!placeholderKey.match(_reValidPlaceholderIdentifier)) {
|
||||
return new Error("Invalid characters in placeholder name '" + placeholderKey + "'");
|
||||
}
|
||||
if ( ! placeholders[placeholderKey].hasOwnProperty('default') ) {
|
||||
return new Error("Missing default for placeholder '" + placeholderKey + "'");
|
||||
}
|
||||
if ( ! placeholders[placeholderKey].hasOwnProperty('type') ) {
|
||||
return new Error("Missing type for placeholder '" + placeholderKey + "'");
|
||||
}
|
||||
}
|
||||
if (!placeholderKey.match(_reValidPlaceholderIdentifier)) {
|
||||
return new Error("Invalid characters in placeholder name '" + placeholderKey + "'");
|
||||
}
|
||||
if (!Object.prototype.hasOwnProperty.call(placeholders[placeholderKey], 'default')) {
|
||||
return new Error("Missing default for placeholder '" + placeholderKey + "'");
|
||||
}
|
||||
if (!Object.prototype.hasOwnProperty.call(placeholders[placeholderKey], 'type')) {
|
||||
return new Error("Missing type for placeholder '" + placeholderKey + "'");
|
||||
}
|
||||
}
|
||||
|
||||
var auth = template.auth || {};
|
||||
|
||||
switch ( auth.method ) {
|
||||
case 'open':
|
||||
break;
|
||||
case 'token':
|
||||
if ( ! _.isArray(auth.valid_tokens) ) {
|
||||
return new Error("Invalid 'token' authentication: missing valid_tokens");
|
||||
}
|
||||
if ( ! auth.valid_tokens.length ) {
|
||||
return new Error("Invalid 'token' authentication: no valid_tokens");
|
||||
}
|
||||
break;
|
||||
default:
|
||||
return new Error("Unsupported authentication method: " + auth.method);
|
||||
switch (auth.method) {
|
||||
case 'open':
|
||||
break;
|
||||
case 'token':
|
||||
if (!_.isArray(auth.valid_tokens)) {
|
||||
return new Error("Invalid 'token' authentication: missing valid_tokens");
|
||||
}
|
||||
if (!auth.valid_tokens.length) {
|
||||
return new Error("Invalid 'token' authentication: no valid_tokens");
|
||||
}
|
||||
break;
|
||||
default:
|
||||
return new Error('Unsupported authentication method: ' + auth.method);
|
||||
}
|
||||
|
||||
return false;
|
||||
};
|
||||
|
||||
function isInvalidLayergroup(layergroup) {
|
||||
function isInvalidLayergroup (layergroup) {
|
||||
if (!layergroup) {
|
||||
return new Error('Missing layergroup');
|
||||
}
|
||||
@@ -153,10 +150,10 @@ function isInvalidLayergroup(layergroup) {
|
||||
}
|
||||
|
||||
var invalidLayers = layers
|
||||
.map(function(layer, layerIndex) {
|
||||
.map(function (layer, layerIndex) {
|
||||
return layer.options ? null : layerIndex;
|
||||
})
|
||||
.filter(function(layerIndex) {
|
||||
.filter(function (layerIndex) {
|
||||
return layerIndex !== null;
|
||||
});
|
||||
|
||||
@@ -167,7 +164,7 @@ function isInvalidLayergroup(layergroup) {
|
||||
return false;
|
||||
}
|
||||
|
||||
function templateDefaults(template) {
|
||||
function templateDefaults (template) {
|
||||
var templateAuth = _.defaults({}, template.auth || {}, {
|
||||
method: 'open'
|
||||
});
|
||||
@@ -183,10 +180,10 @@ function templateDefaults(template) {
|
||||
* @param owner cartodb username of the template owner
|
||||
* @param callback returns error if the user reaches the limit
|
||||
*/
|
||||
TemplateMaps.prototype._checkUserTemplatesLimit = function(userTemplatesKey, owner, callback) {
|
||||
TemplateMaps.prototype._checkUserTemplatesLimit = function (userTemplatesKey, owner, callback) {
|
||||
const limit = this._userTemplateLimit();
|
||||
|
||||
if(!limit) {
|
||||
if (!limit) {
|
||||
return callback();
|
||||
}
|
||||
|
||||
@@ -207,7 +204,7 @@ TemplateMaps.prototype._checkUserTemplatesLimit = function(userTemplatesKey, own
|
||||
});
|
||||
};
|
||||
|
||||
//--------------- PUBLIC API -------------------------------------
|
||||
// --------------- PUBLIC API -------------------------------------
|
||||
|
||||
// Add a template
|
||||
//
|
||||
@@ -218,10 +215,10 @@ TemplateMaps.prototype._checkUserTemplatesLimit = function(userTemplatesKey, own
|
||||
// @param template layergroup template, see
|
||||
// http://github.com/CartoDB/Windshaft-cartodb/wiki/Template-maps#template-format
|
||||
//
|
||||
// @param callback function(err, tpl_id)
|
||||
// @param callback function(err, tplId)
|
||||
// Return template identifier (only valid for given user)
|
||||
//
|
||||
TemplateMaps.prototype.addTemplate = function(owner, template, callback) {
|
||||
TemplateMaps.prototype.addTemplate = function (owner, template, callback) {
|
||||
template = templateDefaults(template);
|
||||
|
||||
var invalidError = this._checkInvalidTemplate(template);
|
||||
@@ -263,22 +260,22 @@ TemplateMaps.prototype.addTemplate = function(owner, template, callback) {
|
||||
//
|
||||
// @param owner cartodb username of the template owner
|
||||
//
|
||||
// @param tpl_id template identifier as returned
|
||||
// @param tplId template identifier as returned
|
||||
// by addTemplate or listTemplates
|
||||
//
|
||||
// @param callback function(err)
|
||||
//
|
||||
TemplateMaps.prototype.delTemplate = function(owner, tpl_id, callback) {
|
||||
this._redisCmd('HDEL', [ this.key_usr_tpl({ owner:owner }), tpl_id ], (err, deleted) => {
|
||||
TemplateMaps.prototype.delTemplate = function (owner, tplId, callback) {
|
||||
this._redisCmd('HDEL', [this.key_usr_tpl({ owner: owner }), tplId], (err, deleted) => {
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
|
||||
if (!deleted) {
|
||||
return callback(new Error(`Template '${tpl_id}' of user '${owner}' does not exist`));
|
||||
return callback(new Error(`Template '${tplId}' of user '${owner}' does not exist`));
|
||||
}
|
||||
|
||||
this.emit('delete', owner, tpl_id);
|
||||
this.emit('delete', owner, tplId);
|
||||
return callback();
|
||||
});
|
||||
};
@@ -292,14 +289,14 @@ TemplateMaps.prototype.delTemplate = function(owner, tpl_id, callback) {
//
// @param owner cartodb username of the template owner
//
// @param tpl_id template identifier as returned by addTemplate
// @param tplId template identifier as returned by addTemplate
//
// @param template layergroup template, see
// http://github.com/CartoDB/Windshaft-cartodb/wiki/Template-maps#template-format
//
// @param callback function(err)
//
TemplateMaps.prototype.updTemplate = function(owner, tpl_id, template, callback) {
TemplateMaps.prototype.updTemplate = function (owner, tplId, template, callback) {
    template = templateDefaults(template);

    var invalidError = this._checkInvalidTemplate(template);
@@ -307,19 +304,19 @@ TemplateMaps.prototype.updTemplate = function(owner, tpl_id, template, callback)
        return callback(invalidError);
    }

    if (tpl_id !== template.name) {
        return callback(new Error(`Cannot update name of a map template ('${tpl_id}' != '${template.name}')`));
    if (tplId !== template.name) {
        return callback(new Error(`Cannot update name of a map template ('${tplId}' != '${template.name}')`));
    }

    var userTemplatesKey = this.key_usr_tpl({ owner });

    this._redisCmd('HGET', [userTemplatesKey, tpl_id], (err, beforeUpdateTemplate) => {
    this._redisCmd('HGET', [userTemplatesKey, tplId], (err, beforeUpdateTemplate) => {
        if (err) {
            return callback(err);
        }

        if (!beforeUpdateTemplate) {
            return callback(new Error(`Template '${tpl_id}' of user '${owner}' does not exist`));
            return callback(new Error(`Template '${tplId}' of user '${owner}' does not exist`));
        }

        let templateString;
@@ -358,25 +355,25 @@ TemplateMaps.prototype.updTemplate = function(owner, tpl_id, template, callback)
//
// @param owner cartodb username of the templates owner
//
// @param callback function(err, tpl_id_list)
// @param callback function(err, tplId_list)
// Returns a list of template identifiers
//
TemplateMaps.prototype.listTemplates = function(owner, callback) {
    this._redisCmd('HKEYS', [ this.key_usr_tpl({owner:owner}) ], callback);
TemplateMaps.prototype.listTemplates = function (owner, callback) {
    this._redisCmd('HKEYS', [this.key_usr_tpl({ owner: owner })], callback);
};

// Get a templates
//
// @param owner cartodb username of the template owner
//
// @param tpl_id template identifier as returned
// @param tplId template identifier as returned
// by addTemplate or listTemplates
//
// @param callback function(err, template)
// Return full template definition
//
TemplateMaps.prototype.getTemplate = function(owner, tpl_id, callback) {
    this._redisCmd('HGET', [this.key_usr_tpl({owner:owner}), tpl_id], (err, template) => {
TemplateMaps.prototype.getTemplate = function (owner, tplId, callback) {
    this._redisCmd('HGET', [this.key_usr_tpl({ owner: owner }), tplId], (err, template) => {
        if (err) {
            return callback(err);
        }
@@ -392,7 +389,7 @@ TemplateMaps.prototype.getTemplate = function(owner, tpl_id, callback) {
    });
};
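Together with listTemplates above, a hedged sketch of how the two read paths compose. The user name is invented and `templateMaps` is assumed to be an instance constructed elsewhere:

// Illustrative only: list a user's template ids, then fetch the first full definition.
templateMaps.listTemplates('alice', function (err, templateIds) {
    if (err || !templateIds.length) {
        return;
    }
    templateMaps.getTemplate('alice', templateIds[0], function (err, template) {
        if (err) {
            return console.error(err);
        }
        // `template` is the full definition (typically name, auth, placeholders, layergroup)
        console.log(template.name);
    });
});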

TemplateMaps.prototype.isAuthorized = function(template, authTokens) {
TemplateMaps.prototype.isAuthorized = function (template, authTokens) {
    if (!template) {
        return false;
    }
@@ -432,99 +429,95 @@ TemplateMaps.prototype.isAuthorized = function(template, authTokens) {
//
// @throws Error on malformed template or parameter
//
var _reNumber = /^([-+]?[\d\.]?\d+([eE][+-]?\d+)?)$/,
    _reCSSColorName = /^[a-zA-Z]+$/,
    _reCSSColorVal = /^#[0-9a-fA-F]{3,6}$/;
var _reNumber = /^([-+]?[\d\.]?\d+([eE][+-]?\d+)?)$/; // eslint-disable-line no-useless-escape
var _reCSSColorName = /^[a-zA-Z]+$/;
var _reCSSColorVal = /^#[0-9a-fA-F]{3,6}$/;

function _replaceVars (str, params) {
    // Construct regular expressions for each param
    Object.keys(params).forEach(function(k) {
        str = str.replace(new RegExp("<%=\\s*" + k + "\\s*%>", "g"), params[k]);
    Object.keys(params).forEach(function (k) {
        str = str.replace(new RegExp('<%=\\s*' + k + '\\s*%>', 'g'), params[k]);
    });
    return str;
}

function isObject(val) {
    return ( _.isObject(val) && !_.isArray(val) && !_.isFunction(val));
function isObject (val) {
    return (_.isObject(val) && !_.isArray(val) && !_.isFunction(val));
}

TemplateMaps.prototype.instance = function(template, params) {
    var all_params = {};
    var phold = template.placeholders || {};
    Object.keys(phold).forEach(function(k) {
        var val = params.hasOwnProperty(k) ? params[k] : phold[k].default;
        var type = phold[k].type;
        // properly escape
        if ( type === 'sql_literal' ) {
            // duplicate any single-quote
            val = val.replace(/'/g, "''");
        }
        else if ( type === 'sql_ident' ) {
            // duplicate any double-quote
            val = val.replace(/"/g, '""');
        }
        else if ( type === 'number' ) {
            // check it's a number
            if ( typeof(val) !== 'number' && ! val.match(_reNumber) ) {
                throw new Error("Invalid number value for template parameter '" + k + "': " + val);
            }
        }
        else if ( type === 'css_color' ) {
            // check it only contains letters or
            // starts with # and only contains hexdigits
            if ( ! val.match(_reCSSColorName) && ! val.match(_reCSSColorVal) ) {
                throw new Error("Invalid css_color value for template parameter '" + k + "': " + val);
            }
        }
        else {
            // NOTE: should be checked at template create/update time
            throw new Error("Invalid placeholder type '" + type + "'");
        }
        all_params[k] = val;
    });
TemplateMaps.prototype.instance = function (template, params) {
    var allParams = {};
    var phold = template.placeholders || {};
    Object.keys(phold).forEach(function (k) {
        var val = Object.prototype.hasOwnProperty.call(params, k) ? params[k] : phold[k].default;
        var type = phold[k].type;
        // properly escape
        if (type === 'sql_literal') {
            // duplicate any single-quote
            val = val.replace(/'/g, "''");
        } else if (type === 'sql_ident') {
            // duplicate any double-quote
            val = val.replace(/"/g, '""');
        } else if (type === 'number') {
            // check it's a number
            if (typeof (val) !== 'number' && !val.match(_reNumber)) {
                throw new Error("Invalid number value for template parameter '" + k + "': " + val);
            }
        } else if (type === 'css_color') {
            // check it only contains letters or
            // starts with # and only contains hexdigits
            if (!val.match(_reCSSColorName) && !val.match(_reCSSColorVal)) {
                throw new Error("Invalid css_color value for template parameter '" + k + "': " + val);
            }
        } else {
            // NOTE: should be checked at template create/update time
            throw new Error("Invalid placeholder type '" + type + "'");
        }
        allParams[k] = val;
    });

    // NOTE: we're deep-cloning the layergroup here
    var layergroup = JSON.parse(JSON.stringify(template.layergroup));
    // NOTE: we're deep-cloning the layergroup here
    var layergroup = JSON.parse(JSON.stringify(template.layergroup));

    if (layergroup.buffersize && isObject(layergroup.buffersize)) {
        Object.keys(layergroup.buffersize).forEach(function(k) {
            layergroup.buffersize[k] = parseInt(_replaceVars(layergroup.buffersize[k], all_params), 10);
        });
    }
    if (layergroup.buffersize && isObject(layergroup.buffersize)) {
        Object.keys(layergroup.buffersize).forEach(function (k) {
            layergroup.buffersize[k] = parseInt(_replaceVars(layergroup.buffersize[k], allParams), 10);
        });
    }

    for (var i=0; i<layergroup.layers.length; ++i) {
        var lyropt = layergroup.layers[i].options;
    for (var i = 0; i < layergroup.layers.length; ++i) {
        var lyropt = layergroup.layers[i].options;

        if ( params.styles && params.styles[i] ) {
        if (params.styles && params.styles[i]) {
            // dynamic styling for this layer
            lyropt.cartocss = params.styles[i];
        } else if ( lyropt.cartocss ) {
            lyropt.cartocss = _replaceVars(lyropt.cartocss, all_params);
        }
        if ( lyropt.sql) {
            lyropt.sql = _replaceVars(lyropt.sql, all_params);
        }
            lyropt.cartocss = params.styles[i];
        } else if (lyropt.cartocss) {
            lyropt.cartocss = _replaceVars(lyropt.cartocss, allParams);
        }
        if (lyropt.sql) {
            lyropt.sql = _replaceVars(lyropt.sql, allParams);
        }
        // Anything else ?
    }
    }

    // extra information about the template
    layergroup.template = {
        name: template.name,
        auth: template.auth
    };
    // extra information about the template
    layergroup.template = {
        name: template.name,
        auth: template.auth
    };

    return layergroup;
    return layergroup;
};
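As a worked example of the placeholder substitution implemented above. The template, placeholder names and parameter values are invented for illustration, and `templateMaps` is assumed to be an instance constructed elsewhere:

// Invented template: one css_color and one number placeholder, referenced via <%= ... %> tokens.
var template = {
    name: 'pois',
    auth: { method: 'open' },
    placeholders: {
        color: { type: 'css_color', default: 'red' },
        zoom: { type: 'number', default: 4 }
    },
    layergroup: {
        layers: [{
            options: {
                sql: 'SELECT * FROM pois WHERE zoom <= <%= zoom %>',
                cartocss: '#layer { marker-fill: <%= color %>; }'
            }
        }]
    }
};

// Missing params fall back to the placeholder defaults; invalid values throw.
var layergroup = templateMaps.instance(template, { color: '#fff' });
// layergroup.layers[0].options.cartocss -> '#layer { marker-fill: #fff; }'
// layergroup.layers[0].options.sql      -> 'SELECT * FROM pois WHERE zoom <= 4'
// layergroup.template                   -> { name: 'pois', auth: { method: 'open' } }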

// Return a fingerPrint of the object
TemplateMaps.prototype.fingerPrint = function(template) {
    return crypto.createHash('md5')
        .update(JSON.stringify(template))
        .digest('hex')
    ;
TemplateMaps.prototype.fingerPrint = function (template) {
    return crypto.createHash('md5')
        .update(JSON.stringify(template))
        .digest('hex')
    ;
};
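In other words, the fingerprint above is just the hex MD5 of the JSON-serialized template, so any change to the template changes the digest. A tiny sketch with an arbitrary template value and an assumed `templateMaps` instance:

// Arbitrary template object; identical templates always hash to the same fingerprint.
var fp = templateMaps.fingerPrint({ name: 'pois', auth: { method: 'open' } });
// fp is a 32-character hex string; any change to the template yields a different value.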

module.exports.templateName = function templateName(templateId) {
module.exports.templateName = function templateName (templateId) {
    var templateIdTokens = templateId.split('@');
    var name = templateIdTokens[0];

@@ -3,7 +3,7 @@
var dot = require('dot');
dot.templateSettings.strip = false;

function createTemplate(method) {
function createTemplate (method) {
    return dot.template([
        'SELECT',
        'min({{=it._column}}) min_val,',
@@ -27,7 +27,7 @@ var methods = {
    headtails: 'CDB_HeadsTailsBins(array_agg({{=it._column}}::numeric), {{=it._buckets}}) as headtails'
};

var methodTemplates = Object.keys(methods).reduce(function(methodTemplates, methodName) {
var methodTemplates = Object.keys(methods).reduce(function (methodTemplates, methodName) {
    methodTemplates[methodName] = createTemplate(methods[methodName]);
    return methodTemplates;
}, {});
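The reduce above compiles one doT template per quantification method, keyed by method name. A rough sketch of invoking one of them; the `_column` and `_buckets` names come from the snippets shown in this diff, everything else (the column, bucket count, and any other `it._*` fields the full template expects) is assumed:

// Hypothetical invocation: render the compiled 'headtails' snippet for a column.
// The full SELECT body is cut off in this diff, so only the interpolation is illustrated;
// a real call would also supply the remaining it._* fields the template references.
var sqlFragment = methodTemplates.headtails({ _column: 'population', _buckets: 7 });
// sqlFragment contains "CDB_HeadsTailsBins(array_agg(population::numeric), 7) as headtails"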
@@ -68,7 +68,7 @@ PostgresDatasource.prototype.getName = function () {
};

PostgresDatasource.prototype.getRamp = function (column, buckets, method, callback) {
    if (method && !methodTemplates.hasOwnProperty(method)) {
    if (method && !Object.prototype.hasOwnProperty.call(methodTemplates, method)) {
        return callback(new Error(
            'Invalid method "' + method + '", valid methods: [' + Object.keys(methodTemplates).join(',') + ']'
        ));
@@ -94,9 +94,9 @@ PostgresDatasource.prototype.getRamp = function (column, buckets, method, callba
        // Skip null values from ramp
        // Generated turbo-carto won't be correct, but better to keep it working than failing
        // TODO fix cartodb-postgres extension quantification functions
        ramp = ramp.filter(function(value) { return value !== null; });
        ramp = ramp.filter(function (value) { return value !== null; });
        if (strategy !== STRATEGY.EXACT) {
            ramp = ramp.sort(function(a, b) {
            ramp = ramp.sort(function (a, b) {
                return a - b;
            });
        }
@@ -105,7 +105,7 @@ PostgresDatasource.prototype.getRamp = function (column, buckets, method, callba
    }, true); // use read-only transaction
};

function getResult(resultSet) {
function getResult (resultSet) {
    resultSet = resultSet || {};
    var result = resultSet.rows || [];
    result = result[0] || {};
@@ -7,7 +7,7 @@
 * @constructor
 * @type {UserLimitsBackend}
 */
function UserLimitsBackend(metadataBackend, options) {
function UserLimitsBackend (metadataBackend, options) {
    this.metadataBackend = metadataBackend;
    this.options = options || {};
    this.options.limits = this.options.limits || {};
@@ -59,7 +59,7 @@ UserLimitsBackend.prototype.getTimeoutRenderLimit = function (username, apiKey,
    });
};

function isAuthorized(metadataBackend, username, apiKey, callback) {
function isAuthorized (metadataBackend, username, apiKey, callback) {
    if (!apiKey) {
        return callback(null, false);
    }
@@ -2,7 +2,7 @@

var FastlyPurge = require('fastly-purge');

function FastlyCacheBackend(apiKey, serviceId) {
function FastlyCacheBackend (apiKey, serviceId) {
    this.serviceId = serviceId;
    this.fastlyPurge = new FastlyPurge(apiKey, { softPurge: false });
}
@@ -13,6 +13,6 @@ module.exports = FastlyCacheBackend;
 * @param cacheObject should respond to `key() -> String` method
 * @param {Function} callback
 */
FastlyCacheBackend.prototype.invalidate = function(cacheObject, callback) {
FastlyCacheBackend.prototype.invalidate = function (cacheObject, callback) {
    this.fastlyPurge.key(this.serviceId, cacheObject.key(), callback);
};
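A hedged sketch of wiring this backend to a cache-key object such as the NamedMaps key shown further down in this diff. The API key, service id, owner and map name are placeholders:

// Placeholders only: a real Fastly API key and service id are required.
var backend = new FastlyCacheBackend('FASTLY_API_KEY', 'FASTLY_SERVICE_ID');
var namedMap = new NamedMaps('alice', 'world_borders'); // any object exposing key() works
backend.invalidate(namedMap, function (err) {
    if (err) {
        return console.error('purge failed', err);
    }
    // fastly-purge issued a purge for the surrogate key returned by namedMap.key()
});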
@@ -2,7 +2,7 @@

var request = require('request');

function VarnishHttpCacheBackend(host, port) {
function VarnishHttpCacheBackend (host, port) {
    this.host = host;
    this.port = port;
}
@@ -13,7 +13,7 @@ module.exports = VarnishHttpCacheBackend;
 * @param cacheObject should respond to `key() -> String` method
 * @param {Function} callback
 */
VarnishHttpCacheBackend.prototype.invalidate = function(cacheObject, callback) {
VarnishHttpCacheBackend.prototype.invalidate = function (cacheObject, callback) {
    request(
        {
            method: 'PURGE',
@@ -22,7 +22,7 @@ VarnishHttpCacheBackend.prototype.invalidate = function(cacheObject, callback) {
                'Invalidation-Match': '\\b' + cacheObject.key() + '\\b'
            }
        },
        function(err, response) {
        function (err, response) {
            if (err || response.statusCode !== 204) {
                return callback(new Error('Unable to invalidate Varnish object'));
            }
@@ -2,25 +2,25 @@

var LruCache = require('lru-cache');

function LayergroupAffectedTables() {
function LayergroupAffectedTables () {
    // dbname + layergroupId -> affected tables cache
    this.cache = new LruCache({ max: 2000 });
}

module.exports = LayergroupAffectedTables;

LayergroupAffectedTables.prototype.hasAffectedTables = function(dbName, layergroupId) {
LayergroupAffectedTables.prototype.hasAffectedTables = function (dbName, layergroupId) {
    return this.cache.has(createKey(dbName, layergroupId));
};

LayergroupAffectedTables.prototype.set = function(dbName, layergroupId, affectedTables) {
LayergroupAffectedTables.prototype.set = function (dbName, layergroupId, affectedTables) {
    this.cache.set(createKey(dbName, layergroupId), affectedTables);
};

LayergroupAffectedTables.prototype.get = function(dbName, layergroupId) {
LayergroupAffectedTables.prototype.get = function (dbName, layergroupId) {
    return this.cache.get(createKey(dbName, layergroupId));
};

function createKey(dbName, layergroupId) {
function createKey (dbName, layergroupId) {
    return dbName + ':' + layergroupId;
}
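For reference, the intended access pattern of this small LRU wrapper. The database name, layergroup id and the affected-tables value below are illustrative, not taken from the diff:

var affectedTablesCache = new LayergroupAffectedTables();

// Illustrative values; affectedTables is whatever object the caller tracks per layergroup.
affectedTablesCache.set('db_alice', 'layergroup_123', { tables: ['public.pois'] });

if (affectedTablesCache.hasAffectedTables('db_alice', 'layergroup_123')) {
    // entries are evicted automatically once the cache grows past 2000 keys
    console.log(affectedTablesCache.get('db_alice', 'layergroup_123'));
}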
@@ -2,7 +2,7 @@

var crypto = require('crypto');

function NamedMaps(owner, name) {
function NamedMaps (owner, name) {
    this.namespace = 'n';
    this.owner = owner;
    this.name = name;
@@ -10,11 +10,10 @@ function NamedMaps(owner, name) {

module.exports = NamedMaps;

NamedMaps.prototype.key = function() {
NamedMaps.prototype.key = function () {
    return this.namespace + ':' + shortHashKey(this.owner + ':' + this.name);
};

function shortHashKey(target) {
    return crypto.createHash('sha256').update(target).digest('base64').substring(0,6);
function shortHashKey (target) {
    return crypto.createHash('sha256').update(target).digest('base64').substring(0, 6);
}
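So the resulting cache key is the 'n' namespace plus a 6-character hash of owner and name. A small sketch with an invented owner and map name:

var key = new NamedMaps('alice', 'world_borders').key();
// key looks like 'n:XXXXXX' (value invented): 'n:' followed by the first 6 base64
// characters of sha256('alice:world_borders'); the same owner/name pair always maps
// to the same key, which is what the cache backends above purge against.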