Compare commits

...

390 Commits

Author SHA1 Message Date
Álvaro
c792421687 undefined might come as a string (#1196)
so add a hack to check and cast it to a real undefined

Co-authored-by: Álvaro Manera <amanera@cartodb.com>
2021-01-15 10:44:05 +01:00
Álvaro
15135b475c add missing env vars (#1195)
* add missing env vars

* add missing space

Co-authored-by: Álvaro Manera <amanera@cartodb.com>
Co-authored-by: alberhander <albertoh@carto.com>
2021-01-14 10:08:25 +01:00
Álvaro Manera
fd9f935676 submodule bump 2020-12-23 07:37:54 +01:00
Álvaro Manera
71f5886a4d 🤬 yaml 2020-12-21 13:21:44 +01:00
Álvaro Manera
bc8c9f973c yaml typo 2020-12-21 13:19:52 +01:00
Álvaro Manera
ec40614f4b configure docker before push 2020-12-21 13:14:36 +01:00
Álvaro
5ed1a3a2d1 Merge pull request #1194 from CartoDB/clean
Use Github actions for builds
2020-12-21 09:06:52 +01:00
Álvaro Manera
0aa5f394e2 update submodule 2020-12-18 16:03:31 +01:00
Álvaro Manera
2e1a3c7fb1 small fixes 2020-12-18 15:22:26 +01:00
Álvaro Manera
27eb00223d minor PR comments 2020-12-18 07:27:59 +01:00
Álvaro Manera
8d46780006 fix master build 2020-12-17 07:22:31 +01:00
Álvaro Manera
6ffd2c090e fix build 2020-12-17 07:15:57 +01:00
Álvaro Manera
3995787c02 use token to pull repos 2020-12-17 07:07:09 +01:00
Álvaro Manera
ddb1b0c0d8 update paths and pull submodule 2020-12-16 16:47:19 +01:00
Álvaro Manera
a03d268260 add submodule 2020-12-16 16:40:41 +01:00
Álvaro
5c491a25cf Use env vars and fix tests 2020-12-16 16:32:32 +01:00
Shylpx
92be27e700 Merge pull request #1192 from CartoDB/feature/ch89482/mr-jeff-if-a-column-name-has-an-uppercase
[ch89482] Update 'camshaft' to version 0.67.2
2020-11-23 10:26:20 +00:00
cgonzalez
6b61f5e168 Update 'camshaft' to version 0.67.2 2020-11-18 12:37:38 +00:00
Daniel G. Aubert
d79f1b41d0 Merge pull request #1190 from CartoDB/feature/ch101625/node-windshaft-exiting-because-of-typeerror
Fix logger error serializer when the exception stack is not set
2020-09-09 14:07:23 +02:00
Jorge Tarrero
e039204638 Fix linter 2020-09-09 11:43:33 +02:00
Jorge Tarrero
dc1becd15c Fix logger error serializer when the exception stack is not set 2020-09-09 11:29:32 +02:00
Daniel G. Aubert
a121fd75ab Merge pull request #1189 from CartoDB/fix-kibana-index-bis
Update camshaft to version 0.67.1
2020-08-26 11:50:50 +02:00
Daniel García Aubert
f85417a886 Update NEWS 2020-08-26 11:39:34 +02:00
Daniel García Aubert
8ad72ff2ce Update camshaft to version 0.67.1 2020-08-26 11:36:38 +02:00
Daniel García Aubert
4dd6bc466a Use development version of camshaft 2020-08-26 11:06:26 +02:00
Daniel García Aubert
c119c92de6 Use development version of camshaft 2020-08-26 10:50:16 +02:00
Daniel García Aubert
a3f7acb213 Use development version of camshaft 2020-08-26 10:41:34 +02:00
Daniel García Aubert
0f14ed55db Use development version of camshaft 2020-08-26 09:35:00 +02:00
Rafa de la Torre
528395103b Merge pull request #1186 from CartoDB/feature/ch94770/node-minimal-doc-in-the-repos-about-how-to
[Node] Minimal doc in the repos about how to add new log traces
2020-08-04 09:20:45 +02:00
Rafa de la Torre
288cd9584f Markdown about how to write log traces 2020-08-03 16:31:00 +02:00
Alberto Asuero
cf82e1954e Merge pull request #1185 from CartoDB/alasarr/gitignore
Adding docker resources to .gitignore
2020-07-29 21:43:13 +02:00
Alberto Asuero
3b00cffc3b New line at .gitignore 2020-07-28 08:58:33 +00:00
Alberto Asuero
95bf39cada Adding docker resources to .gitignore 2020-07-28 08:56:38 +00:00
Daniel G. Aubert
f9ad3c8acf Merge pull request #1184 from CartoDB/feature/ch91877/remove-log-aggregation-in-metro
Logger: rename key 'msg' => 'event_message'
2020-07-23 14:12:17 +02:00
Daniel García Aubert
28f70f6877 Logger: rename key 'msg' => 'event_message' 2020-07-23 14:01:34 +02:00
Daniel G. Aubert
d5c5d07507 Merge pull request #1183 from CartoDB/feature/ch91877/remove-log-aggregation-in-metro
Metro: stop aggregating logs per request id
2020-07-22 16:00:41 +02:00
Daniel García Aubert
b646f71394 Don't miss the header 2020-07-22 13:35:43 +02:00
Daniel García Aubert
38fe2169aa Update camshaft to version 0.67.0 2020-07-22 13:03:51 +02:00
Daniel García Aubert
a749d4fb43 Typo 2020-07-22 11:40:45 +02:00
Daniel García Aubert
b9198b59a1 Logger: rename 'error' => 'exception' to avoid name clashing in E/S 2020-07-21 17:53:46 +02:00
Daniel García Aubert
3102d895f2 Update camshaft devel version 2020-07-21 17:15:50 +02:00
Daniel García Aubert
b60a69e7d2 Logger: rename level => levelname to avoid name collision 2020-07-21 17:09:19 +02:00
Daniel García Aubert
3937b8c271 Adapt JSON output to the standard structure 2020-07-21 16:36:23 +02:00
Daniel García Aubert
b32a073ac3 Metro: stop aggregating log per request id, use new config.json file 2020-07-20 19:33:33 +02:00
Daniel García Aubert
afd4ad500f Lint 2020-07-01 10:53:04 +02:00
Daniel G. Aubert
cb17bba3f5 Merge pull request #1181 from CartoDB/feature/ch88712/node-windshaft-metro-service-is-not-started
Fix: TypeError: Cannot read property 'level' of undefined
2020-07-01 08:58:26 +02:00
Daniel García Aubert
5b7341c0e9 Fix: TypeError: Cannot read property 'level' of undefined
Feature: dump unfinished log into a file while exiting
2020-06-29 21:01:48 +02:00
Daniel G. Aubert
d65565c091 Merge pull request #1170 from CartoDB/dgaubert/ch78384/maps-api-replace-log4js-logger-by-pino
Replace log4js logger by pino
2020-06-23 11:56:03 +02:00
Daniel García Aubert
360b98254b Upgrade camshaft to version 0.66.0 2020-06-22 17:01:29 +02:00
Daniel García Aubert
43a603922d Update NEWS 2020-06-22 12:24:06 +02:00
Daniel García Aubert
74116523b4 Merge branch 'master' into dgaubert/ch78384/maps-api-replace-log4js-logger-by-pino 2020-06-22 12:12:45 +02:00
Daniel García Aubert
6cddec562a Profiler: don't log times unless at least one task has been done 2020-06-19 11:42:46 +02:00
Daniel García Aubert
22086ba914 Count requests even when the info is not complete 2020-06-19 10:18:06 +02:00
Daniel García Aubert
a68618c336 Prepare init log to be kibana friendly 2020-06-12 10:12:32 +02:00
Daniel García Aubert
578f543c01 log user 2020-06-11 18:21:13 +02:00
Daniel García Aubert
49735308de Do not rename level and error fields 2020-06-11 18:15:44 +02:00
Daniel García Aubert
2444b4c008 rename error => errors to avoid type clashing in ES 2020-06-11 13:04:19 +02:00
Daniel García Aubert
bf250e592a rename level => levelname to avoid type clashing in ES 2020-06-11 12:30:15 +02:00
Daniel García Aubert
f6c8796c8a Do not duplicate timer 2020-06-11 10:12:27 +02:00
Daniel G. Aubert
649f8d701e Merge pull request #1173 from CartoDB/dgaubert/ch78384/maps-api-replace-log4js-logger-by-pino-bis
Logger improvements
2020-06-11 09:39:07 +02:00
Daniel G. Aubert
568e428a58 Merge pull request #1174 from CartoDB/dgaubert/ch78384/maps-api-replace-log4js-logger-by-pino-bis-bis
Create log-collector utility
2020-06-11 09:38:34 +02:00
Daniel G. Aubert
ff00fed43e Merge pull request #1175 from CartoDB/dgaubert/ch78384/maps-api-replace-log4js-logger-by-pino-bis-bis-bis
Tags Middleware
2020-06-11 09:38:14 +02:00
Daniel G. Aubert
561bc8aef0 Merge pull request #1177 from CartoDB/dgaubert/ch78384/maps-api-replace-log4js-logger-by-pino-bis-bis-bis-bis
Introducing @carto/metro, CARTO's logs and metrics transport.
2020-06-11 09:37:58 +02:00
Daniel García Aubert
e49ecda321 Don't create a new metric for each request, use the same label to send to statsd 2020-06-10 17:21:35 +02:00
Daniel García Aubert
18525a60cd Use 9145 as default port for metrics 2020-06-09 16:35:08 +02:00
Daniel G. Aubert
b8d3971c8a Merge pull request #1178 from CartoDB/fix-layergroup-structure
Layergroup Id should have cache buster defined always
2020-06-09 15:02:28 +02:00
Daniel García Aubert
23839f5b4a Update NEWS 2020-06-09 15:00:53 +02:00
Daniel García Aubert
f235dcdeda Add test 2020-06-09 13:05:39 +02:00
Daniel García Aubert
9c21194c68 Set cache buster equal to 0 when there is no affected tables in the mapconfig 2020-06-09 12:21:47 +02:00
Daniel García Aubert
7acbfc1e9b Typos 2020-06-09 10:00:54 +02:00
Daniel García Aubert
6f9580bae2 Allow the metro to exit if this is the only active server in the event loop system 2020-06-09 09:52:56 +02:00
Daniel García Aubert
3583e064be Use native http server 2020-06-09 09:40:24 +02:00
Daniel García Aubert
9e14185990 Rename 2020-06-09 09:30:11 +02:00
Daniel García Aubert
a5c83edef6 Introducing @carto/metro, the carto logs and metrics transport. 2020-06-08 20:16:00 +02:00
Daniel García Aubert
04d0f2e530 Merge branch 'dgaubert/ch78384/maps-api-replace-log4js-logger-by-pino-bis-bis' into dgaubert/ch78384/maps-api-replace-log4js-logger-by-pino-bis-bis-bis 2020-06-08 19:31:18 +02:00
Daniel García Aubert
e206a1bca3 Check whether the log is a pino log and skip it when it isn't 2020-06-08 16:34:08 +02:00
Daniel García Aubert
b115bca07e Be able to tag requests with labels as easier way to provide business metrics 2020-06-08 16:29:22 +02:00
Daniel García Aubert
07b9decb03 Add log-collector utility; it will be moved to its own repository. Attaching it here for development purposes. Try it with the following command: LOG_LEVEL=info npm t | node ./log-collector.js 2020-06-05 20:12:20 +02:00
Daniel García Aubert
02c8e28494 Finalize request's log 2020-06-05 20:08:40 +02:00
Daniel García Aubert
d28744a5e3 Be able to pass the logger to the analysis creation (camshaft) while instantiating a named map with analysis 2020-06-05 20:08:08 +02:00
Daniel García Aubert
a19e9a79b8 Release 9.0.0 2020-06-05 14:10:24 +02:00
Daniel García Aubert
4d7eb555a8 Update carto-package.json 2020-06-05 14:09:40 +02:00
Daniel García Aubert
6f9f53dd03 Be able to reduce the footprint in the final log file depending on the environment 2020-06-04 20:28:06 +02:00
Daniel García Aubert
63bc8f75b9 Typo 2020-06-04 18:43:21 +02:00
Daniel García Aubert
adeffd2018 Centralize common headers, this will help us to move biz metrics out of the process 2020-06-04 17:45:15 +02:00
Daniel G. Aubert
b2da00900f Merge pull request #1171 from CartoDB/dgaubert/ch78384/maps-api-replace-log4js-logger-by-pino-bis
Do not bind logger to global object
2020-06-04 12:14:21 +02:00
Daniel G. Aubert
0c6d5a1e18 Merge pull request #1172 from CartoDB/dgaubert/ch78384/maps-api-replace-log4js-logger-by-pino-bis-bis
Stop using profiler wrongly
2020-06-04 12:13:43 +02:00
Daniel García Aubert
6945cfc93c Add TODO 2020-06-04 12:10:15 +02:00
Daniel García Aubert
7b53b7c30a Stop using the profiler wrongly. Now it only saves custom events from backends (tile, map, attributes, etc.) and calculates the response time. Also removed the tags used to know whether overviews are being used. 2020-06-03 19:51:56 +02:00
Daniel García Aubert
d073f7e3dd typo 2020-06-03 17:34:30 +02:00
Daniel García Aubert
210f5b01ec Make sure all errors use the serializer set for the logger 2020-06-03 17:32:16 +02:00
Daniel García Aubert
1dda183a31 typo 2020-06-03 16:19:42 +02:00
Daniel García Aubert
0eadfe6ee9 Simplify error middleware 2020-06-03 15:52:24 +02:00
Daniel García Aubert
c37e3f173d Handle errors properly in user middleware; they will be logged in the error middleware 2020-06-03 15:39:02 +02:00
Daniel García Aubert
107a97aa9e Honor @oleurud's comment 2020-06-03 15:11:08 +02:00
Daniel García Aubert
219d2c9044 Shortcuts for serializers 2020-06-03 15:10:31 +02:00
Daniel García Aubert
1e89821d97 Use standard serializers for error, request, and response 2020-06-03 14:28:35 +02:00
Daniel García Aubert
29c6505252 Do not set header 'x-tiler-profiler' and log it instead 2020-06-02 17:09:06 +02:00
Daniel García Aubert
7d8d05b865 Log errors and do not send 'X-Tiler-Errors' header 2020-06-02 16:15:01 +02:00
Daniel García Aubert
afeb91dc86 Bring back logger for windshaft 2020-06-02 13:20:57 +02:00
Daniel García Aubert
b7b3392bdd Be able to set log level from env variable LOG_LEVEL 2020-06-02 13:16:26 +02:00
Daniel García Aubert
b60116410a Use req/res logger instead of the one bound to global object 2020-06-02 12:31:18 +02:00
Daniel García Aubert
ffe19827fd Rename factory and don't use the keyword 'new' to create server while testing 2020-06-02 11:57:11 +02:00
Daniel García Aubert
48c28aea0b Do not bind logger to global object, now it's a part of serverOptions 2020-06-02 11:49:54 +02:00
Daniel García Aubert
62d66f2dbc Do not use global logger in middlewares, use the one initialized in res.locals instead 2020-06-02 09:00:45 +02:00
Daniel García Aubert
e644201756 Merge branch 'master' into dgaubert/ch78384/maps-api-replace-log4js-logger-by-pino 2020-06-01 19:23:21 +02:00
Daniel G. Aubert
481a5928c4 Merge pull request #1169 from CartoDB/update-deps
Update deps
2020-06-01 19:21:34 +02:00
Daniel García Aubert
163c546236 Replace log4js by pino as logger:
- Logs to stdout, disabled while testing
- Change log calls signature when needed
- Use development version of camshaft
- Removes unused log configuration
- Bind request id to log req/res
- Log req at the beginning of the cycle and res at the end
2020-06-01 19:18:15 +02:00
Daniel García Aubert
656bc9344b Update deps 2020-06-01 13:50:09 +02:00
Daniel García Aubert
b79a8587fa Update deps to fix some security vuln 2020-06-01 12:35:31 +02:00
Daniel G. Aubert
17337974a2 Merge pull request #1168 from CartoDB/dgaubert/ch77050/data-in-headers
Avoid custom headers to be undefined
2020-05-29 16:16:19 +02:00
Daniel García Aubert
6bcf477532 Avoid custom headers to be undefined 2020-05-29 16:06:16 +02:00
Daniel G. Aubert
bf7e8a6ec6 Merge pull request #1167 from CartoDB/dgaubert/ch77050/data-in-headers
Add 'Carto-Stat-Tag', 'Carto-User-Id', and 'Carto-Client' headers
2020-05-26 17:14:32 +02:00
Daniel García Aubert
f31e8b43b6 Duplicate 2020-05-26 17:03:53 +02:00
Daniel García Aubert
0090811510 Typo 2020-05-26 16:56:50 +02:00
Daniel García Aubert
b97aeda53c Adapt test-client to handle client query param 2020-05-26 16:52:13 +02:00
Daniel García Aubert
f82232194c Under if 2020-05-26 16:31:53 +02:00
Daniel García Aubert
aff5c9a614 Add test to check the headers exist while instantiating a map 2020-05-26 16:28:44 +02:00
Daniel García Aubert
ddefb1a6ca Add 'Carto-Stat-Tag', 'Carto-User-Id', and 'Carto-Client' headers 2020-05-26 13:15:35 +02:00
Daniel G. Aubert
4d06fee1e2 Merge pull request #1164 from CartoDB/node-12
Support Node.js 12
2020-05-20 15:57:46 +02:00
Daniel García Aubert
8febd81ed2 Merge branch 'master' into node-12 2020-05-20 09:15:05 +02:00
Daniel García Aubert
e575f01bef Upgrade gc-stats to version 1.4.0 2020-05-14 19:32:47 +02:00
Raúl Marín
f25f507945 Merge pull request #1165 from Algunenano/clang9
Force our packages to be used
2020-05-14 18:11:15 +02:00
Raúl Marín
bdbb529ea8 Force clang-9 to be used 2020-05-14 17:52:06 +02:00
Daniel García Aubert
0aac942aa1 Make query idempotent among PG versions 2020-05-14 13:13:32 +02:00
Daniel García Aubert
8cc24bc665 - Drop support for Node.js < 12
- Support Node.js 12
- Upgrade `windshaft` to version `7.0.0`
- Upgrade `camshaft` to version `0.65.3`
- Upgrade `cartodb-redis` to version `3.0.0`
2020-05-14 13:00:23 +02:00
Daniel G. Aubert
478ea66678 Merge pull request #1162 from CartoDB/dgaubert/ch71093/update-maps-api-to-new-event-format
New event format for metrics
2020-05-01 13:34:57 +02:00
Daniel García Aubert
4dfc898587 Don't log when metrics were sent successfully 2020-05-01 13:25:03 +02:00
Daniel García Aubert
05e77b2aed Add test with mapconfig's query against a table to ensure cache buster metrics are sent with the right values. 2020-05-01 11:43:37 +02:00
Daniel García Aubert
24863b6393 Update NEWS 2020-05-01 10:51:33 +02:00
Daniel García Aubert
3cf17c8bab typo 2020-05-01 10:40:56 +02:00
Daniel García Aubert
8c38ecf808 Missing substring 2020-04-30 13:24:41 +02:00
Daniel García Aubert
a196a26ab4 Get templateHash for static tile request and errored named map instantiations 2020-04-30 13:09:12 +02:00
Daniel García Aubert
8d73571f5b Simplify assertions 2020-04-30 12:31:12 +02:00
Daniel García Aubert
d5348dd9d4 Rename fields from headers of metrics 2020-04-29 18:48:10 +02:00
Daniel García Aubert
7e31b956bf Send stat_tag metric when available 2020-04-29 18:25:01 +02:00
Daniel García Aubert
dbc5d65d90 Send template_hash as part of the metrics event 2020-04-29 17:26:33 +02:00
Daniel García Aubert
c91d78fe51 Also export template hash 2020-04-29 16:44:14 +02:00
Daniel García Aubert
798d010776 Ensure "map_id" and "cache_buster" as part of the event 2020-04-29 14:32:08 +02:00
Daniel García Aubert
70f0b6ea50 Avoid using "pubsub" for the name of modules, middlewares, variables, etc. 2020-04-29 10:40:45 +02:00
Daniel García Aubert
4e3ef96374 Add test to check we still send events when a static map tile errors 2020-04-29 10:28:10 +02:00
Daniel García Aubert
c88a14bf43 Send metrics for map instantiations (named, anonymous and static) with the new format. 2020-04-28 19:17:00 +02:00
Daniel García Aubert
7f5ed58a79 Add test 2020-04-27 18:40:28 +02:00
Daniel García Aubert
89e349146d Fix tests and stop using sinon as a dev dependency 2020-04-27 18:02:06 +02:00
Daniel García Aubert
c5cb2ea4cb Add FIXME comment 2020-04-27 13:35:19 +02:00
Daniel García Aubert
fe9610abe9 Missing logger argument 2020-04-27 13:35:07 +02:00
Daniel García Aubert
1bbde4f5e3 Let the caller choose how to handle the call to a method 2020-04-27 13:27:05 +02:00
Daniel García Aubert
e90c196598 Simplified metrics middleware and backend 2020-04-27 12:46:27 +02:00
Daniel García Aubert
6a2333be64 Topic name's lifetime is longer than the pubsub backend's, so we can keep it as a property. 2020-04-27 12:13:54 +02:00
Daniel García Aubert
7d6a64d383 Do not expose functions just to be able to mock them while testing 2020-04-27 11:59:36 +02:00
Daniel García Aubert
42dc2915ea Send pubsub metrics once the response has finished 2020-04-27 11:41:37 +02:00
Daniel García Aubert
3cec6b5a90 Missing callback 2020-04-27 11:06:09 +02:00
Daniel García Aubert
c31e3d6e3f Consistent interface when returning no event for event data in metrics 2020-04-27 10:58:37 +02:00
Daniel García Aubert
6e4c8a6639 Follow Node.js callback pattern 2020-04-27 10:23:11 +02:00
Manuel J. Morillo
809c267419 Merge pull request #1161 from CartoDB/fix_parsing_columns_histograms_1160
Fixes 1160: Prevent using cast column as part of __ctx_query
2020-04-23 13:12:32 +02:00
manmorjim
5ac27d1002 Update NEWS 2020-04-10 14:34:02 +02:00
manmorjim
7237fb04a8 Adding test for column date type in numeric histograms 2020-04-10 14:33:38 +02:00
manmorjim
d1696425fd Prevent using cast column from alias __ctx_query
Fixes #1160 by keeping the original name of the column and using it if the
column type is date.
2020-04-10 14:14:24 +02:00
Raúl Marín
a614fb1ef6 Merge pull request #1159 from Algunenano/travis_12
Travis: Add pg12
2020-04-09 15:09:07 +02:00
Raúl Marín
aa38dd3b59 Travis: Add pg12 2020-04-09 13:20:51 +02:00
Daniel G. Aubert
2ac050501b Merge pull request #1158 from CartoDB/get-tile-promises
Version 9.0.0
2020-04-05 13:23:18 +02:00
Daniel García Aubert
03abe187ce Update NEWS and prepare next major release version 2020-04-05 13:16:45 +02:00
Daniel García Aubert
a83d0cf7af Update windshaft to released version 6.0.0 2020-04-05 12:59:58 +02:00
Daniel García Aubert
8bb4fbec12 Get the rendererCache's config right and avoid setting the NamedMapCacheReporter's interval to 'undefined' 2020-04-04 18:51:22 +02:00
Daniel García Aubert
a8fb51ba25 - Rename NamedMapProviderReporter to NamedMapProviderCacheReporter
- Extract getOnTileErrorStrategy to a module
- Stop using MapStore from windshaft while testing and create a custom one instead
2020-04-04 17:46:08 +02:00
Daniel García Aubert
24efc37737 Update windshaft development version 2020-04-04 17:42:52 +02:00
Daniel García Aubert
c25678cc28 Remove /version endpoint and bootstrapFonts at process startup (now done in windshaft) 2020-04-04 17:42:26 +02:00
Daniel García Aubert
44970b78a1 TODO 2020-04-04 17:35:09 +02:00
Daniel García Aubert
a3bdbf6202 In tests, stop using mapnik module exposed by windshaft and require it from development dependencies 2020-04-04 17:34:22 +02:00
Daniel García Aubert
f583a4240a Remove jshint comments 2020-04-04 17:29:33 +02:00
Daniel García Aubert
4054c6923f Use new signature for onTileErrorStrategy 2020-03-27 19:38:28 +01:00
Daniel García Aubert
7a1d84a3fb Update windshaft 2020-03-27 16:59:30 +01:00
Daniel García Aubert
58ed7c0093 Lint 2020-03-23 10:07:24 +01:00
Daniel García Aubert
f56e79ed1f Update windshaft 2020-03-23 10:01:54 +01:00
Daniel García Aubert
45c423bbaf Update windshaft 2020-03-21 18:53:32 +01:00
Daniel García Aubert
78f47e5873 Update windshaft and send more metrics 2020-03-21 18:30:38 +01:00
Daniel García Aubert
21d1a56953 Update windshaft and use the new method that reports stats about cached renderers 2020-03-21 14:13:53 +01:00
Daniel García Aubert
69a02bcee0 Fix stat named map providers cache count 2020-03-20 18:50:22 +01:00
Daniel García Aubert
d2c0f553fc Update windshaft to development version 2020-03-18 19:50:35 +01:00
Daniel García Aubert
3967aecfdc Fix test where http-fallback-image renderer was failing quietly 2020-03-18 19:45:31 +01:00
Esther Lozano
7b8cc0a8b8 Add response time to pubsub events (#1155) 2020-03-10 11:40:01 +01:00
Daniel G. Aubert
28c4e89ab5 Merge pull request #1156 from CartoDB/camshaft-0.65.3
Upgrade camshaft to version 0.65.3
2020-03-05 12:14:52 +01:00
Daniel García Aubert
8c42ac9053 Update NEWS and project version 2020-03-05 11:40:46 +01:00
Daniel García Aubert
86987f9e69 Upgrade camshaft to version 0.65.3 2020-03-05 11:36:23 +01:00
Simon Martín
33a8267d2c Merge pull request #1154 from CartoDB/add-pubsub-metrics
Add pubsub metrics
2020-02-27 11:14:32 +01:00
Esther Lozano
779a8a8927 Fix linter 2020-02-26 17:44:53 +01:00
Esther Lozano
1888302cee Avoid normalizing empty fields 2020-02-26 17:41:41 +01:00
Esther Lozano
34c446909e Trim fields when normalizing 2020-02-26 14:50:41 +01:00
Esther Lozano
583765a298 Normalize headers values for pubsub 2020-02-26 13:24:46 +01:00
Esther Lozano
4b1f0b5775 Add unit and integration tests for pubsub 2020-02-25 14:14:44 +01:00
Esther Lozano
8f81c810e0 Continue middleware chain after response or error 2020-02-25 14:14:20 +01:00
Esther Lozano
970be73052 Allow extra headers in the requests of test client 2020-02-24 12:30:46 +01:00
Esther Lozano
e85469cc3c Use middleware for all requests 2020-02-20 15:25:53 +01:00
Esther Lozano
4a41ee8f75 Add backend and middleware for pubsub metrics 2020-02-20 11:48:32 +01:00
Esther Lozano
9591a5a2b0 Store userId in res.locals 2020-02-20 11:47:44 +01:00
Javier Goizueta
8f510f401e Release 8.1.1 2020-02-17 18:49:24 +01:00
Javier Goizueta
92678c3dae Merge pull request #1152 from CartoDB/1117-camshaft-update
Upgrade camshaft to 0.65.2
2020-02-17 18:46:14 +01:00
Javier Goizueta
9f2d1f90d0 Update NEWS 2020-02-17 18:36:18 +01:00
Javier Goizueta
23e331610d Upgrade camshaft to 0.65.2 2020-02-17 18:36:05 +01:00
Esther Lozano
59cb6f9c9c Rename headers for metrics 2020-02-17 17:07:26 +01:00
Esther Lozano
98325495ea Allow metrics custom headers in cors 2020-02-13 12:52:20 +01:00
Daniel García Aubert
576518b2c8 Stubs next version 2020-01-27 12:56:44 +01:00
Daniel García Aubert
0631bafbbf Release 8.1.0 2020-01-27 12:47:57 +01:00
Daniel García Aubert
d9b6284914 Update carto_postgresql_ext minor dependency version 2020-01-27 12:45:35 +01:00
Daniel García Aubert
111b927033 Minor doc fixes 2019-12-30 12:43:00 +01:00
Daniel G. Aubert
d63337f06f Merge pull request #1150 from CartoDB/new-filters
New dataview filters: circle & polygon
2019-12-20 09:40:09 +01:00
Daniel García Aubert
7012e6a66a Update NEWS 2019-12-20 09:37:27 +01:00
Daniel García Aubert
726e1a2268 Add test to validate parameters 2019-12-16 16:12:57 +01:00
Daniel García Aubert
6e455a1205 Better condition 2019-12-16 12:54:17 +01:00
Daniel García Aubert
da07d550d2 Use ST_DWithin() 2019-12-16 12:30:52 +01:00
Daniel García Aubert
1829a634e9 Add formula dataview test 2019-12-16 09:28:11 +01:00
Daniel García Aubert
95f66b8c4b Transform from 3857 2019-12-13 12:36:13 +01:00
Daniel García Aubert
ea1f43bec7 Fix query to make the proper transformations 2019-12-11 11:02:31 +01:00
Daniel García Aubert
c877d0b964 Implement polygon filter 2019-12-03 10:58:55 +01:00
Daniel García Aubert
caf09ac644 Rename file 2019-12-03 10:02:51 +01:00
Daniel García Aubert
17f151cd5a Implement circle filter for dataviews 2019-12-02 18:36:41 +01:00
Daniel García Aubert
0940158d01 Implemented tests, happy cases 2019-12-02 16:17:55 +01:00
Daniel G. Aubert
e6bbe8351d Merge pull request #1148 from CartoDB/no-makefile
Simplify npm scripts
2019-12-02 15:14:17 +01:00
Raúl Marín
031bae2564 Merge pull request #1149 from Algunenano/find_cartodb
Update camshaft to 0.65.1
2019-12-02 14:54:05 +01:00
Raúl Marín
b8d790caab Update camshaft to 0.65.1 2019-12-02 14:22:41 +01:00
Daniel García Aubert
267557eb90 Simplify npm scripts 2019-12-02 14:15:40 +01:00
Daniel García Aubert
b2af93dfec Remove reference to nonexistent file 2019-12-02 14:14:40 +01:00
Raúl Marín
7e81618769 Merge pull request #1147 from Algunenano/find_cartodb
Enforce the usage of cartodb schema
2019-12-02 13:58:33 +01:00
Daniel G. Aubert
eeac5ce998 Merge pull request #1146 from CartoDB/no-makefile
Leftovers from #1145
2019-12-02 13:55:00 +01:00
Raúl Marín
fcf2fd1455 Enforce the usage of cartodb schema 2019-12-02 13:43:21 +01:00
Daniel García Aubert
fb9dce0386 Lint 2019-12-02 12:56:21 +01:00
Daniel García Aubert
4c09a70647 Avoid overwriting env; just extend it with new env variables 2019-12-02 12:51:16 +01:00
Daniel García Aubert
eee59abfa1 Remove unused bash script 2019-12-02 12:43:59 +01:00
Daniel G. Aubert
c7effbccb4 Merge pull request #1145 from CartoDB/no-makefile
Improve project usability
2019-12-02 12:36:41 +01:00
Daniel García Aubert
2912e4fea6 Update NEWS and Release version 2019-12-02 12:30:19 +01:00
Daniel García Aubert
2d09a214ae Leftovers from other PR 2019-12-02 11:02:35 +01:00
Daniel García Aubert
a88c085278 Update 'how to release' document 2019-12-01 19:03:47 +01:00
Daniel García Aubert
5dcca3e088 Update 'how to release' document 2019-12-01 19:00:27 +01:00
Daniel García Aubert
413a1685aa Set default timeout 2019-12-01 14:20:01 +01:00
Daniel García Aubert
7081a7ec3c Better script organization 2019-12-01 13:04:56 +01:00
Daniel García Aubert
33143ea28e Update doc 2019-12-01 13:04:31 +01:00
Daniel García Aubert
f8c86f3b72 Don't be bombastic 2019-11-29 17:23:27 +01:00
Daniel García Aubert
ae53cc736b Merge branch 'master' into no-makefile 2019-11-29 16:39:44 +01:00
Daniel García Aubert
eca75d1365 Removed unused docker files 2019-11-29 16:33:47 +01:00
Daniel García Aubert
ef201e6fcf Improve docker section in docs 2019-11-29 16:30:48 +01:00
Daniel García Aubert
38a556b7d6 Improve spelling in documentation 2019-11-29 15:55:41 +01:00
Daniel García Aubert
c071746768 npm install 2019-11-29 13:51:09 +01:00
Daniel García Aubert
57512ba48b Format 2019-11-29 13:47:38 +01:00
Daniel García Aubert
dcf765efda Merge pre-install scripts 2019-11-29 13:43:05 +01:00
Daniel García Aubert
525d41e63c Merge pre-install scripts 2019-11-29 13:41:18 +01:00
Daniel García Aubert
7d7ca0de4a Add run perms to pre-install script 2019-11-29 13:32:05 +01:00
Daniel García Aubert
11e5726ea9 Improve coverage section 2019-11-29 13:24:12 +01:00
Daniel García Aubert
d3f0c52474 Add test and coverage sections 2019-11-29 13:19:47 +01:00
Daniel García Aubert
8523f835dc Format 2019-11-29 13:13:31 +01:00
Daniel García Aubert
63ccfac599 Add links 2019-11-29 13:11:19 +01:00
Daniel García Aubert
283baa4a3f remove death link 2019-11-29 13:00:54 +01:00
Daniel García Aubert
c7bd132e2f Add developers center link 2019-11-29 12:58:16 +01:00
Daniel García Aubert
3c92e186d6 Missing optional requirement 2019-11-29 12:51:12 +01:00
Daniel García Aubert
67d8919f8a Typos 2019-11-29 12:48:00 +01:00
Daniel García Aubert
06c0b28d37 Add versioning and license sections 2019-11-29 12:44:31 +01:00
Daniel García Aubert
dfedb45254 Update description and keywords 2019-11-29 12:37:33 +01:00
Daniel García Aubert
b373965510 Improve format contributing section 2019-11-29 12:30:56 +01:00
Daniel García Aubert
52d887f3b4 typos 2019-11-29 12:27:15 +01:00
Daniel García Aubert
a6ca480210 Improve section 2019-11-29 11:27:11 +01:00
Daniel García Aubert
16e80424e0 Typo 2019-11-29 11:23:42 +01:00
Daniel García Aubert
6c72d3adbe Typo 2019-11-29 11:22:32 +01:00
Daniel García Aubert
bbc9c9fb9b Merge documents into README 2019-11-29 11:18:25 +01:00
Daniel García Aubert
42d0c4c040 Remove unused makefile scripts 2019-11-28 19:56:49 +01:00
Daniel García Aubert
8f99886d62 Clean script 2019-11-28 19:46:22 +01:00
Daniel García Aubert
60c01e583f Remove deprecated coverage dep. Use nyc instead 2019-11-28 19:46:02 +01:00
Daniel García Aubert
f21f89f561 Move script to docker folder 2019-11-28 18:52:22 +01:00
Daniel García Aubert
5f900a3b3c Update command 2019-11-28 18:45:05 +01:00
Daniel García Aubert
60db55b122 Missing configuration file in CI 2019-11-28 18:42:04 +01:00
Daniel García Aubert
d9c05a9333 Don't use bash script to run ci test 2019-11-28 18:28:01 +01:00
Daniel García Aubert
ab66ad83fd Exec psql commands in batches 2019-11-28 18:07:44 +01:00
Daniel García Aubert
3498fceb6a Improve npm script hooks 2019-11-28 18:07:25 +01:00
Daniel García Aubert
e841774978 Set node env for test 2019-11-28 17:30:29 +01:00
Daniel García Aubert
f297044203 Exec redis commands in batches 2019-11-28 17:30:06 +01:00
Daniel García Aubert
c7e803a94c missing middleware 2019-11-26 15:46:56 +01:00
Raúl Marín
ac198d5b5a Merge pull request #1143 from CartoDB/quote_columns
Quote columns
2019-11-25 13:55:38 +01:00
Raúl Marín
6eb66de94e Unify stripQuotes 2019-11-25 13:38:42 +01:00
Raúl Marín
f545b4d002 camshaft quote_columns 2019-11-25 13:17:09 +01:00
Daniel García Aubert
eee3e8b63c Draft: added script to setup and tear down tests 2019-11-22 19:47:00 +01:00
Daniel G. Aubert
69afee61e0 Merge pull request #1136 from CartoDB/eslint
Eslint
2019-11-14 16:16:47 +01:00
Daniel García Aubert
724f67d381 Update NEWS 2019-11-14 16:09:38 +01:00
Daniel García Aubert
8d69af4445 Merge branch 'master' into eslint 2019-11-14 14:53:53 +01:00
Daniel G. Aubert
3c301ce742 Merge pull request #1141 from CartoDB/fix-undefinded-layergroupTTL
Fix undefined layergroup TTL
2019-11-14 14:50:32 +01:00
Daniel García Aubert
f87c432744 Use good defaults 2019-11-14 13:14:31 +01:00
Daniel García Aubert
d446ba9c1b Merge branch 'eslint' of github.com:CartoDB/Windshaft-cartodb into eslint 2019-11-14 12:34:47 +01:00
Daniel García Aubert
dc669f5cd4 Do not use object built-ins 2019-11-14 12:18:13 +01:00
Daniel García Aubert
d4719d5707 camel case 2019-11-14 12:17:26 +01:00
Daniel García Aubert
f9082dad94 Merge branch 'eslint' of github.com:CartoDB/Windshaft-cartodb into eslint 2019-11-14 11:13:04 +01:00
Daniel García Aubert
a8d421c9cc Merge branch 'eslint' of github.com:CartoDB/Windshaft-cartodb into eslint 2019-11-13 20:08:04 +01:00
Daniel García Aubert
7b13c12ab4 Automatically lint fixes 2019-11-13 20:07:41 +01:00
Daniel García Aubert
97f4adbc1a camel case 2019-11-13 20:05:19 +01:00
Daniel García Aubert
602ab44375 camel case 2019-11-13 19:47:29 +01:00
Daniel García Aubert
77e6fb8225 Avoid passing undefined layergroupTTL 2019-11-13 19:29:41 +01:00
Daniel García Aubert
2c8a030ecb Merge branch 'master' into eslint 2019-11-13 19:00:50 +01:00
Daniel García Aubert
ccd01e6da5 Stubs next version 2019-11-13 13:17:47 +01:00
Daniel García Aubert
df10cfe641 Release 8.0.0 2019-11-13 13:07:00 +01:00
Daniel G. Aubert
09d3e8aabb Merge pull request #1139 from CartoDB/fix/bad-aggregation-method-overview
Validate aggregation method is either sum or count
2019-11-13 10:11:28 +01:00
Esther Lozano
f17411916f Remove unnecessary config in tests 2019-11-12 17:43:03 +01:00
Esther Lozano
75583f67c5 Use last version for map config in tests 2019-11-12 14:54:25 +01:00
Esther Lozano
bb745b0318 Update test/acceptance/dataviews/overviews-test.js
Co-Authored-By: Daniel G. Aubert <danielgarciaaubert@gmail.com>
2019-11-12 14:48:37 +01:00
Raúl Marín
3834aeb73f Merge pull request #1140 from Algunenano/local_password
Fix multiple DB login issues
2019-11-12 13:23:34 +01:00
Raúl Marín
aa09c079f6 Fix multiple DB login issues 2019-11-12 13:15:32 +01:00
Raúl Marín
3c586caba4 Qualify calls to cartodb extension so having it in the search_path isn't necessary 2019-11-12 13:15:32 +01:00
Esther Lozano
b05740048c Update NEWS.md 2019-11-12 12:48:22 +01:00
Esther Lozano
2b5ed21207 Remove only in tests :P 2019-11-12 12:37:24 +01:00
Esther Lozano
acecb88efb Validate aggregation method is either sum or count 2019-11-11 18:14:30 +01:00
Daniel García Aubert
734c373f3d Camel case 2019-11-11 12:26:04 +01:00
Daniel García Aubert
e49cb524a8 Update makefile 2019-11-06 14:02:37 +01:00
Daniel García Aubert
cc24228511 Fix eslint issues 2019-11-06 13:56:59 +01:00
Daniel García Aubert
27106fea57 Avoid regular strings that contain what looks like a template literal placeholder 2019-11-06 13:29:03 +01:00
Simon Martín
990aaadc16 Merge pull request #1134 from CartoDB/better-redis-logging
Adding a logger to MapStore
2019-10-29 12:10:00 +01:00
Simon Martín
0c572b5947 NEWS 2019-10-29 11:37:16 +01:00
Simon Martín
3e7c294989 linter 2019-10-29 11:32:27 +01:00
Simon Martín
8a02156ac0 fix tests 2019-10-29 10:31:31 +01:00
Simon Martín
c4a75de0d8 windshaft 5.6.4 2019-10-28 18:15:15 +01:00
Simon Martín
db03bcdf8f windshaft logger by config param 2019-10-28 15:41:46 +01:00
Simon Martín
dd5825c770 using a new logger 2019-10-28 11:55:43 +01:00
Daniel García Aubert
8fbe8f9f2a Remove unused vars 2019-10-25 10:58:00 +02:00
Daniel García Aubert
3bc3d19f40 Stop using legacy URL api 2019-10-25 09:38:05 +02:00
Daniel García Aubert
575fe8e350 Remove unnecessary escape usage 2019-10-24 19:18:47 +02:00
Daniel García Aubert
d5218a86f6 Enforce callback error handling 2019-10-24 18:38:37 +02:00
Simon Martín
080f93f6de passing logger to MapStore 2019-10-24 17:34:46 +02:00
Simon Martín
df931d95a3 using github:cartodb/windshaft#better-redis-logging 2019-10-24 17:34:30 +02:00
Daniel García Aubert
d5406d5b50 remove jshint comment 2019-10-22 19:22:38 +02:00
Daniel García Aubert
f7e877ce60 Use template string instead of ES5 string line break 2019-10-22 19:11:32 +02:00
Daniel García Aubert
ad4a1ada45 Do not use string concatenation when using __dirname and __filename 2019-10-22 18:22:33 +02:00
Daniel García Aubert
da0d0d21e3 Remove jshintrc 2019-10-21 23:33:52 +02:00
Daniel García Aubert
7a1d2ca205 Avoid calling Object.prototype methods directly on object instances 2019-10-21 23:33:27 +02:00
Daniel García Aubert
d89e785440 Stop using deprecated Buffer constructor 2019-10-21 20:05:51 +02:00
Daniel García Aubert
2423b5a4c4 Replace assert.deepEqual() by assert.deepStrictEqual() 2019-10-21 19:52:51 +02:00
Daniel García Aubert
1bee877b24 Replace assert.equal() by assert.strictEqual() 2019-10-21 19:41:03 +02:00
Daniel García Aubert
4d70ac0894 Apply automatic eslint fixes 2019-10-21 19:07:24 +02:00
Daniel García Aubert
593d9e40f6 Remove jshint, add eslint and config 2019-10-21 18:50:01 +02:00
Daniel G. Aubert
9fd1a3c663 Merge pull request #1131 from CartoDB/coherent-cache-invalidation
Coherent cache invalidation
2019-10-21 16:41:28 +02:00
Daniel García Aubert
8a781d241c Typo 2019-10-21 16:22:26 +02:00
Daniel García Aubert
be4d610de1 Use released version of cartodb-query-tables 0.7.0 2019-10-21 16:17:54 +02:00
Daniel García Aubert
736d3460d9 Update development branch 2019-10-21 13:46:29 +02:00
Daniel García Aubert
f844d70275 Replace http --> https. I swear, I'm not a spy 2019-10-21 11:32:56 +02:00
Daniel García Aubert
0c9cfefcd0 Please jshint, can you be a regular linter? 2019-10-21 11:13:54 +02:00
Daniel García Aubert
8ed187b0f5 Do not set Last-Modified to January 1st 1970 when the cache buster in the layergroup token is 0. In this case, 0 means we don't know when the resource was last updated. 2019-10-21 11:01:05 +02:00
owayss
e5bada81dc Merge pull request #1132 from CartoDB/default_to_stdout_logging
Default to stdout logging on dev environment
2019-10-17 14:13:28 +02:00
Owayss Kabtoul
655f817033 Default to stdout logging on dev environment 2019-10-17 13:04:10 +02:00
Daniel García Aubert
ebff2ac9f2 Please JSHint 2019-10-15 13:27:40 +02:00
Daniel García Aubert
5a7ffcf499 Be able to synchronize the TTL of cache-control header to expire in a coherent way 2019-10-15 12:48:50 +02:00
Daniel García Aubert
f8e117a7b7 JSHint is not ready for modern javascript 2019-10-15 11:46:44 +02:00
Daniel García Aubert
c4054f0ac9 Use develop branch of query-tables 2019-10-15 10:39:31 +02:00
Daniel García Aubert
f7707141d6 Rename variable 2019-10-08 17:22:24 +02:00
Daniel G. Aubert
c40c42fc10 Merge pull request #1130 from CartoDB/remove-routes-adapter
Remove environment configuration adapter
2019-10-08 16:34:24 +02:00
Daniel García Aubert
6cad976078 Remove environment configuration adapter 2019-10-08 11:02:32 +02:00
Daniel G. Aubert
c82f17e5d2 Merge pull request #1126 from CartoDB/gears
Be able to inject middlewares from configuration
2019-10-07 17:35:01 +02:00
Daniel G. Aubert
1054bde7fd Merge pull request #1127 from CartoDB/folders
Standardize folder structure and filenames
2019-10-07 17:34:27 +02:00
Daniel G. Aubert
9e23b91f3f Merge pull request #1129 from CartoDB/fix-routes-config
Convert from v1 to api in routes config
2019-10-07 17:34:00 +02:00
Daniel García Aubert
ea6e064e42 Convert from v1 to api in routes config 2019-10-07 14:08:59 +02:00
Raúl Marín
cf0858f5b9 Merge pull request #1128 from Algunenano/sec
Tests: Remove unnecessary extra qualification
2019-10-07 14:03:06 +02:00
Raul Marin
69b11a8412 sec 2019-10-07 13:36:18 +02:00
Daniel García Aubert
55aad4254c Remove cartodb folder in unit test 2019-10-07 11:29:07 +02:00
Daniel García Aubert
73e1659378 Add suffix '-test' to every test-suite file 2019-10-07 11:16:48 +02:00
Daniel García Aubert
98f3e8159e Stop using __dirname in requires 2019-10-07 10:55:26 +02:00
Daniel García Aubert
e8cff194fc Rename template_maps -> template-maps 2019-10-07 10:50:14 +02:00
Daniel García Aubert
f1de1b3b91 Rename test files: stop using underscores, use hyphens instead 2019-10-07 10:44:45 +02:00
Daniel García Aubert
a134ab3012 Rename server_options -> server-options 2019-10-07 10:10:51 +02:00
Daniel García Aubert
5a84d7233b Rename table_name_parser -> table-name-parser 2019-10-07 10:07:25 +02:00
Daniel García Aubert
8fe0112568 Rename overviews_query_rewriter -> overviews-query-rewriter 2019-10-07 10:06:01 +02:00
Daniel García Aubert
3acaac5403 Rename icu_data_env_setter -> icu-data-env-setter 2019-10-07 10:04:39 +02:00
Daniel García Aubert
7dbac5a565 Rename profiler_proxy -> profiler-proxy 2019-10-07 10:01:18 +02:00
Daniel García Aubert
8fb4f4063f Rename health_check -> health-check 2019-10-07 09:59:54 +02:00
Daniel García Aubert
808718fb26 Rename cdb_request -> cdb-request 2019-10-07 09:58:21 +02:00
Daniel García Aubert
6dc8de315a Rename surrogate_keys_cache -> surrogate-keys-cache 2019-10-07 09:56:20 +02:00
Daniel García Aubert
afb9b08925 Rename surrogate_keys_cache -> surrogate-keys-cache 2019-10-07 09:55:55 +02:00
Daniel García Aubert
2bed034e64 Rename named_map_provider_cache -> named-map-provider-cache 2019-10-07 09:54:54 +02:00
Daniel García Aubert
2328bb6261 Rename layergroup_affected_tables -> layergroup-affected-tables 2019-10-07 09:53:06 +02:00
Daniel García Aubert
06357fa3f9 Rename named_maps_entry -> named-maps-entry 2019-10-07 09:51:51 +02:00
Daniel García Aubert
83f58288f9 Rename varnish_http -> varnish-http 2019-10-07 09:47:42 +02:00
Daniel García Aubert
b1d5f0f9e8 Rename pg_query_runner -> pg-query-runner 2019-10-07 09:45:46 +02:00
Daniel García Aubert
7142e4db37 Rename pg_connection -> pg-connection 2019-10-07 09:43:40 +02:00
Daniel García Aubert
281a079a62 Simplify folder structure 2019-10-07 09:40:50 +02:00
Daniel García Aubert
0d638e6bad Fix command example 2019-10-06 19:12:11 +02:00
Daniel García Aubert
43a63feaca Keep the backwards compatibility for routing configuration 2019-10-06 18:58:21 +02:00
Daniel García Aubert
4aa6ffe28c Add tests to check custom middlewares behavior 2019-10-04 17:54:32 +02:00
Daniel García Aubert
2ce688ee2a Missed applying configuration changes to default server options 2019-10-04 12:56:36 +02:00
Daniel García Aubert
4e967980a3 Prepare next release and update NEWS 2019-10-04 12:41:27 +02:00
Daniel García Aubert
93edf07da8 Temporary workaround to not depend on configuration changes 2019-10-04 12:35:12 +02:00
Daniel García Aubert
a684bead92 Rename method 'register' -> 'route' 2019-10-04 12:22:23 +02:00
Daniel García Aubert
dd06de2632 Use new routes configuration 2019-10-04 12:07:58 +02:00
Daniel García Aubert
975f07df99 Use Object.values() 2019-10-02 10:42:29 +02:00
Daniel García Aubert
5fe6845d7c Add comments 2019-10-01 19:48:16 +02:00
Daniel García Aubert
4aa844946d Update environment example files 2019-10-01 19:42:04 +02:00
Daniel García Aubert
3220e3de31 Remove old api configuration paths 2019-10-01 19:34:03 +02:00
Daniel García Aubert
26bba3c5f5 Prepare next release version 2019-10-01 19:31:14 +02:00
Daniel García Aubert
c82a5c38df Fix indentation 2019-10-01 18:20:17 +02:00
Daniel García Aubert
9cfaf6eefc Draft: be able to inject middlewares from configuration 2019-09-30 19:18:36 +02:00
375 changed files with 27128 additions and 21317 deletions

.eslintrc.js (new file, 22 lines)

@@ -0,0 +1,22 @@
module.exports = {
    env: {
        commonjs: true,
        es6: true,
        node: true,
        mocha: true
    },
    extends: [
        'standard'
    ],
    globals: {
        Atomics: 'readonly',
        SharedArrayBuffer: 'readonly'
    },
    parserOptions: {
        ecmaVersion: 2018
    },
    rules: {
        "indent": ["error", 4],
        "semi": ["error", "always"]
    }
}
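For reference, a tiny snippet (illustrative only, not taken from the repository) that satisfies this configuration: the `standard` base plus the 4-space `indent` and always-`semi` overrides.

```js
// Hypothetical module written to pass the ESLint rules above:
// 'standard' style, 4-space indentation, required semicolons.
function greet (name) {
    const message = `Hello, ${name}`;
    return message;
}

module.exports = { greet };
```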

.github/workflows/main.yml (new file, 58 lines)

@@ -0,0 +1,58 @@
name: continuous integration
on:
  pull_request:
    paths-ignore:
      - 'LICENSE'
      - 'README**'
      - 'HOW_TO_RELEASE**'
      - 'LOGGING**'
env:
  GCLOUD_VERSION: '306.0.0'
  ARTIFACTS_PROJECT_ID: cartodb-on-gcp-main-artifacts
jobs:
  build-test-docker:
    runs-on: ubuntu-18.04
    timeout-minutes: 10
    steps:
      - uses: actions/checkout@v2
        with:
          submodules: true
          token: ${{ secrets.CARTOFANTE_PERSONAL_TOKEN }}
      - name: Build image
        # we tag with "latest" but we don't push it on purpose. We use it as a base for the testing image
        run: |
          echo ${GITHUB_SHA::7}
          echo ${GITHUB_REF##*/}
          docker build -f private/Dockerfile -t gcr.io/$ARTIFACTS_PROJECT_ID/windshaft:latest -t gcr.io/$ARTIFACTS_PROJECT_ID/windshaft:${GITHUB_REF##*/} -t gcr.io/$ARTIFACTS_PROJECT_ID/windshaft:${GITHUB_SHA::7} -t gcr.io/$ARTIFACTS_PROJECT_ID/windshaft:${GITHUB_REF##*/}--${GITHUB_SHA::7} .
      - name: Build testing image
        # here it uses the latest from prev step to add the needed parts on top
        run: |
          docker build -t gcr.io/$ARTIFACTS_PROJECT_ID/windshaft-test:latest -f private/Dockerfile.test .
      - name: Setup gcloud authentication
        uses: google-github-actions/setup-gcloud@master
        with:
          version: ${{env.GCLOUD_VERSION}}
          service_account_key: ${{ secrets.ARTIFACTS_GCLOUD_ACCOUNT_BASE64 }}
      - name: Configure docker and pull images
        # we pull images manually, as if done in next step using docker-compose it fails because missing openssl
        run: |
          gcloud auth configure-docker
          docker pull gcr.io/cartodb-on-gcp-main-artifacts/postgres:latest
          docker pull gcr.io/cartodb-on-gcp-main-artifacts/redis:latest
      - name: Run tests inside container
        run: docker-compose -f private/ci/docker-compose.yml run windshaft-tests
      - name: Upload image
        run: |
          docker push gcr.io/$ARTIFACTS_PROJECT_ID/windshaft:${GITHUB_REF##*/}
          docker push gcr.io/$ARTIFACTS_PROJECT_ID/windshaft:${GITHUB_SHA::7}
          docker push gcr.io/$ARTIFACTS_PROJECT_ID/windshaft:${GITHUB_REF##*/}--${GITHUB_SHA::7}

.github/workflows/master.yml (new file, 47 lines)

@@ -0,0 +1,47 @@
# in this workflow we don't run the tests. Only build image, tag (also latest) and upload. The tests are not run because they are run
# on each pull request, and there is a branch protection that forces to have branch up to date before merging, so tests are always run
# with the latest code
name: master build image
on:
  push:
    branches:
      - master
env:
  GCLOUD_VERSION: '306.0.0'
  ARTIFACTS_PROJECT_ID: cartodb-on-gcp-main-artifacts
jobs:
  build-master:
    runs-on: ubuntu-18.04
    timeout-minutes: 5
    steps:
      - uses: actions/checkout@v2
        with:
          submodules: true
          token: ${{ secrets.CARTOFANTE_PERSONAL_TOKEN }}
      - name: Build image
        run: |
          echo ${GITHUB_SHA::7}
          echo ${GITHUB_REF##*/}
          docker build -f private/Dockerfile -t gcr.io/$ARTIFACTS_PROJECT_ID/windshaft:latest -t gcr.io/$ARTIFACTS_PROJECT_ID/windshaft:${GITHUB_REF##*/} -t gcr.io/$ARTIFACTS_PROJECT_ID/windshaft:${GITHUB_SHA::7} .
      - name: Setup gcloud authentication
        uses: google-github-actions/setup-gcloud@master
        with:
          version: ${{env.GCLOUD_VERSION}}
          service_account_key: ${{ secrets.ARTIFACTS_GCLOUD_ACCOUNT_BASE64 }}
      - name: Configure docker
        run: |
          gcloud auth configure-docker
      - name: Upload image
        run: |
          docker push gcr.io/$ARTIFACTS_PROJECT_ID/windshaft:${GITHUB_REF##*/}
          docker push gcr.io/$ARTIFACTS_PROJECT_ID/windshaft:${GITHUB_SHA::7}
          docker push gcr.io/$ARTIFACTS_PROJECT_ID/windshaft:latest

.gitignore (3 lines added)

@@ -11,3 +11,6 @@ redis.pid
*.log
coverage/
.DS_Store
.nyc_output
build_resources/
.dockerignore

.gitmodules (new file, 4 lines)

@@ -0,0 +1,4 @@
[submodule "private"]
path = private
url = git@github.com:CartoDB/Windshaft-cartodb-private.git
branch = master

.jshintrc (deleted)

@@ -1,95 +0,0 @@
{
// // JSHint Default Configuration File (as on JSHint website)
// // See http://jshint.com/docs/ for more details
//
// "maxerr" : 50, // {int} Maximum error before stopping
//
// // Enforcing
// "bitwise" : true, // true: Prohibit bitwise operators (&, |, ^, etc.)
// "camelcase" : false, // true: Identifiers must be in camelCase
"curly" : true, // true: Require {} for every new block or scope
"eqeqeq" : true, // true: Require triple equals (===) for comparison
"forin" : true, // true: Require filtering for..in loops with obj.hasOwnProperty()
"freeze" : true, // true: prohibits overwriting prototypes of native objects such as Array, Date etc.
"immed" : true, // true: Require immediate invocations to be wrapped in parens e.g. `(function () { } ());`
// "indent" : 4, // {int} Number of spaces to use for indentation
// "latedef" : false, // true: Require variables/functions to be defined before being used
"newcap" : true, // true: Require capitalization of all constructor functions e.g. `new F()`
"noarg" : true, // true: Prohibit use of `arguments.caller` and `arguments.callee`
// "noempty" : true, // true: Prohibit use of empty blocks
"nonbsp" : true, // true: Prohibit "non-breaking whitespace" characters.
"nonew" : true, // true: Prohibit use of constructors for side-effects (without assignment)
// "plusplus" : false, // true: Prohibit use of `++` & `--`
// "quotmark" : false, // Quotation mark consistency:
// // false : do nothing (default)
// // true : ensure whatever is used is consistent
// // "single" : require single quotes
// // "double" : require double quotes
"undef" : true, // true: Require all non-global variables to be declared (prevents global leaks)
"unused" : true, // true: Require all defined variables be used
// "strict" : true, // true: Requires all functions run in ES5 Strict Mode
// "maxparams" : false, // {int} Max number of formal params allowed per function
// "maxdepth" : false, // {int} Max depth of nested blocks (within functions)
// "maxstatements" : false, // {int} Max number statements per function
"maxcomplexity" : 6, // {int} Max cyclomatic complexity per function
"maxlen" : 120, // {int} Max number of characters per line
//
// // Relaxing
// "asi" : false, // true: Tolerate Automatic Semicolon Insertion (no semicolons)
// "boss" : false, // true: Tolerate assignments where comparisons would be expected
"debug" : false, // true: Allow debugger statements e.g. browser breakpoints.
// "eqnull" : false, // true: Tolerate use of `== null`
// "es5" : false, // true: Allow ES5 syntax (ex: getters and setters)
"esnext" : true, // true: Allow ES.next (ES6) syntax (ex: `const`)
// "moz" : false, // true: Allow Mozilla specific syntax (extends and overrides esnext features)
// // (ex: `for each`, multiple try/catch, function expression…)
// "evil" : false, // true: Tolerate use of `eval` and `new Function()`
// "expr" : false, // true: Tolerate `ExpressionStatement` as Programs
// "funcscope" : false, // true: Tolerate defining variables inside control statements
// "globalstrict" : false, // true: Allow global "use strict" (also enables 'strict')
// "iterator" : false, // true: Tolerate using the `__iterator__` property
// "lastsemic" : false, // true: Tolerate omitting a semicolon for the last statement of a 1-line block
// "laxbreak" : false, // true: Tolerate possibly unsafe line breakings
// "laxcomma" : false, // true: Tolerate comma-first style coding
// "loopfunc" : false, // true: Tolerate functions being defined in loops
// "multistr" : false, // true: Tolerate multi-line strings
// "noyield" : false, // true: Tolerate generator functions with no yield statement in them.
// "notypeof" : false, // true: Tolerate invalid typeof operator values
// "proto" : false, // true: Tolerate using the `__proto__` property
// "scripturl" : false, // true: Tolerate script-targeted URLs
// "shadow" : false, // true: Allows re-define variables later in code e.g. `var x=1; x=2;`
// "sub" : false, // true: Tolerate using `[]` notation when it can still be expressed in dot notation
// "supernew" : false, // true: Tolerate `new function () { ... };` and `new Object;`
// "validthis" : false, // true: Tolerate using this in a non-constructor function
//
// // Environments
// "browser" : true, // Web Browser (window, document, etc)
// "browserify" : false, // Browserify (node.js code in the browser)
// "couch" : false, // CouchDB
// "devel" : true, // Development/debugging (alert, confirm, etc)
// "dojo" : false, // Dojo Toolkit
// "jasmine" : false, // Jasmine
// "jquery" : false, // jQuery
// "mocha" : true, // Mocha
// "mootools" : false, // MooTools
"node" : true, // Node.js
// "nonstandard" : false, // Widely adopted globals (escape, unescape, etc)
// "prototypejs" : false, // Prototype and Scriptaculous
// "qunit" : false, // QUnit
// "rhino" : false, // Rhino
// "shelljs" : false, // ShellJS
// "worker" : false, // Web Workers
// "wsh" : false, // Windows Scripting Host
// "yui" : false, // Yahoo User Interface
// Custom predefined global variables
"predef": [
"-console", // disallows console, use debug
"beforeEach",
"afterEach",
"before",
"after",
"describe",
"it"
]
}

.travis.yml (deleted)

@@ -1,12 +0,0 @@
language: generic
sudo: required
env:
  matrix:
    - NODE_VERSION=10.15.1
      DOCKER_IMAGE=carto/nodejs-xenial-pg101:latest
    - NODE_VERSION=10.15.1
      DOCKER_IMAGE=carto/nodejs-xenial-pg1121:latest
services:
  - docker
before_install: docker pull ${DOCKER_IMAGE}
script: npm run docker-test -- ${DOCKER_IMAGE} ${NODE_VERSION}

Contributing guide (deleted)

@@ -1,11 +0,0 @@
Contributing
---
The issue tracker is at [github.com/CartoDB/Windshaft-cartodb](https://github.com/CartoDB/Windshaft-cartodb).
We love pull requests from everyone, see [Contributing to Open Source on GitHub](https://guides.github.com/activities/contributing-to-open-source/#contributing).
## Submitting Contributions
* You will need to sign a Contributor License Agreement (CLA) before making a submission. [Learn more here](https://carto.com/contributions).

Release how-to (deleted, superseded by HOW_TO_RELEASE.md below)

@@ -1,18 +0,0 @@
1. Test (make clean all check), fix if broken before proceeding
2. Ensure proper version in package.json and package-lock.json
3. Ensure NEWS section exists for the new version, review it, add release date
4. If there are modified dependencies in package.json, update them with `npm upgrade {{package_name}}@{{version}}`
5. Commit package.json, package-lock.json, NEWS
6. git tag -a Major.Minor.Patch # use NEWS section as content
7. Stub NEWS/package for next version
Versions:
Bugfix releases increment Patch component of version.
Feature releases increment Minor and set Patch to zero.
If backward compatibility is broken, increment Major and
set to zero Minor and Patch.
Branches named 'b<Major>.<Minor>' are kept for any critical
fix that might need to be shipped before next feature release
is ready.

HOW_TO_RELEASE.md (new file, 16 lines)

@@ -0,0 +1,16 @@
# How to release
1. Test (npm test), fix if broken before proceeding.
2. Ensure proper version in `package.json` and `package-lock.json`.
3. Ensure NEWS section exists for the new version, review it, add release date.
4. If there are modified dependencies in `package.json`, update them with `npm upgrade {{package_name}}@{{version}}`.
5. Commit `package.json`, `package-lock.json`, NEWS.
6. Run `git tag -a Major.Minor.Patch`. Use NEWS section as content.
7. Stub NEWS/package for next version.
## Version:
* Bugfix releases increment Patch component of version.
* Feature releases increment Minor and set Patch to zero.
* If backward compatibility is broken, increment Major and set to zero Minor and Patch.
* Branches named 'b<Major>.<Minor>' are kept for any critical fix that might need to be shipped before next feature release is ready.

Installation guide (deleted)

@@ -1,41 +0,0 @@
# Installing Windshaft-CartoDB
## Requirements
Make sure that you have the requirements needed. These are:
- Node 10.x
- npm 6.x
- PostgreSQL >= 10.0
- PostGIS >= 2.4
- CARTO Postgres Extension >= 0.24.1
- Redis >= 4
- libcairo2-dev, libpango1.0-dev, libjpeg8-dev and libgif-dev for server side canvas support
- C++11 (to build internal dependencies if needed)
### Optional
- Varnish (http://www.varnish-cache.org)
## PostGIS setup
A `template_postgis` database is expected. One can be set up with
```shell
createdb --owner postgres --template template0 template_postgis
psql -d template_postgis -c 'CREATE EXTENSION postgis;'
```
## Build/install
To fetch and build all node-based dependencies, run:
```shell
npm install
```
Note that the ```npm``` step will populate the node_modules/
directory with modules, some of which being compiled on demand. If you
happen to have startup errors you may need to force rebuilding those
modules. At any time just wipe out the node_modules/ directory and run
```npm``` again.

LOGGING.md (new file, 21 lines)

@@ -0,0 +1,21 @@
# Logging structured traces
In order to have meaningful and useful log traces, you should follow
some general guidelines described in the [Project Guidelines](http://doc-internal.cartodb.net/platform/guidelines.html#structured-logging).
In this project there is a specific logger in place that takes care of
format and context of the traces for you. Take a look at [logger.js](https://github.com/CartoDB/Windshaft-cartodb/blob/cf82e1954e2244861e47fce0c2223ee466a5cd64/lib/utils/logger.js)
(NOTE: that file will be moved soon to a common module).
The logger is instantiated as part of the [app startup process](https://github.com/CartoDB/Windshaft-cartodb/blob/cf82e1954e2244861e47fce0c2223ee466a5cd64/app.js#L53),
then passed to middlewares and other client classes.
There are many examples of how to use the logger to generate traces
throughout the code. Here are a few of them:
```js
lib/api/middlewares/logger.js: res.locals.logger.info({ client_request: req }, 'Incoming request');
lib/api/middlewares/logger.js: res.on('finish', () => res.locals.logger.info({ server_response: res, status: res.statusCode }, 'Response sent'));
lib/api/middlewares/profiler.js: logger.info({ stats, duration: stats.response / 1000, duration_ms: stats.response }, 'Request profiling stats');
lib/api/middlewares/tag.js: res.on('finish', () => logger.info({ tags: res.locals.tags }, 'Request tagged'));
```
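As an extra illustration, here is a minimal sketch (not the project's actual middleware) of how a per-request child logger can be attached to `res.locals` so that the traces above carry request context. It assumes a pino-style logger (`child()`, `info(bindings, message)`) passed in from the server options; the `request_id` field name is an assumption.

```js
// Sketch of a request-scoped logger middleware for an Express app.
module.exports = function initLogger (logger) {
    return function loggerMiddleware (req, res, next) {
        // Bind the request id so every later trace carries it.
        res.locals.logger = logger.child({ request_id: req.get('X-Request-Id') });
        res.locals.logger.info({ client_request: req }, 'Incoming request');
        res.on('finish', () => {
            res.locals.logger.info({ server_response: res, status: res.statusCode }, 'Response sent');
        });
        next();
    };
};
```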

Makefile (deleted)

@@ -1,53 +0,0 @@
SHELL=/bin/bash
pre-install:
	@$(SHELL) ./scripts/check-node-canvas.sh
all:
	@$(SHELL) ./scripts/install.sh
clean:
	rm -rf node_modules/
distclean: clean
	rm config.status*
config.status--test:
	./configure --environment=test
config/environments/test.js: config.status--test
	./config.status--test
TEST_SUITE := $(shell find test/{acceptance,integration,unit} -name "*.js")
TEST_SUITE_UNIT := $(shell find test/unit -name "*.js")
TEST_SUITE_INTEGRATION := $(shell find test/integration -name "*.js")
TEST_SUITE_ACCEPTANCE := $(shell find test/acceptance -name "*.js")
test: config/environments/test.js
	@echo "***tests***"
	@$(SHELL) ./run_tests.sh ${RUNTESTFLAGS} $(TEST_SUITE)
test-unit: config/environments/test.js
	@echo "***tests***"
	@$(SHELL) ./run_tests.sh ${RUNTESTFLAGS} $(TEST_SUITE_UNIT)
test-integration: config/environments/test.js
	@echo "***tests***"
	@$(SHELL) ./run_tests.sh ${RUNTESTFLAGS} $(TEST_SUITE_INTEGRATION)
test-acceptance: config/environments/test.js
	@echo "***tests***"
	@$(SHELL) ./run_tests.sh ${RUNTESTFLAGS} $(TEST_SUITE_ACCEPTANCE)
jshint:
	@echo "***jshint***"
	@./node_modules/.bin/jshint lib/ test/ app.js
test-all: test jshint
coverage:
	@RUNTESTFLAGS=--with-coverage make test
check: test
.PHONY: pre-install test jshint coverage

NEWS.md (96 lines changed)

@@ -1,5 +1,101 @@
# Changelog
## 10.0.0
Released 2020-mm-dd
Breaking changes:
- Log system revamp:
- Logs to stdout, disabled while testing
- Upgrade `camshaft` to version [`0.67.2`](https://github.com/CartoDB/camshaft/releases/tag/0.67.2)
- Use the header `X-Request-Id`, or create a new `uuid` when it is not present, to identify log entries
- Be able to set log level from env variable `LOG_LEVEL`, useful while testing: `LOG_LEVEL=info npm test`; even more human-readable: `LOG_LEVEL=info npm t | ./node_modules/.bin/pino-pretty`
- Stop responding with `X-Tiler-Errors` header. Now errors are properly logged and will end up in ELK as usual.
- Stop responding with `X-Tiler-Profiler` header. Now profiling stats are properly logged and will end up in ELK as usual.
- Be able to reduce the footprint in the final log file depending on the environment
- Be able to pass the logger to the analysis creation (camshaft) while instantiating a named map with analysis.
- Be able to tag requests with labels as an easier way to provide business metrics
- Metro: Add the log-collector utility (`metro`); it will be moved to its own repository and is attached here for development purposes. Try it with the following command: `LOG_LEVEL=info npm t | node metro`
- Metro: Create `metrics-collector.js`, a stream that updates Prometheus counters and histograms and exposes them via an Express app (`:9145/metrics`). Use the ones defined in `grok_exporter`
Bug Fixes:
- While instantiating a map, set the `cache buster` equal to `0` when there are no affected tables in the MapConfig. Thus `layergroupid` always has the same structure:
- `${map_id}:${cache_buster}` for anonymous map
- `${user}@${template_hash}@${map_id}:${cache_buster}` for named map
## 9.0.0
Released 2020-06-05
Breaking changes:
- Remove `/version` endpoint
- Drop support for Node.js < 12
Announcements:
- Support Node.js 12
- Upgrade `windshaft` to version [`7.0.1`](https://github.com/CartoDB/Windshaft/releases/tag/7.0.1)
- Upgrade `camshaft` to version [`0.65.3`](https://github.com/CartoDB/camshaft/blob/0.65.3/CHANGELOG.md#0653):
- Fix noisy message logs while checking analyses' limits
- Fix CI setup, explicit use of PGPORT while creating the PostgreSQL cluster
- Upgrade `cartodb-redis` to version [`3.0.0`](https://github.com/CartoDB/node-cartodb-redis/releases/tag/3.0.0)
- Fix test where `http-fallback-image` renderer was failing quietly
- Fix stat `named map providers` cache count
- Use new signature for `onTileErrorStrategy`. Required by `windshaft@6.0.0`
- Extract `onTileErrorStrategy` to a module
- In tests, stop using mapnik module exposed by windshaft and require it from development dependencies
- Stop using `MapStore` from `windshaft` while testing and create a custom one instead
- Rename NamedMapProviderReporter to NamedMapProviderCacheReporter
- Remove `bootstrapFonts` at process startup (now done in `windshaft@6.0.0`)
- Stop checking the installed version of some dependencies while testing
- Send metrics about `map views` (#1162)
- Add custom headers in responses to allow other components to get insights about user activity
- Update dependencies to avoid security vulnerabilities
Bug Fixes:
- Parsing date column in numeric histograms (#1160)
- Use `Array.prototype.sort()`'s callback properly while testing. It should return a number not a boolean.
## 8.1.1
Released 2020-02-17
Announcements:
- Upgrade camshaft to [`0.65.2`](https://github.com/CartoDB/camshaft/blob/69c9447c9fccf00a70a67d713d1ce777775a17ff/CHANGELOG.md#0652): Fixes uncaught errors problem (#1117)
## 8.1.0
Released 2020-01-27
Announcements:
- Removed `jshint` as linter in favour of `eslint` to check syntax, find problems, and enforce code style.
- Upgrade `camshaft` to [`0.65.1`](https://github.com/CartoDB/camshaft/blob/a2836c15fd2830f8364a222eeafdb4dc2f41b580/CHANGELOG.md#0651): Use quoted identifiers for column names and enforce the usage of the cartodb schema when using cartodb extension functions and tables.
- Stop using two different tools for package management, testing, and any other developer workflow.
- Removes Makefile and related bash scripts
- Use npm scripts as the only tool for testing, CI and linting.
- Simplified CI configuration.
- Improved documentation:
- Centralized several documents into README.md
- Remove outdated sections
- Update old sections
- Added missing sections.
- Remove deprecated coverage tool istanbul, using nyc instead.
- Removed unused dockerfiles
- Use cartodb schema when using cartodb extension functions and tables.
- Implemented circle and polygon dataview filters.
## 8.0.0
Released 2019-11-13
Breaking changes:
- Schema change for "routes" in configuration file, each "router" is now an array instead of an object. See [`dd06de2`](https://github.com/CartoDB/Windshaft-cartodb/pull/1126/commits/dd06de2632661e19d64c9fbc2be0ba1a8059f54c) for more details.
Announcements:
- Added validation to only allow "count" and "sum" aggregations in dataview overview.
- Added mechanism to inject custom middlewares through configuration.
- Stop requiring unused config properties: "base_url", "base_url_mapconfig", and "base_url_templated".
- Upgraded cartodb-query-tables to version [0.7.0](https://github.com/CartoDB/node-cartodb-query-tables/blob/0.7.0/NEWS.md#version-0.7.0).
- Be able to set a coherent TTL in Cache-Control header to expire all resources belonging to a map simultaneously.
- When the `cache buster` in the request path is `0`, set the `Last-Modified` header to now; this avoids stale content in 3rd-party cache providers when they add an `If-Modified-Since` header to the request.
- Adding a logger to MapStore (#1134)
- Qualify calls to cartodb extension so having it in the search_path isn't necessary.
- Fix multiple DB login issues.
## 7.2.0
Released 2019-09-30

188
README.md
View File

@@ -1,80 +1,146 @@
Windshaft-CartoDB
==================
# Windshaft-CartoDB [![Build Status](https://travis-ci.org/CartoDB/Windshaft-cartodb.svg?branch=master)](https://travis-ci.org/CartoDB/Windshaft-cartodb)
[![Build Status](https://travis-ci.org/CartoDB/Windshaft-cartodb.svg?branch=master)](https://travis-ci.org/CartoDB/Windshaft-cartodb)
The [`CARTO Maps API`](https://carto.com/developers/maps-api/) tiler. It extends [`Windshaft`](https://github.com/CartoDB/Windshaft) and exposes a web service with extra functionality:
This is the [CartoDB Maps API](http://docs.cartodb.com/cartodb-platform/maps-api.html) tiler. It extends
[Windshaft](https://github.com/CartoDB/Windshaft) with some extra functionality and custom filters for authentication.
* Instantiate [`Anonymous Maps`](https://github.com/CartoDB/Windshaft-cartodb/blob/master/docs/guides/03-anonymous-maps.md) through CARTO's map configuration ([`MapConfig`](https://github.com/CartoDB/Windshaft/blob/master/doc/MapConfig-specification.md)).
* Create [`Named Maps`](https://github.com/CartoDB/Windshaft-cartodb/blob/master/docs/guides/04-named-maps.md) based on customizable templates.
* Get map previews through [`Static Maps`](https://github.com/CartoDB/Windshaft-cartodb/blob/master/docs/guides/05-static-maps-API.md) API.
* Render maps with a large amount of data faster using [`Tile Aggregation`](https://github.com/CartoDB/Windshaft-cartodb/blob/master/docs/guides/06-tile-aggregation.md).
* Build advanced maps with enriched data through [`Analyses Extension`](https://github.com/CartoDB/Windshaft-cartodb/blob/master/docs/guides/09-MapConfig-analyses-extension.md).
* Fetch tabular data from analysis nodes with [`Dataviews`](https://github.com/CartoDB/Windshaft-cartodb/blob/master/docs/guides/10-MapConfig-dataviews-extension.md)
* reads dbname from subdomain and cartodb redis for pretty tile urls
* configures windshaft to publish `cartodb_id` as the interactivity layer
* gets the default geometry type from the cartodb redis store
* allows tiles to be styled individually
* provides a link to varnish high speed cache
* provides a [template maps API](https://github.com/CartoDB/Windshaft-cartodb/blob/master/docs/Template-maps.md)
## Build
Install
-------
See [INSTALL.md](INSTALL.md) for detailed installation instructions.
Requirements:
Configure
---------
* [`Node 12.x`](https://nodejs.org/dist/latest-v12.x/)
* [`PostgreSQL >= 11.0`](https://www.postgresql.org/download/)
* [`PostGIS >= 2.4`](https://postgis.net/install/)
* [`CARTO Postgres Extension >= 0.24.1`](https://github.com/CartoDB/cartodb-postgresql)
* [`Redis >= 4`](https://redis.io/download)
* `libcairo2-dev`, `libpango1.0-dev`, `libjpeg8-dev` and `libgif-dev` for server side canvas support
* `C++11` to build internal dependencies, when there are no pre-built binaries for your OS/architecture.
Create the config/environments/<env>.js files (there are .example files
to start from). You can optionally use the ./configure script for this,
see ```./configure --help``` to see available options.
Optional:
Look at lib/cartodb/server_options.js for more on config
* [`Varnish`](http://www.varnish-cache.org)
* [`Statsd`](https://github.com/statsd/statsd)
Upgrading
---------
### PostGIS setup
Checkout your commit/branch. If you need to reinstall dependencies (you can check [NEWS](NEWS.md)) do the following:
A `template_postgis` database is expected. One can be set up with
```sh
$ rm -rf node_modules
```shell
$ createdb --owner postgres --template template0 template_postgis
$ psql -d template_postgis -c 'CREATE EXTENSION postgis;'
```
### Install
To fetch and build all node-based dependencies, run:
```shell
$ npm install
```
```
node app.js <env>
```
### Run
Where <env> is the name of a configuration file under config/environments/.
You can inject the configuration through environment variables at run time. Check the file `./config/environments/config.js` to see the ones you have available.
Note that caches are kept in redis. If you're not seeing what you expect
there may be out-of-sync records in there.
Take a look: http://redis.io/commands
While the migration to the new environment-based configuration is in progress, you can still use the old method of copying a config file. To enable the one driven by environment variables you need to pass `CARTO_WINDSHAFT_ENV_BASED_CONF=true`. You can use the docker image to run it.
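For example, a minimal sketch of starting the server with the environment-based configuration (the values are illustrative; any `CARTO_WINDSHAFT_*` variable read by `./config/environments/config.js` can be set this way):
```shell
$ export CARTO_WINDSHAFT_ENV_BASED_CONF=true
$ export CARTO_WINDSHAFT_POSTGRES_HOST=localhost
$ export CARTO_WINDSHAFT_REDIS_HOST=127.0.0.1
$ node app.js development
```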
Documentation
-------------
The [docs directory](https://github.com/CartoDB/Windshaft-cartodb/tree/master/docs) contains different documentation
resources, from higher level to more detailed ones:
The [Maps API](https://github.com/CartoDB/Windshaft-cartodb/blob/master/docs/Map-API.md) defines the endpoints and their
expected parameters and outputs.
Examples
--------
[CartoDB's Map Gallery](http://cartodb.com/gallery/) showcases several examples of visualisations built on top of this.
Contributing
---
See [CONTRIBUTING.md](CONTRIBUTING.md).
### Developing with a custom windshaft version
If you plan or want to use a custom / not released yet version of windshaft (or any other dependency) the best option is
to use `npm link`. You can read more about it at [npm-link: Symlink a package folder](https://docs.npmjs.com/cli/link.html).
**Quick start**:
Old way:
```shell
~/windshaft-directory $ npm install
~/windshaft-directory $ npm link
~/windshaft-cartodb-directory $ npm link windshaft
$ node app.js <env>
```
Where `<env>` is the name of a configuration file under `./config/environments/`.
### Test
You can easily run the tests against the dependencies from the `dev-env`. To do so, you need to build the test docker image:
```shell
$ docker-compose build
```
Then you can run the tests like:
```shell
$ docker-compose run windshaft-tests
```
It will mount your code inside a volume. In case you want to experiment and run `npm test` or something else, you can do:
```shell
$ docker-compose run --entrypoint bash windshaft-tests
```
So you will have a bash shell inside the test container, with the code from your host.
### Coverage
```shell
$ npm run cover
```
Open `./coverage/lcov-report/index.html`.
### Docker support
We provide docker images just for testing and continuous integration purposes:
* [`nodejs-xenial-pg1121`](https://hub.docker.com/r/carto/nodejs-xenial-pg1121/tags)
* [`nodejs-xenial-pg101`](https://hub.docker.com/r/carto/nodejs-xenial-pg101/tags)
You can find instructions to install Docker, download, and update images [here](https://github.com/CartoDB/Windshaft-cartodb/blob/master/docker/reference.md).
### Useful `npm` scripts
Run test in a docker image with a specific Node.js version:
```shell
$ DOCKER_IMAGE=<docker-image-tag> NODE_VERSION=<nodejs-version> npm run test:docker
```
Where:
* `<docker-image-tag>`: the tag of the required docker image, e.g. `carto/nodejs-xenial-pg1121:latest`
* `<nodejs-version>`: the Node.js version, e.g. `10.15.1`
In case you need to debug:
```shell
$ DOCKER_IMAGE=<docker-image-tag> npm run docker:bash
```
## Documentation
You can find an overview, guides, full reference, and support in [`CARTO's developer center`](https://carto.com/developers/maps-api/). The [docs directory](https://github.com/CartoDB/Windshaft-cartodb/tree/master/docs) contains different documentation resources, from a higher level to more detailed ones.
## Contributing
* The issue tracker: [`Github`](https://github.com/CartoDB/Windshaft-cartodb/issues).
* We love Pull Requests from everyone, see [contributing to Open Source on GitHub](https://guides.github.com/activities/contributing-to-open-source/#contributing).
* You'll need to sign a Contributor License Agreement (CLA) before submitting a Pull Request. [Learn more here](https://carto.com/contributions).
## Developing with a custom `Windshaft` version
If you plan to use a custom or not-yet-released version of windshaft (or any other dependency), the best option is to use `npm link`. You can read more about it at `npm-link`: [symlink a package folder](https://docs.npmjs.com/cli/link.html).
```shell
$ cd /path/to/Windshaft
$ npm install
$ npm link
$ cd /path/to/Windshaft-cartodb
$ npm link windshaft
```
## Versioning
We follow [`SemVer`](http://semver.org/) for versioning. For available versions, see the [tags on this repository](https://github.com/CartoDB/Windshaft-cartodb/tags).
## License
This project is licensed under the BSD 3-clause "New" or "Revised" License. See the [LICENSE](LICENSE) file for details.

256
app.js
View File

@@ -1,132 +1,93 @@
'use strict';
var http = require('http');
var https = require('https');
var path = require('path');
var fs = require('fs');
var _ = require('underscore');
var semver = require('semver');
const setICUEnvVariable = require('./lib/cartodb/utils/icu_data_env_setter');
const http = require('http');
const https = require('https');
const path = require('path');
const semver = require('semver');
// jshint undef:false
var log = console.log.bind(console);
var logError = console.error.bind(console);
// jshint undef:true
var nodejsVersion = process.versions.node;
const { engines } = require('./package.json');
if (!semver.satisfies(nodejsVersion, engines.node)) {
logError(`Node version ${nodejsVersion} is not supported, please use Node.js ${engines.node}.`);
process.exit(1);
}
// TODO: research if it's still needed
const setICUEnvVariable = require('./lib/utils/icu-data-env-setter');
// This function should be called before the require('yargs').
setICUEnvVariable();
var argv = require('yargs')
.usage('Usage: $0 <environment> [options]')
const argv = require('yargs')
.usage('Usage: node $0 <environment> [options]')
.help('h')
.example(
'$0 production -c /etc/sql-api/config.js',
'start server in production environment with /etc/sql-api/config.js as config file'
)
'node $0 production -c /etc/windshaft-cartodb/config.js',
'start server in production environment with /etc/windshaft-cartodb/config.js as config file'
)
.alias('h', 'help')
.alias('c', 'config')
.nargs('c', 1)
.describe('c', 'Load configuration from path')
.argv;
var environmentArg = argv._[0] || process.env.NODE_ENV || 'development';
var configurationFile = path.resolve(argv.config || './config/environments/' + environmentArg + '.js');
if (!fs.existsSync(configurationFile)) {
logError('Configuration file "%s" does not exist', configurationFile);
process.exit(1);
const environmentArg = argv._[0] || process.env.NODE_ENV || 'development';
let configFileName = environmentArg;
if (process.env.CARTO_WINDSHAFT_ENV_BASED_CONF) {
// override the config file with the env-var based one
configFileName = 'config';
}
const configurationFile = path.resolve(argv.config || `./config/environments/${configFileName}.js`);
global.environment = require(configurationFile);
var ENVIRONMENT = argv._[0] || process.env.NODE_ENV || global.environment.environment;
process.env.NODE_ENV = ENVIRONMENT;
process.env.NODE_ENV = argv._[0] || process.env.NODE_ENV || global.environment.environment;
var availableEnvironments = {
production: true,
staging: true,
development: true
};
// sanity check
if (!availableEnvironments[ENVIRONMENT]){
logError('node app.js [environment]');
logError('environments: %s', Object.keys(availableEnvironments).join(', '));
process.exit(1);
}
process.env.NODE_ENV = ENVIRONMENT;
if (global.environment.uv_threadpool_size) {
process.env.UV_THREADPOOL_SIZE = global.environment.uv_threadpool_size;
}
// set global HTTP and HTTPS agent default configurations
// ref https://nodejs.org/api/http.html#http_new_agent_options
var agentOptions = _.defaults(global.environment.httpAgent || {}, {
const agentOptions = Object.assign({
keepAlive: false,
keepAliveMsecs: 1000,
maxSockets: Infinity,
maxFreeSockets: 256
});
}, global.environment.httpAgent || {});
http.globalAgent = new http.Agent(agentOptions);
https.globalAgent = new https.Agent(agentOptions);
global.log4js = require('log4js');
var log4jsConfig = {
appenders: [],
replaceConsole: true
};
if ( global.environment.log_filename ) {
var logFilename = path.resolve(global.environment.log_filename);
var logDirectory = path.dirname(logFilename);
if (!fs.existsSync(logDirectory)) {
logError("Log filename directory does not exist: " + logDirectory);
process.exit(1);
}
log("Logs will be written to " + logFilename);
log4jsConfig.appenders.push(
{ type: "file", absolute: true, filename: logFilename }
);
} else {
log4jsConfig.appenders.push(
{ type: "console", layout: { type:'basic' } }
);
}
global.log4js.configure(log4jsConfig);
global.logger = global.log4js.getLogger();
// Include cartodb_windshaft only _after_ the "global" variable is set
// See https://github.com/Vizzuality/Windshaft-cartodb/issues/28
var cartodbWindshaft = require('./lib/cartodb/server');
var serverOptions = require('./lib/cartodb/server_options');
const createServer = require('./lib/server');
const serverOptions = require('./lib/server-options');
const { logger } = serverOptions;
var server = cartodbWindshaft(serverOptions);
const availableEnvironments = {
production: true,
staging: true,
development: true
};
// Maximum number of connections for one process
// 128 is a good number if you have up to 1024 filedescriptors
// 4 is good if you have max 32 filedescriptors
// 1 is good if you have max 16 filedescriptors
var backlog = global.environment.maxConnections || 128;
if (!availableEnvironments[process.env.NODE_ENV]) {
logger.fatal(new Error(`Invalid environment ${process.env.NODE_ENV} argument, valid ones: ${Object.keys(availableEnvironments).join(', ')}`));
process.exit(1);
}
var listener = server.listen(serverOptions.bind.port, serverOptions.bind.host, backlog);
const { engines } = require('./package.json');
if (!semver.satisfies(process.versions.node, engines.node)) {
logger.fatal(new Error(`Node version ${process.versions.node} is not supported, please use Node.js ${engines.node}.`));
process.exit(1);
}
var version = require("./package").version;
const server = createServer(serverOptions);
listener.on('listening', function() {
log("Using Node.js %s", process.version);
log('Using configuration file "%s"', configurationFile);
log(
"Windshaft tileserver %s started on %s:%s PID=%d (%s)",
version, serverOptions.bind.host, serverOptions.bind.port, process.pid, ENVIRONMENT
);
// Specify the maximum length of the queue of pending connections for the HTTP server.
// The actual length will be determined by the OS through sysctl settings such as tcp_max_syn_backlog and somaxconn on Linux.
// The default value of this parameter is 511 (not 512).
// See: https://nodejs.org/docs/latest/api/net.html#net_server_listen
const backlog = global.environment.maxConnections || 128;
const listener = server.listen(serverOptions.bind.port, serverOptions.bind.host, backlog);
const { version, name } = require('./package');
listener.on('listening', function () {
const { address, port } = listener.address();
logger.info({ 'Node.js': process.version, pid: process.pid, environment: process.env.NODE_ENV, [name]: version, address, port, config: configurationFile }, `${name} initialized successfully`);
});
function getCPUUsage (oldUsage) {
@@ -161,32 +122,24 @@ setInterval(function cpuUsageMetrics () {
});
previousCPUUsage = CPUUsage;
}, 5000);
}, 5000).unref();
setInterval(function() {
setInterval(function () {
var memoryUsage = process.memoryUsage();
Object.keys(memoryUsage).forEach(function(k) {
Object.keys(memoryUsage).forEach(function (k) {
global.statsClient.gauge('windshaft.memory.' + k, memoryUsage[k]);
});
}, 5000);
process.on('SIGHUP', function() {
global.log4js.clearAndShutdownAppenders(function() {
global.log4js.configure(log4jsConfig);
global.logger = global.log4js.getLogger();
log('Log files reloaded');
});
});
}, 5000).unref();
if (global.gc) {
var gcInterval = Number.isFinite(global.environment.gc_interval) ?
global.environment.gc_interval :
10000;
var gcInterval = Number.isFinite(global.environment.gc_interval)
? global.environment.gc_interval
: 10000;
if (gcInterval > 0) {
setInterval(function gcForcedCycle() {
setInterval(function gcForcedCycle () {
global.gc();
}, gcInterval);
}, gcInterval).unref();
}
}
@@ -206,64 +159,59 @@ function getGCTypeValue (type) {
let value;
switch (type) {
case 1:
value = 'Scavenge';
break;
case 2:
value = 'MarkSweepCompact';
break;
case 4:
value = 'IncrementalMarking';
break;
case 8:
value = 'ProcessWeakCallbacks';
break;
case 15:
value = 'All';
break;
default:
value = 'Unknown';
break;
case 1:
value = 'Scavenge';
break;
case 2:
value = 'MarkSweepCompact';
break;
case 4:
value = 'IncrementalMarking';
break;
case 8:
value = 'ProcessWeakCallbacks';
break;
case 15:
value = 'All';
break;
default:
value = 'Unknown';
break;
}
return value;
}
addHandlers(listener, global.logger, 45000);
const exitProcess = logger.finish((err, finalLogger, listener, signal, killTimeout) => {
scheduleForcedExit(killTimeout, finalLogger);
function addHandlers(listener, logger, killTimeout) {
process.on('uncaughtException', exitProcess(listener, logger, killTimeout));
process.on('unhandledRejection', exitProcess(listener, logger, killTimeout));
process.on('ENOMEM', exitProcess(listener, logger, killTimeout));
process.on('SIGINT', exitProcess(listener, logger, killTimeout));
process.on('SIGTERM', exitProcess(listener, logger, killTimeout));
finalLogger.info(`Process has received signal: ${signal}`);
let code = 0;
if (err) {
code = 1;
finalLogger.fatal(err);
}
finalLogger.info(`Process is going to exit with code: ${code}`);
listener.close(() => process.exit(code));
});
function addHandlers (listener, killTimeout) {
process.on('uncaughtException', (err) => exitProcess(err, listener, 'uncaughtException', killTimeout));
process.on('unhandledRejection', (err) => exitProcess(err, listener, 'unhandledRejection', killTimeout));
process.on('ENOMEM', (err) => exitProcess(err, listener, 'ENOMEM', killTimeout));
process.on('SIGINT', () => exitProcess(null, listener, 'SIGINT', killTimeout));
process.on('SIGTERM', () => exitProcess(null, listener, 'SIGTERM', killTimeout));
}
function exitProcess (listener, logger, killTimeout) {
return function exitProcessFn (signal) {
scheduleForcedExit(killTimeout, logger);
addHandlers(listener, 45000);
let code = 0;
if (!['SIGINT', 'SIGTERM'].includes(signal)) {
const err = signal instanceof Error ? signal : new Error(signal);
signal = undefined;
code = 1;
logger.fatal(err);
} else {
logger.info(`Process has received signal: ${signal}`);
}
logger.info(`Process is going to exit with code: ${code}`);
listener.close(() => global.log4js.shutdown(() => process.exit(code)));
};
}
function scheduleForcedExit (killTimeout, logger) {
function scheduleForcedExit (killTimeout, finalLogger) {
// Schedule exit if there is still ongoing work to deal with
const killTimer = setTimeout(() => {
logger.info('Process didn\'t close on time. Force exit');
finalLogger.info('Process didn\'t close on time. Force exit');
process.exit(1);
}, killTimeout);

View File

@@ -2,8 +2,8 @@
"name": "carto_windshaft",
"current_version": {
"requires": {
"node": "^10.15.1",
"npm": "^6.4.1",
"node": "^12.16.3",
"npm": "^6.14.4",
"mapnik": "==3.0.15.16",
"crankshaft": "~0.8.1"
},
@@ -11,7 +11,7 @@
"redis": ">=4.0.0",
"postgresql": ">=10.0.0",
"postgis": ">=2.4.4.5",
"carto_postgresql_ext": ">=0.24.1"
"carto_postgresql_ext": ">=0.35.0"
}
}
}

View File

@@ -0,0 +1,411 @@
var config = {
environment: process.env.CARTO_WINDSHAFT_NODE_ENV,
port: 8181,
host: null, // null on purpose so it listens to whatever address docker assigns
// Size of the threadpool which can be used to run user code and get notified in the loop thread
// Its default size is 4, but it can be changed at startup time (the absolute maximum is 128).
// See http://docs.libuv.org/en/latest/threadpool.html
uv_threadpool_size: undefined,
// Time in milliseconds to force GC cycle.
// Disable by using <=0 value.
gc_interval: 10000,
// Regular expression pattern to extract username
// from hostname. Must have a single grabbing block.
user_from_host: process.env.CARTO_WINDSHAFT_USER_FROM_HOST || '^(.*)\\.cartodb\\.com$',
// Base URLs for the APIs
//
// See https://github.com/CartoDB/Windshaft-cartodb/wiki/Unified-Map-API
//
// Note: each entry corresponds with an express' router.
// You must define at least one path. However, middlewares are optional.
routes: {
api: [{
paths: [
'/api/v1',
'/user/:user/api/v1'
],
// Optional: attach middlewares at the beginning of the router
// to perform custom operations.
middlewares: [
function noop () {
return function noopMiddleware (req, res, next) {
next();
};
}
],
// Base url for the Detached Maps API
// "/api/v1/map" is the new API,
map: [{
paths: [
'/map'
],
middlewares: [] // Optional
}],
// Base url for the Templated Maps API
// "/api/v1/map/named" is the new API,
template: [{
paths: [
'/map/named'
],
middlewares: [] // Optional
}]
}]
},
// Resource URLs expose endpoints to request/retrieve metadata associated to Maps: dataviews, analysis node status.
//
// These URLs depend on how `routes` and `user_from_host` are configured: the application can be
// configured to accept requests with the {user} in the host header or in the request path.
// It also might depend on the cdn_url configured via `serverMetadata.cdn_url`.
//
// This template makes the endpoint generation more flexible; it exposes the following params:
// 1. {{=it.cdn_url}}: will be used when `serverMetadata.cdn_url` exists.
// 2. {{=it.user}}: will use the username as extracted from `user_from_host` or `routes`.
// 3. {{=it.port}}: will use the `port` from this very same configuration file.
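// For example (hypothetical user name): with `serverMetadata.cdn_url.http` set to 'api.cartocdn.com'
// and a user named 'alice', the default http template below renders as:
// http://api.cartocdn.com/alice/api/v1/map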
resources_url_templates: {
http: process.env.CARTO_WINDSHAFT_RESOURCE_URL_TEMPLATE_HTTP || 'http://{{=it.cdn_url}}/{{=it.user}}/api/v1/map',
https: process.env.CARTO_WINDSHAFT_RESOURCE_URL_TEMPLATE_HTTPS || 'https://{{=it.cdn_url}}/{{=it.user}}/api/v1/map'
},
// Specify the maximum length of the queue of pending connections for the HTTP server.
// The actual length will be determined by the OS through sysctl settings such as tcp_max_syn_backlog and somaxconn on Linux.
// The default value of this parameter is 511 (not 512).
// See: https://nodejs.org/docs/latest/api/net.html#net_server_listen
maxConnections: 128,
// Maximum number of templates per user. Unlimited by default.
maxUserTemplates: 1024,
// Seconds since "last creation" before a detached
// or template instance map expires. Or: how long do you want
// to be able to navigate the map without a reload?
// Defaults to 7200 (2 hours)
mapConfigTTL: 7200,
// idle socket timeout, in milliseconds
socket_timeout: 600000,
enable_cors: true,
cache_enabled: true,
// Templated database username for authorized user
// Supported labels: 'user_id' (read from redis)
postgres_auth_user: process.env.CARTO_WINDSHAFT_DB_USER || 'cartodb_user_<%= user_id %>',
// Templated database password for authorized user
// Supported labels: 'user_id', 'user_password' (both read from redis)
postgres_auth_pass: '<%= user_password %>',
postgres: {
user: 'publicuser',
password: 'public',
host: process.env.CARTO_WINDSHAFT_POSTGRES_HOST || 'localhost',
port: process.env.CARTO_WINDSHAFT_POSTGRES_PORT || 5432,
pool: {
// maximum number of resources to create at any given time
size: 16,
// max milliseconds a resource can go unused before it should be destroyed
idleTimeout: 3000,
// frequency to check for idle resources
reapInterval: 1000
}
},
mapnik_version: undefined,
mapnik_tile_format: 'png8:m=h',
statsd: {
host: process.env.CARTO_WINDSHAFT_STATSD_HOST || 'localhost',
port: 8125,
prefix: process.env.CARTO_WINDSHAFT_STATSD_PREFIX || ':host.', // could be hostname, better not containing dots
cacheDns: true
// support all allowed node-statsd options
},
renderer: {
// Milliseconds since last access before renderer cache item expires
cache_ttl: 60000,
statsInterval: 5000, // milliseconds between each report to statsd about number of renderers and mapnik pool status
mvt: {
// If enabled, MVTs will be generated with PostGIS directly
// If disabled, MVTs will be generated with Mapnik MVT
usePostGIS: true
},
mapnik: {
// The size of the pool of internal mapnik backend
// This pool size is per mapnik renderer created in Windshaft's RendererFactory
// See https://github.com/CartoDB/Windshaft/blob/master/lib/windshaft/renderers/renderer_factory.js
// Important: check the configuration of uv_threadpool_size to use suitable value
poolSize: 8,
// The maximum number of waiting clients of the pool of internal mapnik backend
// This maximum number is per mapnik renderer created in Windshaft's RendererFactory
poolMaxWaitingClients: 64,
// Whether grainstore will use a child process or not to transform CartoCSS into Mapnik XML.
// This will prevent blocking the main thread.
useCartocssWorkers: false,
// Metatile is the number of tiles-per-side that are going
// to be rendered at once. If all of them end up being requested
// we will have saved time; if only one is used, we will have
// wasted time.
metatile: 2,
// tilelive-mapnik uses an internal cache to store tiles/grids
// generated when using metatile. These options allow tuning
// the behaviour of that internal cache.
metatileCache: {
// Time an object must stay in the cache until it is removed
ttl: 0,
// Whether an object must be removed after the first hit
// Usually you want to use `true` here when ttl>0.
deleteOnHit: false
},
// Override metatile behaviour depending on the format
formatMetatile: {
png: 2,
'grid.json': 1
},
// Buffer size is the thickness in pixels of a buffer
// around the rendered (meta?)tile.
//
// This is important for labels and other markers that overlap tile boundaries.
// Setting to 128 ensures no render artifacts.
// 64 may have artifacts but is faster.
// Less important if we can turn metatiling on.
bufferSize: 64,
// SQL queries will be wrapped with ST_SnapToGrid
// Snapping all points of the geometry to a regular grid
snapToGrid: false,
// SQL queries will be wrapped with ST_ClipByBox2D
// Returning the portion of a geometry falling within a rectangle
// It will only work if snapToGrid is enabled
clipByBox2d: true,
postgis: {
// Parameters to pass to datasource plugin of mapnik
// See http://github.com/mapnik/mapnik/wiki/PostGIS
user: 'publicuser',
password: 'public',
host: process.env.CARTO_WINDSHAFT_POSTGRES_HOST || '127.0.0.1',
port: process.env.CARTO_WINDSHAFT_POSTGRES_PORT || 5432,
extent: '-20037508.3,-20037508.3,20037508.3,20037508.3',
// max number of rows to return when querying data, 0 means no limit
row_limit: 65535,
/*
* Set persist_connection to false if you want
* database connections to be closed on renderer
* expiration (1 minute after last use).
* Setting to true (the default) would never
* close any connection for the server's lifetime
*/
persist_connection: false,
simplify_geometries: true,
use_overviews: true, // use overviews to retrieve raster
max_size: 500,
twkb_encoding: true
},
limits: {
// Time in milliseconds a render request can take before it fails, some notes:
// - 0 means no render limit
// - it considers metatiling, naive implementation: (render timeout) * (number of tiles in metatile)
render: 0,
// As the render request will finish even if it timed out, this controls whether it should be placed in the internal
// cache or fully discarded. When placed in the internal cache, another attempt to retrieve
// the same tile will result in an immediate response, however that will use a lot more application
// memory. If we want to enforce this behaviour we have to implement a cache eviction policy for the
// internal cache.
cacheOnTimeout: true
},
// If enabled Mapnik will reuse the features retrieved from the database
// instead of requesting them once per style inside a layer
'cache-features': true,
// Whether to request metrics from the renderer
metrics: false,
// Options for markers attributes, ellipses and images caches
markers_symbolizer_caches: {
disabled: false
}
},
http: {
timeout: 2000, // the timeout in ms for a http tile request
proxy: undefined, // the url for a proxy server
whitelist: [ // the whitelist of urlTemplates that can be used
'.*', // will enable any URL
'http://{s}.example.com/{z}/{x}/{y}.png'
],
// image to use as a placeholder when the urlTemplate is not in the whitelist;
// if provided, the http renderer will use it instead of throwing an error
fallbackImage: {
type: 'fs', // 'fs' and 'url' supported
src: __dirname + '/../../assets/default-placeholder.png'
}
},
torque: {}
},
// anything analyses related
analysis: {
// batch configuration
batch: {
// Inline execution avoids using the SQL API as the batch endpoint
// When set to true it will run all analysis queries in series, with a direct connection to the DB
// This might be useful for:
// - testing
// - running a standalone server without any dependency on external services
inlineExecution: false,
// where the SQL API is running, it will use a custom Host header to specify the username.
endpoint: 'http://127.0.0.1:8080/api/v2/sql/job',
// the template to use for adding the host header in the batch api requests
hostHeaderTemplate: '{{=it.username}}.localhost.lan'
},
// Define max execution time in ms for analyses or tags.
// If the analysis or tag is not found in redis these values will be used as defaults.
limits: {
moran: { timeout: 120000, maxNumberOfRows: 1e5 },
cpu2x: { timeout: 60000 }
}
},
millstone: {
// Needs to be writable by server user
cache_basedir: process.env.CARTO_WINDSHAFT_TILE_CACHE || '/home/ubuntu/tile_assets/'
},
redis: {
host: process.env.CARTO_WINDSHAFT_REDIS_HOST || '127.0.0.1',
port: process.env.CARTO_WINDSHAFT_REDIS_PORT || 6379,
// Max number of connections in each pool.
// Users will be put on a queue when the limit is hit.
// Set to maxConnections to have no possible queues.
// There are currently 2 pools involved in serving
// windshaft-cartodb requests so multiply this number
// by 2 to know how many possible connections will be
// kept open by the server. The default is 50.
max: 50,
returnToHead: true, // defines the behaviour of the pool: false => queue, true => stack
idleTimeoutMillis: 30000, // idle time before dropping connection
reapIntervalMillis: 1000, // time between cleanups
slowQueries: {
log: true,
elapsedThreshold: 200
},
slowPool: {
log: true, // whether a slow acquire must be logged or not
elapsedThreshold: 25 // the threshold to determine whether a slow acquire must be reported or not
},
emitter: {
statusInterval: 5000 // time, in ms, between status reports emitted from the pool; status is sent to statsd
},
unwatchOnRelease: false, // Send unwatch on release, see http://github.com/CartoDB/Windshaft-cartodb/issues/161
noReadyCheck: true // Check `no_ready_check` at https://github.com/mranney/node_redis/tree/v0.12.1#overloading
},
// For more details about this options check https://nodejs.org/api/http.html#http_new_agent_options
httpAgent: {
keepAlive: true,
keepAliveMsecs: 1000,
maxSockets: 25,
maxFreeSockets: 256
},
varnish: {
host: process.env.CARTO_WINDSHAFT_VARNISH_PORT || 'localhost',
port: process.env.CARTO_WINDSHAFT_VARNISH_PORT || 6082, // the port for the telnet interface where varnish is listening
http_port: 6081, // the port for the HTTP interface where varnish is listening to
purge_enabled: process.env.CARTO_WINDSHAFT_VARNISH_PURGE_ENABLED === 'true' || false, // whether the purge/invalidation mechanism is enabled in varnish or not
secret: 'xxx',
ttl: 86400,
fallbackTtl: 300,
layergroupTtl: 86400 // the max-age for cache-control header in layergroup responses
},
// this [OPTIONAL] configuration enables invalidating by surrogate key in fastly
fastly: {
// whether the invalidation is enabled or not
enabled: false,
// the fastly api key
apiKey: 'wadus_api_key',
// the service that will get surrogate key invalidation
serviceId: 'wadus_service_id'
},
// If useProfiler is true every response will be served with an
// X-Tiler-Profile header containing elapsed timing for various
// steps taken for producing the response.
useProfiler: false,
serverMetadata: {
cdn_url: {
http: process.env.CARTO_WINDSHAFT_SERVER_CDN_URL_HTTP === 'undefined' ? undefined : process.env.CARTO_WINDSHAFT_SERVER_CDN_URL_HTTP || 'api.cartocdn.com',
https: process.env.CARTO_WINDSHAFT_SERVER_CDN_URL_HTTPS === 'undefined' ? undefined : process.env.CARTO_WINDSHAFT_SERVER_CDN_URL_HTTPS || 'cartocdn.global.ssl.fastly.net'
}
},
// Settings for the health check available at /health
health: {
enabled: process.env.CARTO_WINDSHAFT_HEALTH_ENABLED === 'true' || false,
username: 'localhost',
z: 0,
x: 0,
y: 0
},
disabled_file: 'pids/disabled',
// Use this as a feature flags enabling/disabling mechanism
enabledFeatures: {
// whether it should intercept tile render errors and act based on them, enabled by default.
onTileErrorStrategy: false,
// whether the affected tables for a given SQL must query directly postgresql or use the SQL API
cdbQueryTablesFromPostgres: true,
// whether stats & metadata for each layer are available in the mapconfig
layerStats: process.env.CARTO_WINDSHAFT_LAYERSTATS_ENABLED === 'true' || false,
// whether it should rate limit endpoints (global configuration)
rateLimitsEnabled: false,
// whether it should rate limit one or more endpoints (only if rateLimitsEnabled = true)
rateLimitsByEndpoint: {
anonymous: false,
static: false,
static_named: false,
dataview: false,
dataview_search: false,
analysis: false,
analysis_catalog: false,
tile: false,
attributes: false,
named_list: false,
named_create: false,
named_get: false,
named: false,
named_update: false,
named_delete: false,
named_tiles: false
}
},
pubSubMetrics: {
enabled: process.env.CARTO_WINDSHAFT_METRICS_ENABLED === 'true' || false,
project_id: process.env.CARTO_WINDSHAFT_METRICS_PROJECT_ID || 'avid-wavelet-844',
credentials: '',
topic: process.env.CARTO_WINDSHAFT_METRICS_PROJECT_ID || 'raw-metric-events'
}
};
// override some defaults for tests
if (process.env.NODE_ENV === 'test') {
config.user_from_host = '(.*)';
config.postgres_auth_pass = 'test_windshaft_cartodb_user_<%= user_id %>_pass';
config.millstone.cache_basedir = '/tmp/tile_assets';
config.postgres.user = 'test_windshaft_publicuser';
config.resources_url_templates = {
http: 'http://{{=it.user}}.localhost.lan:{{=it.port}}/api/v1/map',
https: 'https://{{=it.user}}.localhost.lan:{{=it.port}}/api/v1/map'
};
config.cache_enabled = false;
config.postgres_auth_user = 'test_windshaft_cartodb_user_<%= user_id %>';
config.renderer.mapnik.postgis.twkb_encoding = false;
config.renderer.mapnik['cache-features'] = false;
config.renderer.http.whitelist = [ // the whitelist of urlTemplates that can be used
'.*', // will enable any URL
'http://{s}.example.com/{z}/{x}/{y}.png',
// for testing purposes
'http://{s}.basemaps.cartocdn.com/dark_nolabels/{z}/{x}/{y}.png'
];
config.analysis.batch.inlineExecution = true;
config.redis.idleTimeoutMillis = 1;
config.redis.reapIntervalMillis = 1;
config.varnish.purge_enabled = false;
config.health.enabled = false;
config.enabledFeatures.layerStats = true;
}
module.exports = config;

View File

@@ -16,47 +16,41 @@ var config = {
// Base URLs for the APIs
//
// See https://github.com/CartoDB/Windshaft-cartodb/wiki/Unified-Map-API
//
// Note: each entry corresponds with an express' router.
// You must define at least one path. However, middlewares are optional.
,routes: {
v1: {
api: [{
paths: [
'/api/v1',
'/user/:user/api/v1',
],
// Base url for the Detached Maps API
// "/api/v1/map" is the new API,
map: {
paths: [
'/map',
]
},
// Base url for the Templated Maps API
// "/api/v1/map/named" is the new API,
template: {
paths: [
'/map/named'
]
}
},
// For compatibility with versions up to 1.6.x
v0: {
paths: [
'/tiles'
// Optional: attach middlewares at the beginning of the router
// to perform custom operations.
middlewares: [
function noop () {
return function noopMiddleware (req, res, next) {
next();
}
}
],
// Base url for the Detached Maps API
// "/tiles/layergroup" is for compatibility with versions up to 1.6.x
map: {
// "/api/v1/map" is the new API,
map: [{
paths: [
'/layergroup'
]
},
'/map',
],
middlewares: [] // Optional
}],
// Base url for the Templated Maps API
// "/tiles/template" is for compatibility with versions up to 1.6.x
template: {
// "/api/v1/map/named" is the new API,
template: [{
paths: [
'/template'
]
}
}
'/map/named'
],
middlewares: [] // Optional
}]
}]
}
// Resource URLs expose endpoints to request/retrieve metadata associated to Maps: dataviews, analysis node status.
@@ -73,9 +67,10 @@ var config = {
http: 'http://{{=it.user}}.localhost.lan:{{=it.port}}/api/v1/map',
https: 'http://localhost.lan:{{=it.port}}/user/{{=it.user}}/api/v1/map'
}
// Maximum number of connections for one process
// 128 is a good value with a limit of 1024 open file descriptors
// Specify the maximum length of the queue of pending connections for the HTTP server.
// The actual length will be determined by the OS through sysctl settings such as tcp_max_syn_backlog and somaxconn on Linux.
// The default value of this parameter is 511 (not 512).
// See: https://nodejs.org/docs/latest/api/net.html#net_server_listen
,maxConnections:128
// Maximum number of templates per user. Unlimited by default.
,maxUserTemplates:1024
@@ -88,11 +83,6 @@ var config = {
,socket_timeout: 600000
,enable_cors: true
,cache_enabled: true
,log_format: ':req[X-Real-IP] :method :req[Host]:url :status :response-time ms -> :res[Content-Type] (:res[X-Tiler-Profiler]) (:res[X-Tiler-Errors])'
// If log_filename is given logs will be written
// there, in append mode. Otherwise stdout is used (default).
// Log file will be re-opened on receiving the HUP signal
,log_filename: 'logs/node-windshaft.log'
// Templated database username for authorized user
// Supported labels: 'user_id' (read from redis)
,postgres_auth_user: 'development_cartodb_user_<%= user_id %>'
@@ -267,12 +257,6 @@ var config = {
// the template to use for adding the host header in the batch api requests
hostHeaderTemplate: '{{=it.username}}.localhost.lan'
},
logger: {
// If filename is given logs comming from analysis client will be written
// there, in append mode. Otherwise 'log_filename' is used. Otherwise stdout is used (default).
// Log file will be re-opened on receiving the HUP signal
filename: 'logs/node-windshaft-analysis.log'
},
// Define max execution time in ms for analyses or tags
// If the analysis or tag is not found in redis these values will be used as defaults.
limits: {
@@ -388,6 +372,12 @@ var config = {
named_tiles: false
}
}
,pubSubMetrics: {
enabled: false,
project_id: '',
credentials: '',
topic: ''
}
};
module.exports = config;

View File

@@ -16,47 +16,41 @@ var config = {
// Base URLs for the APIs
//
// See https://github.com/CartoDB/Windshaft-cartodb/wiki/Unified-Map-API
//
// Note: each entry corresponds with an express' router.
// You must define at least one path. However, middlewares are optional.
,routes: {
v1: {
api: [{
paths: [
'/api/v1',
'/user/:user/api/v1',
],
// Base url for the Detached Maps API
// "/api/v1/map" is the new API,
map: {
paths: [
'/map',
]
},
// Base url for the Templated Maps API
// "/api/v1/map/named" is the new API,
template: {
paths: [
'/map/named'
]
}
},
// For compatibility with versions up to 1.6.x
v0: {
paths: [
'/tiles'
// Optional: attach middlewares at the beginning of the router
// to perform custom operations.
middlewares: [
function noop () {
return function noopMiddleware (req, res, next) {
next();
}
}
],
// Base url for the Detached Maps API
// "/tiles/layergroup" is for compatibility with versions up to 1.6.x
map: {
// "/api/v1/map" is the new API,
map: [{
paths: [
'/layergroup'
]
},
'/map',
],
middlewares: [] // Optional
}],
// Base url for the Templated Maps API
// "/tiles/template" is for compatibility with versions up to 1.6.x
template: {
// "/api/v1/map/named" is the new API,
template: [{
paths: [
'/template'
]
}
}
'/map/named'
],
middlewares: [] // Optional
}]
}]
}
// Resource URLs expose endpoints to request/retrieve metadata associated to Maps: dataviews, analysis node status.
@@ -73,9 +67,10 @@ var config = {
http: 'http://{{=it.cdn_url}}/{{=it.user}}/api/v1/map',
https: 'https://{{=it.cdn_url}}/{{=it.user}}/api/v1/map'
}
// Maximum number of connections for one process
// 128 is a good value with a limit of 1024 open file descriptors
// Specify the maximum length of the queue of pending connections for the HTTP server.
// The actual length will be determined by the OS through sysctl settings such as tcp_max_syn_backlog and somaxconn on Linux.
// The default value of this parameter is 511 (not 512).
// See: https://nodejs.org/docs/latest/api/net.html#net_server_listen
,maxConnections:128
// Maximum number of templates per user. Unlimited by default.
,maxUserTemplates:1024
@@ -88,11 +83,6 @@ var config = {
,socket_timeout: 600000
,enable_cors: true
,cache_enabled: true
,log_format: ':req[X-Real-IP] :method :req[Host]:url :status :response-time ms -> :res[Content-Type] (:res[X-Tiler-Profiler]) (:res[X-Tiler-Errors])'
// If log_filename is given logs will be written
// there, in append mode. Otherwise stdout is used (default).
// Log file will be re-opened on receiving the HUP signal
,log_filename: 'logs/node-windshaft.log'
// Templated database username for authorized user
// Supported labels: 'user_id' (read from redis)
,postgres_auth_user: 'cartodb_user_<%= user_id %>'
@@ -267,12 +257,6 @@ var config = {
// the template to use for adding the host header in the batch api requests
hostHeaderTemplate: '{{=it.username}}.localhost.lan'
},
logger: {
// If filename is given logs comming from analysis client will be written
// there, in append mode. Otherwise 'log_filename' is used. Otherwise stdout is used (default).
// Log file will be re-opened on receiving the HUP signal
filename: 'logs/node-windshaft-analysis.log'
},
// Define max execution time in ms for analyses or tags
// If the analysis or tag is not found in redis these values will be used as defaults.
limits: {
@@ -388,6 +372,12 @@ var config = {
named_tiles: false
}
}
,pubSubMetrics: {
enabled: true,
project_id: 'avid-wavelet-844',
credentials: '',
topic: 'raw-metric-events'
}
};
module.exports = config;

View File

@@ -16,47 +16,41 @@ var config = {
// Base URLs for the APIs
//
// See https://github.com/CartoDB/Windshaft-cartodb/wiki/Unified-Map-API
//
// Note: each entry corresponds with an express' router.
// You must define at least one path. However, middlewares are optional.
,routes: {
v1: {
api: [{
paths: [
'/api/v1',
'/user/:user/api/v1',
],
// Base url for the Detached Maps API
// "/api/v1/map" is the new API,
map: {
paths: [
'/map',
]
},
// Base url for the Templated Maps API
// "/api/v1/map/named" is the new API,
template: {
paths: [
'/map/named'
]
}
},
// For compatibility with versions up to 1.6.x
v0: {
paths: [
'/tiles'
// Optional: attach middlewares at the beginning of the router
// to perform custom operations.
middlewares: [
function noop () {
return function noopMiddleware (req, res, next) {
next();
}
}
],
// Base url for the Detached Maps API
// "/tiles/layergroup" is for compatibility with versions up to 1.6.x
map: {
// "/api/v1/map" is the new API,
map: [{
paths: [
'/layergroup'
]
},
'/map',
],
middlewares: [] // Optional
}],
// Base url for the Templated Maps API
// "/tiles/template" is for compatibility with versions up to 1.6.x
template: {
// "/api/v1/map/named" is the new API,
template: [{
paths: [
'/template'
]
}
}
'/map/named'
],
middlewares: [] // Optional
}]
}]
}
// Resource URLs expose endpoints to request/retrieve metadata associated to Maps: dataviews, analysis node status.
@@ -73,9 +67,9 @@ var config = {
http: 'http://{{=it.cdn_url}}/{{=it.user}}/api/v1/map',
https: 'https://{{=it.cdn_url}}/{{=it.user}}/api/v1/map'
}
// Maximum number of connections for one process
// 128 is a good value with a limit of 1024 open file descriptors
// Specify the maximum length of the queue of pending connections for the HTTP server.
// The actual length will be determined by the OS through sysctl settings such as tcp_max_syn_backlog and somaxconn on Linux.
// The default value of this parameter is 511 (not 512).
,maxConnections:128
// Maximum number of templates per user. Unlimited by default.
,maxUserTemplates:1024
@@ -88,11 +82,6 @@ var config = {
,socket_timeout: 600000
,enable_cors: true
,cache_enabled: true
,log_format: ':req[X-Real-IP] :method :req[Host]:url :status :response-time ms -> :res[Content-Type] (:res[X-Tiler-Profiler]) (:res[X-Tiler-Errors])'
// If log_filename is given logs will be written
// there, in append mode. Otherwise stdout is used (default).
// Log file will be re-opened on receiving the HUP signal
,log_filename: 'logs/node-windshaft.log'
// Templated database username for authorized user
// Supported labels: 'user_id' (read from redis)
,postgres_auth_user: 'cartodb_staging_user_<%= user_id %>'
@@ -267,12 +256,6 @@ var config = {
// the template to use for adding the host header in the batch api requests
hostHeaderTemplate: '{{=it.username}}.localhost.lan'
},
logger: {
// If filename is given logs comming from analysis client will be written
// there, in append mode. Otherwise 'log_filename' is used. Otherwise stdout is used (default).
// Log file will be re-opened on receiving the HUP signal
filename: 'logs/node-windshaft-analysis.log'
},
// Define max execution time in ms for analyses or tags
// If the analysis or tag is not found in redis these values will be used as defaults.
limits: {
@@ -388,6 +371,12 @@ var config = {
named_tiles: false
}
}
,pubSubMetrics: {
enabled: true,
project_id: '',
credentials: '',
topic: 'raw-metric-events'
}
};
module.exports = config;

View File

@@ -16,47 +16,41 @@ var config = {
// Base URLs for the APIs
//
// See https://github.com/CartoDB/Windshaft-cartodb/wiki/Unified-Map-API
//
// Note: each entry corresponds with an express' router.
// You must define at least one path. However, middlewares are optional.
,routes: {
v1: {
api: [{
paths: [
'/api/v1',
'/user/:user/api/v1',
],
// Base url for the Detached Maps API
// "/api/v1/map" is the new API,
map: {
paths: [
'/map',
]
},
// Base url for the Templated Maps API
// "/api/v1/map/named" is the new API,
template: {
paths: [
'/map/named'
]
}
},
// For compatibility with versions up to 1.6.x
v0: {
paths: [
'/tiles'
// Optional: attach middlewares at the beginning of the router
// to perform custom operations.
middlewares: [
function noop () {
return function noopMiddleware (req, res, next) {
next();
}
}
],
// Base url for the Detached Maps API
// "/tiles/layergroup" is for compatibility with versions up to 1.6.x
map: {
// "/api/v1/map" is the new API,
map: [{
paths: [
'/layergroup'
]
},
'/map',
],
middlewares: [] // Optional
}],
// Base url for the Templated Maps API
// "/tiles/template" is for compatibility with versions up to 1.6.x
template: {
// "/api/v1/map/named" is the new API,
template: [{
paths: [
'/template'
]
}
}
'/map/named'
],
middlewares: [] // Optional
}]
}]
}
// Resource URLs expose endpoints to request/retrieve metadata associated to Maps: dataviews, analysis node status.
@@ -73,9 +67,10 @@ var config = {
http: 'http://{{=it.user}}.localhost.lan:{{=it.port}}/api/v1/map',
https: 'https://{{=it.user}}.localhost.lan:{{=it.port}}/api/v1/map'
}
// Maximum number of connections for one process
// 128 is a good value with a limit of 1024 open file descriptors
// Specify the maximum length of the queue of pending connections for the HTTP server.
// The actual length will be determined by the OS through sysctl settings such as tcp_max_syn_backlog and somaxconn on Linux.
// The default value of this parameter is 511 (not 512).
// See: https://nodejs.org/docs/latest/api/net.html#net_server_listen
,maxConnections:128
// Maximum number of templates per user. Unlimited by default.
,maxUserTemplates:1024
@@ -88,11 +83,6 @@ var config = {
,socket_timeout: 600000
,enable_cors: true
,cache_enabled: false
,log_format: ':req[X-Real-IP] :method :req[Host]:url :status :response-time ms -> :res[Content-Type] (:res[X-Tiler-Profiler]) (:res[X-Tiler-Errors])'
// If log_filename is given logs will be written
// there, in append mode. Otherwise stdout is used (default).
// Log file will be re-opened on receiving the HUP signal
,log_filename: '/tmp/node-windshaft.log'
// Templated database username for authorized user
// Supported labels: 'user_id' (read from redis)
,postgres_auth_user: 'test_windshaft_cartodb_user_<%= user_id %>'
@@ -269,12 +259,6 @@ var config = {
// the template to use for adding the host header in the batch api requests
hostHeaderTemplate: '{{=it.username}}.localhost.lan'
},
logger: {
// If filename is given logs comming from analysis client will be written
// there, in append mode. Otherwise 'log_filename' is used. Otherwise stdout is used (default).
// Log file will be re-opened on receiving the HUP signal
filename: '/tmp/node-windshaft-analysis.log'
},
// Define max execution time in ms for analyses or tags
// If the analysis or tag is not found in redis these values will be used as defaults.
limits: {
@@ -390,6 +374,12 @@ var config = {
named_tiles: false
}
}
,pubSubMetrics: {
enabled: false,
project_id: '',
credentials: '',
topic: ''
}
};
module.exports = config;

81
configure vendored
View File

@@ -1,81 +0,0 @@
#!/bin/sh
#
# This script creates config/environments/*.js files using
# config/environments/*.js.example files as input and performing
# settings substitutions.
#
# It relies on a known format of the .js.example files which haven't
# been made easier to parse to still let humans copy them manually and
# do further editing or leave them as such to get the same setup as before
# the introduction of this script.
#
# The script is a work in progress. Available switches are printed
# by invoking with the --help switch. More switches will be added
# as the need/request for them arises.
#
# --strk(2012-07-23)
#
ENVDIR=config/environments
PGPORT=
MAPNIK_VERSION=
ENVIRONMENT=development
STATUS="$0 $*"
usage() {
echo "Usage: $0 [OPTION]"
echo
echo "Configuration:"
echo " --help display this help and exit"
echo " --with-pgport=NUM access PostgreSQL server on TCP port NUM [$PGPORT]"
echo " --with-mapnik-version=STRING set mapnik version string [$MAPNIK_VERSION]"
echo " --environment=STRING set output environment name [$ENVIRONMENT]"
}
while test -n "$1"; do
case "$1" in
--help|-h)
usage
exit 0
;;
--with-pgport=*)
PGPORT=`echo "$1" | cut -d= -f2`
;;
--with-mapnik-version=*)
MAPNIK_VERSION=`echo "$1" | cut -d= -f2`
;;
--environment=*)
ENVIRONMENT=`echo "$1" | cut -d= -f2`
;;
*)
echo "Unused option '$1'" >&2
;;
esac
shift
done
ENVEX=./${ENVDIR}/${ENVIRONMENT}.js.example
if [ -z "$PGPORT" ]; then
PGPORT=`node -e "console.log(require('${ENVEX}').postgres.port)"`
fi
echo "PGPORT: $PGPORT"
echo "MAPNIK_VERSION: $MAPNIK_VERSION"
echo "ENVIRONMENT: $ENVIRONMENT"
o=`dirname "${ENVEX}"`/`basename "${ENVEX}" .example`
echo "Writing $o"
# See http://austinmatzko.com/2008/04/26/sed-multi-line-search-and-replace/
sed -n "1h;1!H;\${;g;s/\(,postgres: {[^}]*port: *'\?\)[^',]*\('\?,\)/\1$PGPORT\2/;p;}" < "${ENVEX}" \
| sed "s/mapnik_version:.*/mapnik_version: '$MAPNIK_VERSION'/" \
> "$o"
STATUSFILE=config.status--${ENVIRONMENT}
echo "Writing ${STATUSFILE}"
echo ${STATUS} > ${STATUSFILE} && chmod +x ${STATUSFILE}

View File

@@ -1,13 +0,0 @@
#!/bin/bash
echo "*********************"
echo "To install Node.js, run:"
echo "/src/nodejs-install.sh"
echo "Use NODEJS_VERSION env var to select the Node.js version"
echo " "
echo "To start postgres, run:"
echo "/etc/init.d/postgresql start"
echo "*********************"
echo " "
docker run -it -v `pwd`:/srv carto/nodejs-xenial-pg101:latest bash


@@ -1,4 +0,0 @@
#!/bin/bash
docker run -e "NODEJS_VERSION=${2}" -v `pwd`:/srv ${1} bash run_tests_docker.sh && \
docker ps --filter status=dead --filter status=exited -aq | xargs docker rm -v


@@ -1,92 +0,0 @@
FROM ubuntu:xenial
# Use UTF8 to avoid encoding problems with pgsql
ENV LANG C.UTF-8
ENV NPROCS 1
ENV JOBS 1
ENV CXX g++-4.9
ENV PGUSER postgres
# Add external repos
RUN set -ex \
&& apt-get update \
&& apt-get install -y \
curl \
software-properties-common \
locales \
&& add-apt-repository -y ppa:ubuntu-toolchain-r/test \
&& add-apt-repository -y ppa:cartodb/postgresql-10 \
&& add-apt-repository -y ppa:cartodb/gis \
&& curl -o- https://raw.githubusercontent.com/creationix/nvm/v0.34.0/install.sh | bash \
&& . ~/.nvm/nvm.sh \
&& locale-gen en_US.UTF-8 \
&& update-locale LANG=en_US.UTF-8
RUN set -ex \
&& apt-get update \
&& apt-get install -y \
g++-4.9 \
gcc-4.9 \
git \
libcairo2-dev \
libgdal-dev \
libgdal1i \
libgdal20 \
libgeos-dev \
libgif-dev \
libjpeg8-dev \
libjson-c-dev \
libpango1.0-dev \
libpixman-1-dev \
libproj-dev \
libprotobuf-c-dev \
libxml2-dev \
gdal-bin \
make \
nodejs \
protobuf-c-compiler \
pkg-config \
wget \
zip \
postgresql-10 \
postgresql-10-plproxy \
postgis=2.4.4.6+carto-1 \
postgresql-10-postgis-2.4=2.4.4.6+carto-1 \
postgresql-10-postgis-2.4-scripts=2.4.4.6+carto-1 \
postgresql-10-postgis-scripts=2.4.4.6+carto-1 \
postgresql-client-10 \
postgresql-client-common \
postgresql-common \
postgresql-contrib \
postgresql-plpython-10 \
postgresql-server-dev-10 \
&& wget http://download.redis.io/releases/redis-4.0.8.tar.gz \
&& tar xvzf redis-4.0.8.tar.gz \
&& cd redis-4.0.8 \
&& make \
&& make install \
&& cd .. \
&& rm redis-4.0.8.tar.gz \
&& rm -R redis-4.0.8 \
&& apt-get purge -y wget protobuf-c-compiler \
&& apt-get autoremove -y
# Configure PostgreSQL
RUN set -ex \
&& echo "listen_addresses='*'" >> /etc/postgresql/10/main/postgresql.conf \
&& echo "local all all trust" > /etc/postgresql/10/main/pg_hba.conf \
&& echo "host all all 0.0.0.0/0 trust" >> /etc/postgresql/10/main/pg_hba.conf \
&& echo "host all all ::1/128 trust" >> /etc/postgresql/10/main/pg_hba.conf \
&& /etc/init.d/postgresql start \
&& createdb template_postgis \
&& createuser publicuser \
&& psql -c "CREATE EXTENSION postgis" template_postgis \
&& /etc/init.d/postgresql stop
WORKDIR /srv
EXPOSE 5858
COPY ./scripts/nodejs-install.sh /src/nodejs-install.sh
RUN chmod 777 /src/nodejs-install.sh
CMD /src/nodejs-install.sh


@@ -1,85 +0,0 @@
FROM ubuntu:xenial
# Use UTF8 to avoid encoding problems with pgsql
ENV LANG C.UTF-8
ENV NPROCS 1
ENV JOBS 1
ENV CXX g++-4.9
ENV PGUSER postgres
# Add external repos
RUN set -ex \
&& apt-get update \
&& apt-get install -y \
curl \
software-properties-common \
locales \
&& add-apt-repository -y ppa:ubuntu-toolchain-r/test \
&& add-apt-repository -y ppa:cartodb/postgresql-11 \
&& add-apt-repository -y ppa:cartodb/redis-next \
&& curl -o- https://raw.githubusercontent.com/creationix/nvm/v0.34.0/install.sh | bash \
&& . ~/.nvm/nvm.sh \
&& locale-gen en_US.UTF-8 \
&& update-locale LANG=en_US.UTF-8
RUN set -ex \
&& apt-get update \
&& apt-get install -y \
g++-4.9 \
gcc-4.9 \
git \
libcairo2-dev \
libgdal-dev=2.3.2+dfsg-2build2~carto1 \
libgdal20=2.3.2+dfsg-2build2~carto1 \
libgeos-dev=3.7.1~carto1 \
libgif-dev \
libjpeg8-dev \
libjson-c-dev \
libpango1.0-dev \
libpixman-1-dev \
libproj-dev \
libprotobuf-c-dev \
libxml2-dev \
gdal-bin=2.3.2+dfsg-2build2~carto1 \
make \
nodejs \
protobuf-c-compiler \
pkg-config \
wget \
zip \
libopenscenegraph100v5 \
libsfcgal1 \
liblwgeom-2.5.0=2.5.1.4+carto-1 \
postgresql-11 \
postgresql-11-plproxy \
postgis=2.5.1.4+carto-1 \
postgresql-11-postgis-2.5=2.5.1.4+carto-1 \
postgresql-11-postgis-2.5-scripts=2.5.1.4+carto-1 \
postgresql-client-11 \
postgresql-client-common \
postgresql-common \
postgresql-contrib \
postgresql-plpython-11 \
postgresql-server-dev-11 \
redis=5:4.0.9-1carto1~xenial1 \
&& apt-get purge -y wget protobuf-c-compiler \
&& apt-get autoremove -y
# Configure PostgreSQL
RUN set -ex \
&& echo "listen_addresses='*'" >> /etc/postgresql/11/main/postgresql.conf \
&& echo "local all all trust" > /etc/postgresql/11/main/pg_hba.conf \
&& echo "host all all 0.0.0.0/0 trust" >> /etc/postgresql/11/main/pg_hba.conf \
&& echo "host all all ::1/128 trust" >> /etc/postgresql/11/main/pg_hba.conf \
&& /etc/init.d/postgresql start \
&& createdb template_postgis \
&& createuser publicuser \
&& psql -c "CREATE EXTENSION postgis" template_postgis \
&& /etc/init.d/postgresql stop
WORKDIR /srv
EXPOSE 5858
COPY ./scripts/nodejs-install.sh /src/nodejs-install.sh
RUN chmod 777 /src/nodejs-install.sh
CMD /src/nodejs-install.sh


@@ -1,88 +0,0 @@
FROM ubuntu:xenial
# Use UTF8 to avoid encoding problems with pgsql
ENV LANG C.UTF-8
ENV NPROCS 1
ENV JOBS 1
ENV CXX g++-4.9
ENV PGUSER postgres
# Add external repos
RUN set -ex \
&& apt-get update \
&& apt-get install -y \
curl \
software-properties-common \
locales \
&& add-apt-repository -y ppa:ubuntu-toolchain-r/test \
&& add-apt-repository -y ppa:cartodb/postgresql-10 \
&& add-apt-repository -y ppa:cartodb/gis \
&& curl -sL https://deb.nodesource.com/setup_10.x | bash \
&& locale-gen en_US.UTF-8 \
&& update-locale LANG=en_US.UTF-8
RUN set -ex \
&& apt-get update \
&& apt-get install -y \
g++-4.9 \
gcc-4.9 \
git \
libcairo2-dev \
libgdal-dev \
libgdal1i \
libgdal20 \
libgeos-dev \
libgif-dev \
libjpeg8-dev \
libjson-c-dev \
libpango1.0-dev \
libpixman-1-dev \
libproj-dev \
libprotobuf-c-dev \
libxml2-dev \
gdal-bin \
make \
nodejs \
protobuf-c-compiler \
pkg-config \
wget \
zip \
postgresql-10 \
postgresql-10-plproxy \
postgis=2.4.4.5+carto-1 \
postgresql-10-postgis-2.4=2.4.4.5+carto-1 \
postgresql-10-postgis-2.4-scripts=2.4.4.5+carto-1 \
postgresql-10-postgis-scripts=2.4.4.5+carto-1 \
postgresql-client-10 \
postgresql-client-common \
postgresql-common \
postgresql-contrib \
postgresql-plpython-10 \
postgresql-server-dev-10 \
&& wget http://download.redis.io/releases/redis-4.0.8.tar.gz \
&& tar xvzf redis-4.0.8.tar.gz \
&& cd redis-4.0.8 \
&& make \
&& make install \
&& cd .. \
&& rm redis-4.0.8.tar.gz \
&& rm -R redis-4.0.8 \
&& apt-get purge -y wget protobuf-c-compiler \
&& apt-get autoremove -y
# Configure PostgreSQL
RUN set -ex \
&& echo "listen_addresses='*'" >> /etc/postgresql/10/main/postgresql.conf \
&& echo "local all all trust" > /etc/postgresql/10/main/pg_hba.conf \
&& echo "host all all 0.0.0.0/0 trust" >> /etc/postgresql/10/main/pg_hba.conf \
&& echo "host all all ::1/128 trust" >> /etc/postgresql/10/main/pg_hba.conf \
&& /etc/init.d/postgresql start \
&& createdb template_postgis \
&& createuser publicuser \
&& psql -c "CREATE EXTENSION postgis" template_postgis \
&& /etc/init.d/postgresql stop
WORKDIR /srv
EXPOSE 5858
CMD /etc/init.d/postgresql start


@@ -1,89 +0,0 @@
FROM ubuntu:xenial
# Use UTF8 to avoid encoding problems with pgsql
ENV LANG C.UTF-8
ENV NPROCS 1
ENV JOBS 1
ENV CXX g++-4.9
ENV PGUSER postgres
# Add external repos
RUN set -ex \
&& apt-get update \
&& apt-get install -y \
curl \
software-properties-common \
locales \
&& add-apt-repository -y ppa:ubuntu-toolchain-r/test \
&& add-apt-repository -y ppa:cartodb/postgresql-10 \
&& add-apt-repository -y ppa:cartodb/gis \
&& curl -sL https://deb.nodesource.com/setup_6.x | bash \
&& locale-gen en_US.UTF-8 \
&& update-locale LANG=en_US.UTF-8
# Install dependencies and PostGIS 2.4 from sources
RUN set -ex \
&& apt-get update \
&& apt-get install -y \
g++-4.9 \
gcc-4.9 \
git \
libcairo2-dev \
libgdal-dev \
libgdal1i \
libgdal20 \
libgeos-dev \
libgif-dev \
libjpeg8-dev \
libjson-c-dev \
libpango1.0-dev \
libpixman-1-dev \
libproj-dev \
libprotobuf-c-dev \
libxml2-dev \
gdal-bin \
make \
nodejs \
protobuf-c-compiler \
pkg-config \
wget \
zip \
postgresql-10 \
postgresql-10-plproxy \
postgresql-10-postgis-2.4 \
postgresql-10-postgis-2.4-scripts \
postgresql-10-postgis-scripts \
postgresql-client-10 \
postgresql-client-common \
postgresql-common \
postgresql-contrib \
postgresql-plpython-10 \
postgresql-server-dev-10 \
postgis \
&& wget http://download.redis.io/releases/redis-4.0.8.tar.gz \
&& tar xvzf redis-4.0.8.tar.gz \
&& cd redis-4.0.8 \
&& make \
&& make install \
&& cd .. \
&& rm redis-4.0.8.tar.gz \
&& rm -R redis-4.0.8 \
&& apt-get purge -y wget protobuf-c-compiler \
&& apt-get autoremove -y
# Configure PostgreSQL
RUN set -ex \
&& echo "listen_addresses='*'" >> /etc/postgresql/10/main/postgresql.conf \
&& echo "local all all trust" > /etc/postgresql/10/main/pg_hba.conf \
&& echo "host all all 0.0.0.0/0 trust" >> /etc/postgresql/10/main/pg_hba.conf \
&& echo "host all all ::1/128 trust" >> /etc/postgresql/10/main/pg_hba.conf \
&& /etc/init.d/postgresql start \
&& createdb template_postgis \
&& createuser publicuser \
&& psql -c "CREATE EXTENSION postgis" template_postgis \
&& /etc/init.d/postgresql stop
WORKDIR /srv
EXPOSE 5858
CMD /etc/init.d/postgresql start


@@ -1,89 +0,0 @@
FROM ubuntu:xenial
# Use UTF8 to avoid encoding problems with pgsql
ENV LANG C.UTF-8
ENV NPROCS 1
ENV JOBS 1
ENV CXX g++-4.9
ENV PGUSER postgres
# Add external repos
RUN set -ex \
&& apt-get update \
&& apt-get install -y \
curl \
software-properties-common \
locales \
&& add-apt-repository -y ppa:ubuntu-toolchain-r/test \
&& add-apt-repository -y ppa:cartodb/postgresql-10 \
&& add-apt-repository -y ppa:cartodb/gis \
&& curl -sL https://deb.nodesource.com/setup_6.x | bash \
&& locale-gen en_US.UTF-8 \
&& update-locale LANG=en_US.UTF-8
# Install dependencies and PostGIS 2.4 from sources
RUN set -ex \
&& apt-get update \
&& apt-get install -y \
g++-4.9 \
gcc-4.9 \
git \
libcairo2-dev \
libgdal-dev \
libgdal1i \
libgdal20 \
libgeos-dev \
libgif-dev \
libjpeg8-dev \
libjson-c-dev \
libpango1.0-dev \
libpixman-1-dev \
libproj-dev \
libprotobuf-c-dev \
libxml2-dev \
gdal-bin \
make \
nodejs \
protobuf-c-compiler \
pkg-config \
wget \
zip \
postgresql-10 \
postgresql-10-plproxy \
postgresql-10-postgis-2.4 \
postgresql-10-postgis-2.4-scripts \
postgresql-10-postgis-scripts \
postgresql-client-10 \
postgresql-client-common \
postgresql-common \
postgresql-contrib \
postgresql-plpython-10 \
postgresql-server-dev-10 \
postgis \
&& wget http://download.redis.io/releases/redis-4.0.8.tar.gz \
&& tar xvzf redis-4.0.8.tar.gz \
&& cd redis-4.0.8 \
&& make \
&& make install \
&& cd .. \
&& rm redis-4.0.8.tar.gz \
&& rm -R redis-4.0.8 \
&& apt-get purge -y wget protobuf-c-compiler \
&& apt-get autoremove -y
# Configure PostgreSQL
RUN set -ex \
&& echo "listen_addresses='*'" >> /etc/postgresql/10/main/postgresql.conf \
&& echo "local all all trust" > /etc/postgresql/10/main/pg_hba.conf \
&& echo "host all all 0.0.0.0/0 trust" >> /etc/postgresql/10/main/pg_hba.conf \
&& echo "host all all ::1/128 trust" >> /etc/postgresql/10/main/pg_hba.conf \
&& /etc/init.d/postgresql start \
&& createdb template_postgis \
&& createuser publicuser \
&& psql -c "CREATE EXTENSION postgis" template_postgis \
&& /etc/init.d/postgresql stop
WORKDIR /srv
EXPOSE 5858
CMD /etc/init.d/postgresql start


@@ -1,88 +0,0 @@
FROM ubuntu:xenial
# Use UTF8 to avoid encoding problems with pgsql
ENV LANG C.UTF-8
ENV NPROCS 1
ENV JOBS 1
ENV CXX g++-4.9
ENV PGUSER postgres
# Add external repos
RUN set -ex \
&& apt-get update \
&& apt-get install -y \
curl \
software-properties-common \
locales \
&& add-apt-repository -y ppa:ubuntu-toolchain-r/test \
&& add-apt-repository -y ppa:cartodb/postgresql-10 \
&& add-apt-repository -y ppa:cartodb/gis \
&& curl -sL https://deb.nodesource.com/setup_6.x | bash \
&& locale-gen en_US.UTF-8 \
&& update-locale LANG=en_US.UTF-8
RUN set -ex \
&& apt-get update \
&& apt-get install -y \
g++-4.9 \
gcc-4.9 \
git \
libcairo2-dev \
libgdal-dev \
libgdal1i \
libgdal20 \
libgeos-dev \
libgif-dev \
libjpeg8-dev \
libjson-c-dev \
libpango1.0-dev \
libpixman-1-dev \
libproj-dev \
libprotobuf-c-dev \
libxml2-dev \
gdal-bin \
make \
nodejs \
protobuf-c-compiler \
pkg-config \
wget \
zip \
postgresql-10 \
postgresql-10-plproxy \
postgis=2.4.4.5+carto-1 \
postgresql-10-postgis-2.4=2.4.4.5+carto-1 \
postgresql-10-postgis-2.4-scripts=2.4.4.5+carto-1 \
postgresql-10-postgis-scripts=2.4.4.5+carto-1 \
postgresql-client-10 \
postgresql-client-common \
postgresql-common \
postgresql-contrib \
postgresql-plpython-10 \
postgresql-server-dev-10 \
&& wget http://download.redis.io/releases/redis-4.0.8.tar.gz \
&& tar xvzf redis-4.0.8.tar.gz \
&& cd redis-4.0.8 \
&& make \
&& make install \
&& cd .. \
&& rm redis-4.0.8.tar.gz \
&& rm -R redis-4.0.8 \
&& apt-get purge -y wget protobuf-c-compiler \
&& apt-get autoremove -y
# Configure PostgreSQL
RUN set -ex \
&& echo "listen_addresses='*'" >> /etc/postgresql/10/main/postgresql.conf \
&& echo "local all all trust" > /etc/postgresql/10/main/pg_hba.conf \
&& echo "host all all 0.0.0.0/0 trust" >> /etc/postgresql/10/main/pg_hba.conf \
&& echo "host all all ::1/128 trust" >> /etc/postgresql/10/main/pg_hba.conf \
&& /etc/init.d/postgresql start \
&& createdb template_postgis \
&& createuser publicuser \
&& psql -c "CREATE EXTENSION postgis" template_postgis \
&& /etc/init.d/postgresql stop
WORKDIR /srv
EXPOSE 5858
CMD /etc/init.d/postgresql start


@@ -1,23 +0,0 @@
To run the tests with Docker, you will need Docker installed and the docker image downloaded.
## Install docker
`sudo apt install docker.io && sudo usermod -aG docker $(whoami)`
## Download image
`docker pull carto/IMAGE`
## Carto account
https://hub.docker.com/r/carto/
## Update image
- Edit the docker image file with your desired changes
- Build image:
- `docker build -t carto/IMAGE -f docker/DOCKER_FILE docker/`
- Upload to docker hub:
- Login into docker hub:
- `docker login`
- Create tag:
- `docker tag carto/IMAGE carto/IMAGE`
- Upload:
- `docker push carto/IMAGE`


@@ -1,13 +0,0 @@
#!/bin/bash
export NVM_DIR="$HOME/.nvm"
[ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh"
if [ -z $NODEJS_VERSION ]; then
NODEJS_VERSION="10"
NODEJS_VERSION_OPTIONS="--lts"
fi
nvm install $NODEJS_VERSION $NODEJS_VERSION_OPTIONS
nvm alias default $NODEJS_VERSION
nvm use default


@@ -5,14 +5,14 @@ const { Router: router } = require('express');
const RedisPool = require('redis-mpool');
const cartodbRedis = require('cartodb-redis');
const windshaft = require('windshaft');
const { factory: windshaftFactory } = require('windshaft');
const PgConnection = require('../backends/pg_connection');
const PgConnection = require('../backends/pg-connection');
const AnalysisBackend = require('../backends/analysis');
const AnalysisStatusBackend = require('../backends/analysis-status');
const DataviewBackend = require('../backends/dataview');
const TemplateMaps = require('../backends/template_maps.js');
const PgQueryRunner = require('../backends/pg_query_runner');
const TemplateMaps = require('../backends/template-maps');
const PgQueryRunner = require('../backends/pg-query-runner');
const StatsBackend = require('../backends/stats');
const AuthBackend = require('../backends/auth');
@@ -20,16 +20,16 @@ const UserLimitsBackend = require('../backends/user-limits');
const OverviewsMetadataBackend = require('../backends/overviews-metadata');
const FilterStatsApi = require('../backends/filter-stats');
const TablesExtentBackend = require('../backends/tables-extent');
const ClusterBackend = require('../backends/cluster');
const PubSubMetricsBackend = require('../backends/metrics');
const LayergroupAffectedTablesCache = require('../cache/layergroup_affected_tables');
const SurrogateKeysCache = require('../cache/surrogate_keys_cache');
const VarnishHttpCacheBackend = require('../cache/backend/varnish_http');
const LayergroupAffectedTablesCache = require('../cache/layergroup-affected-tables');
const SurrogateKeysCache = require('../cache/surrogate-keys-cache');
const VarnishHttpCacheBackend = require('../cache/backend/varnish-http');
const FastlyCacheBackend = require('../cache/backend/fastly');
const NamedMapProviderCache = require('../cache/named_map_provider_cache');
const NamedMapsCacheEntry = require('../cache/model/named_maps_entry');
const NamedMapProviderReporter = require('../stats/reporter/named-map-provider');
const NamedMapProviderCache = require('../cache/named-map-provider-cache');
const NamedMapsCacheEntry = require('../cache/model/named-maps-entry');
const NamedMapProviderCacheReporter = require('../stats/reporter/named-map-provider-cache');
const SqlWrapMapConfigAdapter = require('../models/mapconfig/adapter/sql-wrap-mapconfig-adapter');
const MapConfigNamedLayersAdapter = require('../models/mapconfig/adapter/mapconfig-named-layers-adapter');
@@ -47,20 +47,23 @@ const LayergroupMetadata = require('../utils/layergroup-metadata');
const RendererStatsReporter = require('../stats/reporter/renderer');
const initializeStatusCode = require('./middlewares/initialize-status-code');
const logger = require('./middlewares/logger');
const initLogger = require('./middlewares/logger');
const bodyParser = require('body-parser');
const servedByHostHeader = require('./middlewares/served-by-host-header');
const stats = require('./middlewares/stats');
const profiler = require('./middlewares/profiler');
const lzmaMiddleware = require('./middlewares/lzma');
const cors = require('./middlewares/cors');
const user = require('./middlewares/user');
const sendResponse = require('./middlewares/send-response');
const syntaxError = require('./middlewares/syntax-error');
const errorMiddleware = require('./middlewares/error-middleware');
const clientHeader = require('./middlewares/client-header');
const MapRouter = require('./map/map-router');
const TemplateRouter = require('./template/template-router');
const getOnTileErrorStrategy = require('../utils/on-tile-error-strategy');
module.exports = class ApiRouter {
constructor ({ serverOptions, environmentOptions }) {
this.serverOptions = serverOptions;
@@ -80,36 +83,22 @@ module.exports = class ApiRouter {
global.statsClient.gauge(keyPrefix + 'waiting', status.waiting);
});
const { rendererCache, tileBackend, attributesBackend, previewBackend, mapBackend, mapStore } = windshaftFactory({
rendererOptions: serverOptions,
redisPool,
onTileErrorStrategy: getOnTileErrorStrategy({ enabled: environmentOptions.enabledFeatures.onTileErrorStrategy }),
logger: this.serverOptions.logger
});
const rendererStatsReporter = new RendererStatsReporter(rendererCache, serverOptions.renderCache.statsInterval);
rendererStatsReporter.start();
const metadataBackend = cartodbRedis({ pool: redisPool });
const pgConnection = new PgConnection(metadataBackend);
const mapStore = new windshaft.storage.MapStore({
pool: redisPool,
expire_time: serverOptions.grainstore.default_layergroup_ttl
});
const rendererFactory = createRendererFactory({ redisPool, serverOptions, environmentOptions });
const rendererCacheOpts = Object.assign({
ttl: 60000, // 60 seconds TTL by default
statsInterval: 60000 // reports stats every milliseconds defined here
}, serverOptions.renderCache || {});
const rendererCache = new windshaft.cache.RendererCache(rendererFactory, rendererCacheOpts);
const rendererStatsReporter = new RendererStatsReporter(rendererCache, rendererCacheOpts.statsInterval);
rendererStatsReporter.start();
const tileBackend = new windshaft.backend.Tile(rendererCache);
const attributesBackend = new windshaft.backend.Attributes();
const concurrency = serverOptions.renderer.mapnik.poolSize +
serverOptions.renderer.mapnik.poolMaxWaitingClients;
const previewBackend = new windshaft.backend.Preview(rendererCache, { concurrency });
const mapValidatorBackend = new windshaft.backend.MapValidator(tileBackend, attributesBackend);
const mapBackend = new windshaft.backend.Map(rendererCache, mapStore, mapValidatorBackend);
const surrogateKeysCacheBackends = createSurrogateKeysCacheBackends(serverOptions);
const surrogateKeysCache = new SurrogateKeysCache(surrogateKeysCacheBackends);
const templateMaps = createTemplateMaps({ redisPool, surrogateKeysCache });
const templateMaps = createTemplateMaps({ redisPool, surrogateKeysCache, logger: this.serverOptions.logger });
const analysisStatusBackend = new AnalysisStatusBackend();
const analysisBackend = new AnalysisBackend(metadataBackend, serverOptions.analysis);
@@ -162,14 +151,16 @@ module.exports = class ApiRouter {
layergroupAffectedTablesCache
);
const namedMapProviderReporter = new NamedMapProviderReporter({
const namedMapProviderCacheReporter = new NamedMapProviderCacheReporter({
namedMapProviderCache,
intervalInMilliseconds: rendererCacheOpts.statsInterval
intervalInMilliseconds: serverOptions.renderCache.statsInterval
});
namedMapProviderCacheReporter.start();
namedMapProviderReporter.start();
const metricsBackend = new PubSubMetricsBackend(serverOptions.pubSubMetrics);
const collaborators = {
config: serverOptions,
analysisStatusBackend,
attributesBackend,
dataviewBackend,
@@ -189,82 +180,78 @@ module.exports = class ApiRouter {
layergroupMetadata,
namedMapProviderCache,
tablesExtentBackend,
clusterBackend
clusterBackend,
metricsBackend
};
this.metadataBackend = metadataBackend;
this.mapRouter = new MapRouter({ collaborators });
this.templateRouter = new TemplateRouter({ collaborators });
}
register (app) {
route (app, routes) {
// FIXME: we need a better way to reset cache while running tests
if (process.env.NODE_ENV === 'test') {
app.layergroupAffectedTablesCache = this.layergroupAffectedTablesCache;
}
Object.keys(this.serverOptions.routes).forEach(apiVersion => {
const routes = this.serverOptions.routes[apiVersion];
routes.forEach(route => {
const apiRouter = router({ mergeParams: true });
const { paths, middlewares = [] } = route;
apiRouter.use(logger(this.serverOptions));
apiRouter.use(initializeStatusCode());
apiRouter.use(bodyParser.json());
apiRouter.use(servedByHostHeader());
apiRouter.use(stats({
apiRouter.use(initLogger({ logger: this.serverOptions.logger }));
apiRouter.use(user(this.metadataBackend));
apiRouter.use(profiler({
enabled: this.serverOptions.useProfiler,
statsClient: global.statsClient
}));
middlewares.forEach(middleware => apiRouter.use(middleware()));
apiRouter.use(initializeStatusCode());
apiRouter.use(bodyParser.json());
apiRouter.use(servedByHostHeader());
apiRouter.use(clientHeader());
apiRouter.use(lzmaMiddleware());
apiRouter.use(cors());
apiRouter.use(user());
this.templateRouter.register(apiRouter, routes.template.paths);
this.mapRouter.register(apiRouter, routes.map.paths);
this.templateRouter.route(apiRouter, route.template);
this.mapRouter.route(apiRouter, route.map);
apiRouter.use(sendResponse());
apiRouter.use(syntaxError());
apiRouter.use(errorMiddleware());
const apiPaths = routes.paths;
apiPaths.forEach(path => app.use(path, apiRouter));
paths.forEach(path => app.use(path, apiRouter));
});
}
};
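The refactored `route (app, routes)` method above no longer walks `serverOptions.routes` by API version; it receives an array of route entries, destructures `{ paths, middlewares }` from each, and hands `route.map` and `route.template` to the sub-routers. A plausible shape for one entry, inferred from that destructuring (the paths and values below are illustrative, not the project's actual configuration):

```js
// Illustrative only: inferred from the destructuring in ApiRouter.route() and
// MapRouter.route(); the real values live in the server configuration.
const routes = [
    {
        paths: ['/api/v1', '/user/:user/api/v1'], // mount points for the whole API router
        middlewares: [],                          // extra middlewares applied to every request
        map: [{ paths: ['/map'], middlewares: [] }],            // consumed by MapRouter.route()
        template: [{ paths: ['/map/named'], middlewares: [] }]  // consumed by TemplateRouter.route()
    }
];
```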
function createTemplateMaps ({ redisPool, surrogateKeysCache }) {
function createTemplateMaps ({ redisPool, surrogateKeysCache, logger }) {
const templateMaps = new TemplateMaps(redisPool, {
max_user_templates: global.environment.maxUserTemplates
});
function invalidateNamedMap (owner, templateName) {
var startTime = Date.now();
surrogateKeysCache.invalidate(new NamedMapsCacheEntry(owner, templateName), function(err) {
var logMessage = JSON.stringify({
username: owner,
type: 'named_map_invalidation',
elapsed: Date.now() - startTime,
error: !!err ? JSON.stringify(err.message) : undefined
});
function invalidateNamedMap (user, templateName) {
const startTime = Date.now();
surrogateKeysCache.invalidate(new NamedMapsCacheEntry(user, templateName), (err) => {
if (err) {
global.logger.warn(logMessage);
} else {
global.logger.info(logMessage);
return logger.error({ exception: err, 'cdb-user': user, template_id: templateName }, 'Named map invalidation failed');
}
const elapsed = Date.now() - startTime;
logger.info({ 'cdb-user': user, template_id: templateName, duration: elapsed / 1000, duration_ms: elapsed }, 'Named map invalidation success');
});
}
['update', 'delete'].forEach(function(eventType) {
['update', 'delete'].forEach(function (eventType) {
templateMaps.on(eventType, invalidateNamedMap);
});
return templateMaps;
}
function createSurrogateKeysCacheBackends(serverOptions) {
function createSurrogateKeysCacheBackends (serverOptions) {
var cacheBackends = [];
if (serverOptions.varnish_purge_enabled) {
@@ -282,52 +269,3 @@ function createSurrogateKeysCacheBackends(serverOptions) {
return cacheBackends;
}
const timeoutErrorTilePath = __dirname + '/../../../assets/render-timeout-fallback.png';
const timeoutErrorTile = require('fs').readFileSync(timeoutErrorTilePath, {encoding: null});
function createRendererFactory ({ redisPool, serverOptions, environmentOptions }) {
var onTileErrorStrategy;
if (environmentOptions.enabledFeatures.onTileErrorStrategy !== false) {
onTileErrorStrategy = function onTileErrorStrategy$TimeoutTile(err, tile, headers, stats, format, callback) {
function isRenderTimeoutError (err) {
return err.message === 'Render timed out';
}
function isDatasourceTimeoutError (err) {
return err.message && err.message.match(/canceling statement due to statement timeout/i);
}
function isTimeoutError (err) {
return isRenderTimeoutError(err) || isDatasourceTimeoutError(err);
}
function isRasterFormat (format) {
return format === 'png' || format === 'jpg';
}
if (isTimeoutError(err) && isRasterFormat(format)) {
return callback(null, timeoutErrorTile, {
'Content-Type': 'image/png',
}, {});
} else {
return callback(err, tile, headers, stats);
}
};
}
const rendererFactory = new windshaft.renderer.Factory({
onTileErrorStrategy: onTileErrorStrategy,
mapnik: {
redisPool: redisPool,
grainstore: serverOptions.grainstore,
mapnik: serverOptions.renderer.mapnik
},
http: serverOptions.renderer.http,
mvt: serverOptions.renderer.mvt,
torque: serverOptions.renderer.torque
});
return rendererFactory;
}


@@ -1,6 +1,7 @@
'use strict';
const PSQL = require('cartodb-psql');
const tag = require('../middlewares/tag');
const cleanUpQueryParams = require('../middlewares/clean-up-query-params');
const credentials = require('../middlewares/credentials');
const authorize = require('../middlewares/authorize');
@@ -17,12 +18,13 @@ module.exports = class AnalysesController {
this.userLimitsBackend = userLimitsBackend;
}
register (mapRouter) {
route (mapRouter) {
mapRouter.get('/analyses/catalog', this.middlewares());
}
middlewares () {
return [
tag({ tags: ['analysis', 'catalog'] }),
credentials(),
authorize(this.authBackend),
dbConnSetup(this.pgConnection),
@@ -48,10 +50,10 @@ function createPGClient () {
};
}
function getDataFromQuery({ queryTemplate, key }) {
function getDataFromQuery ({ queryTemplate, key }) {
const readOnlyTransactionOn = true;
return function getCatalogMiddleware(req, res, next) {
return function getCatalogMiddleware (req, res, next) {
const { pg, user } = res.locals;
const sql = queryTemplate({ _username: user });
@@ -82,27 +84,27 @@ function prepareResponse () {
}, {});
const analysisCatalog = catalog.map(analysis => {
if (analysisIdToTable.hasOwnProperty(analysis.node_id)) {
if (Object.prototype.hasOwnProperty.call(analysisIdToTable, analysis.node_id)) {
analysis.table = analysisIdToTable[analysis.node_id];
}
return analysis;
})
.sort((analysisA, analysisB) => {
if (!!analysisA.table && !!analysisB.table) {
return analysisB.table.size - analysisA.table.size;
}
.sort((analysisA, analysisB) => {
if (!!analysisA.table && !!analysisB.table) {
return analysisB.table.size - analysisA.table.size;
}
if (analysisA.table) {
return -1;
}
if (analysisB.table) {
return 1;
}
if (!!analysisA.table) {
return -1;
}
if (!!analysisB.table) {
return 1;
}
return -1;
});
});
res.statusCode = 200;
res.body = { catalog: analysisCatalog };
@@ -112,7 +114,7 @@ function prepareResponse () {
}
function unauthorizedError () {
return function unathorizedErrorMiddleware(err, req, res, next) {
return function unathorizedErrorMiddleware (err, req, res, next) {
if (err.message.match(/permission\sdenied/)) {
err = new Error('Unauthorized');
err.http_status = 401;
@@ -123,7 +125,7 @@ function unauthorizedError () {
}
const catalogQueryTpl = ctx => `
SELECT analysis_def->>'type' as type, * FROM cdb_analysis_catalog WHERE username = '${ctx._username}'
SELECT analysis_def->>'type' as type, * FROM cartodb.cdb_analysis_catalog WHERE username = '${ctx._username}'
`;
var tablesQueryTpl = ctx => `


@@ -1,5 +1,6 @@
'use strict';
const tag = require('../middlewares/tag');
const layergroupToken = require('../middlewares/layergroup-token');
const cleanUpQueryParams = require('../middlewares/clean-up-query-params');
const credentials = require('../middlewares/credentials');
@@ -17,12 +18,13 @@ module.exports = class AnalysisLayergroupController {
this.authBackend = authBackend;
}
register (mapRouter) {
route (mapRouter) {
mapRouter.get('/:token/analysis/node/:nodeId', this.middlewares());
}
middlewares () {
return [
tag({ tags: ['analysis', 'node'] }),
layergroupToken(),
credentials(),
authorize(this.authBackend),
@@ -35,7 +37,7 @@ module.exports = class AnalysisLayergroupController {
};
function analysisNodeStatus (analysisStatusBackend) {
return function analysisNodeStatusMiddleware(req, res, next) {
return function analysisNodeStatusMiddleware (req, res, next) {
const { nodeId } = req.params;
const dbParams = dbParamsFromResLocals(res.locals);


@@ -3,11 +3,11 @@
const windshaft = require('windshaft');
const MapConfig = windshaft.model.MapConfig;
const Datasource = windshaft.model.Datasource;
const tag = require('../middlewares/tag');
const cleanUpQueryParams = require('../middlewares/clean-up-query-params');
const credentials = require('../middlewares/credentials');
const dbConnSetup = require('../middlewares/db-conn-setup');
const authorize = require('../middlewares/authorize');
const initProfiler = require('../middlewares/init-profiler');
const checkJsonContentType = require('../middlewares/check-json-content-type');
const incrementMapViewCount = require('../middlewares/increment-map-view-count');
const augmentLayergroupData = require('../middlewares/augment-layergroup-data');
@@ -23,6 +23,7 @@ const mapError = require('../middlewares/map-error');
const CreateLayergroupMapConfigProvider = require('../../models/mapconfig/provider/create-layergroup-provider');
const rateLimit = require('../middlewares/rate-limit');
const { RATE_LIMIT_ENDPOINTS_GROUPS } = rateLimit;
const metrics = require('../middlewares/metrics');
module.exports = class AnonymousMapController {
/**
@@ -39,6 +40,7 @@ module.exports = class AnonymousMapController {
* @constructor
*/
constructor (
config,
pgConnection,
templateMaps,
mapBackend,
@@ -49,8 +51,10 @@ module.exports = class AnonymousMapController {
mapConfigAdapter,
statsBackend,
authBackend,
layergroupMetadata
layergroupMetadata,
metricsBackend
) {
this.config = config;
this.pgConnection = pgConnection;
this.templateMaps = templateMaps;
this.mapBackend = mapBackend;
@@ -62,32 +66,46 @@ module.exports = class AnonymousMapController {
this.statsBackend = statsBackend;
this.authBackend = authBackend;
this.layergroupMetadata = layergroupMetadata;
this.metricsBackend = metricsBackend;
}
register (mapRouter) {
route (mapRouter) {
mapRouter.options('/');
mapRouter.get('/', this.middlewares());
mapRouter.post('/', this.middlewares());
}
middlewares () {
const isTemplateInstantiation = false;
const useTemplateHash = false;
const includeQuery = true;
const label = 'ANONYMOUS LAYERGROUP';
const addContext = true;
const metricsTags = {
event: 'map_view',
attributes: { map_type: 'anonymous' },
from: {
req: {
query: { client: 'client' }
}
}
};
return [
tag({ tags: ['map', 'anonymous'] }),
metrics({
enabled: this.config.pubSubMetrics.enabled,
metricsBackend: this.metricsBackend,
tags: metricsTags
}),
credentials(),
authorize(this.authBackend),
dbConnSetup(this.pgConnection),
rateLimit(this.userLimitsBackend, RATE_LIMIT_ENDPOINTS_GROUPS.ANONYMOUS),
cleanUpQueryParams(['aggregation']),
initProfiler(isTemplateInstantiation),
checkJsonContentType(),
checkCreateLayergroup(),
prepareAdapterMapConfig(this.mapConfigAdapter),
createLayergroup (
createLayergroup(
this.mapBackend,
this.userLimitsBackend,
this.pgConnection,
@@ -124,22 +142,23 @@ function checkCreateLayergroup () {
}
}
req.profiler.done('checkCreateLayergroup');
return next();
};
}
function prepareAdapterMapConfig (mapConfigAdapter) {
return function prepareAdapterMapConfigMiddleware(req, res, next) {
return function prepareAdapterMapConfigMiddleware (req, res, next) {
const requestMapConfig = req.body;
const { user, api_key } = res.locals;
const { logger } = res.locals;
const { user, api_key: apiKey } = res.locals;
const { dbuser, dbname, dbpassword, dbhost, dbport } = res.locals;
const params = Object.assign({ dbuser, dbname, dbpassword, dbhost, dbport }, req.query);
const context = {
analysisConfiguration: {
user,
logger,
db: {
host: dbhost,
port: dbport,
@@ -149,31 +168,25 @@ function prepareAdapterMapConfig (mapConfigAdapter) {
},
batch: {
username: user,
apiKey: api_key
apiKey
}
}
};
mapConfigAdapter.getMapConfig(user,
requestMapConfig,
params,
context,
(err, requestMapConfig, stats = { overviewsAddedToMapconfig : false }) => {
req.profiler.done('anonymous.getMapConfig');
requestMapConfig,
params,
context,
(err, requestMapConfig) => {
if (err) {
return next(err);
}
stats.mapType = 'anonymous';
req.profiler.add(stats);
req.body = requestMapConfig;
res.locals.context = context;
if (err) {
return next(err);
}
req.body = requestMapConfig;
res.locals.context = context;
next();
});
next();
});
};
}
@@ -182,12 +195,17 @@ function createLayergroup (mapBackend, userLimitsBackend, pgConnection, affected
const requestMapConfig = req.body;
const { context } = res.locals;
const { user, cache_buster, api_key } = res.locals;
const { user, cache_buster: cacheBuster, api_key: apiKey } = res.locals;
const { dbuser, dbname, dbpassword, dbhost, dbport } = res.locals;
const params = {
cache_buster, api_key,
dbuser, dbname, dbpassword, dbhost, dbport
cache_buster: cacheBuster,
api_key: apiKey,
dbuser,
dbname,
dbpassword,
dbhost,
dbport
};
const datasource = context.datasource || Datasource.EmptyDatasource();
@@ -203,6 +221,7 @@ function createLayergroup (mapBackend, userLimitsBackend, pgConnection, affected
);
res.locals.mapConfig = mapConfig;
res.locals.mapConfigProvider = mapConfigProvider;
res.locals.analysesResults = context.analysesResults;
const mapParams = { dbuser, dbname, dbpassword, dbhost, dbport };
@@ -216,7 +235,6 @@ function createLayergroup (mapBackend, userLimitsBackend, pgConnection, affected
res.statusCode = 200;
res.body = layergroup;
res.locals.mapConfigProvider = mapConfigProvider;
next();
});


@@ -1,5 +1,6 @@
'use strict';
const tag = require('../middlewares/tag');
const layergroupToken = require('../middlewares/layergroup-token');
const cleanUpQueryParams = require('../middlewares/clean-up-query-params');
const credentials = require('../middlewares/credentials');
@@ -32,12 +33,13 @@ module.exports = class AttributesLayergroupController {
this.surrogateKeysCache = surrogateKeysCache;
}
register (mapRouter) {
route (mapRouter) {
mapRouter.get('/:token/:layer/attributes/:fid', this.middlewares());
}
middlewares () {
return [
tag({ tags: ['attributes'] }),
layergroupToken(),
credentials(),
authorize(this.authBackend),
@@ -61,8 +63,6 @@ module.exports = class AttributesLayergroupController {
function getFeatureAttributes (attributesBackend) {
return function getFeatureAttributesMiddleware (req, res, next) {
req.profiler.start('windshaft.maplayer_attribute');
const { mapConfigProvider } = res.locals;
const { token } = res.locals;
const { dbuser, dbname, dbpassword, dbhost, dbport } = res.locals;
@@ -70,8 +70,13 @@ function getFeatureAttributes (attributesBackend) {
const params = {
token,
dbuser, dbname, dbpassword, dbhost, dbport,
layer, fid
dbuser,
dbname,
dbpassword,
dbhost,
dbport,
layer,
fid
};
attributesBackend.getFeatureAttributes(mapConfigProvider, params, false, (err, tile, stats = {}) => {


@@ -1,5 +1,6 @@
'use strict';
const tag = require('../middlewares/tag');
const layergroupToken = require('../middlewares/layergroup-token');
const cleanUpQueryParams = require('../middlewares/clean-up-query-params');
const credentials = require('../middlewares/credentials');
@@ -32,19 +33,20 @@ module.exports = class AggregatedFeaturesLayergroupController {
this.surrogateKeysCache = surrogateKeysCache;
}
register (mapRouter) {
route (mapRouter) {
mapRouter.get('/:token/:layer/:z/cluster/:clusterId', this.middlewares());
}
middlewares () {
return [
tag({ tags: ['cluster'] }),
layergroupToken(),
credentials(),
authorize(this.authBackend),
dbConnSetup(this.pgConnection),
// TODO: create its rate limit
rateLimit(this.userLimitsBackend, RATE_LIMIT_ENDPOINTS_GROUPS.ATTRIBUTES),
cleanUpQueryParams([ 'aggregation' ]),
cleanUpQueryParams(['aggregation']),
createMapStoreMapConfigProvider(
this.mapStore,
this.userLimitsBackend,
@@ -62,8 +64,6 @@ module.exports = class AggregatedFeaturesLayergroupController {
function getClusteredFeatures (clusterBackend) {
return function getFeatureAttributesMiddleware (req, res, next) {
req.profiler.start('windshaft.maplayer_cluster_features');
const { mapConfigProvider } = res.locals;
const { user, token } = res.locals;
const { dbuser, dbname, dbpassword, dbhost, dbport } = res.locals;
@@ -71,9 +71,16 @@ function getClusteredFeatures (clusterBackend) {
const { aggregation } = req.query;
const params = {
user, token,
dbuser, dbname, dbpassword, dbhost, dbport,
layer, zoom, clusterId,
user,
token,
dbuser,
dbname,
dbpassword,
dbhost,
dbport,
layer,
zoom,
clusterId,
aggregation
};


@@ -1,5 +1,6 @@
'use strict';
const tag = require('../middlewares/tag');
const layergroupToken = require('../middlewares/layergroup-token');
const cleanUpQueryParams = require('../middlewares/clean-up-query-params');
const credentials = require('../middlewares/credentials');
@@ -18,14 +19,16 @@ const ALLOWED_DATAVIEW_QUERY_PARAMS = [
'own_filter', // 0, 1
'no_filters', // 0, 1
'bbox', // w,s,e,n
'circle', // json
'polygon', // json
'start', // number
'end', // number
'column_type', // string
'bins', // number
'aggregation', //string
'aggregation', // string
'offset', // number
'q', // widgets search
'categories', // number
'categories' // number
];
module.exports = class DataviewLayergroupController {
@@ -47,7 +50,7 @@ module.exports = class DataviewLayergroupController {
this.surrogateKeysCache = surrogateKeysCache;
}
register (mapRouter) {
route (mapRouter) {
// Undocumented/non-supported API endpoint methods.
// Use at your own peril.
@@ -74,6 +77,7 @@ module.exports = class DataviewLayergroupController {
middlewares ({ action, rateLimitGroup }) {
return [
tag({ tags: ['dataview', action] }),
layergroupToken(),
credentials(),
authorize(this.authBackend),


@@ -15,6 +15,7 @@ const ClusteredFeaturesLayergroupController = require('./clustered-features-laye
module.exports = class MapRouter {
constructor ({ collaborators }) {
const {
config,
analysisStatusBackend,
attributesBackend,
dataviewBackend,
@@ -34,7 +35,8 @@ module.exports = class MapRouter {
layergroupMetadata,
namedMapProviderCache,
tablesExtentBackend,
clusterBackend
clusterBackend,
metricsBackend
} = collaborators;
this.analysisLayergroupController = new AnalysisLayergroupController(
@@ -85,6 +87,7 @@ module.exports = class MapRouter {
);
this.anonymousMapController = new AnonymousMapController(
config,
pgConnection,
templateMaps,
mapBackend,
@@ -95,10 +98,12 @@ module.exports = class MapRouter {
mapConfigAdapter,
statsBackend,
authBackend,
layergroupMetadata
layergroupMetadata,
metricsBackend
);
this.previewTemplateController = new PreviewTemplateController(
config,
namedMapProviderCache,
previewBackend,
surrogateKeysCache,
@@ -106,7 +111,8 @@ module.exports = class MapRouter {
metadataBackend,
pgConnection,
authBackend,
userLimitsBackend
userLimitsBackend,
metricsBackend
);
this.analysesController = new AnalysesCatalogController(
@@ -126,19 +132,25 @@ module.exports = class MapRouter {
);
}
register (apiRouter, mapPaths) {
route (apiRouter, routes) {
const mapRouter = router({ mergeParams: true });
this.analysisLayergroupController.register(mapRouter);
this.attributesLayergroupController.register(mapRouter);
this.dataviewLayergroupController.register(mapRouter);
this.previewLayergroupController.register(mapRouter);
this.tileLayergroupController.register(mapRouter);
this.anonymousMapController.register(mapRouter);
this.previewTemplateController.register(mapRouter);
this.analysesController.register(mapRouter);
this.clusteredFeaturesLayergroupController.register(mapRouter);
routes.forEach(route => {
const { paths, middlewares = [] } = route;
mapPaths.forEach(path => apiRouter.use(path, mapRouter));
middlewares.forEach(middleware => mapRouter.use(middleware()));
this.analysisLayergroupController.route(mapRouter);
this.attributesLayergroupController.route(mapRouter);
this.dataviewLayergroupController.route(mapRouter);
this.previewLayergroupController.route(mapRouter);
this.tileLayergroupController.route(mapRouter);
this.anonymousMapController.route(mapRouter);
this.previewTemplateController.route(mapRouter);
this.analysesController.route(mapRouter);
this.clusteredFeaturesLayergroupController.route(mapRouter);
paths.forEach(path => apiRouter.use(path, mapRouter));
});
}
};


@@ -1,5 +1,6 @@
'use strict';
const tag = require('../middlewares/tag');
const layergroupToken = require('../middlewares/layergroup-token');
const coordinates = require('../middlewares/coordinates');
const cleanUpQueryParams = require('../middlewares/clean-up-query-params');
@@ -35,7 +36,7 @@ module.exports = class PreviewLayergroupController {
this.surrogateKeysCache = surrogateKeysCache;
}
register (mapRouter) {
route (mapRouter) {
mapRouter.get('/static/center/:token/:z/:lat/:lng/:width/:height.:format', this.middlewares({
validateZoom: true,
previewType: 'centered'
@@ -61,6 +62,7 @@ module.exports = class PreviewLayergroupController {
}
return [
tag({ tags: ['static', 'tile'] }),
layergroupToken(),
validateZoom ? coordinates({ z: true, x: false, y: false }) : noop(),
credentials(),
@@ -100,7 +102,6 @@ function getPreviewImageByCenter (previewBackend) {
const options = { mapConfigProvider, format, width, height, zoom, center };
previewBackend.getImage(options, (err, image, stats = {}) => {
req.profiler.done(`render-${format}`);
req.profiler.add(stats);
if (err) {
@@ -133,7 +134,6 @@ function getPreviewImageByBoundingBox (previewBackend) {
const options = { mapConfigProvider, format, width, height, bbox };
previewBackend.getImage(options, (err, image, stats = {}) => {
req.profiler.done(`render-${format}`);
req.profiler.add(stats);
if (err) {


@@ -1,5 +1,6 @@
'use strict';
const tag = require('../middlewares/tag');
const cleanUpQueryParams = require('../middlewares/clean-up-query-params');
const credentials = require('../middlewares/credentials');
const dbConnSetup = require('../middlewares/db-conn-setup');
@@ -12,6 +13,7 @@ const lastModifiedHeader = require('../middlewares/last-modified-header');
const checkStaticImageFormat = require('../middlewares/check-static-image-format');
const rateLimit = require('../middlewares/rate-limit');
const { RATE_LIMIT_ENDPOINTS_GROUPS } = rateLimit;
const metrics = require('../middlewares/metrics');
const DEFAULT_ZOOM_CENTER = {
zoom: 1,
@@ -21,12 +23,13 @@ const DEFAULT_ZOOM_CENTER = {
}
};
function numMapper(n) {
function numMapper (n) {
return +n;
}
module.exports = class PreviewTemplateController {
constructor (
config,
namedMapProviderCache,
previewBackend,
surrogateKeysCache,
@@ -34,8 +37,10 @@ module.exports = class PreviewTemplateController {
metadataBackend,
pgConnection,
authBackend,
userLimitsBackend
userLimitsBackend,
metricsBackend
) {
this.config = config;
this.namedMapProviderCache = namedMapProviderCache;
this.previewBackend = previewBackend;
this.surrogateKeysCache = surrogateKeysCache;
@@ -44,14 +49,31 @@ module.exports = class PreviewTemplateController {
this.pgConnection = pgConnection;
this.authBackend = authBackend;
this.userLimitsBackend = userLimitsBackend;
this.metricsBackend = metricsBackend;
}
register (mapRouter) {
route (mapRouter) {
mapRouter.get('/static/named/:template_id/:width/:height.:format', this.middlewares());
}
middlewares () {
const metricsTags = {
event: 'map_view',
attributes: { map_type: 'static' },
from: {
req: {
query: { client: 'client' }
}
}
};
return [
tag({ tags: ['named', 'static', 'tile'] }),
metrics({
enabled: this.config.pubSubMetrics.enabled,
metricsBackend: this.metricsBackend,
tags: metricsTags
}),
credentials(),
authorize(this.authBackend),
dbConnSetup(this.pgConnection),
@@ -60,7 +82,8 @@ module.exports = class PreviewTemplateController {
checkStaticImageFormat(),
namedMapProvider({
namedMapProviderCache: this.namedMapProviderCache,
label: 'STATIC_VIZ_MAP', forcedFormat: 'png'
label: 'STATIC_VIZ_MAP',
forcedFormat: 'png'
}),
getTemplate({ label: 'STATIC_VIZ_MAP' }),
prepareLayerFilterFromPreviewLayers({
@@ -99,7 +122,7 @@ function getTemplate ({ label }) {
function prepareLayerFilterFromPreviewLayers ({ namedMapProviderCache, label }) {
return function prepareLayerFilterFromPreviewLayersMiddleware (req, res, next) {
const { template } = res.locals;
const { config, auth_token } = req.query;
const { config, auth_token: authToken } = req.query;
if (!template || !template.view || !template.view.preview_layers) {
return next();
@@ -109,8 +132,8 @@ function prepareLayerFilterFromPreviewLayers ({ namedMapProviderCache, label })
var layerVisibilityFilter = [];
template.layergroup.layers.forEach((layer, index) => {
if (previewLayers[''+index] !== false && previewLayers[layer.id] !== false) {
layerVisibilityFilter.push(''+index);
if (previewLayers['' + index] !== false && previewLayers[layer.id] !== false) {
layerVisibilityFilter.push('' + index);
}
});
@@ -118,21 +141,29 @@ function prepareLayerFilterFromPreviewLayers ({ namedMapProviderCache, label })
return next();
}
const { user, token, cache_buster, api_key } = res.locals;
const { user, token, cache_buster: cacheBuster, api_key: apiKey } = res.locals;
const { dbuser, dbname, dbpassword, dbhost, dbport } = res.locals;
const { template_id, format } = req.params;
const { template_id: templateId, format } = req.params;
const params = {
user, token, cache_buster, api_key,
dbuser, dbname, dbpassword, dbhost, dbport,
template_id, format
user,
token,
cache_buster: cacheBuster,
api_key: apiKey,
dbuser,
dbname,
dbpassword,
dbhost,
dbport,
template_id: templateId,
format
};
// overwrites 'all' default filter
params.layer = layerVisibilityFilter.join(',');
// recreates the provider
namedMapProviderCache.get(user, template_id, config, auth_token, params, (err, provider) => {
namedMapProviderCache.get(user, templateId, config, authToken, params, (err, provider) => {
if (err) {
err.label = label;
return next(err);
@@ -146,7 +177,7 @@ function prepareLayerFilterFromPreviewLayers ({ namedMapProviderCache, label })
}
function getStaticImageOptions ({ tablesExtentBackend }) {
return function getStaticImageOptionsMiddleware(req, res, next) {
return function getStaticImageOptionsMiddleware (req, res, next) {
const { user, mapConfigProvider, template } = res.locals;
const { zoom, lon, lat, bbox } = req.query;
const params = { zoom, lon, lat, bbox };
@@ -248,7 +279,7 @@ function getImageOptionsFromBoundingBox (bbox = '') {
}
}
function getImage({ previewBackend, label }) {
function getImage ({ previewBackend, label }) {
return function getImageMiddleware (req, res, next) {
const { imageOpts, mapConfigProvider } = res.locals;
const { zoom, center, bbox } = imageOpts;
@@ -263,7 +294,7 @@ function getImage({ previewBackend, label }) {
if (zoom !== undefined && center) {
const options = { mapConfigProvider, format, width, height, zoom, center };
return previewBackend.getImage(options, (err, image, stats) => {
return previewBackend.getImage(options, (err, image, stats = {}) => {
req.profiler.add(stats);
if (err) {
@@ -280,9 +311,8 @@ function getImage({ previewBackend, label }) {
const options = { mapConfigProvider, format, width, height, bbox };
previewBackend.getImage(options, (err, image, stats) => {
previewBackend.getImage(options, (err, image, stats = {}) => {
req.profiler.add(stats);
req.profiler.done('render-' + format);
if (err) {
err.label = label;
@@ -298,7 +328,7 @@ function getImage({ previewBackend, label }) {
}
function setContentTypeHeader () {
return function setContentTypeHeaderMiddleware(req, res, next) {
return function setContentTypeHeaderMiddleware (req, res, next) {
const format = req.params.format === 'jpg' ? 'jpeg' : 'png';
res.set('Content-Type', `image/${format}`);
@@ -307,25 +337,23 @@ function setContentTypeHeader () {
};
}
function incrementMapViewsError (ctx) {
return `ERROR: failed to increment mapview count for user '${ctx.user}': ${ctx.err}`;
}
function incrementMapViews ({ metadataBackend }) {
return function incrementMapViewsMiddleware(req, res, next) {
const { user, mapConfigProvider } = res.locals;
return function incrementMapViewsMiddleware (req, res, next) {
const { user, mapConfigProvider, logger } = res.locals;
mapConfigProvider.getMapConfig((err, mapConfig) => {
if (err) {
global.logger.log(incrementMapViewsError({ user, err }));
logger.warn({ exception: err }, 'Failed to increment mapview count');
return next();
}
res.locals.mapConfig = mapConfig;
const statTag = mapConfig.obj().stat_tag;
metadataBackend.incMapviewCount(user, statTag, (err) => {
if (err) {
global.logger.log(incrementMapViewsError({ user, err }));
logger.warn({ exception: err }, 'Failed to increment mapview count');
}
next();
@@ -334,7 +362,7 @@ function incrementMapViews ({ metadataBackend }) {
};
}
function templateZoomCenter(view) {
function templateZoomCenter (view) {
if (view.zoom !== undefined && view.center) {
return {
zoom: view.zoom,
@@ -344,7 +372,7 @@ function templateZoomCenter(view) {
return false;
}
function templateBounds(view) {
function templateBounds (view) {
if (view.bounds) {
var hasAllBounds = ['west', 'south', 'east', 'north'].every(prop => Number.isFinite(view.bounds[prop]));


@@ -1,5 +1,6 @@
'use strict';
const tag = require('../middlewares/tag');
const layergroupToken = require('../middlewares/layergroup-token');
const coordinates = require('../middlewares/coordinates');
const cleanUpQueryParams = require('../middlewares/clean-up-query-params');
@@ -43,22 +44,23 @@ module.exports = class TileLayergroupController {
this.surrogateKeysCache = surrogateKeysCache;
}
register (mapRouter) {
route (mapRouter) {
// REGEXP: doesn't match with `val`
const not = (val) => `(?!${val})([^\/]+?)`;
const not = (val) => `(?!${val})([^\/]+?)`; // eslint-disable-line no-useless-escape
// Sadly the path that matches 1 also matches 2, so we need to tell Express
// to run only the middlewares of the first path that matches;
// for that we use one array to group all paths.
mapRouter.get([
`/:token/:z/:x/:y@:scale_factor?x.:format`, // 1
`/:token/:z/:x/:y.:format`, // 2
'/:token/:z/:x/:y@:scale_factor?x.:format', // 1
'/:token/:z/:x/:y.:format', // 2
`/:token${not('static')}/:layer/:z/:x/:y.(:format)`
], this.middlewares());
}
middlewares () {
return [
tag({ tags: ['tile'] }),
layergroupToken(),
coordinates(),
credentials(),
@@ -90,14 +92,12 @@ function parseFormat (format = '') {
return SUPPORTED_FORMATS[prettyFormat] ? prettyFormat : 'invalid';
}
function getStatusCode(tile, format){
function getStatusCode (tile, format) {
return tile.length === 0 && format === 'mvt' ? 204 : 200;
}
function getTile (tileBackend) {
return function getTileMiddleware (req, res, next) {
req.profiler.start(`windshaft.${req.params.layer ? 'maplayer_tile' : 'map_tile'}`);
const { mapConfigProvider } = res.locals;
const { token } = res.locals;
const { layer, z, x, y, format } = req.params;
@@ -149,9 +149,8 @@ function incrementErrorMetrics (statsClient) {
function tileError () {
return function tileErrorMiddleware (err, req, res, next) {
// See https://github.com/Vizzuality/Windshaft-cartodb/issues/68
let errMsg = err.message ? ( '' + err.message ) : ( '' + err );
let errMsg = err.message ? ('' + err.message) : ('' + err);
// Rewrite mapnik parsing errors to start with layer number
const matches = errMsg.match("(.*) in style 'layer([0-9]+)'");


@@ -3,14 +3,12 @@
module.exports = function authorize (authBackend) {
return function authorizeMiddleware (req, res, next) {
authBackend.authorize(req, res, (err, authorized) => {
req.profiler.done('authorize');
if (err) {
return next(err);
}
if(!authorized) {
err = new Error("Sorry, you are unauthorized (permission denied)");
if (!authorized) {
err = new Error('Sorry, you are unauthorized (permission denied)');
err.http_status = 403;
return next(err);
}


@@ -6,11 +6,11 @@ module.exports = function setCacheChannelHeader () {
return next();
}
const { mapConfigProvider } = res.locals;
const { mapConfigProvider, logger } = res.locals;
mapConfigProvider.getAffectedTables((err, affectedTables) => {
if (err) {
global.logger.warn('ERROR generating Cache Channel Header:', err);
logger.warn({ exception: err }, 'Error generating Cache Channel Header');
return next();
}


@@ -0,0 +1,85 @@
'use strict';
const ONE_MINUTE_IN_SECONDS = 60;
const THREE_MINUTE_IN_SECONDS = 60 * 3;
const FIVE_MINUTES_IN_SECONDS = ONE_MINUTE_IN_SECONDS * 5;
const TEN_MINUTES_IN_SECONDS = ONE_MINUTE_IN_SECONDS * 10;
const FIFTEEN_MINUTES_IN_SECONDS = ONE_MINUTE_IN_SECONDS * 15;
const THIRTY_MINUTES_IN_SECONDS = ONE_MINUTE_IN_SECONDS * 30;
const ONE_HOUR_IN_SECONDS = ONE_MINUTE_IN_SECONDS * 60;
const ONE_YEAR_IN_SECONDS = ONE_HOUR_IN_SECONDS * 24 * 365;
const FALLBACK_TTL = global.environment.varnish.fallbackTtl || FIVE_MINUTES_IN_SECONDS;
const validFallbackTTL = [
ONE_MINUTE_IN_SECONDS,
THREE_MINUTE_IN_SECONDS,
FIVE_MINUTES_IN_SECONDS,
TEN_MINUTES_IN_SECONDS,
FIFTEEN_MINUTES_IN_SECONDS,
THIRTY_MINUTES_IN_SECONDS,
ONE_HOUR_IN_SECONDS
];
module.exports = function setCacheControlHeader ({
ttl = ONE_YEAR_IN_SECONDS,
fallbackTtl = FALLBACK_TTL,
revalidate = false
} = {}) {
if (!validFallbackTTL.includes(fallbackTtl)) {
const message = [
'Invalid fallback TTL value for Cache-Control header.',
`Got ${fallbackTtl}, expected ${validFallbackTTL.join(', ')}`
].join(' ');
throw new Error(message);
}
return function setCacheControlHeaderMiddleware (req, res, next) {
if (req.method !== 'GET') {
return next();
}
const { mapConfigProvider = { getAffectedTables: callback => callback() }, logger } = res.locals;
mapConfigProvider.getAffectedTables((err, affectedTables) => {
if (err) {
logger.warn({ exception: err }, 'Error generating Cache Control Header');
return next();
}
const directives = ['public'];
if (everyAffectedTableCanBeInvalidated(affectedTables)) {
directives.push(`max-age=${ttl}`);
} else {
directives.push(`max-age=${computeNextTTL({ ttlInSeconds: fallbackTtl })}`);
}
if (revalidate) {
directives.push('must-revalidate');
}
res.set('Cache-Control', directives.join(','));
next();
});
};
};
function everyAffectedTableCanBeInvalidated (affectedTables) {
const skipNotUpdatedAtTables = false;
const skipAnalysisCachedTables = true;
return affectedTables &&
affectedTables.getTables(skipNotUpdatedAtTables, skipAnalysisCachedTables)
.every(table => table.updated_at !== null);
}
function computeNextTTL ({ ttlInSeconds } = {}) {
const nowInSeconds = Math.ceil(Date.now() / 1000);
const secondsAfterPreviousTTLStep = nowInSeconds % ttlInSeconds;
const secondsToReachTheNextTTLStep = ttlInSeconds - secondsAfterPreviousTTLStep;
return secondsToReachTheNextTTLStep;
}
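To make the fallback branch concrete, here is a standalone sketch of the arithmetic in `computeNextTTL`: with the default five-minute fallback, every response gets a `max-age` that expires at the next fixed five-minute boundary, so copies cached within the same window are refreshed together rather than drifting apart.

```js
// Standalone sketch of the fallback arithmetic above (numbers are illustrative).
const fallbackTtl = 300;                            // FIVE_MINUTES_IN_SECONDS
const nowInSeconds = Math.ceil(Date.now() / 1000);  // e.g. 1600000123
const maxAge = fallbackTtl - (nowInSeconds % fallbackTtl); // e.g. 300 - 223 = 77
// Header emitted when some affected table has no updated_at timestamp:
console.log(`Cache-Control: public,max-age=${maxAge}`);
```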


@@ -1,13 +1,11 @@
'use strict';
module.exports = function checkJsonContentType () {
return function checkJsonContentTypeMiddleware(req, res, next) {
return function checkJsonContentTypeMiddleware (req, res, next) {
if (req.method === 'POST' && !req.is('application/json')) {
return next(new Error('POST data must be of type application/json'));
}
req.profiler.done('checkJsonContentTypeMiddleware');
next();
};
};


@@ -4,7 +4,7 @@ const VALID_IMAGE_FORMATS = ['png', 'jpg'];
module.exports = function checkStaticImageFormat () {
return function checkStaticImageFormatMiddleware (req, res, next) {
if(!VALID_IMAGE_FORMATS.includes(req.params.format)) {
if (!VALID_IMAGE_FORMATS.includes(req.params.format)) {
return next(new Error(`Unsupported image format "${req.params.format}"`));
}


@@ -0,0 +1,13 @@
'use strict';
module.exports = function clientHeader () {
return function clientHeaderMiddleware (req, res, next) {
const { client } = req.query;
if (client) {
res.set('Carto-Client', client);
}
return next();
};
};
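Usage of the new middleware is straightforward; a minimal sketch (the require path and port are assumptions for illustration):

```js
// Minimal sketch: echo the `client` query parameter back as a response header.
const express = require('express');
const clientHeader = require('./client-header'); // path assumed for illustration

const app = express();
app.use(clientHeader());
app.get('/ping', (req, res) => res.sendStatus(204));
// GET /ping?client=my-dashboard  ->  response carries `Carto-Client: my-dashboard`
app.listen(3000);
```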


@@ -6,15 +6,18 @@ module.exports = function cors () {
'X-Requested-With',
'X-Prototype-Version',
'X-CSRF-Token',
'Authorization'
'Authorization',
'Carto-Event',
'Carto-Event-Source',
'Carto-Event-Group-Id'
];
if (req.method === 'OPTIONS') {
headers.push('Content-Type');
}
res.set("Access-Control-Allow-Origin", "*");
res.set("Access-Control-Allow-Headers", headers.join(', '));
res.set('Access-Control-Allow-Origin', '*');
res.set('Access-Control-Allow-Headers', headers.join(', '));
next();
};

View File

@@ -3,24 +3,24 @@
const basicAuth = require('basic-auth');
module.exports = function credentials () {
return function credentialsMiddleware(req, res, next) {
return function credentialsMiddleware (req, res, next) {
const apikeyCredentials = getApikeyCredentialsFromRequest(req);
res.locals.api_key = apikeyCredentials.token;
res.locals.basicAuthUsername = apikeyCredentials.username;
res.set('vary', 'Authorization'); //Honor Authorization header when caching.
res.set('vary', 'Authorization'); // Honor Authorization header when caching.
return next();
};
};
function getApikeyCredentialsFromRequest(req) {
function getApikeyCredentialsFromRequest (req) {
let apikeyCredentials = {
token: null,
username: null,
username: null
};
for (let getter of apikeyGetters) {
for (const getter of apikeyGetters) {
apikeyCredentials = getter(req);
if (apikeyTokenFound(apikeyCredentials)) {
break;
@@ -33,10 +33,10 @@ function getApikeyCredentialsFromRequest(req) {
const apikeyGetters = [
getApikeyTokenFromHeaderAuthorization,
getApikeyTokenFromRequestQueryString,
getApikeyTokenFromRequestBody,
getApikeyTokenFromRequestBody
];
function getApikeyTokenFromHeaderAuthorization(req) {
function getApikeyTokenFromHeaderAuthorization (req) {
const credentials = basicAuth(req);
if (credentials) {
@@ -47,12 +47,12 @@ function getApikeyTokenFromHeaderAuthorization(req) {
} else {
return {
username: null,
token: null,
token: null
};
}
}
function getApikeyTokenFromRequestQueryString(req) {
function getApikeyTokenFromRequestQueryString (req) {
let token = null;
if (req.query && req.query.api_key) {
@@ -63,11 +63,11 @@ function getApikeyTokenFromRequestQueryString(req) {
return {
username: null,
token: token,
token: token
};
}
function getApikeyTokenFromRequestBody(req) {
function getApikeyTokenFromRequestBody (req) {
let token = null;
if (req.body && req.body.api_key) {
@@ -78,10 +78,10 @@ function getApikeyTokenFromRequestBody(req) {
return {
username: null,
token: token,
token: token
};
}
function apikeyTokenFound(apikey) {
function apikeyTokenFound (apikey) {
return !!apikey && !!apikey.token;
}
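The getters above are tried in order: the HTTP basic Authorization header first, then the api_key query parameter, then the request body; the first one that yields a token wins. A minimal sketch of the header case, using the basic-auth package already required above (credentials are made up):

// Minimal sketch of the Authorization-header path, with made-up credentials.
const basicAuth = require('basic-auth');

const req = {
    headers: {
        authorization: 'Basic ' + Buffer.from('my-user:my-api-key').toString('base64')
    },
    query: { api_key: 'ignored-because-the-header-wins' }
};

const creds = basicAuth(req);
console.log(creds.name, creds.pass); // my-user my-api-key
// The middleware would then set res.locals.api_key = 'my-api-key'
// and res.locals.basicAuthUsername = 'my-user'.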

View File

@@ -7,10 +7,8 @@ module.exports = function dbConnSetup (pgConnection) {
const { user } = res.locals;
pgConnection.setDBConn(user, res.locals, (err) => {
req.profiler.done('dbConnSetup');
if (err) {
if (err.message && -1 !== err.message.indexOf('name not found')) {
if (err.message && err.message.indexOf('name not found') !== -1) {
err.http_status = 404;
}

View File

@@ -1,42 +1,31 @@
'use strict';
const _ = require('underscore');
const debug = require('debug')('windshaft:cartodb:error-middleware');
const setCommonHeaders = require('../../utils/common-headers');
module.exports = function errorMiddleware (/* options */) {
return function error (err, req, res, next) {
// jshint unused:false
// jshint maxcomplexity:9
var allErrors = Array.isArray(err) ? err : [err];
const { logger } = res.locals;
const errors = populateLimitErrors(Array.isArray(err) ? err : [err]);
allErrors = populateLimitErrors(allErrors);
errors.forEach((err) => logger.error({ exception: err }, 'Error while handling the request'));
const label = err.label || 'UNKNOWN';
err = allErrors[0] || new Error(label);
allErrors[0] = err;
setCommonHeaders(req, res, () => {
const errorResponseBody = {
errors: errors.map(errorMessage),
errors_with_context: errors.map(errorMessageWithContext)
};
var statusCode = findStatusCode(err);
// If a callback was requested, force status to 200
res.status(req.query.callback ? 200 : findStatusCode(errors[0]));
setErrorHeader(allErrors, statusCode, res);
debug('[%s ERROR] -- %d: %s, %s', label, statusCode, err, err.stack);
if (req.query && req.query.callback) {
res.jsonp(errorResponseBody);
} else {
res.json(errorResponseBody);
}
// If a callback was requested, force status to 200
if (req.query && req.query.callback) {
statusCode = 200;
}
var errorResponseBody = {
errors: allErrors.map(errorMessage),
errors_with_context: allErrors.map(errorMessageWithContext)
};
res.status(statusCode);
if (req.query && req.query.callback) {
res.jsonp(errorResponseBody);
} else {
res.json(errorResponseBody);
}
return next();
});
};
};
@@ -52,10 +41,10 @@ function isTimeoutError (errorTypes) {
return errorTypes.renderTimeoutError || errorTypes.datasourceTimeoutError;
}
function getErrorTypes(error) {
function getErrorTypes (error) {
return {
renderTimeoutError: isRenderTimeoutError(error),
datasourceTimeoutError: isDatasourceTimeoutError(error),
datasourceTimeoutError: isDatasourceTimeoutError(error)
};
}
@@ -99,9 +88,9 @@ function populateLimitErrors (errors) {
});
}
function findStatusCode(err) {
function findStatusCode (err) {
var statusCode;
if ( err.http_status ) {
if (err.http_status) {
statusCode = err.http_status;
} else {
statusCode = statusFromErrorMessage('' + err);
@@ -111,43 +100,38 @@ function findStatusCode(err) {
module.exports.findStatusCode = findStatusCode;
function statusFromErrorMessage(errMsg) {
function statusFromErrorMessage (errMsg) {
// Find an appropriate statusCode based on message
// jshint maxcomplexity:7
var statusCode = 400;
if ( -1 !== errMsg.indexOf('permission denied') ) {
if (errMsg.indexOf('permission denied') !== -1) {
statusCode = 403;
}
else if ( -1 !== errMsg.indexOf('authentication failed') ) {
} else if (errMsg.indexOf('authentication failed') !== -1) {
statusCode = 403;
}
else if (errMsg.match(/Postgis Plugin.*[\s|\n].*column.*does not exist/)) {
} else if (errMsg.match(/Postgis Plugin.*[\s|\n].*column.*does not exist/)) {
statusCode = 400;
}
else if ( -1 !== errMsg.indexOf('does not exist') ) {
if ( -1 !== errMsg.indexOf(' role ') ) {
} else if (errMsg.indexOf('does not exist') !== -1) {
if (errMsg.indexOf(' role ') !== -1) {
statusCode = 403; // role 'xxx' does not exist
} else if ( errMsg.match(/function .* does not exist/) ) {
} else if (errMsg.match(/function .* does not exist/)) {
statusCode = 400; // invalid SQL (SQL function does not exist)
} else {
statusCode = 404;
}
}
return statusCode;
}
function errorMessage(err) {
function errorMessage (err) {
// See https://github.com/Vizzuality/Windshaft-cartodb/issues/68
var message = (_.isString(err) ? err : err.message) || 'Unknown error';
var message = (typeof err === 'string' ? err : err.message) || 'Unknown error';
return stripConnectionInfo(message);
}
module.exports.errorMessage = errorMessage;
function stripConnectionInfo(message) {
function stripConnectionInfo (message) {
// Strip connection info, if any
return message
// See https://github.com/CartoDB/Windshaft/issues/173
@@ -168,71 +152,21 @@ function shouldBeExposed (prop) {
return !!ERROR_INFO_TO_EXPOSE[prop];
}
function errorMessageWithContext(err) {
function errorMessageWithContext (err) {
// See https://github.com/Vizzuality/Windshaft-cartodb/issues/68
var message = (_.isString(err) ? err : err.message) || 'Unknown error';
var message = (typeof err === 'string' ? err : err.message) || 'Unknown error';
var error = {
type: err.type || 'unknown',
message: stripConnectionInfo(message),
message: stripConnectionInfo(message)
};
for (var prop in err) {
// type & message are properties from Error's prototype and will be skipped
if (err.hasOwnProperty(prop) && shouldBeExposed(prop)) {
if (Object.prototype.hasOwnProperty.call(err, prop) && shouldBeExposed(prop)) {
error[prop] = err[prop];
}
}
return error;
}
function setErrorHeader(errors, statusCode, res) {
let errorsCopy = errors.slice(0);
const mainError = errorsCopy.shift();
let errorsLog = {
mainError: {
statusCode: statusCode || 200,
message: mainError.message,
name: mainError.name,
label: mainError.label,
type: mainError.type,
subtype: mainError.subtype
}
};
errorsLog.moreErrors = errorsCopy.map(error => {
return {
message: error.message,
name: error.name,
label: error.label,
type: error.type,
subtype: error.subtype
};
});
res.set('X-Tiler-Errors', stringifyForLogs(errorsLog));
}
/**
* Remove problematic nested characters
* from object for logs RegEx
*
* @param {Object} object
*/
function stringifyForLogs(object) {
Object.keys(object).map(key => {
if(typeof object[key] === 'string') {
object[key] = object[key].replace(/[^a-zA-Z0-9]/g, ' ');
} else if (typeof object[key] === 'object') {
stringifyForLogs(object[key]);
} else if (object[key] instanceof Array) {
for (let element of object[key]) {
stringifyForLogs(element);
}
}
});
return JSON.stringify(object);
}
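After the refactor each error is logged through the request-scoped logger and the response keeps its previous JSON shape: a flat errors array plus errors_with_context entries built by errorMessageWithContext. A hedged illustration of what a client might receive for a missing-table error (the message text is made up; any extra exposed properties depend on ERROR_INFO_TO_EXPOSE, which is outside this hunk):

// Illustrative body of a 404 response; values are made up for the example.
const exampleResponseBody = {
    errors: [
        'relation "missing_table" does not exist'
    ],
    errors_with_context: [
        {
            type: 'unknown', // err.type defaults to 'unknown'
            message: 'relation "missing_table" does not exist'
        }
    ]
};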

View File

@@ -0,0 +1,16 @@
'use strict';
module.exports = function incrementMapViewCount (metadataBackend) {
return function incrementMapViewCountMiddleware (req, res, next) {
const { mapConfig, user, logger } = res.locals;
const statTag = mapConfig.obj().stat_tag;
metadataBackend.incMapviewCount(user, statTag, (err) => {
if (err) {
logger.warn({ exception: err }, 'Failed to increment mapview count');
}
next();
});
};
};

View File

@@ -1,16 +1,18 @@
'use strict';
module.exports = function setLastModifiedHeader () {
return function setLastModifiedHeaderMiddleware(req, res, next) {
return function setLastModifiedHeaderMiddleware (req, res, next) {
if (req.method !== 'GET') {
return next();
}
const { mapConfigProvider, cache_buster } = res.locals;
const { mapConfigProvider, cache_buster: cacheBuster, logger } = res.locals;
if (cache_buster) {
const cacheBuster = parseInt(cache_buster, 10);
const lastModifiedDate = Number.isFinite(cacheBuster) ? new Date(cacheBuster) : new Date();
if (cacheBuster) {
const cacheBusterTimestamp = parseInt(cacheBuster, 10);
const lastModifiedDate = Number.isFinite(cacheBusterTimestamp) && cacheBusterTimestamp !== 0
? new Date(cacheBusterTimestamp)
: new Date();
res.set('Last-Modified', lastModifiedDate.toUTCString());
@@ -19,7 +21,7 @@ module.exports = function setLastModifiedHeader () {
mapConfigProvider.getAffectedTables((err, affectedTables) => {
if (err) {
global.logger.warn('ERROR generating Last Modified Header:', err);
logger.warn({ exception: err }, 'Error generating Last Modified Header');
return next();
}
@@ -34,6 +36,8 @@ module.exports = function setLastModifiedHeader () {
res.set('Last-Modified', lastModifiedDate.toUTCString());
res.locals.cache_buster = lastUpdatedAt;
next();
});
};
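When a numeric, non-zero cache buster is present it is treated as a millisecond timestamp for the Last-Modified header; anything else falls back to the current time. A small worked sketch of that branch (the timestamp is illustrative):

// Worked sketch of the cache-buster branch above; the timestamp is illustrative.
const cacheBuster = '1600000000000'; // usually carried in the layergroup token
const cacheBusterTimestamp = parseInt(cacheBuster, 10);
const lastModifiedDate = Number.isFinite(cacheBusterTimestamp) && cacheBusterTimestamp !== 0
    ? new Date(cacheBusterTimestamp)
    : new Date();

console.log(lastModifiedDate.toUTCString()); // Sun, 13 Sep 2020 12:26:40 GMT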

View File

@@ -11,6 +11,10 @@ module.exports = function setLastUpdatedTimeToLayergroup () {
}
if (!affectedTables) {
res.locals.cache_buster = 0;
layergroup.layergroupid = `${layergroup.layergroupid}:${res.locals.cache_buster}`;
layergroup.last_updated = new Date(res.locals.cache_buster).toISOString();
return next();
}
@@ -22,17 +26,19 @@ module.exports = function setLastUpdatedTimeToLayergroup () {
layergroup.layergroupid = layergroup.layergroupid + ':' + lastUpdateTime;
layergroup.last_updated = new Date(lastUpdateTime).toISOString();
res.locals.cache_buster = lastUpdateTime;
next();
});
};
};
function getLastUpdatedTime(analysesResults, lastUpdateTime) {
function getLastUpdatedTime (analysesResults, lastUpdateTime) {
if (!Array.isArray(analysesResults)) {
return lastUpdateTime;
}
return analysesResults.reduce(function(lastUpdateTime, analysis) {
return analysis.getNodes().reduce(function(lastNodeUpdatedAtTime, node) {
return analysesResults.reduce(function (lastUpdateTime, analysis) {
return analysis.getNodes().reduce(function (lastNodeUpdatedAtTime, node) {
var nodeUpdatedAtDate = node.getUpdatedAt();
var nodeUpdatedTimeAt = (nodeUpdatedAtDate && nodeUpdatedAtDate.getTime()) || 0;
return nodeUpdatedTimeAt > lastNodeUpdatedAtTime ? nodeUpdatedTimeAt : lastNodeUpdatedAtTime;

View File

@@ -1,7 +1,7 @@
'use strict';
module.exports = function setLayerStats (pgConnection, statsBackend) {
return function setLayerStatsMiddleware(req, res, next) {
return function setLayerStatsMiddleware (req, res, next) {
const { user, mapConfig } = res.locals;
const layergroup = res.body;
@@ -10,7 +10,7 @@ module.exports = function setLayerStats (pgConnection, statsBackend) {
return next(err);
}
statsBackend.getStats(mapConfig, connection, function(err, layersStats) {
statsBackend.getStats(mapConfig, connection, function (err, layersStats) {
if (err) {
return next(err);
}

View File

@@ -6,8 +6,9 @@ module.exports = function setLayergroupIdHeader (templateMaps, useTemplateHash)
const layergroup = res.body;
if (useTemplateHash) {
var templateHash = templateMaps.fingerPrint(template).substring(0, 8);
const templateHash = templateMaps.fingerPrint(template).substring(0, 8);
layergroup.layergroupid = `${user}@${templateHash}@${layergroup.layergroupid}`;
res.locals.templateHash = templateHash;
}
res.set('X-Layergroup-Id', layergroup.layergroupid);

View File

@@ -9,7 +9,7 @@ module.exports = function setMetadataToLayergroup (layergroupMetadata, includeQu
layergroupMetadata.addAnalysesMetadata(user, layergroup, analysesResults, includeQuery);
layergroupMetadata.addTurboCartoContextMetadata(layergroup, mapConfig.obj(), context);
layergroupMetadata.addAggregationContextMetadata(layergroup, mapConfig.obj(), context);
layergroupMetadata.addDateWrappingMetadata (layergroup, mapConfig.obj());
layergroupMetadata.addDateWrappingMetadata(layergroup, mapConfig.obj());
layergroupMetadata.addTileJsonMetadata(layergroup, user, mapConfig, userApiKey);
next();

View File

@@ -13,13 +13,17 @@ module.exports = function layergroupToken () {
res.locals.token = layergroupToken.token;
res.locals.cache_buster = layergroupToken.cacheBuster;
if (layergroupToken.templateHash) {
res.locals.templateHash = layergroupToken.templateHash;
}
if (layergroupToken.signer) {
res.locals.signer = layergroupToken.signer;
if (res.locals.signer !== user) {
const err = new Error(authErrorMessageTemplate(res.locals.signer, user));
err.type = 'auth';
err.http_status = (req.query && req.query.callback) ? 200: 403;
err.http_status = (req.query && req.query.callback) ? 200 : 403;
return next(err);
}

View File

@@ -0,0 +1,12 @@
'use strict';
const uuid = require('uuid');
module.exports = function initLogger ({ logger }) {
return function initLoggerMiddleware (req, res, next) {
res.locals.logger = logger.child({ request_id: req.get('X-Request-Id') || uuid.v4(), 'cdb-user': res.locals.user });
res.locals.logger.info({ client_request: req }, 'Incoming request');
res.on('finish', () => res.locals.logger.info({ server_response: res, status: res.statusCode }, 'Response sent'));
next();
};
};
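Each request gets a child logger stamped with a request id (taken from X-Request-Id or freshly generated) and the CARTO user, plus an incoming/outgoing log pair. A minimal sketch, assuming an Express app and a pino-style logger; the request/response serializers are assumed to live in the project's logger setup, outside this diff:

// Minimal sketch, assuming Express and a pino-style logger with child() support.
const express = require('express');
const pino = require('pino');
const initLogger = require('./logger'); // hypothetical path to the middleware above

const app = express();
app.use(initLogger({ logger: pino() }));

// From here on, res.locals.logger is a per-request child logger whose entries carry
// { request_id: <X-Request-Id header or a fresh uuid>, 'cdb-user': res.locals.user },
// so this middleware has to be mounted before anything that reads res.locals.logger.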

View File

@@ -6,26 +6,24 @@ module.exports = function lzma () {
const lzmaWorker = new LZMA();
return function lzmaMiddleware (req, res, next) {
if (!req.query.hasOwnProperty('lzma')) {
if (!Object.prototype.hasOwnProperty.call(req.query, 'lzma')) {
return next();
}
// Decode (from base64)
var lzma = new Buffer(req.query.lzma, 'base64')
var lzma = Buffer.from(req.query.lzma, 'base64')
.toString('binary')
.split('')
.map(function(c) {
.map(function (c) {
return c.charCodeAt(0) - 128;
});
// Decompress
lzmaWorker.decompress(lzma, function(result) {
lzmaWorker.decompress(lzma, function (result) {
try {
delete req.query.lzma;
Object.assign(req.query, JSON.parse(result));
req.profiler.done('lzma');
next();
} catch (err) {
next(new Error('Error parsing lzma as JSON: ' + err));

View File

@@ -4,7 +4,6 @@ module.exports = function mapError (options) {
const { addContext = false, label = 'MAPS CONTROLLER' } = options;
return function mapErrorMiddleware (err, req, res, next) {
req.profiler.done('error');
const { mapConfig } = res.locals;
if (addContext) {
@@ -17,7 +16,7 @@ module.exports = function mapError (options) {
};
};
function populateError(err, mapConfig) {
function populateError (err, mapConfig) {
var error = new Error(err.message);
error.http_status = err.http_status;

View File

@@ -10,15 +10,27 @@ module.exports = function createMapStoreMapConfigProvider (
forcedFormat = null
) {
return function createMapStoreMapConfigProviderMiddleware (req, res, next) {
const { user, token, cache_buster, api_key } = res.locals;
const { user, token, cache_buster: cacheBuster, api_key: apiKey } = res.locals;
const { dbuser, dbname, dbpassword, dbhost, dbport } = res.locals;
const { layer: layerFromParams, z, x, y, scale_factor, format } = req.params;
const { layer: layerFromParams, z, x, y, scale_factor: scaleFactor, format } = req.params;
const { layer: layerFromQuery } = req.query;
const params = {
user, token, cache_buster, api_key,
dbuser, dbname, dbpassword, dbhost, dbport,
layer: (layerFromQuery || layerFromParams), z, x, y, scale_factor, format
user,
token,
cache_buster: cacheBuster,
api_key: apiKey,
dbuser,
dbname,
dbpassword,
dbhost,
dbport,
layer: (layerFromQuery || layerFromParams),
z,
x,
y,
scale_factor: scaleFactor,
format
};
if (forcedFormat) {

View File

@@ -0,0 +1,181 @@
'use strict';
const EVENT_VERSION = '1';
const MAX_LENGTH = 100;
module.exports = function metrics ({ enabled, tags, metricsBackend }) {
if (!enabled) {
return function metricsDisabledMiddleware (req, res, next) {
next();
};
}
if (!tags || !tags.event) {
throw new Error('Missing required "event" parameter to report metrics');
}
return function metricsMiddleware (req, res, next) {
// FIXME: use the parent logger as we don't want to bind the error to the request

// but we still want to know if an error is thrown
const { logger } = res.locals;
res.on('finish', () => {
const { event, attributes } = getEventData(req, res, tags);
metricsBackend.send(event, attributes)
.catch((err) => logger.error({ exception: err, event }, 'Failed to publish event'));
});
return next();
};
};
function getEventData (req, res, tags) {
const event = tags.event;
const extra = {};
if (tags.from) {
if (tags.from.req) {
Object.assign(extra, getFromReq(req, tags.from.req));
}
if (tags.from.res) {
Object.assign(extra, getFromRes(res, tags.from.res));
}
}
const attributes = Object.assign({}, {
client_event: normalizedField(req.get('Carto-Event')),
client_event_group_id: normalizedField(req.get('Carto-Event-Group-Id')),
event_source: normalizedField(req.get('Carto-Event-Source')),
event_time: new Date().toISOString(),
user_id: res.locals.userId,
user_agent: req.get('User-Agent'),
map_id: getLayergroupid({ res }),
cache_buster: getCacheBuster({ res }),
template_hash: getTemplateHash({ res }),
stat_tag: getStatTag({ res }),
response_code: res.statusCode.toString(),
response_time: getResponseTime(req),
source_domain: req.hostname,
event_version: EVENT_VERSION
}, tags.attributes, extra);
// remove undefined properties
Object.keys(attributes).forEach(key => attributes[key] === undefined && delete attributes[key]);
return { event, attributes };
}
function normalizedField (field) {
if (!field) {
return undefined;
}
return field.toString().trim().substr(0, MAX_LENGTH);
}
function getLayergroupid ({ res }) {
if (res.locals.token) {
return res.locals.token;
}
if (res.locals.mapConfig) {
return res.locals.mapConfig.id();
}
if (res.locals.mapConfigProvider && res.locals.mapConfigProvider.mapConfig) {
return res.locals.mapConfigProvider.mapConfig.id();
}
}
function getCacheBuster ({ res }) {
if (res.locals.cache_buster !== undefined) {
return `${res.locals.cache_buster}`;
}
if (res.locals.mapConfigProvider) {
return `${res.locals.mapConfigProvider.getCacheBuster()}`;
}
}
function getTemplateHash ({ res }) {
if (res.locals.templateHash) {
return res.locals.templateHash;
}
if (res.locals.mapConfigProvider && res.locals.mapConfigProvider.getTemplateHash) {
let templateHash;
try {
templateHash = res.locals.mapConfigProvider.getTemplateHash().substring(0, 8);
} catch (e) {}
return templateHash;
}
}
function getStatTag ({ res }) {
if (res.locals.mapConfig) {
return res.locals.mapConfig.obj().stat_tag;
}
// FIXME: don't expect that mapConfig is already set
if (res.locals.mapConfigProvider && res.locals.mapConfigProvider.mapConfig) {
return res.locals.mapConfigProvider.mapConfig.obj().stat_tag;
}
}
// FIXME: 'Profiler' might not be accurate enough
function getResponseTime (req) {
let stats;
try {
stats = req.profiler.toJSON();
} catch (e) {
return undefined;
}
return stats && stats.total ? stats.total.toString() : undefined;
}
function getFromReq (req, { query = {}, body = {}, params = {}, headers = {} } = {}) {
const extra = {};
for (const [queryParam, eventName] of Object.entries(query)) {
extra[eventName] = req.query[queryParam];
}
for (const [bodyParam, eventName] of Object.entries(body)) {
extra[eventName] = req.body[bodyParam];
}
for (const [pathParam, eventName] of Object.entries(params)) {
extra[eventName] = req.params[pathParam];
}
for (const [header, eventName] of Object.entries(headers)) {
extra[eventName] = req.get(header);
}
return extra;
}
function getFromRes (res, { body = {}, headers = {}, locals = {} } = {}) {
const extra = {};
if (res.body) {
for (const [bodyParam, eventName] of Object.entries(body)) {
extra[eventName] = res.body[bodyParam];
}
}
for (const [header, eventName] of Object.entries(headers)) {
extra[eventName] = res.get(header);
}
for (const [localParam, eventName] of Object.entries(locals)) {
extra[eventName] = res.locals[localParam];
}
return extra;
}
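The middleware is configured per route with an event name, optional static attributes, and a from mapping that lifts values out of the request or response into event attributes. A hedged wiring sketch mirroring the named-map controller further below; the backend here is a stub standing in for the real metricsBackend, which is assumed to expose a Promise-returning send(event, attributes):

// Hedged wiring sketch; paths and the backend stub are illustrative.
const express = require('express');
const metrics = require('./metrics'); // hypothetical path to the middleware above

const router = express.Router();
const metricsBackend = {
    send: (event, attributes) => {
        console.log(event, attributes);
        return Promise.resolve();
    }
};

router.post('/:template_id', metrics({
    enabled: true,
    metricsBackend,
    tags: {
        event: 'map_view',                        // required: the event to report
        attributes: { map_type: 'named' },        // static attributes merged into every event
        from: {
            req: {
                query: { client: 'client' }       // copies ?client=... into attributes.client
            }
        }
    }
}) /* , ...the rest of the route's middlewares */);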

View File

@@ -0,0 +1,46 @@
'use strict';
module.exports = function getNamedMapProvider ({ namedMapProviderCache, label, forcedFormat = null }) {
return function getNamedMapProviderMiddleware (req, res, next) {
const { user, token, cache_buster: cacheBuster, api_key: apiKey } = res.locals;
const { dbuser, dbname, dbpassword, dbhost, dbport } = res.locals;
const { template_id: templateId, layer: layerFromParams, z, x, y, format } = req.params;
const { layer: layerFromQuery } = req.query;
const params = {
user,
token,
cache_buster: cacheBuster,
api_key: apiKey,
dbuser,
dbname,
dbpassword,
dbhost,
dbport,
template_id: templateId,
layer: (layerFromQuery || layerFromParams),
z,
x,
y,
format
};
if (forcedFormat) {
params.format = forcedFormat;
params.layer = params.layer || 'all';
}
const { config, auth_token: authToken } = req.query;
namedMapProviderCache.get(user, templateId, config, authToken, params, (err, namedMapProvider) => {
if (err) {
err.label = label;
return next(err);
}
res.locals.mapConfigProvider = namedMapProvider;
next();
});
};
};

View File

@@ -0,0 +1,37 @@
'use strict';
const Profiler = require('../../stats/profiler-proxy');
const debug = require('debug')('windshaft:cartodb:stats');
const { name: prefix } = require('../../../package.json');
module.exports = function profiler (options) {
const { enabled = true, statsClient } = options;
return function profilerMiddleware (req, res, next) {
const { logger } = res.locals;
// TODO: stop using profiler and log stats instead of adding them to the profiler
req.profiler = new Profiler({
statsd_client: statsClient,
profile: enabled
});
req.profiler.start(prefix);
res.on('finish', () => {
req.profiler.done('response');
req.profiler.end();
const stats = req.profiler.toJSON();
logger.info({ stats, duration: stats.response / 1000, duration_ms: stats.response }, 'Request profiling stats');
try {
// May throw due to dns, see: http://github.com/CartoDB/Windshaft/issues/166
req.profiler.sendStats();
} catch (err) {
debug('error sending profiling stats: ' + err);
}
});
next();
};
};

View File

@@ -19,12 +19,12 @@ const RATE_LIMIT_ENDPOINTS_GROUPS = {
NAMED_TILES: 'named_tiles'
};
function rateLimit(userLimitsBackend, endpointGroup = null) {
function rateLimit (userLimitsBackend, endpointGroup = null) {
if (!isRateLimitEnabled(endpointGroup)) {
return function rateLimitDisabledMiddleware(req, res, next) { next(); };
return function rateLimitDisabledMiddleware (req, res, next) { next(); };
}
return function rateLimitMiddleware(req, res, next) {
return function rateLimitMiddleware (req, res, next) {
userLimitsBackend.getRateLimit(res.locals.user, endpointGroup, function (err, userRateLimit) {
if (err) {
return next(err);
@@ -46,7 +46,7 @@ function rateLimit(userLimitsBackend, endpointGroup = null) {
// retry is floor-rounded to whole seconds by redis-cell
res.set('Retry-After', retry + 1);
let rateLimitError = new Error(
const rateLimitError = new Error(
'You are over platform\'s limits: too many requests.' +
' Please contact us to know more details'
);
@@ -61,8 +61,7 @@ function rateLimit(userLimitsBackend, endpointGroup = null) {
};
}
function isRateLimitEnabled(endpointGroup) {
function isRateLimitEnabled (endpointGroup) {
return global.environment.enabledFeatures.rateLimitsEnabled &&
endpointGroup &&
global.environment.enabledFeatures.rateLimitsByEndpoint[endpointGroup];
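isRateLimitEnabled reads two feature flags from the global environment: a global switch plus a per-endpoint-group switch. A hedged sketch of that configuration (the values are illustrative; the limits themselves live in Redis via redis-cell, outside this diff):

// Hedged sketch of the feature flags read by isRateLimitEnabled above.
global.environment = {
    enabledFeatures: {
        rateLimitsEnabled: true,
        rateLimitsByEndpoint: {
            named_tiles: true // enables the 'named_tiles' endpoint group shown above
        }
    }
};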

View File

@@ -0,0 +1,24 @@
'use strict';
const setCommonHeaders = require('../../utils/common-headers');
module.exports = function sendResponse () {
return function sendResponseMiddleware (req, res, next) {
setCommonHeaders(req, res, () => {
res.status(res.statusCode);
if (Buffer.isBuffer(res.body)) {
res.send(res.body);
return next();
}
if (req.query.callback) {
res.jsonp(res.body);
return next();
}
res.json(res.body);
return next();
});
};
};

View File

@@ -1,11 +1,11 @@
'use strict';
const NamedMapsCacheEntry = require('../../cache/model/named_maps_entry');
const NamedMapsCacheEntry = require('../../cache/model/named-maps-entry');
const NamedMapMapConfigProvider = require('../../models/mapconfig/provider/named-map-provider');
module.exports = function setSurrogateKeyHeader ({ surrogateKeysCache }) {
return function setSurrogateKeyHeaderMiddleware(req, res, next) {
const { user, mapConfigProvider } = res.locals;
return function setSurrogateKeyHeaderMiddleware (req, res, next) {
const { user, mapConfigProvider, logger } = res.locals;
if (mapConfigProvider instanceof NamedMapMapConfigProvider) {
surrogateKeysCache.tag(res, new NamedMapsCacheEntry(user, mapConfigProvider.getTemplateName()));
@@ -17,7 +17,7 @@ module.exports = function setSurrogateKeyHeader ({ surrogateKeysCache }) {
mapConfigProvider.getAffectedTables((err, affectedTables) => {
if (err) {
global.logger.warn('ERROR generating Surrogate Key Header:', err);
logger.warn({ exception: err }, 'Error generating Surrogate Key Header');
return next();
}

View File

@@ -0,0 +1,15 @@
'use strict';
module.exports = function tag ({ tags }) {
if (!Array.isArray(tags) || !tags.every((tag) => typeof tag === 'string')) {
throw new Error('Required "tags" option must be a valid Array: [string, string, ...]');
}
return function tagMiddleware (req, res, next) {
const { logger } = res.locals;
res.locals.tags = tags;
res.on('finish', () => logger.info({ tags: res.locals.tags }, 'Request tagged'));
next();
};
};

View File

@@ -0,0 +1,29 @@
'use strict';
const CdbRequest = require('../../models/cdb-request');
module.exports = function user (metadataBackend) {
const cdbRequest = new CdbRequest();
return function userMiddleware (req, res, next) {
try {
res.locals.user = getUserNameFromRequest(req, cdbRequest);
} catch (err) {
return next(err);
}
metadataBackend.getUserId(res.locals.user, (err, userId) => {
if (err || !userId) {
return next();
}
res.locals.userId = userId;
return next();
});
};
};
function getUserNameFromRequest (req, cdbRequest) {
return cdbRequest.userByReq(req);
}

View File

@@ -1,12 +1,12 @@
'use strict';
const fs = require('fs');
const timeoutErrorVectorTile = fs.readFileSync(__dirname + '/../../../../assets/render-timeout-fallback.mvt');
module.exports = function vectorError() {
return function vectorErrorMiddleware(err, req, res, next) {
if(req.params.format === 'mvt') {
const path = require('path');
const timeoutErrorVectorTile = fs.readFileSync(path.join(__dirname, '/../../../assets/render-timeout-fallback.mvt'));
module.exports = function vectorError () {
return function vectorErrorMiddleware (err, req, res, next) {
if (req.params.format === 'mvt') {
if (isTimeoutError(err) || isRateLimitError(err)) {
res.set('Content-Type', 'application/x-protobuf');
return res.status(429).send(timeoutErrorVectorTile);
@@ -17,7 +17,6 @@ module.exports = function vectorError() {
};
};
function isRenderTimeoutError (err) {
return err.message === 'Render timed out';
}

View File

@@ -1,6 +1,7 @@
'use strict';
const { templateName } = require('../../backends/template_maps');
const { templateName } = require('../../backends/template-maps');
const tag = require('../middlewares/tag');
const credentials = require('../middlewares/credentials');
const rateLimit = require('../middlewares/rate-limit');
const { RATE_LIMIT_ENDPOINTS_GROUPS } = rateLimit;
@@ -18,8 +19,8 @@ module.exports = class AdminTemplateController {
this.userLimitsBackend = userLimitsBackend;
}
register (templateRouter) {
templateRouter.options(`/:template_id`);
route (templateRouter) {
templateRouter.options('/:template_id');
templateRouter.post('/', this.middlewares({
action: 'create',
@@ -76,6 +77,7 @@ module.exports = class AdminTemplateController {
}
return [
tag({ tags: ['named', 'admin', action] }),
credentials(),
authorizedByAPIKey({ authBackend: this.authBackend, action, label }),
rateLimit(this.userLimitsBackend, rateLimitGroup),
@@ -166,8 +168,6 @@ function updateTemplate ({ templateMaps }) {
function retrieveTemplate ({ templateMaps }) {
return function retrieveTemplateMiddleware (req, res, next) {
req.profiler.start('windshaft-cartodb.get_template');
const { user } = res.locals;
const templateId = templateName(req.params.template_id);
@@ -195,8 +195,6 @@ function retrieveTemplate ({ templateMaps }) {
function destroyTemplate ({ templateMaps }) {
return function destroyTemplateMiddleware (req, res, next) {
req.profiler.start('windshaft-cartodb.delete_template');
const { user } = res.locals;
const templateId = templateName(req.params.template_id);
@@ -215,8 +213,6 @@ function destroyTemplate ({ templateMaps }) {
function listTemplates ({ templateMaps }) {
return function listTemplatesMiddleware (req, res, next) {
req.profiler.start('windshaft-cartodb.get_template_list');
const { user } = res.locals;
templateMaps.listTemplates(user, (err, templateIds) => {

View File

@@ -1,10 +1,10 @@
'use strict';
const tag = require('../middlewares/tag');
const cleanUpQueryParams = require('../middlewares/clean-up-query-params');
const credentials = require('../middlewares/credentials');
const dbConnSetup = require('../middlewares/db-conn-setup');
const authorize = require('../middlewares/authorize');
const initProfiler = require('../middlewares/init-profiler');
const checkJsonContentType = require('../middlewares/check-json-content-type');
const incrementMapViewCount = require('../middlewares/increment-map-view-count');
const augmentLayergroupData = require('../middlewares/augment-layergroup-data');
@@ -21,6 +21,7 @@ const NamedMapMapConfigProvider = require('../../models/mapconfig/provider/named
const CreateLayergroupMapConfigProvider = require('../../models/mapconfig/provider/create-layergroup-provider');
const rateLimit = require('../middlewares/rate-limit');
const { RATE_LIMIT_ENDPOINTS_GROUPS } = rateLimit;
const metrics = require('../middlewares/metrics');
module.exports = class NamedMapController {
/**
@@ -38,6 +39,7 @@ module.exports = class NamedMapController {
* @constructor
*/
constructor (
config,
pgConnection,
templateMaps,
mapBackend,
@@ -48,8 +50,10 @@ module.exports = class NamedMapController {
mapConfigAdapter,
statsBackend,
authBackend,
layergroupMetadata
layergroupMetadata,
metricsBackend
) {
this.config = config;
this.pgConnection = pgConnection;
this.templateMaps = templateMaps;
this.mapBackend = mapBackend;
@@ -61,27 +65,41 @@ module.exports = class NamedMapController {
this.statsBackend = statsBackend;
this.authBackend = authBackend;
this.layergroupMetadata = layergroupMetadata;
this.metricsBackend = metricsBackend;
}
register (templateRouter) {
route (templateRouter) {
templateRouter.get('/:template_id/jsonp', this.middlewares());
templateRouter.post('/:template_id', this.middlewares());
}
middlewares () {
const isTemplateInstantiation = true;
const useTemplateHash = true;
const includeQuery = false;
const label = 'NAMED MAP LAYERGROUP';
const addContext = false;
const metricsTags = {
event: 'map_view',
attributes: { map_type: 'named' },
from: {
req: {
query: { client: 'client' }
}
}
};
return [
tag({ tags: ['map', 'named'] }),
metrics({
enabled: this.config.pubSubMetrics.enabled,
metricsBackend: this.metricsBackend,
tags: metricsTags
}),
credentials(),
authorize(this.authBackend),
dbConnSetup(this.pgConnection),
rateLimit(this.userLimitsBackend, RATE_LIMIT_ENDPOINTS_GROUPS.NAMED),
cleanUpQueryParams(['aggregation']),
initProfiler(isTemplateInstantiation),
checkJsonContentType(),
checkInstantiteLayergroup(),
getTemplate(
@@ -106,7 +124,7 @@ module.exports = class NamedMapController {
lastModifiedHeader(),
lastUpdatedTimeLayergroup(),
layerStats(this.pgConnection, this.statsBackend),
layergroupIdHeader(this.templateMaps ,useTemplateHash),
layergroupIdHeader(this.templateMaps, useTemplateHash),
layergroupMetadata(this.layergroupMetadata, includeQuery),
mapError({ label, addContext })
];
@@ -114,7 +132,7 @@ module.exports = class NamedMapController {
};
function checkInstantiteLayergroup () {
return function checkInstantiteLayergroupMiddleware(req, res, next) {
return function checkInstantiteLayergroupMiddleware (req, res, next) {
if (req.method === 'GET') {
const { callback, config } = req.query;
@@ -125,14 +143,12 @@ function checkInstantiteLayergroup () {
if (config) {
try {
req.body = JSON.parse(config);
} catch(e) {
} catch (e) {
return next(new Error('Invalid config parameter, should be a valid JSON'));
}
}
}
req.profiler.done('checkInstantiteLayergroup');
return next();
};
}
@@ -148,8 +164,8 @@ function getTemplate (
return function getTemplateMiddleware (req, res, next) {
const templateParams = req.body;
const { user, dbuser, dbname, dbpassword, dbhost, dbport } = res.locals;
const { template_id } = req.params;
const { auth_token } = req.query;
const { template_id: templateId } = req.params;
const { auth_token: authToken } = req.query;
const params = Object.assign({ dbuser, dbname, dbpassword, dbhost, dbport }, req.query);
@@ -161,16 +177,15 @@ function getTemplate (
mapConfigAdapter,
affectedTablesCache,
user,
template_id,
templateId,
templateParams,
auth_token,
authToken,
params
);
mapConfigProvider.getMapConfig((err, mapConfig, rendererParams, context, stats = {}) => {
req.profiler.done('named.getMapConfig');
mapConfigProvider.logger = res.locals.logger;
stats.mapType = 'named';
mapConfigProvider.getMapConfig((err, mapConfig, rendererParams, context, stats = {}) => {
req.profiler.add(stats);
if (err) {

View File

@@ -9,6 +9,7 @@ const TileTemplateController = require('./tile-template-controller');
module.exports = class TemplateRouter {
constructor ({ collaborators }) {
const {
config,
pgConnection,
templateMaps,
mapBackend,
@@ -22,9 +23,11 @@ module.exports = class TemplateRouter {
layergroupMetadata,
namedMapProviderCache,
tileBackend,
metricsBackend
} = collaborators;
this.namedMapController = new NamedMapController(
config,
pgConnection,
templateMaps,
mapBackend,
@@ -35,7 +38,8 @@ module.exports = class TemplateRouter {
mapConfigAdapter,
statsBackend,
authBackend,
layergroupMetadata
layergroupMetadata,
metricsBackend
);
this.tileTemplateController = new TileTemplateController(
@@ -54,13 +58,19 @@ module.exports = class TemplateRouter {
);
}
register (apiRouter, templatePaths) {
route (apiRouter, routes) {
const templateRouter = router({ mergeParams: true });
this.namedMapController.register(templateRouter);
this.tileTemplateController.register(templateRouter);
this.adminTemplateController.register(templateRouter);
routes.forEach(route => {
const { paths, middlewares = [] } = route;
templatePaths.forEach(path => apiRouter.use(path, templateRouter));
middlewares.forEach(middleware => templateRouter.use(middleware()));
this.namedMapController.route(templateRouter);
this.tileTemplateController.route(templateRouter);
this.adminTemplateController.route(templateRouter);
paths.forEach(path => apiRouter.use(path, templateRouter));
});
}
};
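route() now receives an array of route descriptors instead of a flat list of paths; each entry carries the mount paths and optional router-level middleware factories. A hedged sketch of the expected shape (the paths and the TemplateRouter construction are illustrative, not taken from this diff):

// Hedged sketch of the `routes` argument now expected by route().
const express = require('express');
const TemplateRouter = require('./template-router'); // hypothetical path

const apiRouter = express.Router({ mergeParams: true });
const collaborators = { /* config, pgConnection, templateMaps, ... wired in the server factory */ };
const templates = new TemplateRouter({ collaborators });

templates.route(apiRouter, [
    {
        // every path in this list gets the same nested template router mounted on it
        paths: [
            '/named',
            '/map/named'
        ],
        // optional factories: each is invoked with no arguments and must return an
        // Express middleware, matching `templateRouter.use(middleware())` above
        middlewares: []
    }
]);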

View File

@@ -1,5 +1,6 @@
'use strict';
const tag = require('../middlewares/tag');
const coordinates = require('../middlewares/coordinates');
const cleanUpQueryParams = require('../middlewares/clean-up-query-params');
const credentials = require('../middlewares/credentials');
@@ -31,12 +32,13 @@ module.exports = class TileTemplateController {
this.userLimitsBackend = userLimitsBackend;
}
register (templateRouter) {
route (templateRouter) {
templateRouter.get('/:template_id/:layer/:z/:x/:y.(:format)', this.middlewares());
}
middlewares () {
return [
tag({ tags: ['tile', 'named'] }),
coordinates(),
credentials(),
authorize(this.authBackend),
@@ -67,9 +69,8 @@ function getTile ({ tileBackend, label }) {
const { layer, z, x, y, format } = req.params;
const params = { layer, z, x, y, format };
tileBackend.getTile(mapConfigProvider, params, (err, tile, headers, stats) => {
tileBackend.getTile(mapConfigProvider, params, (err, tile, headers, stats = {}) => {
req.profiler.add(stats);
req.profiler.done('render-' + format);
if (err) {
err.label = label;
@@ -89,7 +90,7 @@ function getTile ({ tileBackend, label }) {
}
function setContentTypeHeader () {
return function setContentTypeHeaderMiddleware(req, res, next) {
return function setContentTypeHeaderMiddleware (req, res, next) {
res.set('Content-Type', res.get('content-type') || res.get('Content-Type') || 'image/png');
next();

View File

@@ -2,7 +2,7 @@
var PSQL = require('cartodb-psql');
function AnalysisStatusBackend() {
function AnalysisStatusBackend () {
}
module.exports = AnalysisStatusBackend;
@@ -10,12 +10,12 @@ module.exports = AnalysisStatusBackend;
AnalysisStatusBackend.prototype.getNodeStatus = function (nodeId, dbParams, callback) {
var statusQuery = [
'SELECT node_id, status, updated_at, last_error_message as error_message',
'FROM cdb_analysis_catalog where node_id = \'' + nodeId + '\''
'FROM cartodb.cdb_analysis_catalog where node_id = \'' + nodeId + '\''
].join(' ');
var pg = new PSQL(dbParams);
pg.query(statusQuery, function(err, result) {
pg.query(statusQuery, function (err, result) {
if (err) {
return callback(err, result);
}

View File

@@ -2,7 +2,6 @@
var _ = require('underscore');
var camshaft = require('camshaft');
var fs = require('fs');
var REDIS_LIMITS = {
DB: 5,
@@ -14,7 +13,6 @@ function AnalysisBackend (metadataBackend, options) {
this.options = options || {};
this.options.limits = this.options.limits || {};
this.setBatchConfig(this.options.batch);
this.setLoggerConfig(this.options.logger);
}
module.exports = AnalysisBackend;
@@ -27,38 +25,19 @@ AnalysisBackend.prototype.setBatchConfig = function (options) {
this.batchConfig = batchConfig;
};
AnalysisBackend.prototype.setLoggerConfig = function (options) {
this.loggerConfig = options || {};
if (this.loggerConfig.filename) {
this.stream = fs.createWriteStream(this.loggerConfig.filename, { flags: 'a', encoding: 'utf8' });
process.on('SIGHUP', function () {
if (this.stream) {
this.stream.destroy();
}
this.stream = fs.createWriteStream(this.loggerConfig.filename, { flags: 'a', encoding: 'utf8' });
}.bind(this));
}
};
AnalysisBackend.prototype.create = function(analysisConfiguration, analysisDefinition, callback) {
AnalysisBackend.prototype.create = function (analysisConfiguration, analysisDefinition, callback) {
analysisConfiguration.batch.endpoint = this.batchConfig.endpoint;
analysisConfiguration.batch.inlineExecution = this.batchConfig.inlineExecution;
analysisConfiguration.batch.hostHeaderTemplate = this.batchConfig.hostHeaderTemplate;
analysisConfiguration.logger = {
stream: this.stream ? this.stream : process.stdout
};
this.getAnalysesLimits(analysisConfiguration.user, function(err, limits) {
this.getAnalysesLimits(analysisConfiguration.user, function (err, limits) {
if (err) {}
analysisConfiguration.limits = limits || {};
camshaft.create(analysisConfiguration, analysisDefinition, callback);
});
};
AnalysisBackend.prototype.getAnalysesLimits = function(username, callback) {
AnalysisBackend.prototype.getAnalysesLimits = function (username, callback) {
var self = this;
var analysesLimits = {
@@ -70,16 +49,17 @@ AnalysisBackend.prototype.getAnalysesLimits = function(username, callback) {
}
};
Object.keys(self.options.limits).forEach(function(analysisTypeOrTag) {
Object.keys(self.options.limits).forEach(function (analysisTypeOrTag) {
analysesLimits.analyses[analysisTypeOrTag] = _.extend({}, self.options.limits[analysisTypeOrTag]);
});
var analysesLimitsKey = REDIS_LIMITS.PREFIX + username;
this.metadataBackend.redisCmd(REDIS_LIMITS.DB, 'HGETALL', [analysesLimitsKey], function(err, analysesTimeouts) {
this.metadataBackend.redisCmd(REDIS_LIMITS.DB, 'HGETALL', [analysesLimitsKey], function (err, analysesTimeouts) {
if (err) {}
// analysesTimeouts will be something like: { moran: 3000, intersection: 5000 }
analysesTimeouts = analysesTimeouts || {};
Object.keys(analysesTimeouts).forEach(function(analysisType) {
Object.keys(analysesTimeouts).forEach(function (analysisType) {
analysesLimits.analyses[analysisType] = _.defaults(
{
timeout: Number.isFinite(+analysesTimeouts[analysisType]) ? +analysesTimeouts[analysisType] : 0

View File

@@ -9,7 +9,7 @@
* @constructor
* @type {AuthBackend}
*/
function AuthBackend(pgConnection, metadataBackend, mapStore, templateMaps) {
function AuthBackend (pgConnection, metadataBackend, mapStore, templateMaps) {
this.pgConnection = pgConnection;
this.metadataBackend = metadataBackend;
this.mapStore = mapStore;
@@ -25,28 +25,28 @@ module.exports = AuthBackend;
// null if the request is not signed by anyone
// or will be a string cartodb username otherwise.
//
AuthBackend.prototype.authorizedBySigner = function(req, res, callback) {
if ( ! res.locals.token || ! res.locals.signer ) {
AuthBackend.prototype.authorizedBySigner = function (req, res, callback) {
if (!res.locals.token || !res.locals.signer) {
return callback(null, false); // no signer requested
}
var self = this;
var layergroup_id = res.locals.token;
var auth_token = req.query.auth_token;
var layergroupId = res.locals.token;
var authToken = req.query.auth_token;
this.mapStore.load(layergroup_id, function(err, mapConfig) {
this.mapStore.load(layergroupId, function (err, mapConfig) {
if (err) {
return callback(err);
}
var authorized = self.templateMaps.isAuthorized(mapConfig.obj().template, auth_token);
var authorized = self.templateMaps.isAuthorized(mapConfig.obj().template, authToken);
return callback(null, authorized);
});
};
function isValidApiKey(apikey) {
function isValidApiKey (apikey) {
return apikey.type &&
apikey.user &&
apikey.databasePassword &&
@@ -60,11 +60,11 @@ function isValidApiKey(apikey) {
// @param callback function(err, authorized)
// NOTE: authorized is expected to be 0 or 1 (integer)
//
AuthBackend.prototype.authorizedByAPIKey = function(user, res, callback) {
AuthBackend.prototype.authorizedByAPIKey = function (user, res, callback) {
const apikeyToken = res.locals.api_key;
const basicAuthUsername = res.locals.basicAuthUsername;
if ( ! apikeyToken ) {
if (!apikeyToken) {
return callback(null, false); // no api key, no authorization...
}
@@ -77,7 +77,7 @@ AuthBackend.prototype.authorizedByAPIKey = function(user, res, callback) {
return callback(err);
}
if ( !isValidApiKey(apikey)) {
if (!isValidApiKey(apikey)) {
const error = new Error('Unauthorized');
error.type = 'auth';
error.subtype = 'api-key-not-found';
@@ -109,7 +109,7 @@ AuthBackend.prototype.authorizedByAPIKey = function(user, res, callback) {
};
function isNameNotFoundError (err) {
return err.message && -1 !== err.message.indexOf('name not found');
return err.message && err.message.indexOf('name not found') !== -1;
}
function usernameMatches (basicAuthUsername, requestUsername) {
@@ -123,7 +123,7 @@ function usernameMatches (basicAuthUsername, requestUsername) {
* @param res - standard res object. Contains the auth parameters in locals
* @param callback function(err, allowed) is access allowed not?
*/
AuthBackend.prototype.authorize = function(req, res, callback) {
AuthBackend.prototype.authorize = function (req, res, callback) {
var user = res.locals.user;
this.authorizedByAPIKey(user, res, (err, isAuthorizedByApikey) => {
@@ -133,8 +133,6 @@ AuthBackend.prototype.authorize = function(req, res, callback) {
if (isAuthorizedByApikey) {
return this.pgConnection.setDBAuth(user, res.locals, 'regular', function (err) {
req.profiler.done('setDBAuth');
if (err) {
return callback(err);
}
@@ -150,8 +148,6 @@ AuthBackend.prototype.authorize = function(req, res, callback) {
if (isAuthorizedBySigner) {
return this.pgConnection.setDBAuth(user, res.locals, 'master', function (err) {
req.profiler.done('setDBAuth');
if (err) {
return callback(err);
}
@@ -163,8 +159,6 @@ AuthBackend.prototype.authorize = function(req, res, callback) {
// if no signer name was given, use default api key
if (!res.locals.signer) {
return this.pgConnection.setDBAuth(user, res.locals, 'default', function (err) {
req.profiler.done('setDBAuth');
if (err) {
return callback(err);
}

View File

@@ -7,6 +7,7 @@ const AggregationMapConfig = require('../models/aggregation/aggregation-mapconfi
const WebMercatorHelper = require('cartodb-query-tables').utils.webMercatorHelper;
const webmercator = new WebMercatorHelper();
const queryUtils = require('../../lib/utils/query-utils');
module.exports = class ClusterBackend {
getClusterFeatures (mapConfigProvider, params, callback) {
@@ -71,8 +72,8 @@ function getFeatures (pg, layer, params, callback) {
}
const SKIP_COLUMNS = {
'the_geom': true,
'the_geom_webmercator': true
the_geom: true,
the_geom_webmercator: true
};
function getColumnsName (pg, query, callback) {
@@ -89,7 +90,7 @@ function getColumnsName (pg, query, callback) {
const fields = resultSet.fields || [];
const columnNames = fields.map(field => field.name)
.filter(columnName => !SKIP_COLUMNS[columnName]);
.filter(columnName => !SKIP_COLUMNS[queryUtils.stripQuotes(columnName)]);
return callback(null, columnNames);
}, true);
@@ -100,7 +101,7 @@ function getClusterFeatures (pg, zoom, clusterId, columns, query, resolution, ag
zoom: zoom,
id: clusterId,
query: query,
res: 256/resolution,
res: 256 / resolution,
columns: columns
});
@@ -127,7 +128,7 @@ function getClusterFeatures (pg, zoom, clusterId, columns, query, resolution, ag
}
return callback(null, data);
} , true); // use read-only transaction
}, true); // use read-only transaction
}
const schemaQuery = ctx => `SELECT * FROM (${ctx.query}) __cdb_cluster_schema LIMIT 0`;
@@ -159,8 +160,8 @@ const clusterFeaturesQuery = ctx => `
`;
const gridResolution = ctx => {
const zoomResolution = webmercator.getResolution({ z : Math.min(38, ctx.zoom) });
return `${256/ctx.res} * (${zoomResolution})::double precision`;
const zoomResolution = webmercator.getResolution({ z: Math.min(38, ctx.zoom) });
return `${256 / ctx.res} * (${zoomResolution})::double precision`;
};
const aggregationQuery = ctx => `
@@ -194,9 +195,8 @@ function parseAggregation (aggregation) {
try {
aggregation = JSON.parse(aggregation);
} catch (err) {
throw new Error(`Invalid aggregation input, should be a a valid JSON`);
throw new Error('Invalid aggregation input, should be a a valid JSON');
}
}
return aggregation;
@@ -207,7 +207,7 @@ function validateAggregation (aggregation) {
const { columns, expressions } = aggregation;
if (!hasColumns(columns)) {
throw new Error(`Invalid aggregation input, columns should be and array of column names`);
throw new Error('Invalid aggregation input, columns should be and array of column names');
}
validateExpressions(expressions);
@@ -221,16 +221,16 @@ function hasColumns (columns) {
function validateExpressions (expressions) {
if (expressions !== undefined) {
if (!isValidExpression(expressions)) {
throw new Error(`Invalid aggregation input, expressions should be and object with valid functions`);
throw new Error('Invalid aggregation input, expressions should be and object with valid functions');
}
for (const { aggregate_function, aggregated_column } of Object.values(expressions)) {
if (typeof aggregated_column !== 'string') {
throw new Error(`Invalid aggregation input, aggregated column should be an string`);
for (const { aggregate_function: aggregateFunction, aggregated_column: aggregatedColumn } of Object.values(expressions)) {
if (typeof aggregatedColumn !== 'string') {
throw new Error('Invalid aggregation input, aggregated column should be an string');
}
if (typeof aggregate_function !== 'string') {
throw new Error(`Invalid aggregation input, aggregate function should be an string`);
if (typeof aggregateFunction !== 'string') {
throw new Error('Invalid aggregation input, aggregate function should be an string');
}
}
}
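parseAggregation/validateAggregation accept either a JSON string or an object with a columns array and, optionally, an expressions map whose entries name an aggregate function and the column it aggregates. A hedged example of an input that would pass the validation above (the column names are made up):

// Illustrative aggregation parameter that would pass the validation above.
const aggregation = {
    columns: ['category', 'value'],
    expressions: {
        value_avg: {
            aggregate_function: 'avg', // both fields must be strings
            aggregated_column: 'value'
        }
    }
};

// When it arrives via the query string it may come JSON-encoded instead;
// parseAggregation(JSON.stringify(aggregation)) yields the same object.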

View File

@@ -3,18 +3,20 @@
var _ = require('underscore');
var PSQL = require('cartodb-psql');
var BBoxFilter = require('../models/filter/bbox');
const CircleFilter = require('../models/filter/circle');
const PolygonFilter = require('../models/filter/polygon');
var DataviewFactory = require('../models/dataview/factory');
var DataviewFactoryWithOverviews = require('../models/dataview/overviews/factory');
const dbParamsFromReqParams = require('../utils/database-params');
var OverviewsQueryRewriter = require('../utils/overviews_query_rewriter');
var OverviewsQueryRewriter = require('../utils/overviews-query-rewriter');
var overviewsQueryRewriter = new OverviewsQueryRewriter({
zoom_level: 'CDB_ZoomFromScale(!scale_denominator!)'
zoom_level: 'cartodb.CDB_ZoomFromScale(!scale_denominator!)'
});
var dot = require('dot');
dot.templateSettings.strip = false;
function DataviewBackend(analysisBackend) {
function DataviewBackend (analysisBackend) {
this.analysisBackend = analysisBackend;
}
@@ -84,14 +86,20 @@ function getQueryWithFilters (dataviewDefinition, params) {
var query = getDataviewQuery(dataviewDefinition, ownFilter, noFilters);
if (params.bbox) {
var bboxFilter = new BBoxFilter({column: 'the_geom_webmercator', srid: 3857}, {bbox: params.bbox});
var bboxFilter = new BBoxFilter({ column: 'the_geom_webmercator', srid: 3857 }, { bbox: params.bbox });
query = bboxFilter.sql(query);
} else if (params.circle) {
const circleFilter = new CircleFilter({ column: 'the_geom_webmercator', srid: 3857 }, { circle: params.circle });
query = circleFilter.sql(query);
} else if (params.polygon) {
const polygonFilter = new PolygonFilter({ column: 'the_geom_webmercator', srid: 3857 }, { polygon: params.polygon });
query = polygonFilter.sql(query);
}
return query;
}
function getDataviewQuery(dataviewDefinition, ownFilter, noFilters) {
function getDataviewQuery (dataviewDefinition, ownFilter, noFilters) {
if (noFilters) {
return dataviewDefinition.sql.no_filters;
} else if (ownFilter === 1) {
@@ -101,9 +109,9 @@ function getDataviewQuery(dataviewDefinition, ownFilter, noFilters) {
}
}
function getQueryRewriteData(mapConfig, dataviewDefinition, params) {
function getQueryRewriteData (mapConfig, dataviewDefinition, params) {
var sourceId = dataviewDefinition.source.id; // node.id
var layer = _.find(mapConfig.obj().layers, function(l) {
var layer = _.find(mapConfig.obj().layers, function (l) {
return l.options.source && (l.options.source.id === sourceId);
});
var queryRewriteData = layer && layer.options.query_rewrite_data;
@@ -115,7 +123,7 @@ function getQueryRewriteData(mapConfig, dataviewDefinition, params) {
}
if (params.bbox && queryRewriteData) {
var bbox_filter_definition = {
var bboxFilterDefinition = {
type: 'bbox',
options: {
column: 'the_geom_webmercator',
@@ -125,22 +133,22 @@ function getQueryRewriteData(mapConfig, dataviewDefinition, params) {
bbox: params.bbox
}
};
queryRewriteData = _.extend(queryRewriteData, { bbox_filter: bbox_filter_definition });
queryRewriteData = _.extend(queryRewriteData, { bbox_filter: bboxFilterDefinition });
}
return queryRewriteData;
}
function getOverrideParams(params, ownFilter) {
function getOverrideParams (params, ownFilter) {
var overrideParams = _.reduce(_.pick(params, 'start', 'end', 'bins', 'offset', 'categories'),
function castNumbers(overrides, val, k) {
function castNumbers (overrides, val, k) {
if (!Number.isFinite(+val)) {
throw new Error('Invalid number format for parameter \'' + k + '\'');
}
overrides[k] = +val;
return overrides;
},
{ownFilter: ownFilter}
{ ownFilter: ownFilter }
);
// validation will be delegated to the proper dataview
@@ -197,12 +205,18 @@ function getQueryWithOwnFilters (dataviewDefinition, params) {
if (params.bbox) {
var bboxFilter = new BBoxFilter({ column: 'the_geom', srid: 4326 }, { bbox: params.bbox });
query = bboxFilter.sql(query);
} else if (params.circle) {
const circleFilter = new CircleFilter({ column: 'the_geom', srid: 4326 }, { circle: params.circle });
query = circleFilter.sql(query);
} else if (params.polygon) {
const polygonFilter = new PolygonFilter({ column: 'the_geom', srid: 4326 }, { polygon: params.polygon });
query = polygonFilter.sql(query);
}
return query;
}
function getDataviewDefinition(mapConfig, dataviewName) {
function getDataviewDefinition (mapConfig, dataviewName) {
var dataviews = mapConfig.dataviews || {};
return dataviews[dataviewName];
}

View File

@@ -3,32 +3,32 @@
var _ = require('underscore');
var AnalysisFilter = require('../models/filter/analysis');
function FilterStatsBackends(pgQueryRunner) {
function FilterStatsBackends (pgQueryRunner) {
this.pgQueryRunner = pgQueryRunner;
}
module.exports = FilterStatsBackends;
function getEstimatedRows(pgQueryRunner, username, query, callback) {
pgQueryRunner.run(username, "EXPLAIN (FORMAT JSON)"+query, function(err, result_rows) {
if (err){
function getEstimatedRows (pgQueryRunner, username, query, callback) {
pgQueryRunner.run(username, 'EXPLAIN (FORMAT JSON)' + query, function (err, resultRows) {
if (err) {
callback(err);
return;
}
var rows;
if ( result_rows[0] && result_rows[0]['QUERY PLAN'] &&
result_rows[0]['QUERY PLAN'][0] && result_rows[0]['QUERY PLAN'][0].Plan ) {
rows = result_rows[0]['QUERY PLAN'][0].Plan['Plan Rows'];
if (resultRows[0] && resultRows[0]['QUERY PLAN'] &&
resultRows[0]['QUERY PLAN'][0] && resultRows[0]['QUERY PLAN'][0].Plan) {
rows = resultRows[0]['QUERY PLAN'][0].Plan['Plan Rows'];
}
return callback(null, rows);
});
}
FilterStatsBackends.prototype.getFilterStats = function (username, unfiltered_query, filters, callback) {
FilterStatsBackends.prototype.getFilterStats = function (username, unfilteredQuery, filters, callback) {
var stats = {};
getEstimatedRows(this.pgQueryRunner, username, unfiltered_query, (err, rows) => {
if (err){
getEstimatedRows(this.pgQueryRunner, username, unfilteredQuery, (err, rows) => {
if (err) {
return callback(err);
}
@@ -39,10 +39,10 @@ FilterStatsBackends.prototype.getFilterStats = function (username, unfiltered_qu
}
var analysisFilter = new AnalysisFilter(filters);
var query = analysisFilter.sql(unfiltered_query);
var query = analysisFilter.sql(unfilteredQuery);
getEstimatedRows(this.pgQueryRunner, username, query, (err, rows) => {
if (err){
if (err) {
return callback(err);
}
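getEstimatedRows prefixes the query with EXPLAIN (FORMAT JSON) and reads the planner's row estimate from the first plan node. A sketch of the single result row PostgreSQL returns, as walked by the code above (the estimate and node type are illustrative):

// Shape of the row returned for EXPLAIN (FORMAT JSON); values are illustrative.
const resultRows = [
    {
        'QUERY PLAN': [
            {
                Plan: {
                    'Node Type': 'Seq Scan',
                    'Plan Rows': 12345
                }
            }
        ]
    }
];

const estimatedRows = resultRows[0]['QUERY PLAN'][0].Plan['Plan Rows'];
console.log(estimatedRows); // 12345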

View File

@@ -1,6 +1,6 @@
'use strict';
function EmptyLayerStats(types) {
function EmptyLayerStats (types) {
this._types = types || {};
}
@@ -10,7 +10,7 @@ EmptyLayerStats.prototype.is = function (type) {
EmptyLayerStats.prototype.getStats =
function (layer, dbConnection, callback) {
setImmediate(function() {
setImmediate(function () {
callback(null, {});
});
};

View File

@@ -5,7 +5,7 @@ var EmptyLayerStats = require('./empty-layer-stats');
var MapnikLayerStats = require('./mapnik-layer-stats');
var TorqueLayerStats = require('./torque-layer-stats');
module.exports = function LayerStatsFactory(type) {
module.exports = function LayerStatsFactory (type) {
var layerStatsIterator = [];
var selectedType = type || 'ALL';

View File

@@ -2,7 +2,7 @@
var queue = require('queue-async');
function LayerStats(layerStatsIterator) {
function LayerStats (layerStatsIterator) {
this.layerStatsIterator = layerStatsIterator;
}
@@ -41,7 +41,6 @@ LayerStats.prototype.getStats = function (mapConfig, dbConnection, callback) {
return callback(err, stats);
});
};
module.exports = LayerStats;

View File

@@ -15,7 +15,7 @@ MapnikLayerStats.prototype.is = function (type) {
return this._types[type] ? this._types[type] : false;
};
function columnAggregations(field) {
function columnAggregations (field) {
if (field.type === 'number') {
return ['min', 'max', 'avg', 'sum'];
}
@@ -28,25 +28,24 @@ function columnAggregations(field) {
return [];
}
function _getSQL(ctx, query, type='pre', zoom=0) {
function _getSQL (ctx, query, type = 'pre', zoom = 0) {
let sql;
if (type === 'pre') {
sql = ctx.preQuery;
}
else {
} else {
sql = ctx.aggrQuery;
}
sql = queryUtils.substituteTokensForZoom(sql, zoom || 0);
return query(sql);
}
function _estimatedFeatureCount(ctx) {
function _estimatedFeatureCount (ctx) {
return queryUtils.queryPromise(ctx.dbConnection, _getSQL(ctx, queryUtils.getQueryRowEstimation))
.then(res => ({ estimatedFeatureCount: res.rows[0].rows }))
.catch(() => ({ estimatedFeatureCount: -1 }));
}
function _featureCount(ctx) {
function _featureCount (ctx) {
if (ctx.metaOptions.featureCount) {
// TODO: if ctx.metaOptions.columnStats we can combine this with column stats query
return queryUtils.queryPromise(ctx.dbConnection, _getSQL(ctx, queryUtils.getQueryActualRowCount))
@@ -55,20 +54,20 @@ function _featureCount(ctx) {
return Promise.resolve();
}
function _aggrFeatureCount(ctx) {
if (ctx.metaOptions.hasOwnProperty('aggrFeatureCount')) {
function _aggrFeatureCount (ctx) {
if (Object.prototype.hasOwnProperty.call(ctx.metaOptions, 'aggrFeatureCount')) {
// We expect a zoom level as the value of aggrFeatureCount
// TODO: it'd be nice to admit an array of zoom levels to
// return metadata for multiple levels.
return queryUtils.queryPromise(
ctx.dbConnection,
_getSQL(ctx, queryUtils.getQueryActualRowCount, 'post', ctx.metaOptions.aggrFeatureCount)
_getSQL(ctx, queryUtils.getQueryActualRowCount, 'post', ctx.metaOptions.aggrFeatureCount)
).then(res => ({ aggrFeatureCount: res.rows[0].rows }));
}
return Promise.resolve();
}
function _geometryType(ctx) {
function _geometryType (ctx) {
if (ctx.metaOptions.geometryType) {
const geometryColumn = AggregationMapConfig.getAggregationGeometryColumn();
const sqlQuery = _getSQL(ctx, sql => queryUtils.getQueryGeometryType(sql, geometryColumn));
@@ -78,7 +77,7 @@ function _geometryType(ctx) {
return Promise.resolve();
}
function _columns(ctx) {
function _columns (ctx) {
if (ctx.metaOptions.columns || ctx.metaOptions.columnStats || ctx.metaOptions.dimensions) {
// note: post-aggregation columns are in layer.options.columns when aggregation is present
return queryUtils.queryPromise(ctx.dbConnection, _getSQL(ctx, sql => queryUtils.getQueryLimited(sql, 0)))
@@ -89,7 +88,7 @@ function _columns(ctx) {
// combine a list of results merging the properties of all the objects
// undefined results are admitted and ignored
function mergeResults(results) {
function mergeResults (results) {
if (results) {
if (results.length === 0) {
return {};
@@ -108,15 +107,15 @@ function mergeResults(results) {
// deeper (1 level) combination of a list of objects:
// mergeColumns([{ col1: { a: 1 }, col2: { a: 2 } }, { col1: { b: 3 } }]) => { col1: { a: 1, b: 3 }, col2: { a: 2 } }
function mergeColumns(results) {
function mergeColumns (results) {
if (results) {
if (results.length === 0) {
return {};
}
return results.reduce((a, b) => {
let c = Object.assign({}, b || {}, a || {});
const c = Object.assign({}, b || {}, a || {});
Object.keys(c).forEach(key => {
if (b.hasOwnProperty(key)) {
if (Object.prototype.hasOwnProperty.call(b, key)) {
c[key] = Object.assign(c[key], b[key]);
}
});
@@ -127,7 +126,7 @@ function mergeColumns(results) {
const DEFAULT_SAMPLE_ROWS = 100;
function _sample(ctx) {
function _sample (ctx) {
if (!ctx.metaOptions.sample) {
return Promise.resolve();
}
@@ -164,32 +163,32 @@ function _getSampleValuesFromRange (min, span, limit) {
return Array.from(sample);
}
function _columnsMetadataRequired(options) {
function _columnsMetadataRequired (options) {
// We need to determine the columns of a query
// if either column stats or dimension stats are required,
// since we'll ultimately use the same query to fetch both
return options.columnStats || options.dimensions;
}
function _columnStats(ctx, columns, dimensions) {
function _columnStats (ctx, columns, dimensions) {
if (!columns) {
return Promise.resolve();
}
if (_columnsMetadataRequired(ctx.metaOptions)) {
let queries = [];
const queries = [];
let aggr = [];
if (ctx.metaOptions.columnStats) {
queries.push(new Promise(resolve => resolve({ columns }))); // add columns as first result
Object.keys(columns).forEach(name => {
aggr = aggr.concat(
columnAggregations(columns[name])
.map(fn => `${fn}("${name}") AS "${name}_${fn}"`)
.map(fn => `${fn}("${name}") AS "${name}_${fn}"`)
);
if (columns[name].type === 'string') {
const topN = ctx.metaOptions.columnStats.topCategories || 1024;
const includeNulls = ctx.metaOptions.columnStats.hasOwnProperty('includeNulls') ?
ctx.metaOptions.columnStats.includeNulls :
true;
const includeNulls = Object.prototype.hasOwnProperty.call(ctx.metaOptions.columnStats, 'includeNulls')
? ctx.metaOptions.columnStats.includeNulls
: true;
// TODO: ctx.metaOptions.columnStats.maxCategories
// => use PG stats to dismiss columns with more distinct values
@@ -223,7 +222,7 @@ function _columnStats(ctx, columns, dimensions) {
ctx.dbConnection,
_getSQL(ctx, sql => `SELECT ${aggr.join(',')} FROM (${sql}) AS __cdb_query`)
).then(res => {
let stats = { columns: {}, dimensions: {} };
const stats = { columns: {}, dimensions: {} };
Object.keys(columns).forEach(name => {
stats.columns[name] = {};
columnAggregations(columns[name]).forEach(fn => {
@@ -245,62 +244,62 @@ function _columnStats(ctx, columns, dimensions) {
);
return Promise.all(queries).then(results => ({
columns: mergeColumns(results.map(r => r.columns)),
dimensions: mergeColumns(results.map( r => r.dimensions))
dimensions: mergeColumns(results.map(r => r.dimensions))
}));
}
return Promise.resolve({ columns });
}
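A small self-contained sketch of the SQL that the visible part of _columnStats assembles for numeric columns (the top-category queries for string columns are elided by the hunk and not reproduced; the column and table names are illustrative):
function buildColumnAggregationSQL (columns, innerSql) {
    // Mirrors the visible part of _columnStats above: one aggregate function
    // per numeric column, aliased as "<column>_<fn>".
    const aggregationsByType = { number: ['min', 'max', 'avg', 'sum'] };
    const aggr = [];
    Object.keys(columns).forEach(name => {
        (aggregationsByType[columns[name].type] || []).forEach(fn => {
            aggr.push(`${fn}("${name}") AS "${name}_${fn}"`);
        });
    });
    return `SELECT ${aggr.join(',')} FROM (${innerSql}) AS __cdb_query`;
}
buildColumnAggregationSQL({ price: { type: 'number' } }, 'SELECT * FROM listings');
// => 'SELECT min("price") AS "price_min",max("price") AS "price_max",avg("price") AS "price_avg",sum("price") AS "price_sum" FROM (SELECT * FROM listings) AS __cdb_query'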
// This is adapted from SQL API:
function fieldType(cname) {
function fieldType (cname) {
let tname;
switch (true) {
case /bool/.test(cname):
tname = 'boolean';
break;
case /int|float|numeric/.test(cname):
tname = 'number';
break;
case /text|char|unknown/.test(cname):
tname = 'string';
break;
case /date|time/.test(cname):
tname = 'date';
break;
default:
tname = cname;
case /bool/.test(cname):
tname = 'boolean';
break;
case /int|float|numeric/.test(cname):
tname = 'number';
break;
case /text|char|unknown/.test(cname):
tname = 'string';
break;
case /date|time/.test(cname):
tname = 'date';
break;
default:
tname = cname;
}
if ( tname && cname.match(/^_/) ) {
if (tname && cname.match(/^_/)) {
tname += '[]';
}
return tname;
}
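A few worked examples of the type mapping above (PostgreSQL type names on the left; the leading-underscore check turns array types into the []-suffixed form):
// Assuming the fieldType function shown above is in scope.
fieldType('int4');        // => 'number'
fieldType('text');        // => 'string'
fieldType('timestamptz'); // => 'date'
fieldType('bool');        // => 'boolean'
fieldType('_int4');       // => 'number[]' (array types arrive with a leading underscore)
fieldType('geometry');    // => 'geometry' (unmatched names pass through unchanged)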
function fieldTypeSafe(dbConnection, field) {
function fieldTypeSafe (dbConnection, field) {
const cname = dbConnection.typeName(field.dataTypeID);
return cname ? fieldType(cname) : `unknown(${field.dataTypeID})`;
}
// columns are returned as an object { columnName1: { type1: ...}, ..}
// for consistency with SQL API
function formatResultFields(dbConnection, fields = []) {
let nfields = {};
for (let field of fields) {
nfields[field.name] = { type: fieldTypeSafe(dbConnection, field) };
function formatResultFields (dbConnection, fields = []) {
const nfields = {};
for (const field of fields) {
nfields[field.name] = { type: fieldTypeSafe(dbConnection, field) };
}
return nfields;
}
MapnikLayerStats.prototype.getStats =
function (layer, dbConnection, callback) {
let aggrQuery = layer.options.sql;
let preQuery = layer.options.sql_raw || aggrQuery;
const aggrQuery = layer.options.sql;
const preQuery = layer.options.sql_raw || aggrQuery;
let ctx = {
const ctx = {
dbConnection,
preQuery,
aggrQuery,
metaOptions: layer.options.metadata || {},
metaOptions: layer.options.metadata || {}
};
// TODO: could save some queries if queryUtils.getAggregationMetadata() has been used and kept somewhere
@@ -316,7 +315,7 @@ function (layer, dbConnection, callback) {
Promise.all([
_estimatedFeatureCount(ctx).then(
({ estimatedFeatureCount }) => _sample(ctx)
.then(sampleResults => mergeResults([ sampleResults, { estimatedFeatureCount }] ))
.then(sampleResults => mergeResults([sampleResults, { estimatedFeatureCount }]))
),
_featureCount(ctx),
_aggrFeatureCount(ctx),

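Putting the helpers above together: which statistics MapnikLayerStats.getStats computes is driven entirely by layer.options.metadata. A hedged example of a layer definition that would exercise most of the branches (option names are taken from the checks above; the values, the table name and the shape of the merged result are assumptions):
const layer = {
    type: 'mapnik',
    options: {
        sql: 'SELECT * FROM shops',
        metadata: {
            featureCount: true,       // checked by _featureCount
            geometryType: true,       // checked by _geometryType
            columns: true,            // checked by _columns
            columnStats: { topCategories: 32, includeNulls: false }, // checked by _columnStats
            aggrFeatureCount: 12      // zoom level, as _aggrFeatureCount expects
        }
    }
};
// MapnikLayerStats.getStats(layer, dbConnection, callback) would then resolve a
// merged object along the lines of:
// { estimatedFeatureCount, featureCount, aggrFeatureCount, geometryType, columns: { ... } }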

@@ -1,6 +1,6 @@
'use strict';
function TorqueLayerStats() {
function TorqueLayerStats () {
this._types = {
torque: true
};

lib/backends/metrics.js (new file, +17 lines)

@@ -0,0 +1,17 @@
'use strict';
const { PubSub } = require('@google-cloud/pubsub');
module.exports = class MetricsBackend {
constructor (options = {}) {
const { project_id: projectId, credentials: keyFilename, topic } = options;
this._metricsClient = new PubSub({ projectId, keyFilename });
this._topicName = topic;
}
send (event, attributes) {
const data = Buffer.from(event);
return this._metricsClient.topic(this._topicName).publish(data, attributes);
}
};
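A hedged sketch of how the new MetricsBackend might be wired up; the option keys follow the destructuring in the constructor, while the concrete values, topic name and event payload are illustrative only:
const MetricsBackend = require('./lib/backends/metrics'); // path as added in this PR
// Option keys mirror the constructor above (project_id, credentials, topic);
// the values are placeholders.
const metricsBackend = new MetricsBackend({
    project_id: 'my-gcp-project',
    credentials: '/etc/secrets/pubsub-key.json',
    topic: 'raw-metric-events'
});
// `send` publishes the event name as the message body and the attributes as
// Pub/Sub message attributes, returning the publish promise.
metricsBackend.send('map_created', { user: 'alice', map_id: 'abc123' })
    .then(messageId => console.log('published', messageId))
    .catch(err => console.error('metrics publish failed', err));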


@@ -2,7 +2,7 @@
const queryUtils = require('../utils/query-utils');
function OverviewsMetadataBackend(pgQueryRunner) {
function OverviewsMetadataBackend (pgQueryRunner) {
this.pgQueryRunner = pgQueryRunner;
}
@@ -12,20 +12,20 @@ OverviewsMetadataBackend.prototype.getOverviewsMetadata = function (username, sq
// FIXME: Currently using internal function _cdb_schema_name
// CDB_Overviews should provide the schema information directly.
const query = `
SELECT *, _cdb_schema_name(base_table)
FROM CDB_Overviews(
CDB_QueryTablesText($windshaft$${queryUtils.substituteDummyTokens(sql)}$windshaft$)
SELECT *, cartodb._cdb_schema_name(base_table)
FROM cartodb.CDB_Overviews(
cartodb.CDB_QueryTablesText($windshaft$${queryUtils.substituteDummyTokens(sql)}$windshaft$)
);
`;
this.pgQueryRunner.run(username, query, function handleOverviewsRows(err, rows) {
if (err){
this.pgQueryRunner.run(username, query, function handleOverviewsRows (err, rows) {
if (err) {
callback(err);
return;
}
var metadata = rows.reduce(function(metadata, row){
var metadata = rows.reduce(function (metadata, row) {
var table = row.base_table;
var schema = row._cdb_schema_name;
if ( !metadata[table] ) {
if (!metadata[table]) {
metadata[table] = {};
}
metadata[table][row.z] = { table: row.overview_table };

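For reference, the reduce above keys the overviews metadata by base table and zoom level; a hedged illustration of the resulting shape (the table and overview names are made up, since the real ones come from cartodb.CDB_Overviews):
// getOverviewsMetadata(username, sql, callback) is expected to yield something like:
const exampleOverviewsMetadata = {
    shops: {
        10: { table: '_vovw_10_shops' },
        12: { table: '_vovw_12_shops' }
    }
};
// The query also selects cartodb._cdb_schema_name(base_table), but how the
// schema is attached to this structure is elided by the hunk.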

@@ -3,14 +3,14 @@
var PSQL = require('cartodb-psql');
var _ = require('underscore');
const debug = require('debug')('cachechan');
const dbParamsFromReqParams = require('../utils/database-params');
function PgConnection(metadataBackend) {
function PgConnection (metadataBackend) {
this.metadataBackend = metadataBackend;
}
module.exports = PgConnection;
// Set db authentication parameters to those of the given username
//
// @param username the cartodb username, mapped to a database username
@@ -21,7 +21,7 @@ module.exports = PgConnection;
//
// @param callback function(err)
//
PgConnection.prototype.setDBAuth = function(username, params, apikeyType, callback) {
PgConnection.prototype.setDBAuth = function (username, params, apikeyType, callback) {
if (apikeyType === 'master') {
this.metadataBackend.getMasterApikey(username, (err, apikey) => {
if (err) {
@@ -36,7 +36,7 @@ PgConnection.prototype.setDBAuth = function(username, params, apikeyType, callba
return callback();
});
} else if (apikeyType === 'regular') { //Actually it can be any type of api key
} else if (apikeyType === 'regular') { // Actually it can be any type of api key
this.metadataBackend.getApikey(username, params.api_key, (err, apikey) => {
if (err) {
if (isNameNotFoundError(err)) {
@@ -70,10 +70,9 @@ PgConnection.prototype.setDBAuth = function(username, params, apikeyType, callba
};
function isNameNotFoundError (err) {
return err.message && -1 !== err.message.indexOf('name not found');
return err.message && err.message.indexOf('name not found') !== -1;
}
// Set db connection parameters to those for the given username
//
// @param dbowner cartodb username of database owner,
@@ -85,7 +84,7 @@ function isNameNotFoundError (err) {
//
// @param callback function(err)
//
PgConnection.prototype.setDBConn = function(dbowner, params, callback) {
PgConnection.prototype.setDBConn = function (dbowner, params, callback) {
_.defaults(params, {
// dbuser: global.environment.postgres.user,
// dbpassword: global.environment.postgres.password,
@@ -117,25 +116,18 @@ PgConnection.prototype.setDBConn = function(dbowner, params, callback) {
* @param {Function} callback function({Error}, {PSQL})
*/
PgConnection.prototype.getConnection = function(username, callback) {
debug("getConn1");
PgConnection.prototype.getConnection = function (username, callback) {
debug('getConn1');
this.getDatabaseParams(username, (err, databaseParams) => {
if (err) {
return callback(err);
}
return callback(err, new PSQL({
user: databaseParams.dbuser,
pass: databaseParams.dbpass,
host: databaseParams.dbhost,
port: databaseParams.dbport,
dbname: databaseParams.dbname
}));
return callback(err, new PSQL(dbParamsFromReqParams(databaseParams)));
});
};
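A short usage sketch of the connection helper above; the metadataBackend wiring and the query are illustrative, and the query call follows cartodb-psql's callback style:
// `metadataBackend` is assumed to be the same object injected elsewhere in this
// codebase (it must expose getMasterApikey/getApikey as used by setDBAuth above).
const pgConnection = new PgConnection(metadataBackend);
pgConnection.getConnection('alice', (err, psql) => {
    if (err) {
        return console.error('could not build a connection for the user', err);
    }
    // `psql` is a cartodb-psql instance built from dbParamsFromReqParams(databaseParams).
    psql.query('SELECT 1 AS ok', (err, result) => {
        if (err) {
            return console.error(err);
        }
        console.log(result.rows); // => [ { ok: 1 } ]
    });
});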
PgConnection.prototype.getDatabaseParams = function(username, callback) {
PgConnection.prototype.getDatabaseParams = function (username, callback) {
const databaseParams = {};
this.setDBAuth(username, databaseParams, 'master', err => {

Some files were not shown because too many files have changed in this diff.