Use env vars and fix tests
.github/workflows/main.yml (new file, 55 lines)
@@ -0,0 +1,55 @@
name: continuous integration
on:
  pull_request:
    paths-ignore:
      - 'LICENSE'
      - 'README**'
      - 'HOW_TO_RELEASE**'
      - 'LOGGING**'

env:
  GCLOUD_VERSION: '306.0.0'
  ARTIFACTS_PROJECT_ID: cartodb-on-gcp-main-artifacts

jobs:
  build-node:
    runs-on: ubuntu-18.04
    timeout-minutes: 10

    steps:
      - uses: actions/checkout@v2

      - name: Build image
        # we tag with "latest" but we don't push it on purpose; it is used as the base for the testing image
        run: |
          echo ${GITHUB_SHA::7}
          echo ${GITHUB_REF##*/}
          docker build -t gcr.io/$ARTIFACTS_PROJECT_ID/windshaft:latest -t gcr.io/$ARTIFACTS_PROJECT_ID/windshaft:${GITHUB_REF##*/} -t gcr.io/$ARTIFACTS_PROJECT_ID/windshaft:${GITHUB_SHA::7} -t gcr.io/$ARTIFACTS_PROJECT_ID/windshaft:${GITHUB_REF##*/}--${GITHUB_SHA::7} .

      - name: Build testing image
        # uses the "latest" image from the previous step and adds the testing bits on top
        run: |
          docker build -t gcr.io/$ARTIFACTS_PROJECT_ID/windshaft-test:latest -f Dockerfile.test .

      - name: Setup gcloud authentication
        uses: google-github-actions/setup-gcloud@master
        with:
          version: ${{env.GCLOUD_VERSION}}
          service_account_key: ${{ secrets.ARTIFACTS_GCLOUD_ACCOUNT_BASE64 }}

      - name: Configure docker and pull images
        # we pull the images manually; doing it in the next step through docker-compose fails because openssl is missing
        run: |
          gcloud auth configure-docker
          docker pull gcr.io/cartodb-on-gcp-main-artifacts/postgres:latest
          docker pull gcr.io/cartodb-on-gcp-main-artifacts/redis:latest

      - name: Run tests inside container
        run: docker-compose -f ci/docker-compose.yml run windshaft-tests

      - name: Upload image
        run: |
          docker push gcr.io/$ARTIFACTS_PROJECT_ID/windshaft:${GITHUB_REF##*/}
          docker push gcr.io/$ARTIFACTS_PROJECT_ID/windshaft:${GITHUB_SHA::7}
          docker push gcr.io/$ARTIFACTS_PROJECT_ID/windshaft:${GITHUB_REF##*/}--${GITHUB_SHA::7}
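For a hypothetical build of commit `abc1234` on branch `feature-x`, the tagging scheme in the workflow above would produce image tags like the following (illustrative values only; `latest` is built but not pushed, the other three are pushed in the upload step):

```shell
# illustrative only
gcr.io/cartodb-on-gcp-main-artifacts/windshaft:latest              # base for the testing image, not pushed
gcr.io/cartodb-on-gcp-main-artifacts/windshaft:feature-x           # ${GITHUB_REF##*/}
gcr.io/cartodb-on-gcp-main-artifacts/windshaft:abc1234             # ${GITHUB_SHA::7}
gcr.io/cartodb-on-gcp-main-artifacts/windshaft:feature-x--abc1234  # ${GITHUB_REF##*/}--${GITHUB_SHA::7}
```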
.github/workflows/master.yml (new file, 40 lines)
@@ -0,0 +1,40 @@
# In this workflow we don't run the tests: we only build the image, tag it (including "latest") and upload it.
# The tests are not run here because they run on each pull request, and branch protection forces the branch to be
# up to date before merging, so the tests always run against the latest code.

name: master build image
on:
  push:
    branches:
      - master

env:
  GCLOUD_VERSION: '306.0.0'
  ARTIFACTS_PROJECT_ID: cartodb-on-gcp-main-artifacts

jobs:
  build-node:
    runs-on: ubuntu-18.04
    timeout-minutes: 5

    steps:
      - uses: actions/checkout@v2

      - name: Build image
        run: |
          echo ${GITHUB_SHA::7}
          echo ${GITHUB_REF##*/}
          docker build -t gcr.io/$ARTIFACTS_PROJECT_ID/windshaft:latest -t gcr.io/$ARTIFACTS_PROJECT_ID/windshaft:${GITHUB_REF##*/} -t gcr.io/$ARTIFACTS_PROJECT_ID/windshaft:${GITHUB_SHA::7} .

      - name: Setup gcloud authentication
        uses: google-github-actions/setup-gcloud@master
        with:
          version: ${{env.GCLOUD_VERSION}}
          service_account_key: ${{ secrets.ARTIFACTS_GCLOUD_ACCOUNT_BASE64 }}

      - name: Upload image
        run: |
          docker push gcr.io/$ARTIFACTS_PROJECT_ID/windshaft:${GITHUB_REF##*/}
          docker push gcr.io/$ARTIFACTS_PROJECT_ID/windshaft:${GITHUB_SHA::7}
          docker push gcr.io/$ARTIFACTS_PROJECT_ID/windshaft:latest
.gitignore
@@ -12,6 +12,5 @@ redis.pid
 coverage/
 .DS_Store
 .nyc_output
-Dockerfile*
 build_resources/
 .dockerignore
.travis.yml (deleted, 63 lines)
@@ -1,63 +0,0 @@
sudo: false

language: node_js

services:
  - redis-server

env:
  global:
    - PGUSER=postgres
    - PGDATABASE=postgres
    - PGOPTIONS='-c client_min_messages=NOTICE'

jobs:
  include:
    - env: POSTGRESQL_VERSION="10" POSTGIS_VERSION="2.4"
      dist: xenial
    - env: POSTGRESQL_VERSION="11" POSTGIS_VERSION="2.5"
      dist: xenial
    - env: POSTGRESQL_VERSION="12" POSTGIS_VERSION="3"
      dist: bionic

node_js:
  - "12"

install:
  - npm ci

script:
  # Ensure dev dependencies are installed
  - sudo apt-get install -y libpangocairo-1.0-0 libpango1.0-dev pkg-config

  # Remove old packages
  - sudo apt-get remove postgresql-$POSTGRESQL_VERSION postgresql-client-$POSTGRESQL_VERSION postgresql-server-dev-$POSTGRESQL_VERSION postgresql-common postgresql-client-common postgresql-$POSTGRESQL_VERSION-postgis-$POSTGIS_VERSION postgresql-$POSTGRESQL_VERSION-postgis-$POSTGIS_VERSION-scripts postgis

  # Install CARTO packages
  - if [[ $POSTGRESQL_VERSION == '10' ]]; then sudo add-apt-repository -y ppa:cartodb/gis; fi;
  - sudo add-apt-repository -y ppa:cartodb/postgresql-$POSTGRESQL_VERSION
  - sudo apt-get -q update
  # We use -t $TRAVIS_DIST to give preference to our ppa's (called ${dist}) over the
  # pgdg repos (called ${dist}-pgdg). Nasty, but it works.
  - sudo apt-get install -y --allow-unauthenticated --no-install-recommends --no-install-suggests postgresql-$POSTGRESQL_VERSION postgresql-client-$POSTGRESQL_VERSION postgresql-server-dev-$POSTGRESQL_VERSION postgresql-common postgresql-$POSTGRESQL_VERSION-postgis-$POSTGIS_VERSION postgresql-$POSTGRESQL_VERSION-postgis-$POSTGIS_VERSION-scripts postgis -t $TRAVIS_DIST
  # For pre-12, install plpython2. For PG12 install plpython3
  - if [[ $POSTGRESQL_VERSION != '12' ]]; then sudo apt-get install -y postgresql-plpython-$POSTGRESQL_VERSION python python-redis -t $TRAVIS_DIST; else sudo apt-get install -y postgresql-plpython3-12 python3 python3-redis -t $TRAVIS_DIST; fi;

  # Remove old clusters and create the new one
  - for i in $(pg_lsclusters | tail -n +2 | awk '{print $1}'); do sudo pg_dropcluster --stop $i main; done;
  - sudo rm -rf /etc/postgresql/$POSTGRESQL_VERSION /var/lib/postgresql/$POSTGRESQL_VERSION /var/ramfs/postgresql/$POSTGRESQL_VERSION
  - sudo pg_createcluster -u postgres $POSTGRESQL_VERSION main --start -p 5432 -- --auth-local trust

  - git clone https://github.com/CartoDB/cartodb-postgresql.git
  - cd cartodb-postgresql && make && sudo make install && cd ..

  - createdb template_postgis
  - psql -c "CREATE EXTENSION postgis" template_postgis
  - if [[ $POSTGRESQL_VERSION == '12' ]]; then psql -c "CREATE EXTENSION postgis_raster" template_postgis; fi;

  - cp config/environments/test.js.example config/environments/test.js
  - npm test

after_failure:
  - pg_lsclusters
  - sudo cat /var/log/postgresql/postgresql-$POSTGRESQL_VERSION-main.log
README.md
@@ -13,8 +13,8 @@ The [`CARTO Maps API`](https://carto.com/developers/maps-api/) tiler. It extends
 
 Requirements:
 
-* [`Node 10.x (npm 6.x)`](https://nodejs.org/dist/latest-v10.x/)
-* [`PostgreSQL >= 10.0`](https://www.postgresql.org/download/)
+* [`Node 12.x`](https://nodejs.org/dist/latest-v10.x/)
+* [`PostgreSQL >= 11.0`](https://www.postgresql.org/download/)
 * [`PostGIS >= 2.4`](https://postgis.net/install/)
 * [`CARTO Postgres Extension >= 0.24.1`](https://github.com/CartoDB/cartodb-postgresql)
 * [`Redis >= 4`](https://redis.io/download)
@@ -45,7 +45,11 @@ $ npm install
 
 ### Run
 
-Create the `./config/environments/<env>.js` file (there are `.example` files to start from). Look at `./lib/server-options.js` for more on config.
+You can inject the configuration through environment variables at run time. Check the file `./config/environments/config.js` to see the ones you have available.
+
+While the migration to the new environment-based configuration is in progress, you can still use the old method of copying a config file. To enable the environment-variable-based configuration you need to pass `CARTO_WINDSHAFT_ENV_BASED_CONF=true`. You can use the docker image to run it.
+
+Old way:
 
 ```shell
 $ node app.js <env>
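As an illustration of the new path (a sketch, not part of the diff; the variable names come from `app.js` and `config.js`, the values are examples):

```shell
$ CARTO_WINDSHAFT_ENV_BASED_CONF=true \
  CARTO_WINDSHAFT_POSTGRES_HOST=localhost \
  CARTO_WINDSHAFT_REDIS_HOST=localhost \
  node app.js production
```

With `CARTO_WINDSHAFT_ENV_BASED_CONF` set, `app.js` resolves `./config/environments/config.js` instead of `./config/environments/<env>.js`, and that file reads the `CARTO_WINDSHAFT_*` variables at startup.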
@@ -55,10 +59,26 @@ Where `<env>` is the name of a configuration file under `./config/environments/`
 
 ### Test
 
+You can easily run the tests against the dependencies from the `dev-env`. To do so, you need to build the test docker image:
+
 ```shell
-$ npm test
+$ docker-compose build
 ```
 
+Then you can run the tests like:
+
+```shell
+$ docker-compose run windshaft-tests
+```
+
+It will mount your code inside a volume. In case you want to play and run `npm test` or something else you can do:
+
+```shell
+$ docker-compose run --entrypoint bash windshaft-tests
+```
+
+So you will have a bash shell inside the test container, with the code from your host.
+
 ### Coverage
 
 ```shell
app.js
@@ -25,7 +25,12 @@ const argv = require('yargs')
     .argv;
 
 const environmentArg = argv._[0] || process.env.NODE_ENV || 'development';
-const configurationFile = path.resolve(argv.config || `./config/environments/${environmentArg}.js`);
+let configFileName = environmentArg;
+if (process.env.CARTO_WINDSHAFT_ENV_BASED_CONF) {
+    // we override the file with the one with env vars
+    configFileName = 'config';
+}
+const configurationFile = path.resolve(argv.config || `./config/environments/${configFileName}.js`);
 
 global.environment = require(configurationFile);
 process.env.NODE_ENV = argv._[0] || process.env.NODE_ENV || global.environment.environment;
config/environments/config.js (new file, 411 lines)
@@ -0,0 +1,411 @@
var config = {
    environment: process.env.NODE_ENV,
    port: 8181,
    host: null, // null on purpose so it listens on whatever address docker assigns
    // Size of the threadpool which can be used to run user code and get notified in the loop thread
    // Its default size is 4, but it can be changed at startup time (the absolute maximum is 128).
    // See http://docs.libuv.org/en/latest/threadpool.html
    uv_threadpool_size: undefined,
    // Time in milliseconds to force GC cycle.
    // Disable by using <=0 value.
    gc_interval: 10000,
    // Regular expression pattern to extract username
    // from hostname. Must have a single grabbing block.
    user_from_host: process.env.CARTO_WINDSHAFT_USER_FROM_HOST || '^(.*)\\.cartodb\\.com$',

    // Base URLs for the APIs
    //
    // See https://github.com/CartoDB/Windshaft-cartodb/wiki/Unified-Map-API
    //
    // Note: each entry corresponds to an express router.
    // You must define at least one path. However, middlewares are optional.
    routes: {
        api: [{
            paths: [
                '/api/v1',
                '/user/:user/api/v1'
            ],
            // Optional: attach middlewares at the beginning of the router
            // to perform custom operations.
            middlewares: [
                function noop () {
                    return function noopMiddleware (req, res, next) {
                        next();
                    };
                }
            ],
            // Base url for the Detached Maps API
            // "/api/v1/map" is the new API,
            map: [{
                paths: [
                    '/map'
                ],
                middlewares: [] // Optional
            }],
            // Base url for the Templated Maps API
            // "/api/v1/map/named" is the new API,
            template: [{
                paths: [
                    '/map/named'
                ],
                middlewares: [] // Optional
            }]
        }]
    },

    // Resource URLs expose endpoints to request/retrieve metadata associated to Maps: dataviews, analysis node status.
    //
    // These URLs depend on how `routes` and `user_from_host` are configured: the application can be
    // configured to accept requests with the {user} in the host header or in the request path.
    // It also might depend on the configured cdn_url via `serverMetadata.cdn_url`.
    //
    // This template makes endpoint generation more flexible; it exposes the following params:
    // 1. {{=it.cdn_url}}: will be used when `serverMetadata.cdn_url` exists.
    // 2. {{=it.user}}: will use the username as extracted from `user_from_host` or `routes`.
    // 3. {{=it.port}}: will use the `port` from this very same configuration file.
    resources_url_templates: {
        http: 'http://{{=it.cdn_url}}/{{=it.user}}/api/v1/map',
        https: 'https://{{=it.cdn_url}}/{{=it.user}}/api/v1/map'
    },
    // Specify the maximum length of the queue of pending connections for the HTTP server.
    // The actual length will be determined by the OS through sysctl settings such as tcp_max_syn_backlog and somaxconn on Linux.
    // The default value of this parameter is 511 (not 512).
    // See: https://nodejs.org/docs/latest/api/net.html#net_server_listen
    maxConnections: 128,
    // Maximum number of templates per user. Unlimited by default.
    maxUserTemplates: 1024,
    // Seconds since "last creation" before a detached
    // or template instance map expires. Or: how long do you want
    // to be able to navigate the map without a reload?
    // Defaults to 7200 (2 hours)
    mapConfigTTL: 7200,
    // idle socket timeout, in milliseconds
    socket_timeout: 600000,
    enable_cors: true,
    cache_enabled: true,
    // Templated database username for authorized user
    // Supported labels: 'user_id' (read from redis)
    postgres_auth_user: process.env.CARTO_WINDSHAFT_DB_USER || 'cartodb_user_<%= user_id %>',
    // Templated database password for authorized user
    // Supported labels: 'user_id', 'user_password' (both read from redis)
    postgres_auth_pass: '<%= user_password %>',
    postgres: {
        user: 'publicuser',
        password: 'public',
        host: process.env.CARTO_WINDSHAFT_POSTGRES_HOST || 'localhost',
        port: process.env.CARTO_WINDSHAFT_POSTGRES_PORT || 5432,
        pool: {
            // maximum number of resources to create at any given time
            size: 16,
            // max milliseconds a resource can go unused before it should be destroyed
            idleTimeout: 3000,
            // frequency to check for idle resources
            reapInterval: 1000
        }
    },
    mapnik_version: undefined,
    mapnik_tile_format: 'png8:m=h',
    statsd: {
        host: process.env.CARTO_WINDSHAFT_STATSD_HOST || 'localhost',
        port: 8125,
        prefix: process.env.CARTO_WINDSHAFT_STATSD_PREFIX || ':host.', // could be hostname, better not containing dots
        cacheDns: true
        // supports all allowed node-statsd options
    },
    renderer: {
        // Milliseconds since last access before renderer cache item expires
        cache_ttl: 60000,
        statsInterval: 5000, // milliseconds between each report to statsd about number of renderers and mapnik pool status
        mvt: {
            // If enabled, MVTs will be generated with PostGIS directly
            // If disabled, MVTs will be generated with Mapnik MVT
            usePostGIS: true
        },
        mapnik: {
            // The size of the pool of internal mapnik backends
            // This pool size is per mapnik renderer created in Windshaft's RendererFactory
            // See https://github.com/CartoDB/Windshaft/blob/master/lib/windshaft/renderers/renderer_factory.js
            // Important: check the configuration of uv_threadpool_size to use a suitable value
            poolSize: 8,

            // The maximum number of waiting clients of the pool of internal mapnik backends
            // This maximum number is per mapnik renderer created in Windshaft's RendererFactory
            poolMaxWaitingClients: 64,

            // Whether grainstore will use a child process or not to transform CartoCSS into Mapnik XML.
            // This will prevent blocking the main thread.
            useCartocssWorkers: false,

            // Metatile is the number of tiles-per-side that are going
            // to be rendered at once. If all of them will be requested
            // we'd have saved time. If only one will be used, we'd have
            // wasted time.
            metatile: 2,

            // tilelive-mapnik uses an internal cache to store tiles/grids
            // generated when using metatile. These options allow tuning
            // the behaviour of that internal cache.
            metatileCache: {
                // Time an object must stay in the cache until it is removed
                ttl: 0,
                // Whether an object must be removed after the first hit
                // Usually you want to use `true` here when ttl>0.
                deleteOnHit: false
            },

            // Override metatile behaviour depending on the format
            formatMetatile: {
                png: 2,
                'grid.json': 1
            },

            // Buffer size is the thickness in pixels of a buffer
            // around the rendered (meta?)tile.
            //
            // This is important for labels and other markers that overlap tile boundaries.
            // Setting to 128 ensures no render artifacts.
            // 64 may have artifacts but is faster.
            // Less important if we can turn metatiling on.
            bufferSize: 64,

            // SQL queries will be wrapped with ST_SnapToGrid
            // Snapping all points of the geometry to a regular grid
            snapToGrid: false,

            // SQL queries will be wrapped with ST_ClipByBox2D
            // Returning the portion of a geometry falling within a rectangle
            // It will only work if snapToGrid is enabled
            clipByBox2d: true,

            postgis: {
                // Parameters to pass to datasource plugin of mapnik
                // See http://github.com/mapnik/mapnik/wiki/PostGIS
                user: 'publicuser',
                password: 'public',
                host: process.env.CARTO_WINDSHAFT_POSTGRES_HOST || '127.0.0.1',
                port: process.env.CARTO_WINDSHAFT_POSTGRES_PORT || 5432,
                extent: '-20037508.3,-20037508.3,20037508.3,20037508.3',
                // max number of rows to return when querying data, 0 means no limit
                row_limit: 65535,
                /*
                 * Set persist_connection to false if you want
                 * database connections to be closed on renderer
                 * expiration (1 minute after last use).
                 * Setting to true (the default) would never
                 * close any connection for the server's lifetime
                 */
                persist_connection: false,
                simplify_geometries: true,
                use_overviews: true, // use overviews to retrieve raster
                max_size: 500,
                twkb_encoding: true
            },

            limits: {
                // Time in milliseconds a render request can take before it fails, some notes:
                // - 0 means no render limit
                // - it considers metatiling, naive implementation: (render timeout) * (number of tiles in metatile)
                render: 0,
                // As the render request will finish even if timed out, whether it should be placed in the internal
                // cache or be fully discarded. When placed in the internal cache another attempt to retrieve
                // the same tile will result in an immediate response, however that will use a lot more application
                // memory. If we want to enforce this behaviour we have to implement a cache eviction policy for the
                // internal cache.
                cacheOnTimeout: true
            },

            // If enabled Mapnik will reuse the features retrieved from the database
            // instead of requesting them once per style inside a layer
            'cache-features': true,

            // Require metrics from the renderer
            metrics: false,

            // Options for markers attributes, ellipses and images caches
            markers_symbolizer_caches: {
                disabled: false
            }
        },
        http: {
            timeout: 2000, // the timeout in ms for a http tile request
            proxy: undefined, // the url for a proxy server
            whitelist: [ // the whitelist of urlTemplates that can be used
                '.*', // will enable any URL
                'http://{s}.example.com/{z}/{x}/{y}.png'
            ],
            // image to use as placeholder when urlTemplate is not in the whitelist
            // if provided the http renderer will use it instead of throwing an error
            fallbackImage: {
                type: 'fs', // 'fs' and 'url' supported
                src: __dirname + '/../../assets/default-placeholder.png'
            }
        },
        torque: {}
    },
    // anything analyses related
    analysis: {
        // batch configuration
        batch: {
            // Inline execution avoids the use of the SQL API as batch endpoint
            // When set to true it will run all analysis queries in series, with a direct connection to the DB
            // This might be useful for:
            // - testing
            // - running a standalone server without any dependency on external services
            inlineExecution: false,
            // where the SQL API is running, it will use a custom Host header to specify the username.
            endpoint: 'http://127.0.0.1:8080/api/v2/sql/job',
            // the template to use for adding the host header in the batch api requests
            hostHeaderTemplate: '{{=it.username}}.localhost.lan'
        },
        // Define max execution time in ms for analyses or tags
        // If analysis or tag are not found in redis these values will be used as default.
        limits: {
            moran: { timeout: 120000, maxNumberOfRows: 1e5 },
            cpu2x: { timeout: 60000 }
        }
    },
    millstone: {
        // Needs to be writable by server user
        cache_basedir: process.env.CARTO_WINDSHAFT_TILE_CACHE || '/home/ubuntu/tile_assets/'
    },
    redis: {
        host: process.env.CARTO_WINDSHAFT_REDIS_HOST || '127.0.0.1',
        port: process.env.CARTO_WINDSHAFT_REDIS_PORT || 6379,
        // Max number of connections in each pool.
        // Users will be put on a queue when the limit is hit.
        // Set to maxConnection to have no possible queues.
        // There are currently 2 pools involved in serving
        // windshaft-cartodb requests so multiply this number
        // by 2 to know how many possible connections will be
        // kept open by the server. The default is 50.
        max: 50,
        returnToHead: true, // defines the behaviour of the pool: false => queue, true => stack
        idleTimeoutMillis: 30000, // idle time before dropping connection
        reapIntervalMillis: 1000, // time between cleanups
        slowQueries: {
            log: true,
            elapsedThreshold: 200
        },
        slowPool: {
            log: true, // whether a slow acquire must be logged or not
            elapsedThreshold: 25 // the threshold to determine whether a slow acquire must be reported or not
        },
        emitter: {
            statusInterval: 5000 // time, in ms, between each status report emitted from the pool; status is sent to statsd
        },
        unwatchOnRelease: false, // Send unwatch on release, see http://github.com/CartoDB/Windshaft-cartodb/issues/161
        noReadyCheck: true // Check `no_ready_check` at https://github.com/mranney/node_redis/tree/v0.12.1#overloading
    },
    // For more details about these options check https://nodejs.org/api/http.html#http_new_agent_options
    httpAgent: {
        keepAlive: true,
        keepAliveMsecs: 1000,
        maxSockets: 25,
        maxFreeSockets: 256
    },
    varnish: {
        host: process.env.CARTO_WINDSHAFT_VARNISH_PORT || 'localhost',
        port: process.env.CARTO_WINDSHAFT_VARNISH_PORT || 6082, // the port for the telnet interface where varnish is listening
        http_port: 6081, // the port for the HTTP interface where varnish is listening
        purge_enabled: process.env.CARTO_WINDSHAFT_VARNISH_PURGE_ENABLED === 'true' || false, // whether the purge/invalidation mechanism is enabled in varnish or not
        secret: 'xxx',
        ttl: 86400,
        fallbackTtl: 300,
        layergroupTtl: 86400 // the max-age for cache-control header in layergroup responses
    },
    // this [OPTIONAL] configuration enables invalidating by surrogate key in fastly
    fastly: {
        // whether the invalidation is enabled or not
        enabled: false,
        // the fastly api key
        apiKey: 'wadus_api_key',
        // the service that will get surrogate key invalidation
        serviceId: 'wadus_service_id'
    },
    // If useProfiler is true every response will be served with an
    // X-Tiler-Profile header containing elapsed timing for various
    // steps taken for producing the response.
    useProfiler: false,
    serverMetadata: {
        cdn_url: {
            http: 'api.cartocdn.com',
            https: 'cartocdn.global.ssl.fastly.net'
        }
    },
    // Settings for the health check available at /health
    health: {
        enabled: process.env.CARTO_WINDSHAFT_HEALTH_ENABLED === 'true' || false,
        username: 'localhost',
        z: 0,
        x: 0,
        y: 0
    },
    disabled_file: 'pids/disabled',

    // Use this as a feature flags enabling/disabling mechanism
    enabledFeatures: {
        // whether it should intercept tile render errors and act based on them, enabled by default.
        onTileErrorStrategy: false,
        // whether the affected tables for a given SQL must be queried directly against postgresql or via the SQL API
        cdbQueryTablesFromPostgres: true,
        // whether stats & metadata for each layer are available in the mapconfig
        layerStats: process.env.CARTO_WINDSHAFT_LAYERSTATS_ENABLED === 'true' || false,
        // whether it should rate limit endpoints (global configuration)
        rateLimitsEnabled: false,
        // whether it should rate limit one or more endpoints (only if rateLimitsEnabled = true)
        rateLimitsByEndpoint: {
            anonymous: false,
            static: false,
            static_named: false,
            dataview: false,
            dataview_search: false,
            analysis: false,
            analysis_catalog: false,
            tile: false,
            attributes: false,
            named_list: false,
            named_create: false,
            named_get: false,
            named: false,
            named_update: false,
            named_delete: false,
            named_tiles: false
        }
    },
    pubSubMetrics: {
        enabled: process.env.CARTO_WINDSHAFT_METRICS_ENABLED === 'true' || false,
        project_id: process.env.CARTO_WINDSHAFT_METRICS_PROJECT_ID || 'avid-wavelet-844',
        credentials: '',
        topic: process.env.CARTO_WINDSHAFT_METRICS_PROJECT_ID || 'raw-metric-events'
    }
};

// override some defaults for tests
if (process.env.NODE_ENV === 'test') {
    config.user_from_host = '(.*)';
    config.postgres_auth_pass = 'test_windshaft_cartodb_user_<%= user_id %>_pass';
    config.millstone.cache_basedir = '/tmp/tile_assets';
    config.postgres.user = 'test_windshaft_publicuser';
    config.resources_url_templates = {
        http: 'http://{{=it.user}}.localhost.lan:{{=it.port}}/api/v1/map',
        https: 'https://{{=it.user}}.localhost.lan:{{=it.port}}/api/v1/map'
    };
    config.cache_enabled = false;
    config.postgres_auth_user = 'test_windshaft_cartodb_user_<%= user_id %>';
    config.renderer.mapnik.postgis.twkb_encoding = false;
    config.renderer.mapnik['cache-features'] = false;
    config.renderer.http.whitelist = [ // the whitelist of urlTemplates that can be used
        '.*', // will enable any URL
        'http://{s}.example.com/{z}/{x}/{y}.png',
        // for testing purposes
        'http://{s}.basemaps.cartocdn.com/dark_nolabels/{z}/{x}/{y}.png'
    ];
    config.analysis.batch.inlineExecution = true;
    config.redis.idleTimeoutMillis = 1;
    config.redis.reapIntervalMillis = 1;
    config.varnish.purge_enabled = false;
    config.health.enabled = false;
    config.enabledFeatures.layerStats = true;
}

module.exports = config;
(deleted file, 92 lines; filename not shown in this view)
@@ -1,92 +0,0 @@
FROM ubuntu:xenial

# Use UTF8 to avoid encoding problems with pgsql
ENV LANG C.UTF-8
ENV NPROCS 1
ENV JOBS 1
ENV CXX g++-4.9
ENV PGUSER postgres

# Add external repos
RUN set -ex \
    && apt-get update \
    && apt-get install -y \
        curl \
        software-properties-common \
        locales \
    && add-apt-repository -y ppa:ubuntu-toolchain-r/test \
    && add-apt-repository -y ppa:cartodb/postgresql-10 \
    && add-apt-repository -y ppa:cartodb/gis \
    && curl -o- https://raw.githubusercontent.com/creationix/nvm/v0.34.0/install.sh | bash \
    && . ~/.nvm/nvm.sh \
    && locale-gen en_US.UTF-8 \
    && update-locale LANG=en_US.UTF-8

RUN set -ex \
    && apt-get update \
    && apt-get install -y \
        g++-4.9 \
        gcc-4.9 \
        git \
        libcairo2-dev \
        libgdal-dev \
        libgdal1i \
        libgdal20 \
        libgeos-dev \
        libgif-dev \
        libjpeg8-dev \
        libjson-c-dev \
        libpango1.0-dev \
        libpixman-1-dev \
        libproj-dev \
        libprotobuf-c-dev \
        libxml2-dev \
        gdal-bin \
        make \
        nodejs \
        protobuf-c-compiler \
        pkg-config \
        wget \
        zip \
        postgresql-10 \
        postgresql-10-plproxy \
        postgis=2.4.4.6+carto-1 \
        postgresql-10-postgis-2.4=2.4.4.6+carto-1 \
        postgresql-10-postgis-2.4-scripts=2.4.4.6+carto-1 \
        postgresql-10-postgis-scripts=2.4.4.6+carto-1 \
        postgresql-client-10 \
        postgresql-client-common \
        postgresql-common \
        postgresql-contrib \
        postgresql-plpython-10 \
        postgresql-server-dev-10 \
    && wget http://download.redis.io/releases/redis-4.0.8.tar.gz \
    && tar xvzf redis-4.0.8.tar.gz \
    && cd redis-4.0.8 \
    && make \
    && make install \
    && cd .. \
    && rm redis-4.0.8.tar.gz \
    && rm -R redis-4.0.8 \
    && apt-get purge -y wget protobuf-c-compiler \
    && apt-get autoremove -y

# Configure PostgreSQL
RUN set -ex \
    && echo "listen_addresses='*'" >> /etc/postgresql/10/main/postgresql.conf \
    && echo "local all all trust" > /etc/postgresql/10/main/pg_hba.conf \
    && echo "host all all 0.0.0.0/0 trust" >> /etc/postgresql/10/main/pg_hba.conf \
    && echo "host all all ::1/128 trust" >> /etc/postgresql/10/main/pg_hba.conf \
    && /etc/init.d/postgresql start \
    && createdb template_postgis \
    && createuser publicuser \
    && psql -c "CREATE EXTENSION postgis" template_postgis \
    && /etc/init.d/postgresql stop

WORKDIR /srv
EXPOSE 5858

COPY ./scripts/nodejs-install.sh /src/nodejs-install.sh
RUN chmod 777 /src/nodejs-install.sh
CMD /src/nodejs-install.sh
(deleted file, 85 lines; filename not shown in this view)
@@ -1,85 +0,0 @@
FROM ubuntu:xenial

# Use UTF8 to avoid encoding problems with pgsql
ENV LANG C.UTF-8
ENV NPROCS 1
ENV JOBS 1
ENV CXX g++-4.9
ENV PGUSER postgres

# Add external repos
RUN set -ex \
    && apt-get update \
    && apt-get install -y \
        curl \
        software-properties-common \
        locales \
    && add-apt-repository -y ppa:ubuntu-toolchain-r/test \
    && add-apt-repository -y ppa:cartodb/postgresql-11 \
    && add-apt-repository -y ppa:cartodb/redis-next \
    && curl -o- https://raw.githubusercontent.com/creationix/nvm/v0.34.0/install.sh | bash \
    && . ~/.nvm/nvm.sh \
    && locale-gen en_US.UTF-8 \
    && update-locale LANG=en_US.UTF-8

RUN set -ex \
    && apt-get update \
    && apt-get install -y \
        g++-4.9 \
        gcc-4.9 \
        git \
        libcairo2-dev \
        libgdal-dev=2.3.2+dfsg-2build2~carto1 \
        libgdal20=2.3.2+dfsg-2build2~carto1 \
        libgeos-dev=3.7.1~carto1 \
        libgif-dev \
        libjpeg8-dev \
        libjson-c-dev \
        libpango1.0-dev \
        libpixman-1-dev \
        libproj-dev \
        libprotobuf-c-dev \
        libxml2-dev \
        gdal-bin=2.3.2+dfsg-2build2~carto1 \
        make \
        nodejs \
        protobuf-c-compiler \
        pkg-config \
        wget \
        zip \
        libopenscenegraph100v5 \
        libsfcgal1 \
        liblwgeom-2.5.0=2.5.1.4+carto-1 \
        postgresql-11 \
        postgresql-11-plproxy \
        postgis=2.5.1.4+carto-1 \
        postgresql-11-postgis-2.5=2.5.1.4+carto-1 \
        postgresql-11-postgis-2.5-scripts=2.5.1.4+carto-1 \
        postgresql-client-11 \
        postgresql-client-common \
        postgresql-common \
        postgresql-contrib \
        postgresql-plpython-11 \
        postgresql-server-dev-11 \
        redis=5:4.0.9-1carto1~xenial1 \
    && apt-get purge -y wget protobuf-c-compiler \
    && apt-get autoremove -y

# Configure PostgreSQL
RUN set -ex \
    && echo "listen_addresses='*'" >> /etc/postgresql/11/main/postgresql.conf \
    && echo "local all all trust" > /etc/postgresql/11/main/pg_hba.conf \
    && echo "host all all 0.0.0.0/0 trust" >> /etc/postgresql/11/main/pg_hba.conf \
    && echo "host all all ::1/128 trust" >> /etc/postgresql/11/main/pg_hba.conf \
    && /etc/init.d/postgresql start \
    && createdb template_postgis \
    && createuser publicuser \
    && psql -c "CREATE EXTENSION postgis" template_postgis \
    && /etc/init.d/postgresql stop

WORKDIR /srv
EXPOSE 5858

COPY ./scripts/nodejs-install.sh /src/nodejs-install.sh
RUN chmod 777 /src/nodejs-install.sh
CMD /src/nodejs-install.sh
(deleted file, 33 lines; filename not shown in this view)
@@ -1,33 +0,0 @@
# Testing with Docker

Before running the tests with docker, you'll need Docker installed and the docker image downloaded.

## Install docker

```shell
$ sudo apt install docker.io && sudo usermod -aG docker $(whoami)
```

## Download image

```shell
docker pull carto/IMAGE
```

## Carto account

* `https://hub.docker.com/r/carto/`

## Update image

* Edit the docker image file
* Build image:
  * `docker build -t carto/IMAGE -f docker/DOCKER_FILE docker/`

* Upload to docker hub:
  * Login into docker hub:
    * `docker login`
  * Create tag:
    * `docker tag carto/IMAGE carto/IMAGE`
  * Upload:
    * `docker push carto/IMAGE`
(deleted file, 13 lines; filename not shown in this view)
@@ -1,13 +0,0 @@
#!/bin/bash

export NVM_DIR="$HOME/.nvm"
[ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh"

if [ -z $NODEJS_VERSION ]; then
    NODEJS_VERSION="10"
    NODEJS_VERSION_OPTIONS="--lts"
fi

nvm install $NODEJS_VERSION $NODEJS_VERSION_OPTIONS
nvm alias default $NODEJS_VERSION
nvm use default
(deleted file, 14 lines; filename not shown in this view)
@@ -1,14 +0,0 @@
#!/bin/bash

/etc/init.d/postgresql start

source /src/nodejs-install.sh

# Install cartodb-postgresql extension
git clone https://github.com/CartoDB/cartodb-postgresql.git
cd cartodb-postgresql && make && make install && cd ..

cp config/environments/test.js.example config/environments/test.js

npm ci
npm test
@@ -199,8 +199,8 @@ module.exports = class ApiRouter {
         const apiRouter = router({ mergeParams: true });
         const { paths, middlewares = [] } = route;
 
-        apiRouter.use(user(this.metadataBackend));
         apiRouter.use(initLogger({ logger: this.serverOptions.logger }));
+        apiRouter.use(user(this.metadataBackend));
         apiRouter.use(profiler({
             enabled: this.serverOptions.useProfiler,
             statsClient: global.statsClient
package-lock.json (generated, 822 changed lines — file diff suppressed because it is too large)
@@ -77,6 +77,7 @@
     "mocha": "^7.2.0",
     "moment": "2.22.1",
     "nock": "9.2.6",
+    "nodemon": "^2.0.6",
     "nyc": "^14.1.1",
     "pino-pretty": "^4.0.0",
     "redis": "2.8.0",
@@ -94,8 +95,7 @@
     "posttest": "npm run test:teardown",
     "test:teardown": "NODE_ENV=test node test teardown",
     "cover": "nyc --reporter=lcov npm test",
-    "test:docker": "docker run -e \"NODEJS_VERSION=$NODE_VERSION\" -v `pwd`:/srv $DOCKER_IMAGE bash docker/scripts/test-setup.sh && docker ps --filter status=dead --filter status=exited -aq | xargs docker rm -v",
-    "docker:bash": "docker run -it -v `pwd`:/srv $DOCKER_IMAGE bash"
+    "dev": "NODE_ENV=development nodemon app.js"
   },
   "engines": {
     "node": "^12.16.3",
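The removed `test:docker` and `docker:bash` scripts are superseded by the docker-compose flow documented in the README; the added `dev` script is a small convenience wrapper around nodemon, for example:

```shell
# starts the server with NODE_ENV=development and restarts it on file changes
$ npm run dev
```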
@@ -512,7 +512,11 @@ var QueryTables = require('cartodb-query-tables').queryTables;
         ]
     };
     var statskey = 'user:localhost:mapviews';
-    var redisStatsClient = redis.createClient(global.environment.redis.port);
+    var redisStatsClient = redis.createClient(
+        {
+            port: global.environment.redis.port,
+            host: global.environment.redis.host
+        });
     var expectedToken; // will be set on first post and checked on second
     var now = strftime('%Y%m%d', new Date());
     step(
@@ -164,7 +164,11 @@ describe('rate limit', function () {
         global.environment.enabledFeatures.rateLimitsEnabled = true;
         global.environment.enabledFeatures.rateLimitsByEndpoint.anonymous = true;
 
-        redisClient = redis.createClient(global.environment.redis.port);
+        redisClient = redis.createClient(
+            {
+                port: global.environment.redis.port,
+                host: global.environment.redis.host
+            });
         testClient = new TestClient(createMapConfig(), 1234);
     });
 
@@ -226,7 +230,11 @@ describe('rate limit middleware', function () {
         });
         rateLimit = rateLimitMiddleware(userLimitsApi, RATE_LIMIT_ENDPOINTS_GROUPS.ANONYMOUS);
 
-        redisClient = redis.createClient(global.environment.redis.port);
+        redisClient = redis.createClient(
+            {
+                port: global.environment.redis.port,
+                host: global.environment.redis.host
+            });
         testClient = new TestClient(createMapConfig(), 1234);
 
         const count = 1;
@@ -290,7 +298,11 @@ function rateLimitAndVectorTilesTest (usePostGIS) {
         global.environment.enabledFeatures.rateLimitsEnabled = true;
         global.environment.enabledFeatures.rateLimitsByEndpoint.tile = true;
 
-        redisClient = redis.createClient(global.environment.redis.port);
+        redisClient = redis.createClient(
+            {
+                port: global.environment.redis.port,
+                host: global.environment.redis.host
+            });
         const count = 1;
         const period = 1;
         const burst = 0;
@@ -1506,7 +1506,11 @@ describe('template_api', function () {
             layergroup: layergroup
         };
         var statskey = 'user:localhost:mapviews';
-        var redisStatsClient = redis.createClient(global.environment.redis.port);
+        var redisStatsClient = redis.createClient(
+            {
+                port: global.environment.redis.port,
+                host: global.environment.redis.host
+            });
         var templateId; // will be set on template post
         var now = strftime('%Y%m%d', new Date());
         var errors = [];
@@ -1,21 +1,24 @@
 'use strict';
 
 const util = require('util');
-const path = require('path');
 const exec = util.promisify(require('child_process').exec);
 
 if (!process.env.NODE_ENV) {
     console.error('Please set "NODE_ENV" variable, e.g.: "NODE_ENV=test"');
     process.exit(1);
 }
+let configFileName = process.env.NODE_ENV;
+if (process.env.CARTO_WINDSHAFT_ENV_BASED_CONF) {
+    // we override the file with the one with env vars
+    configFileName = 'config';
+}
 
-const environment = require(`../config/environments/${process.env.NODE_ENV}.js`);
+const environment = require(`../config/environments/${configFileName}.js`);
 const REDIS_PORT = environment.redis.port;
-const REDIS_CELL_PATH = path.resolve(
-    process.platform === 'darwin'
-        ? './test/support/libredis_cell.dylib'
-        : './test/support/libredis_cell.so'
-);
+const REDIS_HOST = environment.redis.host;
+const PGHOST = environment.postgres.host;
+const PGPORT = environment.postgres.port;
 
 const TEST_USER_ID = 1;
 const TEST_USER = environment.postgres_auth_user.replace('<%= user_id %>', TEST_USER_ID);
@@ -25,28 +28,28 @@ const PUBLIC_USER_PASSWORD = environment.postgres.password;
 const TEST_DB = `${TEST_USER}_db`;
 
 async function startRedis () {
-    await exec(`redis-server --port ${REDIS_PORT} --loadmodule ${REDIS_CELL_PATH} --logfile ${__dirname}/redis-server.log --daemonize yes`);
+    // await exec(`redis-server --port ${REDIS_PORT} --loadmodule ${REDIS_CELL_PATH} --logfile ${__dirname}/redis-server.log --daemonize yes`);
 }
 
 async function stopRedis () {
-    await exec(`redis-cli -p ${REDIS_PORT} shutdown`);
+    // await exec(`redis-cli -p ${REDIS_PORT} shutdown`);
 }
 
 async function dropDatabase () {
     await exec(`dropdb --if-exists ${TEST_DB}`, {
-        env: Object.assign({ PGUSER: 'postgres' }, process.env)
+        env: Object.assign({ PGUSER: 'postgres', PGHOST: PGHOST, PGPORT: PGPORT }, process.env)
     });
 }
 
 async function createDatabase () {
     await exec(`createdb -T template_postgis -EUTF8 "${TEST_DB}"`, {
-        env: Object.assign({ PGUSER: 'postgres' }, process.env)
+        env: Object.assign({ PGUSER: 'postgres', PGHOST: PGHOST, PGPORT: PGPORT }, process.env)
     });
 }
 
 async function createDatabaseExtension () {
     await exec(`psql -c "CREATE EXTENSION IF NOT EXISTS cartodb CASCADE;" ${TEST_DB}`, {
-        env: Object.assign({ PGUSER: 'postgres' }, process.env)
+        env: Object.assign({ PGUSER: 'postgres', PGHOST: PGHOST, PGPORT: PGPORT }, process.env)
     });
 }
 
@@ -71,7 +74,7 @@ async function populateDatabase () {
     `;
 
     await exec(populateDatabaseCmd, {
-        env: Object.assign({ PGUSER: 'postgres' }, process.env)
+        env: Object.assign({ PGUSER: 'postgres', PGHOST: PGHOST, PGPORT: PGPORT }, process.env)
     });
 }
 
@@ -82,13 +85,13 @@ async function populateRedis () {
     HMSET rails:users:localhost \
         id ${TEST_USER_ID} \
         database_name "${TEST_DB}" \
-        database_host localhost \
+        database_host "${PGHOST}" \
         map_key 1234
 
     HMSET rails:users:cartodb250user \
         id ${TEST_USER_ID} \
         database_name "${TEST_DB}" \
-        database_host "localhost" \
+        database_host "${PGHOST}" \
         database_password "${TEST_PASSWORD}" \
         map_key 4321
 
@@ -141,7 +144,7 @@ async function populateRedis () {
         database_password "public"
     `;
 
-    await exec(`echo "${commands}" | redis-cli -p ${REDIS_PORT} -n 5`);
+    await exec(`echo "${commands}" | redis-cli -h ${REDIS_HOST} -p ${REDIS_PORT} -n 5`);
 }
 
 async function main (args) {
@@ -16,7 +16,11 @@ describe('analysis-backend limits', function () {
     var user = 'localhost';
 
     beforeEach(function () {
-        redisClient = redis.createClient(global.environment.redis.port);
+        redisClient = redis.createClient(
+            {
+                port: global.environment.redis.port,
+                host: global.environment.redis.host
+            });
         keysToDelete = {};
         var redisPool = new RedisPool(global.environment.redis);
         this.metadataBackend = cartodbRedis({ pool: redisPool });
@@ -30,7 +30,11 @@ describe('TemplateMaps limits', function () {
         };
     }
 
-    var redisClient = redis.createClient(global.environment.redis.port);
+    var redisClient = redis.createClient(
+        {
+            port: global.environment.redis.port,
+            host: global.environment.redis.host
+        });
     var redisPool = new RedisPool(global.environment.redis);
 
     afterEach(function (done) {
@@ -10,8 +10,14 @@ var redis = require('redis');
 const setICUEnvVariable = require('../../lib/utils/icu-data-env-setter');
 
 // set environment specific variables
-global.environment = require('../../config/environments/test');
-global.environment.name = 'test';
+let configFileName = process.env.NODE_ENV;
+if (process.env.CARTO_WINDSHAFT_ENV_BASED_CONF) {
+    // we override the file with the one with env vars
+    configFileName = 'config';
+}
+
+global.environment = require(`../../config/environments/${configFileName}.js`);
 process.env.NODE_ENV = 'test';
 
 setICUEnvVariable();
@@ -82,7 +88,11 @@ var redisClient;
 
 beforeEach(function () {
     if (!redisClient) {
-        redisClient = redis.createClient(global.environment.redis.port);
+        redisClient = redis.createClient(
+            {
+                port: global.environment.redis.port,
+                host: global.environment.redis.host
+            });
     }
 });
 
@@ -145,7 +155,11 @@ function deleteRedisKeys (keysToDelete, callback) {
     }
 
     Object.keys(keysToDelete).forEach(function (k) {
-        var redisClient = redis.createClient(global.environment.redis.port);
+        var redisClient = redis.createClient(
+            {
+                port: global.environment.redis.port,
+                host: global.environment.redis.host
+            });
         redisClient.select(keysToDelete[k], function () {
             redisClient.del(k, function (err, deletedKeysCount) {
                 assert.ifError(err);
@@ -8,7 +8,7 @@ var serverOptions = require('../../../lib/server-options');
 
 describe('windshaft', function () {
     it('should have valid global environment', function () {
-        assert.strictEqual(global.environment.name, 'test');
+        assert.strictEqual(global.environment.environment, 'test');
     });
 
     it('can instantiate a Windshaft object (configured express instance)', function () {