first commit
This commit is contained in:
74
app/auth/apikey.js
Normal file
74
app/auth/apikey.js
Normal file
@@ -0,0 +1,74 @@
|
||||
'use strict';
|
||||
|
||||
/**
 * Auth backend that authenticates a user via a pre-generated API key token.
 *
 * @param {Object} req - incoming HTTP request
 * @param {Object} metadataBackend - store used to look up API keys
 * @param {String} username - the user the request claims to act as
 * @param {String} apikeyToken - the API key token supplied with the request
 */
function ApikeyAuth(req, metadataBackend, username, apikeyToken) {
    this.apikeyToken = apikeyToken;
    this.username = username;
    this.metadataBackend = metadataBackend;
    this.req = req;
}
|
||||
|
||||
module.exports = ApikeyAuth;
|
||||
|
||||
/**
 * True when the basic-auth username does not contradict the request username.
 * A missing/empty basic-auth username counts as a match (nothing to compare).
 */
function usernameMatches(basicAuthUsername, requestUsername) {
    if (!basicAuthUsername) {
        return true;
    }
    return basicAuthUsername === requestUsername;
}
|
||||
|
||||
/**
 * Looks the API key up in the metadata backend and reports the authorization
 * level via callback(err, level). Failure modes:
 *  - backend error            -> 500 'Unexpected error'
 *  - key not found            -> 401 'Unauthorized' (type/subtype annotated)
 *  - username mismatch        -> 403 'Forbidden' (type/subtype annotated)
 *  - key without SQL grants   -> 403 'forbidden' (plain error, as callers expect)
 */
ApikeyAuth.prototype.verifyCredentials = function (callback) {
    const makeAuthError = (message, subtype, status) => {
        const error = new Error(message);
        error.type = 'auth';
        error.subtype = subtype;
        error.http_status = status;
        return error;
    };

    this.metadataBackend.getApikey(this.username, this.apikeyToken, (err, apikey) => {
        if (err) {
            // Mask backend details behind a generic 500.
            err.http_status = 500;
            err.message = 'Unexpected error';
            return callback(err);
        }

        if (!isApiKeyFound(apikey)) {
            return callback(makeAuthError('Unauthorized', 'api-key-not-found', 401));
        }

        if (!usernameMatches(apikey.user, this.username)) {
            return callback(makeAuthError('Forbidden', 'api-key-username-mismatch', 403));
        }

        if (!apikey.grantsSql) {
            // Note: this error intentionally carries no type/subtype, matching
            // the original contract.
            const forbiddenError = new Error('forbidden');
            forbiddenError.http_status = 403;
            return callback(forbiddenError);
        }

        return callback(null, getAuthorizationLevel(apikey));
    });
};
|
||||
|
||||
// An API key credential is present iff a truthy token was supplied.
ApikeyAuth.prototype.hasCredentials = function () {
    return Boolean(this.apikeyToken);
};
|
||||
|
||||
// The credential for this backend is simply the raw API key token.
ApikeyAuth.prototype.getCredentials = function () {
    const { apikeyToken } = this;
    return apikeyToken;
};
|
||||
|
||||
// The API key's type doubles as its authorization level (e.g. 'master').
function getAuthorizationLevel(apikey) {
    const { type } = apikey;
    return type;
}
|
||||
|
||||
/**
 * An API key lookup is considered successful when a record came back and none
 * of its essential fields are null.
 *
 * Fix: guard against a null/undefined `apikey` record — previously the
 * property accesses threw a TypeError instead of letting the caller answer
 * with 401 'api-key-not-found'.
 *
 * NOTE(review): fields are compared with `!== null` only; an `undefined`
 * field would still count as "found" — presumably the backend always returns
 * explicit nulls for missing fields. TODO confirm against metadataBackend.
 */
function isApiKeyFound(apikey) {
    return Boolean(apikey) &&
        apikey.type !== null &&
        apikey.user !== null &&
        apikey.databasePassword !== null &&
        apikey.databaseRole !== null;
}
|
||||
48
app/auth/auth_api.js
Normal file
48
app/auth/auth_api.js
Normal file
@@ -0,0 +1,48 @@
|
||||
'use strict';
|
||||
|
||||
var ApiKeyAuth = require('./apikey'),
|
||||
OAuthAuth = require('./oauth');
|
||||
|
||||
/**
 * Facade that picks the concrete auth backend (API key or OAuth) for a
 * request and delegates credential handling to it.
 *
 * @param {Object} req - incoming HTTP request
 * @param {Object} requestParams - parsed request params (api_key, user, metadataBackend)
 */
function AuthApi(req, requestParams) {
    this.req = req;
    // Cache for hasCredentials(); null means "not computed yet".
    this._hasCredentials = null;
    this.authBackend = getAuthBackend(req, requestParams);
}
|
||||
|
||||
/**
 * Human-readable name of the backend in use: 'apiKey', 'oAuth', or
 * (implicitly) undefined for an unknown backend.
 */
AuthApi.prototype.getType = function () {
    if (this.authBackend instanceof ApiKeyAuth) {
        return 'apiKey';
    }
    if (this.authBackend instanceof OAuthAuth) {
        return 'oAuth';
    }
    // Unknown backend: fall through, returning undefined as before.
};
|
||||
|
||||
// Asks the backend once whether credentials are present, then serves the
// cached answer on later calls.
AuthApi.prototype.hasCredentials = function () {
    const cached = this._hasCredentials;
    if (cached !== null) {
        return cached;
    }
    this._hasCredentials = this.authBackend.hasCredentials();
    return this._hasCredentials;
};
|
||||
|
||||
// Straight delegation to the selected backend.
AuthApi.prototype.getCredentials = function () {
    return this.authBackend.getCredentials();
};
|
||||
|
||||
/**
 * Verifies credentials through the backend. When no credentials were supplied
 * at all, reports "not authenticated" (false) without raising an error.
 */
AuthApi.prototype.verifyCredentials = function (callback) {
    if (!this.hasCredentials()) {
        callback(null, false);
        return;
    }
    this.authBackend.verifyCredentials(callback);
};
|
||||
|
||||
/**
 * Chooses the auth backend: an explicit api_key parameter wins, otherwise the
 * request is treated as a (potential) OAuth request.
 */
function getAuthBackend(req, requestParams) {
    if (requestParams.api_key) {
        return new ApiKeyAuth(req, requestParams.metadataBackend, requestParams.user, requestParams.api_key);
    }
    return new OAuthAuth(req, requestParams.metadataBackend);
}

module.exports = AuthApi;
|
||||
192
app/auth/oauth.js
Normal file
192
app/auth/oauth.js
Normal file
@@ -0,0 +1,192 @@
|
||||
'use strict';
|
||||
|
||||
// too bound to the request object, but ok for now
|
||||
var _ = require('underscore');
|
||||
var OAuthUtil = require('oauth-client');
|
||||
var step = require('step');
|
||||
var CdbRequest = require('../models/cartodb_request');
|
||||
var cdbReq = new CdbRequest();
|
||||
|
||||
// Singleton implementing legacy OAuth 1.0-style request verification.
// Exported below as `module.exports.backend`.
var oAuth = (function(){
    var me = {
        // Redis database index where rails stores oauth data.
        oauth_database: 3,
        // Key template used by the rails side for access tokens.
        oauth_user_key: "rails:oauth_access_tokens:<%= oauth_access_key %>",
        is_oauth_request: true
    };

    // oauth token cases:
    // * in GET request
    // * in header
    /**
     * Extracts the oauth_* parameters from the request (query/body and the
     * Authorization header) and returns them merged, header values winning.
     */
    me.parseTokens = function(req){
        var query_oauth = _.clone(req.method === "POST" ? req.body: req.query);
        var header_oauth = {};
        // The full set of parameter names defined by OAuth 1.0 signing.
        var oauth_variables = ['oauth_body_hash',
            'oauth_consumer_key',
            'oauth_token',
            'oauth_signature_method',
            'oauth_signature',
            'oauth_timestamp',
            'oauth_nonce',
            'oauth_version'];

        // pull only oauth tokens out of query
        var non_oauth = _.difference(_.keys(query_oauth), oauth_variables);
        _.each(non_oauth, function(key){ delete query_oauth[key]; });

        // pull oauth tokens out of header
        var header_string = req.headers.authorization;
        if (!_.isUndefined(header_string)) {
            _.each(oauth_variables, function(oauth_key){
                // Header values look like: oauth_token="..."; capture and
                // URI-decode the quoted value.
                var matched_string = header_string.match(new RegExp(oauth_key + '=\"([^\"]+)\"'));
                if (!_.isNull(matched_string)) {
                    header_oauth[oauth_key] = decodeURIComponent(matched_string[1]);
                }
            });
        }

        //merge header and query oauth tokens. preference given to header oauth
        return _.defaults(header_oauth, query_oauth);
    };

    // remove oauthy tokens from an object
    /**
     * Deletes every oauth_* property from `obj` (mutating it) and returns the
     * removed properties as a new object, or null when none were present.
     */
    me.splitParams = function(obj) {
        var removed = null;
        for (var prop in obj) {
            if (/^oauth_\w+$/.test(prop)) {
                if(!removed) {
                    removed = {};
                }
                removed[prop] = obj[prop];
                delete obj[prop];
            }
        }
        return removed;
    };

    // Hosts (besides the request's own Host header) against which the
    // signature may validate; configurable via global.settings.oauth.
    me.getAllowedHosts= function() {
        var oauthConfig = global.settings.oauth || {};
        return oauthConfig.allowedHosts || ['carto.com', 'cartodb.com'];
    };

    // do new fancy get User ID
    /**
     * Verifies the OAuth signature of `req`. Calls back with
     * (err, authorizationLevel): 'master' on a valid signature, null otherwise.
     * Side effects: strips oauth_* params from req.query and the signature
     * from req.body.
     */
    me.verifyRequest = function(req, metadataBackend, callback) {
        var that = this;
        //TODO: review this
        var httpProto = req.protocol;
        if(!httpProto || (httpProto !== 'http' && httpProto !== 'https')) {
            var msg = "Unknown HTTP protocol " + httpProto + ".";
            var unknownProtocolErr = new Error(msg);
            unknownProtocolErr.http_status = 500;
            return callback(unknownProtocolErr);
        }

        var username = cdbReq.userByReq(req);
        var requestTokens;
        var signature;

        // step() threads each function's return/throw into the next one;
        // `this` inside each step is step's shared context object.
        step(
            function getTokensFromURL(){
                return oAuth.parseTokens(req);
            },
            function getOAuthHash(err, _requestTokens) {
                if (err) {
                    throw err;
                }

                // this is oauth request only if oauth headers are present
                this.is_oauth_request = !_.isEmpty(_requestTokens);

                if (this.is_oauth_request) {
                    requestTokens = _requestTokens;
                    // Async: fetch the stored hash, resuming at the next step.
                    that.getOAuthHash(metadataBackend, requestTokens.oauth_token, this);
                } else {
                    return null;
                }
            },
            function regenerateSignature(err, oAuthHash){
                if (err) {
                    throw err;
                }
                if (!this.is_oauth_request) {
                    // Not an OAuth request: skip straight to finishValidation
                    // with a null (invalid) signature result.
                    return null;
                }

                var consumer = OAuthUtil.createConsumer(oAuthHash.consumer_key, oAuthHash.consumer_secret);
                var access_token = OAuthUtil.createToken(oAuthHash.access_token_token, oAuthHash.access_token_secret);
                var signer = OAuthUtil.createHmac(consumer, access_token);

                var method = req.method;
                // The signature may have been computed against the literal
                // request host or any "<username>.<allowedHost>" alias.
                var hostsToValidate = {};
                var requestHost = req.headers.host;
                hostsToValidate[requestHost] = true;
                that.getAllowedHosts().forEach(function(allowedHost) {
                    hostsToValidate[username + '.' + allowedHost] = true;
                });

                // Mutates req.query: oauth_* params must not be part of the
                // signed parameter set twice.
                that.splitParams(req.query);
                // remove oauth_signature from body
                if(req.body) {
                    delete req.body.oauth_signature;
                }
                signature = requestTokens.oauth_signature;
                // remove signature from requestTokens
                delete requestTokens.oauth_signature;
                var requestParams = _.extend({}, req.body, requestTokens, req.query);

                // Recompute the signature for every candidate host; the
                // request is valid if any of them matches.
                var hosts = Object.keys(hostsToValidate);
                var requestSignatures = hosts.map(function(host) {
                    var url = httpProto + '://' + host + req.path;
                    return signer.sign(method, url, requestParams);
                });

                return requestSignatures.reduce(function(validSignature, requestSignature) {
                    if (signature === requestSignature && !_.isUndefined(requestSignature)) {
                        validSignature = true;
                    }
                    return validSignature;
                }, false);
            },
            function finishValidation(err, hasValidSignature) {
                // Only a valid signature grants 'master' access.
                const authorizationLevel = hasValidSignature ? 'master' : null;
                return callback(err, authorizationLevel);
            }
        );
    };

    // Thin async wrapper over the metadata backend lookup.
    me.getOAuthHash = function(metadataBackend, oAuthAccessKey, callback){
        metadataBackend.getOAuthHash(oAuthAccessKey, callback);
    };

    return me;
})();
|
||||
|
||||
/**
 * OAuth-based auth backend.
 *
 * @param {Object} req - incoming HTTP request
 * @param {Object} metadataBackend - store used to look up OAuth hashes
 */
function OAuthAuth(req, metadataBackend) {
    this.metadataBackend = metadataBackend;
    this.req = req;
    // Tri-state cache: null = not yet checked whether req carries OAuth tokens.
    this.isOAuthRequest = null;
}
|
||||
|
||||
// Verifies the OAuth signature when tokens are present; otherwise reports
// "not authenticated" (false) without an error.
OAuthAuth.prototype.verifyCredentials = function(callback) {
    if (!this.hasCredentials()) {
        callback(null, false);
        return;
    }
    oAuth.verifyRequest(this.req, this.metadataBackend, callback);
};
|
||||
|
||||
// The credentials are whatever oauth_* tokens can be parsed off the request.
OAuthAuth.prototype.getCredentials = function() {
    return oAuth.parseTokens(this.req);
};
|
||||
|
||||
// Parses tokens at most once and caches the boolean answer afterwards.
OAuthAuth.prototype.hasCredentials = function() {
    if (this.isOAuthRequest !== null) {
        return this.isOAuthRequest;
    }
    const parsedTokens = oAuth.parseTokens(this.req);
    this.isOAuthRequest = !_.isEmpty(parsedTokens);
    return this.isOAuthRequest;
};
|
||||
|
||||
|
||||
module.exports = OAuthAuth;
|
||||
module.exports.backend = oAuth;
|
||||
29
app/controllers/cache_status_controller.js
Normal file
29
app/controllers/cache_status_controller.js
Normal file
@@ -0,0 +1,29 @@
|
||||
'use strict';
|
||||
|
||||
var _ = require('underscore');
|
||||
|
||||
/**
 * Controller exposing statistics about the table-metadata (explain) cache.
 *
 * @param {Object} tableCache - cache whose entries carry a `hits` counter
 */
function CacheStatusController(tableCache) {
    this.tableCache = tableCache;
}
|
||||
|
||||
// Mounts the cache-status endpoint under the configured base URL.
CacheStatusController.prototype.route = function (app) {
    const url = global.settings.base_url + '/cachestatus';
    app.get(url, this.handleCacheStatus.bind(this));
};
|
||||
|
||||
/**
 * Responds with aggregate stats for the explain/table cache: number of cached
 * keys and the total hit count across all entries.
 *
 * Uses native Array#reduce instead of underscore's `_.reduce`: the cache's
 * values() result is indexed with .length here, i.e. treated as a plain
 * array, so the extra dependency buys nothing.
 */
CacheStatusController.prototype.handleCacheStatus = function (req, res) {
    var tableCacheValues = this.tableCache.values();
    var totalExplainKeys = tableCacheValues.length;
    var totalExplainHits = tableCacheValues.reduce(function (memo, entry) {
        return memo + entry.hits;
    }, 0);

    res.send({
        explain: {
            pid: process.pid,
            hits: totalExplainHits,
            keys : totalExplainKeys
        }
    });
};
|
||||
|
||||
module.exports = CacheStatusController;
|
||||
207
app/controllers/copy_controller.js
Normal file
207
app/controllers/copy_controller.js
Normal file
@@ -0,0 +1,207 @@
|
||||
'use strict';
|
||||
|
||||
const userMiddleware = require('../middlewares/user');
|
||||
const errorMiddleware = require('../middlewares/error');
|
||||
const authorizationMiddleware = require('../middlewares/authorization');
|
||||
const connectionParamsMiddleware = require('../middlewares/connection-params');
|
||||
const { initializeProfilerMiddleware } = require('../middlewares/profiler');
|
||||
const rateLimitsMiddleware = require('../middlewares/rate-limit');
|
||||
const dbQuotaMiddleware = require('../middlewares/db-quota');
|
||||
const { RATE_LIMIT_ENDPOINTS_GROUPS } = rateLimitsMiddleware;
|
||||
const errorHandlerFactory = require('../services/error_handler_factory');
|
||||
const StreamCopy = require('../services/stream_copy');
|
||||
const StreamCopyMetrics = require('../services/stream_copy_metrics');
|
||||
const zlib = require('zlib');
|
||||
const { PassThrough } = require('stream');
|
||||
|
||||
/**
 * HTTP controller for the COPY FROM (ingest) and COPY TO (export) endpoints.
 *
 * @param {Object} metadataBackend
 * @param {Object} userDatabaseService
 * @param {Object} userLimitsService
 * @param {Object} logger - used for stream-copy metrics
 */
function CopyController(metadataBackend, userDatabaseService, userLimitsService, logger) {
    this.logger = logger;
    this.userLimitsService = userLimitsService;
    this.userDatabaseService = userDatabaseService;
    this.metadataBackend = metadataBackend;
}
|
||||
|
||||
/**
 * Registers the copy endpoints:
 *   POST <base_url>/sql/copyfrom  - ingest data (COPY ... FROM STDIN)
 *   GET  <base_url>/sql/copyto    - export data (COPY ... TO STDOUT)
 * Each endpoint runs the full auth/rate-limit/connection middleware chain
 * before the stream handler; copyfrom additionally enforces the DB quota.
 */
CopyController.prototype.route = function (app) {
    const { base_url } = global.settings;

    // Middleware chain for COPY FROM (ingest) requests.
    const copyFromMiddlewares = endpointGroup => {
        return [
            initializeProfilerMiddleware('copyfrom'),
            userMiddleware(this.metadataBackend),
            rateLimitsMiddleware(this.userLimitsService, endpointGroup),
            authorizationMiddleware(this.metadataBackend),
            connectionParamsMiddleware(this.userDatabaseService),
            validateCopyQuery(),
            dbQuotaMiddleware(),
            handleCopyFrom(this.logger),
            errorHandler(),
            errorMiddleware()
        ];
    };

    // Middleware chain for COPY TO (export) requests; no quota check needed.
    const copyToMiddlewares = endpointGroup => {
        return [
            initializeProfilerMiddleware('copyto'),
            userMiddleware(this.metadataBackend),
            rateLimitsMiddleware(this.userLimitsService, endpointGroup),
            authorizationMiddleware(this.metadataBackend),
            connectionParamsMiddleware(this.userDatabaseService),
            validateCopyQuery(),
            handleCopyTo(this.logger),
            errorHandler(),
            errorMiddleware()
        ];
    };

    app.post(`${base_url}/sql/copyfrom`, copyFromMiddlewares(RATE_LIMIT_ENDPOINTS_GROUPS.COPY_FROM));
    app.get(`${base_url}/sql/copyto`, copyToMiddlewares(RATE_LIMIT_ENDPOINTS_GROUPS.COPY_TO));
};
|
||||
|
||||
|
||||
/**
 * Middleware factory for COPY TO: streams the PostgreSQL COPY output straight
 * to the HTTP response as an attachment, recording size/row metrics.
 * Event wiring is order-sensitive: metrics hooks are attached to pgstream
 * before piping, and client-disconnect detection hangs off the response side.
 */
function handleCopyTo (logger) {
    return function handleCopyToMiddleware (req, res, next) {
        const sql = req.query.q;
        const { userDbParams, user } = res.locals;
        const filename = req.query.filename || 'carto-sql-copyto.dmp';

        // it is not sure, nginx may choose not to compress the body
        // but we want to know it and save it in the metrics
        // https://github.com/CartoDB/CartoDB-SQL-API/issues/515
        const isGzip = req.get('accept-encoding') && req.get('accept-encoding').includes('gzip');

        const streamCopy = new StreamCopy(sql, userDbParams);
        const metrics = new StreamCopyMetrics(logger, 'copyto', sql, user, isGzip);

        res.header("Content-Disposition", `attachment; filename=${encodeURIComponent(filename)}`);
        res.header("Content-Type", "application/octet-stream");

        streamCopy.getPGStream(StreamCopy.ACTION_TO, (err, pgstream) => {
            if (err) {
                return next(err);
            }

            pgstream
                // Track how many bytes Postgres produced.
                .on('data', data => metrics.addSize(data.length))
                .on('error', err => {
                    // DB-side failure: close metrics, detach from the
                    // response, and let the error middleware answer.
                    metrics.end(null, err);
                    pgstream.unpipe(res);

                    return next(err);
                })
                .on('end', () => metrics.end( streamCopy.getRowCount(StreamCopy.ACTION_TO) ))
                .pipe(res)
                // 'close' on the response means the client went away: cancel
                // the running COPY on the database side.
                .on('close', () => {
                    const err = new Error('Connection closed by client');
                    pgstream.emit('cancelQuery', err);
                    pgstream.emit('error', err);
                })
                .on('error', err => {
                    // Response-side write error: surface it on the pg stream.
                    pgstream.emit('error', err);
                });
        });
    };
}
|
||||
|
||||
/**
 * Middleware factory for COPY FROM: pipes the (possibly gzipped) request body
 * into a PostgreSQL COPY stream, enforcing the remaining DB quota and a
 * maximum POST size, and recording size/row metrics.
 *
 * Fix: the quota / max-POST-size branches previously emitted
 * `pgstream.emit('cancelQuery', err)` where `err` is the getPGStream callback
 * error — guaranteed falsy at that point — instead of the error that was just
 * constructed. The proper error object is now passed to both emits.
 */
function handleCopyFrom (logger) {
    return function handleCopyFromMiddleware (req, res, next) {
        const sql = req.query.q;
        const { userDbParams, user, dbRemainingQuota } = res.locals;
        const isGzip = req.get('content-encoding') === 'gzip';
        const COPY_FROM_MAX_POST_SIZE = global.settings.copy_from_max_post_size || 2 * 1024 * 1024 * 1024; // 2 GB
        const COPY_FROM_MAX_POST_SIZE_PRETTY = global.settings.copy_from_max_post_size_pretty || '2 GB';

        const streamCopy = new StreamCopy(sql, userDbParams);
        const metrics = new StreamCopyMetrics(logger, 'copyfrom', sql, user, isGzip);

        streamCopy.getPGStream(StreamCopy.ACTION_FROM, (err, pgstream) => {
            if (err) {
                return next(err);
            }

            req
                // Track compressed size only when the body is gzipped.
                .on('data', data => isGzip ? metrics.addGzipSize(data.length) : undefined)
                .on('error', err => {
                    metrics.end(null, err);
                    pgstream.emit('error', err);
                })
                // Client went away mid-upload: cancel the running COPY.
                .on('close', () => {
                    const err = new Error('Connection closed by client');
                    pgstream.emit('cancelQuery', err);
                    pgstream.emit('error', err);
                })
                // Transparently gunzip when needed; PassThrough keeps the
                // pipeline shape identical for the plain case.
                .pipe(isGzip ? zlib.createGunzip() : new PassThrough())
                .on('error', err => {
                    err.message = `Error while gunzipping: ${err.message}`;
                    metrics.end(null, err);
                    pgstream.emit('error', err);
                })
                .on('data', data => {
                    metrics.addSize(data.length);

                    if(metrics.size > dbRemainingQuota) {
                        const quotaError = new Error('DB Quota exceeded');
                        // Cancel with the actual quota error (was: stale `err`).
                        pgstream.emit('cancelQuery', quotaError);
                        pgstream.emit('error', quotaError);
                    }
                    if((metrics.gzipSize || metrics.size) > COPY_FROM_MAX_POST_SIZE) {
                        const maxPostSizeError = new Error(
                            `COPY FROM maximum POST size of ${COPY_FROM_MAX_POST_SIZE_PRETTY} exceeded`
                        );
                        // Cancel with the actual size error (was: stale `err`).
                        pgstream.emit('cancelQuery', maxPostSizeError);
                        pgstream.emit('error', maxPostSizeError);
                    }
                })
                .pipe(pgstream)
                .on('error', err => {
                    metrics.end(null, err);
                    req.unpipe(pgstream);

                    return next(err);
                })
                .on('end', () => {
                    metrics.end( streamCopy.getRowCount(StreamCopy.ACTION_FROM) );

                    const { time, rows } = metrics;

                    // An empty COPY is treated as a client error.
                    if (!rows) {
                        return next(new Error("No rows copied"));
                    }

                    res.send({
                        time,
                        total_rows: rows
                    });
                });
        });
    };
}
|
||||
|
||||
/**
 * Middleware factory: rejects requests whose `q` parameter is missing or is
 * not a COPY statement (the COPY keyword check is case-insensitive).
 */
function validateCopyQuery () {
    return function validateCopyQueryMiddleware (req, res, next) {
        const sql = req.query.q;
        let validationError = null;

        if (!sql) {
            validationError = new Error("SQL is missing");
        } else if (!sql.toUpperCase().startsWith("COPY ")) {
            validationError = new Error("SQL must start with COPY");
        }

        if (validationError !== null) {
            return next(validationError);
        }

        next();
    };
}
|
||||
|
||||
/**
 * Middleware factory: copy-specific error handling. Once headers have been
 * sent (the stream already started) a normal error response is impossible, so
 * the serialized error is appended to the body instead; otherwise the error
 * is forwarded to the regular error middleware.
 */
function errorHandler () {
    return function errorHandlerMiddleware (err, req, res, next) {
        if (!res.headersSent) {
            return next(err);
        }

        console.error("EXCEPTION REPORT: " + err.stack);
        const handler = errorHandlerFactory(err);
        res.write(JSON.stringify(handler.getResponse()));
        res.end();
    };
}
|
||||
|
||||
module.exports = CopyController;
|
||||
14
app/controllers/generic_controller.js
Normal file
14
app/controllers/generic_controller.js
Normal file
@@ -0,0 +1,14 @@
|
||||
'use strict';
|
||||
|
||||
/**
 * Fallback controller answering CORS preflight (OPTIONS) requests on any path.
 */
function GenericController() {
}

// Register the catch-all OPTIONS handler.
GenericController.prototype.route = function (app) {
    app.options('*', this.handleRequest.bind(this));
};

// Nothing to compute: just finish the (empty) response.
GenericController.prototype.handleRequest = function (req, res) {
    res.end();
};
|
||||
|
||||
module.exports = GenericController;
|
||||
35
app/controllers/health_check_controller.js
Normal file
35
app/controllers/health_check_controller.js
Normal file
@@ -0,0 +1,35 @@
|
||||
'use strict';
|
||||
|
||||
var HealthCheck = require('../monitoring/health_check');
|
||||
|
||||
/**
 * Controller for the /health endpoint.
 * The configured disabled_file acts as a kill switch read by HealthCheck.
 */
function HealthCheckController() {
    const disabledFile = global.settings.disabled_file;
    this.healthCheck = new HealthCheck(disabledFile);
}
|
||||
|
||||
// Mounts the health endpoint under the configured base URL.
HealthCheckController.prototype.route = function (app) {
    const url = global.settings.base_url + '/health';
    app.get(url, this.handleHealthCheck.bind(this));
};
|
||||
|
||||
/**
 * Runs the health check when enabled by configuration and answers 200/503
 * accordingly; when disabled it always answers 200 with {enabled: false}.
 */
HealthCheckController.prototype.handleHealthCheck = function (req, res) {
    const healthConfig = global.settings.health || {};

    if (!healthConfig.enabled) {
        // Health checking disabled: report "ok" without doing any work.
        return res.status(200).send({enabled: false, ok: true});
    }

    const startTime = Date.now();
    this.healthCheck.check(function (err) {
        const ok = !err;
        const response = {
            enabled: true,
            ok: ok,
            elapsed: Date.now() - startTime
        };
        if (err) {
            response.err = err.message;
        }

        // 503 signals load balancers to take this instance out of rotation.
        res.status(ok ? 200 : 503).send(response);
    });
};
|
||||
|
||||
module.exports = HealthCheckController;
|
||||
253
app/controllers/job_controller.js
Normal file
253
app/controllers/job_controller.js
Normal file
@@ -0,0 +1,253 @@
|
||||
'use strict';
|
||||
|
||||
const util = require('util');
|
||||
|
||||
const bodyParserMiddleware = require('../middlewares/body-parser');
|
||||
const userMiddleware = require('../middlewares/user');
|
||||
const { initializeProfilerMiddleware, finishProfilerMiddleware } = require('../middlewares/profiler');
|
||||
const authorizationMiddleware = require('../middlewares/authorization');
|
||||
const connectionParamsMiddleware = require('../middlewares/connection-params');
|
||||
const errorMiddleware = require('../middlewares/error');
|
||||
const rateLimitsMiddleware = require('../middlewares/rate-limit');
|
||||
const { RATE_LIMIT_ENDPOINTS_GROUPS } = rateLimitsMiddleware;
|
||||
|
||||
/**
 * Controller for the batch-query (job) endpoints.
 *
 * @param {Object} metadataBackend
 * @param {Object} userDatabaseService
 * @param {Object} jobService - creates/fetches/cancels batch jobs
 * @param {Object} statsdClient - optional metrics client
 * @param {Object} userLimitsService
 */
function JobController(metadataBackend, userDatabaseService, jobService, statsdClient, userLimitsService) {
    this.userLimitsService = userLimitsService;
    this.statsdClient = statsdClient;
    this.jobService = jobService;
    this.userDatabaseService = userDatabaseService;
    this.metadataBackend = metadataBackend;
}
|
||||
|
||||
module.exports = JobController;
|
||||
|
||||
/**
 * Registers the batch-job endpoints:
 *   GET    <base_url>/jobs-wip       - list work-in-progress jobs (no auth chain)
 *   POST   <base_url>/sql/job        - create a job (payload size checked first)
 *   GET    <base_url>/sql/job/:job_id - retrieve a job
 *   DELETE <base_url>/sql/job/:job_id - cancel a job
 * The three authenticated routes share one composed middleware chain.
 */
JobController.prototype.route = function (app) {
    const { base_url } = global.settings;
    const jobMiddlewares = composeJobMiddlewares(
        this.metadataBackend,
        this.userDatabaseService,
        this.jobService,
        this.statsdClient,
        this.userLimitsService
    );

    app.get(
        `${base_url}/jobs-wip`,
        bodyParserMiddleware(),
        listWorkInProgressJobs(this.jobService),
        sendResponse(),
        errorMiddleware()
    );
    app.post(
        `${base_url}/sql/job`,
        bodyParserMiddleware(),
        checkBodyPayloadSize(),
        jobMiddlewares('create', createJob, RATE_LIMIT_ENDPOINTS_GROUPS.JOB_CREATE)
    );
    app.get(
        `${base_url}/sql/job/:job_id`,
        bodyParserMiddleware(),
        jobMiddlewares('retrieve', getJob, RATE_LIMIT_ENDPOINTS_GROUPS.JOB_GET)
    );
    app.delete(
        `${base_url}/sql/job/:job_id`,
        bodyParserMiddleware(),
        jobMiddlewares('cancel', cancelJob, RATE_LIMIT_ENDPOINTS_GROUPS.JOB_DELETE)
    );
};
|
||||
|
||||
/**
 * Builds the shared middleware chain for authenticated job endpoints.
 * Returns a function (action, jobMiddleware, endpointGroup) producing the
 * chain: profiling -> user/rate-limit/auth/connection -> the job-specific
 * middleware -> response headers/metrics/logging -> send -> error handling.
 * Job endpoints always require master-level authorization.
 */
function composeJobMiddlewares (metadataBackend, userDatabaseService, jobService, statsdClient, userLimitsService) {
    return function jobMiddlewares (action, jobMiddleware, endpointGroup) {
        // Batch jobs may run arbitrary SQL: only the master key is accepted.
        const forceToBeMaster = true;

        return [
            initializeProfilerMiddleware('job'),
            userMiddleware(metadataBackend),
            rateLimitsMiddleware(userLimitsService, endpointGroup),
            authorizationMiddleware(metadataBackend, forceToBeMaster),
            connectionParamsMiddleware(userDatabaseService),
            jobMiddleware(jobService),
            setServedByDBHostHeader(),
            finishProfilerMiddleware(),
            logJobResult(action),
            incrementSuccessMetrics(statsdClient),
            sendResponse(),
            // Error-path middlewares run only when an earlier step fails.
            incrementErrorMetrics(statsdClient),
            errorMiddleware()
        ];
    };
}
|
||||
|
||||
/**
 * Middleware factory: cancels the batch job named by the :job_id route param
 * and stores its serialized state on res.body for the response middleware.
 */
function cancelJob (jobService) {
    return function cancelJobMiddleware (req, res, next) {
        const jobId = req.params.job_id;

        jobService.cancel(jobId, function (err, job) {
            // Record timing whether or not the cancel succeeded.
            if (req.profiler) {
                req.profiler.done('cancelJob');
            }

            if (err) {
                return next(err);
            }

            res.body = job.serialize();
            next();
        });
    };
}
|
||||
|
||||
/**
 * Middleware factory: fetches a batch job by :job_id and exposes its
 * serialized representation on res.body.
 */
function getJob (jobService) {
    return function getJobMiddleware (req, res, next) {
        const jobId = req.params.job_id;

        jobService.get(jobId, function (err, job) {
            // Record timing whether or not the lookup succeeded.
            if (req.profiler) {
                req.profiler.done('getJob');
            }

            if (err) {
                return next(err);
            }

            res.body = job.serialize();
            next();
        });
    };
}
|
||||
|
||||
/**
 * Middleware factory: queues a new batch job built from the request's query
 * parameter and the user's database connection params. Answers 201 with the
 * serialized job on res.body.
 */
function createJob (jobService) {
    return function createJobMiddleware (req, res, next) {
        // Body parameters take precedence over query-string ones.
        const params = Object.assign({}, req.query, req.body);
        const { userDbParams } = res.locals;

        const data = {
            user: res.locals.user,
            query: params.query,
            host: userDbParams.host,
            // A dedicated batch port, when configured, overrides the user's port.
            port: global.settings.db_batch_port || userDbParams.port,
            pass: userDbParams.pass,
            dbname: userDbParams.dbname,
            dbuser: userDbParams.user
        };

        jobService.create(data, function (err, job) {
            if (req.profiler) {
                req.profiler.done('createJob');
            }

            if (err) {
                return next(err);
            }

            res.locals.job_id = job.job_id;

            // 201 Created for a newly queued job.
            res.statusCode = 201;
            res.body = job.serialize();
            next();
        });
    };
}
|
||||
|
||||
/**
 * Middleware factory: lists all work-in-progress batch jobs and exposes the
 * listing on res.body for the response middleware.
 */
function listWorkInProgressJobs (jobService) {
    return function listWorkInProgressJobsMiddleware (req, res, next) {
        jobService.listWorkInProgressJobs(function (err, list) {
            if (err) {
                return next(err);
            }

            res.body = list;
            next();
        });
    };
}
|
||||
|
||||
|
||||
/**
 * Middleware factory: rejects JSON payloads larger than the maximum allowed
 * query size (16 kb) before they reach the batch-job machinery.
 *
 * Fix: `next` now receives only the error. The original passed a stray second
 * argument (`res`) which Express silently ignores and which no error
 * middleware signature expects.
 */
function checkBodyPayloadSize () {
    return function checkBodyPayloadSizeMiddleware(req, res, next) {
        const payload = JSON.stringify(req.body);

        if (payload.length > MAX_LIMIT_QUERY_SIZE_IN_BYTES) {
            return next(new Error(getMaxSizeErrorMessage(payload)));
        }

        next();
    };
}

const ONE_KILOBYTE_IN_BYTES = 1024;
const MAX_LIMIT_QUERY_SIZE_IN_KB = 16;
const MAX_LIMIT_QUERY_SIZE_IN_BYTES = MAX_LIMIT_QUERY_SIZE_IN_KB * ONE_KILOBYTE_IN_BYTES;
|
||||
|
||||
/**
 * Builds the "payload too large" error message, reporting the payload size
 * and the configured limit in bytes and kilobytes.
 */
function getMaxSizeErrorMessage(sql) {
    const maxKb = Math.round(MAX_LIMIT_QUERY_SIZE_IN_BYTES / ONE_KILOBYTE_IN_BYTES);
    return `Your payload is too large: ${sql.length} bytes. ` +
        `Max size allowed is ${MAX_LIMIT_QUERY_SIZE_IN_BYTES} bytes (${maxKb}kb). ` +
        'Are you trying to import data?. ' +
        'Please, check out import api http://docs.cartodb.com/cartodb-platform/import-api/';
}

module.exports.MAX_LIMIT_QUERY_SIZE_IN_BYTES = MAX_LIMIT_QUERY_SIZE_IN_BYTES;
module.exports.getMaxSizeErrorMessage = getMaxSizeErrorMessage;
|
||||
|
||||
/**
 * Middleware factory: advertises which database host served the request via
 * the X-Served-By-DB-Host header, when a host is known.
 */
function setServedByDBHostHeader () {
    return function setServedByDBHostHeaderMiddleware (req, res, next) {
        const host = res.locals.userDbParams.host;

        if (host) {
            res.header('X-Served-By-DB-Host', host);
        }

        next();
    };
}
|
||||
|
||||
/**
 * Middleware factory: writes a structured audit line for a completed job
 * action (create/retrieve/cancel). Skipped entirely under the test suite.
 */
function logJobResult (action) {
    return function logJobResultMiddleware (req, res, next) {
        if (process.env.NODE_ENV !== 'test') {
            const record = {
                type: 'sql_api_batch_job',
                username: res.locals.user,
                action: action,
                // Reads come with :job_id in the route; creates stash it on locals.
                job_id: req.params.job_id || res.locals.job_id
            };
            console.info(JSON.stringify(record));
        }

        next();
    };
}
|
||||
|
||||
const METRICS_PREFIX = 'sqlapi.job';

/**
 * Middleware factory: bumps the job success counter. The statsd client is
 * optional; when no client was injected the middleware is a pass-through.
 */
function incrementSuccessMetrics (statsdClient) {
    const hasStatsd = statsdClient !== undefined;

    return function incrementSuccessMetricsMiddleware (req, res, next) {
        if (hasStatsd) {
            statsdClient.increment(METRICS_PREFIX + '.success');
        }

        next();
    };
}
|
||||
|
||||
/**
 * Error-middleware factory: bumps the job error counter (when a statsd client
 * was injected) and forwards the error untouched.
 */
function incrementErrorMetrics (statsdClient) {
    const hasStatsd = statsdClient !== undefined;

    return function incrementErrorMetricsMiddleware (err, req, res, next) {
        if (hasStatsd) {
            statsdClient.increment(`${METRICS_PREFIX}.error`);
        }

        next(err);
    };
}
|
||||
|
||||
/**
 * Terminal middleware factory: flushes whatever status/body the earlier
 * middlewares stored on the response (defaulting the status to 200).
 */
function sendResponse () {
    return function sendResponseMiddleware (req, res) {
        const status = res.statusCode || 200;
        res.status(status).send(res.body);
    };
}
|
||||
275
app/controllers/query_controller.js
Normal file
275
app/controllers/query_controller.js
Normal file
@@ -0,0 +1,275 @@
|
||||
'use strict';
|
||||
|
||||
var _ = require('underscore');
|
||||
var step = require('step');
|
||||
var PSQL = require('cartodb-psql');
|
||||
var CachedQueryTables = require('../services/cached-query-tables');
|
||||
const pgEntitiesAccessValidator = require('../services/pg-entities-access-validator');
|
||||
var queryMayWrite = require('../utils/query_may_write');
|
||||
|
||||
var formats = require('../models/formats');
|
||||
|
||||
var sanitize_filename = require('../utils/filename_sanitizer');
|
||||
var getContentDisposition = require('../utils/content_disposition');
|
||||
const bodyParserMiddleware = require('../middlewares/body-parser');
|
||||
const userMiddleware = require('../middlewares/user');
|
||||
const errorMiddleware = require('../middlewares/error');
|
||||
const authorizationMiddleware = require('../middlewares/authorization');
|
||||
const connectionParamsMiddleware = require('../middlewares/connection-params');
|
||||
const timeoutLimitsMiddleware = require('../middlewares/timeout-limits');
|
||||
const { initializeProfilerMiddleware } = require('../middlewares/profiler');
|
||||
const rateLimitsMiddleware = require('../middlewares/rate-limit');
|
||||
const { RATE_LIMIT_ENDPOINTS_GROUPS } = rateLimitsMiddleware;
|
||||
|
||||
var ONE_YEAR_IN_SECONDS = 31536000; // 1 year time to live by default
|
||||
|
||||
/**
 * Controller for the main SQL endpoints (/sql and /sql.:format).
 *
 * @param {Object} metadataBackend
 * @param {Object} userDatabaseService
 * @param {Object} tableCache - raw cache, wrapped for query->tables lookups
 * @param {Object} statsd_client
 * @param {Object} userLimitsService
 */
function QueryController(metadataBackend, userDatabaseService, tableCache, statsd_client, userLimitsService) {
    this.userLimitsService = userLimitsService;
    this.userDatabaseService = userDatabaseService;
    this.statsd_client = statsd_client;
    this.metadataBackend = metadataBackend;
    // Memoizes which tables each query touches.
    this.queryTables = new CachedQueryTables(tableCache);
}
|
||||
|
||||
/**
 * Registers the SQL query endpoints for every HTTP method:
 *   <base_url>/sql and <base_url>/sql.:f (format suffix variant).
 * Both run the same middleware chain ending in handleQuery.
 */
QueryController.prototype.route = function (app) {
    const { base_url } = global.settings;
    // Queries are allowed with non-master (regular/default) API keys.
    const forceToBeMaster = false;

    const queryMiddlewares = () => {
        return [
            bodyParserMiddleware(),
            initializeProfilerMiddleware('query'),
            userMiddleware(this.metadataBackend),
            rateLimitsMiddleware(this.userLimitsService, RATE_LIMIT_ENDPOINTS_GROUPS.QUERY),
            authorizationMiddleware(this.metadataBackend, forceToBeMaster),
            connectionParamsMiddleware(this.userDatabaseService),
            timeoutLimitsMiddleware(this.metadataBackend),
            this.handleQuery.bind(this),
            errorMiddleware()
        ];
    };

    app.all(`${base_url}/sql`, queryMiddlewares());
    app.all(`${base_url}/sql.:f`, queryMiddlewares());
};
|
||||
|
||||
// jshint maxcomplexity:21
/**
 * Main SQL request handler. Flow:
 *   1. Extract and sanitize input (query params merged over body params).
 *   2. Get the list of tables affected by the query (for cache headers).
 *   3. Set content/cache headers and stream the formatted result.
 *   4. Report success/error to statsd.
 * Expects upstream middlewares to have populated res.locals with
 * user, userDbParams, authDbParams, userLimits and authorizationLevel.
 */
QueryController.prototype.handleQuery = function (req, res, next) {
    var self = this;
    // extract input
    var body = (req.body) ? req.body : {};
    // clone so don't modify req.params or req.body so oauth is not broken
    var params = _.extend({}, req.query, body);
    var sql = params.q;
    var limit = parseInt(params.rows_per_page);
    var offset = parseInt(params.page);
    var orderBy = params.order_by;
    var sortOrder = params.sort_order;
    var requestedFormat = params.format;
    // format may be repeated in the querystring; the last one wins
    var format = _.isArray(requestedFormat) ? _.last(requestedFormat) : requestedFormat;
    var requestedFilename = params.filename;
    var filename = requestedFilename;
    var requestedSkipfields = params.skipfields;

    const { user: username, userDbParams: dbopts, authDbParams, userLimits, authorizationLevel } = res.locals;

    var skipfields;
    var dp = params.dp; // decimal point digits (defaults to 6)
    var gn = "the_geom"; // TODO: read from configuration FILE

    // Track client disconnects so long-running work can be cancelled.
    req.aborted = false;
    req.on("close", function() {
        if (req.formatter && _.isFunction(req.formatter.cancel)) {
            req.formatter.cancel();
        }
        req.aborted = true; // TODO: there must be a builtin way to check this
    });

    // Throws a 499 (nginx's "client closed request") if the client went away.
    function checkAborted(step) {
        if ( req.aborted ) {
            var err = new Error("Request aborted during " + step);
            // We'll use status 499, same as ngnix in these cases
            // see http://en.wikipedia.org/wiki/List_of_HTTP_status_codes#4xx_Client_Error
            err.http_status = 499;
            throw err;
        }
    }

    try {
        // sanitize and apply defaults to input
        dp = (dp === "" || _.isUndefined(dp)) ? '6' : dp;
        format = (format === "" || _.isUndefined(format)) ? 'json' : format.toLowerCase();
        filename = (filename === "" || _.isUndefined(filename)) ? 'cartodb-query' : sanitize_filename(filename);
        sql = (sql === "" || _.isUndefined(sql)) ? null : sql;
        // parseInt yields NaN for missing/invalid values; normalize those to null
        limit = (!_.isNaN(limit)) ? limit : null;
        offset = (!_.isNaN(offset)) ? offset * limit : null;

        // Accept both comma-separated string or array of comma-separated strings
        if ( requestedSkipfields ) {
            if ( _.isString(requestedSkipfields) ) {
                skipfields = requestedSkipfields.split(',');
            } else if ( _.isArray(requestedSkipfields) ) {
                skipfields = [];
                _.each(requestedSkipfields, function(ele) {
                    skipfields = skipfields.concat(ele.split(','));
                });
            }
        } else {
            skipfields = [];
        }

        //if ( -1 === supportedFormats.indexOf(format) )
        if ( ! formats.hasOwnProperty(format) ) {
            throw new Error("Invalid format: " + format);
        }

        if (!_.isString(sql)) {
            throw new Error("You must indicate a sql query");
        }

        var formatter;

        if ( req.profiler ) {
            req.profiler.done('init');
        }

        // 1. Get the list of tables affected by the query
        // 2. Setup headers
        // 3. Send formatted results back
        // 4. Handle error
        step(
            function queryExplain() {
                var next = this;

                checkAborted('queryExplain');

                var pg = new PSQL(authDbParams);

                // master credentials bypass the affected-tables cache
                var skipCache = authorizationLevel === 'master';

                self.queryTables.getAffectedTablesFromQuery(pg, sql, skipCache, function(err, result) {
                    if (err) {
                        // Failure here is non-fatal: we only lose cache headers,
                        // so log and continue with an undefined result.
                        var errorMessage = (err && err.message) || 'unknown error';
                        console.error("Error on query explain '%s': %s", sql, errorMessage);
                    }
                    return next(null, result);
                });
            },
            function setHeaders(err, affectedTables) {
                if (err) {
                    throw err;
                }

                var mayWrite = queryMayWrite(sql);
                if ( req.profiler ) {
                    req.profiler.done('queryExplain');
                }

                checkAborted('setHeaders');
                // Deny access to system/internal tables for non-authorized levels.
                if(!pgEntitiesAccessValidator.validate(affectedTables, authorizationLevel)) {
                    const syntaxError = new SyntaxError("system tables are forbidden");
                    syntaxError.http_status = 403;
                    throw(syntaxError);
                }

                var FormatClass = formats[format];
                formatter = new FormatClass();
                req.formatter = formatter;

                // configure headers for given format
                var use_inline = !requestedFormat && !requestedFilename;
                res.header("Content-Disposition", getContentDisposition(formatter, filename, use_inline));
                res.header("Content-Type", formatter.getContentType());

                // set cache headers
                var cachePolicy = req.query.cache_policy;
                if (cachePolicy === 'persist') {
                    res.header('Cache-Control', 'public,max-age=' + ONE_YEAR_IN_SECONDS);
                } else {
                    // writes must not be cached; reads are revalidated
                    var maxAge = (mayWrite) ? 0 : ONE_YEAR_IN_SECONDS;
                    res.header('Cache-Control', 'no-cache,max-age='+maxAge+',must-revalidate,public');
                }

                // Only set an X-Cache-Channel for responses we want Varnish to cache.
                var skipNotUpdatedAtTables = true;
                if (!!affectedTables && affectedTables.getTables(skipNotUpdatedAtTables).length > 0 && !mayWrite) {
                    res.header('X-Cache-Channel', affectedTables.getCacheChannel(skipNotUpdatedAtTables));
                    res.header('Surrogate-Key', affectedTables.key(skipNotUpdatedAtTables).join(' '));
                }

                if(!!affectedTables) {
                    res.header('Last-Modified',
                        new Date(affectedTables.getLastUpdatedAt(Number(new Date()))).toUTCString());
                }

                return null;
            },
            function generateFormat(err){
                if (err) {
                    throw err;
                }
                checkAborted('generateFormat');

                // TODO: drop this, fix UI!
                // Rewrites the user SQL to apply order_by/sort_order and paging.
                sql = new PSQL.QueryWrapper(sql).orderBy(orderBy, sortOrder).window(limit, offset).query();

                var opts = {
                    username: username,
                    dbopts: dbopts,
                    sink: res,
                    gn: gn,
                    dp: dp,
                    skipfields: skipfields,
                    sql: sql,
                    filename: filename,
                    bufferedRows: global.settings.bufferedRows,
                    callback: params.callback,
                    abortChecker: checkAborted,
                    timeout: userLimits.timeout
                };

                if ( req.profiler ) {
                    opts.profiler = req.profiler;
                    opts.beforeSink = function() {
                        req.profiler.done('beforeSink');
                        res.header('X-SQLAPI-Profiler', req.profiler.toJSONString());
                    };
                }

                if (dbopts.host) {
                    res.header('X-Served-By-DB-Host', dbopts.host);
                }
                formatter.sendResponse(opts, this);
            },
            function errorHandle(err){
                formatter = null;

                if (err) {
                    next(err);
                }

                if ( req.profiler ) {
                    req.profiler.sendStats();
                }
                if (self.statsd_client) {
                    if ( err ) {
                        self.statsd_client.increment('sqlapi.query.error');
                    } else {
                        self.statsd_client.increment('sqlapi.query.success');
                    }
                }
            }
        );
    } catch (err) {
        // Synchronous failures (bad input, aborted request) land here.
        next(err);

        if (self.statsd_client) {
            self.statsd_client.increment('sqlapi.query.error');
        }
    }

};
|
||||
|
||||
module.exports = QueryController;
|
||||
18
app/controllers/version_controller.js
Normal file
18
app/controllers/version_controller.js
Normal file
@@ -0,0 +1,18 @@
|
||||
'use strict';

// Version payload served by /version; resolved once at module load.
var version = {
    cartodb_sql_api: require(__dirname + '/../../package.json').version
};

// Stateless controller exposing the API version endpoint.
function VersionController() {
}

// Registers GET <base_url>/version on the Express app.
VersionController.prototype.route = function (app) {
    app.get(global.settings.base_url + '/version', this.handleVersion.bind(this));
};

// Responds with the static version object as JSON.
VersionController.prototype.handleVersion = function (req, res) {
    res.send(version);
};

module.exports = VersionController;
|
||||
125
app/middlewares/authorization.js
Normal file
125
app/middlewares/authorization.js
Normal file
@@ -0,0 +1,125 @@
|
||||
'use strict';
|
||||
|
||||
const AuthApi = require('../auth/auth_api');
|
||||
const basicAuth = require('basic-auth');
|
||||
|
||||
module.exports = function authorization (metadataBackend, forceToBeMaster = false) {
|
||||
return function authorizationMiddleware (req, res, next) {
|
||||
const { user } = res.locals;
|
||||
const credentials = getCredentialsFromRequest(req);
|
||||
|
||||
if (!userMatches(credentials, user)) {
|
||||
if (req.profiler) {
|
||||
req.profiler.done('authorization');
|
||||
}
|
||||
|
||||
return next(new Error('permission denied'));
|
||||
}
|
||||
|
||||
res.locals.api_key = credentials.apiKeyToken;
|
||||
|
||||
const params = Object.assign({ metadataBackend }, res.locals, req.query, req.body);
|
||||
const authApi = new AuthApi(req, params);
|
||||
|
||||
authApi.verifyCredentials(function (err, authorizationLevel) {
|
||||
if (req.profiler) {
|
||||
req.profiler.done('authorization');
|
||||
}
|
||||
|
||||
if (err) {
|
||||
return next(err);
|
||||
}
|
||||
|
||||
res.locals.authorizationLevel = authorizationLevel;
|
||||
|
||||
if (forceToBeMaster && authorizationLevel !== 'master') {
|
||||
return next(new Error('permission denied'));
|
||||
}
|
||||
|
||||
res.set('vary', 'Authorization'); //Honor Authorization header when caching.
|
||||
|
||||
next();
|
||||
});
|
||||
};
|
||||
};
|
||||
|
||||
// Ordered credential sources: Authorization header wins over querystring,
// querystring wins over body.
const credentialsGetters = [
    getCredentialsFromHeaderAuthorization,
    getCredentialsFromRequestQueryString,
    getCredentialsFromRequestBody,
];

/**
 * Returns the first credentials object holding an api-key token, or the
 * last getter's result (false) when none of the sources provided one.
 */
function getCredentialsFromRequest (req) {
    let found = null;

    for (const extractCredentials of credentialsGetters) {
        found = extractCredentials(req);

        if (apiKeyTokenFound(found)) {
            return found;
        }
    }

    return found;
}
|
||||
|
||||
/**
 * Reads basic-auth credentials from the Authorization header.
 * Returns { apiKeyToken, user } or false when the header is absent/incomplete.
 */
function getCredentialsFromHeaderAuthorization(req) {
    const parsed = basicAuth(req) || {};

    if (parsed.pass === undefined || parsed.name === undefined) {
        return false;
    }

    return {
        apiKeyToken: parsed.pass,
        user: parsed.name
    };
}
|
||||
|
||||
/**
 * Reads the api key from the querystring. `api_key` takes precedence over
 * the legacy `map_key` parameter. Returns false when neither is present.
 */
function getCredentialsFromRequestQueryString(req) {
    const { api_key: apiKey, map_key: mapKey } = req.query;

    if (apiKey) {
        return { apiKeyToken: apiKey };
    }

    if (mapKey) {
        return { apiKeyToken: mapKey };
    }

    return false;
}
|
||||
|
||||
/**
 * Reads the api key from the request body. `api_key` takes precedence over
 * the legacy `map_key` field. Returns false when the body is missing or
 * carries neither field.
 */
function getCredentialsFromRequestBody(req) {
    const body = req.body;

    if (!body) {
        return false;
    }

    if (body.api_key) {
        return { apiKeyToken: body.api_key };
    }

    if (body.map_key) {
        return { apiKeyToken: body.map_key };
    }

    return false;
}
|
||||
|
||||
/**
 * Tells whether a credentials value (as produced by the credential getters)
 * carries an api-key token.
 *
 * Booleans pass through unchanged (getters return `false` on miss).
 * Defensive fix: the original dereferenced `credentials.apiKeyToken`
 * unconditionally and crashed on null/undefined input.
 */
function apiKeyTokenFound(credentials) {
    if (typeof credentials === 'boolean') {
        return credentials;
    }

    if (!credentials) {
        return false;
    }

    return credentials.apiKeyToken !== undefined;
}
|
||||
|
||||
/**
 * True when the credentials don't name a user, or name the same user as the
 * request. Only header (basic-auth) credentials carry a user.
 */
function userMatches (credentials, user) {
    const credentialsUser = credentials.user;
    return credentialsUser === undefined || credentialsUser === user;
}
|
||||
146
app/middlewares/body-parser.js
Normal file
146
app/middlewares/body-parser.js
Normal file
@@ -0,0 +1,146 @@
|
||||
'use strict';
|
||||
|
||||
/*!
|
||||
* Connect - bodyParser
|
||||
* Copyright(c) 2010 Sencha Inc.
|
||||
* Copyright(c) 2011 TJ Holowaychuk
|
||||
* MIT Licensed
|
||||
*/
|
||||
|
||||
/**
|
||||
* Module dependencies.
|
||||
*/
|
||||
|
||||
var qs = require('qs');
|
||||
var multer = require('multer');
|
||||
|
||||
/**
|
||||
* Extract the mime type from the given request's
|
||||
* _Content-Type_ header.
|
||||
*
|
||||
* @param {IncomingMessage} req
|
||||
* @return {String}
|
||||
* @api private
|
||||
*/
|
||||
|
||||
/**
 * Extract the mime type from the request's _Content-Type_ header,
 * stripping any parameters (e.g. "; charset=utf-8").
 *
 * @param {IncomingMessage} req
 * @return {String} mime type, or '' when no Content-Type header is present
 * @api private
 */
function mime(req) {
    const contentType = req.headers['content-type'] || '';
    const [type] = contentType.split(';');
    return type;
}
|
||||
|
||||
/**
|
||||
* Parse request bodies.
|
||||
*
|
||||
* By default _application/json_, _application/x-www-form-urlencoded_,
|
||||
* and _multipart/form-data_ are supported, however you may map `connect.bodyParser.parse[contentType]`
|
||||
* to a function receiving `(req, options, callback)`.
|
||||
*
|
||||
* Examples:
|
||||
*
|
||||
* connect.createServer(
|
||||
* connect.bodyParser()
|
||||
* , function(req, res) {
|
||||
* res.end('viewing user ' + req.body.user.name);
|
||||
* }
|
||||
* );
|
||||
*
|
||||
* $ curl -d 'user[name]=tj' http://localhost/
|
||||
* $ curl -d '{"user":{"name":"tj"}}' -H "Content-Type: application/json" http://localhost/
|
||||
*
|
||||
* Multipart req.files:
|
||||
*
|
||||
* As a security measure files are stored in a separate object, stored
|
||||
* as `req.files`. This prevents attacks that may potentially alter
|
||||
* filenames, and depending on the application gain access to restricted files.
|
||||
*
|
||||
* Multipart configuration:
|
||||
*
|
||||
* The `options` passed are provided to each parser function.
|
||||
* The _multipart/form-data_ parser merges these with formidable's
|
||||
* IncomingForm object, allowing you to tweak the upload directory,
|
||||
* size limits, etc. For example you may wish to retain the file extension
|
||||
* and change the upload directory:
|
||||
*
|
||||
* server.use(bodyParser({ uploadDir: '/www/mysite.com/uploads' }));
|
||||
*
|
||||
* View [node-formidable](https://github.com/felixge/node-formidable) for more information.
|
||||
*
|
||||
* If you wish to use formidable directly within your app, and do not
|
||||
* desire this behaviour for multipart requests simply remove the
|
||||
* parser:
|
||||
*
|
||||
* delete connect.bodyParser.parse['multipart/form-data'];
|
||||
*
|
||||
* Or
|
||||
*
|
||||
* delete express.bodyParser.parse['multipart/form-data'];
|
||||
*
|
||||
* @param {Object} options
|
||||
* @return {Function}
|
||||
* @api public
|
||||
*/
|
||||
|
||||
/**
 * Body-parsing middleware factory. Dispatches to the parser registered in
 * `exports.parse` for the request's mime type; requests with no matching
 * parser (or GET/HEAD requests) pass through with an empty body.
 *
 * @param {Object} options - forwarded verbatim to the chosen parser
 * @return {Function} Express-style middleware
 * @api public
 */
exports = module.exports = function bodyParser(options){
    const parserOptions = options || {};

    return function bodyParser(req, res, next) {
        // Already parsed upstream: nothing to do.
        if (req.body) {
            return next();
        }

        req.body = {};

        if (req.method === 'GET' || req.method === 'HEAD') {
            return next();
        }

        const parse = exports.parse[mime(req)];

        if (!parse) {
            return next();
        }

        parse(req, parserOptions, next);
    };
};
|
||||
|
||||
/**
|
||||
* Parsers.
|
||||
*/
|
||||
|
||||
exports.parse = {};
|
||||
|
||||
/**
|
||||
* Parse application/x-www-form-urlencoded.
|
||||
*/
|
||||
|
||||
/**
 * Parse application/x-www-form-urlencoded.
 * Buffers the whole request body, then decodes it with `qs`.
 */
exports.parse['application/x-www-form-urlencoded'] = function(req, options, fn){
    let raw = '';
    req.setEncoding('utf8');
    req.on('data', function(chunk){
        raw += chunk;
    });
    req.on('end', function(){
        try {
            req.body = raw.length ? qs.parse(raw) : {};
            fn();
        } catch (err){
            // Malformed payload: hand the error to the middleware chain.
            fn(err);
        }
    });
};
|
||||
|
||||
/**
|
||||
* Parse application/json.
|
||||
*/
|
||||
|
||||
/**
 * Parse application/json.
 * Buffers the whole request body, then decodes it with JSON.parse.
 */
exports.parse['application/json'] = function(req, options, fn){
    let raw = '';
    req.setEncoding('utf8');
    req.on('data', function(chunk){
        raw += chunk;
    });
    req.on('end', function(){
        try {
            req.body = raw.length ? JSON.parse(raw) : {};
            fn();
        } catch (err){
            // Invalid JSON: hand the error to the middleware chain.
            fn(err);
        }
    });
};
|
||||
|
||||
// multipart/form-data is handled by multer; fieldSize is unbounded because
// SQL payloads can be arbitrarily large. `.none()` accepts text fields only
// (no file uploads).
var multipartMiddleware = multer({ limits: { fieldSize: Infinity } });
exports.parse['multipart/form-data'] = multipartMiddleware.none();
|
||||
23
app/middlewares/connection-params.js
Normal file
23
app/middlewares/connection-params.js
Normal file
@@ -0,0 +1,23 @@
|
||||
'use strict';
|
||||
|
||||
module.exports = function connectionParams (userDatabaseService) {
|
||||
return function connectionParamsMiddleware (req, res, next) {
|
||||
const { user, api_key: apikeyToken, authorizationLevel } = res.locals;
|
||||
|
||||
userDatabaseService.getConnectionParams(user, apikeyToken, authorizationLevel,
|
||||
function (err, userDbParams, authDbParams) {
|
||||
if (req.profiler) {
|
||||
req.profiler.done('getConnectionParams');
|
||||
}
|
||||
|
||||
if (err) {
|
||||
return next(err);
|
||||
}
|
||||
|
||||
res.locals.userDbParams = userDbParams;
|
||||
res.locals.authDbParams = authDbParams;
|
||||
|
||||
next();
|
||||
});
|
||||
};
|
||||
};
|
||||
16
app/middlewares/cors.js
Normal file
16
app/middlewares/cors.js
Normal file
@@ -0,0 +1,16 @@
|
||||
'use strict';
|
||||
|
||||
module.exports = function cors(extraHeaders) {
|
||||
return function(req, res, next) {
|
||||
var baseHeaders = 'X-Requested-With, X-Prototype-Version, X-CSRF-Token, Authorization';
|
||||
|
||||
if(extraHeaders) {
|
||||
baseHeaders += ', ' + extraHeaders;
|
||||
}
|
||||
|
||||
res.header('Access-Control-Allow-Origin', '*');
|
||||
res.header('Access-Control-Allow-Headers', baseHeaders);
|
||||
|
||||
next();
|
||||
};
|
||||
};
|
||||
26
app/middlewares/db-quota.js
Normal file
26
app/middlewares/db-quota.js
Normal file
@@ -0,0 +1,26 @@
|
||||
'use strict';
|
||||
|
||||
const PSQL = require('cartodb-psql');
|
||||
|
||||
const remainingQuotaQuery = 'SELECT _CDB_UserQuotaInBytes() - CDB_UserDataSize(current_schema()) AS remaining_quota';
|
||||
|
||||
module.exports = function dbQuota () {
|
||||
return function dbQuotaMiddleware (req, res, next) {
|
||||
const { userDbParams } = res.locals;
|
||||
const pg = new PSQL(userDbParams);
|
||||
pg.connect((err, client, done) => {
|
||||
if (err) {
|
||||
return next(err);
|
||||
}
|
||||
client.query(remainingQuotaQuery, (err, result) => {
|
||||
if(err) {
|
||||
return next(err);
|
||||
}
|
||||
const remainingQuota = result.rows[0].remaining_quota;
|
||||
res.locals.dbRemainingQuota = remainingQuota;
|
||||
done();
|
||||
next();
|
||||
});
|
||||
});
|
||||
};
|
||||
};
|
||||
91
app/middlewares/error.js
Normal file
91
app/middlewares/error.js
Normal file
@@ -0,0 +1,91 @@
|
||||
'use strict';
|
||||
|
||||
const errorHandlerFactory = require('../services/error_handler_factory');
|
||||
const MAX_ERROR_STRING_LENGTH = 1024;
|
||||
|
||||
module.exports = function error() {
|
||||
return function errorMiddleware(err, req, res, next) {
|
||||
const errorHandler = errorHandlerFactory(err);
|
||||
let errorResponse = errorHandler.getResponse();
|
||||
|
||||
if (global.settings.environment === 'development') {
|
||||
errorResponse.stack = err.stack;
|
||||
}
|
||||
|
||||
if (global.settings.environment !== 'test') {
|
||||
// TODO: email this Exception report
|
||||
console.error("EXCEPTION REPORT: " + err.stack);
|
||||
}
|
||||
|
||||
// Force inline content disposition
|
||||
res.header("Content-Disposition", 'inline');
|
||||
|
||||
if (req && req.profiler) {
|
||||
req.profiler.done('finish');
|
||||
res.header('X-SQLAPI-Profiler', req.profiler.toJSONString());
|
||||
}
|
||||
|
||||
setErrorHeader(errorHandler, res);
|
||||
|
||||
res.header('Content-Type', 'application/json; charset=utf-8');
|
||||
res.status(getStatusError(errorHandler, req));
|
||||
if (req.query && req.query.callback) {
|
||||
res.jsonp(errorResponse);
|
||||
} else {
|
||||
res.json(errorResponse);
|
||||
}
|
||||
|
||||
if (req && req.profiler) {
|
||||
res.req.profiler.sendStats();
|
||||
}
|
||||
|
||||
next();
|
||||
};
|
||||
};
|
||||
|
||||
/**
 * Returns the HTTP status to use for an error response.
 * JSONP responses must report 200 so browsers still execute the callback.
 */
function getStatusError(errorHandler, req) {
    const isJsonp = Boolean(req && req.query && req.query.callback);
    return isJsonp ? 200 : errorHandler.http_status;
}
|
||||
|
||||
/**
 * Attaches a sanitized, log-friendly summary of the error to the
 * X-SQLAPI-Errors response header.
 */
function setErrorHeader(errorHandler, res) {
    const { context, detail, hint, http_status: statusCode, message } = errorHandler;

    res.set('X-SQLAPI-Errors', stringifyForLogs({
        context,
        detail,
        hint,
        statusCode,
        message
    }));
}
|
||||
|
||||
/**
|
||||
* Remove problematic nested characters
|
||||
* from object for logs RegEx
|
||||
*
|
||||
* @param {Object} object
|
||||
*/
|
||||
/**
 * Remove problematic nested characters from object for logs RegEx.
 * Mutates `object` in place (truncating and sanitizing every string leaf),
 * then returns its JSON serialization.
 *
 * Fixes over the original:
 *  - the `instanceof Array` branch was unreachable (arrays satisfy
 *    `typeof === 'object'` first); arrays are handled by the recursive
 *    object branch, whose numeric keys reach each element.
 *  - null values no longer crash `Object.keys` in the recursive call.
 *  - side-effect iteration uses forEach instead of map.
 *
 * @param {Object} object
 */
function stringifyForLogs(object) {
    Object.keys(object).forEach(key => {
        const value = object[key];

        if (typeof value === 'string') {
            object[key] = value
                .substring(0, MAX_ERROR_STRING_LENGTH)
                .replace(/[^a-zA-Z0-9]/g, ' ');
        } else if (value !== null && typeof value === 'object') {
            // Arrays take this branch too: their string elements get
            // sanitized via their numeric keys.
            stringifyForLogs(value);
        }
    });

    return JSON.stringify(object);
}
|
||||
24
app/middlewares/profiler.js
Normal file
24
app/middlewares/profiler.js
Normal file
@@ -0,0 +1,24 @@
|
||||
'use strict';
|
||||
|
||||
/**
 * Starts a profiler span named `sqlapi.<label>` when the request carries a
 * profiler; otherwise a pass-through.
 */
module.exports.initializeProfilerMiddleware = function initializeProfiler (label) {
    return function initializeProfilerMiddleware (req, res, next) {
        if (!req.profiler) {
            return next();
        }

        req.profiler.start(`sqlapi.${label}`);
        next();
    };
};
|
||||
|
||||
/**
 * Ends the request's profiler span, flushes its stats and exposes the
 * profiling report via the X-SQLAPI-Profiler header. Pass-through when no
 * profiler is attached.
 */
module.exports.finishProfilerMiddleware = function finishProfiler () {
    return function finishProfilerMiddleware (req, res, next) {
        if (!req.profiler) {
            return next();
        }

        req.profiler.end();
        req.profiler.sendStats();
        res.header('X-SQLAPI-Profiler', req.profiler.toJSONString());

        next();
    };
};
|
||||
61
app/middlewares/rate-limit.js
Normal file
61
app/middlewares/rate-limit.js
Normal file
@@ -0,0 +1,61 @@
|
||||
'use strict';
|
||||
|
||||
// Endpoint groups used as keys into global.settings.ratelimits.endpoints;
// each group can be rate-limited independently.
const RATE_LIMIT_ENDPOINTS_GROUPS = {
    QUERY: 'query',
    JOB_CREATE: 'job_create',
    JOB_GET: 'job_get',
    JOB_DELETE: 'job_delete',
    COPY_FROM: 'copy_from',
    COPY_TO: 'copy_to'
};
|
||||
|
||||
|
||||
/**
 * Rate-limiting middleware factory for a given endpoint group.
 * When rate limits are disabled (globally or for this group) the returned
 * middleware is a no-op. Otherwise it consults the user-limits service,
 * exposes the limit state via Carto-Rate-Limit-* headers and fails with a
 * 429 error when the user is blocked.
 */
function rateLimit(userLimits, endpointGroup = null) {
    if (!isRateLimitEnabled(endpointGroup)) {
        return function rateLimitDisabledMiddleware(req, res, next) { next(); };
    }

    return function rateLimitMiddleware(req, res, next) {
        userLimits.getRateLimit(res.locals.user, endpointGroup, function(err, userRateLimit) {
            if (err) {
                return next(err);
            }

            if (!userRateLimit) {
                return next();
            }

            const [isBlocked, limit, remaining, retry, reset] = userRateLimit;

            res.set({
                'Carto-Rate-Limit-Limit': limit,
                'Carto-Rate-Limit-Remaining': remaining,
                'Carto-Rate-Limit-Reset': reset
            });

            if (!isBlocked) {
                return next();
            }

            // retry is floor rounded in seconds by redis-cell
            res.set('Retry-After', retry + 1);

            const rateLimitError = new Error(
                'You are over platform\'s limits. Please contact us to know more details'
            );
            rateLimitError.http_status = 429;
            rateLimitError.context = 'limit';
            rateLimitError.detail = 'rate-limit';

            return next(rateLimitError);
        });
    };
}
|
||||
|
||||
/**
 * True when rate limiting is globally enabled AND configured for the given
 * endpoint group (truthiness chain, matching the settings shape).
 */
function isRateLimitEnabled(endpointGroup) {
    const { rateLimitsEnabled, endpoints } = global.settings.ratelimits;
    return rateLimitsEnabled && endpointGroup && endpoints[endpointGroup];
}
|
||||
|
||||
module.exports = rateLimit;
|
||||
module.exports.RATE_LIMIT_ENDPOINTS_GROUPS = RATE_LIMIT_ENDPOINTS_GROUPS;
|
||||
25
app/middlewares/timeout-limits.js
Normal file
25
app/middlewares/timeout-limits.js
Normal file
@@ -0,0 +1,25 @@
|
||||
'use strict';
|
||||
|
||||
module.exports = function timeoutLimits (metadataBackend) {
|
||||
return function timeoutLimitsMiddleware (req, res, next) {
|
||||
const { user, authorizationLevel } = res.locals;
|
||||
|
||||
metadataBackend.getUserTimeoutRenderLimits(user, function (err, timeoutRenderLimit) {
|
||||
if (req.profiler) {
|
||||
req.profiler.done('getUserTimeoutLimits');
|
||||
}
|
||||
|
||||
if (err) {
|
||||
return next(err);
|
||||
}
|
||||
|
||||
const userLimits = {
|
||||
timeout: (authorizationLevel === 'master') ? timeoutRenderLimit.render : timeoutRenderLimit.renderPublic
|
||||
};
|
||||
|
||||
res.locals.userLimits = userLimits;
|
||||
|
||||
next();
|
||||
});
|
||||
};
|
||||
};
|
||||
38
app/middlewares/user.js
Normal file
38
app/middlewares/user.js
Normal file
@@ -0,0 +1,38 @@
|
||||
'use strict';
|
||||
|
||||
const CdbRequest = require('../models/cartodb_request');
|
||||
|
||||
module.exports = function user(metadataBackend) {
|
||||
const cdbRequest = new CdbRequest();
|
||||
|
||||
return function userMiddleware (req, res, next) {
|
||||
res.locals.user = getUserNameFromRequest(req, cdbRequest);
|
||||
|
||||
checkUserExists(metadataBackend, res.locals.user, function(err, userExists) {
|
||||
if (err || !userExists) {
|
||||
const error = new Error('Unauthorized');
|
||||
error.type = 'auth';
|
||||
error.subtype = 'user-not-found';
|
||||
error.http_status = 404;
|
||||
error.message = errorUserNotFoundMessageTemplate(res.locals.user);
|
||||
next(error);
|
||||
}
|
||||
|
||||
return next();
|
||||
});
|
||||
};
|
||||
};
|
||||
|
||||
// Thin indirection over CdbRequest#userByReq (route param or Host header).
function getUserNameFromRequest(req, cdbRequest) {
    return cdbRequest.userByReq(req);
}
|
||||
|
||||
/**
 * A user "exists" iff the metadata id lookup succeeds; the id itself is
 * discarded. Calls back with (err, exists).
 */
function checkUserExists(metadataBackend, userName, callback) {
    metadataBackend.getUserId(userName, (err) => callback(err, !err));
}
|
||||
|
||||
// User-facing message for the 404 "user not found" error.
function errorUserNotFoundMessageTemplate(user) {
    const message = "Sorry, we can't find CARTO user '" + user +
        "'. Please check that you have entered the correct domain.";
    return message;
}
|
||||
166
app/models/bin_encoder.js
Normal file
166
app/models/bin_encoder.js
Normal file
@@ -0,0 +1,166 @@
|
||||
'use strict';
|
||||
|
||||
/**
 * Binary serializer: packs `data` (an array of values matching `type`) into
 * a Buffer with an 8-byte big-endian header (type tag + payload size).
 * With options.delta set, consecutive differences are stored instead of
 * absolute values.
 *
 * Fixes over the original:
 *  - Buffer.alloc replaces the deprecated `new Buffer(n)`, which could
 *    expose uninitialized memory.
 *  - throws a real Error instead of a bare string (stack trace,
 *    `instanceof Error`).
 */
function ArrayBufferSer(type, data, options) {
    if (type === undefined) {
        throw new Error("ArrayBufferSer should be created with a type");
    }
    this.options = options || {};
    this._initFunctions();
    this.headerSize = 8;
    this.data = data;
    // clamp unknown tags to the highest known type (BUFFER)
    this.type = type = Math.min(type, ArrayBufferSer.BUFFER);
    var size = this._sizeFor(this.headerSize, data);
    this.buffer = Buffer.alloc(this.headerSize + size);
    this.buffer.writeUInt32BE(type, 0); // this could be one byte but for byte padding is better to be 4 bytes
    this.buffer.writeUInt32BE(size, 4);
    this.offset = this.headerSize;

    // name of the per-element writer method for this type
    var w = this.writeFn[type];

    var i;
    if (!this.options.delta) {
        for (i = 0; i < data.length; ++i) {
            this[w](data[i]);
        }
    } else {
        // delta encoding: first value absolute, then successive differences
        this[w](data[0]);
        for (i = 1; i < data.length; ++i) {
            this[w](data[i] - data[i - 1]);
        }
    }
}
|
||||
|
||||
//
// constants
//
// Type tags written into the serialized buffer header (4 bytes, big-endian).
ArrayBufferSer.INT8 = 1;
ArrayBufferSer.UINT8 = 2;
ArrayBufferSer.UINT8_CLAMP = 3;
ArrayBufferSer.INT16 = 4;
ArrayBufferSer.UINT16 = 5;
ArrayBufferSer.INT32 = 6;
ArrayBufferSer.UINT32 = 7;
ArrayBufferSer.FLOAT32 = 8;
//ArrayBufferSer.FLOAT64 = 9; not supported
ArrayBufferSer.STRING = 10;
ArrayBufferSer.BUFFER = 11;

// Widest fixed-size scalar; used to align variable-length payloads.
ArrayBufferSer.MAX_PADDING = ArrayBufferSer.INT32;


// Maps user-facing type names to the numeric tags above.
ArrayBufferSer.typeNames = {
    'int8': ArrayBufferSer.INT8,
    'uint8': ArrayBufferSer.UINT8,
    'uintclamp': ArrayBufferSer.UINT8_CLAMP,
    'int16': ArrayBufferSer.INT16,
    'uint16': ArrayBufferSer.UINT16,
    'int32': ArrayBufferSer.INT32,
    'uint32': ArrayBufferSer.UINT32,
    'float32': ArrayBufferSer.FLOAT32,
    'string': ArrayBufferSer.STRING,
    'buffer': ArrayBufferSer.BUFFER
};
|
||||
|
||||
ArrayBufferSer.prototype = {

    // Byte width per type tag, indexed by the numeric constants above.
    // 0 not used
    sizes: [NaN, 1, 1, 1, 2, 2, 4, 4, 4, 8],

    // Bytes needed to align `off` to the width of `type` (0 for
    // variable-length types, which have no entry in `sizes`).
    _paddingFor: function(off, type) {
        var s = this.sizes[type];
        if(s) {
            var r = off % s;
            return r === 0 ? 0 : s - r;
        }
        return 0;
    },

    // Serialized payload size (in bytes) of collection `t` when written
    // starting at `offset`. Fixed-width types are element-count * width;
    // strings and nested buffers include alignment padding and sub-headers.
    _sizeFor: function(offset, t) {
        var self = this;
        var s = this.sizes[this.type];
        if(s) {
            return s*t.length;
        }
        s = 0;
        if(this.type === ArrayBufferSer.STRING) {
            // calculate size with padding
            t.forEach(function(arr) {
                var pad = self._paddingFor(offset, ArrayBufferSer.MAX_PADDING);
                s += pad;
                offset += pad;
                // sub-buffer header + 2 bytes per UTF-16 code unit
                var len = (self.headerSize + arr.length*2);
                s += len;
                offset += len;
            });
        } else {
            // BUFFER type: elements are ArrayBufferSer instances
            t.forEach(function(arr) {
                var pad = self._paddingFor(offset, ArrayBufferSer.MAX_PADDING);
                s += pad;
                offset += pad;
                s += arr.getSize();
                offset += arr.getSize();
            });
        }
        return s;
    },

    // Payload size only (no header).
    getDataSize: function() {
        return this._sizeFor(0, this.data);
    },

    // Total serialized size including the 8-byte header.
    getSize: function() {
        return this.headerSize + this._sizeFor(this.headerSize, this.data);
    },

    // Writer method name per type tag (same index scheme as `sizes`).
    // NOTE(review): index 6 (INT32) dispatches to 'writeUInt32LE' — looks
    // like it should be 'writeInt32LE'; confirm before relying on signed
    // 32-bit serialization. 'writteBuffer' (sic) is the actual method name
    // below; renaming either side would break this string-based dispatch.
    writeFn: [
        '',
        'writeInt8',
        'writeUInt8',
        'writeUInt8Clamp',
        'writeInt16LE',
        'writeUInt16LE',
        'writeUInt32LE',
        'writeUInt32LE',
        'writeFloatLE',
        'writeDoubleLE',
        'writeString',
        'writteBuffer'
    ],

    // For every writer name not defined on this object, installs an instance
    // method that forwards to the same-named Buffer method and advances
    // `offset` by the element width.
    _initFunctions: function() {
        var self = this;
        this.writeFn.forEach(function(fn) {
            if(self[fn] === undefined) {
                self[fn] = function(d) {
                    self.buffer[fn](d, self.offset);
                    self.offset += self.sizes[self.type];
                };
            }
        });
    },

    // UINT8_CLAMP: clamps to 255 before writing one byte.
    writeUInt8Clamp: function(c) {
        this.buffer.writeUInt8(Math.min(255, c), this.offset);
        this.offset += 1;
    },

    // STRING: serialized as a nested UINT16 buffer of char codes.
    writeString: function(s) {
        var arr = [];
        for(var i = 0, len = s.length; i < len; ++i) {
            arr.push(s.charCodeAt(i));
        }
        var str = new ArrayBufferSer(ArrayBufferSer.UINT16, arr);
        this.writteBuffer(str);
    },

    // BUFFER: aligns, then copies the nested serializer's bytes (header
    // included) into this buffer. (sic: "writteBuffer" — see writeFn note.)
    writteBuffer: function(b) {
        this.offset += this._paddingFor(this.offset, ArrayBufferSer.MAX_PADDING);
        // copy header
        b.buffer.copy(this.buffer, this.offset);
        this.offset += b.buffer.length;
    }

};
|
||||
|
||||
|
||||
module.exports = ArrayBufferSer;
|
||||
41
app/models/cartodb_request.js
Normal file
41
app/models/cartodb_request.js
Normal file
@@ -0,0 +1,41 @@
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
* this module provides cartodb-specific interpretation
|
||||
* of request headers
|
||||
*/
|
||||
|
||||
// Stateless constructor; all behavior lives on the prototype.
function CartodbRequest() {
}
|
||||
|
||||
module.exports = CartodbRequest;
|
||||
|
||||
/**
|
||||
* If the request contains the user use it, if not guess from the host
|
||||
*/
|
||||
/**
 * If the request contains the user use it, if not guess from the host
 */
CartodbRequest.prototype.userByReq = function(req) {
    return req.params.user ? req.params.user : userByHostName(req.headers.host);
};
||||
|
||||
// Pattern used to pull the username out of the Host header; configurable
// via the user_from_host setting.
var re_userFromHost = new RegExp(
    global.settings.user_from_host || '^([^\\.]+)\\.' // would extract "strk" from "strk.cartodb.com"
);
|
||||
|
||||
/**
 * Extracts the username from a hostname using re_userFromHost.
 * Returns undefined (after logging) when the pattern doesn't match or
 * doesn't capture exactly one group.
 */
function userByHostName(host) {
    const match = host.match(re_userFromHost);

    if (!match) {
        console.error("ERROR: user pattern '" + re_userFromHost + "' does not match hostname '" + host + "'");
        return;
    }

    if (match.length !== 2) {
        console.error(
            "ERROR: pattern '" + re_userFromHost + "' gave unexpected matches against '" + host + "': " + match
        );
        return;
    }

    return match[1];
}
|
||||
18
app/models/formats/README
Normal file
18
app/models/formats/README
Normal file
@@ -0,0 +1,18 @@
|
||||
Format classes are required to expose a constructor with no arguments,
|
||||
a getFileExtension() and a sendResponse(opts, callback) method.
|
||||
|
||||
The ``opts`` parameter contains:
|
||||
|
||||
sink Output stream to send the response to
|
||||
sql SQL query requested by the user
|
||||
skipfields Comma-separated list of fields to skip from output
|
||||
really only needed with "SELECT *" queries
|
||||
gn Name of the geometry column (for formats requiring one)
|
||||
dp Number of decimal points of precision for geometries (if used)
|
||||
database Name of the database to connect to
|
||||
user_id Identifier of the user
|
||||
filename Name to use for attachment disposition
|
||||
|
||||
The ``callback`` parameter is a function that is invoked when the
|
||||
format object finished with sending the result to the sink.
|
||||
If an error occurs the callback is invoked with an Error argument.
|
||||
22
app/models/formats/index.js
Normal file
22
app/models/formats/index.js
Normal file
@@ -0,0 +1,22 @@
|
||||
'use strict';
|
||||
|
||||
var fs = require("fs");
|
||||
var formats = {};
|
||||
|
||||
// List the absolute paths of every format module shipped under `dir`.
function formatFilesWithPath(dir) {
    var formatDir = __dirname + '/' + dir;
    var entries = fs.readdirSync(formatDir);
    return entries.map(function(entry) {
        return formatDir + '/' + entry;
    });
}
|
||||
|
||||
// Collect every format implementation: OGR-backed file formats plus
// Postgres row-streaming formats.
var formatFilesPaths = formatFilesWithPath('ogr').concat(formatFilesWithPath('pg'));

// Register each format class under its prototype id (e.g. "csv", "json").
formatFilesPaths.forEach(function(formatFilePath) {
    var FormatClass = require(formatFilePath);
    formats[FormatClass.prototype.id] = FormatClass;
});

module.exports = formats;
|
||||
346
app/models/formats/ogr.js
Normal file
346
app/models/formats/ogr.js
Normal file
@@ -0,0 +1,346 @@
|
||||
'use strict';
|
||||
|
||||
var crypto = require('crypto');
|
||||
var step = require('step');
|
||||
var fs = require('fs');
|
||||
var _ = require('underscore');
|
||||
var PSQL = require('cartodb-psql');
|
||||
var spawn = require('child_process').spawn;
|
||||
|
||||
// Keeps track of what's waiting baking for export
var bakingExports = {};

/**
 * Base class for all OGR-driven file export formats (csv, shp, kml, ...).
 * Subclasses set _contentType / _fileExtension and optionally _needSRS.
 */
function OgrFormat(id) {
    this.id = id;
}

OgrFormat.prototype = {

    id: "ogr",

    is_file: true,

    // File formats never run the user's query through the generic
    // row-streaming pipeline; ogr2ogr executes it itself.
    getQuery: function(/*sql, options*/) {
        return null; // dont execute the query
    },

    transform: function(/*result, options, callback*/) {
        throw "should not be called for file formats";
    },

    getContentType: function() {
        return this._contentType;
    },

    getFileExtension: function() {
        return this._fileExtension;
    },

    // Deduplication key for an export request: identical concurrent
    // requests share one ogr2ogr run (see sendResponse).
    getKey: function(options) {
        var keyParts = [
            this.id,
            options.dbopts.dbname,
            options.dbopts.user,
            options.gn,
            this.generateMD5(options.filename),
            this.generateMD5(options.sql)
        ];
        return keyParts.concat(options.skipfields).join(':');
    },

    // Hex MD5 digest of `data`; used to keep file/cache keys short.
    generateMD5: function(data) {
        return crypto.createHash('md5').update(data).digest('hex');
    }

};
|
||||
|
||||
// Internal function usable by all OGR-driven outputs
|
||||
/**
 * Run the user's SQL through ogr2ogr and write the result to out_filename.
 *
 * Steps: (1) run the query with LIMIT 0 to discover the columns,
 * (2) if the format needs an explicit SRS (this._needSRS), probe the first
 * non-null geometry for its SRID and type, (3) spawn ogr2ogr reading from
 * Postgres, with an optional kill-on-timeout, (4) invoke callback(err,
 * out_filename).
 *
 * @param options  { sql, skipfields, filename, dbopts, timeout, cmd_params? }
 * @param out_format   OGR driver name (e.g. 'CSV', 'KML', 'ESRI Shapefile')
 * @param out_filename target path passed to ogr2ogr
 * @param callback     function(err, out_filename)
 */
OgrFormat.prototype.toOGR = function(options, out_format, out_filename, callback) {

    //var gcol = options.gn;
    var sql = options.sql;
    var skipfields = options.skipfields;
    var out_layername = options.filename;

    var dbopts = options.dbopts;

    var ogr2ogr = global.settings.ogr2ogrCommand || 'ogr2ogr';
    var dbhost = dbopts.host;
    var dbport = dbopts.port;
    var dbuser = dbopts.user;
    var dbpass = dbopts.pass;
    var dbname = dbopts.dbname;

    var timeout = options.timeout;

    var that = this;

    var columns = [];
    var geocol;   // first geometry column found (preferring "the_geom")
    var pg;
    // Drop ending semicolon (ogr doens't like it)
    sql = sql.replace(/;\s*$/, '');

    // Comparator that sorts "the_geom" to the front so the SRID probe
    // below picks it up first.
    const theGeomFirst = (fieldA, fieldB) => {
        if (fieldA.name === 'the_geom') {
            return -1;
        }
        if (fieldB.name === 'the_geom') {
            return 1;
        }
        return 0;
    };

    step (

        // Run the query with LIMIT 0 just to obtain field metadata.
        function fetchColumns() {
            var colsql = 'SELECT * FROM (' + sql + ') as _cartodbsqlapi LIMIT 0';
            pg = new PSQL(dbopts);
            pg.query(colsql, this);
        },
        // Build the quoted column list and, when the format requires it,
        // query the SRID / geometry type of the first geometry column.
        function findSRS(err, result) {
            if (err) {
                throw err;
            }

            var needSRS = that._needSRS;

            columns = result.fields
                // skip columns
                .filter(field => skipfields.indexOf(field.name) === -1)
                // put "the_geom" first (if exists)
                .sort(theGeomFirst)
                // get first geometry to calculate SRID ("the_geom" if exists)
                .map(field => {
                    if (needSRS && !geocol && pg.typeName(field.dataTypeID) === 'geometry') {
                        geocol = field.name;
                    }

                    return field;
                })
                // apply quotes to columns
                // (CSV additionally casts every column to text)
                .map(field => out_format === 'CSV' ? pg.quoteIdentifier(field.name)+'::text' : pg.quoteIdentifier(field.name));

            // No SRS needed or no geometry column: skip the probe.
            if ( ! needSRS || ! geocol ) {
                return null;
            }

            var next = this;

            var qgeocol = pg.quoteIdentifier(geocol);
            var sridsql = 'SELECT ST_Srid(' + qgeocol + ') as srid, GeometryType(' +
                qgeocol + ') as type FROM (' + sql + ') as _cartodbsqlapi WHERE ' +
                qgeocol + ' is not null limit 1';

            pg.query(sridsql, function(err, result) {
                if ( err ) { next(err); return; }
                if ( result.rows.length ) {
                    var srid = result.rows[0].srid;
                    var type = result.rows[0].type;
                    next(null, srid, type);
                } else {
                    // continue as srid and geom type are not critical when there are no results
                    next(null);
                }
            });
        },
        // Spawn ogr2ogr reading from Postgres; optionally enforce a timeout
        // by killing the child process.
        function spawnDumper(err, srid, type) {
            if (err) {
                throw err;
            }

            var next = this;

            var ogrsql = 'SELECT ' + columns.join(',') + ' FROM (' + sql + ') as _cartodbsqlapi';

            var ogrargs = [
                '-f', out_format,
                '-lco', 'RESIZE=YES',
                '-lco', 'ENCODING=UTF-8',
                '-lco', 'LINEFORMAT=CRLF',
                out_filename,
                "PG:host=" + dbhost + " port=" + dbport + " user=" + dbuser + " dbname=" + dbname + " password=" + dbpass,
                '-sql', ogrsql
            ];

            if ( srid ) {
                ogrargs.push('-a_srs', 'EPSG:'+srid);
            }

            if ( type ) {
                ogrargs.push('-nlt', type);
            }

            // Format-specific extra arguments (e.g. SpatiaLite's dsco).
            if (options.cmd_params){
                ogrargs = ogrargs.concat(options.cmd_params);
            }

            ogrargs.push('-nln', out_layername);

            // TODO: research if `exec` could fit better than `spawn`
            var child = spawn(ogr2ogr, ogrargs);

            // Timeout handling: mark and kill; the 'exit' handler then
            // reports 'statement timeout' instead of the exit code.
            var timedOut = false;
            var ogrTimeout;
            if (timeout > 0) {
                ogrTimeout = setTimeout(function () {
                    timedOut = true;
                    child.kill();
                }, timeout);
            }

            child.on('error', function (err) {
                clearTimeout(ogrTimeout);
                next(err);
            });

            // Collect stderr so a non-zero exit can carry a useful message.
            var stderrData = [];
            child.stderr.setEncoding('utf8');
            child.stderr.on('data', function (data) {
                stderrData.push(data);
            });

            child.on('exit', function(code) {
                clearTimeout(ogrTimeout);

                if (timedOut) {
                    return next(new Error('statement timeout'));
                }

                if (code !== 0) {
                    var errMessage = 'ogr2ogr command return code ' + code;
                    if (stderrData.length > 0) {
                        errMessage += ', Error: ' + stderrData.join('\n');
                    }

                    return next(new Error(errMessage));
                }

                return next();
            });

        },
        function finish(err) {
            callback(err, out_filename);
        }
    );
};
|
||||
|
||||
/**
 * Export to a single file in the temporary directory, with a name derived
 * from a per-request key, then delegate to toOGR().
 *
 * @param options  export options (see toOGR); dbopts/gcol/skipfields feed the key
 * @param fmt      OGR driver name
 * @param callback function(err, dumpfile)
 */
OgrFormat.prototype.toOGR_SingleFile = function(options, fmt, callback) {

    var dbname = options.dbopts.dbname;
    var user_id = options.dbopts.user;
    // NOTE(review): reads options.gcol, but other call sites (getKey, shp)
    // use options.gn — gcol may be undefined here; confirm against callers.
    var gcol = options.gcol;
    var sql = options.sql;
    var skipfields = options.skipfields;
    var ext = this._fileExtension;
    var layername = options.filename;

    var tmpdir = global.settings.tmpDir || '/tmp';
    // Request key mirrors getKey(): format, db, user, geometry column,
    // hashed filename and hashed SQL, plus the skipped fields.
    var reqKey = [
        fmt,
        dbname,
        user_id,
        gcol,
        this.generateMD5(layername),
        this.generateMD5(sql)
    ].concat(skipfields).join(':');
    var outdirpath = tmpdir + '/sqlapi-' + process.pid + '-' + reqKey;
    // NOTE(review): ':' (not '/') joins the prefix and the file name, so
    // this is a flat file whose name contains colons — confirm intended.
    var dumpfile = outdirpath + ':cartodb-query.' + ext;

    // TODO: following tests:
    // - fetch query with no "the_geom" column
    this.toOGR(options, fmt, dumpfile, callback);
};
|
||||
|
||||
/**
 * Serve an export to the client, deduplicating concurrent identical
 * requests: the first request for a key triggers generate(); any request
 * arriving while that export is "baking" is queued on the same entry and
 * served from the same dump file. The file is unlinked once all queued
 * requests have been served.
 *
 * @param opts     { sink, beforeSink, profiler?, ... } plus getKey() inputs
 * @param callback invoked (per request) when its response is finished
 */
OgrFormat.prototype.sendResponse = function(opts, callback) {
    //var next = callback;
    var reqKey = this.getKey(opts);
    var qElem = new ExportRequest(opts.sink, callback, opts.beforeSink);
    var baking = bakingExports[reqKey];
    if ( baking ) {
        // An identical export is already running: just queue this client.
        baking.req.push( qElem );
    } else {
        baking = bakingExports[reqKey] = { req: [ qElem ] };
        this.generate(opts, function(err, dumpfile) {
            if ( opts.profiler ) {
                opts.profiler.done('generate');
            }
            step (
                function sendResults() {
                    // Stream the dump file to each queued client in turn.
                    var nextPipe = function(finish) {
                        var r = baking.req.shift();
                        if ( ! r ) { finish(null); return; }
                        r.sendFile(err, dumpfile, function() {
                            nextPipe(finish);
                        });
                    };

                    if ( ! err ) {
                        nextPipe(this);
                    } else {
                        // Generation failed: report the error to every
                        // queued client instead of streaming.
                        _.each(baking.req, function(r) {
                            r.cb(err);
                        });
                        return true;
                    }
                },
                function cleanup(/*err*/) {
                    delete bakingExports[reqKey];

                    // unlink dump file (sync to avoid race condition)
                    console.log("removing", dumpfile);
                    try { fs.unlinkSync(dumpfile); }
                    catch (e) {
                        if ( e.code !== 'ENOENT' ) {
                            console.log("Could not unlink dumpfile " + dumpfile + ": " + e);
                        }
                    }
                }
            );
        });
    }
};
|
||||
|
||||
// TODO: put in an ExportRequest.js ----- {
|
||||
|
||||
/**
 * One pending export download: remembers the client response stream and
 * completion callback, and flags cancellation (destroying any in-flight
 * read stream) when the client closes the connection early.
 */
function ExportRequest(ostream, callback, beforeSink) {
    this.cb = callback;
    this.beforeSink = beforeSink;
    this.ostream = ostream;
    this.istream = null;
    this.canceled = false;

    var self = this;

    this.ostream.on('close', function() {
        self.canceled = true;
        if (self.istream) {
            self.istream.destroy();
        }
    });
}
|
||||
|
||||
/**
 * Stream `filename` to the client response stream, unless this request
 * was already canceled (client disconnected), in which case nothing is
 * streamed. Always invokes this.cb() synchronously before the stream
 * completes, and `callback` once piping has started (or failed/been
 * skipped) so the caller can proceed to the next queued request.
 *
 * @param err      unused here; present for call-site symmetry
 * @param filename dump file to stream
 * @param callback function() — continuation for the export queue
 */
ExportRequest.prototype.sendFile = function (err, filename, callback) {
    var that = this;
    if ( ! this.canceled ) {
        //console.log("Creating readable stream out of dumpfile")
        this.istream = fs.createReadStream(filename)
            .on('open', function(/*fd*/) {
                if ( that.beforeSink ) {
                    that.beforeSink();
                }
                that.istream.pipe(that.ostream);
                callback();
            })
            .on('error', function(e) {
                // Could not read the dump file: terminate the response.
                console.log("Can't send response: " + e);
                that.ostream.end();
                callback();
            });
    } else {
        //console.log("Response was canceled, not streaming the file")
        callback();
    }
    // NOTE(review): cb() fires before the pipe finishes — confirm callers
    // do not rely on it signaling end-of-response.
    this.cb();
};
|
||||
|
||||
//------ }
|
||||
|
||||
module.exports = OgrFormat;
|
||||
16
app/models/formats/ogr/csv.js
Normal file
16
app/models/formats/ogr/csv.js
Normal file
@@ -0,0 +1,16 @@
|
||||
'use strict';
|
||||
|
||||
var ogr = require('./../ogr');
|
||||
|
||||
// CSV export format: thin subclass of the shared OGR pipeline using the
// ogr2ogr "CSV" driver.
function CsvFormat() {}

CsvFormat.prototype = new ogr('csv');

CsvFormat.prototype._contentType = "text/csv; charset=utf-8; header=present";
CsvFormat.prototype._fileExtension = "csv";

// Generate a single CSV dump file; callback(err, dumpfile).
CsvFormat.prototype.generate = function(options, callback) {
    this.toOGR_SingleFile(options, 'CSV', callback);
};

module.exports = CsvFormat;
|
||||
25
app/models/formats/ogr/geopackage.js
Normal file
25
app/models/formats/ogr/geopackage.js
Normal file
@@ -0,0 +1,25 @@
|
||||
'use strict';
|
||||
|
||||
var ogr = require('./../ogr');
|
||||
|
||||
// GeoPackage export format: OGR pipeline with the "GPKG" driver.
function GeoPackageFormat() {}

GeoPackageFormat.prototype = new ogr('gpkg');

GeoPackageFormat.prototype._contentType = "application/x-sqlite3; charset=utf-8";
GeoPackageFormat.prototype._fileExtension = "gpkg";
// As of GDAL 1.10.1 SRID detection is bogus, so we use
// our own method. See:
// http://trac.osgeo.org/gdal/ticket/5131
// http://trac.osgeo.org/gdal/ticket/5287
// http://github.com/CartoDB/CartoDB-SQL-API/issues/110
// http://github.com/CartoDB/CartoDB-SQL-API/issues/116
// Bug was fixed in GDAL 1.10.2
GeoPackageFormat.prototype._needSRS = true;

// Generate a single .gpkg dump; keeps cartodb_id as the feature id.
GeoPackageFormat.prototype.generate = function(options, callback) {
    options.cmd_params = ['-lco', 'FID=cartodb_id'];
    this.toOGR_SingleFile(options, 'GPKG', callback);
};

module.exports = GeoPackageFormat;
|
||||
24
app/models/formats/ogr/kml.js
Normal file
24
app/models/formats/ogr/kml.js
Normal file
@@ -0,0 +1,24 @@
|
||||
'use strict';
|
||||
|
||||
var ogr = require('./../ogr');
|
||||
|
||||
// KML export format: OGR pipeline with the "KML" driver.
function KmlFormat() {}

KmlFormat.prototype = new ogr('kml');

KmlFormat.prototype._contentType = "application/kml; charset=utf-8";
KmlFormat.prototype._fileExtension = "kml";
// As of GDAL 1.10.1 SRID detection is bogus, so we use
// our own method. See:
// http://trac.osgeo.org/gdal/ticket/5131
// http://trac.osgeo.org/gdal/ticket/5287
// http://github.com/CartoDB/CartoDB-SQL-API/issues/110
// http://github.com/CartoDB/CartoDB-SQL-API/issues/116
// Bug was fixed in GDAL 1.10.2
KmlFormat.prototype._needSRS = true;

// Generate a single .kml dump file; callback(err, dumpfile).
KmlFormat.prototype.generate = function(options, callback) {
    this.toOGR_SingleFile(options, 'KML', callback);
};

module.exports = KmlFormat;
|
||||
135
app/models/formats/ogr/shp.js
Normal file
135
app/models/formats/ogr/shp.js
Normal file
@@ -0,0 +1,135 @@
|
||||
'use strict';
|
||||
|
||||
var step = require('step');
|
||||
var fs = require('fs');
|
||||
var spawn = require('child_process').spawn;
|
||||
|
||||
var ogr = require('./../ogr');
|
||||
|
||||
// Shapefile export format: dumps via the OGR "ESRI Shapefile" driver
// into a temp directory, then zips the directory contents.
function ShpFormat() {
}

ShpFormat.prototype = new ogr('shp');

ShpFormat.prototype._contentType = "application/zip; charset=utf-8";
ShpFormat.prototype._fileExtension = "zip";
// As of GDAL 1.10 SRID detection is bogus, so we use
// our own method. See:
// http://trac.osgeo.org/gdal/ticket/5131
// http://trac.osgeo.org/gdal/ticket/5287
// http://github.com/CartoDB/CartoDB-SQL-API/issues/110
// http://github.com/CartoDB/CartoDB-SQL-API/issues/116
ShpFormat.prototype._needSRS = true;

// Generate a zipped shapefile; callback(err, zipfile).
ShpFormat.prototype.generate = function(options, callback) {
    this.toSHP(options, callback);
};
|
||||
|
||||
/**
 * Export to a zipped ESRI Shapefile: make a temp directory, run the OGR
 * dump into it, zip the directory contents (flat, -j), then remove the
 * directory, always preserving the first error encountered.
 *
 * @param options  export options; NOTE(review): reads options.database /
 *                 options.user_id while toOGR reads options.dbopts — confirm
 *                 both are populated by the caller.
 * @param callback function(err, zipfile)
 */
ShpFormat.prototype.toSHP = function (options, callback) {
    var dbname = options.database;
    var user_id = options.user_id;
    var gcol = options.gn;
    var sql = options.sql;
    var skipfields = options.skipfields;
    var filename = options.filename;

    var fmtObj = this;
    var zip = global.settings.zipCommand || 'zip';
    // -q quiet, -r recursive, -j junk (flatten) paths inside the archive.
    var zipOptions = '-qrj';
    var tmpdir = global.settings.tmpDir || '/tmp';
    var reqKey = [ 'shp', dbname, user_id, gcol, this.generateMD5(sql) ].concat(skipfields).join(':');
    var outdirpath = tmpdir + '/sqlapi-' + process.pid + '-' + reqKey;
    var zipfile = outdirpath + '.zip';
    var shapefile = outdirpath + '/' + filename + '.shp';

    // TODO: following tests:
    // - fetch query with no "the_geom" column

    step (
        // Directory to hold the .shp/.shx/.dbf/... sidecar files.
        function createOutDir() {
            fs.mkdir(outdirpath, 0o777, this);
        },
        function spawnDumper(err) {
            if (err) {
                throw err;
            }

            fmtObj.toOGR(options, 'ESRI Shapefile', shapefile, this);
        },
        // Zip the dump directory; collect stderr for error reporting.
        function doZip(err) {
            if (err) {
                throw err;
            }

            var next = this;

            var child = spawn(zip, [zipOptions, zipfile, outdirpath ]);

            child.on('error', function (err) {
                next(new Error('Error executing zip command, ' + err));
            });

            var stderrData = [];
            child.stderr.setEncoding('utf8');
            child.stderr.on('data', function (data) {
                stderrData.push(data);
            });

            child.on('exit', function(code) {
                if (code !== 0) {
                    var errMessage = 'Zip command return code ' + code;
                    if (stderrData.length) {
                        errMessage += ', Error: ' + stderrData.join('\n');
                    }

                    return next(new Error(errMessage));
                }

                return next();
            });
        },
        // Remove the temp directory (files first, then rmdir), keeping
        // any error from the previous steps as the primary error.
        function cleanupDir(topError) {

            var next = this;

            // Unlink the dir content
            var unlinkall = function(dir, files, finish) {
                var f = files.shift();
                if ( ! f ) { finish(null); return; }
                var fn = dir + '/' + f;
                fs.unlink(fn, function(err) {
                    if ( err ) {
                        console.log("Unlinking " + fn + ": " + err);
                        finish(err);
                    } else {
                        unlinkall(dir, files, finish);
                    }
                });
            };
            fs.readdir(outdirpath, function(err, files) {
                if ( err ) {
                    if ( err.code !== 'ENOENT' ) {
                        next(new Error([topError, err].join('\n')));
                    } else {
                        // Directory never created / already gone: keep
                        // only the original error (if any).
                        next(topError);
                    }
                } else {
                    unlinkall(outdirpath, files, function(/*err*/) {
                        fs.rmdir(outdirpath, function(err) {
                            if ( err ) {
                                console.log("Removing dir " + outdirpath + ": " + err);
                            }
                            next(topError, zipfile);
                        });
                    });
                }
            });
        },
        function finalStep(err, zipfile) {
            callback(err, zipfile);
        }
    );
};
|
||||
|
||||
|
||||
module.exports = ShpFormat;
|
||||
25
app/models/formats/ogr/spatialite.js
Normal file
25
app/models/formats/ogr/spatialite.js
Normal file
@@ -0,0 +1,25 @@
|
||||
'use strict';
|
||||
|
||||
var ogr = require('./../ogr');
|
||||
|
||||
// SpatiaLite export format: OGR pipeline with the "SQLite" driver,
// asking it to create a SpatiaLite-flavoured database.
function SpatiaLiteFormat() {}

SpatiaLiteFormat.prototype = new ogr('spatialite');

SpatiaLiteFormat.prototype._contentType = "application/x-sqlite3; charset=utf-8";
SpatiaLiteFormat.prototype._fileExtension = "sqlite";
// As of GDAL 1.10.1 SRID detection is bogus, so we use
// our own method. See:
// http://trac.osgeo.org/gdal/ticket/5131
// http://trac.osgeo.org/gdal/ticket/5287
// http://github.com/CartoDB/CartoDB-SQL-API/issues/110
// http://github.com/CartoDB/CartoDB-SQL-API/issues/116
// Bug was fixed in GDAL 1.10.2
SpatiaLiteFormat.prototype._needSRS = true;

// Generate a single .sqlite dump; callback(err, dumpfile).
SpatiaLiteFormat.prototype.generate = function(options, callback) {
    // Set the extra ogr2ogr argument *before* starting the export: the
    // original assigned it after the toOGR_SingleFile() call, which only
    // worked because the ogr2ogr process is spawned asynchronously.
    // NOTE(review): dataset creation options are normally passed to
    // ogr2ogr as '-dsco SPATIALITE=yes' — confirm the bare argument is
    // intentional.
    options.cmd_params = ['SPATIALITE=yes'];
    this.toOGR_SingleFile(options, 'SQLite', callback);
};

module.exports = SpatiaLiteFormat;
|
||||
164
app/models/formats/pg.js
Normal file
164
app/models/formats/pg.js
Normal file
@@ -0,0 +1,164 @@
|
||||
'use strict';
|
||||
|
||||
var step = require('step');
|
||||
var PSQL = require('cartodb-psql');
|
||||
|
||||
/**
 * Base class for row-streaming Postgres output formats (json, geojson,
 * svg, arraybuffer, ...). The `id` doubles as the file extension.
 */
function PostgresFormat(id) {
    this.id = id;
}

PostgresFormat.prototype = {

    // Row formats execute the user's SQL as-is (subclasses may wrap it).
    getQuery: function(sql/*, options*/) {
        return sql;
    },

    getContentType: function() {
        return this._contentType;
    },

    getFileExtension: function() {
        return this.id;
    }

};
|
||||
|
||||
// Default per-row handler: accumulate the row on the query result.
PostgresFormat.prototype.handleQueryRow = function(row, result) {
    result.addRow(row);
};

// Per-row handler used when opts.skipfields is non-empty: strip the
// skipped columns from the row before accumulating it.
PostgresFormat.prototype.handleQueryRowWithSkipFields = function(row, result) {
    var sf = this.opts.skipfields;
    for ( var j=0; j<sf.length; ++j ) {
        delete row[sf[j]];
    }
    this.handleQueryRow(row, result);
};

// Collect Postgres NOTICE messages onto result.notices (created lazily).
PostgresFormat.prototype.handleNotice = function(msg, result) {
    if ( ! result.notices ) {
        result.notices = [];
    }
    for (var i=0; i<msg.length; i++) {
        result.notices.push(msg[i]);
    }
};
|
||||
|
||||
/**
 * Finish a query: on error report it via this.callback; otherwise drop
 * skipped fields from the result metadata, run the format's transform()
 * and send its output to the sink. Invoked on the query 'end' event (or
 * directly from the 'error' handler in sendResponse).
 */
PostgresFormat.prototype.handleQueryEnd = function(result) {
    // Query finished: the canceller is no longer usable.
    this.queryCanceller = undefined;

    if ( this.error ) {
        this.callback(this.error);
        return;
    }

    if ( this.opts.profiler ) {
        this.opts.profiler.done('gotRows');
    }

    this.opts.total_time = (Date.now() - this.start_time)/1000;

    // Drop field description for skipped fields
    if (this.hasSkipFields) {
        var sf = this.opts.skipfields;
        var newfields = [];
        for ( var j=0; j<result.fields.length; ++j ) {
            var f = result.fields[j];
            if ( sf.indexOf(f.name) === -1 ) {
                newfields.push(f);
            }
        }
        result.fields = newfields;
    }

    var that = this;

    step (
        function packageResult() {
            if ( that.opts.abortChecker ) {
                that.opts.abortChecker('packageResult');
            }
            that.transform(result, that.opts, this);
        },
        function sendResults(err, out){
            if (err) {
                throw err;
            }

            // return to browser
            if ( out ) {
                if ( that.opts.beforeSink ) {
                    that.opts.beforeSink();
                }
                that.opts.sink.send(out);
            } else {
                console.error("No output from transform, doing nothing ?!");
            }
        },
        // Terminal step: report success (err undefined) or failure.
        function errorHandle(err){
            that.callback(err);
        }
    );
};
|
||||
|
||||
/**
 * Execute the (possibly format-wrapped) query as an evented stream and
 * wire its row/end/error/notice events to this formatter. A formatter
 * instance is single-use: a second call fails immediately.
 *
 * @param opts     { sql, gn, dp, skipfields, dbopts, sink, beforeSink?,
 *                   profiler?, username?, ... }
 * @param callback function(err) — completion of the whole response
 */
PostgresFormat.prototype.sendResponse = function(opts, callback) {
    if ( this.callback ) {
        callback(new Error("Invalid double call to .sendResponse on a pg formatter"));
        return;
    }
    this.callback = callback;
    this.opts = opts;

    this.hasSkipFields = opts.skipfields.length;

    var sql = this.getQuery(opts.sql, {
        gn: opts.gn,
        dp: opts.dp,
        skipfields: opts.skipfields
    });

    var that = this;

    this.start_time = Date.now();

    this.client = new PSQL(opts.dbopts);
    this.client.eventedQuery(sql, function(err, query, queryCanceller) {
        // Keep the canceller so cancel() can abort a running query.
        that.queryCanceller = queryCanceller;
        if (err) {
            callback(err);
            return;
        }
        if ( that.opts.profiler ) {
            that.opts.profiler.done('eventedQuery');
        }

        if (that.hasSkipFields) {
            query.on('row', that.handleQueryRowWithSkipFields.bind(that));
        } else {
            query.on('row', that.handleQueryRow.bind(that));
        }
        query.on('end', that.handleQueryEnd.bind(that));
        query.on('error', function(err) {
            that.error = err;
            // Row-size-limit errors are only logged here; the 'end' path
            // is expected to follow and deliver the error to the client.
            if (err.message && err.message.match(/row too large, was \d* bytes/i)) {
                return console.error(JSON.stringify({
                    username: opts.username,
                    type: 'row_size_limit_exceeded',
                    error: err.message
                }));
            }
            that.handleQueryEnd();
        });
        query.on('notice', function(msg) {
            that.handleNotice(msg, query._result);
        });
    });
};
|
||||
|
||||
// Abort the in-flight query, if any (no-op once the query has ended).
PostgresFormat.prototype.cancel = function() {
    var canceller = this.queryCanceller;
    if (canceller) {
        canceller.call();
    }
};
|
||||
|
||||
module.exports = PostgresFormat;
|
||||
87
app/models/formats/pg/arraybuffer.js
Normal file
87
app/models/formats/pg/arraybuffer.js
Normal file
@@ -0,0 +1,87 @@
|
||||
'use strict';
|
||||
|
||||
var _ = require('underscore');
|
||||
|
||||
var pg = require('./../pg');
|
||||
var ArrayBufferSer = require("../../bin_encoder");
|
||||
|
||||
function BinaryFormat() {}
|
||||
|
||||
BinaryFormat.prototype = new pg('arraybuffer');
|
||||
|
||||
BinaryFormat.prototype._contentType = "application/octet-stream";
|
||||
|
||||
// Guess an ArrayBufferSer element type from a column-name suffix of the
// form "__<uintclamp|uint|int|float><8|16|32>" (e.g. "vals__float32").
// Returns undefined when the name carries no type hint.
BinaryFormat.prototype._extractTypeFromName = function(name) {
    var g = name.match(/.*__(uintclamp|uint|int|float)(8|16|32)/i);
    if(g && g.length === 3) {
        var typeName = g[1] + g[2];
        return ArrayBufferSer.typeNames[typeName];
    }
};
|
||||
|
||||
// jshint maxcomplexity:12
/**
 * Serialize a query result into a packed binary payload: a STRING header
 * listing the column names, followed by one typed ArrayBufferSer per
 * column. Element types are guessed from the first row's values and from
 * "__type" column-name suffixes (see _extractTypeFromName). Geometry
 * ("the_geom") is rejected.
 *
 * @param result   query result ({ rowCount, rows })
 * @param options  unused here
 * @param callback function(err, buffer)
 */
BinaryFormat.prototype.transform = function(result, options, callback) {
    var total_rows = result.rowCount;
    var rows = result.rows;

    // get headers
    if(!total_rows) {
        // Buffer.alloc replaces the deprecated/unsafe `new Buffer(0)`.
        callback(null, Buffer.alloc(0));
        return;
    }

    var headersNames = Object.keys(rows[0]);
    var headerTypes = [];

    // native indexOf instead of _.contains — same check, no dependency
    if(headersNames.indexOf('the_geom') !== -1) {
        callback(new Error("geometry types are not supported"), null);
        return;
    }

    try {
        var i;
        var t;
        var firstRow = rows[0];
        // get header types (and guess from name); the original assigned to
        // the later-hoisted `r`/`n` vars here, which obscured intent.
        for(i = 0; i < headersNames.length; ++i) {
            var colName = headersNames[i];
            if(typeof(firstRow[colName]) === 'string') {
                headerTypes.push(ArrayBufferSer.STRING);
            } else if(typeof(firstRow[colName]) === 'object') {
                // Array-valued column: nested buffer of the hinted type.
                t = this._extractTypeFromName(colName);
                t = t || ArrayBufferSer.FLOAT32;
                headerTypes.push(ArrayBufferSer.BUFFER + t);
            } else {
                t = this._extractTypeFromName(colName);
                headerTypes.push(t || ArrayBufferSer.FLOAT32);
            }
        }

        // pack the data
        var header = new ArrayBufferSer(ArrayBufferSer.STRING, headersNames);
        var data = [header];
        for(i = 0; i < headersNames.length; ++i) {
            var d = [];
            var n = headersNames[i];
            for(var r = 0; r < total_rows; ++r) {
                var row = rows[r][n];
                if(headerTypes[i] > ArrayBufferSer.BUFFER) {
                    row = new ArrayBufferSer(headerTypes[i] - ArrayBufferSer.BUFFER, row);
                }
                d.push(row);
            }
            var b = new ArrayBufferSer(headerTypes[i], d);
            data.push(b);
        }

        // create the final buffer
        var all = new ArrayBufferSer(ArrayBufferSer.BUFFER, data);

        callback(null, all.buffer);

    } catch(e) {
        callback(e, null);
    }
};
|
||||
|
||||
module.exports = BinaryFormat;
|
||||
120
app/models/formats/pg/geojson.js
Normal file
120
app/models/formats/pg/geojson.js
Normal file
@@ -0,0 +1,120 @@
|
||||
'use strict';
|
||||
|
||||
var _ = require('underscore');
|
||||
|
||||
var pg = require('./../pg');
|
||||
const errorHandlerFactory = require('../../../services/error_handler_factory');
|
||||
|
||||
// GeoJSON streaming format: builds a FeatureCollection incrementally in
// `buffer` as rows arrive (see startStreaming / handleQueryRow).
function GeoJsonFormat() {
    this.buffer = '';
}

GeoJsonFormat.prototype = new pg('geojson');

GeoJsonFormat.prototype._contentType = "application/json; charset=utf-8";
|
||||
|
||||
// Wrap the user's query so every row carries its geometry serialized as
// GeoJSON text in `the_geom` (gn = geometry column, dp = decimal places).
GeoJsonFormat.prototype.getQuery = function(sql, options) {
    var parts = [
        'SELECT *, ST_AsGeoJSON(', options.gn, ',', options.dp,
        ') as the_geom FROM (', sql, ') as foo'
    ];
    return parts.join('');
};
|
||||
|
||||
// Emit the response prologue (optional JSONP wrapper plus the
// FeatureCollection opening) into the buffer; called lazily on the first
// row or at query end if no rows arrived.
GeoJsonFormat.prototype.startStreaming = function() {
    this.total_rows = 0;
    if (this.opts.beforeSink) {
        this.opts.beforeSink();
    }
    // JSONP support: opts.callback is the client-supplied function name.
    if (this.opts.callback) {
        this.buffer += this.opts.callback + '(';
    }
    this.buffer += '{"type": "FeatureCollection", "features": [';
    this._streamingStarted = true;
};
|
||||
|
||||
// Append one row as a GeoJSON Feature. The geometry column already holds
// GeoJSON text (see getQuery), so it is spliced in verbatim; the
// remaining columns become the feature's properties. Mutates `row`.
GeoJsonFormat.prototype.handleQueryRow = function(row) {

    if ( ! this._streamingStarted ) {
        this.startStreaming();
    }

    var geojson = [
        '{',
        '"type":"Feature",',
        '"geometry":' + row[this.opts.gn] + ',',
        '"properties":'
    ];
    delete row[this.opts.gn];
    delete row.the_geom_webmercator;
    geojson.push(JSON.stringify(row));
    geojson.push('}');

    this.buffer += (this.total_rows++ ? ',' : '') + geojson.join('');

    // NOTE(review): this flushes whenever total_rows is NOT a multiple of
    // bufferedRows (i.e. nearly every row) — looks inverted relative to
    // "flush every N rows", but json.js shares the same test; confirm.
    if (this.total_rows % (this.opts.bufferedRows || 1000)) {
        this.opts.sink.write(this.buffer);
        this.buffer = '';
    }
};
|
||||
|
||||
// Close the FeatureCollection (appending any error payload inline, since
// headers may already have been streamed), end the sink, and signal
// completion. An error before any output short-circuits to callback(err).
GeoJsonFormat.prototype.handleQueryEnd = function(/*result*/) {
    if (this.error && !this._streamingStarted) {
        this.callback(this.error);
        return;
    }

    if ( this.opts.profiler ) {
        this.opts.profiler.done('gotRows');
    }

    if ( ! this._streamingStarted ) {
        this.startStreaming();
    }

    this.buffer += ']'; // end of features

    // Mid-stream errors are reported inside the (already started) body.
    if (this.error) {
        this.buffer += ',"error":' + JSON.stringify(errorHandlerFactory(this.error).getResponse().error);
    }

    this.buffer += '}'; // end of root object

    // Close the JSONP wrapper if one was opened.
    if (this.opts.callback) {
        this.buffer += ')';
    }

    this.opts.sink.write(this.buffer);
    this.opts.sink.end();
    this.buffer = '';

    this.callback();
};
|
||||
|
||||
/**
 * Convert a query result into a GeoJSON FeatureCollection object.
 *
 * Each row's `gn` column is parsed as GeoJSON geometry text; the
 * remaining columns (minus the_geom_webmercator) become the feature's
 * properties. NOTE: mutates the row objects in `data.rows`.
 *
 * Uses native Array#forEach instead of the original `_.each` — the rows
 * are a plain array, so underscore is unnecessary here.
 *
 * @param data     query result ({ rows: [...] })
 * @param gn       name of the geometry column holding GeoJSON text
 * @param callback function(err, featureCollection)
 */
function _toGeoJSON(data, gn, callback){
    try {
        var out = {
            type: "FeatureCollection",
            features: []
        };

        data.rows.forEach(function(ele){
            var feature = {
                type: "Feature",
                properties: ele,
                geometry: JSON.parse(ele[gn])
            };
            // `properties` aliases `ele`, so these deletes strip the
            // geometry columns from the emitted properties too.
            delete ele[gn];
            delete ele.the_geom_webmercator; // TODO: use skipfields
            out.features.push(feature);
        });

        // return payload
        callback(null, out);
    } catch (err) {
        // e.g. malformed geometry text in JSON.parse
        callback(err,null);
    }
}
|
||||
|
||||
module.exports = GeoJsonFormat;
|
||||
module.exports.toGeoJSON = _toGeoJSON;
|
||||
174
app/models/formats/pg/json.js
Normal file
174
app/models/formats/pg/json.js
Normal file
@@ -0,0 +1,174 @@
|
||||
'use strict';
|
||||
|
||||
var _ = require('underscore');
|
||||
|
||||
var pg = require('./../pg');
|
||||
const errorHandlerFactory = require('../../../services/error_handler_factory');
|
||||
|
||||
// JSON streaming format: builds {"rows":[...], "time":..., "fields":...}
// incrementally in `buffer` as rows arrive. lastKnownResult keeps the
// most recent result object so metadata survives to handleQueryEnd.
function JsonFormat() {
    this.buffer = '';
    this.lastKnownResult = {};
}

JsonFormat.prototype = new pg('json');

JsonFormat.prototype._contentType = "application/json; charset=utf-8";
|
||||
|
||||
// jshint maxcomplexity:9
// Map pg field descriptors to { name: { type: <friendly-name> } },
// collapsing Postgres type names to boolean/number/string/date and
// tagging array types (cname starting with "_") with a "[]" suffix.
JsonFormat.prototype.formatResultFields = function(flds) {
    flds = flds || [];
    var nfields = {};
    for (var i=0; i<flds.length; ++i) {
        var f = flds[i];
        var cname = this.client.typeName(f.dataTypeID);
        var tname;
        if ( ! cname ) {
            // OID not recognized by the client's type map.
            tname = 'unknown(' + f.dataTypeID + ')';
        } else {
            if ( cname.match('bool') ) {
                tname = 'boolean';
            }
            else if ( cname.match(/int|float|numeric/) ) {
                tname = 'number';
            }
            else if ( cname.match(/text|char|unknown/) ) {
                tname = 'string';
            }
            else if ( cname.match(/date|time/) ) {
                tname = 'date';
            }
            else {
                tname = cname;
            }
            if ( tname && cname.match(/^_/) ) {
                tname += '[]';
            }
        }
        nfields[f.name] = { type: tname };
    }
    return nfields;
};
|
||||
|
||||
// Emit the response prologue (optional JSONP wrapper plus the "rows"
// array opening) into the buffer; called lazily on the first row or at
// query end if no rows arrived.
JsonFormat.prototype.startStreaming = function() {
    this.total_rows = 0;
    if (this.opts.beforeSink) {
        this.opts.beforeSink();
    }
    // JSONP support: opts.callback is the client-supplied function name.
    if (this.opts.callback) {
        this.buffer += this.opts.callback + '(';
    }
    this.buffer += '{"rows":[';
    this._streamingStarted = true;
};
|
||||
|
||||
// Append one row to the "rows" array. The stringify replacer rewrites
// non-JSON numbers (NaN, ±Infinity) as strings so output stays parseable.
JsonFormat.prototype.handleQueryRow = function(row, result) {
    if ( ! this._streamingStarted ) {
        this.startStreaming();
    }

    this.lastKnownResult = result;

    this.buffer += (this.total_rows++ ? ',' : '') + JSON.stringify(row, function (key, value) {
        // value !== value is the NaN test (NaN is the only self-unequal value)
        if (value !== value) {
            return 'NaN';
        }

        if (value === Infinity) {
            return 'Infinity';
        }

        if (value === -Infinity) {
            return '-Infinity';
        }

        return value;
    });

    // NOTE(review): flushes whenever total_rows is NOT a multiple of
    // bufferedRows (i.e. nearly every row) — looks inverted relative to
    // "flush every N rows", but geojson.js shares the same test; confirm.
    if (this.total_rows % (this.opts.bufferedRows || 1000)) {
        this.opts.sink.write(this.buffer);
        this.buffer = '';
    }
};
|
||||
|
||||
// jshint maxcomplexity:13
/**
 * Close the streamed JSON document: append timing, field metadata,
 * total_rows, any mid-stream error, and grouped notice/warning messages,
 * then end the sink and signal completion. An error before any output
 * short-circuits to callback(err).
 */
JsonFormat.prototype.handleQueryEnd = function(result) {
    if (this.error && !this._streamingStarted) {
        this.callback(this.error);
        return;
    }

    if ( this.opts.profiler ) {
        this.opts.profiler.done('gotRows');
    }

    if ( ! this._streamingStarted ) {
        this.startStreaming();
    }

    this.opts.total_time = (Date.now() - this.start_time)/1000;

    // 'end' may fire without a result (e.g. after an error path).
    result = result || this.lastKnownResult || {};

    // Drop field description for skipped fields
    if (this.hasSkipFields) {
        var newfields = [];
        var sf = this.opts.skipfields;
        for (var i = 0; i < result.fields.length; i++) {
            var f = result.fields[i];
            if ( sf.indexOf(f.name) === -1 ) {
                newfields.push(f);
            }
        }
        result.fields = newfields;
    }

    var total_time = (Date.now() - this.start_time)/1000;

    var out = [
        '],', // end of "rows" array
        '"time":', JSON.stringify(total_time),
        ',"fields":', JSON.stringify(this.formatResultFields(result.fields)),
        ',"total_rows":', JSON.stringify(result.rowCount || this.total_rows)
    ];

    // Mid-stream errors are reported inline in the (already started) body.
    if (this.error) {
        out.push(',"error":', JSON.stringify(errorHandlerFactory(this.error).getResponse().error));
    }


    // Group notices by severity: {"notices":[...], "warnings":[...], ...}
    if ( result.notices && result.notices.length > 0 ) {
        var notices = {},
            severities = [];
        _.each(result.notices, function(notice) {
            var severity = notice.severity.toLowerCase() + 's';
            if (!notices[severity]) {
                severities.push(severity);
                notices[severity] = [];
            }
            notices[severity].push(notice.message);
        });
        _.each(severities, function(severity) {
            out.push(',');
            out.push(JSON.stringify(severity));
            out.push(':');
            out.push(JSON.stringify(notices[severity]));
        });
    }

    out.push('}');


    this.buffer += out.join('');

    // Close the JSONP wrapper if one was opened.
    if (this.opts.callback) {
        this.buffer += ')';
    }

    this.opts.sink.write(this.buffer);
    this.opts.sink.end();
    this.buffer = '';

    this.callback();
};
|
||||
|
||||
module.exports = JsonFormat;
|
||||
166
app/models/formats/pg/svg.js
Normal file
166
app/models/formats/pg/svg.js
Normal file
@@ -0,0 +1,166 @@
|
||||
'use strict';
|
||||
|
||||
var pg = require('./../pg');
|
||||
|
||||
// Fixed output canvas; geometries are scaled server-side to fit this size
// (see getQuery, which bakes these numbers into the SQL).
var svg_width = 1024.0;
var svg_height = 768.0;
var svg_ratio = svg_width/svg_height;

var radius = 5; // in pixels (based on svg_width and svg_height)

var stroke_width = 1; // in pixels (based on svg_width and svg_height)
var stroke_color = 'black';
// fill settings affect polygons and points (circles)
var fill_opacity = 0.5; // 0.0 is fully transparent, 1.0 is fully opaque
// unused if fill_color='none'
var fill_color = 'none'; // affects polygons and circles
|
||||
|
||||
/**
 * Streaming formatter that renders query results as an SVG document.
 * Rows are expected to carry an ST_AsSVG()-encoded geometry column
 * (see getQuery for the decorated query that produces them).
 */
function SvgFormat() {
    // Rows consumed so far; drives the periodic buffer flushes.
    this.totalRows = 0;

    // Layer extent in SVG space; computed lazily from the first row's
    // "<gn>_box" column during the results scan.
    this.bbox = null;

    // Pending output not yet written to the sink.
    this.buffer = '';

    // Becomes true once the SVG preamble has been written.
    this._streamingStarted = false;
}
|
||||
|
||||
// Inherit the shared PostgreSQL-backed format machinery, registered
// under the 'svg' format id.
SvgFormat.prototype = new pg('svg');

// MIME type sent back to the client for this format.
SvgFormat.prototype._contentType = "image/svg+xml; charset=utf-8";
|
||||
|
||||
/**
 * Wraps the user query so PostGIS returns, per row:
 *   <gn>_box       - the layer extent transformed into SVG space (box2d)
 *   <gn>_dimension - ST_Dimension() of the geometry (0=point, 1=line, 2=area)
 *   <gn>           - the geometry rendered via ST_AsSVG()
 * Geometries are translated/scaled (ST_TransScale) so the layer extent fits
 * the fixed svg_width x svg_height canvas while preserving aspect ratio;
 * the CASE picks the limiting axis (degenerate zero-height/width extents are
 * guarded with COALESCE/NULLIF).
 *
 * @param {string} sql - user query to wrap
 * @param {Object} options - options.gn: geometry column name,
 *   options.dp: decimal precision passed to ST_AsSVG
 * @returns {string} the decorated query
 */
SvgFormat.prototype.getQuery = function(sql, options) {
    var gn = options.gn;
    var dp = options.dp;
    return 'WITH source AS ( ' + sql + '), extent AS ( ' +
        ' SELECT ST_Extent(' + gn + ') AS e FROM source ' +
        '), extent_info AS ( SELECT e, ' +
        'st_xmin(e) as ex0, st_ymax(e) as ey0, ' +
        'st_xmax(e)-st_xmin(e) as ew, ' +
        'st_ymax(e)-st_ymin(e) as eh FROM extent )' +
        ', trans AS ( SELECT CASE WHEN ' +
        'eh = 0 THEN ' + svg_width +
        '/ COALESCE(NULLIF(ew,0),' + svg_width +') WHEN ' +
        svg_ratio + ' <= (ew / eh) THEN (' +
        svg_width + '/ew ) ELSE (' +
        svg_height + '/eh ) END as s ' +
        ', ex0 as x0, ey0 as y0 FROM extent_info ) ' +
        'SELECT st_TransScale(e, -x0, -y0, s, s)::box2d as ' +
        gn + '_box, ST_Dimension(' + gn + ') as ' + gn +
        '_dimension, ST_AsSVG(ST_TransScale(' + gn + ', ' +
        '-x0, -y0, s, s), 0, ' + dp + ') as ' + gn +
        //+ ', ex0, ey0, ew, eh, s ' // DEBUG ONLY +
        ' FROM trans, extent_info, source' +
        ' ORDER BY the_geom_dimension ASC';
};
|
||||
|
||||
/**
 * Writes the SVG document preamble (XML declaration, DOCTYPE and the root
 * <svg> tag) straight to the sink. When the layer extent is known it is
 * grown by the point radius plus stroke width and exposed as the viewBox,
 * with the y axis flipped to match SVG screen coordinates.
 */
SvgFormat.prototype.startStreaming = function() {
    if (this.opts.beforeSink) {
        this.opts.beforeSink();
    }

    var rootTag = '<svg ';

    var box = this.bbox;
    if (box) {
        // Expand the extent so circles and strokes at the border aren't clipped.
        // TODO: use a Box2d class for these ops
        var margin = radius + stroke_width;
        box.xmin -= margin;
        box.ymin -= margin;
        box.xmax += margin;
        box.ymax += margin;
        box.width = box.xmax - box.xmin;
        box.height = box.ymax - box.ymin;
        // SVG's y axis grows downwards, hence the negated ymax origin.
        rootTag += 'viewBox="' + box.xmin + ' ' + (-box.ymax) + ' ' +
                   box.width + ' ' + box.height + '" ';
    }

    rootTag += 'style="fill-opacity:' + fill_opacity + '; stroke:' + stroke_color + '; ' +
               'stroke-width:' + stroke_width + '; fill:' + fill_color + '" ';
    rootTag += 'xmlns="http://www.w3.org/2000/svg" version="1.1">\n';

    var header = [
        '<?xml version="1.0" standalone="no"?>',
        '<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">',
        rootTag
    ];

    this.opts.sink.write(header.join('\n'));

    this._streamingStarted = true;
};
|
||||
|
||||
// jshint maxcomplexity:11
|
||||
/**
 * Renders one row's geometry into the SVG buffer. Points become <circle>,
 * lines and areas become <path>. The first row also seeds the layer bbox
 * (parsed from the "<gn>_box" column) which in turn triggers the document
 * preamble via startStreaming().
 *
 * @param {Object} row - result row carrying <gn>, <gn>_dimension and <gn>_box
 */
SvgFormat.prototype.handleQueryRow = function(row) {
    this.totalRows++;

    // Missing geometry column: record the error but keep scanning; the error
    // is reported later by handleQueryEnd().
    if ( ! row.hasOwnProperty(this.opts.gn) ) {
        this.error = new Error('column "' + this.opts.gn + '" does not exist');
    }

    var g = row[this.opts.gn];
    if ( ! g ) {
        return;
    } // null or empty

    // jshint ignore:start
    // Loose == on purpose: the dimension may arrive as a string or a number.
    var gdims = row[this.opts.gn + '_dimension'];
    // TODO: add an identifier, if any of "cartodb_id", "oid", "id", "gid" are found
    // TODO: add "class" attribute to help with styling ?
    if ( gdims == '0' ) {
        // ST_AsSVG emits cx/cy attributes for points, spliced straight in.
        this.buffer += '<circle r="' + radius + '" ' + g + ' />\n';
    } else if ( gdims == '1' ) {
        // Avoid filling closed linestrings
        this.buffer += '<path ' + ( fill_color !== 'none' ? 'fill="none" ' : '' ) + 'd="' + g + '" />\n';
    } else if ( gdims == '2' ) {
        this.buffer += '<path d="' + g + '" />\n';
    }
    // jshint ignore:end

    if ( ! this.bbox ) {
        // Parse layer extent: "BOX(x y, X Y)"
        // NOTE: the name of the extent field is
        // determined by the same code adding the
        // ST_AsSVG call (in queryResult)
        //
        // NOTE(review): if the column were missing or malformed, match()
        // would yield null and the indexing below would throw — confirm the
        // query always provides a well-formed box.
        var bbox = row[this.opts.gn + '_box'];
        bbox = bbox.match(/BOX\(([^ ]*) ([^ ,]*),([^ ]*) ([^)]*)\)/);
        this.bbox = {
            xmin: parseFloat(bbox[1]),
            ymin: parseFloat(bbox[2]),
            xmax: parseFloat(bbox[3]),
            ymax: parseFloat(bbox[4])
        };
    }

    // The preamble needs the bbox, so it is emitted on the first row that has one.
    if (!this._streamingStarted && this.bbox) {
        this.startStreaming();
    }

    // Flush periodically once the preamble is out.
    if (this._streamingStarted && (this.totalRows % (this.opts.bufferedRows || 1000))) {
        this.opts.sink.write(this.buffer);
        this.buffer = '';
    }
};
|
||||
|
||||
/**
 * Closes the SVG document and flushes whatever is left in the buffer.
 * An error raised before any output went out is handed to the completion
 * callback instead of producing a truncated document.
 */
SvgFormat.prototype.handleQueryEnd = function() {
    // Nothing was streamed yet, so the error can still be reported cleanly.
    if (this.error && !this._streamingStarted) {
        this.callback(this.error);
        return;
    }

    if (this.opts.profiler) {
        this.opts.profiler.done('gotRows');
    }

    // Empty result set: no row ever triggered the preamble.
    if (!this._streamingStarted) {
        this.startStreaming();
    }

    // Close the root element opened in startStreaming() and push it all out.
    this.buffer += '</svg>\n';
    this.opts.sink.write(this.buffer);
    this.opts.sink.end();

    this.callback();
};
|
||||
|
||||
module.exports = SvgFormat;
|
||||
138
app/models/formats/pg/topojson.js
Normal file
138
app/models/formats/pg/topojson.js
Normal file
@@ -0,0 +1,138 @@
|
||||
'use strict';
|
||||
|
||||
var pg = require('./../pg');
|
||||
var _ = require('underscore');
|
||||
var geojson = require('./geojson');
|
||||
var TopoJSON = require('topojson');
|
||||
|
||||
/**
 * Formatter that accumulates GeoJSON features row by row and converts them
 * to a TopoJSON topology once the query completes.
 */
function TopoJsonFormat() {
    // Features gathered by handleQueryRow; consumed in handleQueryEnd.
    this.features = [];
}
|
||||
|
||||
TopoJsonFormat.prototype = new pg('topojson');
|
||||
|
||||
/**
 * Builds the SQL for TopoJSON output by reusing the GeoJSON query and
 * keeping only rows that actually carry a geometry.
 *
 * @param {string} sql - user query to wrap
 * @param {Object} options - format options; options.gn is the geometry column
 * @returns {string} the wrapped query
 */
TopoJsonFormat.prototype.getQuery = function(sql, options) {
    var baseQuery = geojson.prototype.getQuery(sql, options);
    return baseQuery + ' where ' + options.gn + ' is not null';
};
|
||||
|
||||
/**
 * Turns one result row into a GeoJSON Feature and queues it for topology
 * conversion. The geometry column is parsed and removed from the row; the
 * remaining columns become the feature's properties.
 *
 * @param {Object} row - result row; mutated in place (geometry columns removed)
 */
TopoJsonFormat.prototype.handleQueryRow = function(row) {
    var geometryColumn = this.opts.gn;
    var geometry = JSON.parse(row[geometryColumn]);

    // Strip geometry payloads so only real attributes remain on the row.
    delete row[geometryColumn];
    delete row.the_geom_webmercator;

    this.features.push({
        type: "Feature",
        geometry: geometry,
        properties: row
    });
};
|
||||
|
||||
/**
 * Converts the accumulated features to a TopoJSON topology and streams it
 * out incrementally. The topology is serialized key by key, yielding to the
 * event loop between chunks (setImmediate/nextTick) and deleting already-sent
 * subtrees to bound memory usage.
 */
TopoJsonFormat.prototype.handleQueryEnd = function() {
    if (this.error) {
        this.callback(this.error);
        return;
    }

    if ( this.opts.profiler ) {
        this.opts.profiler.done('gotRows');
    }

    var topology = TopoJSON.topology(this.features, {
        "quantization": 1e4,
        "force-clockwise": true,
        "property-filter": function(d) {
            return d;
        }
    });

    // Features have been folded into the topology; release them.
    this.features = [];

    var stream = this.opts.sink;
    var jsonpCallback = this.opts.callback;
    var bufferedRows = this.opts.bufferedRows;
    var buffer = '';

    // setImmediate is preferred; process.nextTick is the pre-0.10 fallback.
    var immediately = global.setImmediate || process.nextTick;

    // Serializes obj[key] into `buffer` as `"key": <value>`, one element per
    // event-loop turn for iterables, then invokes done().
    function streamObjectSubtree(obj, key, done) {
        buffer += '"' + key + '":';

        var isObject = _.isObject(obj[key]),
            isArray = _.isArray(obj[key]),
            isIterable = isArray || isObject;

        if (isIterable) {
            buffer += isArray ? '[' : '{';
            var subtreeKeys = Object.keys(obj[key]);
            var pos = 0;
            function streamNext() {
                immediately(function() {
                    var subtreeKey = subtreeKeys.shift();
                    if (!isArray) {
                        buffer += '"' + subtreeKey + '":';
                    }
                    buffer += JSON.stringify(obj[key][subtreeKey]);

                    // Flush on every element except exact multiples of the window.
                    if (pos++ % (bufferedRows || 1000)) {
                        stream.write(buffer);
                        buffer = '';
                    }

                    if (subtreeKeys.length > 0) {
                        // Drop the element that was just serialized to free memory.
                        delete obj[key][subtreeKey];
                        buffer += ',';
                        streamNext();
                    } else {
                        buffer += isArray ? ']' : '}';
                        stream.write(buffer);
                        buffer = '';
                        done();
                    }
                });
            }
            streamNext();
        } else {
            // Scalars are emitted in one go.
            buffer += JSON.stringify(obj[key]);
            done();
        }
    }

    if (jsonpCallback) {
        buffer += jsonpCallback + '(';
    }
    buffer += '{';
    var keys = Object.keys(topology);
    // Streams the topology's top-level keys one at a time; done() advances to
    // the next key or closes the document (and the JSONP wrapper) at the end.
    function sendResponse() {
        immediately(function () {
            var key = keys.shift();
            function done() {
                if (keys.length > 0) {
                    delete topology[key];
                    buffer += ',';
                    sendResponse();
                } else {
                    buffer += '}';
                    if (jsonpCallback) {
                        buffer += ')';
                    }
                    stream.write(buffer);
                    stream.end();
                    topology = null;
                }
            }
            streamObjectSubtree(topology, key, done);
        });
    }
    sendResponse();

    // NOTE(review): invoked before the async streaming above finishes; it
    // appears to signal "query consumed", not "response sent" — confirm.
    this.callback();
};
|
||||
|
||||
/**
 * Aborts the in-flight database query, if one is registered.
 */
TopoJsonFormat.prototype.cancel = function() {
    var canceller = this.queryCanceller;
    if (canceller) {
        // Invoke with no receiver, matching the original `.call()` usage.
        canceller.call();
    }
};
|
||||
|
||||
|
||||
|
||||
module.exports = TopoJsonFormat;
|
||||
34
app/monitoring/health_check.js
Normal file
34
app/monitoring/health_check.js
Normal file
@@ -0,0 +1,34 @@
|
||||
'use strict';
|
||||
|
||||
var step = require('step'),
|
||||
fs = require('fs');
|
||||
|
||||
/**
 * Service health probe driven by an on-disk "disable" flag file.
 *
 * @param {string} disableFile - path of the file whose presence disables the service
 */
function HealthCheck(disableFile) {
    // Checked on every call to .check().
    this.disableFile = disableFile;
}
|
||||
|
||||
module.exports = HealthCheck;
|
||||
|
||||
/**
 * Runs the health check: if the disable file exists and is readable the
 * service is reported unhealthy (HTTP 503) with the file's content as the
 * error message; otherwise the service is healthy.
 *
 * @param {Function} callback - invoked as callback(err); err is undefined when healthy
 */
HealthCheck.prototype.check = function(callback) {
    var self = this;

    step(
        function getManualDisable() {
            // No encoding given, so a successful read yields a Buffer.
            fs.readFile(self.disableFile, this);
        },
        function handleDisabledFile(err, data) {
            var next = this;
            if (err) {
                // Disable file absent or unreadable: service is healthy.
                return next();
            }
            // `data` is a Buffer and therefore always truthy: the mere
            // presence of the disable file switches the service off, with its
            // (possibly empty) content as the message.
            if (!!data) {
                err = new Error(data);
                err.http_status = 503;
                throw err;
            }
            // Defensive fall-through: without this the step chain would stall
            // (callback never fired) if `data` were ever falsy.
            return next();
        },
        function handleResult(err) {
            callback(err);
        }
    );
};
|
||||
285
app/postgresql/error_codes.js
Normal file
285
app/postgresql/error_codes.js
Normal file
@@ -0,0 +1,285 @@
|
||||
'use strict';
|
||||
|
||||
var _ = require('underscore');
|
||||
|
||||
// reference http://www.postgresql.org/docs/9.3/static/errcodes-appendix.html
|
||||
// Used `^([A-Z0-9]*)\s(.*)` -> `"$1": "$2"` to create the JS object
|
||||
// Maps each 5-character PostgreSQL SQLSTATE code to its condition name.
var codeToCondition = {
    // Class 00 — Successful Completion
    "00000": "successful_completion",
    // Class 01 — Warning
    "01000": "warning",
    "0100C": "dynamic_result_sets_returned",
    "01008": "implicit_zero_bit_padding",
    "01003": "null_value_eliminated_in_set_function",
    "01007": "privilege_not_granted",
    "01006": "privilege_not_revoked",
    "01004": "string_data_right_truncation",
    "01P01": "deprecated_feature",
    // Class 02 — No Data (this is also a warning class per the SQL standard)
    "02000": "no_data",
    "02001": "no_additional_dynamic_result_sets_returned",
    // Class 03 — SQL Statement Not Yet Complete
    "03000": "sql_statement_not_yet_complete",
    // Class 08 — Connection Exception
    "08000": "connection_exception",
    "08003": "connection_does_not_exist",
    "08006": "connection_failure",
    "08001": "sqlclient_unable_to_establish_sqlconnection",
    "08004": "sqlserver_rejected_establishment_of_sqlconnection",
    "08007": "transaction_resolution_unknown",
    "08P01": "protocol_violation",
    // Class 09 — Triggered Action Exception
    "09000": "triggered_action_exception",
    // Class 0A — Feature Not Supported
    "0A000": "feature_not_supported",
    // Class 0B — Invalid Transaction Initiation
    "0B000": "invalid_transaction_initiation",
    // Class 0F — Locator Exception
    "0F000": "locator_exception",
    "0F001": "invalid_locator_specification",
    // Class 0L — Invalid Grantor
    "0L000": "invalid_grantor",
    "0LP01": "invalid_grant_operation",
    // Class 0P — Invalid Role Specification
    "0P000": "invalid_role_specification",
    // Class 0Z — Diagnostics Exception
    "0Z000": "diagnostics_exception",
    "0Z002": "stacked_diagnostics_accessed_without_active_handler",
    // Class 20 — Case Not Found
    "20000": "case_not_found",
    // Class 21 — Cardinality Violation
    "21000": "cardinality_violation",
    // Class 22 — Data Exception
    "22000": "data_exception",
    "2202E": "array_subscript_error",
    "22021": "character_not_in_repertoire",
    "22008": "datetime_field_overflow",
    "22012": "division_by_zero",
    "22005": "error_in_assignment",
    "2200B": "escape_character_conflict",
    "22022": "indicator_overflow",
    "22015": "interval_field_overflow",
    "2201E": "invalid_argument_for_logarithm",
    "22014": "invalid_argument_for_ntile_function",
    "22016": "invalid_argument_for_nth_value_function",
    "2201F": "invalid_argument_for_power_function",
    "2201G": "invalid_argument_for_width_bucket_function",
    "22018": "invalid_character_value_for_cast",
    "22007": "invalid_datetime_format",
    "22019": "invalid_escape_character",
    "2200D": "invalid_escape_octet",
    "22025": "invalid_escape_sequence",
    "22P06": "nonstandard_use_of_escape_character",
    "22010": "invalid_indicator_parameter_value",
    "22023": "invalid_parameter_value",
    "2201B": "invalid_regular_expression",
    "2201W": "invalid_row_count_in_limit_clause",
    "2201X": "invalid_row_count_in_result_offset_clause",
    "22009": "invalid_time_zone_displacement_value",
    "2200C": "invalid_use_of_escape_character",
    "2200G": "most_specific_type_mismatch",
    "22004": "null_value_not_allowed",
    "22002": "null_value_no_indicator_parameter",
    "22003": "numeric_value_out_of_range",
    "22026": "string_data_length_mismatch",
    "22001": "string_data_right_truncation",
    "22011": "substring_error",
    "22027": "trim_error",
    "22024": "unterminated_c_string",
    "2200F": "zero_length_character_string",
    "22P01": "floating_point_exception",
    "22P02": "invalid_text_representation",
    "22P03": "invalid_binary_representation",
    "22P04": "bad_copy_file_format",
    "22P05": "untranslatable_character",
    "2200L": "not_an_xml_document",
    "2200M": "invalid_xml_document",
    "2200N": "invalid_xml_content",
    "2200S": "invalid_xml_comment",
    "2200T": "invalid_xml_processing_instruction",
    // Class 23 — Integrity Constraint Violation
    "23000": "integrity_constraint_violation",
    "23001": "restrict_violation",
    "23502": "not_null_violation",
    "23503": "foreign_key_violation",
    "23505": "unique_violation",
    "23514": "check_violation",
    "23P01": "exclusion_violation",
    // Class 24 — Invalid Cursor State
    "24000": "invalid_cursor_state",
    // Class 25 — Invalid Transaction State
    "25000": "invalid_transaction_state",
    "25001": "active_sql_transaction",
    "25002": "branch_transaction_already_active",
    "25008": "held_cursor_requires_same_isolation_level",
    "25003": "inappropriate_access_mode_for_branch_transaction",
    "25004": "inappropriate_isolation_level_for_branch_transaction",
    "25005": "no_active_sql_transaction_for_branch_transaction",
    "25006": "read_only_sql_transaction",
    "25007": "schema_and_data_statement_mixing_not_supported",
    "25P01": "no_active_sql_transaction",
    "25P02": "in_failed_sql_transaction",
    // Class 26 — Invalid SQL Statement Name
    "26000": "invalid_sql_statement_name",
    // Class 27 — Triggered Data Change Violation
    "27000": "triggered_data_change_violation",
    // Class 28 — Invalid Authorization Specification
    "28000": "invalid_authorization_specification",
    "28P01": "invalid_password",
    // Class 2B — Dependent Privilege Descriptors Still Exist
    "2B000": "dependent_privilege_descriptors_still_exist",
    "2BP01": "dependent_objects_still_exist",
    // Class 2D — Invalid Transaction Termination
    "2D000": "invalid_transaction_termination",
    // Class 2F — SQL Routine Exception
    "2F000": "sql_routine_exception",
    "2F005": "function_executed_no_return_statement",
    "2F002": "modifying_sql_data_not_permitted",
    "2F003": "prohibited_sql_statement_attempted",
    "2F004": "reading_sql_data_not_permitted",
    // Class 34 — Invalid Cursor Name
    "34000": "invalid_cursor_name",
    // Class 38 — External Routine Exception
    "38000": "external_routine_exception",
    "38001": "containing_sql_not_permitted",
    "38002": "modifying_sql_data_not_permitted",
    "38003": "prohibited_sql_statement_attempted",
    "38004": "reading_sql_data_not_permitted",
    // Class 39 — External Routine Invocation Exception
    "39000": "external_routine_invocation_exception",
    "39001": "invalid_sqlstate_returned",
    "39004": "null_value_not_allowed",
    "39P01": "trigger_protocol_violated",
    "39P02": "srf_protocol_violated",
    // Class 3B — Savepoint Exception
    "3B000": "savepoint_exception",
    "3B001": "invalid_savepoint_specification",
    // Class 3D — Invalid Catalog Name
    "3D000": "invalid_catalog_name",
    // Class 3F — Invalid Schema Name
    "3F000": "invalid_schema_name",
    // Class 40 — Transaction Rollback
    "40000": "transaction_rollback",
    "40002": "transaction_integrity_constraint_violation",
    "40001": "serialization_failure",
    "40003": "statement_completion_unknown",
    "40P01": "deadlock_detected",
    // Class 42 — Syntax Error or Access Rule Violation
    "42000": "syntax_error_or_access_rule_violation",
    "42601": "syntax_error",
    "42501": "insufficient_privilege",
    "42846": "cannot_coerce",
    "42803": "grouping_error",
    "42P20": "windowing_error",
    "42P19": "invalid_recursion",
    "42830": "invalid_foreign_key",
    "42602": "invalid_name",
    "42622": "name_too_long",
    "42939": "reserved_name",
    "42804": "datatype_mismatch",
    "42P18": "indeterminate_datatype",
    "42P21": "collation_mismatch",
    "42P22": "indeterminate_collation",
    "42809": "wrong_object_type",
    "42703": "undefined_column",
    "42883": "undefined_function",
    "42P01": "undefined_table",
    "42P02": "undefined_parameter",
    "42704": "undefined_object",
    "42701": "duplicate_column",
    "42P03": "duplicate_cursor",
    "42P04": "duplicate_database",
    "42723": "duplicate_function",
    "42P05": "duplicate_prepared_statement",
    "42P06": "duplicate_schema",
    "42P07": "duplicate_table",
    "42712": "duplicate_alias",
    "42710": "duplicate_object",
    "42702": "ambiguous_column",
    "42725": "ambiguous_function",
    "42P08": "ambiguous_parameter",
    "42P09": "ambiguous_alias",
    "42P10": "invalid_column_reference",
    "42611": "invalid_column_definition",
    "42P11": "invalid_cursor_definition",
    "42P12": "invalid_database_definition",
    "42P13": "invalid_function_definition",
    "42P14": "invalid_prepared_statement_definition",
    "42P15": "invalid_schema_definition",
    "42P16": "invalid_table_definition",
    "42P17": "invalid_object_definition",
    // Class 44 — WITH CHECK OPTION Violation
    "44000": "with_check_option_violation",
    // Class 53 — Insufficient Resources
    "53000": "insufficient_resources",
    "53100": "disk_full",
    "53200": "out_of_memory",
    "53300": "too_many_connections",
    "53400": "configuration_limit_exceeded",
    // Class 54 — Program Limit Exceeded
    "54000": "program_limit_exceeded",
    "54001": "statement_too_complex",
    "54011": "too_many_columns",
    "54023": "too_many_arguments",
    // Class 55 — Object Not In Prerequisite State
    "55000": "object_not_in_prerequisite_state",
    "55006": "object_in_use",
    "55P02": "cant_change_runtime_param",
    "55P03": "lock_not_available",
    // Class 57 — Operator Intervention
    "57000": "operator_intervention",
    "57014": "query_canceled",
    "57P01": "admin_shutdown",
    "57P02": "crash_shutdown",
    "57P03": "cannot_connect_now",
    "57P04": "database_dropped",
    // Class 58 — System Error (errors external to PostgreSQL itself)
    "58000": "system_error",
    "58030": "io_error",
    "58P01": "undefined_file",
    "58P02": "duplicate_file",
    // Class F0 — Configuration File Error
    "F0000": "config_file_error",
    "F0001": "lock_file_exists",
    // Class HV — Foreign Data Wrapper Error (SQL/MED)
    "HV000": "fdw_error",
    "HV005": "fdw_column_name_not_found",
    "HV002": "fdw_dynamic_parameter_value_needed",
    "HV010": "fdw_function_sequence_error",
    "HV021": "fdw_inconsistent_descriptor_information",
    "HV024": "fdw_invalid_attribute_value",
    "HV007": "fdw_invalid_column_name",
    "HV008": "fdw_invalid_column_number",
    "HV004": "fdw_invalid_data_type",
    "HV006": "fdw_invalid_data_type_descriptors",
    "HV091": "fdw_invalid_descriptor_field_identifier",
    "HV00B": "fdw_invalid_handle",
    "HV00C": "fdw_invalid_option_index",
    "HV00D": "fdw_invalid_option_name",
    "HV090": "fdw_invalid_string_length_or_buffer_length",
    "HV00A": "fdw_invalid_string_format",
    "HV009": "fdw_invalid_use_of_null_pointer",
    "HV014": "fdw_too_many_handles",
    "HV001": "fdw_out_of_memory",
    "HV00P": "fdw_no_schemas",
    "HV00J": "fdw_option_name_not_found",
    "HV00K": "fdw_reply_handle",
    "HV00Q": "fdw_schema_not_found",
    "HV00R": "fdw_table_not_found",
    "HV00L": "fdw_unable_to_create_execution",
    "HV00M": "fdw_unable_to_create_reply",
    "HV00N": "fdw_unable_to_establish_connection",
    // Class P0 — PL/pgSQL Error
    "P0000": "plpgsql_error",
    "P0001": "raise_exception",
    "P0002": "no_data_found",
    "P0003": "too_many_rows",
    // Class XX — Internal Error
    "XX000": "internal_error",
    "XX001": "data_corrupted",
    "XX002": "index_corrupted"
};

module.exports.codeToCondition = codeToCondition;
// Reverse lookup (condition name -> code). NOTE: a few condition names map to
// several codes (e.g. string_data_right_truncation, null_value_not_allowed,
// the 2F/38 routine-exception names); _.invert keeps only the last code seen.
module.exports.conditionToCode = _.invert(codeToCondition);
|
||||
226
app/server.js
Normal file
226
app/server.js
Normal file
@@ -0,0 +1,226 @@
|
||||
'use strict';
|
||||
|
||||
// CartoDB SQL API
|
||||
//
|
||||
// all requests expect the following URL args:
|
||||
// - `sql` {String} SQL to execute
|
||||
//
|
||||
// for private (read/write) queries:
|
||||
// - OAuth. Must have proper OAuth 1.1 headers. For OAuth 1.1 spec see Google
|
||||
//
|
||||
// eg. /api/v1/?sql=SELECT 1 as one (with a load of OAuth headers or URL arguments)
|
||||
//
|
||||
// for public (read only) queries:
|
||||
// - sql only, provided the subdomain exists in CartoDB and the table's sharing options are public
|
||||
//
|
||||
// eg. vizzuality.cartodb.com/api/v1/?sql=SELECT * from my_table
|
||||
//
|
||||
|
||||
var express = require('express');
|
||||
var Profiler = require('./stats/profiler-proxy');
|
||||
var _ = require('underscore');
|
||||
var fs = require('fs');
|
||||
var mkdirp = require('mkdirp');
|
||||
var TableCacheFactory = require('./utils/table_cache_factory');
|
||||
|
||||
var RedisPool = require('redis-mpool');
|
||||
var cartodbRedis = require('cartodb-redis');
|
||||
var UserDatabaseService = require('./services/user_database_service');
|
||||
var UserLimitsService = require('./services/user_limits');
|
||||
var JobPublisher = require('../batch/pubsub/job-publisher');
|
||||
var JobQueue = require('../batch/job_queue');
|
||||
var JobBackend = require('../batch/job_backend');
|
||||
var JobCanceller = require('../batch/job_canceller');
|
||||
var JobService = require('../batch/job_service');
|
||||
const Logger = require('./services/logger');
|
||||
|
||||
var cors = require('./middlewares/cors');
|
||||
|
||||
var GenericController = require('./controllers/generic_controller');
|
||||
var QueryController = require('./controllers/query_controller');
|
||||
var CopyController = require('./controllers/copy_controller');
|
||||
var JobController = require('./controllers/job_controller');
|
||||
var CacheStatusController = require('./controllers/cache_status_controller');
|
||||
var HealthCheckController = require('./controllers/health_check_controller');
|
||||
var VersionController = require('./controllers/version_controller');
|
||||
|
||||
var batchFactory = require('../batch');
|
||||
|
||||
process.env.PGAPPNAME = process.env.PGAPPNAME || 'cartodb_sqlapi';
|
||||
|
||||
// override Date.toJSON
|
||||
require('./utils/date_to_json');
|
||||
|
||||
// jshint maxcomplexity:9
|
||||
/**
 * Builds and wires the whole SQL API Express application: Redis-backed
 * metadata, request logging, profiling middleware, all HTTP controllers and
 * (outside tests) the batch query processor.
 *
 * @param {Object} statsClient - statsd client handed to the profiler and controllers
 * @returns {Object} the configured Express app (with .batch and .dataIngestionLogger attached)
 */
function App(statsClient) {

    var app = express();

    // Shared Redis pool, used both for metadata lookups and batch job queues.
    var redisPool = new RedisPool({
        name: 'sql-api',
        host: global.settings.redis_host,
        port: global.settings.redis_port,
        max: global.settings.redisPool,
        idleTimeoutMillis: global.settings.redisIdleTimeoutMillis,
        reapIntervalMillis: global.settings.redisReapIntervalMillis
    });
    var metadataBackend = cartodbRedis({ pool: redisPool });

    // Set default configuration
    global.settings.db_pubuser = global.settings.db_pubuser || "publicuser";
    global.settings.bufferedRows = global.settings.bufferedRows || 1000;
    // Rate limits default to fully disabled; user settings override per key.
    global.settings.ratelimits = Object.assign(
        {
            rateLimitsEnabled: false,
            endpoints: {
                query: false,
                job_create: false,
                job_get: false,
                job_delete: false
            }
        },
        global.settings.ratelimits
    );

    global.settings.tmpDir = global.settings.tmpDir || '/tmp';
    if (!fs.existsSync(global.settings.tmpDir)) {
        mkdirp.sync(global.settings.tmpDir);
    }

    var tableCache = new TableCacheFactory().build(global.settings);

    // Size based on https://github.com/CartoDB/cartodb.js/blob/3.15.2/src/geo/layer_definition.js#L72
    var SQL_QUERY_BODY_LOG_MAX_LENGTH = 2000;
    // Returns a JSON string with the (truncated) SQL from the request body,
    // used to expand the :sql token in the access-log format.
    app.getSqlQueryFromRequestBody = function(req) {
        var sqlQuery = req.body && req.body.q;
        if (!sqlQuery) {
            return '';
        }

        if (sqlQuery.length > SQL_QUERY_BODY_LOG_MAX_LENGTH) {
            sqlQuery = sqlQuery.substring(0, SQL_QUERY_BODY_LOG_MAX_LENGTH) + ' [...]';
        }
        return JSON.stringify({q: sqlQuery});
    };

    if ( global.log4js ) {
        var loggerOpts = {
            buffer: true,
            // log4js provides a token mechanism like express does, but it does not
            // expose the request/response objects in the callback, so relevant
            // information cannot be extracted from them.
            // This is a workaround to be able to access request/response.
            format: function(req, res, format) {
                var logFormat = global.settings.log_format ||
                    ':remote-addr :method :req[Host]:url :status :response-time ms -> :res[Content-Type]';

                logFormat = logFormat.replace(/:sql/, app.getSqlQueryFromRequestBody(req));
                return format(logFormat);
            }
        };
        app.use(global.log4js.connectLogger(global.log4js.getLogger(), _.defaults(loggerOpts, {level:'info'})));
    }

    app.use(cors());

    // Use step-profiler
    app.use(function bootstrap$prepareRequestResponse(req, res, next) {
        res.locals = res.locals || {};

        if (global.settings.api_hostname) {
            res.header('X-Served-By-Host', global.settings.api_hostname);
        }

        var profile = global.settings.useProfiler;
        req.profiler = new Profiler({
            profile: profile,
            statsd_client: statsClient
        });
        next();
    });

    // Set connection timeout
    if ( global.settings.hasOwnProperty('node_socket_timeout') ) {
        var timeout = parseInt(global.settings.node_socket_timeout);
        app.use(function(req, res, next) {
            req.connection.setTimeout(timeout);
            next();
        });
    }

    app.enable('jsonp callback');
    app.set("trust proxy", true);
    app.disable('x-powered-by');
    app.disable('etag');

    // basic routing

    var userDatabaseService = new UserDatabaseService(metadataBackend);

    const userLimitsServiceOptions = {
        limits: {
            rateLimitsEnabled: global.settings.ratelimits.rateLimitsEnabled
        }
    };
    const userLimitsService = new UserLimitsService(metadataBackend, userLimitsServiceOptions);

    const dataIngestionLogger = new Logger(global.settings.dataIngestionLogPath, 'data-ingestion');
    app.dataIngestionLogger = dataIngestionLogger;

    // Batch query machinery: publisher -> queue -> backend -> service.
    var jobPublisher = new JobPublisher(redisPool);
    var jobQueue = new JobQueue(metadataBackend, jobPublisher);
    var jobBackend = new JobBackend(metadataBackend, jobQueue);
    var jobCanceller = new JobCanceller();
    var jobService = new JobService(jobBackend, jobCanceller);

    var genericController = new GenericController();
    genericController.route(app);

    var queryController = new QueryController(
        metadataBackend,
        userDatabaseService,
        tableCache,
        statsClient,
        userLimitsService
    );
    queryController.route(app);

    var copyController = new CopyController(
        metadataBackend,
        userDatabaseService,
        userLimitsService,
        dataIngestionLogger
    );
    copyController.route(app);

    var jobController = new JobController(
        metadataBackend,
        userDatabaseService,
        jobService,
        statsClient,
        userLimitsService
    );
    jobController.route(app);

    var cacheStatusController = new CacheStatusController(tableCache);
    cacheStatusController.route(app);

    var healthCheckController = new HealthCheckController();
    healthCheckController.route(app);

    var versionController = new VersionController();
    versionController.route(app);

    // The batch processor can be disabled per-process via the --no-batch flag
    // and is never started in the test environment.
    var isBatchProcess = process.argv.indexOf('--no-batch') === -1;

    if (global.settings.environment !== 'test' && isBatchProcess) {
        var batchName = global.settings.api_hostname || 'batch';
        app.batch = batchFactory(
            metadataBackend, redisPool, batchName, statsClient, global.settings.batch_log_filename
        );
        app.batch.start();
    }

    return app;
}
|
||||
|
||||
module.exports = App;
|
||||
44
app/services/cached-query-tables.js
Normal file
44
app/services/cached-query-tables.js
Normal file
@@ -0,0 +1,44 @@
|
||||
'use strict';
|
||||
|
||||
var QueryTables = require('cartodb-query-tables');
|
||||
|
||||
var generateMD5 = require('../utils/md5');
|
||||
|
||||
/**
 * Caching façade over cartodb-query-tables: remembers which tables a given
 * query affects so repeated queries skip the PostgreSQL round trip.
 *
 * @param {Object} tableCache - LRU-like cache (peek/set interface)
 */
function CachedQueryTables(tableCache) {
    this.tableCache = tableCache;
}
|
||||
|
||||
module.exports = CachedQueryTables;
|
||||
|
||||
/**
 * Resolves the tables affected by a SQL query, memoizing the result per
 * user + query hash.
 *
 * @param {Object} pg - connection wrapper exposing `username()`
 * @param {String} sql - query to analyze
 * @param {Boolean} skipCache - when true the cache lookup is bypassed, but
 *   the freshly computed result is still stored for later calls
 * @param {Function} callback - (err, affectedTables)
 */
CachedQueryTables.prototype.getAffectedTablesFromQuery = function(pg, sql, skipCache, callback) {
    var self = this;
    var cacheKey = sqlCacheKey(pg.username(), sql);

    var cachedResult = skipCache ? undefined : this.tableCache.peek(cacheKey);

    if (cachedResult) {
        cachedResult.hits++;
        return callback(null, cachedResult.result);
    }

    QueryTables.getAffectedTablesFromQuery(pg, sql, function(err, result) {
        if (err) {
            return callback(err);
        }

        self.tableCache.set(cacheKey, { result: result, hits: 0 });

        return callback(null, result);
    });
};
|
||||
|
||||
// Cache keys are namespaced per user; the query text is hashed to keep keys short.
function sqlCacheKey(user, sql) {
    return [user, generateMD5(sql)].join(':');
}
|
||||
36
app/services/error_handler.js
Normal file
36
app/services/error_handler.js
Normal file
@@ -0,0 +1,36 @@
|
||||
'use strict';
|
||||
|
||||
/**
 * Error enriched with the metadata the SQL API returns to clients: an HTTP
 * status code plus optional PostgreSQL-style context/detail/hint fields.
 */
class ErrorHandler extends Error {
    /**
     * @param {Object} params
     * @param {String} params.message - human readable error message
     * @param {String} [params.context] - PostgreSQL error context
     * @param {String} [params.detail] - PostgreSQL error detail
     * @param {String} [params.hint] - PostgreSQL error hint
     * @param {Number} [params.http_status=400] - HTTP status to respond with
     * @param {String} [params.name] - error name/condition, when known
     */
    constructor({ message, context, detail, hint, http_status, name }) {
        super(message);

        Object.assign(this, {
            http_status: this.getHttpStatus(http_status),
            context,
            detail,
            hint
        });

        if (name) {
            this.name = name;
        }
    }

    /** Shape of the JSON body sent back to API clients. */
    getResponse() {
        const { message, context, detail, hint } = this;

        return {
            error: [message],
            context,
            detail,
            hint
        };
    }

    // Permission errors always map to 403, regardless of the requested status.
    getHttpStatus(http_status = 400) {
        return this.message.includes('permission denied') ? 403 : http_status;
    }
}
|
||||
|
||||
module.exports = ErrorHandler;
|
||||
41
app/services/error_handler_factory.js
Normal file
41
app/services/error_handler_factory.js
Normal file
@@ -0,0 +1,41 @@
|
||||
'use strict';
|
||||
|
||||
const ErrorHandler = require('./error_handler');
|
||||
const { codeToCondition } = require('../postgresql/error_codes');
|
||||
|
||||
module.exports = function ErrorHandlerFactory (err) {
|
||||
if (isTimeoutError(err)) {
|
||||
return createTimeoutError();
|
||||
} else {
|
||||
return createGenericError(err);
|
||||
}
|
||||
};
|
||||
|
||||
// Recognizes the PostgreSQL/platform messages that indicate a query was
// killed for exceeding its time budget.
function isTimeoutError(err) {
    const TIMEOUT_INDICATORS = [
        'statement timeout',
        'RuntimeError: Execution of function interrupted by signal',
        'canceling statement due to user request'
    ];

    return err.message && TIMEOUT_INDICATORS.some(indicator => err.message.indexOf(indicator) > -1);
}
|
||||
|
||||
// Builds the canonical 429 "over platform limits" error returned for timeouts.
function createTimeoutError() {
    const message = 'You are over platform\'s limits: SQL query timeout error.' +
        ' Refactor your query before running again or contact CARTO support for more details.';

    return new ErrorHandler({
        message,
        context: 'limit',
        detail: 'datasource',
        http_status: 429
    });
}
|
||||
|
||||
// Wraps an arbitrary error, mapping its PostgreSQL error code to a condition
// name when one is known.
function createGenericError(err) {
    const { message, context, detail, hint, http_status, code, name } = err;

    return new ErrorHandler({
        message,
        context,
        detail,
        hint,
        http_status,
        name: codeToCondition[code] || name
    });
}
|
||||
38
app/services/logger.js
Normal file
38
app/services/logger.js
Normal file
@@ -0,0 +1,38 @@
|
||||
'use strict';
|
||||
|
||||
const bunyan = require('bunyan');
|
||||
|
||||
/**
 * Thin wrapper over bunyan that logs to a file when `path` is given and to
 * stdout otherwise. Log output is silenced down to 'fatal' while running tests.
 */
class Logger {
    /**
     * @param {String} path - log file path; falsy means log to stdout
     * @param {String} name - logger name reported in every record
     */
    constructor (path, name) {
        const level = process.env.NODE_ENV === 'test' ? 'fatal' : 'info';
        const stream = { level };

        if (path) {
            stream.path = path;
        } else {
            stream.stream = process.stdout;
        }

        this.path = path;
        this.logger = bunyan.createLogger({
            name,
            streams: [stream]
        });
    }

    info (log, message) {
        this.logger.info(log, message);
    }

    warn (log, message) {
        this.logger.warn(log, message);
    }

    // Called on log rotation so bunyan reopens its file handles.
    reopenFileStreams () {
        console.log('Reloading log file', this.path);
        this.logger.reopenFileStreams();
    }
}
|
||||
|
||||
module.exports = Logger;
|
||||
63
app/services/pg-entities-access-validator.js
Normal file
63
app/services/pg-entities-access-validator.js
Normal file
@@ -0,0 +1,63 @@
|
||||
'use strict';
|
||||
|
||||
// Schemas/tables that must never be reachable through the SQL API.
// A ['*'] entry forbids every table in that schema.
const FORBIDDEN_ENTITIES = {
    carto: ['*'],
    cartodb: [
        'cdb_analysis_catalog',
        'cdb_conf',
        'cdb_tablemetadata'
    ],
    pg_catalog: ['*'],
    information_schema: ['*'],
    public: ['spatial_ref_sys'],
    topology: [
        'layer',
        'topology'
    ]
};

const Validator = {
    /**
     * Checks whether every table touched by a query may be accessed.
     *
     * Hard validation (FORBIDDEN_ENTITIES) only runs when enabled via
     * `global.settings.validatePGEntitiesAccess`; soft validation (system-table
     * name heuristic) is skipped for master-level authorization.
     *
     * @param {Object} affectedTables - object exposing a `tables` array
     * @param {String} authorizationLevel - e.g. 'master'
     * @returns {Boolean} true when access is allowed
     */
    validate(affectedTables, authorizationLevel) {
        const tables = affectedTables && affectedTables.tables;

        if (!tables) {
            return true;
        }

        if (global.settings.validatePGEntitiesAccess && !this.hardValidation(tables)) {
            return false;
        }

        if (authorizationLevel !== 'master' && !this.softValidation(tables)) {
            return false;
        }

        return true;
    },

    // Rejects any table explicitly listed in FORBIDDEN_ENTITIES, or living in
    // a fully-forbidden ('*') schema.
    hardValidation(tables) {
        return !tables.some(({ schema_name, table_name }) => {
            const forbidden = FORBIDDEN_ENTITIES[schema_name];

            return !!forbidden && forbidden.length > 0 &&
                (forbidden[0] === '*' || forbidden.includes(table_name));
        });
    },

    // Rejects names that look like PostgreSQL system tables (word-boundary "pg_").
    softValidation(tables) {
        return tables.every(({ table_name }) => !/\bpg_/.test(table_name));
    }
};
|
||||
|
||||
module.exports = Validator;
|
||||
77
app/services/stream_copy.js
Normal file
77
app/services/stream_copy.js
Normal file
@@ -0,0 +1,77 @@
|
||||
'use strict';
|
||||
|
||||
const PSQL = require('cartodb-psql');
|
||||
const copyTo = require('pg-copy-streams').to;
|
||||
const copyFrom = require('pg-copy-streams').from;
|
||||
const { Client } = require('pg');
|
||||
|
||||
// Copy direction identifiers, exposed through the static getters below.
const ACTION_TO = 'to';
const ACTION_FROM = 'from';
// Includes the quotes so it can be interpolated verbatim into
// `SET statement_timeout=...`.
const DEFAULT_TIMEOUT = "'5h'";

/**
 * Runs a PostgreSQL COPY TO/FROM statement and exposes it as a Node stream,
 * on a dedicated connection (optionally through the batch port).
 */
module.exports = class StreamCopy {

    /**
     * @param {String} sql - the COPY statement to execute
     * @param {Object} userDbParams - per-user connection params; `port` is
     *   overridden by `global.settings.db_batch_port` when configured
     */
    constructor(sql, userDbParams) {
        const dbParams = Object.assign({}, userDbParams, {
            port: global.settings.db_batch_port || userDbParams.port
        });
        this.pg = new PSQL(dbParams);
        this.sql = sql;
        // pg-copy-streams stream; populated by getPGStream()
        this.stream = null;
        this.timeout = global.settings.copy_timeout || DEFAULT_TIMEOUT;
    }

    static get ACTION_TO() {
        return ACTION_TO;
    }

    static get ACTION_FROM() {
        return ACTION_FROM;
    }

    /**
     * Connects, applies the statement timeout and starts the COPY, handing the
     * resulting stream to `cb`.
     *
     * @param {String} action - StreamCopy.ACTION_TO or StreamCopy.ACTION_FROM
     * @param {Function} cb - (err, pgstream)
     */
    getPGStream(action, cb) {
        this.pg.connect((err, client, done) => {
            if (err) {
                return cb(err);
            }

            client.query('SET statement_timeout=' + this.timeout, (err) => {

                if (err) {
                    return cb(err);
                }

                // COPY TO streams data out of the DB, COPY FROM streams data in
                const streamMaker = action === ACTION_TO ? copyTo : copyFrom;
                this.stream = streamMaker(this.sql);
                const pgstream = client.query(this.stream);

                pgstream
                    .on('end', () => {
                        if(action === ACTION_TO) {
                            // NOTE(review): resumes the underlying socket —
                            // presumably to drain any buffered data before the
                            // connection is released; confirm.
                            pgstream.connection.stream.resume();
                        }
                        done();
                    })
                    .on('error', err => done(err))
                    .on('cancelQuery', err => {
                        if(action === ACTION_TO) {
                            // Cancel an outgoing COPY with a second client, per the protocol:
                            // See https://www.postgresql.org/docs/9.5/static/protocol-flow.html#PROTOCOL-COPY
                            const cancelingClient = new Client(client.connectionParameters);
                            cancelingClient.cancel(client, pgstream);

                            // Release the connection, passing the error so the
                            // pool discards it.
                            // see https://node-postgres.com/api/pool#releasecallback
                            return done(err);
                        } else if (action === ACTION_FROM) {
                            // Abort an incoming COPY by sending CopyFail to the server
                            client.connection.sendCopyFail('CARTO SQL API: Connection closed by client');
                        }
                    });

                cb(null, pgstream);
            });
        });
    }

    // Row count reported by pg-copy-streams for the finished COPY.
    getRowCount() {
        return this.stream.rowCount;
    }
};
|
||||
85
app/services/stream_copy_metrics.js
Normal file
85
app/services/stream_copy_metrics.js
Normal file
@@ -0,0 +1,85 @@
|
||||
'use strict';
|
||||
|
||||
const { getFormatFromCopyQuery } = require('../utils/query_info');
|
||||
|
||||
/**
 * Collects size/row/timing metrics for a single COPY operation and logs them
 * exactly once through the provided logger.
 */
module.exports = class StreamCopyMetrics {
    /**
     * @param {Object} logger - logger exposing `info(obj)`
     * @param {String} type - copy direction label (e.g. 'copyto'/'copyfrom')
     * @param {String} sql - COPY statement; used to derive the data format
     * @param {String} user - CARTO username the operation belongs to
     * @param {Boolean} [isGzip=false] - whether the payload is gzip-compressed
     */
    constructor(logger, type, sql, user, isGzip = false) {
        this.logger = logger;

        this.type = type;
        // NOTE: may be `false` when the SQL is not a recognizable COPY query
        this.format = getFormatFromCopyQuery(sql);
        this.isGzip = isGzip;
        this.username = user;
        this.size = 0;
        this.gzipSize = 0;
        this.rows = 0;

        this.startTime = new Date();
        this.endTime = null;
        this.time = null;

        this.success = true;
        this.error = null;

        // Guards against logging the same operation twice
        this.ended = false;
    }

    // Accumulates uncompressed byte count.
    addSize(size) {
        this.size += size;
    }

    // Accumulates compressed byte count (only meaningful when isGzip).
    addGzipSize(size) {
        this.gzipSize += size;
    }

    /**
     * Finishes the measurement and emits the log record. Idempotent: only the
     * first call has any effect.
     *
     * @param {Number} [rows] - final row count (ignored unless an integer)
     * @param {Error} [error] - failure, if any (ignored unless an Error)
     */
    end(rows = null, error = null) {
        if (this.ended) {
            return;
        }

        this.ended = true;

        if (Number.isInteger(rows)) {
            this.rows = rows;
        }

        if (error instanceof Error) {
            this.error = error;
        }

        this.endTime = new Date();
        // Elapsed wall-clock time in seconds
        this.time = (this.endTime.getTime() - this.startTime.getTime()) / 1000;

        this._log(
            this.startTime.toISOString(),
            // Only report gzip size when compression was actually used
            this.isGzip && this.gzipSize ? this.gzipSize : null,
            this.error ? this.error.message : null
        );
    }

    // Builds and emits the structured log record; flips `success` to false
    // when an error message is present.
    _log(timestamp, gzipSize = null, errorMessage = null) {
        let logData = {
            type: this.type,
            format: this.format,
            size: this.size,
            rows: this.rows,
            gzip: this.isGzip,
            'cdb-user': this.username,
            time: this.time,
            timestamp
        };

        if (gzipSize) {
            logData.gzipSize = gzipSize;
        }

        if (errorMessage) {
            logData.error = errorMessage;
            this.success = false;
        }

        logData.success = this.success;

        this.logger.info(logData);
    }
};
|
||||
107
app/services/user_database_service.js
Normal file
107
app/services/user_database_service.js
Normal file
@@ -0,0 +1,107 @@
|
||||
'use strict';
|
||||
|
||||
/**
 * Whether the metadata backend returned a complete API key record.
 *
 * A record is only usable when all of its credential fields are present;
 * `!= null` (loose) rejects both `null` and `undefined`, so a backend that
 * omits a field entirely is treated the same as one that returns null.
 *
 * @param {Object} apikey - record with type/user/databasePassword/databaseRole
 * @returns {Boolean}
 */
function isApiKeyFound(apikey) {
    return apikey.type != null &&
        apikey.user != null &&
        apikey.databasePassword != null &&
        apikey.databaseRole != null;
}
|
||||
|
||||
/**
 * Resolves per-user database connection parameters from API-key metadata.
 *
 * @param {Object} metadataBackend - cartodb-redis style backend used for all
 *   user and API-key lookups
 */
function UserDatabaseService(metadataBackend) {
    this.metadataBackend = metadataBackend;
}
|
||||
|
||||
// Builds the user-facing message returned when a CARTO account cannot be resolved.
function errorUserNotFoundMessageTemplate (user) {
    const message = `Sorry, we can't find CARTO user '${user}'. Please check that you have entered the correct domain.`;

    return message;
}
|
||||
|
||||
// Master-level access granted without an explicit API key token means the
// request was authorized via OAuth.
function isOauthAuthorization({ apikeyToken, authorizationLevel }) {
    const isMasterLevel = authorizationLevel === 'master';

    return isMasterLevel && !apikeyToken;
}
|
||||
|
||||
/**
 * Resolves the PostgreSQL connection parameters for a request.
 *
 * Callback is invoked with `dbParams` and `authDbParams`.
 * `dbParams` depends on the API key that was provided, so it might carry a
 * public role with just SELECT permission, whereas `authDbParams` always
 * carries the master credentials. That is useful when a query must be run
 * both with and without elevated permissions.
 *
 * @param {String} username - CARTO account name used for all metadata lookups
 * @param {String} apikeyToken - API key token; falsy values fall back to 'default_public'
 * @param {String} authorizationLevel - e.g. 'master'; master level with no token selects the OAuth path
 * @param {Function} callback (err, dbParams, authDbParams)
 */
UserDatabaseService.prototype.getConnectionParams = function (username, apikeyToken, authorizationLevel, callback) {
    this.metadataBackend.getAllUserDBParams(username, (err, dbParams) => {
        if (err) {
            // Metadata lookup failures are reported as "user not found"
            err.http_status = 404;
            err.message = errorUserNotFoundMessageTemplate(username);

            return callback(err);
        }

        // Connection settings shared by every credential variant below
        const commonDBConfiguration = {
            port: global.settings.db_port,
            host: dbParams.dbhost,
            dbname: dbParams.dbname,
        };

        this.metadataBackend.getMasterApikey(username, (err, masterApikey) => {

            if (err) {
                err.http_status = 404;
                err.message = errorUserNotFoundMessageTemplate(username);

                return callback(err);
            }

            if (!isApiKeyFound(masterApikey)) {
                const apiKeyNotFoundError = new Error('Unauthorized');
                apiKeyNotFoundError.type = 'auth';
                apiKeyNotFoundError.subtype = 'api-key-not-found';
                apiKeyNotFoundError.http_status = 401;

                return callback(apiKeyNotFoundError);
            }

            // Master-role credentials; always returned as `authDbParams`
            const masterDBConfiguration = Object.assign({
                user: masterApikey.databaseRole,
                pass: masterApikey.databasePassword
            },
            commonDBConfiguration);

            // OAuth-authorized requests connect directly as master
            if (isOauthAuthorization({ apikeyToken, authorizationLevel})) {
                return callback(null, masterDBConfiguration, masterDBConfiguration);
            }

            // Default Api key fallback: no token means the public API key
            apikeyToken = apikeyToken || 'default_public';

            this.metadataBackend.getApikey(username, apikeyToken, (err, apikey) => {
                if (err) {
                    err.http_status = 404;
                    err.message = errorUserNotFoundMessageTemplate(username);

                    return callback(err);
                }

                if (!isApiKeyFound(apikey)) {
                    const apiKeyNotFoundError = new Error('Unauthorized');
                    apiKeyNotFoundError.type = 'auth';
                    apiKeyNotFoundError.subtype = 'api-key-not-found';
                    apiKeyNotFoundError.http_status = 401;

                    return callback(apiKeyNotFoundError);
                }

                // Credentials scoped to the provided (or default public) API key
                const DBConfiguration = Object.assign({
                    user: apikey.databaseRole,
                    pass: apikey.databasePassword
                },
                commonDBConfiguration);

                callback(null, DBConfiguration, masterDBConfiguration);
            });
        });
    });
};
|
||||
|
||||
module.exports = UserDatabaseService;
|
||||
27
app/services/user_limits.js
Normal file
27
app/services/user_limits.js
Normal file
@@ -0,0 +1,27 @@
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
* UserLimits
|
||||
* @param {cartodb-redis} metadataBackend
|
||||
* @param {object} options
|
||||
*/
|
||||
class UserLimits {
    /**
     * Wraps rate-limit lookups against the metadata backend.
     *
     * @param {cartodb-redis} metadataBackend
     * @param {object} [options] - expects `options.limits.rateLimitsEnabled`
     */
    constructor(metadataBackend, options = {}) {
        this.metadataBackend = metadataBackend;
        this.options = options;

        this.preprareRateLimit();
    }

    // Kept under its original (misspelled) name for backward compatibility.
    preprareRateLimit() {
        return this.prepareRateLimit();
    }

    // Loads the Redis rate-limiting script, but only when the feature is
    // enabled. Guards against a missing `limits` object so that
    // `new UserLimits(backend)` no longer throws.
    prepareRateLimit() {
        const limits = this.options.limits || {};

        if (limits.rateLimitsEnabled) {
            this.metadataBackend.loadRateLimitsScript();
        }
    }

    /**
     * Fetches the rate limit for a user/endpoint group (always within the
     * 'sql' app namespace).
     *
     * @param {String} user
     * @param {String} endpointGroup
     * @param {Function} callback
     */
    getRateLimit(user, endpointGroup, callback) {
        this.metadataBackend.getRateLimit(user, 'sql', endpointGroup, callback);
    }
}
|
||||
|
||||
module.exports = UserLimits;
|
||||
75
app/stats/client.js
Normal file
75
app/stats/client.js
Normal file
@@ -0,0 +1,75 @@
|
||||
'use strict';
|
||||
|
||||
var _ = require('underscore');
|
||||
var debug = require('debug')('windshaft:stats_client');
|
||||
var StatsD = require('node-statsd').StatsD;
|
||||
|
||||
module.exports = {
    /**
     * Returns a StatsD instance or a stub object that replicates the StatsD public interface so there is no need
     * to keep checking whether the stats client is instantiated or not.
     *
     * The first call to this method implies all future calls will use the config specified in the very first call.
     *
     * TODO: It's far from ideal to make this a singleton, improvement desired.
     * We proceed this way to be able to use StatsD from several places sharing one single StatsD instance.
     *
     * @param config Configuration for StatsD; if undefined it will return a stub
     * @returns {StatsD|Object}
     */
    getInstance: function(config) {

        if (!this.instance) {

            var instance;

            if (config) {
                instance = new StatsD(config);
                instance.last_error = { msg: '', count: 0 };
                // Throttle repeated socket errors: a new message is logged
                // immediately; repeats of the same message are summarized at
                // most once per second.
                instance.socket.on('error', function (err) {
                    var last_err = instance.last_error;
                    var last_msg = last_err.msg;
                    var this_msg = '' + err;
                    if (this_msg !== last_msg) {
                        debug("statsd client socket error: " + err);
                        instance.last_error.count = 1;
                        instance.last_error.msg = this_msg;
                    } else {
                        ++last_err.count;
                        if (!last_err.interval) {
                            // NOTE(review): the interval is only cleared after it
                            // reports repeats (count > 1); if errors stop while
                            // count === 1 it keeps ticking forever — confirm intended.
                            instance.last_error.interval = setInterval(function () {
                                var count = instance.last_error.count;
                                if (count > 1) {
                                    debug("last statsd client socket error repeated " + count + " times");
                                    instance.last_error.count = 1;
                                    clearInterval(instance.last_error.interval);
                                    instance.last_error.interval = null;
                                }
                            }, 1000);
                        }
                    }
                });
            } else {
                // No config: build a no-op stub honoring StatsD's callback signature.
                var stubFunc = function (stat, value, sampleRate, callback) {
                    if (_.isFunction(callback)) {
                        callback(null, 0);
                    }
                };
                instance = {
                    timing: stubFunc,
                    increment: stubFunc,
                    decrement: stubFunc,
                    gauge: stubFunc,
                    unique: stubFunc,
                    set: stubFunc,
                    sendAll: stubFunc,
                    send: stubFunc
                };
            }

            this.instance = instance;
        }

        return this.instance;
    }
};
|
||||
55
app/stats/profiler-proxy.js
Normal file
55
app/stats/profiler-proxy.js
Normal file
@@ -0,0 +1,55 @@
|
||||
'use strict';
|
||||
|
||||
var Profiler = require('step-profiler');
|
||||
|
||||
/**
|
||||
* Proxy to encapsulate node-step-profiler module so there is no need to check if there is an instance
|
||||
*/
|
||||
/**
 * Proxy that encapsulates step-profiler so callers never need to check
 * whether profiling is enabled before invoking profiler methods.
 *
 * @param {Object} opts
 * @param {Boolean} opts.profile - whether profiling is active
 * @param {Object} [opts.statsd_client] - StatsD client used to report timings
 */
function ProfilerProxy(opts) {
    this.profile = !!opts.profile;

    // Only instantiate the real profiler when profiling is enabled; every
    // method below is a no-op otherwise.
    this.profiler = this.profile ? new Profiler({statsd_client: opts.statsd_client}) : null;
}

/** Marks the step `what` as done (no-op when profiling is disabled). */
ProfilerProxy.prototype.done = function(what) {
    if (this.profile) {
        this.profiler.done(what);
    }
};

/** Ends the whole profiling session (no-op when disabled). */
ProfilerProxy.prototype.end = function() {
    if (this.profile) {
        this.profiler.end();
    }
};

/** Starts timing the step `what` (no-op when disabled). */
ProfilerProxy.prototype.start = function(what) {
    if (this.profile) {
        this.profiler.start(what);
    }
};

/** Adds ad-hoc measurements (no-op when disabled). */
ProfilerProxy.prototype.add = function(what) {
    if (this.profile) {
        this.profiler.add(what || {});
    }
};

/** Flushes collected timings to StatsD (no-op when disabled). */
ProfilerProxy.prototype.sendStats = function() {
    if (this.profile) {
        this.profiler.sendStats();
    }
};

/** @returns {String} profiler report, or '' when profiling is disabled. */
ProfilerProxy.prototype.toString = function() {
    return this.profile ? this.profiler.toString() : "";
};

/** @returns {String} JSON profiler report, or '{}' when profiling is disabled. */
ProfilerProxy.prototype.toJSONString = function() {
    return this.profile ? this.profiler.toJSONString() : "{}";
};
|
||||
|
||||
module.exports = ProfilerProxy;
|
||||
5
app/utils/cache_key_generator.js
Normal file
5
app/utils/cache_key_generator.js
Normal file
@@ -0,0 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
module.exports = function generateCacheKey(database, affectedTables) {
|
||||
return database + ":" + affectedTables.join(',');
|
||||
};
|
||||
8
app/utils/content_disposition.js
Normal file
8
app/utils/content_disposition.js
Normal file
@@ -0,0 +1,8 @@
|
||||
'use strict';
|
||||
|
||||
module.exports = function getContentDisposition(formatter, filename, inline) {
|
||||
var ext = formatter.getFileExtension();
|
||||
var time = new Date().toUTCString();
|
||||
return ( inline ? 'inline' : 'attachment' ) + '; filename=' + filename + '.' + ext + '; ' +
|
||||
'modification-date="' + time + '";';
|
||||
};
|
||||
19
app/utils/date_to_json.js
Normal file
19
app/utils/date_to_json.js
Normal file
@@ -0,0 +1,19 @@
|
||||
'use strict';
|
||||
|
||||
// jshint ignore:start
// Left-pads a number below 10 with a leading zero (numbers >= 10 are
// returned unchanged, which is fine for string concatenation).
function pad(n) {
    return n < 10 ? '0' + n : n;
}

/**
 * Overrides Date#toJSON to serialize dates in LOCAL time with an explicit
 * UTC offset (e.g. "2020-01-02T03:04:05+0530") instead of the default
 * UTC/ISO representation.
 *
 * NOTE: extending a native prototype is generally discouraged, but changing
 * JSON date serialization globally is this module's whole purpose.
 */
Date.prototype.toJSON = function() {
    var s = this.getFullYear() + '-' + pad(this.getMonth() + 1) + '-' + pad(this.getDate()) + 'T' +
        pad(this.getHours()) + ':' + pad(this.getMinutes()) + ':' + pad(this.getSeconds());
    var offset = this.getTimezoneOffset();
    if (offset === 0) {
        s += 'Z';
    } else {
        // Math.floor on the hours component: with fractional-hour timezones
        // (e.g. UTC+05:30, offset -330) the previous `offset / 60` produced
        // strings like "05.5" instead of "0530".
        var absOffset = Math.abs(offset);
        s += ( offset < 0 ? '+' : '-' ) + pad(Math.floor(absOffset / 60)) + pad(absOffset % 60);
    }
    return s;
};
// jshint ignore:end
|
||||
9
app/utils/filename_sanitizer.js
Normal file
9
app/utils/filename_sanitizer.js
Normal file
@@ -0,0 +1,9 @@
|
||||
'use strict';
|
||||
|
||||
var path = require('path');
|
||||
|
||||
module.exports = function sanitize_filename(filename) {
|
||||
filename = path.basename(filename, path.extname(filename));
|
||||
filename = filename.replace(/[;()\[\]<>'"\s]/g, '_');
|
||||
return filename;
|
||||
};
|
||||
9
app/utils/md5.js
Normal file
9
app/utils/md5.js
Normal file
@@ -0,0 +1,9 @@
|
||||
'use strict';
|
||||
|
||||
var crypto = require('crypto');
|
||||
|
||||
module.exports = function generateMD5(data){
|
||||
var hash = crypto.createHash('md5');
|
||||
hash.update(data);
|
||||
return hash.digest('hex');
|
||||
};
|
||||
49
app/utils/no_cache.js
Normal file
49
app/utils/no_cache.js
Normal file
@@ -0,0 +1,49 @@
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
* This module provides an object with the interface of an LRU cache
|
||||
* but that actually does not store anything.
|
||||
*
|
||||
* See https://github.com/isaacs/node-lru-cache/tree/v2.5.0
|
||||
*/
|
||||
|
||||
/**
 * Stub with the interface of an LRU cache that never stores anything.
 *
 * See https://github.com/isaacs/node-lru-cache/tree/v2.5.0
 */
function NoCache() {
}

module.exports = NoCache;

// Write operations report success but store nothing.
NoCache.prototype.set = function (/* key, value */) {
    return true;
};

// Membership checks are always negative.
NoCache.prototype.has = function (/* key */) {
    return false;
};

// Key/value listings are always empty.
NoCache.prototype.keys = function () {
    return [];
};

NoCache.prototype.values = function () {
    return [];
};

// Every remaining read/delete/iteration operation yields undefined.
['get', 'peek', 'del', 'reset', 'forEach'].forEach(function (method) {
    NoCache.prototype[method] = function () {
        return undefined;
    };
});
|
||||
31
app/utils/query_info.js
Normal file
31
app/utils/query_info.js
Normal file
@@ -0,0 +1,31 @@
|
||||
'use strict';
|
||||
|
||||
const COPY_FORMATS = ['TEXT', 'CSV', 'BINARY'];
|
||||
|
||||
module.exports = {
|
||||
getFormatFromCopyQuery(copyQuery) {
|
||||
let format = 'TEXT'; // Postgres default format
|
||||
|
||||
copyQuery = copyQuery.toUpperCase();
|
||||
|
||||
if (!copyQuery.startsWith("COPY ")) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if(copyQuery.includes(' WITH') && copyQuery.includes('FORMAT ')) {
|
||||
const regex = /\bFORMAT\s+(\w+)/;
|
||||
const result = regex.exec(copyQuery);
|
||||
|
||||
if (result && result.length === 2) {
|
||||
if (COPY_FORMATS.includes(result[1])) {
|
||||
format = result[1];
|
||||
format = format.toUpperCase();
|
||||
} else {
|
||||
format = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return format;
|
||||
}
|
||||
};
|
||||
14
app/utils/query_may_write.js
Normal file
14
app/utils/query_may_write.js
Normal file
@@ -0,0 +1,14 @@
|
||||
'use strict';
|
||||
|
||||
var sqlQueryMayWriteRegex = new RegExp("\\b(alter|insert|update|delete|create|drop|reindex|truncate|refresh)\\b", "i");
|
||||
|
||||
/**
|
||||
* This is a fuzzy check, the return could be true even if the query doesn't really write anything. But you can be
|
||||
* pretty sure of a false return.
|
||||
*
|
||||
* @param sql The SQL statement to check against
|
||||
* @returns {boolean} Return true of the given query may write to the database
|
||||
*/
|
||||
module.exports = function queryMayWrite(sql) {
|
||||
return sqlQueryMayWriteRegex.test(sql);
|
||||
};
|
||||
32
app/utils/table_cache_factory.js
Normal file
32
app/utils/table_cache_factory.js
Normal file
@@ -0,0 +1,32 @@
|
||||
'use strict';
|
||||
|
||||
var LRU = require('lru-cache');
|
||||
var NoCache = require('./no_cache');
|
||||
|
||||
/**
|
||||
* This module abstracts the creation of a tableCache,
|
||||
* depending on the configuration passed along
|
||||
*/
|
||||
|
||||
/**
 * Builds the table cache used to memoize query → affected-tables lookups:
 * a bounded LRU when enabled via settings, a no-op stub otherwise.
 */
function TableCacheFactory() {
}

module.exports = TableCacheFactory;

/**
 * @param {Object} settings - reads tableCacheEnabled, tableCacheMax and
 *   tableCacheMaxAge
 * @returns {LRU|NoCache}
 */
TableCacheFactory.prototype.build = function (settings) {
    if (!settings.tableCacheEnabled) {
        return new NoCache();
    }

    return LRU({
        // store no more than these many items in the cache
        max: settings.tableCacheMax || 8192,
        // consider entries expired after these many milliseconds (10 minutes by default)
        maxAge: settings.tableCacheMaxAge || 1000 * 60 * 10
    });
};
|
||||
Reference in New Issue
Block a user