Compare commits
116 commits: 0.15-serve ... python-0.1
| SHA1 |
|---|
| 6177f0ac76 |
| 2c49f09aad |
| 22fdbd0f4e |
| 352d4217bc |
| 2fd560f3cf |
| a4858bbf3c |
| 88ed777785 |
| 6f0db20163 |
| 73aa26590b |
| c571edb21e |
| e878031744 |
| 49fd51f6da |
| 708bee2912 |
| cf3c6f2ce5 |
| 93579532e3 |
| 0533018326 |
| e380d51bec |
| 6058960ec5 |
| 336d8be977 |
| 75557837b0 |
| e7c35457e1 |
| 80963e2589 |
| 19d6cacdb3 |
| 0d22942a72 |
| e8122c6728 |
| d4ac2eb5e6 |
| db80d389e0 |
| 8e02c64aeb |
| cc2ab1bc0c |
| 948463f836 |
| e1a7d1751c |
| 0c49107f96 |
| 05dc69af34 |
| 247034c21e |
| 2b1b1c981f |
| aaff5564ec |
| 72998c324a |
| bbd9b6b98e |
| 27be704bd6 |
| 03f4a1f4f7 |
| 91131488c5 |
| 7d137f3efc |
| 93a5de5f20 |
| fc35aac639 |
| 98d533b707 |
| 6d0ad85d48 |
| 00e6cace76 |
| e9ad35ba1d |
| dcb3935021 |
| cded6c2f08 |
| e1b357137a |
| 3844cfc226 |
| 2a1276f4f1 |
| 35da7e48fd |
| 12aebb7eee |
| 0d87a95270 |
| 18e1a5c7c9 |
| fcca5da302 |
| 1aec541906 |
| 9e98e0794d |
| 8fbb41742c |
| 275a6dc27f |
| d522083d5c |
| 073163eb1a |
| 0c62c4bada |
| 3361960cfc |
| 86ab3abc53 |
| b1f3405cd0 |
| fb812ee15e |
| c1dd410201 |
| 34ddd28e6b |
| d85bc65bf8 |
| 443fe88d5a |
| 6c61626214 |
| 74d2fba763 |
| e24819f193 |
| 1e6ee8d5c1 |
| 3a6cc4c364 |
| 8ad2434b1d |
| 0b7b44d8a5 |
| 02a2619b45 |
| 4b4a02905c |
| 1f3a655ae5 |
| 9d60fde0b8 |
| efdc151282 |
| fd2cc21942 |
| 18f05fbd4f |
| d2f4586bae |
| 54eb279ae8 |
| 85d6c2a54e |
| cad2051efe |
| 96a93e3c56 |
| facda9e8be |
| 64fc18b9e0 |
| 9381d5644b |
| 9f55f2ee3b |
| 1087c1266b |
| d5a296a30c |
| f8caf4314d |
| d7910fbbf1 |
| d47049c813 |
| cc8f93c535 |
| 3f9441de7e |
| fe41359a1f |
| 46a934b178 |
| 184358bdec |
| a6d546f2ee |
| fc99f7aba9 |
| e959873b32 |
| a98093540d |
| 78add220cd |
| cf2f86136b |
| fb183b07ee |
| 5ab727bcb6 |
| 1e9b551160 |
| fc291a7c63 |
.gitignore (vendored): 1 changed line
@@ -1,5 +1,6 @@
.DS_Store
*.pyc
.coverage
cartodb_services.egg-info/
build/
dist/
NEWS.md: 24 changed lines
@@ -1,3 +1,27 @@
October 21st, 2016
==================
* Version 0.9.2 of the python package
* mapzen routing quota now is configurable per user

September 28, 2016
==========
* Released version 0.8.1 of Python package cartodb\_services
* Improvements in QPS retry decorator for requests to external services

https://github.com/CartoDB/dataservices-api/releases/tag/python-0.8.1

September 8, 2016
===========
* Released version 0.11.1 of the client
* Minor change in the name of the function parameter sent to server and Observatory backend for compatibility with the last observatory-extension framework updates

September 1, 2016
===========
* Released version 0.11.0 of the client
* Include DS table functions to create and populate a table with the GetMeasure function in observatory
* Released version 0.15.1 of the server
* Rename DS table functions

August 29, 2016
===========
* Released version 0.15.0 of the server
@@ -5,7 +5,7 @@ The CARTO Data Services SQL API
Steps to deploy a new Data Services API version :

- Deploy new version of dataservices API to all servers
- Update the server user using: ALTER EXTENSION cdb_dataservices_server UPDATE TO '<CURRENT_VERSION>';
- Update the server user using: ALTER EXTENSION cdb_dataservices_server UPDATE TO '\<CURRENT_VERSION\>';
- Update the python dependencies if needed: **cartodb_geocoder** and **heremaps**
- Add the needed config in the `cdb_conf` table:
  - `redis_metadata_config` and `redis_metrics_conf`
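The deployment steps above come down to a couple of SQL statements run on the server database. A minimal sketch, assuming the server extension is being taken to 0.15.1 and that the cartodb extension's `CDB_Conf_SetConf` helper is used to fill `cdb_conf`; the Redis values are placeholders, not taken from this changeset:

```sql
-- Upgrade the server-side extension (version number is illustrative)
ALTER EXTENSION cdb_dataservices_server UPDATE TO '0.15.1';

-- Add the required configuration entries to the cdb_conf table;
-- the JSON layout below is an assumed example, adjust to your environment
SELECT cartodb.CDB_Conf_SetConf('redis_metadata_config',
  '{"redis_host": "localhost", "redis_port": 6379, "timeout": 0.1, "redis_db": 5}');
SELECT cartodb.CDB_Conf_SetConf('redis_metrics_conf',
  '{"redis_host": "localhost", "redis_port": 6379, "timeout": 0.1, "redis_db": 5}');
```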
@@ -13,8 +13,8 @@ OLD_VERSIONS = $(wildcard old_versions/*.sql)
# @see http://www.postgresql.org/docs/current/static/extend-pgxs.html
DATA = $(NEW_EXTENSION_ARTIFACT) \
       $(OLD_VERSIONS) \
       cdb_dataservices_client--0.10.1--0.10.2.sql \
       cdb_dataservices_client--0.10.2--0.10.1.sql
       cdb_dataservices_client--0.11.0--0.11.1.sql \
       cdb_dataservices_client--0.11.1--0.11.0.sql

REGRESS = $(notdir $(basename $(wildcard test/sql/*test.sql)))
client/cdb_dataservices_client--0.11.0--0.11.1.sql (new file): 140 lines
@@ -0,0 +1,140 @@
--DO NOT MODIFY THIS FILE, IT IS GENERATED AUTOMATICALLY FROM SOURCES
-- Complain if script is sourced in psql, rather than via CREATE EXTENSION
\echo Use "ALTER EXTENSION cdb_dataservices_client UPDATE TO '0.11.1'" to load this file. \quit

CREATE OR REPLACE FUNCTION cdb_dataservices_client.__DST_PrepareTableOBS_GetMeasure(
    username text,
    orgname text,
    user_db_role text,
    user_schema text,
    output_table_name text,
    params json
) RETURNS boolean AS $$
function_name = 'OBS_GetMeasure'
# Obtain return types for augmentation procedure
ds_return_metadata = plpy.execute("SELECT colnames, coltypes "
    "FROM cdb_dataservices_client._DST_GetReturnMetadata({username}::text, {orgname}::text, {function_name}::text, {params}::json);"
    .format(
        username=plpy.quote_nullable(username),
        orgname=plpy.quote_nullable(orgname),
        function_name=plpy.quote_literal(function_name),
        params=plpy.quote_literal(params)
    )
)
if ds_return_metadata[0]["colnames"]:
    colnames_arr = ds_return_metadata[0]["colnames"]
    coltypes_arr = ds_return_metadata[0]["coltypes"]
else:
    raise Exception('Error retrieving OBS_GetMeasure metadata')


# Prepare column and type strings required in the SQL queries
columns_with_types_arr = [colnames_arr[i] + ' ' + coltypes_arr[i] for i in range(0,len(colnames_arr))]
columns_with_types = ','.join(columns_with_types_arr)

# Create a new table with the required columns
plpy.execute('CREATE TABLE "{schema}".{table_name} ( '
    'cartodb_id int, the_geom geometry, {columns_with_types} '
    ');'
    .format(schema=user_schema, table_name=output_table_name, columns_with_types=columns_with_types)
)

plpy.execute('ALTER TABLE "{schema}".{table_name} OWNER TO "{user}";'
    .format(schema=user_schema, table_name=output_table_name, user=user_db_role)
)

return True
$$ LANGUAGE plpythonu;

CREATE OR REPLACE FUNCTION cdb_dataservices_client.__DST_PopulateTableOBS_GetMeasure(
    username text,
    orgname text,
    user_db_role text,
    user_schema text,
    dbname text,
    table_name text,
    output_table_name text,
    params json
) RETURNS boolean AS $$
function_name = 'OBS_GetMeasure'
# Obtain return types for augmentation procedure
ds_return_metadata = plpy.execute(
    "SELECT colnames, coltypes "
    "FROM cdb_dataservices_client._DST_GetReturnMetadata({username}::text, {orgname}::text, {function_name}::text, {params}::json);" .format(
        username=plpy.quote_nullable(username),
        orgname=plpy.quote_nullable(orgname),
        function_name=plpy.quote_literal(function_name),
        params=plpy.quote_literal(params)))

if ds_return_metadata[0]["colnames"]:
    colnames_arr = ds_return_metadata[0]["colnames"]
    coltypes_arr = ds_return_metadata[0]["coltypes"]
else:
    raise Exception('Error retrieving OBS_GetMeasure metadata')

# Prepare column and type strings required in the SQL queries
columns_with_types_arr = [
    colnames_arr[i] +
    ' ' +
    coltypes_arr[i] for i in range(
        0,
        len(colnames_arr))]
columns_with_types = ','.join(columns_with_types_arr)
aliased_colname_list = ','.join(
    ['result.' + name for name in colnames_arr])

# Instruct the OBS server side to establish a FDW
# The metadata is obtained as well in order to:
# - (a) be able to write the query to grab the actual data to be executed in the remote server via pl/proxy,
# - (b) be able to tell OBS to free resources when done.
ds_fdw_metadata = plpy.execute(
    "SELECT schemaname, tabname, servername "
    "FROM cdb_dataservices_client._DST_ConnectUserTable({username}::text, {orgname}::text, {user_db_role}::text, "
    "{schema}::text, {dbname}::text, {table_name}::text);" .format(
        username=plpy.quote_nullable(username),
        orgname=plpy.quote_nullable(orgname),
        user_db_role=plpy.quote_literal(user_db_role),
        schema=plpy.quote_literal(user_schema),
        dbname=plpy.quote_literal(dbname),
        table_name=plpy.quote_literal(table_name)))

if ds_fdw_metadata[0]["schemaname"]:
    server_schema = ds_fdw_metadata[0]["schemaname"]
    server_table_name = ds_fdw_metadata[0]["tabname"]
    server_name = ds_fdw_metadata[0]["servername"]
else:
    raise Exception('Error connecting dataset via FDW')

# Create a new table with the required columns
plpy.execute(
    'INSERT INTO "{schema}".{analysis_table_name} '
    'SELECT ut.cartodb_id, ut.the_geom, {colname_list} '
    'FROM "{schema}".{table_name} ut '
    'LEFT JOIN _DST_FetchJoinFdwTableData({username}::text, {orgname}::text, {server_schema}::text, {server_table_name}::text, '
    '{function_name}::text, {params}::json) '
    'AS result ({columns_with_types}, cartodb_id int) '
    'ON result.cartodb_id = ut.cartodb_id;' .format(
        schema=user_schema,
        analysis_table_name=output_table_name,
        colname_list=aliased_colname_list,
        table_name=table_name,
        username=plpy.quote_nullable(username),
        orgname=plpy.quote_nullable(orgname),
        server_schema=plpy.quote_literal(server_schema),
        server_table_name=plpy.quote_literal(server_table_name),
        function_name=plpy.quote_literal(function_name),
        params=plpy.quote_literal(params),
        columns_with_types=columns_with_types))

# Wipe user FDW data from the server
wiped = plpy.execute(
    "SELECT cdb_dataservices_client._DST_DisconnectUserTable({username}::text, {orgname}::text, {server_schema}::text, "
    "{server_table_name}::text, {fdw_server}::text)" .format(
        username=plpy.quote_nullable(username),
        orgname=plpy.quote_nullable(orgname),
        server_schema=plpy.quote_literal(server_schema),
        server_table_name=plpy.quote_literal(server_table_name),
        fdw_server=plpy.quote_literal(server_name)))

return True
$$ LANGUAGE plpythonu;
client/cdb_dataservices_client--0.11.1--0.11.0.sql (new file): 140 lines
@@ -0,0 +1,140 @@
--DO NOT MODIFY THIS FILE, IT IS GENERATED AUTOMATICALLY FROM SOURCES
-- Complain if script is sourced in psql, rather than via CREATE EXTENSION
\echo Use "ALTER EXTENSION cdb_dataservices_client UPDATE TO '0.11.0'" to load this file. \quit

CREATE OR REPLACE FUNCTION cdb_dataservices_client.__DST_PrepareTableOBS_GetMeasure(
    username text,
    orgname text,
    user_db_role text,
    user_schema text,
    output_table_name text,
    params json
) RETURNS boolean AS $$
function_name = 'GetMeasure'
# Obtain return types for augmentation procedure
ds_return_metadata = plpy.execute("SELECT colnames, coltypes "
    "FROM cdb_dataservices_client._DST_GetReturnMetadata({username}::text, {orgname}::text, {function_name}::text, {params}::json);"
    .format(
        username=plpy.quote_nullable(username),
        orgname=plpy.quote_nullable(orgname),
        function_name=plpy.quote_literal(function_name),
        params=plpy.quote_literal(params)
    )
)
if ds_return_metadata[0]["colnames"]:
    colnames_arr = ds_return_metadata[0]["colnames"]
    coltypes_arr = ds_return_metadata[0]["coltypes"]
else:
    raise Exception('Error retrieving OBS_GetMeasure metadata')


# Prepare column and type strings required in the SQL queries
columns_with_types_arr = [colnames_arr[i] + ' ' + coltypes_arr[i] for i in range(0,len(colnames_arr))]
columns_with_types = ','.join(columns_with_types_arr)

# Create a new table with the required columns
plpy.execute('CREATE TABLE "{schema}".{table_name} ( '
    'cartodb_id int, the_geom geometry, {columns_with_types} '
    ');'
    .format(schema=user_schema, table_name=output_table_name, columns_with_types=columns_with_types)
)

plpy.execute('ALTER TABLE "{schema}".{table_name} OWNER TO "{user}";'
    .format(schema=user_schema, table_name=output_table_name, user=user_db_role)
)

return True
$$ LANGUAGE plpythonu;

CREATE OR REPLACE FUNCTION cdb_dataservices_client.__DST_PopulateTableOBS_GetMeasure(
    username text,
    orgname text,
    user_db_role text,
    user_schema text,
    dbname text,
    table_name text,
    output_table_name text,
    params json
) RETURNS boolean AS $$
function_name = 'GetMeasure'
# Obtain return types for augmentation procedure
ds_return_metadata = plpy.execute(
    "SELECT colnames, coltypes "
    "FROM cdb_dataservices_client._DST_GetReturnMetadata({username}::text, {orgname}::text, {function_name}::text, {params}::json);" .format(
        username=plpy.quote_nullable(username),
        orgname=plpy.quote_nullable(orgname),
        function_name=plpy.quote_literal(function_name),
        params=plpy.quote_literal(params)))

if ds_return_metadata[0]["colnames"]:
    colnames_arr = ds_return_metadata[0]["colnames"]
    coltypes_arr = ds_return_metadata[0]["coltypes"]
else:
    raise Exception('Error retrieving OBS_GetMeasure metadata')

# Prepare column and type strings required in the SQL queries
columns_with_types_arr = [
    colnames_arr[i] +
    ' ' +
    coltypes_arr[i] for i in range(
        0,
        len(colnames_arr))]
columns_with_types = ','.join(columns_with_types_arr)
aliased_colname_list = ','.join(
    ['result.' + name for name in colnames_arr])

# Instruct the OBS server side to establish a FDW
# The metadata is obtained as well in order to:
# - (a) be able to write the query to grab the actual data to be executed in the remote server via pl/proxy,
# - (b) be able to tell OBS to free resources when done.
ds_fdw_metadata = plpy.execute(
    "SELECT schemaname, tabname, servername "
    "FROM cdb_dataservices_client._DST_ConnectUserTable({username}::text, {orgname}::text, {user_db_role}::text, "
    "{schema}::text, {dbname}::text, {table_name}::text);" .format(
        username=plpy.quote_nullable(username),
        orgname=plpy.quote_nullable(orgname),
        user_db_role=plpy.quote_literal(user_db_role),
        schema=plpy.quote_literal(user_schema),
        dbname=plpy.quote_literal(dbname),
        table_name=plpy.quote_literal(table_name)))

if ds_fdw_metadata[0]["schemaname"]:
    server_schema = ds_fdw_metadata[0]["schemaname"]
    server_table_name = ds_fdw_metadata[0]["tabname"]
    server_name = ds_fdw_metadata[0]["servername"]
else:
    raise Exception('Error connecting dataset via FDW')

# Create a new table with the required columns
plpy.execute(
    'INSERT INTO "{schema}".{analysis_table_name} '
    'SELECT ut.cartodb_id, ut.the_geom, {colname_list} '
    'FROM "{schema}".{table_name} ut '
    'LEFT JOIN _DST_FetchJoinFdwTableData({username}::text, {orgname}::text, {server_schema}::text, {server_table_name}::text, '
    '{function_name}::text, {params}::json) '
    'AS result ({columns_with_types}, cartodb_id int) '
    'ON result.cartodb_id = ut.cartodb_id;' .format(
        schema=user_schema,
        analysis_table_name=output_table_name,
        colname_list=aliased_colname_list,
        table_name=table_name,
        username=plpy.quote_nullable(username),
        orgname=plpy.quote_nullable(orgname),
        server_schema=plpy.quote_literal(server_schema),
        server_table_name=plpy.quote_literal(server_table_name),
        function_name=plpy.quote_literal(function_name),
        params=plpy.quote_literal(params),
        columns_with_types=columns_with_types))

# Wipe user FDW data from the server
wiped = plpy.execute(
    "SELECT cdb_dataservices_client._DST_DisconnectUserTable({username}::text, {orgname}::text, {server_schema}::text, "
    "{server_table_name}::text, {fdw_server}::text)" .format(
        username=plpy.quote_nullable(username),
        orgname=plpy.quote_nullable(orgname),
        server_schema=plpy.quote_literal(server_schema),
        server_table_name=plpy.quote_literal(server_table_name),
        fdw_server=plpy.quote_literal(server_name)))

return True
$$ LANGUAGE plpythonu;
client/cdb_dataservices_client--0.11.1.sql (new file): 1792 lines (file diff suppressed because it is too large)
@@ -1,5 +1,5 @@
comment = 'CartoDB dataservices client API extension'
default_version = '0.10.2'
default_version = '0.11.1'
requires = 'plproxy, cartodb'
superuser = true
schema = cdb_dataservices_client
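As the \echo guard in the generated scripts indicates, the client extension itself is moved between these versions with a plain ALTER EXTENSION call. A minimal sketch for a user database currently on 0.10.2:

```sql
-- Upgrade the client-side extension to the version published in this changeset
ALTER EXTENSION cdb_dataservices_client UPDATE TO '0.11.1';
-- and, if a rollback is needed, back through the downgrade scripts shipped above:
-- ALTER EXTENSION cdb_dataservices_client UPDATE TO '0.11.0';
```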
client/old_versions/cdb_dataservices_client--0.10.2--0.11.0.sql (new file): 289 lines
@@ -0,0 +1,289 @@
--DO NOT MODIFY THIS FILE, IT IS GENERATED AUTOMATICALLY FROM SOURCES
-- Complain if script is sourced in psql, rather than via CREATE EXTENSION
\echo Use "ALTER EXTENSION cdb_dataservices_client UPDATE TO '0.11.0'" to load this file. \quit

DROP FUNCTION IF EXISTS cdb_dataservices_client._OBS_GetTable(text, text, text, json);
DROP FUNCTION IF EXISTS cdb_dataservices_client._OBS_AugmentTable(text, text, json);
DROP FUNCTION IF EXISTS cdb_dataservices_client.__OBS_AugmentTable(text, text, text, text, text, text, text, json);
DROP FUNCTION IF EXISTS cdb_dataservices_client.__OBS_GetTable(text, text, text, text, text, text, text, text, json);
DROP FUNCTION IF EXISTS cdb_dataservices_client._OBS_ConnectUserTable(text, text, text, text, text, text);
DROP FUNCTION IF EXISTS cdb_dataservices_client._OBS_GetReturnMetadata(text, text, text, json);
DROP FUNCTION IF EXISTS cdb_dataservices_client._OBS_FetchJoinFdwTableData(text, text, text, text, text, json);
DROP FUNCTION IF EXISTS cdb_dataservices_client._OBS_DisconnectUserTable(text, text, text, text, text);

CREATE OR REPLACE FUNCTION cdb_dataservices_client._DST_PrepareTableOBS_GetMeasure(
    output_table_name text,
    params json
) RETURNS boolean AS $$
DECLARE
    username text;
    user_db_role text;
    orgname text;
    user_schema text;
    result boolean;
BEGIN
    IF session_user = 'publicuser' OR session_user ~ 'cartodb_publicuser_*' THEN
        RAISE EXCEPTION 'The api_key must be provided';
    END IF;

    SELECT session_user INTO user_db_role;

    SELECT u, o INTO username, orgname FROM cdb_dataservices_client._cdb_entity_config() AS (u text, o text);
    -- JSON value stored "" is taken as literal
    IF username IS NULL OR username = '' OR username = '""' THEN
        RAISE EXCEPTION 'Username is a mandatory argument';
    END IF;

    IF orgname IS NULL OR orgname = '' OR orgname = '""' THEN
        user_schema := 'public';
    ELSE
        user_schema := username;
    END IF;

    SELECT cdb_dataservices_client.__DST_PrepareTableOBS_GetMeasure(
        username,
        orgname,
        user_db_role,
        user_schema,
        output_table_name,
        params
    ) INTO result;

    RETURN result;
END;
$$ LANGUAGE 'plpgsql' SECURITY DEFINER;

CREATE OR REPLACE FUNCTION cdb_dataservices_client._DST_PopulateTableOBS_GetMeasure(
    table_name text,
    output_table_name text,
    params json
) RETURNS boolean AS $$
DECLARE
    username text;
    user_db_role text;
    orgname text;
    dbname text;
    user_schema text;
    result boolean;
BEGIN
    IF session_user = 'publicuser' OR session_user ~ 'cartodb_publicuser_*' THEN
        RAISE EXCEPTION 'The api_key must be provided';
    END IF;

    SELECT session_user INTO user_db_role;

    SELECT u, o INTO username, orgname FROM cdb_dataservices_client._cdb_entity_config() AS (u text, o text);
    -- JSON value stored "" is taken as literal
    IF username IS NULL OR username = '' OR username = '""' THEN
        RAISE EXCEPTION 'Username is a mandatory argument';
    END IF;

    IF orgname IS NULL OR orgname = '' OR orgname = '""' THEN
        user_schema := 'public';
    ELSE
        user_schema := username;
    END IF;

    SELECT current_database() INTO dbname;

    SELECT cdb_dataservices_client.__DST_PopulateTableOBS_GetMeasure(
        username,
        orgname,
        user_db_role,
        user_schema,
        dbname,
        table_name,
        output_table_name,
        params
    ) INTO result;

    RETURN result;
END;
$$ LANGUAGE 'plpgsql' SECURITY DEFINER;


CREATE OR REPLACE FUNCTION cdb_dataservices_client.__DST_PrepareTableOBS_GetMeasure(
    username text,
    orgname text,
    user_db_role text,
    user_schema text,
    output_table_name text,
    params json
) RETURNS boolean AS $$
function_name = 'GetMeasure'
# Obtain return types for augmentation procedure
ds_return_metadata = plpy.execute("SELECT colnames, coltypes "
    "FROM cdb_dataservices_client._DST_GetReturnMetadata({username}::text, {orgname}::text, {function_name}::text, {params}::json);"
    .format(
        username=plpy.quote_nullable(username),
        orgname=plpy.quote_nullable(orgname),
        function_name=plpy.quote_literal(function_name),
        params=plpy.quote_literal(params)
    )
)
if ds_return_metadata[0]["colnames"]:
    colnames_arr = ds_return_metadata[0]["colnames"]
    coltypes_arr = ds_return_metadata[0]["coltypes"]
else:
    raise Exception('Error retrieving OBS_GetMeasure metadata')


# Prepare column and type strings required in the SQL queries
columns_with_types_arr = [colnames_arr[i] + ' ' + coltypes_arr[i] for i in range(0,len(colnames_arr))]
columns_with_types = ','.join(columns_with_types_arr)

# Create a new table with the required columns
plpy.execute('CREATE TABLE "{schema}".{table_name} ( '
    'cartodb_id int, the_geom geometry, {columns_with_types} '
    ');'
    .format(schema=user_schema, table_name=output_table_name, columns_with_types=columns_with_types)
)

plpy.execute('ALTER TABLE "{schema}".{table_name} OWNER TO "{user}";'
    .format(schema=user_schema, table_name=output_table_name, user=user_db_role)
)

return True
$$ LANGUAGE plpythonu;

CREATE OR REPLACE FUNCTION cdb_dataservices_client.__DST_PopulateTableOBS_GetMeasure(
    username text,
    orgname text,
    user_db_role text,
    user_schema text,
    dbname text,
    table_name text,
    output_table_name text,
    params json
) RETURNS boolean AS $$
function_name = 'GetMeasure'
# Obtain return types for augmentation procedure
ds_return_metadata = plpy.execute(
    "SELECT colnames, coltypes "
    "FROM cdb_dataservices_client._DST_GetReturnMetadata({username}::text, {orgname}::text, {function_name}::text, {params}::json);" .format(
        username=plpy.quote_nullable(username),
        orgname=plpy.quote_nullable(orgname),
        function_name=plpy.quote_literal(function_name),
        params=plpy.quote_literal(params)))

if ds_return_metadata[0]["colnames"]:
    colnames_arr = ds_return_metadata[0]["colnames"]
    coltypes_arr = ds_return_metadata[0]["coltypes"]
else:
    raise Exception('Error retrieving OBS_GetMeasure metadata')

# Prepare column and type strings required in the SQL queries
columns_with_types_arr = [
    colnames_arr[i] +
    ' ' +
    coltypes_arr[i] for i in range(
        0,
        len(colnames_arr))]
columns_with_types = ','.join(columns_with_types_arr)
aliased_colname_list = ','.join(
    ['result.' + name for name in colnames_arr])

# Instruct the OBS server side to establish a FDW
# The metadata is obtained as well in order to:
# - (a) be able to write the query to grab the actual data to be executed in the remote server via pl/proxy,
# - (b) be able to tell OBS to free resources when done.
ds_fdw_metadata = plpy.execute(
    "SELECT schemaname, tabname, servername "
    "FROM cdb_dataservices_client._DST_ConnectUserTable({username}::text, {orgname}::text, {user_db_role}::text, "
    "{schema}::text, {dbname}::text, {table_name}::text);" .format(
        username=plpy.quote_nullable(username),
        orgname=plpy.quote_nullable(orgname),
        user_db_role=plpy.quote_literal(user_db_role),
        schema=plpy.quote_literal(user_schema),
        dbname=plpy.quote_literal(dbname),
        table_name=plpy.quote_literal(table_name)))

if ds_fdw_metadata[0]["schemaname"]:
    server_schema = ds_fdw_metadata[0]["schemaname"]
    server_table_name = ds_fdw_metadata[0]["tabname"]
    server_name = ds_fdw_metadata[0]["servername"]
else:
    raise Exception('Error connecting dataset via FDW')

# Create a new table with the required columns
plpy.execute(
    'INSERT INTO "{schema}".{analysis_table_name} '
    'SELECT ut.cartodb_id, ut.the_geom, {colname_list} '
    'FROM "{schema}".{table_name} ut '
    'LEFT JOIN _DST_FetchJoinFdwTableData({username}::text, {orgname}::text, {server_schema}::text, {server_table_name}::text, '
    '{function_name}::text, {params}::json) '
    'AS result ({columns_with_types}, cartodb_id int) '
    'ON result.cartodb_id = ut.cartodb_id;' .format(
        schema=user_schema,
        analysis_table_name=output_table_name,
        colname_list=aliased_colname_list,
        table_name=table_name,
        username=plpy.quote_nullable(username),
        orgname=plpy.quote_nullable(orgname),
        server_schema=plpy.quote_literal(server_schema),
        server_table_name=plpy.quote_literal(server_table_name),
        function_name=plpy.quote_literal(function_name),
        params=plpy.quote_literal(params),
        columns_with_types=columns_with_types))

# Wipe user FDW data from the server
wiped = plpy.execute(
    "SELECT cdb_dataservices_client._DST_DisconnectUserTable({username}::text, {orgname}::text, {server_schema}::text, "
    "{server_table_name}::text, {fdw_server}::text)" .format(
        username=plpy.quote_nullable(username),
        orgname=plpy.quote_nullable(orgname),
        server_schema=plpy.quote_literal(server_schema),
        server_table_name=plpy.quote_literal(server_table_name),
        fdw_server=plpy.quote_literal(server_name)))

return True
$$ LANGUAGE plpythonu;

CREATE OR REPLACE FUNCTION cdb_dataservices_client._DST_ConnectUserTable(
    username text,
    orgname text,
    user_db_role text,
    user_schema text,
    dbname text,
    table_name text
)RETURNS cdb_dataservices_client.ds_fdw_metadata AS $$
    CONNECT cdb_dataservices_client._server_conn_str();
    TARGET cdb_dataservices_server._DST_ConnectUserTable;
$$ LANGUAGE plproxy;

CREATE OR REPLACE FUNCTION cdb_dataservices_client._DST_GetReturnMetadata(
    username text,
    orgname text,
    function_name text,
    params json
) RETURNS cdb_dataservices_client.ds_return_metadata AS $$
    CONNECT cdb_dataservices_client._server_conn_str();
    TARGET cdb_dataservices_server._DST_GetReturnMetadata;
$$ LANGUAGE plproxy;

CREATE OR REPLACE FUNCTION cdb_dataservices_client._DST_FetchJoinFdwTableData(
    username text,
    orgname text,
    table_schema text,
    table_name text,
    function_name text,
    params json
) RETURNS SETOF record AS $$
    CONNECT cdb_dataservices_client._server_conn_str();
    TARGET cdb_dataservices_server._DST_FetchJoinFdwTableData;
$$ LANGUAGE plproxy;

CREATE OR REPLACE FUNCTION cdb_dataservices_client._DST_DisconnectUserTable(
    username text,
    orgname text,
    table_schema text,
    table_name text,
    server_name text
) RETURNS boolean AS $$
    CONNECT cdb_dataservices_client._server_conn_str();
    TARGET cdb_dataservices_server._DST_DisconnectUserTable;
$$ LANGUAGE plproxy;

GRANT EXECUTE ON FUNCTION cdb_dataservices_client._DST_PrepareTableOBS_GetMeasure(output_table_name text, params json) TO publicuser;
GRANT EXECUTE ON FUNCTION cdb_dataservices_client._DST_PopulateTableOBS_GetMeasure(table_name text, output_table_name text, params json) TO publicuser;
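For reference, a hypothetical invocation of the two public table functions granted above; the table names and the params document are illustrative only and not taken from this changeset:

```sql
-- Create an empty output table shaped for the OBS_GetMeasure results
SELECT cdb_dataservices_client._DST_PrepareTableOBS_GetMeasure(
  'my_measures_table',
  '[{"numer_id": "us.census.acs.B01003001"}]'::json);

-- Fill it by joining the user table against the Observatory measure via the FDW
SELECT cdb_dataservices_client._DST_PopulateTableOBS_GetMeasure(
  'my_points_table',
  'my_measures_table',
  '[{"numer_id": "us.census.acs.B01003001"}]'::json);
```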
client/old_versions/cdb_dataservices_client--0.11.0--0.10.2.sql (new file): 281 lines
@@ -0,0 +1,281 @@
--DO NOT MODIFY THIS FILE, IT IS GENERATED AUTOMATICALLY FROM SOURCES
-- Complain if script is sourced in psql, rather than via CREATE EXTENSION
\echo Use "ALTER EXTENSION cdb_dataservices_client UPDATE TO '0.10.2'" to load this file. \quit

DROP FUNCTION IF EXISTS cdb_dataservices_client._DST_PrepareTableOBS_GetMeasure(text, json);
DROP FUNCTION IF EXISTS cdb_dataservices_client._DST_PopulateTableOBS_GetMeasure(text, text, json);
DROP FUNCTION IF EXISTS cdb_dataservices_client.__DST_PrepareTableOBS_GetMeasure(text, text, text, text, text, json);
DROP FUNCTION IF EXISTS cdb_dataservices_client.__DST_PopulateTableOBS_GetMeasure(text, text, text, text, text, text, text, json);
DROP FUNCTION IF EXISTS cdb_dataservices_client._DST_ConnectUserTable(text, text, text, text, text, text);
DROP FUNCTION IF EXISTS cdb_dataservices_client._DST_GetReturnMetadata(text, text, text, json);
DROP FUNCTION IF EXISTS cdb_dataservices_client._DST_FetchJoinFdwTableData(text, text, text, text, text, json);
DROP FUNCTION IF EXISTS cdb_dataservices_client._DST_DisconnectUserTable(text, text, text, text, text);

CREATE OR REPLACE FUNCTION cdb_dataservices_client._OBS_GetTable(table_name text, output_table_name text, function_name text, params json)
RETURNS boolean AS $$
DECLARE
    username text;
    user_db_role text;
    orgname text;
    dbname text;
    user_schema text;
    result boolean;
BEGIN
    IF session_user = 'publicuser' OR session_user ~ 'cartodb_publicuser_*' THEN
        RAISE EXCEPTION 'The api_key must be provided';
    END IF;

    SELECT session_user INTO user_db_role;

    SELECT u, o INTO username, orgname FROM cdb_dataservices_client._cdb_entity_config() AS (u text, o text);
    -- JSON value stored "" is taken as literal
    IF username IS NULL OR username = '' OR username = '""' THEN
        RAISE EXCEPTION 'Username is a mandatory argument';
    END IF;

    IF orgname IS NULL OR orgname = '' OR orgname = '""' THEN
        user_schema := 'public';
    ELSE
        user_schema := username;
    END IF;

    SELECT current_database() INTO dbname;

    SELECT cdb_dataservices_client.__OBS_GetTable(username, orgname, user_db_role, user_schema, dbname, table_name, output_table_name, function_name, params) INTO result;

    RETURN result;
END;
$$ LANGUAGE 'plpgsql' SECURITY DEFINER;


CREATE OR REPLACE FUNCTION cdb_dataservices_client._OBS_AugmentTable(table_name text, function_name text, params json)
RETURNS boolean AS $$
DECLARE
    username text;
    user_db_role text;
    orgname text;
    dbname text;
    user_schema text;
    result boolean;
BEGIN
    IF session_user = 'publicuser' OR session_user ~ 'cartodb_publicuser_*' THEN
        RAISE EXCEPTION 'The api_key must be provided';
    END IF;

    SELECT session_user INTO user_db_role;

    SELECT u, o INTO username, orgname FROM cdb_dataservices_client._cdb_entity_config() AS (u text, o text);
    -- JSON value stored "" is taken as literal
    IF username IS NULL OR username = '' OR username = '""' THEN
        RAISE EXCEPTION 'Username is a mandatory argument';
    END IF;

    IF orgname IS NULL OR orgname = '' OR orgname = '""' THEN
        user_schema := 'public';
    ELSE
        user_schema := username;
    END IF;

    SELECT current_database() INTO dbname;

    SELECT cdb_dataservices_client.__OBS_AugmentTable(username, orgname, user_db_role, user_schema, dbname, table_name, function_name, params) INTO result;

    RETURN result;
END;
$$ LANGUAGE 'plpgsql' SECURITY DEFINER;

CREATE OR REPLACE FUNCTION cdb_dataservices_client.__OBS_AugmentTable(username text, orgname text, user_db_role text, user_schema text, dbname text, table_name text, function_name text, params json)
RETURNS boolean AS $$
from time import strftime
try:
    server_table_name = None
    temporary_table_name = 'ds_tmp_' + str(strftime("%s")) + table_name

    # Obtain return types for augmentation procedure
    ds_return_metadata = plpy.execute("SELECT colnames, coltypes "
        "FROM cdb_dataservices_client._OBS_GetReturnMetadata({username}::text, {orgname}::text, {function_name}::text, {params}::json);"
        .format(username=plpy.quote_nullable(username), orgname=plpy.quote_nullable(orgname), function_name=plpy.quote_literal(function_name), params=plpy.quote_literal(params))
    )

    colnames_arr = ds_return_metadata[0]["colnames"]
    coltypes_arr = ds_return_metadata[0]["coltypes"]

    # Prepare column and type strings required in the SQL queries
    colnames = ','.join(colnames_arr)
    columns_with_types_arr = [colnames_arr[i] + ' ' + coltypes_arr[i] for i in range(0,len(colnames_arr))]
    columns_with_types = ','.join(columns_with_types_arr)


    # Instruct the OBS server side to establish a FDW
    # The metadata is obtained as well in order to:
    # - (a) be able to write the query to grab the actual data to be executed in the remote server via pl/proxy,
    # - (b) be able to tell OBS to free resources when done.
    ds_fdw_metadata = plpy.execute("SELECT schemaname, tabname, servername "
        "FROM cdb_dataservices_client._OBS_ConnectUserTable({username}::text, {orgname}::text, {user_db_role}::text, {user_schema}::text, {dbname}::text, {table_name}::text);"
        .format(username=plpy.quote_nullable(username), orgname=plpy.quote_nullable(orgname), user_db_role=plpy.quote_literal(user_db_role), user_schema=plpy.quote_literal(user_schema), dbname=plpy.quote_literal(dbname), table_name=plpy.quote_literal(table_name))
    )

    server_schema = ds_fdw_metadata[0]["schemaname"]
    server_table_name = ds_fdw_metadata[0]["tabname"]
    server_name = ds_fdw_metadata[0]["servername"]

    # Create temporary table with the augmented results
    plpy.execute('CREATE UNLOGGED TABLE "{user_schema}".{temp_table_name} AS '
        '(SELECT {columns}, cartodb_id '
        'FROM cdb_dataservices_client._OBS_FetchJoinFdwTableData('
        '{username}::text, {orgname}::text, {schema}::text, {table_name}::text, {function_name}::text, {params}::json) '
        'AS results({columns_with_types}, cartodb_id int) )'
        .format(columns=colnames, username=plpy.quote_nullable(username), orgname=plpy.quote_nullable(orgname),
            user_schema=user_schema, schema=plpy.quote_literal(server_schema), table_name=plpy.quote_literal(server_table_name),
            function_name=plpy.quote_literal(function_name), params=plpy.quote_literal(params), columns_with_types=columns_with_types,
            temp_table_name=temporary_table_name)
    )

    # Wipe user FDW data from the server
    wiped = plpy.execute("SELECT cdb_dataservices_client._OBS_DisconnectUserTable({username}::text, {orgname}::text, {server_schema}::text, {server_table_name}::text, {fdw_server}::text)"
        .format(username=plpy.quote_nullable(username), orgname=plpy.quote_nullable(orgname), server_schema=plpy.quote_literal(server_schema), server_table_name=plpy.quote_literal(server_table_name), fdw_server=plpy.quote_literal(server_name))
    )

    # Add index to cartodb_id
    plpy.execute('CREATE UNIQUE INDEX {temp_table_name}_pkey ON "{user_schema}".{temp_table_name} (cartodb_id)'
        .format(user_schema=user_schema, temp_table_name=temporary_table_name)
    )

    # Prepare table to receive augmented results in new columns
    for idx, column in enumerate(colnames_arr):
        if colnames_arr[idx] is not 'the_geom':
            plpy.execute('ALTER TABLE "{user_schema}".{table_name} ADD COLUMN {column_name} {column_type}'
                .format(user_schema=user_schema, table_name=table_name, column_name=colnames_arr[idx], column_type=coltypes_arr[idx])
            )

    # Populate the user table with the augmented results
    plpy.execute('UPDATE "{user_schema}".{table_name} SET {columns} = '
        '(SELECT {columns} FROM "{user_schema}".{temporary_table_name} '
        'WHERE "{user_schema}".{temporary_table_name}.cartodb_id = "{user_schema}".{table_name}.cartodb_id)'
        .format(columns = colnames, username=plpy.quote_nullable(username), orgname=plpy.quote_nullable(orgname),
            user_schema = user_schema, table_name=table_name, function_name=function_name, params=params, columns_with_types=columns_with_types,
            temporary_table_name=temporary_table_name)
    )

    plpy.execute('DROP TABLE IF EXISTS "{user_schema}".{temporary_table_name}'
        .format(user_schema=user_schema, table_name=table_name, temporary_table_name=temporary_table_name)
    )

    return True
except Exception as e:
    plpy.warning('Error trying to augment table {0}'.format(e))
    # Wipe user FDW data from the server in case of failure if the table was connected
    if server_table_name:
        # Wipe local temporary table
        plpy.execute('DROP TABLE IF EXISTS "{user_schema}".{temporary_table_name}'
            .format(user_schema=user_schema, table_name=table_name, temporary_table_name=temporary_table_name)
        )

        wiped = plpy.execute("SELECT cdb_dataservices_client._OBS_DisconnectUserTable({username}::text, {orgname}::text, {server_schema}::text, {server_table_name}::text, {fdw_server}::text)"
            .format(username=plpy.quote_nullable(username), orgname=plpy.quote_nullable(orgname), server_schema=plpy.quote_literal(server_schema), server_table_name=plpy.quote_literal(server_table_name), fdw_server=plpy.quote_literal(server_name))
        )
    return False
$$ LANGUAGE plpythonu;


CREATE OR REPLACE FUNCTION cdb_dataservices_client.__OBS_GetTable(username text, orgname text, user_db_role text, user_schema text, dbname text, table_name text, output_table_name text, function_name text, params json)
RETURNS boolean AS $$
try:
    server_table_name = None
    # Obtain return types for augmentation procedure
    ds_return_metadata = plpy.execute("SELECT colnames, coltypes "
        "FROM cdb_dataservices_client._OBS_GetReturnMetadata({username}::text, {orgname}::text, {function_name}::text, {params}::json);"
        .format(username=plpy.quote_nullable(username), orgname=plpy.quote_nullable(orgname), function_name=plpy.quote_literal(function_name), params=plpy.quote_literal(params))
    )

    colnames_arr = ds_return_metadata[0]["colnames"]
    coltypes_arr = ds_return_metadata[0]["coltypes"]

    # Prepare column and type strings required in the SQL queries
    colnames = ','.join(colnames_arr)
    columns_with_types_arr = [colnames_arr[i] + ' ' + coltypes_arr[i] for i in range(0,len(colnames_arr))]
    columns_with_types = ','.join(columns_with_types_arr)


    # Instruct the OBS server side to establish a FDW
    # The metadata is obtained as well in order to:
    # - (a) be able to write the query to grab the actual data to be executed in the remote server via pl/proxy,
    # - (b) be able to tell OBS to free resources when done.
    ds_fdw_metadata = plpy.execute("SELECT schemaname, tabname, servername "
        "FROM cdb_dataservices_client._OBS_ConnectUserTable({username}::text, {orgname}::text, {user_db_role}::text, {schema}::text, {dbname}::text, {table_name}::text);"
        .format(username=plpy.quote_nullable(username), orgname=plpy.quote_nullable(orgname), user_db_role=plpy.quote_literal(user_db_role), schema=plpy.quote_literal(user_schema), dbname=plpy.quote_literal(dbname), table_name=plpy.quote_literal(table_name))
    )

    server_schema = ds_fdw_metadata[0]["schemaname"]
    server_table_name = ds_fdw_metadata[0]["tabname"]
    server_name = ds_fdw_metadata[0]["servername"]

    # Get list of user columns to include in the new table
    user_table_columns = ','.join(
        plpy.execute('SELECT array_agg(\'user_table.\' || attname) AS columns '
            'FROM pg_attribute WHERE attrelid = \'"{user_schema}".{table_name}\'::regclass '
            'AND attnum > 0 AND NOT attisdropped AND attname NOT LIKE \'the_geom_webmercator\' '
            'AND NOT attname LIKE ANY(string_to_array(\'{colnames}\',\',\'));'
            .format(user_schema=user_schema, table_name=table_name, colnames=colnames)
        )[0]["columns"]
    )

    # Populate a new table with the augmented results
    plpy.execute('CREATE TABLE "{user_schema}".{output_table_name} AS '
        '(SELECT results.{columns}, {user_table_columns} '
        'FROM {table_name} AS user_table '
        'LEFT JOIN cdb_dataservices_client._OBS_FetchJoinFdwTableData({username}::text, {orgname}::text, {server_schema}::text, {server_table_name}::text, {function_name}::text, {params}::json) as results({columns_with_types}, cartodb_id int) '
        'ON results.cartodb_id = user_table.cartodb_id)'
        .format(output_table_name=output_table_name, columns=colnames, user_table_columns=user_table_columns, username=plpy.quote_nullable(username),
            orgname=plpy.quote_nullable(orgname), user_schema=user_schema, server_schema=plpy.quote_literal(server_schema), server_table_name=plpy.quote_literal(server_table_name),
            table_name=table_name, function_name=plpy.quote_literal(function_name), params=plpy.quote_literal(params), columns_with_types=columns_with_types)
    )

    plpy.execute('ALTER TABLE "{schema}".{table_name} OWNER TO "{user}";'
        .format(schema=user_schema, table_name=output_table_name, user=user_db_role)
    )

    # Wipe user FDW data from the server
    wiped = plpy.execute("SELECT cdb_dataservices_client._OBS_DisconnectUserTable({username}::text, {orgname}::text, {server_schema}::text, {server_table_name}::text, {fdw_server}::text)"
        .format(username=plpy.quote_nullable(username), orgname=plpy.quote_nullable(orgname), server_schema=plpy.quote_literal(server_schema), server_table_name=plpy.quote_literal(server_table_name), fdw_server=plpy.quote_literal(server_name))
    )

    return True
except Exception as e:
    plpy.warning('Error trying to get table {0}'.format(e))
    # Wipe user FDW data from the server in case of failure if the table was connected
    if server_table_name:
        wiped = plpy.execute("SELECT cdb_dataservices_client._OBS_DisconnectUserTable({username}::text, {orgname}::text, {server_schema}::text, {server_table_name}::text, {fdw_server}::text)"
            .format(username=plpy.quote_nullable(username), orgname=plpy.quote_nullable(orgname), server_schema=plpy.quote_literal(server_schema), server_table_name=plpy.quote_literal(server_table_name), fdw_server=plpy.quote_literal(server_name))
        )
    return False
$$ LANGUAGE plpythonu;


CREATE OR REPLACE FUNCTION cdb_dataservices_client._OBS_ConnectUserTable(username text, orgname text, user_db_role text, user_schema text, dbname text, table_name text)
RETURNS cdb_dataservices_client.ds_fdw_metadata AS $$
    CONNECT _server_conn_str();
    TARGET cdb_dataservices_server._OBS_ConnectUserTable;
$$ LANGUAGE plproxy;

CREATE OR REPLACE FUNCTION cdb_dataservices_client._OBS_GetReturnMetadata(username text, orgname text, function_name text, params json)
RETURNS cdb_dataservices_client.ds_return_metadata AS $$
    CONNECT _server_conn_str();
    TARGET cdb_dataservices_server._OBS_GetReturnMetadata;
$$ LANGUAGE plproxy;

CREATE OR REPLACE FUNCTION cdb_dataservices_client._OBS_FetchJoinFdwTableData(username text, orgname text, table_schema text, table_name text, function_name text, params json)
RETURNS SETOF record AS $$
    CONNECT _server_conn_str();
    TARGET cdb_dataservices_server._OBS_FetchJoinFdwTableData;
$$ LANGUAGE plproxy;

CREATE OR REPLACE FUNCTION cdb_dataservices_client._OBS_DisconnectUserTable(username text, orgname text, table_schema text, table_name text, server_name text)
RETURNS boolean AS $$
    CONNECT _server_conn_str();
    TARGET cdb_dataservices_server._OBS_DisconnectUserTable;
$$ LANGUAGE plproxy;

GRANT EXECUTE ON FUNCTION cdb_dataservices_client._obs_augmenttable(table_name text, function_name text, params json) TO publicuser;
GRANT EXECUTE ON FUNCTION cdb_dataservices_client._obs_gettable(table_name text, output_table_name text, function_name text, params json) TO publicuser;
client/old_versions/cdb_dataservices_client--0.11.0.sql (new file): 1792 lines (file diff suppressed because it is too large)
@@ -1,8 +1,53 @@
|
||||
CREATE TYPE cdb_dataservices_client.ds_fdw_metadata as (schemaname text, tabname text, servername text);
|
||||
CREATE TYPE cdb_dataservices_client.ds_return_metadata as (colnames text[], coltypes text[]);
|
||||
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_client._OBS_GetTable(table_name text, output_table_name text, function_name text, params json)
|
||||
RETURNS boolean AS $$
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_client._DST_PrepareTableOBS_GetMeasure(
|
||||
output_table_name text,
|
||||
params json
|
||||
) RETURNS boolean AS $$
|
||||
DECLARE
|
||||
username text;
|
||||
user_db_role text;
|
||||
orgname text;
|
||||
user_schema text;
|
||||
result boolean;
|
||||
BEGIN
|
||||
IF session_user = 'publicuser' OR session_user ~ 'cartodb_publicuser_*' THEN
|
||||
RAISE EXCEPTION 'The api_key must be provided';
|
||||
END IF;
|
||||
|
||||
SELECT session_user INTO user_db_role;
|
||||
|
||||
SELECT u, o INTO username, orgname FROM cdb_dataservices_client._cdb_entity_config() AS (u text, o text);
|
||||
-- JSON value stored "" is taken as literal
|
||||
IF username IS NULL OR username = '' OR username = '""' THEN
|
||||
RAISE EXCEPTION 'Username is a mandatory argument';
|
||||
END IF;
|
||||
|
||||
IF orgname IS NULL OR orgname = '' OR orgname = '""' THEN
|
||||
user_schema := 'public';
|
||||
ELSE
|
||||
user_schema := username;
|
||||
END IF;
|
||||
|
||||
SELECT cdb_dataservices_client.__DST_PrepareTableOBS_GetMeasure(
|
||||
username,
|
||||
orgname,
|
||||
user_db_role,
|
||||
user_schema,
|
||||
output_table_name,
|
||||
params
|
||||
) INTO result;
|
||||
|
||||
RETURN result;
|
||||
END;
|
||||
$$ LANGUAGE 'plpgsql' SECURITY DEFINER;
|
||||
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_client._DST_PopulateTableOBS_GetMeasure(
|
||||
table_name text,
|
||||
output_table_name text,
|
||||
params json
|
||||
) RETURNS boolean AS $$
|
||||
DECLARE
|
||||
username text;
|
||||
user_db_role text;
|
||||
@@ -31,238 +76,200 @@ BEGIN
|
||||
|
||||
SELECT current_database() INTO dbname;
|
||||
|
||||
SELECT cdb_dataservices_client.__OBS_GetTable(username, orgname, user_db_role, user_schema, dbname, table_name, output_table_name, function_name, params) INTO result;
|
||||
SELECT cdb_dataservices_client.__DST_PopulateTableOBS_GetMeasure(
|
||||
username,
|
||||
orgname,
|
||||
user_db_role,
|
||||
user_schema,
|
||||
dbname,
|
||||
table_name,
|
||||
output_table_name,
|
||||
params
|
||||
) INTO result;
|
||||
|
||||
RETURN result;
|
||||
END;
|
||||
$$ LANGUAGE 'plpgsql' SECURITY DEFINER;
|
||||
|
||||
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_client._OBS_AugmentTable(table_name text, function_name text, params json)
|
||||
RETURNS boolean AS $$
|
||||
DECLARE
|
||||
username text;
|
||||
user_db_role text;
|
||||
orgname text;
|
||||
dbname text;
|
||||
user_schema text;
|
||||
result boolean;
|
||||
BEGIN
|
||||
IF session_user = 'publicuser' OR session_user ~ 'cartodb_publicuser_*' THEN
|
||||
RAISE EXCEPTION 'The api_key must be provided';
|
||||
END IF;
|
||||
|
||||
SELECT session_user INTO user_db_role;
|
||||
|
||||
SELECT u, o INTO username, orgname FROM cdb_dataservices_client._cdb_entity_config() AS (u text, o text);
|
||||
-- JSON value stored "" is taken as literal
|
||||
IF username IS NULL OR username = '' OR username = '""' THEN
|
||||
RAISE EXCEPTION 'Username is a mandatory argument';
|
||||
END IF;
|
||||
|
||||
IF orgname IS NULL OR orgname = '' OR orgname = '""' THEN
|
||||
user_schema := 'public';
|
||||
ELSE
|
||||
user_schema := username;
|
||||
END IF;
|
||||
|
||||
SELECT current_database() INTO dbname;
|
||||
|
||||
SELECT cdb_dataservices_client.__OBS_AugmentTable(username, orgname, user_db_role, user_schema, dbname, table_name, function_name, params) INTO result;
|
||||
|
||||
RETURN result;
|
||||
END;
|
||||
$$ LANGUAGE 'plpgsql' SECURITY DEFINER;
|
||||
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_client.__OBS_AugmentTable(username text, orgname text, user_db_role text, user_schema text, dbname text, table_name text, function_name text, params json)
|
||||
RETURNS boolean AS $$
|
||||
from time import strftime
|
||||
try:
|
||||
server_table_name = None
|
||||
temporary_table_name = 'ds_tmp_' + str(strftime("%s")) + table_name
|
||||
|
||||
# Obtain return types for augmentation procedure
|
||||
ds_return_metadata = plpy.execute("SELECT colnames, coltypes "
|
||||
"FROM cdb_dataservices_client._OBS_GetReturnMetadata({username}::text, {orgname}::text, {function_name}::text, {params}::json);"
|
||||
.format(username=plpy.quote_nullable(username), orgname=plpy.quote_nullable(orgname), function_name=plpy.quote_literal(function_name), params=plpy.quote_literal(params))
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_client.__DST_PrepareTableOBS_GetMeasure(
|
||||
username text,
|
||||
orgname text,
|
||||
user_db_role text,
|
||||
user_schema text,
|
||||
output_table_name text,
|
||||
params json
|
||||
) RETURNS boolean AS $$
|
||||
function_name = 'OBS_GetMeasure'
|
||||
# Obtain return types for augmentation procedure
|
||||
ds_return_metadata = plpy.execute("SELECT colnames, coltypes "
|
||||
"FROM cdb_dataservices_client._DST_GetReturnMetadata({username}::text, {orgname}::text, {function_name}::text, {params}::json);"
|
||||
.format(
|
||||
username=plpy.quote_nullable(username),
|
||||
orgname=plpy.quote_nullable(orgname),
|
||||
function_name=plpy.quote_literal(function_name),
|
||||
params=plpy.quote_literal(params)
|
||||
)
|
||||
|
||||
)
|
||||
if ds_return_metadata[0]["colnames"]:
|
||||
colnames_arr = ds_return_metadata[0]["colnames"]
|
||||
coltypes_arr = ds_return_metadata[0]["coltypes"]
|
||||
|
||||
# Prepare column and type strings required in the SQL queries
|
||||
colnames = ','.join(colnames_arr)
|
||||
columns_with_types_arr = [colnames_arr[i] + ' ' + coltypes_arr[i] for i in range(0,len(colnames_arr))]
|
||||
columns_with_types = ','.join(columns_with_types_arr)
|
||||
else:
|
||||
raise Exception('Error retrieving OBS_GetMeasure metadata')
|
||||
|
||||
|
||||
# Instruct the OBS server side to establish a FDW
|
||||
# The metadata is obtained as well in order to:
|
||||
# - (a) be able to write the query to grab the actual data to be executed in the remote server via pl/proxy,
|
||||
# - (b) be able to tell OBS to free resources when done.
|
||||
ds_fdw_metadata = plpy.execute("SELECT schemaname, tabname, servername "
|
||||
"FROM cdb_dataservices_client._OBS_ConnectUserTable({username}::text, {orgname}::text, {user_db_role}::text, {user_schema}::text, {dbname}::text, {table_name}::text);"
|
||||
.format(username=plpy.quote_nullable(username), orgname=plpy.quote_nullable(orgname), user_db_role=plpy.quote_literal(user_db_role), user_schema=plpy.quote_literal(user_schema), dbname=plpy.quote_literal(dbname), table_name=plpy.quote_literal(table_name))
|
||||
)
|
||||
# Prepare column and type strings required in the SQL queries
|
||||
columns_with_types_arr = [colnames_arr[i] + ' ' + coltypes_arr[i] for i in range(0,len(colnames_arr))]
|
||||
columns_with_types = ','.join(columns_with_types_arr)
|
||||
|
||||
server_schema = ds_fdw_metadata[0]["schemaname"]
|
||||
server_table_name = ds_fdw_metadata[0]["tabname"]
|
||||
server_name = ds_fdw_metadata[0]["servername"]
|
||||
|
||||
# Create temporary table with the augmented results
|
||||
plpy.execute('CREATE UNLOGGED TABLE "{user_schema}".{temp_table_name} AS '
|
||||
'(SELECT {columns}, cartodb_id '
|
||||
'FROM cdb_dataservices_client._OBS_FetchJoinFdwTableData('
|
||||
'{username}::text, {orgname}::text, {schema}::text, {table_name}::text, {function_name}::text, {params}::json) '
|
||||
'AS results({columns_with_types}, cartodb_id int) )'
|
||||
.format(columns=colnames, username=plpy.quote_nullable(username), orgname=plpy.quote_nullable(orgname),
|
||||
user_schema=user_schema, schema=plpy.quote_literal(server_schema), table_name=plpy.quote_literal(server_table_name),
|
||||
function_name=plpy.quote_literal(function_name), params=plpy.quote_literal(params), columns_with_types=columns_with_types,
|
||||
temp_table_name=temporary_table_name)
|
||||
)
|
||||
|
||||
# Wipe user FDW data from the server
|
||||
wiped = plpy.execute("SELECT cdb_dataservices_client._OBS_DisconnectUserTable({username}::text, {orgname}::text, {server_schema}::text, {server_table_name}::text, {fdw_server}::text)"
|
||||
.format(username=plpy.quote_nullable(username), orgname=plpy.quote_nullable(orgname), server_schema=plpy.quote_literal(server_schema), server_table_name=plpy.quote_literal(server_table_name), fdw_server=plpy.quote_literal(server_name))
|
||||
)
|
||||
|
||||
# Add index to cartodb_id
|
||||
plpy.execute('CREATE UNIQUE INDEX {temp_table_name}_pkey ON "{user_schema}".{temp_table_name} (cartodb_id)'
|
||||
.format(user_schema=user_schema, temp_table_name=temporary_table_name)
|
||||
)
|
||||
|
||||
# Prepare table to receive augmented results in new columns
|
||||
for idx, column in enumerate(colnames_arr):
|
||||
if colnames_arr[idx] is not 'the_geom':
|
||||
plpy.execute('ALTER TABLE "{user_schema}".{table_name} ADD COLUMN {column_name} {column_type}'
|
||||
.format(user_schema=user_schema, table_name=table_name, column_name=colnames_arr[idx], column_type=coltypes_arr[idx])
|
||||
)
|
||||
|
||||
# Populate the user table with the augmented results
|
||||
plpy.execute('UPDATE "{user_schema}".{table_name} SET {columns} = '
|
||||
'(SELECT {columns} FROM "{user_schema}".{temporary_table_name} '
|
||||
'WHERE "{user_schema}".{temporary_table_name}.cartodb_id = "{user_schema}".{table_name}.cartodb_id)'
|
||||
.format(columns = colnames, username=plpy.quote_nullable(username), orgname=plpy.quote_nullable(orgname),
|
||||
user_schema = user_schema, table_name=table_name, function_name=function_name, params=params, columns_with_types=columns_with_types,
|
||||
temporary_table_name=temporary_table_name)
|
||||
)
|
||||
|
||||
plpy.execute('DROP TABLE IF EXISTS "{user_schema}".{temporary_table_name}'
|
||||
.format(user_schema=user_schema, table_name=table_name, temporary_table_name=temporary_table_name)
|
||||
)
|
||||
|
||||
return True
|
||||
except Exception as e:
|
||||
plpy.warning('Error trying to augment table {0}'.format(e))
|
||||
# Wipe user FDW data from the server in case of failure if the table was connected
|
||||
if server_table_name:
|
||||
# Wipe local temporary table
|
||||
plpy.execute('DROP TABLE IF EXISTS "{user_schema}".{temporary_table_name}'
|
||||
.format(user_schema=user_schema, table_name=table_name, temporary_table_name=temporary_table_name)
|
||||
)
|
||||
|
||||
wiped = plpy.execute("SELECT cdb_dataservices_client._OBS_DisconnectUserTable({username}::text, {orgname}::text, {server_schema}::text, {server_table_name}::text, {fdw_server}::text)"
|
||||
.format(username=plpy.quote_nullable(username), orgname=plpy.quote_nullable(orgname), server_schema=plpy.quote_literal(server_schema), server_table_name=plpy.quote_literal(server_table_name), fdw_server=plpy.quote_literal(server_name))
|
||||
)
|
||||
return False
|
||||
$$ LANGUAGE plpythonu;
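-- Illustrative usage sketch (not part of the changeset above): the in-place augmentation is
-- expected to be driven through the public client wrapper, roughly as below. The measure
-- parameters are assumptions for the example, not values taken from this diff.
SELECT cdb_dataservices_client._OBS_AugmentTable('my_table', 'OBS_GetMeasure', '{"tag_name": "total_pop"}'::json);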
CREATE OR REPLACE FUNCTION cdb_dataservices_client.__OBS_GetTable(username text, orgname text, user_db_role text, user_schema text, dbname text, table_name text, output_table_name text, function_name text, params json)
|
||||
RETURNS boolean AS $$
|
||||
try:
|
||||
server_table_name = None
|
||||
# Obtain return types for augmentation procedure
|
||||
ds_return_metadata = plpy.execute("SELECT colnames, coltypes "
|
||||
"FROM cdb_dataservices_client._OBS_GetReturnMetadata({username}::text, {orgname}::text, {function_name}::text, {params}::json);"
|
||||
.format(username=plpy.quote_nullable(username), orgname=plpy.quote_nullable(orgname), function_name=plpy.quote_literal(function_name), params=plpy.quote_literal(params))
|
||||
)
|
||||
|
||||
colnames_arr = ds_return_metadata[0]["colnames"]
|
||||
coltypes_arr = ds_return_metadata[0]["coltypes"]
|
||||
|
||||
# Prepare column and type strings required in the SQL queries
|
||||
colnames = ','.join(colnames_arr)
|
||||
columns_with_types_arr = [colnames_arr[i] + ' ' + coltypes_arr[i] for i in range(0,len(colnames_arr))]
|
||||
columns_with_types = ','.join(columns_with_types_arr)
|
||||
|
||||
|
||||
# Instruct the OBS server side to establish a FDW
|
||||
# The metadata is obtained as well in order to:
|
||||
# - (a) be able to write the query to grab the actual data to be executed in the remote server via pl/proxy,
|
||||
# - (b) be able to tell OBS to free resources when done.
|
||||
ds_fdw_metadata = plpy.execute("SELECT schemaname, tabname, servername "
|
||||
"FROM cdb_dataservices_client._OBS_ConnectUserTable({username}::text, {orgname}::text, {user_db_role}::text, {schema}::text, {dbname}::text, {table_name}::text);"
|
||||
.format(username=plpy.quote_nullable(username), orgname=plpy.quote_nullable(orgname), user_db_role=plpy.quote_literal(user_db_role), schema=plpy.quote_literal(user_schema), dbname=plpy.quote_literal(dbname), table_name=plpy.quote_literal(table_name))
|
||||
)
|
||||
|
||||
server_schema = ds_fdw_metadata[0]["schemaname"]
|
||||
server_table_name = ds_fdw_metadata[0]["tabname"]
|
||||
server_name = ds_fdw_metadata[0]["servername"]
|
||||
|
||||
# Get list of user columns to include in the new table
|
||||
user_table_columns = ','.join(
|
||||
plpy.execute('SELECT array_agg(\'user_table.\' || attname) AS columns '
|
||||
'FROM pg_attribute WHERE attrelid = \'"{user_schema}".{table_name}\'::regclass '
|
||||
'AND attnum > 0 AND NOT attisdropped AND attname NOT LIKE \'the_geom_webmercator\' '
|
||||
'AND NOT attname LIKE ANY(string_to_array(\'{colnames}\',\',\'));'
|
||||
.format(user_schema=user_schema, table_name=table_name, colnames=colnames)
|
||||
)[0]["columns"]
|
||||
# Create a new table with the required columns
|
||||
plpy.execute('CREATE TABLE "{schema}".{table_name} ( '
|
||||
'cartodb_id int, the_geom geometry, {columns_with_types} '
|
||||
');'
|
||||
.format(schema=user_schema, table_name=output_table_name, columns_with_types=columns_with_types)
|
||||
)
|
||||
|
||||
# Populate a new table with the augmented results
|
||||
plpy.execute('CREATE TABLE "{user_schema}".{output_table_name} AS '
|
||||
'(SELECT results.{columns}, {user_table_columns} '
|
||||
'FROM {table_name} AS user_table '
|
||||
'LEFT JOIN cdb_dataservices_client._OBS_FetchJoinFdwTableData({username}::text, {orgname}::text, {server_schema}::text, {server_table_name}::text, {function_name}::text, {params}::json) as results({columns_with_types}, cartodb_id int) '
|
||||
'ON results.cartodb_id = user_table.cartodb_id)'
|
||||
.format(output_table_name=output_table_name, columns=colnames, user_table_columns=user_table_columns, username=plpy.quote_nullable(username),
|
||||
orgname=plpy.quote_nullable(orgname), user_schema=user_schema, server_schema=plpy.quote_literal(server_schema), server_table_name=plpy.quote_literal(server_table_name),
|
||||
table_name=table_name, function_name=plpy.quote_literal(function_name), params=plpy.quote_literal(params), columns_with_types=columns_with_types)
|
||||
)
|
||||
plpy.execute('ALTER TABLE "{schema}".{table_name} OWNER TO "{user}";'
|
||||
.format(schema=user_schema, table_name=output_table_name, user=user_db_role)
|
||||
)
|
||||
|
||||
|
||||
# Wipe user FDW data from the server
|
||||
wiped = plpy.execute("SELECT cdb_dataservices_client._OBS_DisconnectUserTable({username}::text, {orgname}::text, {server_schema}::text, {server_table_name}::text, {fdw_server}::text)"
|
||||
.format(username=plpy.quote_nullable(username), orgname=plpy.quote_nullable(orgname), server_schema=plpy.quote_literal(server_schema), server_table_name=plpy.quote_literal(server_table_name), fdw_server=plpy.quote_literal(server_name))
|
||||
)
|
||||
|
||||
return True
|
||||
except Exception as e:
|
||||
plpy.warning('Error trying to get table {0}'.format(e))
|
||||
# Wipe user FDW data from the server in case of failure if the table was connected
|
||||
if server_table_name:
|
||||
wiped = plpy.execute("SELECT cdb_dataservices_client._OBS_DisconnectUserTable({username}::text, {orgname}::text, {server_schema}::text, {server_table_name}::text, {fdw_server}::text)"
|
||||
.format(username=plpy.quote_nullable(username), orgname=plpy.quote_nullable(orgname), server_schema=plpy.quote_literal(server_schema), server_table_name=plpy.quote_literal(server_table_name), fdw_server=plpy.quote_literal(server_name))
|
||||
)
|
||||
return False
|
||||
return True
|
||||
$$ LANGUAGE plpythonu;
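-- Illustrative usage sketch: the table-creating variant is expected to be reached through the
-- public client wrapper shown in the grants below; table and parameter names are assumptions
-- for the example only.
SELECT cdb_dataservices_client._OBS_GetTable('my_table', 'my_table_augmented', 'OBS_GetMeasure', '{"tag_name": "total_pop"}'::json);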
CREATE OR REPLACE FUNCTION cdb_dataservices_client.__DST_PopulateTableOBS_GetMeasure(
|
||||
username text,
|
||||
orgname text,
|
||||
user_db_role text,
|
||||
user_schema text,
|
||||
dbname text,
|
||||
table_name text,
|
||||
output_table_name text,
|
||||
params json
|
||||
) RETURNS boolean AS $$
|
||||
function_name = 'OBS_GetMeasure'
|
||||
# Obtain return types for augmentation procedure
|
||||
ds_return_metadata = plpy.execute(
|
||||
"SELECT colnames, coltypes "
|
||||
"FROM cdb_dataservices_client._DST_GetReturnMetadata({username}::text, {orgname}::text, {function_name}::text, {params}::json);" .format(
|
||||
username=plpy.quote_nullable(username),
|
||||
orgname=plpy.quote_nullable(orgname),
|
||||
function_name=plpy.quote_literal(function_name),
|
||||
params=plpy.quote_literal(params)))
|
||||
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_client._OBS_ConnectUserTable(username text, orgname text, user_db_role text, user_schema text, dbname text, table_name text)
|
||||
RETURNS cdb_dataservices_client.ds_fdw_metadata AS $$
|
||||
CONNECT _server_conn_str();
|
||||
TARGET cdb_dataservices_server._OBS_ConnectUserTable;
|
||||
if ds_return_metadata[0]["colnames"]:
|
||||
colnames_arr = ds_return_metadata[0]["colnames"]
|
||||
coltypes_arr = ds_return_metadata[0]["coltypes"]
|
||||
else:
|
||||
raise Exception('Error retrieving OBS_GetMeasure metadata')
|
||||
|
||||
# Prepare column and type strings required in the SQL queries
|
||||
columns_with_types_arr = [
|
||||
colnames_arr[i] +
|
||||
' ' +
|
||||
coltypes_arr[i] for i in range(
|
||||
0,
|
||||
len(colnames_arr))]
|
||||
columns_with_types = ','.join(columns_with_types_arr)
|
||||
aliased_colname_list = ','.join(
|
||||
['result.' + name for name in colnames_arr])
|
||||
|
||||
# Instruct the OBS server side to establish a FDW
|
||||
# The metadata is obtained as well in order to:
|
||||
# - (a) be able to write the query to grab the actual data to be executed in the remote server via pl/proxy,
|
||||
# - (b) be able to tell OBS to free resources when done.
|
||||
ds_fdw_metadata = plpy.execute(
|
||||
"SELECT schemaname, tabname, servername "
|
||||
"FROM cdb_dataservices_client._DST_ConnectUserTable({username}::text, {orgname}::text, {user_db_role}::text, "
|
||||
"{schema}::text, {dbname}::text, {table_name}::text);" .format(
|
||||
username=plpy.quote_nullable(username),
|
||||
orgname=plpy.quote_nullable(orgname),
|
||||
user_db_role=plpy.quote_literal(user_db_role),
|
||||
schema=plpy.quote_literal(user_schema),
|
||||
dbname=plpy.quote_literal(dbname),
|
||||
table_name=plpy.quote_literal(table_name)))
|
||||
|
||||
if ds_fdw_metadata[0]["schemaname"]:
|
||||
server_schema = ds_fdw_metadata[0]["schemaname"]
|
||||
server_table_name = ds_fdw_metadata[0]["tabname"]
|
||||
server_name = ds_fdw_metadata[0]["servername"]
|
||||
else:
|
||||
raise Exception('Error connecting dataset via FDW')
|
||||
|
||||
# Create a new table with the required columns
|
||||
plpy.execute(
|
||||
'INSERT INTO "{schema}".{analysis_table_name} '
|
||||
'SELECT ut.cartodb_id, ut.the_geom, {colname_list} '
|
||||
'FROM "{schema}".{table_name} ut '
|
||||
'LEFT JOIN _DST_FetchJoinFdwTableData({username}::text, {orgname}::text, {server_schema}::text, {server_table_name}::text, '
|
||||
'{function_name}::text, {params}::json) '
|
||||
'AS result ({columns_with_types}, cartodb_id int) '
|
||||
'ON result.cartodb_id = ut.cartodb_id;' .format(
|
||||
schema=user_schema,
|
||||
analysis_table_name=output_table_name,
|
||||
colname_list=aliased_colname_list,
|
||||
table_name=table_name,
|
||||
username=plpy.quote_nullable(username),
|
||||
orgname=plpy.quote_nullable(orgname),
|
||||
server_schema=plpy.quote_literal(server_schema),
|
||||
server_table_name=plpy.quote_literal(server_table_name),
|
||||
function_name=plpy.quote_literal(function_name),
|
||||
params=plpy.quote_literal(params),
|
||||
columns_with_types=columns_with_types))
|
||||
|
||||
# Wipe user FDW data from the server
|
||||
wiped = plpy.execute(
|
||||
"SELECT cdb_dataservices_client._DST_DisconnectUserTable({username}::text, {orgname}::text, {server_schema}::text, "
|
||||
"{server_table_name}::text, {fdw_server}::text)" .format(
|
||||
username=plpy.quote_nullable(username),
|
||||
orgname=plpy.quote_nullable(orgname),
|
||||
server_schema=plpy.quote_literal(server_schema),
|
||||
server_table_name=plpy.quote_literal(server_table_name),
|
||||
fdw_server=plpy.quote_literal(server_name)))
|
||||
|
||||
return True
|
||||
$$ LANGUAGE plpythonu;
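-- Illustrative usage sketch: the two-step flow pairs this populate function with the prepare
-- function granted further below; the table names and params are assumptions for the example.
SELECT cdb_dataservices_client._DST_PrepareTableOBS_GetMeasure('my_table_dst', '{"tag_name": "total_pop"}'::json);
SELECT cdb_dataservices_client._DST_PopulateTableOBS_GetMeasure('user_table', 'my_table_dst', '{"tag_name": "total_pop"}'::json);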
CREATE OR REPLACE FUNCTION cdb_dataservices_client._DST_ConnectUserTable(
|
||||
username text,
|
||||
orgname text,
|
||||
user_db_role text,
|
||||
user_schema text,
|
||||
dbname text,
|
||||
table_name text
|
||||
)RETURNS cdb_dataservices_client.ds_fdw_metadata AS $$
|
||||
CONNECT cdb_dataservices_client._server_conn_str();
|
||||
TARGET cdb_dataservices_server._DST_ConnectUserTable;
|
||||
$$ LANGUAGE plproxy;
|
||||
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_client._OBS_GetReturnMetadata(username text, orgname text, function_name text, params json)
|
||||
RETURNS cdb_dataservices_client.ds_return_metadata AS $$
|
||||
CONNECT _server_conn_str();
|
||||
TARGET cdb_dataservices_server._OBS_GetReturnMetadata;
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_client._DST_GetReturnMetadata(
|
||||
username text,
|
||||
orgname text,
|
||||
function_name text,
|
||||
params json
|
||||
) RETURNS cdb_dataservices_client.ds_return_metadata AS $$
|
||||
CONNECT cdb_dataservices_client._server_conn_str();
|
||||
TARGET cdb_dataservices_server._DST_GetReturnMetadata;
|
||||
$$ LANGUAGE plproxy;
|
||||
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_client._OBS_FetchJoinFdwTableData(username text, orgname text, table_schema text, table_name text, function_name text, params json)
|
||||
RETURNS SETOF record AS $$
|
||||
CONNECT _server_conn_str();
|
||||
TARGET cdb_dataservices_server._OBS_FetchJoinFdwTableData;
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_client._DST_FetchJoinFdwTableData(
|
||||
username text,
|
||||
orgname text,
|
||||
table_schema text,
|
||||
table_name text,
|
||||
function_name text,
|
||||
params json
|
||||
) RETURNS SETOF record AS $$
|
||||
CONNECT cdb_dataservices_client._server_conn_str();
|
||||
TARGET cdb_dataservices_server._DST_FetchJoinFdwTableData;
|
||||
$$ LANGUAGE plproxy;
|
||||
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_client._OBS_DisconnectUserTable(username text, orgname text, table_schema text, table_name text, server_name text)
|
||||
RETURNS boolean AS $$
|
||||
CONNECT _server_conn_str();
|
||||
TARGET cdb_dataservices_server._OBS_DisconnectUserTable;
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_client._DST_DisconnectUserTable(
|
||||
username text,
|
||||
orgname text,
|
||||
table_schema text,
|
||||
table_name text,
|
||||
server_name text
|
||||
) RETURNS boolean AS $$
|
||||
CONNECT cdb_dataservices_client._server_conn_str();
|
||||
TARGET cdb_dataservices_server._DST_DisconnectUserTable;
|
||||
$$ LANGUAGE plproxy;
|
||||
|
||||
@@ -1,2 +1,2 @@
|
||||
GRANT EXECUTE ON FUNCTION cdb_dataservices_client._obs_augmenttable(table_name text, function_name text, params json) TO publicuser;
|
||||
GRANT EXECUTE ON FUNCTION cdb_dataservices_client._obs_gettable(table_name text, output_table_name text, function_name text, params json) TO publicuser;
|
||||
GRANT EXECUTE ON FUNCTION cdb_dataservices_client._DST_PrepareTableOBS_GetMeasure(output_table_name text, params json) TO publicuser;
|
||||
GRANT EXECUTE ON FUNCTION cdb_dataservices_client._DST_PopulateTableOBS_GetMeasure(table_name text, output_table_name text, params json) TO publicuser;
|
||||
|
||||
@@ -3,65 +3,63 @@ SET search_path TO public,cartodb,cdb_dataservices_client;
|
||||
CREATE TABLE my_table(cartodb_id int);
|
||||
INSERT INTO my_table (cartodb_id) VALUES (1);
|
||||
-- Mock the server functions
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_ConnectUserTable(username text, orgname text, user_db_role text, input_schema text, dbname text, table_name text)
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server._DST_ConnectUserTable(username text, orgname text, user_db_role text, input_schema text, dbname text, table_name text)
|
||||
RETURNS cdb_dataservices_client.ds_fdw_metadata AS $$
|
||||
BEGIN
|
||||
RETURN ('dummy_schema'::text, 'dummy_table'::text, 'dummy_server'::text);
|
||||
END;
|
||||
$$ LANGUAGE 'plpgsql';
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_GetReturnMetadata(username text, orgname text, function_name text, params json)
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server._DST_GetReturnMetadata(username text, orgname text, function_name text, params json)
|
||||
RETURNS cdb_dataservices_client.ds_return_metadata AS $$
|
||||
BEGIN
|
||||
RETURN (Array['total_pop'], Array['double precision']);
|
||||
END;
|
||||
$$ LANGUAGE 'plpgsql';
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_FetchJoinFdwTableData(username text, orgname text, table_schema text, table_name text, function_name text, params json)
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server._DST_FetchJoinFdwTableData(username text, orgname text, table_schema text, table_name text, function_name text, params json)
|
||||
RETURNS RECORD AS $$
|
||||
BEGIN
|
||||
RETURN (23.4::double precision, 1::int);
|
||||
END;
|
||||
$$ LANGUAGE 'plpgsql';
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_DisconnectUserTable(username text, orgname text, table_schema text, table_name text, servername text)
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server._DST_DisconnectUserTable(username text, orgname text, table_schema text, table_name text, servername text)
|
||||
RETURNS boolean AS $$
|
||||
BEGIN
|
||||
RETURN true;
|
||||
END;
|
||||
$$ LANGUAGE 'plpgsql';
|
||||
-- Augment a table with the total_pop column
|
||||
SELECT cdb_dataservices_client._OBS_AugmentTable('my_table', 'dummy', '{"dummy":"dummy"}'::json);
|
||||
_obs_augmenttable
|
||||
-------------------
|
||||
-- Create a sample user table
|
||||
CREATE TABLE user_table (cartodb_id int, the_geom geometry);
|
||||
INSERT INTO user_table(cartodb_id, the_geom) VALUES (1, '0101000020E6100000F74FC902E07D52C05FE24CC7654B4440');
|
||||
INSERT INTO user_table(cartodb_id, the_geom) VALUES (2, '0101000020E6100000F74FC902E07D52C05FE24CC7654B4440');
|
||||
INSERT INTO user_table(cartodb_id, the_geom) VALUES (3, '0101000020E6100000F74FC902E07D52C05FE24CC7654B4440');
|
||||
-- Prepare a table with the total_pop column
|
||||
SELECT cdb_dataservices_client._DST_PrepareTableOBS_GetMeasure('my_table_dst', '{"dummy":"dummy"}'::json);
|
||||
_dst_preparetableobs_getmeasure
|
||||
---------------------------------
|
||||
t
|
||||
(1 row)
|
||||
|
||||
-- The results of the table should return the mocked value of 23.4 in the total_pop column
|
||||
SELECT * FROM my_table;
|
||||
cartodb_id | total_pop
|
||||
------------+-----------
|
||||
1 | 23.4
|
||||
(1 row)
|
||||
-- The table should now exist and be empty
|
||||
SELECT * FROM my_table_dst;
|
||||
cartodb_id | the_geom | total_pop
|
||||
------------+----------+-----------
|
||||
(0 rows)
|
||||
|
||||
-- Mock again the function for it to return a different value now
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_FetchJoinFdwTableData(username text, orgname text, table_schema text, table_name text, function_name text, params json)
|
||||
RETURNS RECORD AS $$
|
||||
BEGIN
|
||||
RETURN (577777.4::double precision, 1::int);
|
||||
END;
|
||||
$$ LANGUAGE 'plpgsql';
|
||||
-- Augment a new table with total_pop
|
||||
SELECT cdb_dataservices_client._OBS_GetTable('my_table', 'my_table_new', 'dummy', '{"dummy":"dummy"}'::json);
|
||||
_obs_gettable
|
||||
---------------
|
||||
-- Populate the table with measurement data
|
||||
SELECT cdb_dataservices_client._DST_PopulateTableOBS_GetMeasure('user_table', 'my_table_dst', '{"dummy":"dummy"}'::json);
|
||||
_dst_populatetableobs_getmeasure
|
||||
----------------------------------
|
||||
t
|
||||
(1 row)
|
||||
|
||||
-- Check that the table contains the new value for total_pop and not the value already existent in the table
|
||||
SELECT * FROM my_table_new;
|
||||
total_pop | cartodb_id
|
||||
-----------+------------
|
||||
577777.4 | 1
|
||||
(1 row)
|
||||
-- The table should now show the results
|
||||
SELECT * FROM my_table_dst;
|
||||
cartodb_id | the_geom | total_pop
|
||||
------------+----------------------------------------------------+-----------
|
||||
1 | 0101000020E6100000F74FC902E07D52C05FE24CC7654B4440 | 23.4
|
||||
2 | 0101000020E6100000F74FC902E07D52C05FE24CC7654B4440 |
|
||||
3 | 0101000020E6100000F74FC902E07D52C05FE24CC7654B4440 |
|
||||
(3 rows)
|
||||
|
||||
-- Clean tables
|
||||
DROP TABLE my_table;
|
||||
DROP TABLE my_table_new;
|
||||
DROP TABLE my_table_dst;
|
||||
|
||||
@@ -6,54 +6,51 @@ CREATE TABLE my_table(cartodb_id int);
|
||||
INSERT INTO my_table (cartodb_id) VALUES (1);
|
||||
|
||||
-- Mock the server functions
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_ConnectUserTable(username text, orgname text, user_db_role text, input_schema text, dbname text, table_name text)
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server._DST_ConnectUserTable(username text, orgname text, user_db_role text, input_schema text, dbname text, table_name text)
|
||||
RETURNS cdb_dataservices_client.ds_fdw_metadata AS $$
|
||||
BEGIN
|
||||
RETURN ('dummy_schema'::text, 'dummy_table'::text, 'dummy_server'::text);
|
||||
END;
|
||||
$$ LANGUAGE 'plpgsql';
|
||||
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_GetReturnMetadata(username text, orgname text, function_name text, params json)
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server._DST_GetReturnMetadata(username text, orgname text, function_name text, params json)
|
||||
RETURNS cdb_dataservices_client.ds_return_metadata AS $$
|
||||
BEGIN
|
||||
RETURN (Array['total_pop'], Array['double precision']);
|
||||
END;
|
||||
$$ LANGUAGE 'plpgsql';
|
||||
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_FetchJoinFdwTableData(username text, orgname text, table_schema text, table_name text, function_name text, params json)
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server._DST_FetchJoinFdwTableData(username text, orgname text, table_schema text, table_name text, function_name text, params json)
|
||||
RETURNS RECORD AS $$
|
||||
BEGIN
|
||||
RETURN (23.4::double precision, 1::int);
|
||||
END;
|
||||
$$ LANGUAGE 'plpgsql';
|
||||
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_DisconnectUserTable(username text, orgname text, table_schema text, table_name text, servername text)
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server._DST_DisconnectUserTable(username text, orgname text, table_schema text, table_name text, servername text)
|
||||
RETURNS boolean AS $$
|
||||
BEGIN
|
||||
RETURN true;
|
||||
END;
|
||||
$$ LANGUAGE 'plpgsql';
|
||||
|
||||
-- Augment a table with the total_pop column
|
||||
SELECT cdb_dataservices_client._OBS_AugmentTable('my_table', 'dummy', '{"dummy":"dummy"}'::json);
|
||||
-- Create a sample user table
|
||||
CREATE TABLE user_table (cartodb_id int, the_geom geometry);
|
||||
INSERT INTO user_table(cartodb_id, the_geom) VALUES (1, '0101000020E6100000F74FC902E07D52C05FE24CC7654B4440');
|
||||
INSERT INTO user_table(cartodb_id, the_geom) VALUES (2, '0101000020E6100000F74FC902E07D52C05FE24CC7654B4440');
|
||||
INSERT INTO user_table(cartodb_id, the_geom) VALUES (3, '0101000020E6100000F74FC902E07D52C05FE24CC7654B4440');
|
||||
|
||||
-- The results of the table should return the mocked value of 23.4 in the total_pop column
|
||||
SELECT * FROM my_table;
|
||||
-- Prepare a table with the total_pop column
|
||||
SELECT cdb_dataservices_client._DST_PrepareTableOBS_GetMeasure('my_table_dst', '{"dummy":"dummy"}'::json);
|
||||
|
||||
-- Mock again the function for it to return a different value now
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_FetchJoinFdwTableData(username text, orgname text, table_schema text, table_name text, function_name text, params json)
|
||||
RETURNS RECORD AS $$
|
||||
BEGIN
|
||||
RETURN (577777.4::double precision, 1::int);
|
||||
END;
|
||||
$$ LANGUAGE 'plpgsql';
|
||||
-- The table should now exist and be empty
|
||||
SELECT * FROM my_table_dst;
|
||||
|
||||
-- Augment a new table with total_pop
|
||||
SELECT cdb_dataservices_client._OBS_GetTable('my_table', 'my_table_new', 'dummy', '{"dummy":"dummy"}'::json);
|
||||
-- Populate the table with measurement data
|
||||
SELECT cdb_dataservices_client._DST_PopulateTableOBS_GetMeasure('user_table', 'my_table_dst', '{"dummy":"dummy"}'::json);
|
||||
|
||||
-- Check that the table contains the new value for total_pop and not the value already existent in the table
|
||||
SELECT * FROM my_table_new;
|
||||
-- The table should now show the results
|
||||
SELECT * FROM my_table_dst;
|
||||
|
||||
-- Clean tables
|
||||
DROP TABLE my_table;
|
||||
DROP TABLE my_table_new;
|
||||
DROP TABLE my_table_dst;
|
||||
@@ -1,6 +1,6 @@
|
||||
# Demographic Functions
|
||||
|
||||
The Demographic Snapshot enables you to collect demographic reports around a point location. For example, you can take the coordinates of a coffee shop and find the average population characteristics, such as total population, educational attainment, housing and income information around that location. You can use raw street addresses by combining the Demographic Snapshot with CARTO's geocoding features. If you need help creating coordinates from addresses, see the [Geocoding Functions](/carto-engine/dataservices-api/geocoding-functions/) documentation.
|
||||
The Demographic Snapshot enables you to collect demographic reports around a point location. For example, you can take the coordinates of a coffee shop and find the average population characteristics, such as total population, educational attainment, housing and income information around that location. You can use raw street addresses by combining the Demographic Snapshot with CARTO's geocoding features. If you need help creating coordinates from addresses, see the [Geocoding Functions](https://carto.com/docs/carto-engine/dataservices-api/geocoding-functions/) documentation.
|
||||
|
||||
_**Note:** The Demographic Snapshot functions are only available for the United States._
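For instance, a minimal sketch (the coordinates are illustrative, and `CDB_LatLng` is provided by the core CARTO extension rather than by this API):

SELECT * FROM OBS_GetDemographicSnapshot(CDB_LatLng(40.70, -73.93));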
@@ -290,8 +290,7 @@ Geocodes a postal code from a specified country into an IP address, displayed as
|
||||
|
||||
Name | Type | Description
|
||||
--- | --- | ---
|
||||
`ip_address` | `text` | Postal code
|
||||
`country_name` | `text` | IPv4 or IPv6 address
|
||||
`ip_address` | `text` | IPv4 or IPv6 address
|
||||
|
||||
#### Returns
|
||||
|
||||
|
||||
1341 server/extension/cdb_dataservices_server--0.16.0--0.17.0.sql (Normal file; diff suppressed because it is too large)
1208 server/extension/cdb_dataservices_server--0.17.0--0.16.0.sql (Normal file; diff suppressed because it is too large)
2491 server/extension/cdb_dataservices_server--0.17.0.sql (Normal file; diff suppressed because it is too large)
@@ -1,5 +1,5 @@
|
||||
comment = 'CartoDB dataservices server extension'
|
||||
default_version = '0.15.0'
|
||||
default_version = '0.17.0'
|
||||
requires = 'plpythonu, plproxy, postgis, cdb_geocoder'
|
||||
superuser = true
|
||||
schema = cdb_dataservices_server
|
||||
|
||||
@@ -0,0 +1,44 @@
|
||||
--DO NOT MODIFY THIS FILE, IT IS GENERATED AUTOMATICALLY FROM SOURCES
|
||||
-- Complain if script is sourced in psql, rather than via CREATE EXTENSION
|
||||
\echo Use "ALTER EXTENSION cdb_dataservices_server UPDATE TO '0.15.1'" to load this file. \quit
|
||||
|
||||
-- HERE goes your code to upgrade/downgrade
|
||||
|
||||
DROP FUNCTION IF EXISTS cdb_dataservices_server._OBS_ConnectUserTable(text, text, text, text, text, text);
|
||||
DROP FUNCTION IF EXISTS cdb_dataservices_server.__OBS_ConnectUserTable(text, text, text, text, text, text, text);
|
||||
DROP FUNCTION IF EXISTS cdb_dataservices_server._OBS_GetReturnMetadata(text, text, text, json);
|
||||
DROP FUNCTION IF EXISTS cdb_dataservices_server._OBS_FetchJoinFdwTableData(text, text, text, text, text, json);
|
||||
DROP FUNCTION IF EXISTS cdb_dataservices_server._OBS_DisconnectUserTable(text, text, text, text, text);
|
||||
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server._DST_ConnectUserTable(username text, orgname text, user_db_role text, input_schema text, dbname text, table_name text)
|
||||
RETURNS cdb_dataservices_server.ds_fdw_metadata AS $$
|
||||
host_addr = plpy.execute("SELECT split_part(inet_client_addr()::text, '/', 1) as user_host")[0]['user_host']
|
||||
return plpy.execute("SELECT * FROM cdb_dataservices_server.__DST_ConnectUserTable({username}::text, {orgname}::text, {user_db_role}::text, {schema}::text, {dbname}::text, {host_addr}::text, {table_name}::text)"
|
||||
.format(username=plpy.quote_nullable(username), orgname=plpy.quote_nullable(orgname), user_db_role=plpy.quote_literal(user_db_role), schema=plpy.quote_literal(input_schema), dbname=plpy.quote_literal(dbname), table_name=plpy.quote_literal(table_name), host_addr=plpy.quote_literal(host_addr))
|
||||
)[0]
|
||||
$$ LANGUAGE plpythonu;
|
||||
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server.__DST_ConnectUserTable(username text, orgname text, user_db_role text, input_schema text, dbname text, host_addr text, table_name text)
|
||||
RETURNS cdb_dataservices_server.ds_fdw_metadata AS $$
|
||||
CONNECT cdb_dataservices_server._obs_server_conn_str(username, orgname);
|
||||
TARGET cdb_observatory._OBS_ConnectUserTable;
|
||||
$$ LANGUAGE plproxy;
|
||||
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server._DST_GetReturnMetadata(username text, orgname text, function_name text, params json)
|
||||
RETURNS cdb_dataservices_server.ds_return_metadata AS $$
|
||||
CONNECT cdb_dataservices_server._obs_server_conn_str(username, orgname);
|
||||
TARGET cdb_observatory._OBS_GetReturnMetadata;
|
||||
$$ LANGUAGE plproxy;
|
||||
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server._DST_FetchJoinFdwTableData(username text, orgname text, table_schema text, table_name text, function_name text, params json)
|
||||
RETURNS SETOF record AS $$
|
||||
CONNECT cdb_dataservices_server._obs_server_conn_str(username, orgname);
|
||||
TARGET cdb_observatory._OBS_FetchJoinFdwTableData;
|
||||
$$ LANGUAGE plproxy;
|
||||
|
||||
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server._DST_DisconnectUserTable(username text, orgname text, table_schema text, table_name text, servername text)
|
||||
RETURNS boolean AS $$
|
||||
CONNECT cdb_dataservices_server._obs_server_conn_str(username, orgname);
|
||||
TARGET cdb_observatory._OBS_DisconnectUserTable;
|
||||
$$ LANGUAGE plproxy;
|
||||
@@ -0,0 +1,44 @@
|
||||
--DO NOT MODIFY THIS FILE, IT IS GENERATED AUTOMATICALLY FROM SOURCES
|
||||
-- Complain if script is sourced in psql, rather than via CREATE EXTENSION
|
||||
\echo Use "ALTER EXTENSION cdb_dataservices_server UPDATE TO '0.15.0'" to load this file. \quit
|
||||
|
||||
-- HERE goes your code to upgrade/downgrade
|
||||
|
||||
DROP FUNCTION IF EXISTS cdb_dataservices_server._DST_ConnectUserTable(text, text, text, text, text, text);
|
||||
DROP FUNCTION IF EXISTS cdb_dataservices_server.__DST_ConnectUserTable(text, text, text, text, text, text, text);
|
||||
DROP FUNCTION IF EXISTS cdb_dataservices_server._DST_GetReturnMetadata(text, text, text, json);
|
||||
DROP FUNCTION IF EXISTS cdb_dataservices_server._DST_FetchJoinFdwTableData(text, text, text, text, text, json);
|
||||
DROP FUNCTION IF EXISTS cdb_dataservices_server._DST_DisconnectUserTable(text, text, text, text, text);
|
||||
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_ConnectUserTable(username text, orgname text, user_db_role text, input_schema text, dbname text, table_name text)
|
||||
RETURNS cdb_dataservices_server.ds_fdw_metadata AS $$
|
||||
host_addr = plpy.execute("SELECT split_part(inet_client_addr()::text, '/', 1) as user_host")[0]['user_host']
|
||||
return plpy.execute("SELECT * FROM cdb_dataservices_server.__OBS_ConnectUserTable({username}::text, {orgname}::text, {user_db_role}::text, {schema}::text, {dbname}::text, {host_addr}::text, {table_name}::text)"
|
||||
.format(username=plpy.quote_nullable(username), orgname=plpy.quote_nullable(orgname), user_db_role=plpy.quote_literal(user_db_role), schema=plpy.quote_literal(input_schema), dbname=plpy.quote_literal(dbname), table_name=plpy.quote_literal(table_name), host_addr=plpy.quote_literal(host_addr))
|
||||
)[0]
|
||||
$$ LANGUAGE plpythonu;
|
||||
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server.__OBS_ConnectUserTable(username text, orgname text, user_db_role text, input_schema text, dbname text, host_addr text, table_name text)
|
||||
RETURNS cdb_dataservices_server.ds_fdw_metadata AS $$
|
||||
CONNECT cdb_dataservices_server._obs_server_conn_str(username, orgname);
|
||||
TARGET cdb_observatory._OBS_ConnectUserTable;
|
||||
$$ LANGUAGE plproxy;
|
||||
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_GetReturnMetadata(username text, orgname text, function_name text, params json)
|
||||
RETURNS cdb_dataservices_server.ds_return_metadata AS $$
|
||||
CONNECT cdb_dataservices_server._obs_server_conn_str(username, orgname);
|
||||
TARGET cdb_observatory._OBS_GetReturnMetadata;
|
||||
$$ LANGUAGE plproxy;
|
||||
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_FetchJoinFdwTableData(username text, orgname text, table_schema text, table_name text, function_name text, params json)
|
||||
RETURNS SETOF record AS $$
|
||||
CONNECT cdb_dataservices_server._obs_server_conn_str(username, orgname);
|
||||
TARGET cdb_observatory._OBS_FetchJoinFdwTableData;
|
||||
$$ LANGUAGE plproxy;
|
||||
|
||||
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_DisconnectUserTable(username text, orgname text, table_schema text, table_name text, servername text)
|
||||
RETURNS boolean AS $$
|
||||
CONNECT cdb_dataservices_server._obs_server_conn_str(username, orgname);
|
||||
TARGET cdb_observatory._OBS_DisconnectUserTable;
|
||||
$$ LANGUAGE plproxy;
|
||||
@@ -0,0 +1,69 @@
|
||||
--DO NOT MODIFY THIS FILE, IT IS GENERATED AUTOMATICALLY FROM SOURCES
|
||||
-- Complain if script is sourced in psql, rather than via CREATE EXTENSION
|
||||
\echo Use "ALTER EXTENSION cdb_dataservices_server UPDATE TO '0.16.0'" to load this file. \quit
|
||||
|
||||
-- Here goes your code to upgrade/downgrade
|
||||
|
||||
-- This is done in order to avoid an undesired dependency on the cartodb extension
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server.cdb_conf_getconf(input_key text)
|
||||
RETURNS JSON AS $$
|
||||
SELECT VALUE FROM cartodb.cdb_conf WHERE key = input_key;
|
||||
$$ LANGUAGE SQL STABLE SECURITY DEFINER;
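-- Illustrative usage sketch: the wrapper is read the same way cdb_conf would be; the key name
-- below is an assumption for the example, not a key introduced by this changeset.
SELECT cdb_dataservices_server.cdb_conf_getconf('redis_metrics_config');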
CREATE OR REPLACE FUNCTION cdb_dataservices_server._cdb_mapzen_geocode_street_point(username TEXT, orgname TEXT, searchtext TEXT, city TEXT DEFAULT NULL, state_province TEXT DEFAULT NULL, country TEXT DEFAULT NULL)
|
||||
RETURNS Geometry AS $$
|
||||
import cartodb_services
|
||||
cartodb_services.init(plpy, GD)
|
||||
from cartodb_services.mapzen import MapzenGeocoder
|
||||
from cartodb_services.mapzen.types import country_to_iso3
|
||||
from cartodb_services.metrics import QuotaService
|
||||
from cartodb_services.tools import Logger
|
||||
from cartodb_services.refactor.tools.logger import LoggerConfigBuilder
|
||||
from cartodb_services.refactor.service.mapzen_geocoder_config import MapzenGeocoderConfigBuilder
|
||||
from cartodb_services.refactor.core.environment import ServerEnvironmentBuilder
|
||||
from cartodb_services.refactor.backend.server_config import ServerConfigBackendFactory
|
||||
from cartodb_services.refactor.backend.user_config import UserConfigBackendFactory
|
||||
from cartodb_services.refactor.backend.org_config import OrgConfigBackendFactory
|
||||
from cartodb_services.refactor.backend.redis_metrics_connection import RedisMetricsConnectionFactory
|
||||
|
||||
server_config_backend = ServerConfigBackendFactory().get()
|
||||
environment = ServerEnvironmentBuilder(server_config_backend).get()
|
||||
user_config_backend = UserConfigBackendFactory(username, environment, server_config_backend).get()
|
||||
org_config_backend = OrgConfigBackendFactory(orgname, environment, server_config_backend).get()
|
||||
|
||||
logger_config = LoggerConfigBuilder(environment, server_config_backend).get()
|
||||
logger = Logger(logger_config)
|
||||
|
||||
mapzen_geocoder_config = MapzenGeocoderConfigBuilder(server_config_backend, user_config_backend, org_config_backend, username, orgname).get()
|
||||
|
||||
redis_metrics_connection = RedisMetricsConnectionFactory(environment, server_config_backend).get()
|
||||
|
||||
quota_service = QuotaService(mapzen_geocoder_config, redis_metrics_connection)
|
||||
if not quota_service.check_user_quota():
|
||||
raise Exception('You have reached the limit of your quota')
|
||||
|
||||
try:
|
||||
geocoder = MapzenGeocoder(mapzen_geocoder_config.mapzen_api_key, logger)
|
||||
country_iso3 = None
|
||||
if country:
|
||||
country_iso3 = country_to_iso3(country)
|
||||
coordinates = geocoder.geocode(searchtext=searchtext, city=city,
|
||||
state_province=state_province,
|
||||
country=country_iso3, search_type='address')
|
||||
if coordinates:
|
||||
quota_service.increment_success_service_use()
|
||||
plan = plpy.prepare("SELECT ST_SetSRID(ST_MakePoint($1, $2), 4326); ", ["double precision", "double precision"])
|
||||
point = plpy.execute(plan, [coordinates[0], coordinates[1]], 1)[0]
|
||||
return point['st_setsrid']
|
||||
else:
|
||||
quota_service.increment_empty_service_use()
|
||||
return None
|
||||
except BaseException as e:
|
||||
import sys
|
||||
quota_service.increment_failed_service_use()
|
||||
logger.error('Error trying to geocode street point using mapzen', sys.exc_info(), data={"username": username, "orgname": orgname})
|
||||
raise Exception('Error trying to geocode street point using mapzen')
|
||||
finally:
|
||||
quota_service.increment_total_service_use()
|
||||
$$ LANGUAGE plpythonu;
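-- Illustrative usage sketch: a direct call to the provider-specific geocoder, assuming a user
-- with a configured Mapzen API key and remaining quota; all argument values are examples only.
SELECT cdb_dataservices_server._cdb_mapzen_geocode_street_point('some_user', NULL, 'Calle Gran Via 46', 'Madrid', NULL, 'Spain');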
@@ -1176,34 +1176,34 @@ CREATE TYPE cdb_dataservices_server.ds_fdw_metadata as (schemaname text, tabname
|
||||
|
||||
CREATE TYPE cdb_dataservices_server.ds_return_metadata as (colnames text[], coltypes text[]);
|
||||
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_ConnectUserTable(username text, orgname text, user_db_role text, input_schema text, dbname text, table_name text)
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server._DST_ConnectUserTable(username text, orgname text, user_db_role text, input_schema text, dbname text, table_name text)
|
||||
RETURNS cdb_dataservices_server.ds_fdw_metadata AS $$
|
||||
host_addr = plpy.execute("SELECT split_part(inet_client_addr()::text, '/', 1) as user_host")[0]['user_host']
|
||||
return plpy.execute("SELECT * FROM cdb_dataservices_server.__OBS_ConnectUserTable({username}::text, {orgname}::text, {user_db_role}::text, {schema}::text, {dbname}::text, {host_addr}::text, {table_name}::text)"
|
||||
return plpy.execute("SELECT * FROM cdb_dataservices_server.__DST_ConnectUserTable({username}::text, {orgname}::text, {user_db_role}::text, {schema}::text, {dbname}::text, {host_addr}::text, {table_name}::text)"
|
||||
.format(username=plpy.quote_nullable(username), orgname=plpy.quote_nullable(orgname), user_db_role=plpy.quote_literal(user_db_role), schema=plpy.quote_literal(input_schema), dbname=plpy.quote_literal(dbname), table_name=plpy.quote_literal(table_name), host_addr=plpy.quote_literal(host_addr))
|
||||
)[0]
|
||||
$$ LANGUAGE plpythonu;
|
||||
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server.__OBS_ConnectUserTable(username text, orgname text, user_db_role text, input_schema text, dbname text, host_addr text, table_name text)
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server.__DST_ConnectUserTable(username text, orgname text, user_db_role text, input_schema text, dbname text, host_addr text, table_name text)
|
||||
RETURNS cdb_dataservices_server.ds_fdw_metadata AS $$
|
||||
CONNECT cdb_dataservices_server._obs_server_conn_str(username, orgname);
|
||||
TARGET cdb_observatory._OBS_ConnectUserTable;
|
||||
$$ LANGUAGE plproxy;
|
||||
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_GetReturnMetadata(username text, orgname text, function_name text, params json)
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server._DST_GetReturnMetadata(username text, orgname text, function_name text, params json)
|
||||
RETURNS cdb_dataservices_server.ds_return_metadata AS $$
|
||||
CONNECT cdb_dataservices_server._obs_server_conn_str(username, orgname);
|
||||
TARGET cdb_observatory._OBS_GetReturnMetadata;
|
||||
$$ LANGUAGE plproxy;
|
||||
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_FetchJoinFdwTableData(username text, orgname text, table_schema text, table_name text, function_name text, params json)
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server._DST_FetchJoinFdwTableData(username text, orgname text, table_schema text, table_name text, function_name text, params json)
|
||||
RETURNS SETOF record AS $$
|
||||
CONNECT cdb_dataservices_server._obs_server_conn_str(username, orgname);
|
||||
TARGET cdb_observatory._OBS_FetchJoinFdwTableData;
|
||||
$$ LANGUAGE plproxy;
|
||||
|
||||
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_DisconnectUserTable(username text, orgname text, table_schema text, table_name text, servername text)
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server._DST_DisconnectUserTable(username text, orgname text, table_schema text, table_name text, servername text)
|
||||
RETURNS boolean AS $$
|
||||
CONNECT cdb_dataservices_server._obs_server_conn_str(username, orgname);
|
||||
TARGET cdb_observatory._OBS_DisconnectUserTable;
|
||||
@@ -0,0 +1,54 @@
|
||||
--DO NOT MODIFY THIS FILE, IT IS GENERATED AUTOMATICALLY FROM SOURCES
|
||||
-- Complain if script is sourced in psql, rather than via CREATE EXTENSION
|
||||
\echo Use "ALTER EXTENSION cdb_dataservices_server UPDATE TO '0.15.1'" to load this file. \quit
|
||||
|
||||
-- Here goes your code to upgrade/downgrade
|
||||
|
||||
DROP FUNCTION IF EXISTS cdb_dataservices_server.cdb_conf_getconf(text);
|
||||
|
||||
-- Geocodes a street address given a searchtext and a state and/or country
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server.cdb_geocode_street_point(username TEXT, orgname TEXT, searchtext TEXT, city TEXT DEFAULT NULL, state_province TEXT DEFAULT NULL, country TEXT DEFAULT NULL)
|
||||
RETURNS Geometry AS $$
|
||||
import cartodb_services
|
||||
cartodb_services.init(plpy, GD)
|
||||
|
||||
from cartodb_services.config.user import User
|
||||
from cartodb_services.config.configs import ConfigsFactory
|
||||
from cartodb_services.config.hires_geocoder_config import HiResGeocoderConfigFactory
|
||||
from cartodb_services.request.request import RequestFactory
|
||||
|
||||
user = User(username, orgname)
|
||||
configs = ConfigsFactory.get(user)
|
||||
request = RequestFactory().create(user, configs, 'cdb_geocode_street_point')
|
||||
|
||||
# TODO change to hires_geocoder_config = HiResGeocoderConfigFactory.get(request)
|
||||
hires_geocoder_config = HiResGeocoderConfigFactory(configs).get(user)
|
||||
|
||||
if hires_geocoder_config.provider == 'here':
|
||||
here_plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_here_geocode_street_point($1, $2, $3, $4, $5, $6) as point; ", ["text", "text", "text", "text", "text", "text"])
|
||||
return plpy.execute(here_plan, [username, orgname, searchtext, city, state_province, country], 1)[0]['point']
|
||||
elif hires_geocoder_config.provider == 'google':
|
||||
google_plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_google_geocode_street_point($1, $2, $3, $4, $5, $6) as point; ", ["text", "text", "text", "text", "text", "text"])
|
||||
return plpy.execute(google_plan, [username, orgname, searchtext, city, state_province, country], 1)[0]['point']
|
||||
elif hires_geocoder_config.provider == 'mapzen':
|
||||
mapzen_plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_mapzen_geocode_street_point($1, $2, $3, $4, $5, $6) as point; ", ["text", "text", "text", "text", "text", "text"])
|
||||
return plpy.execute(mapzen_plan, [username, orgname, searchtext, city, state_province, country], 1)[0]['point']
|
||||
else:
|
||||
raise Exception('Requested geocoder is not available')
|
||||
$$ LANGUAGE plpythonu;
|
||||
|
||||
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server.cdb_here_geocode_street_point(username TEXT, orgname TEXT, searchtext TEXT, city TEXT DEFAULT NULL, state_province TEXT DEFAULT NULL, country TEXT DEFAULT NULL)
|
||||
RETURNS Geometry AS $$
|
||||
plpy.execute("SELECT cdb_dataservices_server._connect_to_redis('{0}')".format(username))
|
||||
redis_conn = GD["redis_connection_{0}".format(username)]['redis_metrics_connection']
|
||||
plpy.execute("SELECT cdb_dataservices_server._get_geocoder_config({0}, {1})".format(plpy.quote_nullable(username), plpy.quote_nullable(orgname)))
|
||||
user_geocoder_config = GD["user_geocoder_config_{0}".format(username)]
|
||||
|
||||
if user_geocoder_config.heremaps_geocoder:
|
||||
here_plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_here_geocode_street_point($1, $2, $3, $4, $5, $6) as point; ", ["text", "text", "text", "text", "text", "text"])
|
||||
return plpy.execute(here_plan, [username, orgname, searchtext, city, state_province, country], 1)[0]['point']
|
||||
else:
|
||||
raise Exception('Here geocoder is not available for your account.')
|
||||
|
||||
$$ LANGUAGE plpythonu;
|
||||
2411 server/extension/old_versions/cdb_dataservices_server--0.16.0.sql (Normal file; diff suppressed because it is too large)
@@ -7,15 +7,21 @@ CREATE OR REPLACE FUNCTION cdb_dataservices_server.cdb_route_point_to_point(
|
||||
options text[] DEFAULT ARRAY[]::text[],
|
||||
units text DEFAULT 'kilometers')
|
||||
RETURNS cdb_dataservices_server.simple_route AS $$
|
||||
from cartodb_services.metrics import metrics
|
||||
from cartodb_services.tools import Logger
|
||||
plpy.execute("SELECT cdb_dataservices_server._connect_to_redis('{0}')".format(username))
|
||||
redis_conn = GD["redis_connection_{0}".format(username)]['redis_metrics_connection']
|
||||
plpy.execute("SELECT cdb_dataservices_server._get_routing_config({0}, {1})".format(plpy.quote_nullable(username), plpy.quote_nullable(orgname)))
|
||||
user_routing_config = GD["user_routing_config_{0}".format(username)]
|
||||
plpy.execute("SELECT cdb_dataservices_server._get_logger_config()")
|
||||
logger_config = GD["logger_config"]
|
||||
logger = Logger(logger_config)
|
||||
|
||||
waypoints = [origin, destination]
|
||||
mapzen_plan = plpy.prepare("SELECT * FROM cdb_dataservices_server._cdb_mapzen_route_with_waypoints($1, $2, $3, $4, $5, $6) as route;", ["text", "text", "geometry(Point, 4326)[]", "text", "text[]", "text"])
|
||||
result = plpy.execute(mapzen_plan, [username, orgname, waypoints, mode, options, units])
|
||||
return [result[0]['shape'],result[0]['length'], result[0]['duration']]
|
||||
with metrics('cdb_route_with_point', user_routing_config, logger):
|
||||
waypoints = [origin, destination]
|
||||
mapzen_plan = plpy.prepare("SELECT * FROM cdb_dataservices_server._cdb_mapzen_route_with_waypoints($1, $2, $3, $4, $5, $6) as route;", ["text", "text", "geometry(Point, 4326)[]", "text", "text[]", "text"])
|
||||
result = plpy.execute(mapzen_plan, [username, orgname, waypoints, mode, options, units])
|
||||
return [result[0]['shape'],result[0]['length'], result[0]['duration']]
|
||||
$$ LANGUAGE plpythonu;
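-- Illustrative usage sketch: point-to-point routing through this function, assuming the usual
-- (username, orgname, origin, destination, mode, options, units) signature that the hunk above
-- only partially shows; all values are examples only.
SELECT shape, length, duration
FROM cdb_dataservices_server.cdb_route_point_to_point(
  'some_user', NULL,
  ST_SetSRID(ST_MakePoint(-3.7035, 40.4169), 4326),
  ST_SetSRID(ST_MakePoint(-3.6923, 40.4203), 4326),
  'car');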
@@ -27,12 +33,18 @@ CREATE OR REPLACE FUNCTION cdb_dataservices_server.cdb_route_with_waypoints(
|
||||
options text[] DEFAULT ARRAY[]::text[],
|
||||
units text DEFAULT 'kilometers')
|
||||
RETURNS cdb_dataservices_server.simple_route AS $$
|
||||
from cartodb_services.metrics import metrics
|
||||
from cartodb_services.tools import Logger
|
||||
plpy.execute("SELECT cdb_dataservices_server._connect_to_redis('{0}')".format(username))
|
||||
redis_conn = GD["redis_connection_{0}".format(username)]['redis_metrics_connection']
|
||||
plpy.execute("SELECT cdb_dataservices_server._get_routing_config({0}, {1})".format(plpy.quote_nullable(username), plpy.quote_nullable(orgname)))
|
||||
user_routing_config = GD["user_routing_config_{0}".format(username)]
|
||||
plpy.execute("SELECT cdb_dataservices_server._get_logger_config()")
|
||||
logger_config = GD["logger_config"]
|
||||
logger = Logger(logger_config)
|
||||
|
||||
mapzen_plan = plpy.prepare("SELECT * FROM cdb_dataservices_server._cdb_mapzen_route_with_waypoints($1, $2, $3, $4, $5, $6) as route;", ["text", "text", "geometry(Point, 4326)[]", "text", "text[]", "text"])
|
||||
result = plpy.execute(mapzen_plan, [username, orgname, waypoints, mode, options, units])
|
||||
return [result[0]['shape'],result[0]['length'], result[0]['duration']]
|
||||
with metrics('cdb_route_with_waypoints', user_routing_config, logger):
|
||||
mapzen_plan = plpy.prepare("SELECT * FROM cdb_dataservices_server._cdb_mapzen_route_with_waypoints($1, $2, $3, $4, $5, $6) as route;", ["text", "text", "geometry(Point, 4326)[]", "text", "text[]", "text"])
|
||||
result = plpy.execute(mapzen_plan, [username, orgname, waypoints, mode, options, units])
|
||||
return [result[0]['shape'],result[0]['length'], result[0]['duration']]
|
||||
$$ LANGUAGE plpythonu;
|
||||
|
||||
@@ -11,9 +11,9 @@ RETURNS text AS $$
|
||||
plpy.execute("SELECT cdb_dataservices_server._connect_to_redis('{0}')".format(username))
|
||||
redis_conn = GD["redis_connection_{0}".format(username)]['redis_metrics_connection']
|
||||
plpy.execute("SELECT cdb_dataservices_server._get_obs_snapshot_config({0}, {1})".format(plpy.quote_nullable(username), plpy.quote_nullable(orgname)))
|
||||
user_obs_snapshot_config = GD["user_obs_snapshot_config_{0}".format(username)]
|
||||
user_obs_config = GD["user_obs_snapshot_config_{0}".format(username)]
|
||||
|
||||
return user_obs_snapshot_config.connection_str
|
||||
return user_obs_config.connection_str
|
||||
$$ LANGUAGE plpythonu;
|
||||
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_GetDemographicSnapshotJSON(
|
||||
@@ -34,6 +34,7 @@ CREATE OR REPLACE FUNCTION cdb_dataservices_server.obs_get_demographic_snapshot(
|
||||
time_span TEXT DEFAULT NULL,
|
||||
geometry_level TEXT DEFAULT NULL)
|
||||
RETURNS json AS $$
|
||||
from cartodb_services.metrics import metrics
|
||||
from cartodb_services.metrics import QuotaService
|
||||
from cartodb_services.tools import Logger,LoggerConfig
|
||||
import json
|
||||
@@ -41,31 +42,32 @@ RETURNS json AS $$
|
||||
plpy.execute("SELECT cdb_dataservices_server._connect_to_redis('{0}')".format(username))
|
||||
redis_conn = GD["redis_connection_{0}".format(username)]['redis_metrics_connection']
|
||||
plpy.execute("SELECT cdb_dataservices_server._get_obs_snapshot_config({0}, {1})".format(plpy.quote_nullable(username), plpy.quote_nullable(orgname)))
|
||||
user_obs_snapshot_config = GD["user_obs_snapshot_config_{0}".format(username)]
|
||||
user_obs_config = GD["user_obs_snapshot_config_{0}".format(username)]
|
||||
|
||||
plpy.execute("SELECT cdb_dataservices_server._get_logger_config()")
|
||||
logger_config = GD["logger_config"]
|
||||
logger = Logger(logger_config)
|
||||
quota_service = QuotaService(user_obs_snapshot_config, redis_conn)
|
||||
quota_service = QuotaService(user_obs_config, redis_conn)
|
||||
if not quota_service.check_user_quota():
|
||||
raise Exception('You have reached the limit of your quota')
|
||||
|
||||
try:
|
||||
obs_plan = plpy.prepare("SELECT cdb_dataservices_server._OBS_GetDemographicSnapshotJSON($1, $2, $3, $4, $5) as snapshot;", ["text", "text", "geometry(Geometry, 4326)", "text", "text"])
|
||||
result = plpy.execute(obs_plan, [username, orgname, geom, time_span, geometry_level])
|
||||
if result:
|
||||
quota_service.increment_success_service_use()
|
||||
return result[0]['snapshot']
|
||||
else:
|
||||
quota_service.increment_empty_service_use()
|
||||
return None
|
||||
except BaseException as e:
|
||||
import sys
|
||||
quota_service.increment_failed_service_use()
|
||||
logger.error('Error trying to obs_get_demographic_snapshot', sys.exc_info(), data={"username": username, "orgname": orgname})
|
||||
raise Exception('Error trying to obs_get_demographic_snapshot')
|
||||
finally:
|
||||
quota_service.increment_total_service_use()
|
||||
with metrics('obs_getdemographicsnapshot', user_obs_config, logger):
|
||||
try:
|
||||
obs_plan = plpy.prepare("SELECT cdb_dataservices_server._OBS_GetDemographicSnapshotJSON($1, $2, $3, $4, $5) as snapshot;", ["text", "text", "geometry(Geometry, 4326)", "text", "text"])
|
||||
result = plpy.execute(obs_plan, [username, orgname, geom, time_span, geometry_level])
|
||||
if result:
|
||||
quota_service.increment_success_service_use()
|
||||
return result[0]['snapshot']
|
||||
else:
|
||||
quota_service.increment_empty_service_use()
|
||||
return None
|
||||
except BaseException as e:
|
||||
import sys
|
||||
quota_service.increment_failed_service_use()
|
||||
logger.error('Error trying to obs_get_demographic_snapshot', sys.exc_info(), data={"username": username, "orgname": orgname})
|
raise Exception('Error trying to obs_get_demographic_snapshot')
finally:
quota_service.increment_total_service_use()
$$ LANGUAGE plpythonu;

CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_GetDemographicSnapshot(
@@ -86,41 +88,43 @@ CREATE OR REPLACE FUNCTION cdb_dataservices_server.OBS_GetDemographicSnapshot(
time_span TEXT DEFAULT NULL,
geometry_level TEXT DEFAULT NULL)
RETURNS SETOF JSON AS $$
from cartodb_services.metrics import metrics
from cartodb_services.metrics import QuotaService
from cartodb_services.tools import Logger,LoggerConfig

plpy.execute("SELECT cdb_dataservices_server._connect_to_redis('{0}')".format(username))
redis_conn = GD["redis_connection_{0}".format(username)]['redis_metrics_connection']
plpy.execute("SELECT cdb_dataservices_server._get_obs_snapshot_config({0}, {1})".format(plpy.quote_nullable(username), plpy.quote_nullable(orgname)))
user_obs_snapshot_config = GD["user_obs_snapshot_config_{0}".format(username)]
user_obs_config = GD["user_obs_snapshot_config_{0}".format(username)]

plpy.execute("SELECT cdb_dataservices_server._get_logger_config()")
logger_config = GD["logger_config"]
logger = Logger(logger_config)
quota_service = QuotaService(user_obs_snapshot_config, redis_conn)
quota_service = QuotaService(user_obs_config, redis_conn)
if not quota_service.check_user_quota():
raise Exception('You have reached the limit of your quota')

try:
obs_plan = plpy.prepare("SELECT cdb_dataservices_server._OBS_GetDemographicSnapshot($1, $2, $3, $4, $5) as snapshot;", ["text", "text", "geometry(Geometry, 4326)", "text", "text"])
result = plpy.execute(obs_plan, [username, orgname, geom, time_span, geometry_level])
if result:
resp = []
for element in result:
value = element['snapshot']
resp.append(value)
quota_service.increment_success_service_use()
return resp
else:
quota_service.increment_empty_service_use()
return []
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to obs_get_demographic_snapshot', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to obs_get_demographic_snapshot')
finally:
quota_service.increment_total_service_use()
with metrics('obs_getdemographicsnapshot', user_obs_config, logger):
try:
obs_plan = plpy.prepare("SELECT cdb_dataservices_server._OBS_GetDemographicSnapshot($1, $2, $3, $4, $5) as snapshot;", ["text", "text", "geometry(Geometry, 4326)", "text", "text"])
result = plpy.execute(obs_plan, [username, orgname, geom, time_span, geometry_level])
if result:
resp = []
for element in result:
value = element['snapshot']
resp.append(value)
quota_service.increment_success_service_use()
return resp
else:
quota_service.increment_empty_service_use()
return []
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to obs_get_demographic_snapshot', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to obs_get_demographic_snapshot')
finally:
quota_service.increment_total_service_use()
$$ LANGUAGE plpythonu;
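The change above (and in the functions that follow) wraps each quota-checked backend call in a `with metrics(...)` block imported from the cartodb_services package. As a rough illustration of that pattern only, a minimal timing context manager could look like the sketch below; the real `metrics` implementation lives in cartodb_services and its payload is not shown here, so the logging call and fields are assumptions.

```python
# Minimal sketch, assuming metrics(event_name, service_config, logger) is a
# context manager that times the wrapped block; not the real implementation.
import time
import logging
from contextlib import contextmanager

@contextmanager
def metrics(event_name, service_config, logger):
    """Time the wrapped block and report the event name when it finishes."""
    start = time.time()
    try:
        yield
    finally:
        elapsed = time.time() - start
        logger.info("%s finished in %.3fs", event_name, elapsed)

# Usage mirroring the wrapped blocks in the diff; the config object is a stand-in.
logging.basicConfig(level=logging.INFO)
with metrics('obs_getdemographicsnapshot', object(), logging.getLogger("obs")):
    pass  # the quota-checked plpy call would go here
```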

CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_GetSegmentSnapshotJSON(
@@ -139,6 +143,7 @@ CREATE OR REPLACE FUNCTION cdb_dataservices_server.obs_get_segment_snapshot(
geom geometry(Geometry, 4326),
geometry_level TEXT DEFAULT NULL)
RETURNS json AS $$
from cartodb_services.metrics import metrics
from cartodb_services.metrics import QuotaService
from cartodb_services.tools import Logger,LoggerConfig
import json
@@ -146,31 +151,32 @@ RETURNS json AS $$
plpy.execute("SELECT cdb_dataservices_server._connect_to_redis('{0}')".format(username))
redis_conn = GD["redis_connection_{0}".format(username)]['redis_metrics_connection']
plpy.execute("SELECT cdb_dataservices_server._get_obs_snapshot_config({0}, {1})".format(plpy.quote_nullable(username), plpy.quote_nullable(orgname)))
user_obs_snapshot_config = GD["user_obs_snapshot_config_{0}".format(username)]
user_obs_config = GD["user_obs_snapshot_config_{0}".format(username)]

plpy.execute("SELECT cdb_dataservices_server._get_logger_config()")
logger_config = GD["logger_config"]
logger = Logger(logger_config)
quota_service = QuotaService(user_obs_snapshot_config, redis_conn)
quota_service = QuotaService(user_obs_config, redis_conn)
if not quota_service.check_user_quota():
raise Exception('You have reached the limit of your quota')

try:
obs_plan = plpy.prepare("SELECT cdb_dataservices_server._OBS_GetSegmentSnapshotJSON($1, $2, $3, $4) as snapshot;", ["text", "text", "geometry(Geometry, 4326)", "text"])
result = plpy.execute(obs_plan, [username, orgname, geom, geometry_level])
if result:
quota_service.increment_success_service_use()
return result[0]['snapshot']
else:
quota_service.increment_empty_service_use()
return None
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to obs_get_segment_snapshot', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to obs_get_segment_snapshot')
finally:
quota_service.increment_total_service_use()
with metrics('obs_getsegmentsnapshot', user_obs_config, logger):
try:
obs_plan = plpy.prepare("SELECT cdb_dataservices_server._OBS_GetSegmentSnapshotJSON($1, $2, $3, $4) as snapshot;", ["text", "text", "geometry(Geometry, 4326)", "text"])
result = plpy.execute(obs_plan, [username, orgname, geom, geometry_level])
if result:
quota_service.increment_success_service_use()
return result[0]['snapshot']
else:
quota_service.increment_empty_service_use()
return None
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to obs_get_segment_snapshot', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to obs_get_segment_snapshot')
finally:
quota_service.increment_total_service_use()
$$ LANGUAGE plpythonu;

CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_GetSegmentSnapshot(
@@ -189,41 +195,43 @@ CREATE OR REPLACE FUNCTION cdb_dataservices_server.OBS_GetSegmentSnapshot(
geom geometry(Geometry, 4326),
geometry_level TEXT DEFAULT NULL)
RETURNS SETOF JSON AS $$
from cartodb_services.metrics import metrics
from cartodb_services.metrics import QuotaService
from cartodb_services.tools import Logger,LoggerConfig

plpy.execute("SELECT cdb_dataservices_server._connect_to_redis('{0}')".format(username))
redis_conn = GD["redis_connection_{0}".format(username)]['redis_metrics_connection']
plpy.execute("SELECT cdb_dataservices_server._get_obs_snapshot_config({0}, {1})".format(plpy.quote_nullable(username), plpy.quote_nullable(orgname)))
user_obs_snapshot_config = GD["user_obs_snapshot_config_{0}".format(username)]
user_obs_config = GD["user_obs_snapshot_config_{0}".format(username)]

plpy.execute("SELECT cdb_dataservices_server._get_logger_config()")
logger_config = GD["logger_config"]
logger = Logger(logger_config)
quota_service = QuotaService(user_obs_snapshot_config, redis_conn)
quota_service = QuotaService(user_obs_config, redis_conn)
if not quota_service.check_user_quota():
raise Exception('You have reached the limit of your quota')

try:
obs_plan = plpy.prepare("SELECT * FROM cdb_dataservices_server._OBS_GetSegmentSnapshot($1, $2, $3, $4) as snapshot;", ["text", "text", "geometry(Geometry, 4326)", "text"])
result = plpy.execute(obs_plan, [username, orgname, geom, geometry_level])
if result:
resp = []
for element in result:
value = element['snapshot']
resp.append(value)
quota_service.increment_success_service_use()
return resp
else:
quota_service.increment_empty_service_use()
return []
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to OBS_GetSegmentSnapshot', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to OBS_GetSegmentSnapshot')
finally:
quota_service.increment_total_service_use()
with metrics('obs_getsegmentsnapshot', user_obs_config, logger):
try:
obs_plan = plpy.prepare("SELECT * FROM cdb_dataservices_server._OBS_GetSegmentSnapshot($1, $2, $3, $4) as snapshot;", ["text", "text", "geometry(Geometry, 4326)", "text"])
result = plpy.execute(obs_plan, [username, orgname, geom, geometry_level])
if result:
resp = []
for element in result:
value = element['snapshot']
resp.append(value)
quota_service.increment_success_service_use()
return resp
else:
quota_service.increment_empty_service_use()
return []
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to OBS_GetSegmentSnapshot', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to OBS_GetSegmentSnapshot')
finally:
quota_service.increment_total_service_use()
$$ LANGUAGE plpythonu;

CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_GetMeasure(
@@ -248,6 +256,7 @@ CREATE OR REPLACE FUNCTION cdb_dataservices_server.OBS_GetMeasure(
boundary_id TEXT DEFAULT NULL,
time_span TEXT DEFAULT NULL)
RETURNS NUMERIC AS $$
from cartodb_services.metrics import metrics
from cartodb_services.metrics import QuotaService
from cartodb_services.tools import Logger,LoggerConfig

@@ -263,22 +272,23 @@ RETURNS NUMERIC AS $$
if not quota_service.check_user_quota():
raise Exception('You have reached the limit of your quota')

try:
obs_plan = plpy.prepare("SELECT cdb_dataservices_server._OBS_GetMeasure($1, $2, $3, $4, $5, $6, $7) as measure;", ["text", "text", "geometry(Geometry, 4326)", "text", "text", "text", "text"])
result = plpy.execute(obs_plan, [username, orgname, geom, measure_id, normalize, boundary_id, time_span])
if result:
quota_service.increment_success_service_use()
return result[0]['measure']
else:
quota_service.increment_empty_service_use()
return None
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to OBS_GetMeasure', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to OBS_GetMeasure')
finally:
quota_service.increment_total_service_use()
with metrics('obs_getmeasure', user_obs_config, logger):
try:
obs_plan = plpy.prepare("SELECT cdb_dataservices_server._OBS_GetMeasure($1, $2, $3, $4, $5, $6, $7) as measure;", ["text", "text", "geometry(Geometry, 4326)", "text", "text", "text", "text"])
result = plpy.execute(obs_plan, [username, orgname, geom, measure_id, normalize, boundary_id, time_span])
if result:
quota_service.increment_success_service_use()
return result[0]['measure']
else:
quota_service.increment_empty_service_use()
return None
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to OBS_GetMeasure', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to OBS_GetMeasure')
finally:
quota_service.increment_total_service_use()
$$ LANGUAGE plpythonu;

CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_GetCategory(
@@ -301,6 +311,7 @@ CREATE OR REPLACE FUNCTION cdb_dataservices_server.OBS_GetCategory(
boundary_id TEXT DEFAULT NULL,
time_span TEXT DEFAULT NULL)
RETURNS TEXT AS $$
from cartodb_services.metrics import metrics
from cartodb_services.metrics import QuotaService
from cartodb_services.tools import Logger,LoggerConfig

@@ -316,22 +327,23 @@ RETURNS TEXT AS $$
if not quota_service.check_user_quota():
raise Exception('You have reached the limit of your quota')

try:
obs_plan = plpy.prepare("SELECT cdb_dataservices_server._OBS_GetCategory($1, $2, $3, $4, $5, $6) as category;", ["text", "text", "geometry(Geometry, 4326)", "text", "text", "text"])
result = plpy.execute(obs_plan, [username, orgname, geom, category_id, boundary_id, time_span])
if result:
quota_service.increment_success_service_use()
return result[0]['category']
else:
quota_service.increment_empty_service_use()
return None
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to OBS_GetCategory', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to OBS_GetCategory')
finally:
quota_service.increment_total_service_use()
with metrics('obs_getcategory', user_obs_config, logger):
try:
obs_plan = plpy.prepare("SELECT cdb_dataservices_server._OBS_GetCategory($1, $2, $3, $4, $5, $6) as category;", ["text", "text", "geometry(Geometry, 4326)", "text", "text", "text"])
result = plpy.execute(obs_plan, [username, orgname, geom, category_id, boundary_id, time_span])
if result:
quota_service.increment_success_service_use()
return result[0]['category']
else:
quota_service.increment_empty_service_use()
return None
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to OBS_GetCategory', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to OBS_GetCategory')
finally:
quota_service.increment_total_service_use()
$$ LANGUAGE plpythonu;

CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_GetUSCensusMeasure(
@@ -356,6 +368,7 @@ CREATE OR REPLACE FUNCTION cdb_dataservices_server.OBS_GetUSCensusMeasure(
boundary_id TEXT DEFAULT NULL,
time_span TEXT DEFAULT NULL)
RETURNS NUMERIC AS $$
from cartodb_services.metrics import metrics
from cartodb_services.metrics import QuotaService
from cartodb_services.tools import Logger,LoggerConfig

@@ -371,22 +384,23 @@ RETURNS NUMERIC AS $$
if not quota_service.check_user_quota():
raise Exception('You have reached the limit of your quota')

try:
obs_plan = plpy.prepare("SELECT cdb_dataservices_server._OBS_GetUSCensusMeasure($1, $2, $3, $4, $5, $6, $7) as census_measure;", ["text", "text", "geometry(Geometry, 4326)", "text", "text", "text", "text"])
result = plpy.execute(obs_plan, [username, orgname, geom, name, normalize, boundary_id, time_span])
if result:
quota_service.increment_success_service_use()
return result[0]['census_measure']
else:
quota_service.increment_empty_service_use()
return None
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to OBS_GetUSCensusMeasure', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to OBS_GetUSCensusMeasure')
finally:
quota_service.increment_total_service_use()
with metrics('obs_getuscensusmeasure', user_obs_config, logger):
try:
obs_plan = plpy.prepare("SELECT cdb_dataservices_server._OBS_GetUSCensusMeasure($1, $2, $3, $4, $5, $6, $7) as census_measure;", ["text", "text", "geometry(Geometry, 4326)", "text", "text", "text", "text"])
result = plpy.execute(obs_plan, [username, orgname, geom, name, normalize, boundary_id, time_span])
if result:
quota_service.increment_success_service_use()
return result[0]['census_measure']
else:
quota_service.increment_empty_service_use()
return None
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to OBS_GetUSCensusMeasure', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to OBS_GetUSCensusMeasure')
finally:
quota_service.increment_total_service_use()
$$ LANGUAGE plpythonu;

CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_GetUSCensusCategory(
@@ -409,6 +423,7 @@ CREATE OR REPLACE FUNCTION cdb_dataservices_server.OBS_GetUSCensusCategory(
boundary_id TEXT DEFAULT NULL,
time_span TEXT DEFAULT NULL)
RETURNS TEXT AS $$
from cartodb_services.metrics import metrics
from cartodb_services.metrics import QuotaService
from cartodb_services.tools import Logger,LoggerConfig

@@ -424,22 +439,23 @@ RETURNS TEXT AS $$
if not quota_service.check_user_quota():
raise Exception('You have reached the limit of your quota')

try:
obs_plan = plpy.prepare("SELECT cdb_dataservices_server._OBS_GetUSCensusCategory($1, $2, $3, $4, $5, $6) as census_category;", ["text", "text", "geometry(Geometry, 4326)", "text", "text", "text"])
result = plpy.execute(obs_plan, [username, orgname, geom, name, boundary_id, time_span])
if result:
quota_service.increment_success_service_use()
return result[0]['census_category']
else:
quota_service.increment_empty_service_use()
return None
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to OBS_GetUSCensusCategory', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to OBS_GetUSCensusCategory')
finally:
quota_service.increment_total_service_use()
with metrics('obs_getuscensuscategory', user_obs_config, logger):
try:
obs_plan = plpy.prepare("SELECT cdb_dataservices_server._OBS_GetUSCensusCategory($1, $2, $3, $4, $5, $6) as census_category;", ["text", "text", "geometry(Geometry, 4326)", "text", "text", "text"])
result = plpy.execute(obs_plan, [username, orgname, geom, name, boundary_id, time_span])
if result:
quota_service.increment_success_service_use()
return result[0]['census_category']
else:
quota_service.increment_empty_service_use()
return None
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to OBS_GetUSCensusCategory', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to OBS_GetUSCensusCategory')
finally:
quota_service.increment_total_service_use()
$$ LANGUAGE plpythonu;

CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_GetPopulation(
@@ -462,6 +478,7 @@ CREATE OR REPLACE FUNCTION cdb_dataservices_server.OBS_GetPopulation(
boundary_id TEXT DEFAULT NULL,
time_span TEXT DEFAULT NULL)
RETURNS NUMERIC AS $$
from cartodb_services.metrics import metrics
from cartodb_services.metrics import QuotaService
from cartodb_services.tools import Logger,LoggerConfig

@@ -477,22 +494,23 @@ RETURNS NUMERIC AS $$
if not quota_service.check_user_quota():
raise Exception('You have reached the limit of your quota')

try:
obs_plan = plpy.prepare("SELECT cdb_dataservices_server._OBS_GetPopulation($1, $2, $3, $4, $5, $6) as population;", ["text", "text", "geometry(Geometry, 4326)", "text", "text", "text"])
result = plpy.execute(obs_plan, [username, orgname, geom, normalize, boundary_id, time_span])
if result:
quota_service.increment_success_service_use()
return result[0]['population']
else:
quota_service.increment_empty_service_use()
return None
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to OBS_GetPopulation', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to OBS_GetPopulation')
finally:
quota_service.increment_total_service_use()
with metrics('obs_getpopulation', user_obs_config, logger):
try:
obs_plan = plpy.prepare("SELECT cdb_dataservices_server._OBS_GetPopulation($1, $2, $3, $4, $5, $6) as population;", ["text", "text", "geometry(Geometry, 4326)", "text", "text", "text"])
result = plpy.execute(obs_plan, [username, orgname, geom, normalize, boundary_id, time_span])
if result:
quota_service.increment_success_service_use()
return result[0]['population']
else:
quota_service.increment_empty_service_use()
return None
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to OBS_GetPopulation', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to OBS_GetPopulation')
finally:
quota_service.increment_total_service_use()
$$ LANGUAGE plpythonu;

CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_GetMeasureById(
@@ -515,6 +533,7 @@ CREATE OR REPLACE FUNCTION cdb_dataservices_server.OBS_GetMeasureById(
boundary_id TEXT,
time_span TEXT DEFAULT NULL)
RETURNS NUMERIC AS $$
from cartodb_services.metrics import metrics
from cartodb_services.metrics import QuotaService
from cartodb_services.tools import Logger,LoggerConfig

@@ -530,20 +549,21 @@ RETURNS NUMERIC AS $$
if not quota_service.check_user_quota():
raise Exception('You have reached the limit of your quota')

try:
obs_plan = plpy.prepare("SELECT cdb_dataservices_server._OBS_GetMeasureById($1, $2, $3, $4, $5, $6) as measure;", ["text", "text", "text", "text", "text", "text"])
result = plpy.execute(obs_plan, [username, orgname, geom_ref, measure_id, boundary_id, time_span])
if result:
quota_service.increment_success_service_use()
return result[0]['measure']
else:
quota_service.increment_empty_service_use()
return None
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to OBS_GetMeasureById', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to OBS_GetMeasureById')
finally:
quota_service.increment_total_service_use()
with metrics('obs_getmeasurebyid', user_obs_config, logger):
try:
obs_plan = plpy.prepare("SELECT cdb_dataservices_server._OBS_GetMeasureById($1, $2, $3, $4, $5, $6) as measure;", ["text", "text", "text", "text", "text", "text"])
result = plpy.execute(obs_plan, [username, orgname, geom_ref, measure_id, boundary_id, time_span])
if result:
quota_service.increment_success_service_use()
return result[0]['measure']
else:
quota_service.increment_empty_service_use()
return None
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to OBS_GetMeasureById', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to OBS_GetMeasureById')
finally:
quota_service.increment_total_service_use()
$$ LANGUAGE plpythonu;

@@ -14,6 +14,7 @@ CREATE OR REPLACE FUNCTION cdb_dataservices_server.OBS_Search(
search_term TEXT,
relevant_boundary TEXT DEFAULT NULL)
RETURNS TABLE(id text, description text, name text, aggregate text, source text) AS $$
from cartodb_services.metrics import metrics
from cartodb_services.metrics import QuotaService
from cartodb_services.tools import Logger,LoggerConfig

@@ -29,30 +30,31 @@ RETURNS TABLE(id text, description text, name text, aggregate text, source text)
if not quota_service.check_user_quota():
raise Exception('You have reached the limit of your quota')

try:
obs_plan = plpy.prepare("SELECT * FROM cdb_dataservices_server._OBS_Search($1, $2, $3, $4);", ["text", "text", "text", "text"])
result = plpy.execute(obs_plan, [username, orgname, search_term, relevant_boundary])
if result:
resp = []
for element in result:
id = element['id']
description = element['description']
name = element['name']
aggregate = element['aggregate']
source = element['source']
resp.append([id, description, name, aggregate, source])
quota_service.increment_success_service_use()
return resp
else:
quota_service.increment_empty_service_use()
return [None, None, None, None, None]
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to OBS_Search', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to OBS_Search')
finally:
quota_service.increment_total_service_use()
with metrics('obs_search', user_obs_snapshot_config, logger):
try:
obs_plan = plpy.prepare("SELECT * FROM cdb_dataservices_server._OBS_Search($1, $2, $3, $4);", ["text", "text", "text", "text"])
result = plpy.execute(obs_plan, [username, orgname, search_term, relevant_boundary])
if result:
resp = []
for element in result:
id = element['id']
description = element['description']
name = element['name']
aggregate = element['aggregate']
source = element['source']
resp.append([id, description, name, aggregate, source])
quota_service.increment_success_service_use()
return resp
else:
quota_service.increment_empty_service_use()
return [None, None, None, None, None]
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to OBS_Search', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to OBS_Search')
finally:
quota_service.increment_total_service_use()
$$ LANGUAGE plpythonu;

CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_GetAvailableBoundaries(
@@ -71,6 +73,7 @@ CREATE OR REPLACE FUNCTION cdb_dataservices_server.OBS_GetAvailableBoundaries(
geom geometry(Geometry, 4326),
time_span TEXT DEFAULT NULL)
RETURNS TABLE(boundary_id text, description text, time_span text, tablename text) AS $$
from cartodb_services.metrics import metrics
from cartodb_services.metrics import QuotaService
from cartodb_services.tools import Logger,LoggerConfig

@@ -86,27 +89,28 @@ RETURNS TABLE(boundary_id text, description text, time_span text, tablename text
if not quota_service.check_user_quota():
raise Exception('You have reached the limit of your quota')

try:
obs_plan = plpy.prepare("SELECT * FROM cdb_dataservices_server._OBS_GetAvailableBoundaries($1, $2, $3, $4) as available_boundaries;", ["text", "text", "geometry(Geometry, 4326)", "text"])
result = plpy.execute(obs_plan, [username, orgname, geom, time_span])
if result:
resp = []
for element in result:
id = element['boundary_id']
description = element['description']
tspan = element['time_span']
tablename = element['tablename']
resp.append([id, description, tspan, tablename])
quota_service.increment_success_service_use()
return resp
else:
quota_service.increment_empty_service_use()
return []
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to OBS_GetMeasureById', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to OBS_GetMeasureById')
finally:
quota_service.increment_total_service_use()
with metrics('obs_getavailableboundaries', user_obs_snapshot_config, logger):
try:
obs_plan = plpy.prepare("SELECT * FROM cdb_dataservices_server._OBS_GetAvailableBoundaries($1, $2, $3, $4) as available_boundaries;", ["text", "text", "geometry(Geometry, 4326)", "text"])
result = plpy.execute(obs_plan, [username, orgname, geom, time_span])
if result:
resp = []
for element in result:
id = element['boundary_id']
description = element['description']
tspan = element['time_span']
tablename = element['tablename']
resp.append([id, description, tspan, tablename])
quota_service.increment_success_service_use()
return resp
else:
quota_service.increment_empty_service_use()
return []
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to OBS_GetMeasureById', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to OBS_GetMeasureById')
finally:
quota_service.increment_total_service_use()
$$ LANGUAGE plpythonu;

@@ -16,6 +16,7 @@ CREATE OR REPLACE FUNCTION cdb_dataservices_server.OBS_GetBoundary(
boundary_id TEXT,
time_span TEXT DEFAULT NULL)
RETURNS geometry(Geometry, 4326) AS $$
from cartodb_services.metrics import metrics
from cartodb_services.metrics import QuotaService
from cartodb_services.tools import Logger,LoggerConfig

@@ -31,22 +32,23 @@ RETURNS geometry(Geometry, 4326) AS $$
if not quota_service.check_user_quota():
raise Exception('You have reached the limit of your quota')

try:
obs_plan = plpy.prepare("SELECT cdb_dataservices_server._OBS_GetBoundary($1, $2, $3, $4) as boundary;", ["text", "text", "geometry(Point, 4326)", "text", "text"])
result = plpy.execute(obs_plan, [username, orgname, geom, boundary_id, time_span])
if result:
quota_service.increment_success_service_use()
return result[0]['boundary']
else:
quota_service.increment_empty_service_use()
return None
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to OBS_GetBoundary', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to OBS_GetBoundary')
finally:
quota_service.increment_total_service_use()
with metrics('obs_getboundary', user_obs_snapshot_config, logger):
try:
obs_plan = plpy.prepare("SELECT cdb_dataservices_server._OBS_GetBoundary($1, $2, $3, $4) as boundary;", ["text", "text", "geometry(Point, 4326)", "text", "text"])
result = plpy.execute(obs_plan, [username, orgname, geom, boundary_id, time_span])
if result:
quota_service.increment_success_service_use()
return result[0]['boundary']
else:
quota_service.increment_empty_service_use()
return None
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to OBS_GetBoundary', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to OBS_GetBoundary')
finally:
quota_service.increment_total_service_use()
$$ LANGUAGE plpythonu;

CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_GetBoundaryId(
@@ -67,6 +69,7 @@ CREATE OR REPLACE FUNCTION cdb_dataservices_server.OBS_GetBoundaryId(
boundary_id TEXT,
time_span TEXT DEFAULT NULL)
RETURNS TEXT AS $$
from cartodb_services.metrics import metrics
from cartodb_services.metrics import QuotaService
from cartodb_services.tools import Logger,LoggerConfig

@@ -82,22 +85,23 @@ RETURNS TEXT AS $$
if not quota_service.check_user_quota():
raise Exception('You have reached the limit of your quota')

try:
obs_plan = plpy.prepare("SELECT cdb_dataservices_server._OBS_GetBoundaryId($1, $2, $3, $4, $5) as boundary;", ["text", "text", "geometry(Point, 4326)", "text", "text"])
result = plpy.execute(obs_plan, [username, orgname, geom, boundary_id, time_span])
if result:
quota_service.increment_success_service_use()
return result[0]['boundary']
else:
quota_service.increment_empty_service_use()
return None
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to OBS_GetBoundaryId', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to OBS_GetBoundaryId')
finally:
quota_service.increment_total_service_use()
with metrics('obs_getboundaryid', user_obs_snapshot_config, logger):
try:
obs_plan = plpy.prepare("SELECT cdb_dataservices_server._OBS_GetBoundaryId($1, $2, $3, $4, $5) as boundary;", ["text", "text", "geometry(Point, 4326)", "text", "text"])
result = plpy.execute(obs_plan, [username, orgname, geom, boundary_id, time_span])
if result:
quota_service.increment_success_service_use()
return result[0]['boundary']
else:
quota_service.increment_empty_service_use()
return None
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to OBS_GetBoundaryId', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to OBS_GetBoundaryId')
finally:
quota_service.increment_total_service_use()
$$ LANGUAGE plpythonu;

CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_GetBoundaryById(
@@ -118,6 +122,7 @@ CREATE OR REPLACE FUNCTION cdb_dataservices_server.OBS_GetBoundaryById(
boundary_id TEXT,
time_span TEXT DEFAULT NULL)
RETURNS geometry(Geometry, 4326) AS $$
from cartodb_services.metrics import metrics
from cartodb_services.metrics import QuotaService
from cartodb_services.tools import Logger,LoggerConfig

@@ -133,22 +138,23 @@ RETURNS geometry(Geometry, 4326) AS $$
if not quota_service.check_user_quota():
raise Exception('You have reached the limit of your quota')

try:
obs_plan = plpy.prepare("SELECT cdb_dataservices_server._OBS_GetBoundaryById($1, $2, $3, $4, $5) as boundary;", ["text", "text", "text", "text", "text"])
result = plpy.execute(obs_plan, [username, orgname, geometry_id, boundary_id, time_span])
if result:
quota_service.increment_success_service_use()
return result[0]['boundary']
else:
quota_service.increment_empty_service_use()
return None
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to OBS_GetBoundaryById', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to OBS_GetBoundaryById')
finally:
quota_service.increment_total_service_use()
with metrics('obs_getboundarybyid', user_obs_snapshot_config, logger):
try:
obs_plan = plpy.prepare("SELECT cdb_dataservices_server._OBS_GetBoundaryById($1, $2, $3, $4, $5) as boundary;", ["text", "text", "text", "text", "text"])
result = plpy.execute(obs_plan, [username, orgname, geometry_id, boundary_id, time_span])
if result:
quota_service.increment_success_service_use()
return result[0]['boundary']
else:
quota_service.increment_empty_service_use()
return None
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to OBS_GetBoundaryById', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to OBS_GetBoundaryById')
finally:
quota_service.increment_total_service_use()
$$ LANGUAGE plpythonu;

CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_GetBoundariesByGeometry(
@@ -171,6 +177,7 @@ CREATE OR REPLACE FUNCTION cdb_dataservices_server.OBS_GetBoundariesByGeometry(
time_span TEXT DEFAULT NULL,
overlap_type TEXT DEFAULT NULL)
RETURNS TABLE(the_geom geometry, geom_refs text) AS $$
from cartodb_services.metrics import metrics
from cartodb_services.metrics import QuotaService
from cartodb_services.tools import Logger,LoggerConfig

@@ -186,27 +193,28 @@ RETURNS TABLE(the_geom geometry, geom_refs text) AS $$
if not quota_service.check_user_quota():
raise Exception('You have reached the limit of your quota')

try:
obs_plan = plpy.prepare("SELECT * FROM cdb_dataservices_server._OBS_GetBoundariesByGeometry($1, $2, $3, $4, $5, $6) as boundary;", ["text", "text", "geometry(Point, 4326)", "text", "text", "text"])
result = plpy.execute(obs_plan, [username, orgname, geom, boundary_id, time_span, overlap_type])
if result:
resp = []
for element in result:
the_geom = element['the_geom']
geom_refs = element['geom_refs']
resp.append([the_geom, geom_refs])
quota_service.increment_success_service_use()
return resp
else:
quota_service.increment_empty_service_use()
return []
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to OBS_GetBoundariesByGeometry', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to OBS_GetBoundariesByGeometry')
finally:
quota_service.increment_total_service_use()
with metrics('obs_getboundariesbygeometry', user_obs_snapshot_config, logger):
try:
obs_plan = plpy.prepare("SELECT * FROM cdb_dataservices_server._OBS_GetBoundariesByGeometry($1, $2, $3, $4, $5, $6) as boundary;", ["text", "text", "geometry(Point, 4326)", "text", "text", "text"])
result = plpy.execute(obs_plan, [username, orgname, geom, boundary_id, time_span, overlap_type])
if result:
resp = []
for element in result:
the_geom = element['the_geom']
geom_refs = element['geom_refs']
resp.append([the_geom, geom_refs])
quota_service.increment_success_service_use()
return resp
else:
quota_service.increment_empty_service_use()
return []
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to OBS_GetBoundariesByGeometry', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to OBS_GetBoundariesByGeometry')
finally:
quota_service.increment_total_service_use()
$$ LANGUAGE plpythonu;

CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_GetBoundariesByPointAndRadius(
@@ -231,6 +239,7 @@ CREATE OR REPLACE FUNCTION cdb_dataservices_server.OBS_GetBoundariesByPointAndRa
time_span TEXT DEFAULT NULL,
overlap_type TEXT DEFAULT NULL)
RETURNS TABLE(the_geom geometry, geom_refs text) AS $$
from cartodb_services.metrics import metrics
from cartodb_services.metrics import QuotaService
from cartodb_services.tools import Logger,LoggerConfig

@@ -246,27 +255,28 @@ RETURNS TABLE(the_geom geometry, geom_refs text) AS $$
if not quota_service.check_user_quota():
raise Exception('You have reached the limit of your quota')

try:
obs_plan = plpy.prepare("SELECT * FROM cdb_dataservices_server._OBS_GetBoundariesByPointAndRadius($1, $2, $3, $4, $5, $6, $7) as boundary;", ["text", "text", "geometry(Point, 4326)", "numeric", "text", "text", "text"])
result = plpy.execute(obs_plan, [username, orgname, geom, radius, boundary_id, time_span, overlap_type])
if result:
resp = []
for element in result:
the_geom = element['the_geom']
geom_refs = element['geom_refs']
resp.append([the_geom, geom_refs])
quota_service.increment_success_service_use()
return resp
else:
quota_service.increment_empty_service_use()
return []
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to OBS_GetBoundariesByPointAndRadius', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to OBS_GetBoundariesByPointAndRadius')
finally:
quota_service.increment_total_service_use()
with metrics('obs_getboundariesbypointandradius', user_obs_snapshot_config, logger):
try:
obs_plan = plpy.prepare("SELECT * FROM cdb_dataservices_server._OBS_GetBoundariesByPointAndRadius($1, $2, $3, $4, $5, $6, $7) as boundary;", ["text", "text", "geometry(Point, 4326)", "numeric", "text", "text", "text"])
result = plpy.execute(obs_plan, [username, orgname, geom, radius, boundary_id, time_span, overlap_type])
if result:
resp = []
for element in result:
the_geom = element['the_geom']
geom_refs = element['geom_refs']
resp.append([the_geom, geom_refs])
quota_service.increment_success_service_use()
return resp
else:
quota_service.increment_empty_service_use()
return []
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to OBS_GetBoundariesByPointAndRadius', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to OBS_GetBoundariesByPointAndRadius')
finally:
quota_service.increment_total_service_use()
$$ LANGUAGE plpythonu;

CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_GetPointsByGeometry(
@@ -289,6 +299,7 @@ CREATE OR REPLACE FUNCTION cdb_dataservices_server.OBS_GetPointsByGeometry(
time_span TEXT DEFAULT NULL,
overlap_type TEXT DEFAULT NULL)
RETURNS TABLE(the_geom geometry, geom_refs text) AS $$
from cartodb_services.metrics import metrics
from cartodb_services.metrics import QuotaService
from cartodb_services.tools import Logger,LoggerConfig

@@ -304,27 +315,28 @@ RETURNS TABLE(the_geom geometry, geom_refs text) AS $$
if not quota_service.check_user_quota():
raise Exception('You have reached the limit of your quota')

try:
obs_plan = plpy.prepare("SELECT * FROM cdb_dataservices_server._OBS_GetPointsByGeometry($1, $2, $3, $4, $5, $6) as boundary;", ["text", "text", "geometry(Point, 4326)", "text", "text", "text"])
result = plpy.execute(obs_plan, [username, orgname, geom, boundary_id, time_span, overlap_type])
if result:
resp = []
for element in result:
the_geom = element['the_geom']
geom_refs = element['geom_refs']
resp.append([the_geom, geom_refs])
quota_service.increment_success_service_use()
return resp
else:
quota_service.increment_empty_service_use()
return []
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to OBS_GetPointsByGeometry', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to OBS_GetPointsByGeometry')
finally:
quota_service.increment_total_service_use()
with metrics('obs_getpointsbygeometry', user_obs_snapshot_config, logger):
try:
obs_plan = plpy.prepare("SELECT * FROM cdb_dataservices_server._OBS_GetPointsByGeometry($1, $2, $3, $4, $5, $6) as boundary;", ["text", "text", "geometry(Point, 4326)", "text", "text", "text"])
result = plpy.execute(obs_plan, [username, orgname, geom, boundary_id, time_span, overlap_type])
if result:
resp = []
for element in result:
the_geom = element['the_geom']
geom_refs = element['geom_refs']
resp.append([the_geom, geom_refs])
quota_service.increment_success_service_use()
return resp
else:
quota_service.increment_empty_service_use()
return []
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to OBS_GetPointsByGeometry', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to OBS_GetPointsByGeometry')
finally:
quota_service.increment_total_service_use()
$$ LANGUAGE plpythonu;

CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_GetPointsByPointAndRadius(
@@ -349,6 +361,7 @@ CREATE OR REPLACE FUNCTION cdb_dataservices_server.OBS_GetPointsByPointAndRadius
time_span TEXT DEFAULT NULL,
overlap_type TEXT DEFAULT NULL)
RETURNS TABLE(the_geom geometry, geom_refs text) AS $$
from cartodb_services.metrics import metrics
from cartodb_services.metrics import QuotaService
from cartodb_services.tools import Logger,LoggerConfig

@@ -364,25 +377,26 @@ RETURNS TABLE(the_geom geometry, geom_refs text) AS $$
if not quota_service.check_user_quota():
raise Exception('You have reached the limit of your quota')

try:
obs_plan = plpy.prepare("SELECT * FROM cdb_dataservices_server._OBS_GetPointsByPointAndRadius($1, $2, $3, $4, $5, $6, $7) as boundary;", ["text", "text", "geometry(Point, 4326)", "numeric", "text", "text", "text"])
result = plpy.execute(obs_plan, [username, orgname, geom, radius, boundary_id, time_span, overlap_type])
if result:
resp = []
for element in result:
the_geom = element['the_geom']
geom_refs = element['geom_refs']
resp.append([the_geom, geom_refs])
quota_service.increment_success_service_use()
return resp
else:
quota_service.increment_empty_service_use()
return None
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to OBS_GetPointsByPointAndRadius', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to OBS_GetPointsByPointAndRadius')
finally:
quota_service.increment_total_service_use()
with metrics('obs_getpointsbypointandradius', user_obs_snapshot_config, logger):
try:
obs_plan = plpy.prepare("SELECT * FROM cdb_dataservices_server._OBS_GetPointsByPointAndRadius($1, $2, $3, $4, $5, $6, $7) as boundary;", ["text", "text", "geometry(Point, 4326)", "numeric", "text", "text", "text"])
result = plpy.execute(obs_plan, [username, orgname, geom, radius, boundary_id, time_span, overlap_type])
if result:
resp = []
for element in result:
the_geom = element['the_geom']
geom_refs = element['geom_refs']
resp.append([the_geom, geom_refs])
quota_service.increment_success_service_use()
return resp
else:
quota_service.increment_empty_service_use()
return None
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to OBS_GetPointsByPointAndRadius', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to OBS_GetPointsByPointAndRadius')
finally:
quota_service.increment_total_service_use()
$$ LANGUAGE plpythonu;

@@ -2,34 +2,34 @@ CREATE TYPE cdb_dataservices_server.ds_fdw_metadata as (schemaname text, tabname

CREATE TYPE cdb_dataservices_server.ds_return_metadata as (colnames text[], coltypes text[]);

CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_ConnectUserTable(username text, orgname text, user_db_role text, input_schema text, dbname text, table_name text)
CREATE OR REPLACE FUNCTION cdb_dataservices_server._DST_ConnectUserTable(username text, orgname text, user_db_role text, input_schema text, dbname text, table_name text)
RETURNS cdb_dataservices_server.ds_fdw_metadata AS $$
host_addr = plpy.execute("SELECT split_part(inet_client_addr()::text, '/', 1) as user_host")[0]['user_host']
return plpy.execute("SELECT * FROM cdb_dataservices_server.__OBS_ConnectUserTable({username}::text, {orgname}::text, {user_db_role}::text, {schema}::text, {dbname}::text, {host_addr}::text, {table_name}::text)"
return plpy.execute("SELECT * FROM cdb_dataservices_server.__DST_ConnectUserTable({username}::text, {orgname}::text, {user_db_role}::text, {schema}::text, {dbname}::text, {host_addr}::text, {table_name}::text)"
.format(username=plpy.quote_nullable(username), orgname=plpy.quote_nullable(orgname), user_db_role=plpy.quote_literal(user_db_role), schema=plpy.quote_literal(input_schema), dbname=plpy.quote_literal(dbname), table_name=plpy.quote_literal(table_name), host_addr=plpy.quote_literal(host_addr))
)[0]
$$ LANGUAGE plpythonu;

CREATE OR REPLACE FUNCTION cdb_dataservices_server.__OBS_ConnectUserTable(username text, orgname text, user_db_role text, input_schema text, dbname text, host_addr text, table_name text)
CREATE OR REPLACE FUNCTION cdb_dataservices_server.__DST_ConnectUserTable(username text, orgname text, user_db_role text, input_schema text, dbname text, host_addr text, table_name text)
RETURNS cdb_dataservices_server.ds_fdw_metadata AS $$
CONNECT cdb_dataservices_server._obs_server_conn_str(username, orgname);
TARGET cdb_observatory._OBS_ConnectUserTable;
$$ LANGUAGE plproxy;

CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_GetReturnMetadata(username text, orgname text, function_name text, params json)
CREATE OR REPLACE FUNCTION cdb_dataservices_server._DST_GetReturnMetadata(username text, orgname text, function_name text, params json)
RETURNS cdb_dataservices_server.ds_return_metadata AS $$
CONNECT cdb_dataservices_server._obs_server_conn_str(username, orgname);
TARGET cdb_observatory._OBS_GetReturnMetadata;
$$ LANGUAGE plproxy;

CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_FetchJoinFdwTableData(username text, orgname text, table_schema text, table_name text, function_name text, params json)
CREATE OR REPLACE FUNCTION cdb_dataservices_server._DST_FetchJoinFdwTableData(username text, orgname text, table_schema text, table_name text, function_name text, params json)
RETURNS SETOF record AS $$
CONNECT cdb_dataservices_server._obs_server_conn_str(username, orgname);
TARGET cdb_observatory._OBS_FetchJoinFdwTableData;
$$ LANGUAGE plproxy;


CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_DisconnectUserTable(username text, orgname text, table_schema text, table_name text, servername text)
CREATE OR REPLACE FUNCTION cdb_dataservices_server._DST_DisconnectUserTable(username text, orgname text, table_schema text, table_name text, servername text)
RETURNS boolean AS $$
CONNECT cdb_dataservices_server._obs_server_conn_str(username, orgname);
TARGET cdb_observatory._OBS_DisconnectUserTable;

@@ -10,6 +10,12 @@ RETURNS boolean AS $$
return True
$$ LANGUAGE plpythonu SECURITY DEFINER;

-- This is done in order to avoid an undesired dependency on the cartodb extension
CREATE OR REPLACE FUNCTION cdb_dataservices_server.cdb_conf_getconf(input_key text)
RETURNS JSON AS $$
SELECT VALUE FROM cartodb.cdb_conf WHERE key = input_key;
$$ LANGUAGE SQL STABLE SECURITY DEFINER;
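The helper just added reads a JSON value for a given key straight from cartodb.cdb_conf, so the server extension can fetch configuration without calling into the cartodb extension's own API. As a purely illustrative client-side sketch of how such a function might be queried, the snippet below uses psycopg2; the DSN and the 'logger_conf' key are placeholders, not values taken from this repository.

```python
# Illustrative only: querying cdb_conf_getconf from a client session.
# Assumptions: connection parameters and the config key are hypothetical.
import json
import psycopg2

conn = psycopg2.connect("dbname=dataservices_db user=postgres")
with conn, conn.cursor() as cur:
    cur.execute("SELECT cdb_dataservices_server.cdb_conf_getconf(%s)", ("logger_conf",))
    row = cur.fetchone()
    config = row[0] if row else None  # psycopg2 decodes the json column to a dict
    print(json.dumps(config, indent=2) if config is not None else "key not set")
```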

CREATE OR REPLACE FUNCTION cdb_dataservices_server._get_geocoder_config(username text, orgname text, provider text DEFAULT NULL)
RETURNS boolean AS $$
cache_key = "user_geocoder_config_{0}".format(username)

@@ -1,22 +1,28 @@
-- Geocodes a street address given a searchtext and a state and/or country
CREATE OR REPLACE FUNCTION cdb_dataservices_server.cdb_geocode_street_point(username TEXT, orgname TEXT, searchtext TEXT, city TEXT DEFAULT NULL, state_province TEXT DEFAULT NULL, country TEXT DEFAULT NULL)
RETURNS Geometry AS $$
from cartodb_services.metrics import metrics
from cartodb_services.tools import Logger,LoggerConfig
plpy.execute("SELECT cdb_dataservices_server._connect_to_redis('{0}')".format(username))
redis_conn = GD["redis_connection_{0}".format(username)]['redis_metrics_connection']
plpy.execute("SELECT cdb_dataservices_server._get_geocoder_config({0}, {1})".format(plpy.quote_nullable(username), plpy.quote_nullable(orgname)))
user_geocoder_config = GD["user_geocoder_config_{0}".format(username)]
plpy.execute("SELECT cdb_dataservices_server._get_logger_config()")
logger_config = GD["logger_config"]
logger = Logger(logger_config)

if user_geocoder_config.heremaps_geocoder:
here_plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_here_geocode_street_point($1, $2, $3, $4, $5, $6) as point; ", ["text", "text", "text", "text", "text", "text"])
return plpy.execute(here_plan, [username, orgname, searchtext, city, state_province, country], 1)[0]['point']
elif user_geocoder_config.google_geocoder:
google_plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_google_geocode_street_point($1, $2, $3, $4, $5, $6) as point; ", ["text", "text", "text", "text", "text", "text"])
return plpy.execute(google_plan, [username, orgname, searchtext, city, state_province, country], 1)[0]['point']
elif user_geocoder_config.mapzen_geocoder:
mapzen_plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_mapzen_geocode_street_point($1, $2, $3, $4, $5, $6) as point; ", ["text", "text", "text", "text", "text", "text"])
return plpy.execute(mapzen_plan, [username, orgname, searchtext, city, state_province, country], 1)[0]['point']
else:
raise Exception('Requested geocoder is not available')
with metrics('cdb_geocode_street_point', user_geocoder_config, logger):
if user_geocoder_config.heremaps_geocoder:
here_plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_here_geocode_street_point($1, $2, $3, $4, $5, $6) as point; ", ["text", "text", "text", "text", "text", "text"])
return plpy.execute(here_plan, [username, orgname, searchtext, city, state_province, country], 1)[0]['point']
elif user_geocoder_config.google_geocoder:
google_plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_google_geocode_street_point($1, $2, $3, $4, $5, $6) as point; ", ["text", "text", "text", "text", "text", "text"])
return plpy.execute(google_plan, [username, orgname, searchtext, city, state_province, country], 1)[0]['point']
elif user_geocoder_config.mapzen_geocoder:
mapzen_plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_mapzen_geocode_street_point($1, $2, $3, $4, $5, $6) as point; ", ["text", "text", "text", "text", "text", "text"])
return plpy.execute(mapzen_plan, [username, orgname, searchtext, city, state_province, country], 1)[0]['point']
else:
raise Exception('Requested geocoder is not available')

$$ LANGUAGE plpythonu;
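cdb_geocode_street_point dispatches to the HERE, Google, or Mapzen backend depending on which provider is enabled in the user's geocoder configuration, raising if none is available. The standalone sketch below only illustrates that dispatch-by-config branching; the config object and the returned tuples are stand-ins, not the real cartodb_services classes or the plpy calls made above.

```python
# Sketch of the provider dispatch pattern, under assumed stand-in types.
from collections import namedtuple

GeocoderConfig = namedtuple(
    "GeocoderConfig", ["heremaps_geocoder", "google_geocoder", "mapzen_geocoder"]
)

def geocode_street_point(config, searchtext, city=None, state=None, country=None):
    if config.heremaps_geocoder:
        return ("here", searchtext)    # would call _cdb_here_geocode_street_point
    elif config.google_geocoder:
        return ("google", searchtext)  # would call _cdb_google_geocode_street_point
    elif config.mapzen_geocoder:
        return ("mapzen", searchtext)  # would call _cdb_mapzen_geocode_street_point
    raise Exception("Requested geocoder is not available")

print(geocode_street_point(GeocoderConfig(False, False, True), "Plaza Mayor, Madrid"))
```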
|
||||
|
||||
@@ -137,23 +143,38 @@ $$ LANGUAGE plpythonu;
|
||||
|
||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server._cdb_mapzen_geocode_street_point(username TEXT, orgname TEXT, searchtext TEXT, city TEXT DEFAULT NULL, state_province TEXT DEFAULT NULL, country TEXT DEFAULT NULL)
|
||||
RETURNS Geometry AS $$
|
||||
import cartodb_services
|
||||
cartodb_services.init(plpy, GD)
|
||||
from cartodb_services.mapzen import MapzenGeocoder
|
||||
from cartodb_services.mapzen.types import country_to_iso3
|
||||
from cartodb_services.metrics import QuotaService
|
||||
from cartodb_services.tools import Logger,LoggerConfig
|
||||
from cartodb_services.tools import Logger
|
||||
from cartodb_services.refactor.tools.logger import LoggerConfigBuilder
|
||||
from cartodb_services.refactor.service.mapzen_geocoder_config import MapzenGeocoderConfigBuilder
|
||||
from cartodb_services.refactor.core.environment import ServerEnvironmentBuilder
|
||||
from cartodb_services.refactor.backend.server_config import ServerConfigBackendFactory
|
||||
from cartodb_services.refactor.backend.user_config import UserConfigBackendFactory
|
||||
from cartodb_services.refactor.backend.org_config import OrgConfigBackendFactory
|
||||
from cartodb_services.refactor.backend.redis_metrics_connection import RedisMetricsConnectionFactory
|
||||
|
||||
redis_conn = GD["redis_connection_{0}".format(username)]['redis_metrics_connection']
|
||||
user_geocoder_config = GD["user_geocoder_config_{0}".format(username)]
|
||||
server_config_backend = ServerConfigBackendFactory().get()
|
||||
environment = ServerEnvironmentBuilder(server_config_backend).get()
|
||||
user_config_backend = UserConfigBackendFactory(username, environment, server_config_backend).get()
|
||||
org_config_backend = OrgConfigBackendFactory(orgname, environment, server_config_backend).get()
|
||||
|
||||
plpy.execute("SELECT cdb_dataservices_server._get_logger_config()")
|
||||
logger_config = GD["logger_config"]
|
||||
logger_config = LoggerConfigBuilder(environment, server_config_backend).get()
|
||||
logger = Logger(logger_config)
|
||||
quota_service = QuotaService(user_geocoder_config, redis_conn)
|
||||
|
||||
mapzen_geocoder_config = MapzenGeocoderConfigBuilder(server_config_backend, user_config_backend, org_config_backend, username, orgname).get()
|
||||
|
||||
redis_metrics_connection = RedisMetricsConnectionFactory(environment, server_config_backend).get()
|
||||
|
||||
quota_service = QuotaService(mapzen_geocoder_config, redis_metrics_connection)
|
||||
if not quota_service.check_user_quota():
|
||||
raise Exception('You have reached the limit of your quota')
|
||||
|
||||
try:
|
||||
geocoder = MapzenGeocoder(user_geocoder_config.mapzen_api_key, logger)
|
||||
geocoder = MapzenGeocoder(mapzen_geocoder_config.mapzen_api_key, logger)
|
||||
country_iso3 = None
|
||||
if country:
|
||||
country_iso3 = country_to_iso3(country)
|
||||
|
||||
@@ -2,6 +2,7 @@ CREATE OR REPLACE FUNCTION cdb_dataservices_server.cdb_geocode_admin0_polygon(us
RETURNS Geometry AS $$
from cartodb_services.metrics import QuotaService
from cartodb_services.metrics import InternalGeocoderConfig
from cartodb_services.metrics import metrics
from cartodb_services.tools import Logger,LoggerConfig

plpy.execute("SELECT cdb_dataservices_server._connect_to_redis('{0}')".format(username))
@@ -13,23 +14,24 @@ RETURNS Geometry AS $$
logger_config = GD["logger_config"]
logger = Logger(logger_config)
quota_service = QuotaService(user_geocoder_config, redis_conn)
try:
plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_geocode_admin0_polygon(trim($1)) AS mypolygon", ["text"])
rv = plpy.execute(plan, [country_name], 1)
result = rv[0]["mypolygon"]
if result:
quota_service.increment_success_service_use()
return result
else:
quota_service.increment_empty_service_use()
return None
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to geocode admin0 polygon', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to geocode admin0 polygon')
finally:
quota_service.increment_total_service_use()
with metrics('cdb_geocode_admin0_polygon', user_geocoder_config, logger):
try:
plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_geocode_admin0_polygon(trim($1)) AS mypolygon", ["text"])
rv = plpy.execute(plan, [country_name], 1)
result = rv[0]["mypolygon"]
if result:
quota_service.increment_success_service_use()
return result
else:
quota_service.increment_empty_service_use()
return None
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to geocode admin0 polygon', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to geocode admin0 polygon')
finally:
quota_service.increment_total_service_use()
$$ LANGUAGE plpythonu;

@@ -1,6 +1,7 @@
---- cdb_geocode_admin1_polygon(admin1_name text)
CREATE OR REPLACE FUNCTION cdb_dataservices_server.cdb_geocode_admin1_polygon(username text, orgname text, admin1_name text)
RETURNS Geometry AS $$
from cartodb_services.metrics import metrics
from cartodb_services.metrics import QuotaService
from cartodb_services.metrics import InternalGeocoderConfig
from cartodb_services.tools import Logger,LoggerConfig
@@ -14,44 +15,11 @@ RETURNS Geometry AS $$
logger_config = GD["logger_config"]
logger = Logger(logger_config)
quota_service = QuotaService(user_geocoder_config, redis_conn)
try:
plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_geocode_admin1_polygon(trim($1)) AS mypolygon", ["text"])
rv = plpy.execute(plan, [admin1_name], 1)
result = rv[0]["mypolygon"]
if result:
quota_service.increment_success_service_use()
return result
else:
quota_service.increment_empty_service_use()
return None
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to geocode admin1 polygon', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to geocode admin1 polygon')
finally:
quota_service.increment_total_service_use()
$$ LANGUAGE plpythonu;

---- cdb_geocode_admin1_polygon(admin1_name text, country_name text)
CREATE OR REPLACE FUNCTION cdb_dataservices_server.cdb_geocode_admin1_polygon(username text, orgname text, admin1_name text, country_name text)
RETURNS Geometry AS $$
from cartodb_services.metrics import QuotaService
from cartodb_services.metrics import InternalGeocoderConfig
from cartodb_services.tools import Logger,LoggerConfig

plpy.execute("SELECT cdb_dataservices_server._connect_to_redis('{0}')".format(username))
redis_conn = GD["redis_connection_{0}".format(username)]['redis_metrics_connection']
plpy.execute("SELECT cdb_dataservices_server._get_internal_geocoder_config({0}, {1})".format(plpy.quote_nullable(username), plpy.quote_nullable(orgname)))
user_geocoder_config = GD["user_internal_geocoder_config_{0}".format(username)]

plpy.execute("SELECT cdb_dataservices_server._get_logger_config()")
logger_config = GD["logger_config"]
logger = Logger(logger_config)
quota_service = QuotaService(user_geocoder_config, redis_conn)
with metrics('cdb_geocode_admin1_polygon', user_geocoder_config, logger):
try:
plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_geocode_admin1_polygon(trim($1), trim($2)) AS mypolygon", ["text", "text"])
rv = plpy.execute(plan, [admin1_name, country_name], 1)
plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_geocode_admin1_polygon(trim($1)) AS mypolygon", ["text"])
rv = plpy.execute(plan, [admin1_name], 1)
result = rv[0]["mypolygon"]
if result:
quota_service.increment_success_service_use()
@@ -68,6 +36,44 @@ RETURNS Geometry AS $$
quota_service.increment_total_service_use()
$$ LANGUAGE plpythonu;

---- cdb_geocode_admin1_polygon(admin1_name text, country_name text)
CREATE OR REPLACE FUNCTION cdb_dataservices_server.cdb_geocode_admin1_polygon(username text, orgname text, admin1_name text, country_name text)
RETURNS Geometry AS $$
from cartodb_services.metrics import metrics
from cartodb_services.metrics import QuotaService
from cartodb_services.metrics import InternalGeocoderConfig
from cartodb_services.tools import Logger,LoggerConfig

plpy.execute("SELECT cdb_dataservices_server._connect_to_redis('{0}')".format(username))
redis_conn = GD["redis_connection_{0}".format(username)]['redis_metrics_connection']
plpy.execute("SELECT cdb_dataservices_server._get_internal_geocoder_config({0}, {1})".format(plpy.quote_nullable(username), plpy.quote_nullable(orgname)))
user_geocoder_config = GD["user_internal_geocoder_config_{0}".format(username)]

plpy.execute("SELECT cdb_dataservices_server._get_logger_config()")
logger_config = GD["logger_config"]
logger = Logger(logger_config)
quota_service = QuotaService(user_geocoder_config, redis_conn)

with metrics('cdb_geocode_admin1_polygon', user_geocoder_config, logger):
try:
plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_geocode_admin1_polygon(trim($1), trim($2)) AS mypolygon", ["text", "text"])
rv = plpy.execute(plan, [admin1_name, country_name], 1)
result = rv[0]["mypolygon"]
if result:
quota_service.increment_success_service_use()
return result
else:
quota_service.increment_empty_service_use()
return None
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to geocode admin1 polygon', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to geocode admin1 polygon')
finally:
quota_service.increment_total_service_use()
$$ LANGUAGE plpythonu;

--------------------------------------------------------------------------------

-- Implementation of the server extension

@@ -35,7 +35,7 @@ CREATE OR REPLACE FUNCTION cdb_dataservices_server._cdb_mapzen_geocode_namedplac
RETURNS Geometry AS $$
from cartodb_services.mapzen import MapzenGeocoder
from cartodb_services.mapzen.types import country_to_iso3
from cartodb_services.metrics import QuotaService
from cartodb_services.metrics import QuotaService, metrics
from cartodb_services.tools import Logger,LoggerConfig

plpy.execute("SELECT cdb_dataservices_server._connect_to_redis('{0}')".format(username))
@@ -50,35 +50,36 @@ RETURNS Geometry AS $$
if not quota_service.check_user_quota():
raise Exception('You have reached the limit of your quota')

try:
geocoder = MapzenGeocoder(user_geocoder_config.mapzen_api_key, logger)
country_iso3 = None
if country_name:
country_iso3 = country_to_iso3(country_name)
coordinates = geocoder.geocode(searchtext=city_name, city=None,
state_province=admin1_name,
country=country_iso3, search_type='locality')
if coordinates:
quota_service.increment_success_service_use()
plan = plpy.prepare("SELECT ST_SetSRID(ST_MakePoint($1, $2), 4326); ", ["double precision", "double precision"])
point = plpy.execute(plan, [coordinates[0], coordinates[1]], 1)[0]
return point['st_setsrid']
else:
quota_service.increment_empty_service_use()
return None
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to geocode city point using mapzen', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to geocode city point using mapzen')
finally:
quota_service.increment_total_service_use()
with metrics('cdb_geocode_namedplace_point', user_geocoder_config, logger):
try:
geocoder = MapzenGeocoder(user_geocoder_config.mapzen_api_key, logger)
country_iso3 = None
if country_name:
country_iso3 = country_to_iso3(country_name)
coordinates = geocoder.geocode(searchtext=city_name, city=None,
state_province=admin1_name,
country=country_iso3, search_type='locality')
if coordinates:
quota_service.increment_success_service_use()
plan = plpy.prepare("SELECT ST_SetSRID(ST_MakePoint($1, $2), 4326); ", ["double precision", "double precision"])
point = plpy.execute(plan, [coordinates[0], coordinates[1]], 1)[0]
return point['st_setsrid']
else:
quota_service.increment_empty_service_use()
return None
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to geocode city point using mapzen', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to geocode city point using mapzen')
finally:
quota_service.increment_total_service_use()
$$ LANGUAGE plpythonu;

CREATE OR REPLACE FUNCTION cdb_dataservices_server._cdb_internal_geocode_namedplace(username text, orgname text, city_name text, admin1_name text DEFAULT NULL, country_name text DEFAULT NULL)
RETURNS Geometry AS $$
from cartodb_services.metrics import QuotaService
from cartodb_services.metrics import InternalGeocoderConfig
from cartodb_services.metrics import InternalGeocoderConfig, metrics
from cartodb_services.tools import Logger,LoggerConfig

plpy.execute("SELECT cdb_dataservices_server._connect_to_redis('{0}')".format(username))
@@ -90,30 +91,32 @@ RETURNS Geometry AS $$
logger_config = GD["logger_config"]
logger = Logger(logger_config)
quota_service = QuotaService(user_geocoder_config, redis_conn)
try:
if admin1_name and country_name:
plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_geocode_namedplace_point(trim($1), trim($2), trim($3)) AS mypoint", ["text", "text", "text"])
rv = plpy.execute(plan, [city_name, admin1_name, country_name], 1)
elif country_name:
plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_geocode_namedplace_point(trim($1), trim($2)) AS mypoint", ["text", "text"])
rv = plpy.execute(plan, [city_name, country_name], 1)
else:
plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_geocode_namedplace_point(trim($1)) AS mypoint", ["text"])
rv = plpy.execute(plan, [city_name], 1)
result = rv[0]["mypoint"]
if result:
quota_service.increment_success_service_use()
return result
else:
quota_service.increment_empty_service_use()
return None
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to geocode namedplace point', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to geocode namedplace point')
finally:
quota_service.increment_total_service_use()

with metrics('cdb_geocode_namedplace_point', user_geocoder_config, logger):
try:
if admin1_name and country_name:
plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_geocode_namedplace_point(trim($1), trim($2), trim($3)) AS mypoint", ["text", "text", "text"])
rv = plpy.execute(plan, [city_name, admin1_name, country_name], 1)
elif country_name:
plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_geocode_namedplace_point(trim($1), trim($2)) AS mypoint", ["text", "text"])
rv = plpy.execute(plan, [city_name, country_name], 1)
else:
plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_geocode_namedplace_point(trim($1)) AS mypoint", ["text"])
rv = plpy.execute(plan, [city_name], 1)
result = rv[0]["mypoint"]
if result:
quota_service.increment_success_service_use()
return result
else:
quota_service.increment_empty_service_use()
return None
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to geocode namedplace point', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to geocode namedplace point')
finally:
quota_service.increment_total_service_use()
$$ LANGUAGE plpythonu;

--------------------------------------------------------------------------------

@@ -1,5 +1,6 @@
CREATE OR REPLACE FUNCTION cdb_dataservices_server.cdb_geocode_postalcode_point(username text, orgname text, code text)
RETURNS Geometry AS $$
from cartodb_services.metrics import metrics
from cartodb_services.metrics import QuotaService
from cartodb_services.metrics import InternalGeocoderConfig
from cartodb_services.tools import Logger,LoggerConfig
@@ -13,27 +14,29 @@ RETURNS Geometry AS $$
logger_config = GD["logger_config"]
logger = Logger(logger_config)
quota_service = QuotaService(user_geocoder_config, redis_conn)
try:
plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_geocode_postalcode_point(trim($1)) AS mypoint", ["text"])
rv = plpy.execute(plan, [code], 1)
result = rv[0]["mypoint"]
if result:
quota_service.increment_success_service_use()
return result
else:
quota_service.increment_empty_service_use()
return None
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to geocode postal code point', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to geocode postal code point')
finally:
quota_service.increment_total_service_use()
with metrics('cdb_geocode_postalcode_point', user_geocoder_config, logger):
try:
plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_geocode_postalcode_point(trim($1)) AS mypoint", ["text"])
rv = plpy.execute(plan, [code], 1)
result = rv[0]["mypoint"]
if result:
quota_service.increment_success_service_use()
return result
else:
quota_service.increment_empty_service_use()
return None
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to geocode postal code point', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to geocode postal code point')
finally:
quota_service.increment_total_service_use()
$$ LANGUAGE plpythonu;

CREATE OR REPLACE FUNCTION cdb_dataservices_server.cdb_geocode_postalcode_point(username text, orgname text, code text, country text)
RETURNS Geometry AS $$
from cartodb_services.metrics import metrics
from cartodb_services.metrics import QuotaService
from cartodb_services.metrics import InternalGeocoderConfig
from cartodb_services.tools import Logger,LoggerConfig
@@ -47,27 +50,29 @@ RETURNS Geometry AS $$
logger_config = GD["logger_config"]
logger = Logger(logger_config)
quota_service = QuotaService(user_geocoder_config, redis_conn)
try:
plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_geocode_postalcode_point(trim($1), trim($2)) AS mypoint", ["TEXT", "TEXT"])
rv = plpy.execute(plan, [code, country], 1)
result = rv[0]["mypoint"]
if result:
quota_service.increment_success_service_use()
return result
else:
quota_service.increment_empty_service_use()
return None
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to geocode postal code point', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to geocode postal code point')
finally:
quota_service.increment_total_service_use()
with metrics('cdb_geocode_postalcode_point', user_geocoder_config, logger):
try:
plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_geocode_postalcode_point(trim($1), trim($2)) AS mypoint", ["TEXT", "TEXT"])
rv = plpy.execute(plan, [code, country], 1)
result = rv[0]["mypoint"]
if result:
quota_service.increment_success_service_use()
return result
else:
quota_service.increment_empty_service_use()
return None
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to geocode postal code point', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to geocode postal code point')
finally:
quota_service.increment_total_service_use()
$$ LANGUAGE plpythonu;

CREATE OR REPLACE FUNCTION cdb_dataservices_server.cdb_geocode_postalcode_polygon(username text, orgname text, code text)
RETURNS Geometry AS $$
from cartodb_services.metrics import metrics
from cartodb_services.metrics import QuotaService
from cartodb_services.metrics import InternalGeocoderConfig
from cartodb_services.tools import Logger,LoggerConfig
@@ -81,27 +86,29 @@ RETURNS Geometry AS $$
logger_config = GD["logger_config"]
logger = Logger(logger_config)
quota_service = QuotaService(user_geocoder_config, redis_conn)
try:
plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_geocode_postalcode_polygon(trim($1)) AS mypolygon", ["text"])
rv = plpy.execute(plan, [code], 1)
result = rv[0]["mypolygon"]
if result:
quota_service.increment_success_service_use()
return result
else:
quota_service.increment_empty_service_use()
return None
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to geocode postal code polygon', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to geocode postal code polygon')
finally:
quota_service.increment_total_service_use()
with metrics('cdb_geocode_postalcode_point', user_geocoder_config, logger):
try:
plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_geocode_postalcode_polygon(trim($1)) AS mypolygon", ["text"])
rv = plpy.execute(plan, [code], 1)
result = rv[0]["mypolygon"]
if result:
quota_service.increment_success_service_use()
return result
else:
quota_service.increment_empty_service_use()
return None
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to geocode postal code polygon', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to geocode postal code polygon')
finally:
quota_service.increment_total_service_use()
$$ LANGUAGE plpythonu;

CREATE OR REPLACE FUNCTION cdb_dataservices_server.cdb_geocode_postalcode_polygon(username text, orgname text, code text, country text)
RETURNS Geometry AS $$
from cartodb_services.metrics import metrics
from cartodb_services.metrics import QuotaService
from cartodb_services.metrics import InternalGeocoderConfig
from cartodb_services.tools import Logger,LoggerConfig
@@ -115,23 +122,24 @@ RETURNS Geometry AS $$
logger_config = GD["logger_config"]
logger = Logger(logger_config)
quota_service = QuotaService(user_geocoder_config, redis_conn)
try:
plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_geocode_postalcode_polygon(trim($1), trim($2)) AS mypolygon", ["TEXT", "TEXT"])
rv = plpy.execute(plan, [code, country], 1)
result = rv[0]["mypolygon"]
if result:
quota_service.increment_success_service_use()
return result
else:
quota_service.increment_empty_service_use()
return None
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to geocode postal code polygon', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to geocode postal code polygon')
finally:
quota_service.increment_total_service_use()
with metrics('cdb_geocode_postalcode_point', user_geocoder_config, logger):
try:
plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_geocode_postalcode_polygon(trim($1), trim($2)) AS mypolygon", ["TEXT", "TEXT"])
rv = plpy.execute(plan, [code, country], 1)
result = rv[0]["mypolygon"]
if result:
quota_service.increment_success_service_use()
return result
else:
quota_service.increment_empty_service_use()
return None
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to geocode postal code polygon', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to geocode postal code polygon')
finally:
quota_service.increment_total_service_use()
$$ LANGUAGE plpythonu;

--------------------------------------------------------------------------------

@@ -1,5 +1,6 @@
CREATE OR REPLACE FUNCTION cdb_dataservices_server.cdb_geocode_ipaddress_point(username text, orgname text, ip text)
RETURNS Geometry AS $$
from cartodb_services.metrics import metrics
from cartodb_services.metrics import QuotaService
from cartodb_services.metrics import InternalGeocoderConfig
from cartodb_services.tools import Logger,LoggerConfig
@@ -13,23 +14,24 @@ RETURNS Geometry AS $$
logger_config = GD["logger_config"]
logger = Logger(logger_config)
quota_service = QuotaService(user_geocoder_config, redis_conn)
try:
plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_geocode_ipaddress_point(trim($1)) AS mypoint", ["TEXT"])
rv = plpy.execute(plan, [ip], 1)
result = rv[0]["mypoint"]
if result:
quota_service.increment_success_service_use()
return result
else:
quota_service.increment_empty_service_use()
return None
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to geocode postal code polygon', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to geocode postal code polygon')
finally:
quota_service.increment_total_service_use()
with metrics('cdb_geocode_ipaddress_point', user_geocoder_config, logger):
try:
plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_geocode_ipaddress_point(trim($1)) AS mypoint", ["TEXT"])
rv = plpy.execute(plan, [ip], 1)
result = rv[0]["mypoint"]
if result:
quota_service.increment_success_service_use()
return result
else:
quota_service.increment_empty_service_use()
return None
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to geocode postal code polygon', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to geocode postal code polygon')
finally:
quota_service.increment_total_service_use()
$$ LANGUAGE plpythonu;

--------------------------------------------------------------------------------

@@ -1,21 +1,27 @@
CREATE OR REPLACE FUNCTION cdb_dataservices_server.cdb_isochrone(username TEXT, orgname TEXT, source geometry(Geometry, 4326), mode TEXT, range integer[], options text[] DEFAULT array[]::text[])
RETURNS SETOF cdb_dataservices_server.isoline AS $$
from cartodb_services.metrics import metrics

plpy.execute("SELECT cdb_dataservices_server._connect_to_redis('{0}')".format(username))
redis_conn = GD["redis_connection_{0}".format(username)]['redis_metrics_connection']
plpy.execute("SELECT cdb_dataservices_server._get_isolines_routing_config({0}, {1})".format(plpy.quote_nullable(username), plpy.quote_nullable(orgname)))
user_isolines_config = GD["user_isolines_routing_config_{0}".format(username)]
plpy.execute("SELECT cdb_dataservices_server._get_logger_config()")
logger_config = GD["logger_config"]
logger = Logger(logger_config)

if user_isolines_config.google_services_user:
raise Exception('This service is not available for google service users.')

if user_isolines_config.heremaps_provider:
here_plan = plpy.prepare("SELECT * FROM cdb_dataservices_server.cdb_here_isochrone($1, $2, $3, $4, $5, $6) as isoline; ", ["text", "text", "geometry(geometry, 4326)", "text", "integer[]", "text[]"])
return plpy.execute(here_plan, [username, orgname, source, mode, range, options])
elif user_isolines_config.mapzen_provider:
mapzen_plan = plpy.prepare("SELECT * FROM cdb_dataservices_server.cdb_mapzen_isochrone($1, $2, $3, $4, $5, $6) as isoline; ", ["text", "text", "geometry(geometry, 4326)", "text", "integer[]", "text[]"])
return plpy.execute(mapzen_plan, [username, orgname, source, mode, range, options])
else:
raise Exception('Requested isolines provider is not available')
with metrics('cb_isochrone', user_isolines_config, logger):
if user_isolines_config.heremaps_provider:
here_plan = plpy.prepare("SELECT * FROM cdb_dataservices_server.cdb_here_isochrone($1, $2, $3, $4, $5, $6) as isoline; ", ["text", "text", "geometry(geometry, 4326)", "text", "integer[]", "text[]"])
return plpy.execute(here_plan, [username, orgname, source, mode, range, options])
elif user_isolines_config.mapzen_provider:
mapzen_plan = plpy.prepare("SELECT * FROM cdb_dataservices_server.cdb_mapzen_isochrone($1, $2, $3, $4, $5, $6) as isoline; ", ["text", "text", "geometry(geometry, 4326)", "text", "integer[]", "text[]"])
return plpy.execute(mapzen_plan, [username, orgname, source, mode, range, options])
else:
raise Exception('Requested isolines provider is not available')
$$ LANGUAGE plpythonu;

-- heremaps isochrone

@@ -2,7 +2,7 @@ SELECT exists(SELECT *
FROM pg_proc p
INNER JOIN pg_namespace ns ON (p.pronamespace = ns.oid)
WHERE ns.nspname = 'cdb_dataservices_server'
AND proname = '_obs_connectusertable'
AND proname = '_dst_connectusertable'
AND oidvectortypes(p.proargtypes) = 'text, text, text, text, text, text');
exists
--------
@@ -13,7 +13,7 @@ SELECT exists(SELECT *
FROM pg_proc p
INNER JOIN pg_namespace ns ON (p.pronamespace = ns.oid)
WHERE ns.nspname = 'cdb_dataservices_server'
AND proname = '_obs_getreturnmetadata'
AND proname = '_dst_getreturnmetadata'
AND oidvectortypes(p.proargtypes) = 'text, text, text, json');
exists
--------
@@ -24,7 +24,7 @@ SELECT exists(SELECT *
FROM pg_proc p
INNER JOIN pg_namespace ns ON (p.pronamespace = ns.oid)
WHERE ns.nspname = 'cdb_dataservices_server'
AND proname = '_obs_fetchjoinfdwtabledata'
AND proname = '_dst_fetchjoinfdwtabledata'
AND oidvectortypes(p.proargtypes) = 'text, text, text, text, text, json');
exists
--------
@@ -35,7 +35,7 @@ SELECT exists(SELECT *
FROM pg_proc p
INNER JOIN pg_namespace ns ON (p.pronamespace = ns.oid)
WHERE ns.nspname = 'cdb_dataservices_server'
AND proname = '_obs_disconnectusertable'
AND proname = '_dst_disconnectusertable'
AND oidvectortypes(p.proargtypes) = 'text, text, text, text, text');
exists
--------

@@ -2,27 +2,27 @@ SELECT exists(SELECT *
FROM pg_proc p
INNER JOIN pg_namespace ns ON (p.pronamespace = ns.oid)
WHERE ns.nspname = 'cdb_dataservices_server'
AND proname = '_obs_connectusertable'
AND proname = '_dst_connectusertable'
AND oidvectortypes(p.proargtypes) = 'text, text, text, text, text, text');

SELECT exists(SELECT *
FROM pg_proc p
INNER JOIN pg_namespace ns ON (p.pronamespace = ns.oid)
WHERE ns.nspname = 'cdb_dataservices_server'
AND proname = '_obs_getreturnmetadata'
AND proname = '_dst_getreturnmetadata'
AND oidvectortypes(p.proargtypes) = 'text, text, text, json');

SELECT exists(SELECT *
FROM pg_proc p
INNER JOIN pg_namespace ns ON (p.pronamespace = ns.oid)
WHERE ns.nspname = 'cdb_dataservices_server'
AND proname = '_obs_fetchjoinfdwtabledata'
AND proname = '_dst_fetchjoinfdwtabledata'
AND oidvectortypes(p.proargtypes) = 'text, text, text, text, text, json');

SELECT exists(SELECT *
FROM pg_proc p
INNER JOIN pg_namespace ns ON (p.pronamespace = ns.oid)
WHERE ns.nspname = 'cdb_dataservices_server'
AND proname = '_obs_disconnectusertable'
AND proname = '_dst_disconnectusertable'
AND oidvectortypes(p.proargtypes) = 'text, text, text, text, text');

@@ -1,4 +1,4 @@
# CartoDB dataservices API python module
# CARTO dataservices API python module

This directory contains the python library used by the server side of CARTO LDS (Location Data Services).

@@ -0,0 +1,35 @@
# NOTE: This init function must be called from plpythonu entry points to
# initialize cartodb_services module properly. E.g:
#
# CREATE OR REPLACE FUNCTION cdb_dataservices_server.cdb_isochrone(...)
# RETURNS SETOF cdb_dataservices_server.isoline AS $$
#
# import cartodb_services
# cartodb_services.init(plpy, GD)
#
# # rest of the code here
# cartodb_services.GD[key] = val
# cartodb_services.plpy.execute('SELECT * FROM ...')
#
# $$ LANGUAGE plpythonu;

plpy = None
GD = None

def init(_plpy, _GD):
global plpy
global GD

if plpy is None:
plpy = _plpy

if GD is None:
GD = _GD

def _reset():
# NOTE: just for testing
global plpy
global GD

plpy = None
GD = None

@@ -5,15 +5,18 @@ import json
import requests

from exceptions import *
from cartodb_services.metrics import Traceable

class HereMapsGeocoder:
class HereMapsGeocoder(Traceable):
'A Here Maps Geocoder wrapper for python'

PRODUCTION_GEOCODE_JSON_URL = 'https://geocoder.api.here.com/6.2/geocode.json'
STAGING_GEOCODE_JSON_URL = 'https://geocoder.cit.api.here.com/6.2/geocode.json'
DEFAULT_MAXRESULTS = 1
DEFAULT_GEN = 9
READ_TIMEOUT = 60
CONNECT_TIMEOUT = 10

ADDRESS_PARAMS = [
'city',
@@ -85,7 +88,9 @@ class HereMapsGeocoder:
'gen': self.gen
}
request_params.update(params)
response = requests.get(self.host, params=request_params)
response = requests.get(self.host, params=request_params,
timeout=(self.CONNECT_TIMEOUT, self.READ_TIMEOUT))
self.add_response_data(response, self._logger)
if response.status_code == requests.codes.ok:
return json.loads(response.text)
elif response.status_code == requests.codes.bad_request:

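The timeout added here is the same pattern applied to every external HTTP client touched in this changeset: requests accepts a (connect, read) tuple, so a slow or unreachable upstream service fails fast instead of blocking the plpythonu call. A minimal sketch of the idea; the URL and query parameters are illustrative, not part of the diff:

import requests

CONNECT_TIMEOUT = 10  # seconds allowed to establish the connection
READ_TIMEOUT = 60     # seconds allowed for the server to start sending the response

# The (connect, read) tuple mirrors the CONNECT_TIMEOUT/READ_TIMEOUT constants
# added to the clients above.
response = requests.get('https://geocoder.api.here.com/6.2/geocode.json',
                        params={'searchtext': 'Madrid', 'gen': 9},
                        timeout=(CONNECT_TIMEOUT, READ_TIMEOUT))
print(response.status_code)
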
@@ -2,14 +2,17 @@ import requests
import json

from exceptions import WrongParams
from cartodb_services.metrics import Traceable

class HereMapsRoutingIsoline:
class HereMapsRoutingIsoline(Traceable):
'A Here Maps Routing wrapper for python'

PRODUCTION_ROUTING_BASE_URL = 'https://isoline.route.api.here.com'
STAGING_ROUTING_BASE_URL = 'https://isoline.route.cit.api.here.com'
ISOLINE_PATH = '/routing/7.2/calculateisoline.json'
READ_TIMEOUT = 60
CONNECT_TIMEOUT = 10

ACCEPTED_MODES = {
"walk": "pedestrian",
@@ -50,7 +53,9 @@ class HereMapsRoutingIsoline:
data_range,
range_type,
parsed_options)
response = requests.get(self._url, params=request_params)
response = requests.get(self._url, params=request_params,
timeout=(self.CONNECT_TIMEOUT, self.READ_TIMEOUT))
self.add_response_data(response, self._logger)
if response.status_code == requests.codes.ok:
return self.__parse_isolines_response(response.text)
elif response.status_code == requests.codes.bad_request:

@@ -19,3 +19,15 @@ class MalformedResult(Exception):
class TimeoutException(Exception):
def __str__(self):
return repr('Timeout requesting to mapzen server')

class ServiceException(Exception):
def __init__(self, message, response):
self.message = message
self.response = response

def response(self):
return self.response

def __str__(self):
return self.message

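ServiceException exists so the QPS retry logic can inspect the failed HTTP response: the decorator looks for a response attribute on the raised exception and retries only on HTTP 429. A minimal sketch of that interaction; the FakeResponse class, the re-declared exception and the message are invented for illustration:

class ServiceException(Exception):
    # Stand-in mirroring the class added above.
    def __init__(self, message, response):
        self.message = message
        self.response = response

class FakeResponse(object):
    status_code = 429  # "Too Many Requests"

try:
    raise ServiceException('rate limited by the upstream service', FakeResponse())
except Exception as e:
    response = getattr(e, 'response', None)
    if response is not None and response.status_code == 429:
        pass  # this is the branch where qps_retry would sleep and retry the call
    else:
        raise
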
@@ -2,28 +2,34 @@ import requests
import json
import re

from exceptions import WrongParams, MalformedResult
from exceptions import WrongParams, MalformedResult, ServiceException
from qps import qps_retry
from cartodb_services.tools import Coordinate, PolyLine
from cartodb_services.metrics import Traceable

class MapzenGeocoder:
class MapzenGeocoder(Traceable):
'A Mapzen Geocoder wrapper for python'

BASE_URL = 'https://search.mapzen.com/v1/search'
READ_TIMEOUT = 60
CONNECT_TIMEOUT = 10

def __init__(self, app_key, logger, base_url=BASE_URL):
self._app_key = app_key
self._url = base_url
self._logger = logger

@qps_retry
def geocode(self, searchtext, city=None, state_province=None, country=None, search_type=None):
@qps_retry(qps=20)
def geocode(self, searchtext, city=None, state_province=None,
country=None, search_type=None):
request_params = self._build_requests_parameters(searchtext, city,
state_province,
country, search_type)
try:
response = requests.get(self._url, params=request_params)
response = requests.get(self._url, params=request_params,
timeout=(self.CONNECT_TIMEOUT, self.READ_TIMEOUT))
self.add_response_data(response, self._logger)
if response.status_code == requests.codes.ok:
return self.__parse_response(response.text)
elif response.status_code == requests.codes.bad_request:
@@ -31,21 +37,27 @@ class MapzenGeocoder:
else:
self._logger.error('Error trying to geocode using mapzen',
data={"response_status": response.status_code,
"response_reason": response.reason,
"response_content": response.text,
"reponse_url": response.url,
"response_headers": response.headers,
"searchtext": searchtext,
"city": city, "country": country,
"state_province": state_province })
raise Exception('Error trying to geocode {0} using mapzen'.format(searchtext))
"response_reason": response.reason,
"response_content": response.text,
"reponse_url": response.url,
"response_headers": response.headers,
"searchtext": searchtext,
"city": city, "country": country,
"state_province": state_province})
raise ServiceException('Error trying to geocode {0} using mapzen'.format(searchtext),
response)
except requests.Timeout as te:
# In case of timeout we want to stop the job because the server
# could be down
self._logger.error('Timeout connecting to Mapzen geocoding server')
raise ServiceException('Error trying to geocode {0} using mapzen'.format(searchtext),
None)
except requests.ConnectionError as e:
# Don't raise the exception to continue with the geocoding job
self._logger.error('Error connecting to Mapzen geocoding server',
exception=e)
return []

def _build_requests_parameters(self, searchtext, city=None,
state_province=None, country=None,
search_type=None):

@@ -86,7 +86,7 @@ class MapzenIsolines:
def calculate_isoline(self, origin, costing_model, isorange, upper_rmax, cost_variable, unit_factor=1.0):

# NOTE: not for production
self._logger.debug('Calculate isoline', data={"origin": origin, "costing_model": costing_model, "isorange": isorange})
# self._logger.debug('Calculate isoline', data={"origin": origin, "costing_model": costing_model, "isorange": isorange})

# Formally, a solution is an array of {angle, radius, lat, lon, cost} with cardinality NUMBER_OF_ANGLES
# we're looking for a solution in which abs(cost - isorange) / isorange <= TOLERANCE
@@ -105,14 +105,16 @@ class MapzenIsolines:

response = self._matrix_client.one_to_many([origin] + location_estimates, costing_model)
costs = [None] * self.NUMBER_OF_ANGLES
if not response:
# In case the matrix client doesn't return any data
break

for idx, c in enumerate(response['one_to_many'][0][1:]):
if c[cost_variable]:
costs[idx] = c[cost_variable]*unit_factor
else:
costs[idx] = isorange

# self._logger.debug('i = %d, costs = %s' % (i, costs))

errors = [(cost - isorange) / float(isorange) for cost in costs]
max_abs_error = max([abs(e) for e in errors])
if max_abs_error <= self.TOLERANCE:

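The hunk above is the convergence loop of MapzenIsolines.calculate_isoline: each sampled angle gets a cost from the matrix service, missing costs default to the requested isorange, and the iteration stops when the largest relative error is within TOLERANCE. A self-contained sketch of that acceptance test, with invented costs and an assumed TOLERANCE value:

TOLERANCE = 0.1                   # assumption for this example; the real value lives on MapzenIsolines
isorange = 300                    # requested cost, e.g. seconds
costs = [270, 310, 330, 295]      # hypothetical costs for four sampled angles

errors = [(cost - isorange) / float(isorange) for cost in costs]
max_abs_error = max([abs(e) for e in errors])

# Accept the candidate isoline only when every sample is close enough
# to the requested cost.
print(max_abs_error <= TOLERANCE)  # True: the worst sample is off by exactly 10%
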
@@ -1,9 +1,11 @@
import requests
import json
from qps import qps_retry
from exceptions import ServiceException
from cartodb_services.metrics import Traceable

class MatrixClient:
class MatrixClient(Traceable):

"""
A minimal client for Mapzen Time-Distance Matrix Service
@@ -18,6 +20,8 @@ class MatrixClient:
"""

ONE_TO_MANY_URL = 'https://matrix.mapzen.com/one_to_many'
READ_TIMEOUT = 60
CONNECT_TIMEOUT = 10

def __init__(self, matrix_key, logger):
self._matrix_key = matrix_key
@@ -40,9 +44,11 @@ class MatrixClient:
'costing': costing,
'api_key': self._matrix_key
}
response = requests.get(self.ONE_TO_MANY_URL, params=request_params)
response = requests.get(self.ONE_TO_MANY_URL, params=request_params,
timeout=(self.CONNECT_TIMEOUT, self.READ_TIMEOUT))
self.add_response_data(response, self._logger)

if not requests.codes.ok:
if response.status_code != requests.codes.ok:
self._logger.error('Error trying to get matrix distance from mapzen',
data={"response_status": response.status_code,
"response_reason": response.reason,
@@ -51,6 +57,22 @@ class MatrixClient:
"response_headers": response.headers,
"locations": locations,
"costing": costing})
raise Exception('Error trying to get matrix distance from mapzen')
# In case of a 4xx error we return empty because the error comes from
# the info provided by the user and we don't want to stop the
# isolines generation
if response.status_code == requests.codes.bad_request:
return {}
elif response.status_code == 504:
# Due to some unsolved problems in the Mapzen Matrix API we're
# randomly getting 504s, probably timeouts. To avoid raising an
# exception in all the jobs, for now we're going to return
# empty in that case
return {}
else:
raise ServiceException("Error trying to get matrix distance from mapzen", response)

return response.json()
# the response could come back with an empty json body
try:
return response.json()
except:
return {}

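Condensed, the new error handling in MatrixClient.one_to_many distinguishes user-caused errors and known flaky responses from real failures. A sketch of that policy, not the actual method; ServiceException is re-declared here only to keep the example self-contained:

class ServiceException(Exception):
    # Stand-in for the exception class defined in exceptions.py above.
    def __init__(self, message, response):
        super(ServiceException, self).__init__(message)
        self.response = response

def handle_matrix_response(response):
    if response.status_code == 400:
        return {}    # bad user input: skip this request, keep the isolines job running
    elif response.status_code == 504:
        return {}    # intermittent gateway timeouts observed from the Matrix API
    elif response.status_code != 200:
        raise ServiceException('Error trying to get matrix distance from mapzen', response)
    try:
        return response.json()   # the body may be empty even on 200
    except ValueError:
        return {}
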
@@ -4,18 +4,38 @@ from datetime import datetime
from exceptions import TimeoutException

DEFAULT_RETRY_TIMEOUT = 60
DEFAULT_QUERIES_PER_SECOND = 10

def qps_retry(f):
def wrapped_f(*args, **kw):
return QPSService().call(f, *args, **kw)
return wrapped_f
def qps_retry(original_function=None,**options):
""" Query Per Second retry decorator
The intention of this decorator is to retry requests against third-party
services that have QPS restrictions.
Parameters:
- timeout: Maximum number of seconds to retry
- qps: Allowed queries per second. This parameter is used to
calculate the next time to retry the request
"""
if original_function is not None:
def wrapped_function(*args, **kwargs):
if 'timeout' in options:
timeout = options['timeout']
else:
timeout = DEFAULT_RETRY_TIMEOUT
if 'qps' in options:
qps = options['qps']
else:
qps = DEFAULT_QUERIES_PER_SECOND
return QPSService(retry_timeout=timeout, queries_per_second=qps).call(original_function, *args, **kwargs)
return wrapped_function
else:
def partial_wrapper(func):
return qps_retry(func, **options)
return partial_wrapper

class QPSService:

def __init__(self, queries_per_second=10,
retry_timeout=DEFAULT_RETRY_TIMEOUT):
def __init__(self, queries_per_second, retry_timeout):
self._queries_per_second = queries_per_second
self._retry_timeout = retry_timeout

@@ -27,7 +47,7 @@ class QPSService:
return fn(*args, **kwargs)
except Exception as e:
response = getattr(e, 'response', None)
if response and (response.status_code == 429):
if response is not None and (response.status_code == 429):
self.retry(start_time, attempt_number)
else:
raise e
@@ -35,7 +55,7 @@ class QPSService:

def retry(self, first_request_time, retry_count):
elapsed = datetime.now() - first_request_time
if elapsed.seconds > self._retry_timeout:
if elapsed.microseconds > (self._retry_timeout * 1000.0):
raise TimeoutException()

# inverse qps * (1.5 ^ i) is an increased sleep time of 1.5x per

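With the rewrite, qps_retry works both bare and with arguments, so individual clients can raise their allowance (MapzenGeocoder.geocode now uses qps=20). A usage sketch; the decorated functions are hypothetical and the import path is an assumption about where the qps module lives inside the package:

from cartodb_services.mapzen.qps import qps_retry

@qps_retry                       # bare form: keeps the defaults of 10 queries per second, 60 s retry window
def lookup_with_defaults(address):
    return {'query': address}

@qps_retry(qps=20, timeout=30)   # parameterized form, tuned per client
def lookup_with_tuning(address):
    return {'query': address}
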
@@ -2,15 +2,18 @@ import requests
import json
import re

from exceptions import WrongParams, MalformedResult
from exceptions import WrongParams, MalformedResult, ServiceException
from qps import qps_retry
from cartodb_services.tools import Coordinate, PolyLine
from cartodb_services.metrics import MetricsDataGatherer, Traceable

class MapzenRouting:
class MapzenRouting(Traceable):
'A Mapzen Routing wrapper for python'

PRODUCTION_ROUTING_BASE_URL = 'https://valhalla.mapzen.com/route'
READ_TIMEOUT = 60
CONNECT_TIMEOUT = 10

ACCEPTED_MODES = {
"walk": "pedestrian",
@@ -43,7 +46,9 @@ class MapzenRouting:
mode_param,
units)
request_params = self.__parse_request_parameters(json_request_params)
response = requests.get(self._url, params=request_params)
response = requests.get(self._url, params=request_params,
timeout=(self.CONNECT_TIMEOUT, self.READ_TIMEOUT))
self.add_response_data(response, self._logger)
if response.status_code == requests.codes.ok:
return self.__parse_routing_response(response.text)
elif response.status_code == requests.codes.bad_request:
@@ -57,7 +62,7 @@ class MapzenRouting:
"response_headers": response.headers,
"waypoints": waypoints, "mode": mode,
"options": options})
raise Exception('Error trying to calculate route using Mapzen')
raise ServiceException('Error trying to calculate route using Mapzen', response)

def __parse_options(self, options):
return dict(option.split('=') for option in options)

@@ -1,3 +1,4 @@
from config import GeocoderConfig, IsolinesRoutingConfig, InternalGeocoderConfig, RoutingConfig, ConfigException, ObservatorySnapshotConfig, ObservatoryConfig
from quota import QuotaService
from user import UserMetricsService
from log import metrics, MetricsDataGatherer, Traceable

@@ -15,6 +15,7 @@ class ServiceConfig(object):
self._username = username
self._orgname = orgname
self._db_config = ServicesDBConfig(db_conn, username, orgname)
self._metrics_log_path = self.__get_metrics_log_path()
self._environment = self._db_config._server_environment
if redis_connection:
self._redis_config = ServicesRedisConfig(redis_connection).build(
@@ -38,9 +39,20 @@ class ServiceConfig(object):
def environment(self):
return self._environment

@property
def metrics_log_path(self):
return self._metrics_log_path

def __get_metrics_log_path(self):
if self.METRICS_LOG_KEY:
return self._db_config.logger_config.get(self.METRICS_LOG_KEY, None)
else:
return None

class DataObservatoryConfig(ServiceConfig):

METRICS_LOG_KEY = 'do_log_path'

def __init__(self, redis_connection, db_conn, username, orgname=None):
super(DataObservatoryConfig, self).__init__(redis_connection, db_conn,
username, orgname)
@@ -61,6 +73,10 @@ class DataObservatoryConfig(ServiceConfig):
def connection_str(self):
return self._connection_str

@property
def provider(self):
return 'data observatory'

class ObservatorySnapshotConfig(DataObservatoryConfig):

@@ -116,6 +132,9 @@ class RoutingConfig(ServiceConfig):
ROUTING_PROVIDER_KEY = 'routing_provider'
MAPZEN_PROVIDER = 'mapzen'
DEFAULT_PROVIDER = 'mapzen'
QUOTA_KEY = 'mapzen_routing_quota'
SOFT_LIMIT_KEY = 'soft_mapzen_routing_limit'
METRICS_LOG_KEY = 'routing_log_path'

def __init__(self, redis_connection, db_conn, username, orgname=None):
super(RoutingConfig, self).__init__(redis_connection, db_conn,
@@ -124,7 +143,8 @@ class RoutingConfig(ServiceConfig):
if not self._routing_provider:
self._routing_provider = self.DEFAULT_PROVIDER
self._mapzen_api_key = self._db_config.mapzen_routing_api_key
self._monthly_quota = self._db_config.mapzen_routing_monthly_quota
self._set_monthly_quota()
self._set_soft_limit()
self._period_end_date = date_parse(self._redis_config[self.PERIOD_END_DATE])

@property
@@ -132,6 +152,10 @@ class RoutingConfig(ServiceConfig):
if self._routing_provider == self.MAPZEN_PROVIDER:
return 'routing_mapzen'

@property
def provider(self):
return self._routing_provider

@property
def mapzen_api_key(self):
return self._mapzen_api_key
@@ -144,6 +168,26 @@ class RoutingConfig(ServiceConfig):
def period_end_date(self):
return self._period_end_date

@property
def soft_limit(self):
return self._soft_limit

def _set_monthly_quota(self):
self._monthly_quota = self._get_effective_monthly_quota()

def _get_effective_monthly_quota(self):
quota_from_redis = self._redis_config.get(self.QUOTA_KEY)
if quota_from_redis and quota_from_redis <> '':
return int(quota_from_redis)
else:
return self._db_config.mapzen_routing_monthly_quota

def _set_soft_limit(self):
if self.SOFT_LIMIT_KEY in self._redis_config and self._redis_config[self.SOFT_LIMIT_KEY].lower() == 'true':
self._soft_limit = True
else:
self._soft_limit = False

class IsolinesRoutingConfig(ServiceConfig):

@@ -159,6 +203,7 @@ class IsolinesRoutingConfig(ServiceConfig):
MAPZEN_PROVIDER = 'mapzen'
HEREMAPS_PROVIDER = 'heremaps'
DEFAULT_PROVIDER = 'heremaps'
METRICS_LOG_KEY = 'isolines_log_path'

def __init__(self, redis_connection, db_conn, username, orgname=None):
super(IsolinesRoutingConfig, self).__init__(redis_connection, db_conn,
@@ -235,11 +280,12 @@ class IsolinesRoutingConfig(ServiceConfig):

class InternalGeocoderConfig(ServiceConfig):

METRICS_LOG_KEY = 'geocoder_log_path'

def __init__(self, redis_connection, db_conn, username, orgname=None):
# For now, internal geocoder doesn't use the redis config
super(InternalGeocoderConfig, self).__init__(None, db_conn,
username, orgname)
self._log_path = self._db_config.geocoder_log_path

@property
def service_type(self):
@@ -258,8 +304,8 @@ class InternalGeocoderConfig(ServiceConfig):
return None

@property
def log_path(self):
return self._log_path
def provider(self):
return 'internal'

class GeocoderConfig(ServiceConfig):
@@ -285,6 +331,7 @@ class GeocoderConfig(ServiceConfig):
ORGNAME_KEY = 'orgname'
PERIOD_END_DATE = 'period_end_date'
DEFAULT_PROVIDER = 'mapzen'
METRICS_LOG_KEY = 'geocoder_log_path'

def __init__(self, redis_connection, db_conn, username, orgname=None, forced_provider=None):
super(GeocoderConfig, self).__init__(redis_connection, db_conn,
@@ -316,7 +363,6 @@ class GeocoderConfig(ServiceConfig):
self._geocoder_provider = self.DEFAULT_PROVIDER
self._geocoding_quota = float(filtered_config[self.QUOTA_KEY])
self._period_end_date = date_parse(filtered_config[self.PERIOD_END_DATE])
self._log_path = db_config.geocoder_log_path
if filtered_config[self.SOFT_LIMIT_KEY].lower() == 'true':
self._soft_geocoding_limit = True
else:
@@ -399,8 +445,8 @@ class GeocoderConfig(ServiceConfig):
return self._cost_per_hit

@property
def log_path(self):
return self._log_path
def provider(self):
return self._geocoder_provider

class ServicesDBConfig:
@@ -415,7 +461,6 @@ class ServicesDBConfig:
self._get_server_config()
self._get_here_config()
self._get_mapzen_config()
self._get_logger_config()
self._get_data_observatory_config()

def _get_server_config(self):
@@ -468,13 +513,6 @@ class ServicesDBConfig:
else:
self._data_observatory_connection_str = do_conf['connection']['production']

def _get_logger_config(self):
logger_conf_json = self._get_conf('logger_conf')
if not logger_conf_json:
raise ConfigException('Logger configuration missing')
else:
logger_conf = json.loads(logger_conf_json)
self._geocoder_log_path = logger_conf['geocoder_log_path']

def _get_conf(self, key):
try:
@@ -482,7 +520,7 @@ class ServicesDBConfig:
conf = self._db_conn.execute(sql, 1)
return conf[0]['conf']
except Exception as e:
raise ConfigException("Malformed config for {0}: {1}".format(key, e))
raise ConfigException("Error trying to get config for {0}: {1}".format(key, e))

@property
def server_environment(self):
@@ -532,14 +570,18 @@ class ServicesDBConfig:
def mapzen_geocoder_monthly_quota(self):
return self._mapzen_geocoder_quota

@property
def geocoder_log_path(self):
return self._geocoder_log_path

@property
def data_observatory_connection_str(self):
return self._data_observatory_connection_str

@property
def logger_config(self):
logger_conf_json = self._get_conf('logger_conf')
if not logger_conf_json:
raise ConfigException('Logger configuration missing')
else:
return json.loads(logger_conf_json)

class ServicesRedisConfig:

@@ -547,6 +589,7 @@ class ServicesRedisConfig:
GOOGLE_GEOCODER_CLIENT_ID = 'google_maps_client_id'
QUOTA_KEY = 'geocoding_quota'
ISOLINES_QUOTA_KEY = 'here_isolines_quota'
ROUTING_QUOTA_KEY = 'mapzen_routing_quota'
OBS_SNAPSHOT_QUOTA_KEY = 'obs_snapshot_quota'
OBS_GENERAL_QUOTA_KEY = 'obs_general_quota'
PERIOD_END_DATE = 'period_end_date'
@@ -585,8 +628,12 @@ class ServicesRedisConfig:
if not org_config:
raise ConfigException("""There is no organization config available. Please check your configuration.'""")
else:
user_config[self.QUOTA_KEY] = org_config[self.QUOTA_KEY]
user_config[self.ISOLINES_QUOTA_KEY] = org_config[self.ISOLINES_QUOTA_KEY]
if self.QUOTA_KEY in org_config:
user_config[self.QUOTA_KEY] = org_config[self.QUOTA_KEY]
if self.ISOLINES_QUOTA_KEY in org_config:
user_config[self.ISOLINES_QUOTA_KEY] = org_config[self.ISOLINES_QUOTA_KEY]
if self.ROUTING_QUOTA_KEY in org_config:
user_config[self.ROUTING_QUOTA_KEY] = org_config[self.ROUTING_QUOTA_KEY]
if self.OBS_SNAPSHOT_QUOTA_KEY in org_config:
user_config[self.OBS_SNAPSHOT_QUOTA_KEY] = org_config[self.OBS_SNAPSHOT_QUOTA_KEY]
if self.OBS_GENERAL_QUOTA_KEY in org_config:

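The switch from unconditional assignments to per-key presence checks means organization settings override only the quotas they actually define, which is what lets user-level values (such as the new mapzen_routing_quota) survive. A small sketch of the merge, with invented dictionaries:

org_config = {'geocoding_quota': '150000'}
user_config = {'geocoding_quota': '1000',
               'here_isolines_quota': '100',
               'mapzen_routing_quota': '500'}

for key in ('geocoding_quota', 'here_isolines_quota', 'mapzen_routing_quota',
            'obs_snapshot_quota', 'obs_general_quota'):
    if key in org_config:
        user_config[key] = org_config[key]   # the org value wins only when it exists

# geocoding_quota is overridden; the per-user routing quota is preserved
print(user_config['mapzen_routing_quota'])   # '500'
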
@@ -1,15 +1,130 @@
|
||||
from datetime import datetime
|
||||
import abc
|
||||
import json
|
||||
import re
|
||||
import time
|
||||
import uuid
|
||||
import plpy
|
||||
from datetime import datetime
|
||||
from contextlib import contextmanager
|
||||
from urlparse import urlparse
|
||||
|
||||
|
||||
class MetricsLoggerFactory:
|
||||
@contextmanager
|
||||
def metrics(function, service_config, logger=None):
|
||||
try:
|
||||
start_time = time.time()
|
||||
yield
|
||||
finally:
|
||||
end_time = time.time()
|
||||
MetricsDataGatherer.add('uuid', str(uuid.uuid1()))
|
||||
MetricsDataGatherer.add('function_name', function)
|
||||
MetricsDataGatherer.add('function_execution_time', (end_time - start_time))
|
||||
metrics_logger = MetricsServiceLoggerFactory.build(service_config,
|
||||
logger)
|
||||
if metrics_logger:
|
||||
data = MetricsDataGatherer.get()
|
||||
metrics_logger.log(data)
|
||||
MetricsDataGatherer.clean()
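For context, here is a minimal usage sketch of the `metrics` context manager above. It assumes `metrics` is importable from `cartodb_services.metrics`; the wrapped function and the two helpers are hypothetical placeholders, not part of this diff.

```python
# Hypothetical sketch: wrapping a PL/Python function body with the metrics
# context manager defined above.
from cartodb_services.metrics import metrics

def cdb_geocode_street_point(username, orgname, searchtext):
    service_config = build_geocoder_config(username, orgname)  # assumed helper
    logger = build_logger(service_config)                       # assumed helper
    with metrics('cdb_geocode_street_point', service_config, logger):
        return run_street_geocoding(searchtext)                 # assumed helper
```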
|
||||
|
||||
|
||||
class Traceable:
|
||||
"""
|
||||
Module to add metrics traceability, for example to get response object
|
||||
in order to add to the metrics dump
|
||||
"""
|
||||
def add_response_data(self, response, logger=None):
|
||||
try:
|
||||
response_data = {}
|
||||
response_data['type'] = "request"
|
||||
response_data['date'] = datetime.now().isoformat()
|
||||
response_data['elapsed_time'] = response.elapsed.total_seconds()
|
||||
response_data['code'] = response.status_code
|
||||
response_data['message'] = response.reason
|
||||
response_data['url'] = self._parse_response_url(response.url)
|
||||
stored_data = MetricsDataGatherer.get_element('response')
|
||||
if stored_data:
|
||||
stored_data.append(response_data)
|
||||
else:
|
||||
MetricsDataGatherer.add('response', [response_data])
|
||||
except BaseException as e:
|
||||
# Don't stop the job because of an error while processing the response
|
||||
if logger:
|
||||
logger.error("Error trying to process response metricd data",
|
||||
exception=e)
|
||||
|
||||
def _parse_response_url(self, url):
|
||||
u = urlparse(url)
|
||||
return "{0}://{1}{2}".format(u.scheme, u.netloc, u.path)
|
||||
|
||||
|
||||
class MetricsDataGatherer:
|
||||
"""
|
||||
Metrics gatherer used as a singleton. The intend is to use it as a global
|
||||
storage for the metrics along the function request.
|
||||
"""
|
||||
|
||||
class __MetricsDataGatherer:
|
||||
def __init__(self):
|
||||
self.data = {}
|
||||
|
||||
def add(self, key, value):
|
||||
self.data[key] = value
|
||||
|
||||
def get(self):
|
||||
return self.data
|
||||
|
||||
def get_element(self, key):
|
||||
return self.data.get(key, None)
|
||||
|
||||
def clean(self):
|
||||
self.data = {}
|
||||
|
||||
|
||||
# We use pgbouncer, so we need to keep one instance per transaction id
|
||||
__instance = {}
|
||||
|
||||
@classmethod
|
||||
def build(self, service_config):
|
||||
if re.match('geocoder_*', service_config.service_type):
|
||||
return MetricsGeocoderLogger(service_config)
|
||||
def add(self, key, value):
|
||||
MetricsDataGatherer.instance().add(key, value)
|
||||
|
||||
@classmethod
|
||||
def get(self):
|
||||
return MetricsDataGatherer.instance().get()
|
||||
|
||||
@classmethod
|
||||
def get_element(self, key):
|
||||
return MetricsDataGatherer.instance().get_element(key)
|
||||
|
||||
@classmethod
|
||||
def clean(self):
|
||||
MetricsDataGatherer.instance().clean()
|
||||
|
||||
@classmethod
|
||||
def instance(self):
|
||||
txid = MetricsDataGatherer._get_txid()
|
||||
if txid not in MetricsDataGatherer.__instance:
|
||||
MetricsDataGatherer.__instance[txid] = MetricsDataGatherer.__MetricsDataGatherer()
|
||||
|
||||
return MetricsDataGatherer.__instance[txid]
|
||||
|
||||
@classmethod
|
||||
def _get_txid(self):
|
||||
result = plpy.execute('select txid_current() as txid')
|
||||
return result[0]['txid']
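Because connections are pooled through pgbouncer, the gatherer keeps one storage dict per PostgreSQL transaction id. Within a single transaction the class methods behave roughly as in this illustrative snippet (it needs a plpy environment to actually run):

```python
# Illustrative only: inside one PL/Python call (one transaction)
MetricsDataGatherer.add('function_name', 'cdb_route_point_to_point')
MetricsDataGatherer.add('successful_rows', 1)
assert MetricsDataGatherer.get_element('successful_rows') == 1
MetricsDataGatherer.clean()   # wipe the storage for this transaction id
```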
|
||||
|
||||
|
||||
class MetricsServiceLoggerFactory:
|
||||
|
||||
@classmethod
|
||||
def build(self, service_config, logger=None):
|
||||
if re.search('^geocoder_*', service_config.service_type):
|
||||
return MetricsGeocoderLogger(service_config, logger)
|
||||
elif re.search('^routing_*', service_config.service_type):
|
||||
return MetricsGenericLogger(service_config, logger)
|
||||
elif re.search('_isolines$', service_config.service_type):
|
||||
return MetricsIsolinesLogger(service_config, logger)
|
||||
elif re.search('^obs_*', service_config.service_type):
|
||||
return MetricsGenericLogger(service_config, logger)
|
||||
else:
|
||||
return None
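In short, the factory dispatches on `service_type`; a hedged summary of the mapping implied by the regexes above (the service type strings are taken from configs elsewhere in this diff):

```python
# Summary of the dispatch above; service_config is any object with a
# service_type attribute (e.g. the configs defined elsewhere in this diff).
#   geocoder_* (e.g. 'geocoder_mapzen')  -> MetricsGeocoderLogger
#   routing_*  (e.g. 'routing_mapzen')   -> MetricsGenericLogger
#   *_isolines (e.g. 'here_isolines')    -> MetricsIsolinesLogger
#   obs_*                                -> MetricsGenericLogger
#   anything else                        -> None (metrics logging disabled)
metrics_logger = MetricsServiceLoggerFactory.build(service_config, logger)
if metrics_logger:
    metrics_logger.log(MetricsDataGatherer.get())
```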
|
||||
|
||||
@@ -17,58 +132,119 @@ class MetricsLoggerFactory:
|
||||
class MetricsLogger(object):
|
||||
__metaclass__ = abc.ABCMeta
|
||||
|
||||
def __init__(self, file_path):
|
||||
self._file_path = file_path
|
||||
def __init__(self, service_config, logger):
|
||||
self._service_config = service_config
|
||||
self._logger = logger
|
||||
|
||||
def dump_to_file(self, data):
|
||||
with open(self._file_path, 'a') as logfile:
|
||||
json.dump(data, logfile)
|
||||
logfile.write('\n')
|
||||
try:
|
||||
log_path = self.service_config.metrics_log_path
|
||||
response_data = data.pop('response', [])
|
||||
uuid = data.get('uuid')
|
||||
if log_path:
|
||||
with open(log_path, 'a') as logfile:
|
||||
self._dump_response_to_file(uuid, response_data, logfile)
|
||||
json.dump(data, logfile)
|
||||
logfile.write('\n')
|
||||
except BaseException as e:
|
||||
self._logger("Error dumping metrics to file {0}".format(log_path),
|
||||
exception=e)
|
||||
|
||||
def collect_data(self, data):
|
||||
return {
|
||||
"uuid": data.get('uuid', uuid.uuid1()),
|
||||
"type": 'function',
|
||||
"function_name": data.get('function_name', None),
|
||||
"function_execution_time": data.get('function_execution_time',
|
||||
None),
|
||||
"service": self._service_config.service_type,
|
||||
"processable_rows": 1,
|
||||
"success": data.get('success', False),
|
||||
"successful_rows": data.get('successful_rows', 0),
|
||||
"failed_rows": data.get('failed_rows', 0),
|
||||
"empty_rows": data.get('empty_rows', 0),
|
||||
"created_at": datetime.now().isoformat(),
|
||||
"provider": self._service_config.provider,
|
||||
"username": self._service_config.username,
|
||||
"organization": self._service_config.organization,
|
||||
"response": data.get('response', [])
|
||||
}
|
||||
|
||||
def _dump_response_to_file(self, uuid, response_data, log_file):
|
||||
for r in response_data:
|
||||
r['uuid'] = uuid
|
||||
json.dump(r, log_file)
|
||||
log_file.write('\n')
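For reference, each call writes one JSON object per line to the log file. A hypothetical, abridged function entry as assembled by `collect_data` might look like the following; all values are made up:

```python
# Hypothetical, abridged example of a dumped metrics entry (values made up):
example_entry = {
    'uuid': 'a2f5c6de-0000-0000-0000-000000000000',
    'type': 'function',
    'function_name': 'cdb_geocode_street_point',
    'function_execution_time': 0.42,
    'service': 'geocoder_mapzen',
    'processable_rows': 1,
    'success': True,
    'successful_rows': 1,
    'failed_rows': 0,
    'empty_rows': 0,
    'provider': 'mapzen',
    'username': 'my_test_user',
    'organization': None,
}
```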
|
||||
|
||||
@property
|
||||
def service_config(self):
|
||||
return self._service_config
|
||||
|
||||
@abc.abstractproperty
|
||||
def log(self, **data):
|
||||
def log(self, data):
|
||||
raise NotImplementedError('log method must be defined')
|
||||
|
||||
|
||||
class MetricsGeocoderLogger(MetricsLogger):
|
||||
|
||||
def __init__(self, service_config):
|
||||
super(MetricsGeocoderLogger, self).__init__(service_config.log_path)
|
||||
self._service_config = service_config
|
||||
def __init__(self, service_config, logger):
|
||||
super(MetricsGeocoderLogger, self).__init__(service_config, logger)
|
||||
|
||||
def log(self, **data):
|
||||
dump_data = self._dump_data(**data)
|
||||
def log(self, data):
|
||||
dump_data = self.collect_data(data)
|
||||
self.dump_to_file(dump_data)
|
||||
|
||||
def _dump_data(self, **data):
|
||||
if data['success']:
|
||||
cost = self._service_config.cost_per_hit
|
||||
failed_rows = 0
|
||||
successful_rows = 1
|
||||
def collect_data(self, data):
|
||||
dump_data = super(MetricsGeocoderLogger, self).collect_data(data)
|
||||
if data.get('success', False):
|
||||
cost = self.service_config.cost_per_hit
|
||||
else:
|
||||
cost = 0
|
||||
failed_rows = 1
|
||||
successful_rows = 0
|
||||
|
||||
if self._service_config.is_high_resolution:
|
||||
if self.service_config.is_high_resolution:
|
||||
kind = 'high-resolution'
|
||||
else:
|
||||
kind = 'internal'
|
||||
|
||||
return {
|
||||
dump_data.update({
|
||||
"batched": False,
|
||||
"cache_hits": 0, # Always 0 because no cache involved
|
||||
# https://github.com/CartoDB/cartodb/blob/master/app/models/geocoding.rb#L208-L211
|
||||
"cost": cost,
|
||||
"created_at": datetime.now().isoformat(),
|
||||
"failed_rows": failed_rows,
|
||||
"geocoder_type": self._service_config.service_type,
|
||||
"geocoder_type": self.service_config.service_type,
|
||||
"kind": kind,
|
||||
"processable_rows": 1,
|
||||
"processed_rows": successful_rows,
|
||||
"real_rows": successful_rows,
|
||||
"success": data['success'],
|
||||
"successful_rows": successful_rows,
|
||||
"username": self._service_config.username,
|
||||
"organization": self._service_config.organization
|
||||
}
|
||||
"processed_rows": data.get('successful_rows', 0),
|
||||
"real_rows": data.get('successful_rows', 0),
|
||||
})
|
||||
|
||||
return dump_data
|
||||
|
||||
|
||||
class MetricsGenericLogger(MetricsLogger):
|
||||
|
||||
def __init__(self, service_config, logger):
|
||||
super(MetricsGenericLogger, self).__init__(service_config, logger)
|
||||
|
||||
def log(self, data):
|
||||
dump_data = self.collect_data(data)
|
||||
self.dump_to_file(dump_data)
|
||||
|
||||
def collect_data(self, data):
|
||||
return super(MetricsGenericLogger, self).collect_data(data)
|
||||
|
||||
|
||||
class MetricsIsolinesLogger(MetricsLogger):
|
||||
|
||||
def __init__(self, service_config, logger):
|
||||
super(MetricsIsolinesLogger, self).__init__(service_config, logger)
|
||||
|
||||
def log(self, data):
|
||||
dump_data = self.collect_data(data)
|
||||
self.dump_to_file(dump_data)
|
||||
|
||||
def collect_data(self, data):
|
||||
dump_data = super(MetricsIsolinesLogger, self).collect_data(data)
|
||||
dump_data.update({
|
||||
"isolines_generated": data.get('isolines_generated', 0)
|
||||
})
|
||||
return dump_data
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
from user import UserMetricsService
|
||||
from log import MetricsLoggerFactory
|
||||
from log import MetricsDataGatherer
|
||||
from datetime import date
|
||||
import re
|
||||
|
||||
@@ -14,7 +14,6 @@ class QuotaService:
|
||||
redis_connection)
|
||||
self._user_service = UserMetricsService(self._user_service_config,
|
||||
redis_connection)
|
||||
self._metrics_logger = MetricsLoggerFactory.build(user_service_config)
|
||||
|
||||
def check_user_quota(self):
|
||||
return self._quota_checker.check()
|
||||
@@ -46,13 +45,19 @@ class QuotaService:
|
||||
self._user_service.increment_service_use(
|
||||
self._user_service_config.service_type, "isolines_generated",
|
||||
amount=amount)
|
||||
MetricsDataGatherer.add('isolines_generated', amount)
|
||||
|
||||
def _log_service_process(self, event):
|
||||
if self._metrics_logger:
|
||||
if event is 'success' or event is 'empty':
|
||||
self._metrics_logger.log(success=True)
|
||||
elif event is 'empty':
|
||||
self._metrics_logger.log(success=False)
|
||||
if event is 'success':
|
||||
MetricsDataGatherer.add('success', True)
|
||||
MetricsDataGatherer.add('successful_rows', 1)
|
||||
elif event is 'empty':
|
||||
MetricsDataGatherer.add('success', True)
|
||||
MetricsDataGatherer.add('successful_rows', 1)
|
||||
MetricsDataGatherer.add('empty_rows', 1)
|
||||
elif event is 'fail':
|
||||
MetricsDataGatherer.add('success', False)
|
||||
MetricsDataGatherer.add('failed_rows', 1)
|
||||
|
||||
|
||||
class QuotaChecker:
|
||||
@@ -93,7 +98,7 @@ class QuotaChecker:
|
||||
current_used = self._user_service.used_quota(service_type, today)
|
||||
soft_geocoding_limit = self._user_service_config.soft_geocoding_limit
|
||||
|
||||
if soft_geocoding_limit or (user_quota > 0 and current_used <= user_quota):
|
||||
if soft_geocoding_limit or (user_quota > 0 and current_used < user_quota):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
@@ -105,7 +110,7 @@ class QuotaChecker:
|
||||
current_used = self._user_service.used_quota(service_type, today)
|
||||
soft_isolines_limit = self._user_service_config.soft_isolines_limit
|
||||
|
||||
if soft_isolines_limit or (user_quota > 0 and current_used <= user_quota):
|
||||
if soft_isolines_limit or (user_quota > 0 and current_used < user_quota):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
@@ -115,8 +120,9 @@ class QuotaChecker:
|
||||
today = date.today()
|
||||
service_type = self._user_service_config.service_type
|
||||
current_used = self._user_service.used_quota(service_type, today)
|
||||
soft_limit = self._user_service_config.soft_limit
|
||||
|
||||
if (user_quota > 0 and current_used <= user_quota):
|
||||
if soft_limit or (user_quota > 0 and current_used < user_quota):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
@@ -128,7 +134,7 @@ class QuotaChecker:
|
||||
service_type = self._user_service_config.service_type
|
||||
current_used = self._user_service.used_quota(service_type, today)
|
||||
|
||||
if soft_limit or (user_quota > 0 and current_used <= user_quota):
|
||||
if soft_limit or (user_quota > 0 and current_used < user_quota):
|
||||
return True
|
||||
else:
|
||||
return False
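The switch from `<=` to `<` in the checks above fixes an off-by-one: `current_used` counts requests already spent, so with a quota of N the request that would become use N+1 must be rejected. A tiny worked example with hypothetical numbers:

```python
# Hypothetical numbers illustrating the boundary fix:
monthly_quota, current_used = 1000, 1000           # quota fully consumed
assert (current_used <= monthly_quota) is True     # old check: request 1001 would be allowed
assert (current_used < monthly_quota) is False     # new check: request 1001 is rejected
```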
|
||||
|
||||
@@ -8,6 +8,7 @@ class UserMetricsService:
|
||||
SERVICE_GEOCODER_NOKIA = 'geocoder_here'
|
||||
SERVICE_GEOCODER_CACHE = 'geocoder_cache'
|
||||
SERVICE_HERE_ISOLINES = 'here_isolines'
|
||||
SERVICE_MAPZEN_ROUTING = 'routing_mapzen'
|
||||
DAY_OF_MONTH_ZERO_PADDED = '%d'
|
||||
|
||||
def __init__(self, user_geocoder_config, redis_connection):
|
||||
@@ -19,6 +20,8 @@ class UserMetricsService:
|
||||
def used_quota(self, service_type, date):
|
||||
if service_type == self.SERVICE_HERE_ISOLINES:
|
||||
return self.__used_isolines_quota(service_type, date)
|
||||
elif service_type == self.SERVICE_MAPZEN_ROUTING:
|
||||
return self.__used_routing_quota(service_type, date)
|
||||
else:
|
||||
return self.__used_geocoding_quota(service_type, date)
|
||||
|
||||
|
||||
@@ -0,0 +1,24 @@
from cartodb_services.refactor.storage.redis_connection_config import RedisMetadataConnectionConfigBuilder
from cartodb_services.refactor.storage.redis_connection import RedisConnectionBuilder
from cartodb_services.refactor.storage.redis_config import RedisOrgConfigStorageBuilder

class OrgConfigBackendFactory(object):
    """
    This class abstracts the creation of an org configuration backend. It will return
    an implementation of the ConfigBackendInterface appropriate to the org, depending
    on the environment.
    """

    def __init__(self, orgname, environment, server_config_backend):
        self._orgname = orgname
        self._environment = environment
        self._server_config_backend = server_config_backend

    def get(self):
        if self._environment.is_onpremise:
            org_config_backend = self._server_config_backend
        else:
            redis_metadata_connection_config = RedisMetadataConnectionConfigBuilder(self._server_config_backend).get()
            redis_metadata_connection = RedisConnectionBuilder(redis_metadata_connection_config).get()
            org_config_backend = RedisOrgConfigStorageBuilder(redis_metadata_connection, self._orgname).get()
        return org_config_backend
@@ -0,0 +1,17 @@
from cartodb_services.refactor.tools.redis_mock import RedisConnectionMock
from cartodb_services.refactor.storage.redis_connection_config import RedisMetricsConnectionConfigBuilder
from cartodb_services.refactor.storage.redis_connection import RedisConnectionBuilder

class RedisMetricsConnectionFactory(object):

    def __init__(self, environment, server_config_storage):
        self._environment = environment
        self._server_config_storage = server_config_storage

    def get(self):
        if self._environment.is_onpremise:
            redis_metrics_connection = RedisConnectionMock()
        else:
            redis_metrics_connection_config = RedisMetricsConnectionConfigBuilder(self._server_config_storage).get()
            redis_metrics_connection = RedisConnectionBuilder(redis_metrics_connection_config).get()
        return redis_metrics_connection
@@ -0,0 +1,13 @@
from cartodb_services.refactor.storage.server_config import InDbServerConfigStorage


class ServerConfigBackendFactory(object):
    """
    This class creates a backend to retrieve server configuration (implementing the ConfigBackendInterface).

    At the moment it always returns an InDbServerConfigStorage, but nothing prevents changing the
    implementation to something that reads from a file, memory or anything else. It is mostly there to keep
    the layers separated.
    """
    def get(self):
        return InDbServerConfigStorage()
@@ -0,0 +1,24 @@
from cartodb_services.refactor.storage.redis_connection_config import RedisMetadataConnectionConfigBuilder
from cartodb_services.refactor.storage.redis_connection import RedisConnectionBuilder
from cartodb_services.refactor.storage.redis_config import RedisUserConfigStorageBuilder

class UserConfigBackendFactory(object):
    """
    This class abstracts the creation of a user configuration backend. It will return
    an implementation of the ConfigBackendInterface appropriate to the user, depending
    on the environment.
    """

    def __init__(self, username, environment, server_config_backend):
        self._username = username
        self._environment = environment
        self._server_config_backend = server_config_backend

    def get(self):
        if self._environment.is_onpremise:
            user_config_backend = self._server_config_backend
        else:
            redis_metadata_connection_config = RedisMetadataConnectionConfigBuilder(self._server_config_backend).get()
            redis_metadata_connection = RedisConnectionBuilder(redis_metadata_connection_config).get()
            user_config_backend = RedisUserConfigStorageBuilder(redis_metadata_connection, self._username).get()
        return user_config_backend
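Taken together, the factories in this new refactor backend package compose roughly as in the sketch below. The class names come from the diff, but the diff does not show their module paths, so treat the wiring as an assumption rather than the definitive entry point.

```python
# Sketch only: module paths for the factories are assumed, not shown in this diff.
server_config_backend = ServerConfigBackendFactory().get()
environment = ServerEnvironmentBuilder(server_config_backend).get()

user_config_backend = UserConfigBackendFactory('john', environment,
                                               server_config_backend).get()
org_config_backend = OrgConfigBackendFactory('smith', environment,
                                             server_config_backend).get()
redis_metrics_connection = RedisMetricsConnectionFactory(environment,
                                                         server_config_backend).get()
```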
@@ -0,0 +1,2 @@
class ConfigException(Exception):
    pass
@@ -0,0 +1,57 @@
|
||||
class ServerEnvironment(object):
|
||||
|
||||
DEVELOPMENT = 'development'
|
||||
STAGING = 'staging'
|
||||
PRODUCTION = 'production'
|
||||
ONPREMISE = 'onpremise'
|
||||
|
||||
VALID_ENVIRONMENTS = [
|
||||
DEVELOPMENT,
|
||||
STAGING,
|
||||
PRODUCTION,
|
||||
ONPREMISE
|
||||
]
|
||||
|
||||
def __init__(self, environment_str):
|
||||
assert environment_str in self.VALID_ENVIRONMENTS
|
||||
self._environment_str = environment_str
|
||||
|
||||
def __str__(self):
|
||||
return self._environment_str
|
||||
|
||||
@property
|
||||
def is_development(self):
|
||||
return self._environment_str == self.DEVELOPMENT
|
||||
|
||||
@property
|
||||
def is_staging(self):
|
||||
return self._environment_str == self.STAGING
|
||||
|
||||
@property
|
||||
def is_production(self):
|
||||
return self._environment_str == self.PRODUCTION
|
||||
|
||||
@property
|
||||
def is_onpremise(self):
|
||||
return self._environment_str == self.ONPREMISE
|
||||
|
||||
def __eq__(self, other):
|
||||
return self._environment_str == other._environment_str
|
||||
|
||||
|
||||
class ServerEnvironmentBuilder(object):
|
||||
|
||||
DEFAULT_ENVIRONMENT = ServerEnvironment.DEVELOPMENT
|
||||
|
||||
def __init__(self, server_config_storage):
|
||||
self._server_config_storage = server_config_storage
|
||||
|
||||
def get(self):
|
||||
server_config = self._server_config_storage.get('server_conf')
|
||||
|
||||
if not server_config or 'environment' not in server_config:
|
||||
environment_str = self.DEFAULT_ENVIRONMENT
|
||||
else:
|
||||
environment_str = server_config['environment']
|
||||
|
||||
return ServerEnvironment(environment_str)
|
||||
@@ -0,0 +1,11 @@
import abc

class ConfigBackendInterface(object):
    """This is an interface that all config backends must abide by"""

    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def get(self, key):
        """Return a value for the supplied key from some storage"""
        pass
@@ -0,0 +1,112 @@
|
||||
from dateutil.parser import parse as date_parse
|
||||
|
||||
class MapzenGeocoderConfig(object):
|
||||
"""
|
||||
Value object that represents the configuration needed to operate the mapzen service.
|
||||
"""
|
||||
|
||||
def __init__(self,
|
||||
geocoding_quota,
|
||||
soft_geocoding_limit,
|
||||
period_end_date,
|
||||
cost_per_hit,
|
||||
log_path,
|
||||
mapzen_api_key,
|
||||
username,
|
||||
organization):
|
||||
self._geocoding_quota = geocoding_quota
|
||||
self._soft_geocoding_limit = soft_geocoding_limit
|
||||
self._period_end_date = period_end_date
|
||||
self._cost_per_hit = cost_per_hit
|
||||
self._log_path = log_path
|
||||
self._mapzen_api_key = mapzen_api_key
|
||||
self._username = username
|
||||
self._organization = organization
|
||||
|
||||
# Kind of generic properties. Note which ones are for actually running the
|
||||
# service and which ones are needed for quota stuff.
|
||||
@property
|
||||
def service_type(self):
|
||||
return 'geocoder_mapzen'
|
||||
|
||||
@property
|
||||
def provider(self):
|
||||
return 'mapzen'
|
||||
|
||||
@property
|
||||
def is_high_resolution(self):
|
||||
return True
|
||||
|
||||
@property
|
||||
def geocoding_quota(self):
|
||||
return self._geocoding_quota
|
||||
|
||||
@property
|
||||
def soft_geocoding_limit(self):
|
||||
return self._soft_geocoding_limit
|
||||
|
||||
@property
|
||||
def period_end_date(self):
|
||||
return self._period_end_date
|
||||
|
||||
@property
|
||||
def cost_per_hit(self):
|
||||
return self._cost_per_hit
|
||||
|
||||
# Server config, TODO: locate where this is actually used
|
||||
@property
|
||||
def log_path(self):
|
||||
return self._log_path
|
||||
|
||||
# This is the specific one actually needed to run requests against the remote endpoint
|
||||
@property
|
||||
def mapzen_api_key(self):
|
||||
return self._mapzen_api_key
|
||||
|
||||
# These two identify the user
|
||||
@property
|
||||
def username(self):
|
||||
return self._username
|
||||
@property
|
||||
def organization(self):
|
||||
return self._organization
|
||||
|
||||
# TODO: for BW compat, remove
|
||||
@property
|
||||
def google_geocoder(self):
|
||||
return False
|
||||
|
||||
|
||||
class MapzenGeocoderConfigBuilder(object):
|
||||
|
||||
def __init__(self, server_conf, user_conf, org_conf, username, orgname):
|
||||
self._server_conf = server_conf
|
||||
self._user_conf = user_conf
|
||||
self._org_conf = org_conf
|
||||
self._username = username
|
||||
self._orgname = orgname
|
||||
|
||||
|
||||
def get(self):
|
||||
mapzen_server_conf = self._server_conf.get('mapzen_conf')
|
||||
geocoding_quota = mapzen_server_conf['geocoder']['monthly_quota']
|
||||
mapzen_api_key = mapzen_server_conf['geocoder']['api_key']
|
||||
|
||||
soft_geocoding_limit = self._user_conf.get('soft_geocoding_limit')
|
||||
|
||||
cost_per_hit=0
|
||||
|
||||
period_end_date_str = self._org_conf.get('period_end_date') or self._user_conf.get('period_end_date')
|
||||
period_end_date = date_parse(period_end_date_str)
|
||||
|
||||
logger_conf = self._server_conf.get('logger_conf')
|
||||
log_path = logger_conf['geocoder_log_path']
|
||||
|
||||
return MapzenGeocoderConfig(geocoding_quota,
|
||||
soft_geocoding_limit,
|
||||
period_end_date,
|
||||
cost_per_hit,
|
||||
log_path,
|
||||
mapzen_api_key,
|
||||
self._username,
|
||||
self._orgname)
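A sketch of the inputs the builder above expects, using the in-memory and null storages added later in this diff; the key names come from the code, while the concrete values are made up:

```python
# Sketch with made-up values; key names are the ones read by the builder above.
from cartodb_services.refactor.storage.mem_config import InMemoryConfigStorage
from cartodb_services.refactor.storage.null_config import NullConfigStorage

server_conf = InMemoryConfigStorage({
    'mapzen_conf': {'geocoder': {'monthly_quota': 1500000, 'api_key': 'search-xxxx'}},
    'logger_conf': {'geocoder_log_path': '/var/log/geocodings.log'},
})
user_conf = InMemoryConfigStorage({'soft_geocoding_limit': False,
                                   'period_end_date': '2016-10-10'})
org_conf = NullConfigStorage()

config = MapzenGeocoderConfigBuilder(server_conf, user_conf, org_conf,
                                     'my_test_user', None).get()
assert config.geocoding_quota == 1500000
assert config.service_type == 'geocoder_mapzen'
```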
|
||||
@@ -0,0 +1,12 @@
from ..core.interfaces import ConfigBackendInterface

class InMemoryConfigStorage(ConfigBackendInterface):

    def __init__(self, config_hash={}):
        self._config_hash = config_hash

    def get(self, key):
        try:
            return self._config_hash[key]
        except KeyError:
            return None
@@ -0,0 +1,6 @@
from ..core.interfaces import ConfigBackendInterface

class NullConfigStorage(ConfigBackendInterface):

    def get(self, key):
        return None
@@ -0,0 +1,36 @@
|
||||
from ..core.interfaces import ConfigBackendInterface
|
||||
from null_config import NullConfigStorage
|
||||
|
||||
|
||||
class RedisConfigStorage(ConfigBackendInterface):
|
||||
|
||||
def __init__(self, connection, config_key):
|
||||
self._connection = connection
|
||||
self._config_key = config_key
|
||||
self._data = None
|
||||
|
||||
def get(self, key):
|
||||
if not self._data:
|
||||
self._data = self._connection.hgetall(self._config_key)
|
||||
return self._data[key]
|
||||
|
||||
|
||||
class RedisUserConfigStorageBuilder(object):
|
||||
def __init__(self, redis_connection, username):
|
||||
self._redis_connection = redis_connection
|
||||
self._username = username
|
||||
|
||||
def get(self):
|
||||
return RedisConfigStorage(self._redis_connection, 'rails:users:{0}'.format(self._username))
|
||||
|
||||
|
||||
class RedisOrgConfigStorageBuilder(object):
|
||||
def __init__(self, redis_connection, orgname):
|
||||
self._redis_connection = redis_connection
|
||||
self._orgname = orgname
|
||||
|
||||
def get(self):
|
||||
if self._orgname:
|
||||
return RedisConfigStorage(self._redis_connection, 'rails:orgs:{0}'.format(self._orgname))
|
||||
else:
|
||||
return NullConfigStorage()
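A small usage sketch of the storage builders above; MockRedis is the same fake used by the tests in this diff, and the field names and values here are only illustrative:

```python
# Sketch only; field names/values are illustrative.
from mockredis import MockRedis

conn = MockRedis()
conn.hset('rails:users:john', 'soft_geocoding_limit', 'true')

user_config = RedisUserConfigStorageBuilder(conn, 'john').get()
assert user_config.get('soft_geocoding_limit') == 'true'    # Redis returns strings

org_config = RedisOrgConfigStorageBuilder(conn, None).get()  # no org name
assert isinstance(org_config, NullConfigStorage)
assert org_config.get('geocoding_quota') is None
```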
|
||||
@@ -0,0 +1,22 @@
from redis.sentinel import Sentinel
from redis import StrictRedis

class RedisConnectionBuilder():

    def __init__(self, connection_config):
        self._config = connection_config

    def get(self):
        if self._config.sentinel_id:
            sentinel = Sentinel([(self._config.host,
                                  self._config.port)],
                                socket_timeout=self._config.timeout)
            return sentinel.master_for(self._config.sentinel_id,
                                       socket_timeout=self._config.timeout,
                                       db=self._config.db,
                                       retry_on_timeout=True)
        else:
            conn = StrictRedis(host=self._config.host, port=self._config.port,
                               db=self._config.db, retry_on_timeout=True,
                               socket_timeout=self._config.timeout)
            return conn
@@ -0,0 +1,80 @@
|
||||
from cartodb_services.refactor.config.exceptions import ConfigException
|
||||
from abc import ABCMeta, abstractmethod
|
||||
|
||||
|
||||
class RedisConnectionConfig(object):
|
||||
"""
|
||||
This represents a value object to contain configuration needed to set up
|
||||
a connection to a redis server.
|
||||
"""
|
||||
|
||||
def __init__(self, host, port, timeout, db, sentinel_id):
|
||||
self._host = host
|
||||
self._port = port
|
||||
self._timeout = timeout
|
||||
self._db = db
|
||||
self._sentinel_id = sentinel_id
|
||||
|
||||
@property
|
||||
def host(self):
|
||||
return self._host
|
||||
|
||||
@property
|
||||
def port(self):
|
||||
return self._port
|
||||
|
||||
@property
|
||||
def timeout(self):
|
||||
return self._timeout
|
||||
|
||||
@property
|
||||
def db(self):
|
||||
return self._db
|
||||
|
||||
@property
|
||||
def sentinel_id(self):
|
||||
return self._sentinel_id
|
||||
|
||||
|
||||
class RedisConnectionConfigBuilder(object):
|
||||
|
||||
__metaclass__ = ABCMeta
|
||||
|
||||
DEFAULT_USER_DB = 5
|
||||
DEFAULT_TIMEOUT = 1.5 # seconds
|
||||
|
||||
@abstractmethod
|
||||
def __init__(self, server_config_storage, config_key):
|
||||
self._server_config_storage = server_config_storage
|
||||
self._config_key = config_key
|
||||
|
||||
def get(self):
|
||||
conf = self._server_config_storage.get(self._config_key)
|
||||
if conf is None:
|
||||
raise ConfigException("There is no redis configuration defined")
|
||||
|
||||
host = conf['redis_host']
|
||||
port = conf['redis_port']
|
||||
timeout = conf.get('timeout', self.DEFAULT_TIMEOUT) or self.DEFAULT_TIMEOUT
|
||||
db = conf.get('redis_db', self.DEFAULT_USER_DB) or self.DEFAULT_USER_DB
|
||||
sentinel_id = conf.get('sentinel_master_id', None)
|
||||
|
||||
return RedisConnectionConfig(host, port, timeout, db, sentinel_id)
|
||||
|
||||
|
||||
class RedisMetadataConnectionConfigBuilder(RedisConnectionConfigBuilder):
|
||||
|
||||
def __init__(self, server_config_storage):
|
||||
super(RedisMetadataConnectionConfigBuilder, self).__init__(
|
||||
server_config_storage,
|
||||
'redis_metadata_config'
|
||||
)
|
||||
|
||||
|
||||
class RedisMetricsConnectionConfigBuilder(RedisConnectionConfigBuilder):
|
||||
|
||||
def __init__(self, server_config_storage):
|
||||
super(RedisMetricsConnectionConfigBuilder, self).__init__(
|
||||
server_config_storage,
|
||||
'redis_metrics_config'
|
||||
)
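An illustrative server entry consumed by these builders, using the in-memory storage added later in this diff; the key names come from the code above, while the host/port/db values are made up:

```python
# Illustrative only; values are made up.
from cartodb_services.refactor.storage.mem_config import InMemoryConfigStorage

server_conf = InMemoryConfigStorage({
    'redis_metadata_config': {'redis_host': '127.0.0.1', 'redis_port': 6379,
                              'redis_db': 5, 'sentinel_master_id': None},
})
conn_config = RedisMetadataConnectionConfigBuilder(server_conf).get()
connection = RedisConnectionBuilder(conn_config).get()  # plain StrictRedis, no sentinel
```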
|
||||
@@ -0,0 +1,14 @@
import json
import cartodb_services
from ..core.interfaces import ConfigBackendInterface

class InDbServerConfigStorage(ConfigBackendInterface):

    def get(self, key):
        sql = "SELECT cdb_dataservices_server.cdb_conf_getconf('{0}') as conf".format(key)
        rows = cartodb_services.plpy.execute(sql, 1)
        json_output = rows[0]['conf']
        if json_output:
            return json.loads(json_output)
        else:
            return None
@@ -0,0 +1,52 @@
|
||||
from cartodb_services.refactor.config.exceptions import ConfigException
|
||||
|
||||
class LoggerConfig(object):
|
||||
|
||||
"""This class is a value object needed to setup a Logger"""
|
||||
|
||||
def __init__(self, server_environment, rollbar_api_key, log_file_path, min_log_level):
|
||||
self._server_environment = server_environment
|
||||
self._rollbar_api_key = rollbar_api_key
|
||||
self._log_file_path = log_file_path
|
||||
self._min_log_level = min_log_level
|
||||
|
||||
@property
|
||||
def environment(self):
|
||||
return self._server_environment
|
||||
|
||||
@property
|
||||
def rollbar_api_key(self):
|
||||
return self._rollbar_api_key
|
||||
|
||||
@property
|
||||
def log_file_path(self):
|
||||
return self._log_file_path
|
||||
|
||||
@property
|
||||
def min_log_level(self):
|
||||
return self._min_log_level
|
||||
|
||||
# TODO this needs tests
|
||||
class LoggerConfigBuilder(object):
|
||||
|
||||
def __init__(self, environment, server_config_storage):
|
||||
self._server_environment = environment
|
||||
self._server_config_storage = server_config_storage
|
||||
|
||||
def get(self):
|
||||
logger_conf = self._server_config_storage.get('logger_conf')
|
||||
if not logger_conf:
|
||||
raise ConfigException('Logger configuration missing')
|
||||
|
||||
rollbar_api_key = self._get_value_or_none(logger_conf, 'rollbar_api_key')
|
||||
log_file_path = self._get_value_or_none(logger_conf, 'log_file_path')
|
||||
min_log_level = self._get_value_or_none(logger_conf, 'min_log_level') or 'warning'
|
||||
|
||||
logger_config = LoggerConfig(str(self._server_environment), rollbar_api_key, log_file_path, min_log_level)
|
||||
return logger_config
|
||||
|
||||
def _get_value_or_none(self, logger_conf, key):
|
||||
value = None
|
||||
if key in logger_conf:
|
||||
value = logger_conf[key]
|
||||
return value
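An illustrative `logger_conf` entry for the builder above, again using the in-memory storage; the key names come from the code, the values are made up, and the import path of `LoggerConfigBuilder` itself is assumed to be the module shown above:

```python
# Illustrative only; values are made up.
from cartodb_services.refactor.storage.mem_config import InMemoryConfigStorage
from cartodb_services.refactor.core.environment import ServerEnvironmentBuilder

server_conf = InMemoryConfigStorage({
    'server_conf': {'environment': 'production'},
    'logger_conf': {'rollbar_api_key': 'xxxx',
                    'log_file_path': '/var/log/dataservices.log',
                    'min_log_level': 'info'},
})
environment = ServerEnvironmentBuilder(server_conf).get()
logger_config = LoggerConfigBuilder(environment, server_conf).get()
assert logger_config.min_log_level == 'info'
assert str(logger_config.environment) == 'production'
```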
|
||||
@@ -0,0 +1,8 @@
class RedisConnectionMock(object):
    """ Simple class to mock a dummy behaviour for Redis related functions """

    def zscore(self, redis_prefix, day):
        pass

    def zincrby(self, redis_prefix, day, amount):
        pass
@@ -1,4 +1,3 @@
|
||||
import plpy
|
||||
import rollbar
|
||||
import logging
|
||||
import json
|
||||
@@ -6,7 +5,14 @@ import traceback
|
||||
import sys
|
||||
# Monkey patch: the plpython sys module doesn't have argv and the rollbar
# package uses it
|
||||
sys.__dict__['argv'] = []
|
||||
if 'argv' not in sys.__dict__:
|
||||
sys.__dict__['argv'] = []
|
||||
|
||||
# plpy can only be imported when this is called from PL/Python
|
||||
try:
|
||||
import plpy
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
|
||||
class Logger:
|
||||
@@ -30,30 +36,28 @@ class Logger:
|
||||
return
|
||||
self._send_to_rollbar('debug', text, exception, data)
|
||||
self._send_to_log_file('debug', text, exception, data)
|
||||
plpy.debug(text)
|
||||
self._send_to_plpy('debug', text)
|
||||
|
||||
def info(self, text, exception=None, data={}):
|
||||
if not self._check_min_level('info'):
|
||||
return
|
||||
self._send_to_rollbar('info', text, exception, data)
|
||||
self._send_to_log_file('info', text, exception, data)
|
||||
plpy.info(text)
|
||||
self._send_to_plpy('info', text)
|
||||
|
||||
def warning(self, text, exception=None, data={}):
|
||||
if not self._check_min_level('warning'):
|
||||
return
|
||||
self._send_to_rollbar('warning', text, exception, data)
|
||||
self._send_to_log_file('warning', text, exception, data)
|
||||
plpy.warning(text)
|
||||
self._send_to_plpy('warning', text)
|
||||
|
||||
def error(self, text, exception=None, data={}):
|
||||
if not self._check_min_level('error'):
|
||||
return
|
||||
self._send_to_rollbar('error', text, exception, data)
|
||||
self._send_to_log_file('error', text, exception, data)
|
||||
# plpy.error and plpy.fatal raise exceptions and we only want to log an
# error; exceptions should be raised explicitly
|
||||
plpy.warning(text)
|
||||
self._send_to_plpy('error', text)
|
||||
|
||||
def _check_min_level(self, level):
|
||||
return True if self.LEVELS[level] >= self._min_level else False
|
||||
@@ -82,6 +86,19 @@ class Logger:
|
||||
elif level == 'error':
|
||||
self._file_logger.error(text, extra=extra_data)
|
||||
|
||||
def _send_to_plpy(self, level, text):
|
||||
if self._check_plpy():
|
||||
if level == 'debug':
|
||||
plpy.debug(text)
|
||||
elif level == 'info':
|
||||
plpy.info(text)
|
||||
elif level == 'warning':
|
||||
plpy.warning(text)
|
||||
elif level == 'error':
|
||||
# plpy.error and plpy.fatal raise exceptions and we only want to
# log an error; exceptions should be raised explicitly
|
||||
plpy.warning(text)
|
||||
|
||||
def _parse_log_extra_data(self, exception, data):
|
||||
extra_data = {}
|
||||
if exception:
|
||||
@@ -118,6 +135,13 @@ class Logger:
|
||||
def _log_file_activated(self):
|
||||
return True if self._config.log_file_path else False
|
||||
|
||||
def _check_plpy(self):
|
||||
try:
|
||||
module = sys.modules['plpy']
|
||||
return True
|
||||
except KeyError:
|
||||
return False
|
||||
|
||||
|
||||
class ConfigException(Exception):
|
||||
pass
|
||||
|
||||
@@ -10,7 +10,7 @@ from setuptools import setup, find_packages
setup(
    name='cartodb_services',

    version='0.8',
    version='0.10.0',

    description='CartoDB Services API Python Library',
@@ -0,0 +1,5 @@
from test_helper import plpy_mock_config


def setup():
    plpy_mock_config()
@@ -0,0 +1,68 @@
|
||||
from unittest import TestCase
|
||||
from mockredis import MockRedis
|
||||
from ..test_helper import *
|
||||
from cartodb_services.metrics.config import RoutingConfig, ServicesRedisConfig
|
||||
|
||||
|
||||
class TestRoutingConfig(TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self._redis_conn = MockRedis()
|
||||
self._db_conn = plpy_mock
|
||||
self._username = 'my_test_user'
|
||||
self._user_key = "rails:users:{0}".format(self._username)
|
||||
self._redis_conn.hset(self._user_key, 'period_end_date', '2016-10-10')
|
||||
|
||||
def test_should_pick_quota_from_server_by_default(self):
|
||||
orgname = None
|
||||
config = RoutingConfig(self._redis_conn, self._db_conn, self._username, orgname)
|
||||
assert config.monthly_quota == 1500000
|
||||
|
||||
def test_should_pick_quota_from_redis_if_present(self):
|
||||
self._redis_conn.hset(self._user_key, 'mapzen_routing_quota', 1000)
|
||||
orgname = None
|
||||
config = RoutingConfig(self._redis_conn, self._db_conn, self._username, orgname)
|
||||
assert config.monthly_quota == 1000
|
||||
|
||||
def test_org_quota_overrides_user_quota(self):
|
||||
self._redis_conn.hset(self._user_key, 'mapzen_routing_quota', 1000)
|
||||
orgname = 'my_test_org'
|
||||
orgname_key = "rails:orgs:{0}".format(orgname)
|
||||
self._redis_conn.hset(orgname_key, 'period_end_date', '2016-05-31')
|
||||
self._redis_conn.hset(orgname_key, 'mapzen_routing_quota', 5000)
|
||||
|
||||
# TODO: these are not too relevant for the routing config
|
||||
self._redis_conn.hset(orgname_key, 'geocoding_quota', 0)
|
||||
self._redis_conn.hset(orgname_key, 'here_isolines_quota', 0)
|
||||
|
||||
config = RoutingConfig(self._redis_conn, self._db_conn, self._username, orgname)
|
||||
assert config.monthly_quota == 5000
|
||||
|
||||
|
||||
def test_should_have_soft_limit_false_by_default(self):
|
||||
orgname = None
|
||||
config = RoutingConfig(self._redis_conn, self._db_conn, self._username, orgname)
|
||||
assert config.soft_limit == False
|
||||
|
||||
def test_can_set_soft_limit_in_user_conf(self):
|
||||
self._redis_conn.hset(self._user_key, 'soft_mapzen_routing_limit', True)
|
||||
orgname = None
|
||||
config = RoutingConfig(self._redis_conn, self._db_conn, self._username, orgname)
|
||||
assert config.soft_limit == True
|
||||
|
||||
|
||||
class TestServicesRedisConfig(TestCase):
|
||||
def test_it_picks_mapzen_routing_quota_from_redis(self):
|
||||
redis_conn = MockRedis()
|
||||
redis_conn.hset('rails:users:my_username', 'mapzen_routing_quota', 42)
|
||||
redis_config = ServicesRedisConfig(redis_conn).build('my_username', None)
|
||||
assert 'mapzen_routing_quota' in redis_config
|
||||
assert int(redis_config['mapzen_routing_quota']) == 42
|
||||
|
||||
def test_org_quota_overrides_user_quota(self):
|
||||
redis_conn = MockRedis()
|
||||
redis_conn.hset('rails:users:my_username', 'mapzen_routing_quota', 42)
|
||||
redis_conn.hset('rails:orgs:acme', 'mapzen_routing_quota', 31415)
|
||||
redis_config = ServicesRedisConfig(redis_conn).build('my_username', 'acme')
|
||||
assert 'mapzen_routing_quota' in redis_config
|
||||
assert int(redis_config['mapzen_routing_quota']) == 31415
|
||||
@@ -0,0 +1,85 @@
|
||||
from unittest import TestCase
|
||||
from mockredis import MockRedis
|
||||
from ..test_helper import *
|
||||
from cartodb_services.metrics.quota import QuotaChecker
|
||||
from cartodb_services.metrics import RoutingConfig
|
||||
from datetime import datetime
|
||||
|
||||
|
||||
class RoutingConfigMock(object):
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
self.__dict__ = kwargs
|
||||
|
||||
|
||||
class TestQuotaChecker(TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.username = 'my_test_user'
|
||||
self.period_end_date = datetime.today()
|
||||
self.service_type = 'routing_mapzen'
|
||||
self.redis_key = 'user:{0}:{1}:success_responses:{2}{3}'.format(
|
||||
self.username,
|
||||
self.service_type,
|
||||
self.period_end_date.year,
|
||||
self.period_end_date.month
|
||||
)
|
||||
|
||||
def test_routing_quota_check_passes_when_enough_quota(self):
|
||||
user_service_config = RoutingConfigMock(
|
||||
username = self.username,
|
||||
organization = None,
|
||||
service_type = self.service_type,
|
||||
monthly_quota = 1000,
|
||||
period_end_date = datetime.today(),
|
||||
soft_limit = False
|
||||
)
|
||||
redis_conn = MockRedis()
|
||||
redis_conn.zincrby(self.redis_key, self.period_end_date.day, 999)
|
||||
assert QuotaChecker(user_service_config, redis_conn).check() == True
|
||||
|
||||
def test_routing_quota_check_fails_when_quota_exhausted(self):
|
||||
user_service_config = RoutingConfigMock(
|
||||
username = self.username,
|
||||
organization = None,
|
||||
service_type = self.service_type,
|
||||
monthly_quota = 1000,
|
||||
period_end_date = datetime.today(),
|
||||
soft_limit = False
|
||||
)
|
||||
redis_conn = MockRedis()
|
||||
redis_conn.zincrby(self.redis_key, self.period_end_date.day, 1001)
|
||||
checker = QuotaChecker(user_service_config, redis_conn)
|
||||
assert checker.check() == False
|
||||
|
||||
def test_routing_quota_check_fails_right_in_the_limit(self):
|
||||
"""
|
||||
I have 1000 credits and I just spent 1000 today. I should not pass
|
||||
the check to perform the 1001st routing operation.
|
||||
"""
|
||||
user_service_config = RoutingConfigMock(
|
||||
username = self.username,
|
||||
organization = None,
|
||||
service_type = self.service_type,
|
||||
monthly_quota = 1000,
|
||||
period_end_date = datetime.today(),
|
||||
soft_limit = False
|
||||
)
|
||||
redis_conn = MockRedis()
|
||||
redis_conn.zincrby(self.redis_key, self.period_end_date.day, 1000)
|
||||
checker = QuotaChecker(user_service_config, redis_conn)
|
||||
assert checker.check() == False
|
||||
|
||||
def test_routing_quota_check_passes_if_no_quota_but_soft_limit(self):
|
||||
user_service_config = RoutingConfigMock(
|
||||
username = self.username,
|
||||
organization = None,
|
||||
service_type = self.service_type,
|
||||
monthly_quota = 1000,
|
||||
period_end_date = datetime.today(),
|
||||
soft_limit = True
|
||||
)
|
||||
redis_conn = MockRedis()
|
||||
redis_conn.zincrby(self.redis_key, self.period_end_date.day, 1001)
|
||||
checker = QuotaChecker(user_service_config, redis_conn)
|
||||
assert checker.check() == True
|
||||
server/lib/python/cartodb_services/test/metrics/test_user.py (new file, 30 lines)
@@ -0,0 +1,30 @@
|
||||
from unittest import TestCase
|
||||
from cartodb_services.metrics import UserMetricsService
|
||||
import datetime
|
||||
from mockredis import MockRedis
|
||||
|
||||
class UserGeocoderConfig(object):
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
self.__dict__ = kwargs
|
||||
|
||||
|
||||
class TestUserMetricsService(TestCase):
|
||||
|
||||
def setUp(self):
|
||||
user_geocoder_config = UserGeocoderConfig(
|
||||
username = 'my_test_user',
|
||||
organization = None,
|
||||
period_end_date = datetime.date.today()
|
||||
)
|
||||
redis_conn = MockRedis()
|
||||
self.user_metrics_service = UserMetricsService(user_geocoder_config, redis_conn)
|
||||
|
||||
|
||||
def test_routing_used_quota_zero_when_no_usage(self):
|
||||
assert self.user_metrics_service.used_quota(UserMetricsService.SERVICE_MAPZEN_ROUTING, datetime.date.today()) == 0
|
||||
|
||||
def test_routing_used_quota_counts_usages(self):
|
||||
self.user_metrics_service.increment_service_use(UserMetricsService.SERVICE_MAPZEN_ROUTING, 'success_responses')
|
||||
self.user_metrics_service.increment_service_use(UserMetricsService.SERVICE_MAPZEN_ROUTING, 'empty_responses')
|
||||
assert self.user_metrics_service.used_quota('routing_mapzen', datetime.date.today()) == 2
|
||||
server/lib/python/cartodb_services/test/mock_plpy.py (new file, 53 lines)
@@ -0,0 +1,53 @@
|
||||
import re
|
||||
|
||||
|
||||
class MockCursor:
|
||||
def __init__(self, data):
|
||||
self.cursor_pos = 0
|
||||
self.data = data
|
||||
|
||||
def fetch(self, batch_size):
|
||||
batch = self.data[self.cursor_pos: self.cursor_pos + batch_size]
|
||||
self.cursor_pos += batch_size
|
||||
return batch
|
||||
|
||||
|
||||
class MockPlPy:
|
||||
def __init__(self):
|
||||
self._reset()
|
||||
|
||||
def _reset(self):
|
||||
self.infos = []
|
||||
self.notices = []
|
||||
self.debugs = []
|
||||
self.logs = []
|
||||
self.warnings = []
|
||||
self.errors = []
|
||||
self.fatals = []
|
||||
self.executes = []
|
||||
self.results = []
|
||||
self.prepares = []
|
||||
self.results = {}
|
||||
|
||||
def _define_result(self, query, result):
|
||||
pattern = re.compile(query, re.IGNORECASE | re.MULTILINE)
|
||||
self.results[pattern] = result
|
||||
|
||||
def notice(self, msg):
|
||||
self.notices.append(msg)
|
||||
|
||||
def debug(self, msg):
|
||||
self.notices.append(msg)
|
||||
|
||||
def info(self, msg):
|
||||
self.infos.append(msg)
|
||||
|
||||
def cursor(self, query):
|
||||
data = self.execute(query)
|
||||
return MockCursor(data)
|
||||
|
||||
def execute(self, query, rows=1):
|
||||
for pattern, result in self.results.iteritems():
|
||||
if pattern.search(query):
|
||||
return result
|
||||
return []
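A sketch of how a test can stub server-side SQL with this mock; the regex pattern and stubbed payload below are made up for illustration:

```python
# Illustrative use of MockPlPy in a test (pattern and payload are made up):
plpy_mock = MockPlPy()
plpy_mock._define_result(r"cdb_conf_getconf\('mapzen_conf'\)", [{
    'conf': '{"geocoder": {"monthly_quota": 1500000, "api_key": "xxxx"}}'
}])
rows = plpy_mock.execute(
    "SELECT cdb_dataservices_server.cdb_conf_getconf('mapzen_conf') as conf")
assert '"monthly_quota": 1500000' in rows[0]['conf']
```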
|
||||
@@ -0,0 +1,47 @@
|
||||
from unittest import TestCase
|
||||
from cartodb_services.refactor.core.environment import *
|
||||
from nose.tools import raises
|
||||
from cartodb_services.refactor.storage.mem_config import InMemoryConfigStorage
|
||||
|
||||
class TestServerEnvironment(TestCase):
|
||||
|
||||
def test_can_be_a_valid_one(self):
|
||||
env_dev = ServerEnvironment('development')
|
||||
env_staging = ServerEnvironment('staging')
|
||||
env_prod = ServerEnvironment('production')
|
||||
env_onpremise = ServerEnvironment('onpremise')
|
||||
|
||||
@raises(AssertionError)
|
||||
def test_cannot_be_a_non_valid_one(self):
|
||||
env_whatever = ServerEnvironment('whatever')
|
||||
|
||||
def test_is_on_premise_returns_true_when_onpremise(self):
|
||||
assert ServerEnvironment('onpremise').is_onpremise == True
|
||||
|
||||
def test_is_on_premise_returns_false_when_any_other(self):
|
||||
assert ServerEnvironment('development').is_onpremise == False
|
||||
assert ServerEnvironment('staging').is_onpremise == False
|
||||
assert ServerEnvironment('production').is_onpremise == False
|
||||
|
||||
def test_equality(self):
|
||||
assert ServerEnvironment('development') == ServerEnvironment('development')
|
||||
assert ServerEnvironment('development') != ServerEnvironment('onpremise')
|
||||
|
||||
|
||||
class TestServerEnvironmentBuilder(TestCase):
|
||||
|
||||
def test_returns_env_according_to_configuration(self):
|
||||
server_config_storage = InMemoryConfigStorage({
|
||||
'server_conf': {
|
||||
'environment': 'staging'
|
||||
}
|
||||
})
|
||||
server_env = ServerEnvironmentBuilder(server_config_storage).get()
|
||||
assert server_env.is_staging == True
|
||||
|
||||
def test_returns_default_when_no_server_conf(self):
|
||||
server_config_storage = InMemoryConfigStorage({})
|
||||
server_env = ServerEnvironmentBuilder(server_config_storage).get()
|
||||
|
||||
assert server_env.is_development == True
|
||||
assert str(server_env) == ServerEnvironmentBuilder.DEFAULT_ENVIRONMENT
|
||||
@@ -0,0 +1,12 @@
|
||||
from unittest import TestCase
|
||||
from cartodb_services.refactor.storage.mem_config import InMemoryConfigStorage
|
||||
|
||||
class TestInMemoryConfigStorage(TestCase):
|
||||
|
||||
def test_can_provide_values_from_hash(self):
|
||||
server_config = InMemoryConfigStorage({'any_key': 'any_value'})
|
||||
assert server_config.get('any_key') == 'any_value'
|
||||
|
||||
def test_gets_none_if_cannot_retrieve_key(self):
|
||||
server_config = InMemoryConfigStorage()
|
||||
assert server_config.get('any_non_existing_key') == None
|
||||
@@ -0,0 +1,14 @@
|
||||
from unittest import TestCase
|
||||
from cartodb_services.refactor.storage.null_config import NullConfigStorage
|
||||
from cartodb_services.refactor.core.interfaces import ConfigBackendInterface
|
||||
|
||||
|
||||
class TestNullConfigStorage(TestCase):
|
||||
|
||||
def test_is_a_config_backend(self):
|
||||
null_config = NullConfigStorage()
|
||||
assert isinstance(null_config, ConfigBackendInterface)
|
||||
|
||||
def test_returns_none_regardless_of_input(self):
|
||||
null_config = NullConfigStorage()
|
||||
assert null_config.get('whatever') is None
|
||||
@@ -0,0 +1,77 @@
|
||||
from unittest import TestCase
|
||||
from cartodb_services.refactor.storage.redis_config import *
|
||||
from mockredis import MockRedis
|
||||
from mock import Mock, MagicMock
|
||||
from nose.tools import raises
|
||||
|
||||
|
||||
class TestRedisConfigStorage(TestCase):
|
||||
|
||||
CONFIG_HASH_KEY = 'mykey'
|
||||
|
||||
def test_can_get_a_config_field(self):
|
||||
connection = MockRedis()
|
||||
connection.hset(self.CONFIG_HASH_KEY, 'field1', 42)
|
||||
redis_config = RedisConfigStorage(connection, self.CONFIG_HASH_KEY)
|
||||
|
||||
value = redis_config.get('field1')
|
||||
assert type(value) == str # this is something to take into account, redis always returns strings
|
||||
assert value == '42'
|
||||
|
||||
@raises(KeyError)
|
||||
def test_raises_an_exception_if_config_key_not_present(self):
|
||||
connection = MockRedis()
|
||||
redis_config = RedisConfigStorage(connection, self.CONFIG_HASH_KEY)
|
||||
redis_config.get('whatever_field')
|
||||
|
||||
@raises(KeyError)
|
||||
def test_returns_nothing_if_field_not_present(self):
|
||||
connection = MockRedis()
|
||||
connection.hmset(self.CONFIG_HASH_KEY, {'field1': 42, 'field2': 43})
|
||||
redis_config = RedisConfigStorage(connection, self.CONFIG_HASH_KEY)
|
||||
redis_config.get('whatever_field')
|
||||
|
||||
def test_it_reads_the_config_hash_just_once(self):
|
||||
connection = Mock()
|
||||
connection.hgetall = MagicMock(return_value={'field1': '42'})
|
||||
redis_config = RedisConfigStorage(connection, self.CONFIG_HASH_KEY)
|
||||
|
||||
assert redis_config.get('field1') == '42'
|
||||
assert redis_config.get('field1') == '42'
|
||||
|
||||
connection.hgetall.assert_called_once_with(self.CONFIG_HASH_KEY)
|
||||
|
||||
|
||||
class TestRedisUserConfigStorageBuilder(TestCase):
|
||||
|
||||
USERNAME = 'john'
|
||||
EXPECTED_REDIS_CONFIG_HASH_KEY = 'rails:users:john'
|
||||
|
||||
def test_it_reads_the_correct_hash_key(self):
|
||||
connection = Mock()
|
||||
connection.hgetall = MagicMock(return_value={'an_user_config_field': 'nice'})
|
||||
redis_config = RedisConfigStorage(connection, self.EXPECTED_REDIS_CONFIG_HASH_KEY)
|
||||
|
||||
redis_config = RedisUserConfigStorageBuilder(connection, self.USERNAME).get()
|
||||
assert redis_config.get('an_user_config_field') == 'nice'
|
||||
connection.hgetall.assert_called_once_with(self.EXPECTED_REDIS_CONFIG_HASH_KEY)
|
||||
|
||||
|
||||
class TestRedisOrgConfigStorageBuilder(TestCase):
|
||||
|
||||
ORGNAME = 'smith'
|
||||
EXPECTED_REDIS_CONFIG_HASH_KEY = 'rails:orgs:smith'
|
||||
|
||||
def test_it_reads_the_correct_hash_key(self):
|
||||
connection = Mock()
|
||||
connection.hgetall = MagicMock(return_value={'an_org_config_field': 'awesome'})
|
||||
redis_config = RedisConfigStorage(connection, self.EXPECTED_REDIS_CONFIG_HASH_KEY)
|
||||
|
||||
redis_config = RedisOrgConfigStorageBuilder(connection, self.ORGNAME).get()
|
||||
assert redis_config.get('an_org_config_field') == 'awesome'
|
||||
connection.hgetall.assert_called_once_with(self.EXPECTED_REDIS_CONFIG_HASH_KEY)
|
||||
|
||||
def test_it_returns_a_null_config_storage_if_theres_no_orgname(self):
|
||||
redis_config = RedisOrgConfigStorageBuilder(None, None).get()
|
||||
assert type(redis_config) == NullConfigStorage
|
||||
assert redis_config.get('whatever') == None
|
||||
Some files were not shown because too many files have changed in this diff.