Compare commits
79 Commits
python-0.7
...
0.16.0-ser
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
98d533b707 | ||
|
|
6d0ad85d48 | ||
|
|
00e6cace76 | ||
|
|
e9ad35ba1d | ||
|
|
dcb3935021 | ||
|
|
cded6c2f08 | ||
|
|
e1b357137a | ||
|
|
3844cfc226 | ||
|
|
2a1276f4f1 | ||
|
|
35da7e48fd | ||
|
|
12aebb7eee | ||
|
|
0d87a95270 | ||
|
|
18e1a5c7c9 | ||
|
|
fcca5da302 | ||
|
|
1aec541906 | ||
|
|
9e98e0794d | ||
|
|
8fbb41742c | ||
|
|
275a6dc27f | ||
|
|
d522083d5c | ||
|
|
073163eb1a | ||
|
|
0c62c4bada | ||
|
|
3361960cfc | ||
|
|
86ab3abc53 | ||
|
|
b1f3405cd0 | ||
|
|
fb812ee15e | ||
|
|
c1dd410201 | ||
|
|
34ddd28e6b | ||
|
|
d85bc65bf8 | ||
|
|
443fe88d5a | ||
|
|
6c61626214 | ||
|
|
74d2fba763 | ||
|
|
e24819f193 | ||
|
|
1e6ee8d5c1 | ||
|
|
3a6cc4c364 | ||
|
|
8ad2434b1d | ||
|
|
0b7b44d8a5 | ||
|
|
02a2619b45 | ||
|
|
4b4a02905c | ||
|
|
1f3a655ae5 | ||
|
|
9d60fde0b8 | ||
|
|
efdc151282 | ||
|
|
fd2cc21942 | ||
|
|
18f05fbd4f | ||
|
|
d2f4586bae | ||
|
|
54eb279ae8 | ||
|
|
85d6c2a54e | ||
|
|
cad2051efe | ||
|
|
96a93e3c56 | ||
|
|
facda9e8be | ||
|
|
64fc18b9e0 | ||
|
|
9381d5644b | ||
|
|
9f55f2ee3b | ||
|
|
1087c1266b | ||
|
|
d5a296a30c | ||
|
|
f8caf4314d | ||
|
|
d7910fbbf1 | ||
|
|
d47049c813 | ||
|
|
cc8f93c535 | ||
|
|
3f9441de7e | ||
|
|
fe41359a1f | ||
|
|
46a934b178 | ||
|
|
184358bdec | ||
|
|
a6d546f2ee | ||
|
|
fc99f7aba9 | ||
|
|
e959873b32 | ||
|
|
a98093540d | ||
|
|
78add220cd | ||
|
|
cf2f86136b | ||
|
|
fb183b07ee | ||
|
|
5ab727bcb6 | ||
|
|
1e9b551160 | ||
|
|
699dc9bf0e | ||
|
|
fc291a7c63 | ||
|
|
d73af32c2c | ||
|
|
7ea88fa051 | ||
|
|
d2ca40cf38 | ||
|
|
18ae2525b6 | ||
|
|
06462fdf7a | ||
|
|
71d5ce951a |
1
.gitignore
vendored
1
.gitignore
vendored
@@ -1,5 +1,6 @@
|
|||||||
.DS_Store
|
.DS_Store
|
||||||
*.pyc
|
*.pyc
|
||||||
|
.coverage
|
||||||
cartodb_services.egg-info/
|
cartodb_services.egg-info/
|
||||||
build/
|
build/
|
||||||
dist/
|
dist/
|
||||||
|
|||||||
25
NEWS.md
25
NEWS.md
@@ -1,3 +1,28 @@
|
|||||||
|
September 28, 2016
|
||||||
|
==========
|
||||||
|
* Released version 0.8.1 of Python package cartodb\_services
|
||||||
|
* Improvements in QPS retry decorator for requests to external services
|
||||||
|
|
||||||
|
https://github.com/CartoDB/dataservices-api/releases/tag/python-0.8.1
|
||||||
|
|
||||||
|
September 8, 2016
|
||||||
|
===========
|
||||||
|
* Released version 0.11.1 of the client
|
||||||
|
* Minor change in the name of the function parameter sent to server and Observatory backend for compatibility with the last observatory-extension framework updates
|
||||||
|
|
||||||
|
September 1, 2016
|
||||||
|
===========
|
||||||
|
* Released version 0.11.0 of the client
|
||||||
|
* Include DS table functions to create and populate a table with the GetMeasure function in observatory
|
||||||
|
* Released version 0.15.1 of the server
|
||||||
|
* Rename DS table functions
|
||||||
|
|
||||||
|
August 29, 2016
|
||||||
|
===========
|
||||||
|
* Released version 0.15.0 of the server
|
||||||
|
* Geocode namedplace point functions uses Mapzen search service and in case of error
|
||||||
|
it'll use the internal geocoder
|
||||||
|
|
||||||
August 19, 2016
|
August 19, 2016
|
||||||
===========
|
===========
|
||||||
* Released version 0.7.4.2 of the server python library
|
* Released version 0.7.4.2 of the server python library
|
||||||
|
|||||||
@@ -5,7 +5,7 @@ The CARTO Data Services SQL API
|
|||||||
Steps to deploy a new Data Services API version :
|
Steps to deploy a new Data Services API version :
|
||||||
|
|
||||||
- Deploy new version of dataservices API to all servers
|
- Deploy new version of dataservices API to all servers
|
||||||
- Update the server user using: ALTER EXTENSION cdb_dataservices_server UPDATE TO '<CURRENT_VERSION>';
|
- Update the server user using: ALTER EXTENSION cdb_dataservices_server UPDATE TO '\<CURRENT_VERSION\>';
|
||||||
- Update the python dependencies if needed: **cartodb_geocoder** and **heremaps**
|
- Update the python dependencies if needed: **cartodb_geocoder** and **heremaps**
|
||||||
- Add the needed config in the `cdb_conf` table:
|
- Add the needed config in the `cdb_conf` table:
|
||||||
- `redis_metadata_config` and `redis_metrics_conf`
|
- `redis_metadata_config` and `redis_metrics_conf`
|
||||||
|
|||||||
@@ -13,8 +13,8 @@ OLD_VERSIONS = $(wildcard old_versions/*.sql)
|
|||||||
# @see http://www.postgresql.org/docs/current/static/extend-pgxs.html
|
# @see http://www.postgresql.org/docs/current/static/extend-pgxs.html
|
||||||
DATA = $(NEW_EXTENSION_ARTIFACT) \
|
DATA = $(NEW_EXTENSION_ARTIFACT) \
|
||||||
$(OLD_VERSIONS) \
|
$(OLD_VERSIONS) \
|
||||||
cdb_dataservices_client--0.10.1--0.10.2.sql \
|
cdb_dataservices_client--0.11.0--0.11.1.sql \
|
||||||
cdb_dataservices_client--0.10.2--0.10.1.sql
|
cdb_dataservices_client--0.11.1--0.11.0.sql
|
||||||
|
|
||||||
|
|
||||||
REGRESS = $(notdir $(basename $(wildcard test/sql/*test.sql)))
|
REGRESS = $(notdir $(basename $(wildcard test/sql/*test.sql)))
|
||||||
|
|||||||
140
client/cdb_dataservices_client--0.11.0--0.11.1.sql
Normal file
140
client/cdb_dataservices_client--0.11.0--0.11.1.sql
Normal file
@@ -0,0 +1,140 @@
|
|||||||
|
--DO NOT MODIFY THIS FILE, IT IS GENERATED AUTOMATICALLY FROM SOURCES
|
||||||
|
-- Complain if script is sourced in psql, rather than via CREATE EXTENSION
|
||||||
|
\echo Use "ALTER EXTENSION cdb_dataservices_client UPDATE TO '0.11.1'" to load this file. \quit
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_dataservices_client.__DST_PrepareTableOBS_GetMeasure(
|
||||||
|
username text,
|
||||||
|
orgname text,
|
||||||
|
user_db_role text,
|
||||||
|
user_schema text,
|
||||||
|
output_table_name text,
|
||||||
|
params json
|
||||||
|
) RETURNS boolean AS $$
|
||||||
|
function_name = 'OBS_GetMeasure'
|
||||||
|
# Obtain return types for augmentation procedure
|
||||||
|
ds_return_metadata = plpy.execute("SELECT colnames, coltypes "
|
||||||
|
"FROM cdb_dataservices_client._DST_GetReturnMetadata({username}::text, {orgname}::text, {function_name}::text, {params}::json);"
|
||||||
|
.format(
|
||||||
|
username=plpy.quote_nullable(username),
|
||||||
|
orgname=plpy.quote_nullable(orgname),
|
||||||
|
function_name=plpy.quote_literal(function_name),
|
||||||
|
params=plpy.quote_literal(params)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
if ds_return_metadata[0]["colnames"]:
|
||||||
|
colnames_arr = ds_return_metadata[0]["colnames"]
|
||||||
|
coltypes_arr = ds_return_metadata[0]["coltypes"]
|
||||||
|
else:
|
||||||
|
raise Exception('Error retrieving OBS_GetMeasure metadata')
|
||||||
|
|
||||||
|
|
||||||
|
# Prepare column and type strings required in the SQL queries
|
||||||
|
columns_with_types_arr = [colnames_arr[i] + ' ' + coltypes_arr[i] for i in range(0,len(colnames_arr))]
|
||||||
|
columns_with_types = ','.join(columns_with_types_arr)
|
||||||
|
|
||||||
|
# Create a new table with the required columns
|
||||||
|
plpy.execute('CREATE TABLE "{schema}".{table_name} ( '
|
||||||
|
'cartodb_id int, the_geom geometry, {columns_with_types} '
|
||||||
|
');'
|
||||||
|
.format(schema=user_schema, table_name=output_table_name, columns_with_types=columns_with_types)
|
||||||
|
)
|
||||||
|
|
||||||
|
plpy.execute('ALTER TABLE "{schema}".{table_name} OWNER TO "{user}";'
|
||||||
|
.format(schema=user_schema, table_name=output_table_name, user=user_db_role)
|
||||||
|
)
|
||||||
|
|
||||||
|
return True
|
||||||
|
$$ LANGUAGE plpythonu;
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_dataservices_client.__DST_PopulateTableOBS_GetMeasure(
|
||||||
|
username text,
|
||||||
|
orgname text,
|
||||||
|
user_db_role text,
|
||||||
|
user_schema text,
|
||||||
|
dbname text,
|
||||||
|
table_name text,
|
||||||
|
output_table_name text,
|
||||||
|
params json
|
||||||
|
) RETURNS boolean AS $$
|
||||||
|
function_name = 'OBS_GetMeasure'
|
||||||
|
# Obtain return types for augmentation procedure
|
||||||
|
ds_return_metadata = plpy.execute(
|
||||||
|
"SELECT colnames, coltypes "
|
||||||
|
"FROM cdb_dataservices_client._DST_GetReturnMetadata({username}::text, {orgname}::text, {function_name}::text, {params}::json);" .format(
|
||||||
|
username=plpy.quote_nullable(username),
|
||||||
|
orgname=plpy.quote_nullable(orgname),
|
||||||
|
function_name=plpy.quote_literal(function_name),
|
||||||
|
params=plpy.quote_literal(params)))
|
||||||
|
|
||||||
|
if ds_return_metadata[0]["colnames"]:
|
||||||
|
colnames_arr = ds_return_metadata[0]["colnames"]
|
||||||
|
coltypes_arr = ds_return_metadata[0]["coltypes"]
|
||||||
|
else:
|
||||||
|
raise Exception('Error retrieving OBS_GetMeasure metadata')
|
||||||
|
|
||||||
|
# Prepare column and type strings required in the SQL queries
|
||||||
|
columns_with_types_arr = [
|
||||||
|
colnames_arr[i] +
|
||||||
|
' ' +
|
||||||
|
coltypes_arr[i] for i in range(
|
||||||
|
0,
|
||||||
|
len(colnames_arr))]
|
||||||
|
columns_with_types = ','.join(columns_with_types_arr)
|
||||||
|
aliased_colname_list = ','.join(
|
||||||
|
['result.' + name for name in colnames_arr])
|
||||||
|
|
||||||
|
# Instruct the OBS server side to establish a FDW
|
||||||
|
# The metadata is obtained as well in order to:
|
||||||
|
# - (a) be able to write the query to grab the actual data to be executed in the remote server via pl/proxy,
|
||||||
|
# - (b) be able to tell OBS to free resources when done.
|
||||||
|
ds_fdw_metadata = plpy.execute(
|
||||||
|
"SELECT schemaname, tabname, servername "
|
||||||
|
"FROM cdb_dataservices_client._DST_ConnectUserTable({username}::text, {orgname}::text, {user_db_role}::text, "
|
||||||
|
"{schema}::text, {dbname}::text, {table_name}::text);" .format(
|
||||||
|
username=plpy.quote_nullable(username),
|
||||||
|
orgname=plpy.quote_nullable(orgname),
|
||||||
|
user_db_role=plpy.quote_literal(user_db_role),
|
||||||
|
schema=plpy.quote_literal(user_schema),
|
||||||
|
dbname=plpy.quote_literal(dbname),
|
||||||
|
table_name=plpy.quote_literal(table_name)))
|
||||||
|
|
||||||
|
if ds_fdw_metadata[0]["schemaname"]:
|
||||||
|
server_schema = ds_fdw_metadata[0]["schemaname"]
|
||||||
|
server_table_name = ds_fdw_metadata[0]["tabname"]
|
||||||
|
server_name = ds_fdw_metadata[0]["servername"]
|
||||||
|
else:
|
||||||
|
raise Exception('Error connecting dataset via FDW')
|
||||||
|
|
||||||
|
# Create a new table with the required columns
|
||||||
|
plpy.execute(
|
||||||
|
'INSERT INTO "{schema}".{analysis_table_name} '
|
||||||
|
'SELECT ut.cartodb_id, ut.the_geom, {colname_list} '
|
||||||
|
'FROM "{schema}".{table_name} ut '
|
||||||
|
'LEFT JOIN _DST_FetchJoinFdwTableData({username}::text, {orgname}::text, {server_schema}::text, {server_table_name}::text, '
|
||||||
|
'{function_name}::text, {params}::json) '
|
||||||
|
'AS result ({columns_with_types}, cartodb_id int) '
|
||||||
|
'ON result.cartodb_id = ut.cartodb_id;' .format(
|
||||||
|
schema=user_schema,
|
||||||
|
analysis_table_name=output_table_name,
|
||||||
|
colname_list=aliased_colname_list,
|
||||||
|
table_name=table_name,
|
||||||
|
username=plpy.quote_nullable(username),
|
||||||
|
orgname=plpy.quote_nullable(orgname),
|
||||||
|
server_schema=plpy.quote_literal(server_schema),
|
||||||
|
server_table_name=plpy.quote_literal(server_table_name),
|
||||||
|
function_name=plpy.quote_literal(function_name),
|
||||||
|
params=plpy.quote_literal(params),
|
||||||
|
columns_with_types=columns_with_types))
|
||||||
|
|
||||||
|
# Wipe user FDW data from the server
|
||||||
|
wiped = plpy.execute(
|
||||||
|
"SELECT cdb_dataservices_client._DST_DisconnectUserTable({username}::text, {orgname}::text, {server_schema}::text, "
|
||||||
|
"{server_table_name}::text, {fdw_server}::text)" .format(
|
||||||
|
username=plpy.quote_nullable(username),
|
||||||
|
orgname=plpy.quote_nullable(orgname),
|
||||||
|
server_schema=plpy.quote_literal(server_schema),
|
||||||
|
server_table_name=plpy.quote_literal(server_table_name),
|
||||||
|
fdw_server=plpy.quote_literal(server_name)))
|
||||||
|
|
||||||
|
return True
|
||||||
|
$$ LANGUAGE plpythonu;
|
||||||
140
client/cdb_dataservices_client--0.11.1--0.11.0.sql
Normal file
140
client/cdb_dataservices_client--0.11.1--0.11.0.sql
Normal file
@@ -0,0 +1,140 @@
|
|||||||
|
--DO NOT MODIFY THIS FILE, IT IS GENERATED AUTOMATICALLY FROM SOURCES
|
||||||
|
-- Complain if script is sourced in psql, rather than via CREATE EXTENSION
|
||||||
|
\echo Use "ALTER EXTENSION cdb_dataservices_client UPDATE TO '0.11.0'" to load this file. \quit
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_dataservices_client.__DST_PrepareTableOBS_GetMeasure(
|
||||||
|
username text,
|
||||||
|
orgname text,
|
||||||
|
user_db_role text,
|
||||||
|
user_schema text,
|
||||||
|
output_table_name text,
|
||||||
|
params json
|
||||||
|
) RETURNS boolean AS $$
|
||||||
|
function_name = 'GetMeasure'
|
||||||
|
# Obtain return types for augmentation procedure
|
||||||
|
ds_return_metadata = plpy.execute("SELECT colnames, coltypes "
|
||||||
|
"FROM cdb_dataservices_client._DST_GetReturnMetadata({username}::text, {orgname}::text, {function_name}::text, {params}::json);"
|
||||||
|
.format(
|
||||||
|
username=plpy.quote_nullable(username),
|
||||||
|
orgname=plpy.quote_nullable(orgname),
|
||||||
|
function_name=plpy.quote_literal(function_name),
|
||||||
|
params=plpy.quote_literal(params)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
if ds_return_metadata[0]["colnames"]:
|
||||||
|
colnames_arr = ds_return_metadata[0]["colnames"]
|
||||||
|
coltypes_arr = ds_return_metadata[0]["coltypes"]
|
||||||
|
else:
|
||||||
|
raise Exception('Error retrieving OBS_GetMeasure metadata')
|
||||||
|
|
||||||
|
|
||||||
|
# Prepare column and type strings required in the SQL queries
|
||||||
|
columns_with_types_arr = [colnames_arr[i] + ' ' + coltypes_arr[i] for i in range(0,len(colnames_arr))]
|
||||||
|
columns_with_types = ','.join(columns_with_types_arr)
|
||||||
|
|
||||||
|
# Create a new table with the required columns
|
||||||
|
plpy.execute('CREATE TABLE "{schema}".{table_name} ( '
|
||||||
|
'cartodb_id int, the_geom geometry, {columns_with_types} '
|
||||||
|
');'
|
||||||
|
.format(schema=user_schema, table_name=output_table_name, columns_with_types=columns_with_types)
|
||||||
|
)
|
||||||
|
|
||||||
|
plpy.execute('ALTER TABLE "{schema}".{table_name} OWNER TO "{user}";'
|
||||||
|
.format(schema=user_schema, table_name=output_table_name, user=user_db_role)
|
||||||
|
)
|
||||||
|
|
||||||
|
return True
|
||||||
|
$$ LANGUAGE plpythonu;
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_dataservices_client.__DST_PopulateTableOBS_GetMeasure(
|
||||||
|
username text,
|
||||||
|
orgname text,
|
||||||
|
user_db_role text,
|
||||||
|
user_schema text,
|
||||||
|
dbname text,
|
||||||
|
table_name text,
|
||||||
|
output_table_name text,
|
||||||
|
params json
|
||||||
|
) RETURNS boolean AS $$
|
||||||
|
function_name = 'GetMeasure'
|
||||||
|
# Obtain return types for augmentation procedure
|
||||||
|
ds_return_metadata = plpy.execute(
|
||||||
|
"SELECT colnames, coltypes "
|
||||||
|
"FROM cdb_dataservices_client._DST_GetReturnMetadata({username}::text, {orgname}::text, {function_name}::text, {params}::json);" .format(
|
||||||
|
username=plpy.quote_nullable(username),
|
||||||
|
orgname=plpy.quote_nullable(orgname),
|
||||||
|
function_name=plpy.quote_literal(function_name),
|
||||||
|
params=plpy.quote_literal(params)))
|
||||||
|
|
||||||
|
if ds_return_metadata[0]["colnames"]:
|
||||||
|
colnames_arr = ds_return_metadata[0]["colnames"]
|
||||||
|
coltypes_arr = ds_return_metadata[0]["coltypes"]
|
||||||
|
else:
|
||||||
|
raise Exception('Error retrieving OBS_GetMeasure metadata')
|
||||||
|
|
||||||
|
# Prepare column and type strings required in the SQL queries
|
||||||
|
columns_with_types_arr = [
|
||||||
|
colnames_arr[i] +
|
||||||
|
' ' +
|
||||||
|
coltypes_arr[i] for i in range(
|
||||||
|
0,
|
||||||
|
len(colnames_arr))]
|
||||||
|
columns_with_types = ','.join(columns_with_types_arr)
|
||||||
|
aliased_colname_list = ','.join(
|
||||||
|
['result.' + name for name in colnames_arr])
|
||||||
|
|
||||||
|
# Instruct the OBS server side to establish a FDW
|
||||||
|
# The metadata is obtained as well in order to:
|
||||||
|
# - (a) be able to write the query to grab the actual data to be executed in the remote server via pl/proxy,
|
||||||
|
# - (b) be able to tell OBS to free resources when done.
|
||||||
|
ds_fdw_metadata = plpy.execute(
|
||||||
|
"SELECT schemaname, tabname, servername "
|
||||||
|
"FROM cdb_dataservices_client._DST_ConnectUserTable({username}::text, {orgname}::text, {user_db_role}::text, "
|
||||||
|
"{schema}::text, {dbname}::text, {table_name}::text);" .format(
|
||||||
|
username=plpy.quote_nullable(username),
|
||||||
|
orgname=plpy.quote_nullable(orgname),
|
||||||
|
user_db_role=plpy.quote_literal(user_db_role),
|
||||||
|
schema=plpy.quote_literal(user_schema),
|
||||||
|
dbname=plpy.quote_literal(dbname),
|
||||||
|
table_name=plpy.quote_literal(table_name)))
|
||||||
|
|
||||||
|
if ds_fdw_metadata[0]["schemaname"]:
|
||||||
|
server_schema = ds_fdw_metadata[0]["schemaname"]
|
||||||
|
server_table_name = ds_fdw_metadata[0]["tabname"]
|
||||||
|
server_name = ds_fdw_metadata[0]["servername"]
|
||||||
|
else:
|
||||||
|
raise Exception('Error connecting dataset via FDW')
|
||||||
|
|
||||||
|
# Create a new table with the required columns
|
||||||
|
plpy.execute(
|
||||||
|
'INSERT INTO "{schema}".{analysis_table_name} '
|
||||||
|
'SELECT ut.cartodb_id, ut.the_geom, {colname_list} '
|
||||||
|
'FROM "{schema}".{table_name} ut '
|
||||||
|
'LEFT JOIN _DST_FetchJoinFdwTableData({username}::text, {orgname}::text, {server_schema}::text, {server_table_name}::text, '
|
||||||
|
'{function_name}::text, {params}::json) '
|
||||||
|
'AS result ({columns_with_types}, cartodb_id int) '
|
||||||
|
'ON result.cartodb_id = ut.cartodb_id;' .format(
|
||||||
|
schema=user_schema,
|
||||||
|
analysis_table_name=output_table_name,
|
||||||
|
colname_list=aliased_colname_list,
|
||||||
|
table_name=table_name,
|
||||||
|
username=plpy.quote_nullable(username),
|
||||||
|
orgname=plpy.quote_nullable(orgname),
|
||||||
|
server_schema=plpy.quote_literal(server_schema),
|
||||||
|
server_table_name=plpy.quote_literal(server_table_name),
|
||||||
|
function_name=plpy.quote_literal(function_name),
|
||||||
|
params=plpy.quote_literal(params),
|
||||||
|
columns_with_types=columns_with_types))
|
||||||
|
|
||||||
|
# Wipe user FDW data from the server
|
||||||
|
wiped = plpy.execute(
|
||||||
|
"SELECT cdb_dataservices_client._DST_DisconnectUserTable({username}::text, {orgname}::text, {server_schema}::text, "
|
||||||
|
"{server_table_name}::text, {fdw_server}::text)" .format(
|
||||||
|
username=plpy.quote_nullable(username),
|
||||||
|
orgname=plpy.quote_nullable(orgname),
|
||||||
|
server_schema=plpy.quote_literal(server_schema),
|
||||||
|
server_table_name=plpy.quote_literal(server_table_name),
|
||||||
|
fdw_server=plpy.quote_literal(server_name)))
|
||||||
|
|
||||||
|
return True
|
||||||
|
$$ LANGUAGE plpythonu;
|
||||||
1792
client/cdb_dataservices_client--0.11.1.sql
Normal file
1792
client/cdb_dataservices_client--0.11.1.sql
Normal file
File diff suppressed because it is too large
Load Diff
@@ -1,5 +1,5 @@
|
|||||||
comment = 'CartoDB dataservices client API extension'
|
comment = 'CartoDB dataservices client API extension'
|
||||||
default_version = '0.10.2'
|
default_version = '0.11.1'
|
||||||
requires = 'plproxy, cartodb'
|
requires = 'plproxy, cartodb'
|
||||||
superuser = true
|
superuser = true
|
||||||
schema = cdb_dataservices_client
|
schema = cdb_dataservices_client
|
||||||
|
|||||||
289
client/old_versions/cdb_dataservices_client--0.10.2--0.11.0.sql
Normal file
289
client/old_versions/cdb_dataservices_client--0.10.2--0.11.0.sql
Normal file
@@ -0,0 +1,289 @@
|
|||||||
|
--DO NOT MODIFY THIS FILE, IT IS GENERATED AUTOMATICALLY FROM SOURCES
|
||||||
|
-- Complain if script is sourced in psql, rather than via CREATE EXTENSION
|
||||||
|
\echo Use "ALTER EXTENSION cdb_dataservices_client UPDATE TO '0.11.0'" to load this file. \quit
|
||||||
|
|
||||||
|
DROP FUNCTION IF EXISTS cdb_dataservices_client._OBS_GetTable(text, text, text, json);
|
||||||
|
DROP FUNCTION IF EXISTS cdb_dataservices_client._OBS_AugmentTable(text, text, json);
|
||||||
|
DROP FUNCTION IF EXISTS cdb_dataservices_client.__OBS_AugmentTable(text, text, text, text, text, text, text, json);
|
||||||
|
DROP FUNCTION IF EXISTS cdb_dataservices_client.__OBS_GetTable(text, text, text, text, text, text, text, text, json);
|
||||||
|
DROP FUNCTION IF EXISTS cdb_dataservices_client._OBS_ConnectUserTable(text, text, text, text, text, text);
|
||||||
|
DROP FUNCTION IF EXISTS cdb_dataservices_client._OBS_GetReturnMetadata(text, text, text, json);
|
||||||
|
DROP FUNCTION IF EXISTS cdb_dataservices_client._OBS_FetchJoinFdwTableData(text, text, text, text, text, json);
|
||||||
|
DROP FUNCTION IF EXISTS cdb_dataservices_client._OBS_DisconnectUserTable(text, text, text, text, text);
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_dataservices_client._DST_PrepareTableOBS_GetMeasure(
|
||||||
|
output_table_name text,
|
||||||
|
params json
|
||||||
|
) RETURNS boolean AS $$
|
||||||
|
DECLARE
|
||||||
|
username text;
|
||||||
|
user_db_role text;
|
||||||
|
orgname text;
|
||||||
|
user_schema text;
|
||||||
|
result boolean;
|
||||||
|
BEGIN
|
||||||
|
IF session_user = 'publicuser' OR session_user ~ 'cartodb_publicuser_*' THEN
|
||||||
|
RAISE EXCEPTION 'The api_key must be provided';
|
||||||
|
END IF;
|
||||||
|
|
||||||
|
SELECT session_user INTO user_db_role;
|
||||||
|
|
||||||
|
SELECT u, o INTO username, orgname FROM cdb_dataservices_client._cdb_entity_config() AS (u text, o text);
|
||||||
|
-- JSON value stored "" is taken as literal
|
||||||
|
IF username IS NULL OR username = '' OR username = '""' THEN
|
||||||
|
RAISE EXCEPTION 'Username is a mandatory argument';
|
||||||
|
END IF;
|
||||||
|
|
||||||
|
IF orgname IS NULL OR orgname = '' OR orgname = '""' THEN
|
||||||
|
user_schema := 'public';
|
||||||
|
ELSE
|
||||||
|
user_schema := username;
|
||||||
|
END IF;
|
||||||
|
|
||||||
|
SELECT cdb_dataservices_client.__DST_PrepareTableOBS_GetMeasure(
|
||||||
|
username,
|
||||||
|
orgname,
|
||||||
|
user_db_role,
|
||||||
|
user_schema,
|
||||||
|
output_table_name,
|
||||||
|
params
|
||||||
|
) INTO result;
|
||||||
|
|
||||||
|
RETURN result;
|
||||||
|
END;
|
||||||
|
$$ LANGUAGE 'plpgsql' SECURITY DEFINER;
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_dataservices_client._DST_PopulateTableOBS_GetMeasure(
|
||||||
|
table_name text,
|
||||||
|
output_table_name text,
|
||||||
|
params json
|
||||||
|
) RETURNS boolean AS $$
|
||||||
|
DECLARE
|
||||||
|
username text;
|
||||||
|
user_db_role text;
|
||||||
|
orgname text;
|
||||||
|
dbname text;
|
||||||
|
user_schema text;
|
||||||
|
result boolean;
|
||||||
|
BEGIN
|
||||||
|
IF session_user = 'publicuser' OR session_user ~ 'cartodb_publicuser_*' THEN
|
||||||
|
RAISE EXCEPTION 'The api_key must be provided';
|
||||||
|
END IF;
|
||||||
|
|
||||||
|
SELECT session_user INTO user_db_role;
|
||||||
|
|
||||||
|
SELECT u, o INTO username, orgname FROM cdb_dataservices_client._cdb_entity_config() AS (u text, o text);
|
||||||
|
-- JSON value stored "" is taken as literal
|
||||||
|
IF username IS NULL OR username = '' OR username = '""' THEN
|
||||||
|
RAISE EXCEPTION 'Username is a mandatory argument';
|
||||||
|
END IF;
|
||||||
|
|
||||||
|
IF orgname IS NULL OR orgname = '' OR orgname = '""' THEN
|
||||||
|
user_schema := 'public';
|
||||||
|
ELSE
|
||||||
|
user_schema := username;
|
||||||
|
END IF;
|
||||||
|
|
||||||
|
SELECT current_database() INTO dbname;
|
||||||
|
|
||||||
|
SELECT cdb_dataservices_client.__DST_PopulateTableOBS_GetMeasure(
|
||||||
|
username,
|
||||||
|
orgname,
|
||||||
|
user_db_role,
|
||||||
|
user_schema,
|
||||||
|
dbname,
|
||||||
|
table_name,
|
||||||
|
output_table_name,
|
||||||
|
params
|
||||||
|
) INTO result;
|
||||||
|
|
||||||
|
RETURN result;
|
||||||
|
END;
|
||||||
|
$$ LANGUAGE 'plpgsql' SECURITY DEFINER;
|
||||||
|
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_dataservices_client.__DST_PrepareTableOBS_GetMeasure(
|
||||||
|
username text,
|
||||||
|
orgname text,
|
||||||
|
user_db_role text,
|
||||||
|
user_schema text,
|
||||||
|
output_table_name text,
|
||||||
|
params json
|
||||||
|
) RETURNS boolean AS $$
|
||||||
|
function_name = 'GetMeasure'
|
||||||
|
# Obtain return types for augmentation procedure
|
||||||
|
ds_return_metadata = plpy.execute("SELECT colnames, coltypes "
|
||||||
|
"FROM cdb_dataservices_client._DST_GetReturnMetadata({username}::text, {orgname}::text, {function_name}::text, {params}::json);"
|
||||||
|
.format(
|
||||||
|
username=plpy.quote_nullable(username),
|
||||||
|
orgname=plpy.quote_nullable(orgname),
|
||||||
|
function_name=plpy.quote_literal(function_name),
|
||||||
|
params=plpy.quote_literal(params)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
if ds_return_metadata[0]["colnames"]:
|
||||||
|
colnames_arr = ds_return_metadata[0]["colnames"]
|
||||||
|
coltypes_arr = ds_return_metadata[0]["coltypes"]
|
||||||
|
else:
|
||||||
|
raise Exception('Error retrieving OBS_GetMeasure metadata')
|
||||||
|
|
||||||
|
|
||||||
|
# Prepare column and type strings required in the SQL queries
|
||||||
|
columns_with_types_arr = [colnames_arr[i] + ' ' + coltypes_arr[i] for i in range(0,len(colnames_arr))]
|
||||||
|
columns_with_types = ','.join(columns_with_types_arr)
|
||||||
|
|
||||||
|
# Create a new table with the required columns
|
||||||
|
plpy.execute('CREATE TABLE "{schema}".{table_name} ( '
|
||||||
|
'cartodb_id int, the_geom geometry, {columns_with_types} '
|
||||||
|
');'
|
||||||
|
.format(schema=user_schema, table_name=output_table_name, columns_with_types=columns_with_types)
|
||||||
|
)
|
||||||
|
|
||||||
|
plpy.execute('ALTER TABLE "{schema}".{table_name} OWNER TO "{user}";'
|
||||||
|
.format(schema=user_schema, table_name=output_table_name, user=user_db_role)
|
||||||
|
)
|
||||||
|
|
||||||
|
return True
|
||||||
|
$$ LANGUAGE plpythonu;
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_dataservices_client.__DST_PopulateTableOBS_GetMeasure(
|
||||||
|
username text,
|
||||||
|
orgname text,
|
||||||
|
user_db_role text,
|
||||||
|
user_schema text,
|
||||||
|
dbname text,
|
||||||
|
table_name text,
|
||||||
|
output_table_name text,
|
||||||
|
params json
|
||||||
|
) RETURNS boolean AS $$
|
||||||
|
function_name = 'GetMeasure'
|
||||||
|
# Obtain return types for augmentation procedure
|
||||||
|
ds_return_metadata = plpy.execute(
|
||||||
|
"SELECT colnames, coltypes "
|
||||||
|
"FROM cdb_dataservices_client._DST_GetReturnMetadata({username}::text, {orgname}::text, {function_name}::text, {params}::json);" .format(
|
||||||
|
username=plpy.quote_nullable(username),
|
||||||
|
orgname=plpy.quote_nullable(orgname),
|
||||||
|
function_name=plpy.quote_literal(function_name),
|
||||||
|
params=plpy.quote_literal(params)))
|
||||||
|
|
||||||
|
if ds_return_metadata[0]["colnames"]:
|
||||||
|
colnames_arr = ds_return_metadata[0]["colnames"]
|
||||||
|
coltypes_arr = ds_return_metadata[0]["coltypes"]
|
||||||
|
else:
|
||||||
|
raise Exception('Error retrieving OBS_GetMeasure metadata')
|
||||||
|
|
||||||
|
# Prepare column and type strings required in the SQL queries
|
||||||
|
columns_with_types_arr = [
|
||||||
|
colnames_arr[i] +
|
||||||
|
' ' +
|
||||||
|
coltypes_arr[i] for i in range(
|
||||||
|
0,
|
||||||
|
len(colnames_arr))]
|
||||||
|
columns_with_types = ','.join(columns_with_types_arr)
|
||||||
|
aliased_colname_list = ','.join(
|
||||||
|
['result.' + name for name in colnames_arr])
|
||||||
|
|
||||||
|
# Instruct the OBS server side to establish a FDW
|
||||||
|
# The metadata is obtained as well in order to:
|
||||||
|
# - (a) be able to write the query to grab the actual data to be executed in the remote server via pl/proxy,
|
||||||
|
# - (b) be able to tell OBS to free resources when done.
|
||||||
|
ds_fdw_metadata = plpy.execute(
|
||||||
|
"SELECT schemaname, tabname, servername "
|
||||||
|
"FROM cdb_dataservices_client._DST_ConnectUserTable({username}::text, {orgname}::text, {user_db_role}::text, "
|
||||||
|
"{schema}::text, {dbname}::text, {table_name}::text);" .format(
|
||||||
|
username=plpy.quote_nullable(username),
|
||||||
|
orgname=plpy.quote_nullable(orgname),
|
||||||
|
user_db_role=plpy.quote_literal(user_db_role),
|
||||||
|
schema=plpy.quote_literal(user_schema),
|
||||||
|
dbname=plpy.quote_literal(dbname),
|
||||||
|
table_name=plpy.quote_literal(table_name)))
|
||||||
|
|
||||||
|
if ds_fdw_metadata[0]["schemaname"]:
|
||||||
|
server_schema = ds_fdw_metadata[0]["schemaname"]
|
||||||
|
server_table_name = ds_fdw_metadata[0]["tabname"]
|
||||||
|
server_name = ds_fdw_metadata[0]["servername"]
|
||||||
|
else:
|
||||||
|
raise Exception('Error connecting dataset via FDW')
|
||||||
|
|
||||||
|
# Create a new table with the required columns
|
||||||
|
plpy.execute(
|
||||||
|
'INSERT INTO "{schema}".{analysis_table_name} '
|
||||||
|
'SELECT ut.cartodb_id, ut.the_geom, {colname_list} '
|
||||||
|
'FROM "{schema}".{table_name} ut '
|
||||||
|
'LEFT JOIN _DST_FetchJoinFdwTableData({username}::text, {orgname}::text, {server_schema}::text, {server_table_name}::text, '
|
||||||
|
'{function_name}::text, {params}::json) '
|
||||||
|
'AS result ({columns_with_types}, cartodb_id int) '
|
||||||
|
'ON result.cartodb_id = ut.cartodb_id;' .format(
|
||||||
|
schema=user_schema,
|
||||||
|
analysis_table_name=output_table_name,
|
||||||
|
colname_list=aliased_colname_list,
|
||||||
|
table_name=table_name,
|
||||||
|
username=plpy.quote_nullable(username),
|
||||||
|
orgname=plpy.quote_nullable(orgname),
|
||||||
|
server_schema=plpy.quote_literal(server_schema),
|
||||||
|
server_table_name=plpy.quote_literal(server_table_name),
|
||||||
|
function_name=plpy.quote_literal(function_name),
|
||||||
|
params=plpy.quote_literal(params),
|
||||||
|
columns_with_types=columns_with_types))
|
||||||
|
|
||||||
|
# Wipe user FDW data from the server
|
||||||
|
wiped = plpy.execute(
|
||||||
|
"SELECT cdb_dataservices_client._DST_DisconnectUserTable({username}::text, {orgname}::text, {server_schema}::text, "
|
||||||
|
"{server_table_name}::text, {fdw_server}::text)" .format(
|
||||||
|
username=plpy.quote_nullable(username),
|
||||||
|
orgname=plpy.quote_nullable(orgname),
|
||||||
|
server_schema=plpy.quote_literal(server_schema),
|
||||||
|
server_table_name=plpy.quote_literal(server_table_name),
|
||||||
|
fdw_server=plpy.quote_literal(server_name)))
|
||||||
|
|
||||||
|
return True
|
||||||
|
$$ LANGUAGE plpythonu;
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_dataservices_client._DST_ConnectUserTable(
|
||||||
|
username text,
|
||||||
|
orgname text,
|
||||||
|
user_db_role text,
|
||||||
|
user_schema text,
|
||||||
|
dbname text,
|
||||||
|
table_name text
|
||||||
|
)RETURNS cdb_dataservices_client.ds_fdw_metadata AS $$
|
||||||
|
CONNECT cdb_dataservices_client._server_conn_str();
|
||||||
|
TARGET cdb_dataservices_server._DST_ConnectUserTable;
|
||||||
|
$$ LANGUAGE plproxy;
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_dataservices_client._DST_GetReturnMetadata(
|
||||||
|
username text,
|
||||||
|
orgname text,
|
||||||
|
function_name text,
|
||||||
|
params json
|
||||||
|
) RETURNS cdb_dataservices_client.ds_return_metadata AS $$
|
||||||
|
CONNECT cdb_dataservices_client._server_conn_str();
|
||||||
|
TARGET cdb_dataservices_server._DST_GetReturnMetadata;
|
||||||
|
$$ LANGUAGE plproxy;
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_dataservices_client._DST_FetchJoinFdwTableData(
|
||||||
|
username text,
|
||||||
|
orgname text,
|
||||||
|
table_schema text,
|
||||||
|
table_name text,
|
||||||
|
function_name text,
|
||||||
|
params json
|
||||||
|
) RETURNS SETOF record AS $$
|
||||||
|
CONNECT cdb_dataservices_client._server_conn_str();
|
||||||
|
TARGET cdb_dataservices_server._DST_FetchJoinFdwTableData;
|
||||||
|
$$ LANGUAGE plproxy;
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_dataservices_client._DST_DisconnectUserTable(
|
||||||
|
username text,
|
||||||
|
orgname text,
|
||||||
|
table_schema text,
|
||||||
|
table_name text,
|
||||||
|
server_name text
|
||||||
|
) RETURNS boolean AS $$
|
||||||
|
CONNECT cdb_dataservices_client._server_conn_str();
|
||||||
|
TARGET cdb_dataservices_server._DST_DisconnectUserTable;
|
||||||
|
$$ LANGUAGE plproxy;
|
||||||
|
|
||||||
|
GRANT EXECUTE ON FUNCTION cdb_dataservices_client._DST_PrepareTableOBS_GetMeasure(output_table_name text, params json) TO publicuser;
|
||||||
|
GRANT EXECUTE ON FUNCTION cdb_dataservices_client._DST_PopulateTableOBS_GetMeasure(table_name text, output_table_name text, params json) TO publicuser;
|
||||||
|
|
||||||
281
client/old_versions/cdb_dataservices_client--0.11.0--0.10.2.sql
Normal file
281
client/old_versions/cdb_dataservices_client--0.11.0--0.10.2.sql
Normal file
@@ -0,0 +1,281 @@
|
|||||||
|
--DO NOT MODIFY THIS FILE, IT IS GENERATED AUTOMATICALLY FROM SOURCES
|
||||||
|
-- Complain if script is sourced in psql, rather than via CREATE EXTENSION
|
||||||
|
\echo Use "ALTER EXTENSION cdb_dataservices_client UPDATE TO '0.10.2'" to load this file. \quit
|
||||||
|
|
||||||
|
DROP FUNCTION IF EXISTS cdb_dataservices_client._DST_PrepareTableOBS_GetMeasure(text, json);
|
||||||
|
DROP FUNCTION IF EXISTS cdb_dataservices_client._DST_PopulateTableOBS_GetMeasure(text, text, json);
|
||||||
|
DROP FUNCTION IF EXISTS cdb_dataservices_client.__DST_PrepareTableOBS_GetMeasure(text, text, text, text, text, json);
|
||||||
|
DROP FUNCTION IF EXISTS cdb_dataservices_client.__DST_PopulateTableOBS_GetMeasure(text, text, text, text, text, text, text, json);
|
||||||
|
DROP FUNCTION IF EXISTS cdb_dataservices_client._DST_ConnectUserTable(text, text, text, text, text, text);
|
||||||
|
DROP FUNCTION IF EXISTS cdb_dataservices_client._DST_GetReturnMetadata(text, text, text, json);
|
||||||
|
DROP FUNCTION IF EXISTS cdb_dataservices_client._DST_FetchJoinFdwTableData(text, text, text, text, text, json);
|
||||||
|
DROP FUNCTION IF EXISTS cdb_dataservices_client._DST_DisconnectUserTable(text, text, text, text, text);
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_dataservices_client._OBS_GetTable(table_name text, output_table_name text, function_name text, params json)
|
||||||
|
RETURNS boolean AS $$
|
||||||
|
DECLARE
|
||||||
|
username text;
|
||||||
|
user_db_role text;
|
||||||
|
orgname text;
|
||||||
|
dbname text;
|
||||||
|
user_schema text;
|
||||||
|
result boolean;
|
||||||
|
BEGIN
|
||||||
|
IF session_user = 'publicuser' OR session_user ~ 'cartodb_publicuser_*' THEN
|
||||||
|
RAISE EXCEPTION 'The api_key must be provided';
|
||||||
|
END IF;
|
||||||
|
|
||||||
|
SELECT session_user INTO user_db_role;
|
||||||
|
|
||||||
|
SELECT u, o INTO username, orgname FROM cdb_dataservices_client._cdb_entity_config() AS (u text, o text);
|
||||||
|
-- JSON value stored "" is taken as literal
|
||||||
|
IF username IS NULL OR username = '' OR username = '""' THEN
|
||||||
|
RAISE EXCEPTION 'Username is a mandatory argument';
|
||||||
|
END IF;
|
||||||
|
|
||||||
|
IF orgname IS NULL OR orgname = '' OR orgname = '""' THEN
|
||||||
|
user_schema := 'public';
|
||||||
|
ELSE
|
||||||
|
user_schema := username;
|
||||||
|
END IF;
|
||||||
|
|
||||||
|
SELECT current_database() INTO dbname;
|
||||||
|
|
||||||
|
SELECT cdb_dataservices_client.__OBS_GetTable(username, orgname, user_db_role, user_schema, dbname, table_name, output_table_name, function_name, params) INTO result;
|
||||||
|
|
||||||
|
RETURN result;
|
||||||
|
END;
|
||||||
|
$$ LANGUAGE 'plpgsql' SECURITY DEFINER;
|
||||||
|
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_dataservices_client._OBS_AugmentTable(table_name text, function_name text, params json)
|
||||||
|
RETURNS boolean AS $$
|
||||||
|
DECLARE
|
||||||
|
username text;
|
||||||
|
user_db_role text;
|
||||||
|
orgname text;
|
||||||
|
dbname text;
|
||||||
|
user_schema text;
|
||||||
|
result boolean;
|
||||||
|
BEGIN
|
||||||
|
IF session_user = 'publicuser' OR session_user ~ 'cartodb_publicuser_*' THEN
|
||||||
|
RAISE EXCEPTION 'The api_key must be provided';
|
||||||
|
END IF;
|
||||||
|
|
||||||
|
SELECT session_user INTO user_db_role;
|
||||||
|
|
||||||
|
SELECT u, o INTO username, orgname FROM cdb_dataservices_client._cdb_entity_config() AS (u text, o text);
|
||||||
|
-- JSON value stored "" is taken as literal
|
||||||
|
IF username IS NULL OR username = '' OR username = '""' THEN
|
||||||
|
RAISE EXCEPTION 'Username is a mandatory argument';
|
||||||
|
END IF;
|
||||||
|
|
||||||
|
IF orgname IS NULL OR orgname = '' OR orgname = '""' THEN
|
||||||
|
user_schema := 'public';
|
||||||
|
ELSE
|
||||||
|
user_schema := username;
|
||||||
|
END IF;
|
||||||
|
|
||||||
|
SELECT current_database() INTO dbname;
|
||||||
|
|
||||||
|
SELECT cdb_dataservices_client.__OBS_AugmentTable(username, orgname, user_db_role, user_schema, dbname, table_name, function_name, params) INTO result;
|
||||||
|
|
||||||
|
RETURN result;
|
||||||
|
END;
|
||||||
|
$$ LANGUAGE 'plpgsql' SECURITY DEFINER;
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_dataservices_client.__OBS_AugmentTable(username text, orgname text, user_db_role text, user_schema text, dbname text, table_name text, function_name text, params json)
|
||||||
|
RETURNS boolean AS $$
|
||||||
|
from time import strftime
|
||||||
|
try:
|
||||||
|
server_table_name = None
|
||||||
|
temporary_table_name = 'ds_tmp_' + str(strftime("%s")) + table_name
|
||||||
|
|
||||||
|
# Obtain return types for augmentation procedure
|
||||||
|
ds_return_metadata = plpy.execute("SELECT colnames, coltypes "
|
||||||
|
"FROM cdb_dataservices_client._OBS_GetReturnMetadata({username}::text, {orgname}::text, {function_name}::text, {params}::json);"
|
||||||
|
.format(username=plpy.quote_nullable(username), orgname=plpy.quote_nullable(orgname), function_name=plpy.quote_literal(function_name), params=plpy.quote_literal(params))
|
||||||
|
)
|
||||||
|
|
||||||
|
colnames_arr = ds_return_metadata[0]["colnames"]
|
||||||
|
coltypes_arr = ds_return_metadata[0]["coltypes"]
|
||||||
|
|
||||||
|
# Prepare column and type strings required in the SQL queries
|
||||||
|
colnames = ','.join(colnames_arr)
|
||||||
|
columns_with_types_arr = [colnames_arr[i] + ' ' + coltypes_arr[i] for i in range(0,len(colnames_arr))]
|
||||||
|
columns_with_types = ','.join(columns_with_types_arr)
|
||||||
|
|
||||||
|
|
||||||
|
# Instruct the OBS server side to establish a FDW
|
||||||
|
# The metadata is obtained as well in order to:
|
||||||
|
# - (a) be able to write the query to grab the actual data to be executed in the remote server via pl/proxy,
|
||||||
|
# - (b) be able to tell OBS to free resources when done.
|
||||||
|
ds_fdw_metadata = plpy.execute("SELECT schemaname, tabname, servername "
|
||||||
|
"FROM cdb_dataservices_client._OBS_ConnectUserTable({username}::text, {orgname}::text, {user_db_role}::text, {user_schema}::text, {dbname}::text, {table_name}::text);"
|
||||||
|
.format(username=plpy.quote_nullable(username), orgname=plpy.quote_nullable(orgname), user_db_role=plpy.quote_literal(user_db_role), user_schema=plpy.quote_literal(user_schema), dbname=plpy.quote_literal(dbname), table_name=plpy.quote_literal(table_name))
|
||||||
|
)
|
||||||
|
|
||||||
|
server_schema = ds_fdw_metadata[0]["schemaname"]
|
||||||
|
server_table_name = ds_fdw_metadata[0]["tabname"]
|
||||||
|
server_name = ds_fdw_metadata[0]["servername"]
|
||||||
|
|
||||||
|
# Create temporary table with the augmented results
|
||||||
|
plpy.execute('CREATE UNLOGGED TABLE "{user_schema}".{temp_table_name} AS '
|
||||||
|
'(SELECT {columns}, cartodb_id '
|
||||||
|
'FROM cdb_dataservices_client._OBS_FetchJoinFdwTableData('
|
||||||
|
'{username}::text, {orgname}::text, {schema}::text, {table_name}::text, {function_name}::text, {params}::json) '
|
||||||
|
'AS results({columns_with_types}, cartodb_id int) )'
|
||||||
|
.format(columns=colnames, username=plpy.quote_nullable(username), orgname=plpy.quote_nullable(orgname),
|
||||||
|
user_schema=user_schema, schema=plpy.quote_literal(server_schema), table_name=plpy.quote_literal(server_table_name),
|
||||||
|
function_name=plpy.quote_literal(function_name), params=plpy.quote_literal(params), columns_with_types=columns_with_types,
|
||||||
|
temp_table_name=temporary_table_name)
|
||||||
|
)
|
||||||
|
|
||||||
|
# Wipe user FDW data from the server
|
||||||
|
wiped = plpy.execute("SELECT cdb_dataservices_client._OBS_DisconnectUserTable({username}::text, {orgname}::text, {server_schema}::text, {server_table_name}::text, {fdw_server}::text)"
|
||||||
|
.format(username=plpy.quote_nullable(username), orgname=plpy.quote_nullable(orgname), server_schema=plpy.quote_literal(server_schema), server_table_name=plpy.quote_literal(server_table_name), fdw_server=plpy.quote_literal(server_name))
|
||||||
|
)
|
||||||
|
|
||||||
|
# Add index to cartodb_id
|
||||||
|
plpy.execute('CREATE UNIQUE INDEX {temp_table_name}_pkey ON "{user_schema}".{temp_table_name} (cartodb_id)'
|
||||||
|
.format(user_schema=user_schema, temp_table_name=temporary_table_name)
|
||||||
|
)
|
||||||
|
|
||||||
|
# Prepare table to receive augmented results in new columns
|
||||||
|
for idx, column in enumerate(colnames_arr):
|
||||||
|
if colnames_arr[idx] is not 'the_geom':
|
||||||
|
plpy.execute('ALTER TABLE "{user_schema}".{table_name} ADD COLUMN {column_name} {column_type}'
|
||||||
|
.format(user_schema=user_schema, table_name=table_name, column_name=colnames_arr[idx], column_type=coltypes_arr[idx])
|
||||||
|
)
|
||||||
|
|
||||||
|
# Populate the user table with the augmented results
|
||||||
|
plpy.execute('UPDATE "{user_schema}".{table_name} SET {columns} = '
|
||||||
|
'(SELECT {columns} FROM "{user_schema}".{temporary_table_name} '
|
||||||
|
'WHERE "{user_schema}".{temporary_table_name}.cartodb_id = "{user_schema}".{table_name}.cartodb_id)'
|
||||||
|
.format(columns = colnames, username=plpy.quote_nullable(username), orgname=plpy.quote_nullable(orgname),
|
||||||
|
user_schema = user_schema, table_name=table_name, function_name=function_name, params=params, columns_with_types=columns_with_types,
|
||||||
|
temporary_table_name=temporary_table_name)
|
||||||
|
)
|
||||||
|
|
||||||
|
plpy.execute('DROP TABLE IF EXISTS "{user_schema}".{temporary_table_name}'
|
||||||
|
.format(user_schema=user_schema, table_name=table_name, temporary_table_name=temporary_table_name)
|
||||||
|
)
|
||||||
|
|
||||||
|
return True
|
||||||
|
except Exception as e:
|
||||||
|
plpy.warning('Error trying to augment table {0}'.format(e))
|
||||||
|
# Wipe user FDW data from the server in case of failure if the table was connected
|
||||||
|
if server_table_name:
|
||||||
|
# Wipe local temporary table
|
||||||
|
plpy.execute('DROP TABLE IF EXISTS "{user_schema}".{temporary_table_name}'
|
||||||
|
.format(user_schema=user_schema, table_name=table_name, temporary_table_name=temporary_table_name)
|
||||||
|
)
|
||||||
|
|
||||||
|
wiped = plpy.execute("SELECT cdb_dataservices_client._OBS_DisconnectUserTable({username}::text, {orgname}::text, {server_schema}::text, {server_table_name}::text, {fdw_server}::text)"
|
||||||
|
.format(username=plpy.quote_nullable(username), orgname=plpy.quote_nullable(orgname), server_schema=plpy.quote_literal(server_schema), server_table_name=plpy.quote_literal(server_table_name), fdw_server=plpy.quote_literal(server_name))
|
||||||
|
)
|
||||||
|
return False
|
||||||
|
$$ LANGUAGE plpythonu;
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_dataservices_client.__OBS_GetTable(username text, orgname text, user_db_role text, user_schema text, dbname text, table_name text, output_table_name text, function_name text, params json)
|
||||||
|
RETURNS boolean AS $$
|
||||||
|
try:
|
||||||
|
server_table_name = None
|
||||||
|
# Obtain return types for augmentation procedure
|
||||||
|
ds_return_metadata = plpy.execute("SELECT colnames, coltypes "
|
||||||
|
"FROM cdb_dataservices_client._OBS_GetReturnMetadata({username}::text, {orgname}::text, {function_name}::text, {params}::json);"
|
||||||
|
.format(username=plpy.quote_nullable(username), orgname=plpy.quote_nullable(orgname), function_name=plpy.quote_literal(function_name), params=plpy.quote_literal(params))
|
||||||
|
)
|
||||||
|
|
||||||
|
colnames_arr = ds_return_metadata[0]["colnames"]
|
||||||
|
coltypes_arr = ds_return_metadata[0]["coltypes"]
|
||||||
|
|
||||||
|
# Prepare column and type strings required in the SQL queries
|
||||||
|
colnames = ','.join(colnames_arr)
|
||||||
|
columns_with_types_arr = [colnames_arr[i] + ' ' + coltypes_arr[i] for i in range(0,len(colnames_arr))]
|
||||||
|
columns_with_types = ','.join(columns_with_types_arr)
|
||||||
|
|
||||||
|
|
||||||
|
# Instruct the OBS server side to establish a FDW
|
||||||
|
# The metadata is obtained as well in order to:
|
||||||
|
# - (a) be able to write the query to grab the actual data to be executed in the remote server via pl/proxy,
|
||||||
|
# - (b) be able to tell OBS to free resources when done.
|
||||||
|
ds_fdw_metadata = plpy.execute("SELECT schemaname, tabname, servername "
|
||||||
|
"FROM cdb_dataservices_client._OBS_ConnectUserTable({username}::text, {orgname}::text, {user_db_role}::text, {schema}::text, {dbname}::text, {table_name}::text);"
|
||||||
|
.format(username=plpy.quote_nullable(username), orgname=plpy.quote_nullable(orgname), user_db_role=plpy.quote_literal(user_db_role), schema=plpy.quote_literal(user_schema), dbname=plpy.quote_literal(dbname), table_name=plpy.quote_literal(table_name))
|
||||||
|
)
|
||||||
|
|
||||||
|
server_schema = ds_fdw_metadata[0]["schemaname"]
|
||||||
|
server_table_name = ds_fdw_metadata[0]["tabname"]
|
||||||
|
server_name = ds_fdw_metadata[0]["servername"]
|
||||||
|
|
||||||
|
# Get list of user columns to include in the new table
|
||||||
|
user_table_columns = ','.join(
|
||||||
|
plpy.execute('SELECT array_agg(\'user_table.\' || attname) AS columns '
|
||||||
|
'FROM pg_attribute WHERE attrelid = \'"{user_schema}".{table_name}\'::regclass '
|
||||||
|
'AND attnum > 0 AND NOT attisdropped AND attname NOT LIKE \'the_geom_webmercator\' '
|
||||||
|
'AND NOT attname LIKE ANY(string_to_array(\'{colnames}\',\',\'));'
|
||||||
|
.format(user_schema=user_schema, table_name=table_name, colnames=colnames)
|
||||||
|
)[0]["columns"]
|
||||||
|
)
|
||||||
|
|
||||||
|
# Populate a new table with the augmented results
|
||||||
|
plpy.execute('CREATE TABLE "{user_schema}".{output_table_name} AS '
|
||||||
|
'(SELECT results.{columns}, {user_table_columns} '
|
||||||
|
'FROM {table_name} AS user_table '
|
||||||
|
'LEFT JOIN cdb_dataservices_client._OBS_FetchJoinFdwTableData({username}::text, {orgname}::text, {server_schema}::text, {server_table_name}::text, {function_name}::text, {params}::json) as results({columns_with_types}, cartodb_id int) '
|
||||||
|
'ON results.cartodb_id = user_table.cartodb_id)'
|
||||||
|
.format(output_table_name=output_table_name, columns=colnames, user_table_columns=user_table_columns, username=plpy.quote_nullable(username),
|
||||||
|
orgname=plpy.quote_nullable(orgname), user_schema=user_schema, server_schema=plpy.quote_literal(server_schema), server_table_name=plpy.quote_literal(server_table_name),
|
||||||
|
table_name=table_name, function_name=plpy.quote_literal(function_name), params=plpy.quote_literal(params), columns_with_types=columns_with_types)
|
||||||
|
)
|
||||||
|
|
||||||
|
plpy.execute('ALTER TABLE "{schema}".{table_name} OWNER TO "{user}";'
|
||||||
|
.format(schema=user_schema, table_name=output_table_name, user=user_db_role)
|
||||||
|
)
|
||||||
|
|
||||||
|
# Wipe user FDW data from the server
|
||||||
|
wiped = plpy.execute("SELECT cdb_dataservices_client._OBS_DisconnectUserTable({username}::text, {orgname}::text, {server_schema}::text, {server_table_name}::text, {fdw_server}::text)"
|
||||||
|
.format(username=plpy.quote_nullable(username), orgname=plpy.quote_nullable(orgname), server_schema=plpy.quote_literal(server_schema), server_table_name=plpy.quote_literal(server_table_name), fdw_server=plpy.quote_literal(server_name))
|
||||||
|
)
|
||||||
|
|
||||||
|
return True
|
||||||
|
except Exception as e:
|
||||||
|
plpy.warning('Error trying to get table {0}'.format(e))
|
||||||
|
# Wipe user FDW data from the server in case of failure if the table was connected
|
||||||
|
if server_table_name:
|
||||||
|
wiped = plpy.execute("SELECT cdb_dataservices_client._OBS_DisconnectUserTable({username}::text, {orgname}::text, {server_schema}::text, {server_table_name}::text, {fdw_server}::text)"
|
||||||
|
.format(username=plpy.quote_nullable(username), orgname=plpy.quote_nullable(orgname), server_schema=plpy.quote_literal(server_schema), server_table_name=plpy.quote_literal(server_table_name), fdw_server=plpy.quote_literal(server_name))
|
||||||
|
)
|
||||||
|
return False
|
||||||
|
$$ LANGUAGE plpythonu;
|
||||||
|
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_dataservices_client._OBS_ConnectUserTable(username text, orgname text, user_db_role text, user_schema text, dbname text, table_name text)
|
||||||
|
RETURNS cdb_dataservices_client.ds_fdw_metadata AS $$
|
||||||
|
CONNECT _server_conn_str();
|
||||||
|
TARGET cdb_dataservices_server._OBS_ConnectUserTable;
|
||||||
|
$$ LANGUAGE plproxy;
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_dataservices_client._OBS_GetReturnMetadata(username text, orgname text, function_name text, params json)
|
||||||
|
RETURNS cdb_dataservices_client.ds_return_metadata AS $$
|
||||||
|
CONNECT _server_conn_str();
|
||||||
|
TARGET cdb_dataservices_server._OBS_GetReturnMetadata;
|
||||||
|
$$ LANGUAGE plproxy;
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_dataservices_client._OBS_FetchJoinFdwTableData(username text, orgname text, table_schema text, table_name text, function_name text, params json)
|
||||||
|
RETURNS SETOF record AS $$
|
||||||
|
CONNECT _server_conn_str();
|
||||||
|
TARGET cdb_dataservices_server._OBS_FetchJoinFdwTableData;
|
||||||
|
$$ LANGUAGE plproxy;
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_dataservices_client._OBS_DisconnectUserTable(username text, orgname text, table_schema text, table_name text, server_name text)
|
||||||
|
RETURNS boolean AS $$
|
||||||
|
CONNECT _server_conn_str();
|
||||||
|
TARGET cdb_dataservices_server._OBS_DisconnectUserTable;
|
||||||
|
$$ LANGUAGE plproxy;
|
||||||
|
|
||||||
|
GRANT EXECUTE ON FUNCTION cdb_dataservices_client._obs_augmenttable(table_name text, function_name text, params json) TO publicuser;
|
||||||
|
GRANT EXECUTE ON FUNCTION cdb_dataservices_client._obs_gettable(table_name text, output_table_name text, function_name text, params json) TO publicuser;
|
||||||
1792
client/old_versions/cdb_dataservices_client--0.11.0.sql
Normal file
1792
client/old_versions/cdb_dataservices_client--0.11.0.sql
Normal file
File diff suppressed because it is too large
Load Diff
@@ -33,7 +33,6 @@
|
|||||||
- { name: admin1_name, type: text}
|
- { name: admin1_name, type: text}
|
||||||
- { name: country_name, type: text}
|
- { name: country_name, type: text}
|
||||||
|
|
||||||
|
|
||||||
- name: cdb_geocode_postalcode_polygon
|
- name: cdb_geocode_postalcode_polygon
|
||||||
return_type: Geometry
|
return_type: Geometry
|
||||||
params:
|
params:
|
||||||
|
|||||||
@@ -1,8 +1,53 @@
|
|||||||
CREATE TYPE cdb_dataservices_client.ds_fdw_metadata as (schemaname text, tabname text, servername text);
|
CREATE TYPE cdb_dataservices_client.ds_fdw_metadata as (schemaname text, tabname text, servername text);
|
||||||
CREATE TYPE cdb_dataservices_client.ds_return_metadata as (colnames text[], coltypes text[]);
|
CREATE TYPE cdb_dataservices_client.ds_return_metadata as (colnames text[], coltypes text[]);
|
||||||
|
|
||||||
CREATE OR REPLACE FUNCTION cdb_dataservices_client._OBS_GetTable(table_name text, output_table_name text, function_name text, params json)
|
CREATE OR REPLACE FUNCTION cdb_dataservices_client._DST_PrepareTableOBS_GetMeasure(
|
||||||
RETURNS boolean AS $$
|
output_table_name text,
|
||||||
|
params json
|
||||||
|
) RETURNS boolean AS $$
|
||||||
|
DECLARE
|
||||||
|
username text;
|
||||||
|
user_db_role text;
|
||||||
|
orgname text;
|
||||||
|
user_schema text;
|
||||||
|
result boolean;
|
||||||
|
BEGIN
|
||||||
|
IF session_user = 'publicuser' OR session_user ~ 'cartodb_publicuser_*' THEN
|
||||||
|
RAISE EXCEPTION 'The api_key must be provided';
|
||||||
|
END IF;
|
||||||
|
|
||||||
|
SELECT session_user INTO user_db_role;
|
||||||
|
|
||||||
|
SELECT u, o INTO username, orgname FROM cdb_dataservices_client._cdb_entity_config() AS (u text, o text);
|
||||||
|
-- JSON value stored "" is taken as literal
|
||||||
|
IF username IS NULL OR username = '' OR username = '""' THEN
|
||||||
|
RAISE EXCEPTION 'Username is a mandatory argument';
|
||||||
|
END IF;
|
||||||
|
|
||||||
|
IF orgname IS NULL OR orgname = '' OR orgname = '""' THEN
|
||||||
|
user_schema := 'public';
|
||||||
|
ELSE
|
||||||
|
user_schema := username;
|
||||||
|
END IF;
|
||||||
|
|
||||||
|
SELECT cdb_dataservices_client.__DST_PrepareTableOBS_GetMeasure(
|
||||||
|
username,
|
||||||
|
orgname,
|
||||||
|
user_db_role,
|
||||||
|
user_schema,
|
||||||
|
output_table_name,
|
||||||
|
params
|
||||||
|
) INTO result;
|
||||||
|
|
||||||
|
RETURN result;
|
||||||
|
END;
|
||||||
|
$$ LANGUAGE 'plpgsql' SECURITY DEFINER;
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_dataservices_client._DST_PopulateTableOBS_GetMeasure(
|
||||||
|
table_name text,
|
||||||
|
output_table_name text,
|
||||||
|
params json
|
||||||
|
) RETURNS boolean AS $$
|
||||||
DECLARE
|
DECLARE
|
||||||
username text;
|
username text;
|
||||||
user_db_role text;
|
user_db_role text;
|
||||||
@@ -31,238 +76,200 @@ BEGIN
|
|||||||
|
|
||||||
SELECT current_database() INTO dbname;
|
SELECT current_database() INTO dbname;
|
||||||
|
|
||||||
SELECT cdb_dataservices_client.__OBS_GetTable(username, orgname, user_db_role, user_schema, dbname, table_name, output_table_name, function_name, params) INTO result;
|
SELECT cdb_dataservices_client.__DST_PopulateTableOBS_GetMeasure(
|
||||||
|
username,
|
||||||
|
orgname,
|
||||||
|
user_db_role,
|
||||||
|
user_schema,
|
||||||
|
dbname,
|
||||||
|
table_name,
|
||||||
|
output_table_name,
|
||||||
|
params
|
||||||
|
) INTO result;
|
||||||
|
|
||||||
RETURN result;
|
RETURN result;
|
||||||
END;
|
END;
|
||||||
$$ LANGUAGE 'plpgsql' SECURITY DEFINER;
|
$$ LANGUAGE 'plpgsql' SECURITY DEFINER;
|
||||||
|
|
||||||
|
|
||||||
CREATE OR REPLACE FUNCTION cdb_dataservices_client._OBS_AugmentTable(table_name text, function_name text, params json)
|
CREATE OR REPLACE FUNCTION cdb_dataservices_client.__DST_PrepareTableOBS_GetMeasure(
|
||||||
RETURNS boolean AS $$
|
username text,
|
||||||
DECLARE
|
orgname text,
|
||||||
username text;
|
user_db_role text,
|
||||||
user_db_role text;
|
user_schema text,
|
||||||
orgname text;
|
output_table_name text,
|
||||||
dbname text;
|
params json
|
||||||
user_schema text;
|
) RETURNS boolean AS $$
|
||||||
result boolean;
|
function_name = 'OBS_GetMeasure'
|
||||||
BEGIN
|
# Obtain return types for augmentation procedure
|
||||||
IF session_user = 'publicuser' OR session_user ~ 'cartodb_publicuser_*' THEN
|
ds_return_metadata = plpy.execute("SELECT colnames, coltypes "
|
||||||
RAISE EXCEPTION 'The api_key must be provided';
|
"FROM cdb_dataservices_client._DST_GetReturnMetadata({username}::text, {orgname}::text, {function_name}::text, {params}::json);"
|
||||||
END IF;
|
.format(
|
||||||
|
username=plpy.quote_nullable(username),
|
||||||
SELECT session_user INTO user_db_role;
|
orgname=plpy.quote_nullable(orgname),
|
||||||
|
function_name=plpy.quote_literal(function_name),
|
||||||
SELECT u, o INTO username, orgname FROM cdb_dataservices_client._cdb_entity_config() AS (u text, o text);
|
params=plpy.quote_literal(params)
|
||||||
-- JSON value stored "" is taken as literal
|
|
||||||
IF username IS NULL OR username = '' OR username = '""' THEN
|
|
||||||
RAISE EXCEPTION 'Username is a mandatory argument';
|
|
||||||
END IF;
|
|
||||||
|
|
||||||
IF orgname IS NULL OR orgname = '' OR orgname = '""' THEN
|
|
||||||
user_schema := 'public';
|
|
||||||
ELSE
|
|
||||||
user_schema := username;
|
|
||||||
END IF;
|
|
||||||
|
|
||||||
SELECT current_database() INTO dbname;
|
|
||||||
|
|
||||||
SELECT cdb_dataservices_client.__OBS_AugmentTable(username, orgname, user_db_role, user_schema, dbname, table_name, function_name, params) INTO result;
|
|
||||||
|
|
||||||
RETURN result;
|
|
||||||
END;
|
|
||||||
$$ LANGUAGE 'plpgsql' SECURITY DEFINER;
|
|
||||||
|
|
||||||
CREATE OR REPLACE FUNCTION cdb_dataservices_client.__OBS_AugmentTable(username text, orgname text, user_db_role text, user_schema text, dbname text, table_name text, function_name text, params json)
|
|
||||||
RETURNS boolean AS $$
|
|
||||||
from time import strftime
|
|
||||||
try:
|
|
||||||
server_table_name = None
|
|
||||||
temporary_table_name = 'ds_tmp_' + str(strftime("%s")) + table_name
|
|
||||||
|
|
||||||
# Obtain return types for augmentation procedure
|
|
||||||
ds_return_metadata = plpy.execute("SELECT colnames, coltypes "
|
|
||||||
"FROM cdb_dataservices_client._OBS_GetReturnMetadata({username}::text, {orgname}::text, {function_name}::text, {params}::json);"
|
|
||||||
.format(username=plpy.quote_nullable(username), orgname=plpy.quote_nullable(orgname), function_name=plpy.quote_literal(function_name), params=plpy.quote_literal(params))
|
|
||||||
)
|
)
|
||||||
|
)
|
||||||
|
if ds_return_metadata[0]["colnames"]:
|
||||||
colnames_arr = ds_return_metadata[0]["colnames"]
|
colnames_arr = ds_return_metadata[0]["colnames"]
|
||||||
coltypes_arr = ds_return_metadata[0]["coltypes"]
|
coltypes_arr = ds_return_metadata[0]["coltypes"]
|
||||||
|
else:
|
||||||
# Prepare column and type strings required in the SQL queries
|
raise Exception('Error retrieving OBS_GetMeasure metadata')
|
||||||
colnames = ','.join(colnames_arr)
|
|
||||||
columns_with_types_arr = [colnames_arr[i] + ' ' + coltypes_arr[i] for i in range(0,len(colnames_arr))]
|
|
||||||
columns_with_types = ','.join(columns_with_types_arr)
|
|
||||||
|
|
||||||
|
|
||||||
# Instruct the OBS server side to establish a FDW
|
# Prepare column and type strings required in the SQL queries
|
||||||
# The metadata is obtained as well in order to:
|
columns_with_types_arr = [colnames_arr[i] + ' ' + coltypes_arr[i] for i in range(0,len(colnames_arr))]
|
||||||
# - (a) be able to write the query to grab the actual data to be executed in the remote server via pl/proxy,
|
columns_with_types = ','.join(columns_with_types_arr)
|
||||||
# - (b) be able to tell OBS to free resources when done.
|
|
||||||
ds_fdw_metadata = plpy.execute("SELECT schemaname, tabname, servername "
|
|
||||||
"FROM cdb_dataservices_client._OBS_ConnectUserTable({username}::text, {orgname}::text, {user_db_role}::text, {user_schema}::text, {dbname}::text, {table_name}::text);"
|
|
||||||
.format(username=plpy.quote_nullable(username), orgname=plpy.quote_nullable(orgname), user_db_role=plpy.quote_literal(user_db_role), user_schema=plpy.quote_literal(user_schema), dbname=plpy.quote_literal(dbname), table_name=plpy.quote_literal(table_name))
|
|
||||||
)
|
|
||||||
|
|
||||||
server_schema = ds_fdw_metadata[0]["schemaname"]
|
# Create a new table with the required columns
|
||||||
server_table_name = ds_fdw_metadata[0]["tabname"]
|
plpy.execute('CREATE TABLE "{schema}".{table_name} ( '
|
||||||
server_name = ds_fdw_metadata[0]["servername"]
|
'cartodb_id int, the_geom geometry, {columns_with_types} '
|
||||||
|
');'
|
||||||
# Create temporary table with the augmented results
|
.format(schema=user_schema, table_name=output_table_name, columns_with_types=columns_with_types)
|
||||||
plpy.execute('CREATE UNLOGGED TABLE "{user_schema}".{temp_table_name} AS '
|
|
||||||
'(SELECT {columns}, cartodb_id '
|
|
||||||
'FROM cdb_dataservices_client._OBS_FetchJoinFdwTableData('
|
|
||||||
'{username}::text, {orgname}::text, {schema}::text, {table_name}::text, {function_name}::text, {params}::json) '
|
|
||||||
'AS results({columns_with_types}, cartodb_id int) )'
|
|
||||||
.format(columns=colnames, username=plpy.quote_nullable(username), orgname=plpy.quote_nullable(orgname),
|
|
||||||
user_schema=user_schema, schema=plpy.quote_literal(server_schema), table_name=plpy.quote_literal(server_table_name),
|
|
||||||
function_name=plpy.quote_literal(function_name), params=plpy.quote_literal(params), columns_with_types=columns_with_types,
|
|
||||||
temp_table_name=temporary_table_name)
|
|
||||||
)
|
|
||||||
|
|
||||||
# Wipe user FDW data from the server
|
|
||||||
wiped = plpy.execute("SELECT cdb_dataservices_client._OBS_DisconnectUserTable({username}::text, {orgname}::text, {server_schema}::text, {server_table_name}::text, {fdw_server}::text)"
|
|
||||||
.format(username=plpy.quote_nullable(username), orgname=plpy.quote_nullable(orgname), server_schema=plpy.quote_literal(server_schema), server_table_name=plpy.quote_literal(server_table_name), fdw_server=plpy.quote_literal(server_name))
|
|
||||||
)
|
|
||||||
|
|
||||||
# Add index to cartodb_id
|
|
||||||
plpy.execute('CREATE UNIQUE INDEX {temp_table_name}_pkey ON "{user_schema}".{temp_table_name} (cartodb_id)'
|
|
||||||
.format(user_schema=user_schema, temp_table_name=temporary_table_name)
|
|
||||||
)
|
|
||||||
|
|
||||||
# Prepare table to receive augmented results in new columns
|
|
||||||
for idx, column in enumerate(colnames_arr):
|
|
||||||
if colnames_arr[idx] is not 'the_geom':
|
|
||||||
plpy.execute('ALTER TABLE "{user_schema}".{table_name} ADD COLUMN {column_name} {column_type}'
|
|
||||||
.format(user_schema=user_schema, table_name=table_name, column_name=colnames_arr[idx], column_type=coltypes_arr[idx])
|
|
||||||
)
|
|
||||||
|
|
||||||
# Populate the user table with the augmented results
|
|
||||||
plpy.execute('UPDATE "{user_schema}".{table_name} SET {columns} = '
|
|
||||||
'(SELECT {columns} FROM "{user_schema}".{temporary_table_name} '
|
|
||||||
'WHERE "{user_schema}".{temporary_table_name}.cartodb_id = "{user_schema}".{table_name}.cartodb_id)'
|
|
||||||
.format(columns = colnames, username=plpy.quote_nullable(username), orgname=plpy.quote_nullable(orgname),
|
|
||||||
user_schema = user_schema, table_name=table_name, function_name=function_name, params=params, columns_with_types=columns_with_types,
|
|
||||||
temporary_table_name=temporary_table_name)
|
|
||||||
)
|
|
||||||
|
|
||||||
plpy.execute('DROP TABLE IF EXISTS "{user_schema}".{temporary_table_name}'
|
|
||||||
.format(user_schema=user_schema, table_name=table_name, temporary_table_name=temporary_table_name)
|
|
||||||
)
|
|
||||||
|
|
||||||
return True
|
|
||||||
except Exception as e:
|
|
||||||
plpy.warning('Error trying to augment table {0}'.format(e))
|
|
||||||
# Wipe user FDW data from the server in case of failure if the table was connected
|
|
||||||
if server_table_name:
|
|
||||||
# Wipe local temporary table
|
|
||||||
plpy.execute('DROP TABLE IF EXISTS "{user_schema}".{temporary_table_name}'
|
|
||||||
.format(user_schema=user_schema, table_name=table_name, temporary_table_name=temporary_table_name)
|
|
||||||
)
|
|
||||||
|
|
||||||
wiped = plpy.execute("SELECT cdb_dataservices_client._OBS_DisconnectUserTable({username}::text, {orgname}::text, {server_schema}::text, {server_table_name}::text, {fdw_server}::text)"
|
|
||||||
.format(username=plpy.quote_nullable(username), orgname=plpy.quote_nullable(orgname), server_schema=plpy.quote_literal(server_schema), server_table_name=plpy.quote_literal(server_table_name), fdw_server=plpy.quote_literal(server_name))
|
|
||||||
)
|
|
||||||
return False
|
|
||||||
$$ LANGUAGE plpythonu;
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
CREATE OR REPLACE FUNCTION cdb_dataservices_client.__OBS_GetTable(username text, orgname text, user_db_role text, user_schema text, dbname text, table_name text, output_table_name text, function_name text, params json)
|
|
||||||
RETURNS boolean AS $$
|
|
||||||
try:
|
|
||||||
server_table_name = None
|
|
||||||
# Obtain return types for augmentation procedure
|
|
||||||
ds_return_metadata = plpy.execute("SELECT colnames, coltypes "
|
|
||||||
"FROM cdb_dataservices_client._OBS_GetReturnMetadata({username}::text, {orgname}::text, {function_name}::text, {params}::json);"
|
|
||||||
.format(username=plpy.quote_nullable(username), orgname=plpy.quote_nullable(orgname), function_name=plpy.quote_literal(function_name), params=plpy.quote_literal(params))
|
|
||||||
)
|
|
||||||
|
|
||||||
colnames_arr = ds_return_metadata[0]["colnames"]
|
|
||||||
coltypes_arr = ds_return_metadata[0]["coltypes"]
|
|
||||||
|
|
||||||
# Prepare column and type strings required in the SQL queries
|
|
||||||
colnames = ','.join(colnames_arr)
|
|
||||||
columns_with_types_arr = [colnames_arr[i] + ' ' + coltypes_arr[i] for i in range(0,len(colnames_arr))]
|
|
||||||
columns_with_types = ','.join(columns_with_types_arr)
|
|
||||||
|
|
||||||
|
|
||||||
# Instruct the OBS server side to establish a FDW
|
|
||||||
# The metadata is obtained as well in order to:
|
|
||||||
# - (a) be able to write the query to grab the actual data to be executed in the remote server via pl/proxy,
|
|
||||||
# - (b) be able to tell OBS to free resources when done.
|
|
||||||
ds_fdw_metadata = plpy.execute("SELECT schemaname, tabname, servername "
|
|
||||||
"FROM cdb_dataservices_client._OBS_ConnectUserTable({username}::text, {orgname}::text, {user_db_role}::text, {schema}::text, {dbname}::text, {table_name}::text);"
|
|
||||||
.format(username=plpy.quote_nullable(username), orgname=plpy.quote_nullable(orgname), user_db_role=plpy.quote_literal(user_db_role), schema=plpy.quote_literal(user_schema), dbname=plpy.quote_literal(dbname), table_name=plpy.quote_literal(table_name))
|
|
||||||
)
|
|
||||||
|
|
||||||
server_schema = ds_fdw_metadata[0]["schemaname"]
|
|
||||||
server_table_name = ds_fdw_metadata[0]["tabname"]
|
|
||||||
server_name = ds_fdw_metadata[0]["servername"]
|
|
||||||
|
|
||||||
# Get list of user columns to include in the new table
|
|
||||||
user_table_columns = ','.join(
|
|
||||||
plpy.execute('SELECT array_agg(\'user_table.\' || attname) AS columns '
|
|
||||||
'FROM pg_attribute WHERE attrelid = \'"{user_schema}".{table_name}\'::regclass '
|
|
||||||
'AND attnum > 0 AND NOT attisdropped AND attname NOT LIKE \'the_geom_webmercator\' '
|
|
||||||
'AND NOT attname LIKE ANY(string_to_array(\'{colnames}\',\',\'));'
|
|
||||||
.format(user_schema=user_schema, table_name=table_name, colnames=colnames)
|
|
||||||
)[0]["columns"]
|
|
||||||
)
|
)
|
||||||
|
|
||||||
# Populate a new table with the augmented results
|
plpy.execute('ALTER TABLE "{schema}".{table_name} OWNER TO "{user}";'
|
||||||
plpy.execute('CREATE TABLE "{user_schema}".{output_table_name} AS '
|
.format(schema=user_schema, table_name=output_table_name, user=user_db_role)
|
||||||
'(SELECT results.{columns}, {user_table_columns} '
|
)
|
||||||
'FROM {table_name} AS user_table '
|
|
||||||
'LEFT JOIN cdb_dataservices_client._OBS_FetchJoinFdwTableData({username}::text, {orgname}::text, {server_schema}::text, {server_table_name}::text, {function_name}::text, {params}::json) as results({columns_with_types}, cartodb_id int) '
|
|
||||||
'ON results.cartodb_id = user_table.cartodb_id)'
|
|
||||||
.format(output_table_name=output_table_name, columns=colnames, user_table_columns=user_table_columns, username=plpy.quote_nullable(username),
|
|
||||||
orgname=plpy.quote_nullable(orgname), user_schema=user_schema, server_schema=plpy.quote_literal(server_schema), server_table_name=plpy.quote_literal(server_table_name),
|
|
||||||
table_name=table_name, function_name=plpy.quote_literal(function_name), params=plpy.quote_literal(params), columns_with_types=columns_with_types)
|
|
||||||
)
|
|
||||||
|
|
||||||
plpy.execute('ALTER TABLE "{schema}".{table_name} OWNER TO "{user}";'
|
return True
|
||||||
.format(schema=user_schema, table_name=output_table_name, user=user_db_role)
|
|
||||||
)
|
|
||||||
|
|
||||||
# Wipe user FDW data from the server
|
|
||||||
wiped = plpy.execute("SELECT cdb_dataservices_client._OBS_DisconnectUserTable({username}::text, {orgname}::text, {server_schema}::text, {server_table_name}::text, {fdw_server}::text)"
|
|
||||||
.format(username=plpy.quote_nullable(username), orgname=plpy.quote_nullable(orgname), server_schema=plpy.quote_literal(server_schema), server_table_name=plpy.quote_literal(server_table_name), fdw_server=plpy.quote_literal(server_name))
|
|
||||||
)
|
|
||||||
|
|
||||||
return True
|
|
||||||
except Exception as e:
|
|
||||||
plpy.warning('Error trying to get table {0}'.format(e))
|
|
||||||
# Wipe user FDW data from the server in case of failure if the table was connected
|
|
||||||
if server_table_name:
|
|
||||||
wiped = plpy.execute("SELECT cdb_dataservices_client._OBS_DisconnectUserTable({username}::text, {orgname}::text, {server_schema}::text, {server_table_name}::text, {fdw_server}::text)"
|
|
||||||
.format(username=plpy.quote_nullable(username), orgname=plpy.quote_nullable(orgname), server_schema=plpy.quote_literal(server_schema), server_table_name=plpy.quote_literal(server_table_name), fdw_server=plpy.quote_literal(server_name))
|
|
||||||
)
|
|
||||||
return False
|
|
||||||
$$ LANGUAGE plpythonu;
|
$$ LANGUAGE plpythonu;
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_dataservices_client.__DST_PopulateTableOBS_GetMeasure(
|
||||||
|
username text,
|
||||||
|
orgname text,
|
||||||
|
user_db_role text,
|
||||||
|
user_schema text,
|
||||||
|
dbname text,
|
||||||
|
table_name text,
|
||||||
|
output_table_name text,
|
||||||
|
params json
|
||||||
|
) RETURNS boolean AS $$
|
||||||
|
function_name = 'OBS_GetMeasure'
|
||||||
|
# Obtain return types for augmentation procedure
|
||||||
|
ds_return_metadata = plpy.execute(
|
||||||
|
"SELECT colnames, coltypes "
|
||||||
|
"FROM cdb_dataservices_client._DST_GetReturnMetadata({username}::text, {orgname}::text, {function_name}::text, {params}::json);" .format(
|
||||||
|
username=plpy.quote_nullable(username),
|
||||||
|
orgname=plpy.quote_nullable(orgname),
|
||||||
|
function_name=plpy.quote_literal(function_name),
|
||||||
|
params=plpy.quote_literal(params)))
|
||||||
|
|
||||||
CREATE OR REPLACE FUNCTION cdb_dataservices_client._OBS_ConnectUserTable(username text, orgname text, user_db_role text, user_schema text, dbname text, table_name text)
|
if ds_return_metadata[0]["colnames"]:
|
||||||
RETURNS cdb_dataservices_client.ds_fdw_metadata AS $$
|
colnames_arr = ds_return_metadata[0]["colnames"]
|
||||||
CONNECT _server_conn_str();
|
coltypes_arr = ds_return_metadata[0]["coltypes"]
|
||||||
TARGET cdb_dataservices_server._OBS_ConnectUserTable;
|
else:
|
||||||
|
raise Exception('Error retrieving OBS_GetMeasure metadata')
|
||||||
|
|
||||||
|
# Prepare column and type strings required in the SQL queries
|
||||||
|
columns_with_types_arr = [
|
||||||
|
colnames_arr[i] +
|
||||||
|
' ' +
|
||||||
|
coltypes_arr[i] for i in range(
|
||||||
|
0,
|
||||||
|
len(colnames_arr))]
|
||||||
|
columns_with_types = ','.join(columns_with_types_arr)
|
||||||
|
aliased_colname_list = ','.join(
|
||||||
|
['result.' + name for name in colnames_arr])
|
||||||
|
|
||||||
|
# Instruct the OBS server side to establish a FDW
|
||||||
|
# The metadata is obtained as well in order to:
|
||||||
|
# - (a) be able to write the query to grab the actual data to be executed in the remote server via pl/proxy,
|
||||||
|
# - (b) be able to tell OBS to free resources when done.
|
||||||
|
ds_fdw_metadata = plpy.execute(
|
||||||
|
"SELECT schemaname, tabname, servername "
|
||||||
|
"FROM cdb_dataservices_client._DST_ConnectUserTable({username}::text, {orgname}::text, {user_db_role}::text, "
|
||||||
|
"{schema}::text, {dbname}::text, {table_name}::text);" .format(
|
||||||
|
username=plpy.quote_nullable(username),
|
||||||
|
orgname=plpy.quote_nullable(orgname),
|
||||||
|
user_db_role=plpy.quote_literal(user_db_role),
|
||||||
|
schema=plpy.quote_literal(user_schema),
|
||||||
|
dbname=plpy.quote_literal(dbname),
|
||||||
|
table_name=plpy.quote_literal(table_name)))
|
||||||
|
|
||||||
|
if ds_fdw_metadata[0]["schemaname"]:
|
||||||
|
server_schema = ds_fdw_metadata[0]["schemaname"]
|
||||||
|
server_table_name = ds_fdw_metadata[0]["tabname"]
|
||||||
|
server_name = ds_fdw_metadata[0]["servername"]
|
||||||
|
else:
|
||||||
|
raise Exception('Error connecting dataset via FDW')
|
||||||
|
|
||||||
|
# Create a new table with the required columns
|
||||||
|
plpy.execute(
|
||||||
|
'INSERT INTO "{schema}".{analysis_table_name} '
|
||||||
|
'SELECT ut.cartodb_id, ut.the_geom, {colname_list} '
|
||||||
|
'FROM "{schema}".{table_name} ut '
|
||||||
|
'LEFT JOIN _DST_FetchJoinFdwTableData({username}::text, {orgname}::text, {server_schema}::text, {server_table_name}::text, '
|
||||||
|
'{function_name}::text, {params}::json) '
|
||||||
|
'AS result ({columns_with_types}, cartodb_id int) '
|
||||||
|
'ON result.cartodb_id = ut.cartodb_id;' .format(
|
||||||
|
schema=user_schema,
|
||||||
|
analysis_table_name=output_table_name,
|
||||||
|
colname_list=aliased_colname_list,
|
||||||
|
table_name=table_name,
|
||||||
|
username=plpy.quote_nullable(username),
|
||||||
|
orgname=plpy.quote_nullable(orgname),
|
||||||
|
server_schema=plpy.quote_literal(server_schema),
|
||||||
|
server_table_name=plpy.quote_literal(server_table_name),
|
||||||
|
function_name=plpy.quote_literal(function_name),
|
||||||
|
params=plpy.quote_literal(params),
|
||||||
|
columns_with_types=columns_with_types))
|
||||||
|
|
||||||
|
# Wipe user FDW data from the server
|
||||||
|
wiped = plpy.execute(
|
||||||
|
"SELECT cdb_dataservices_client._DST_DisconnectUserTable({username}::text, {orgname}::text, {server_schema}::text, "
|
||||||
|
"{server_table_name}::text, {fdw_server}::text)" .format(
|
||||||
|
username=plpy.quote_nullable(username),
|
||||||
|
orgname=plpy.quote_nullable(orgname),
|
||||||
|
server_schema=plpy.quote_literal(server_schema),
|
||||||
|
server_table_name=plpy.quote_literal(server_table_name),
|
||||||
|
fdw_server=plpy.quote_literal(server_name)))
|
||||||
|
|
||||||
|
return True
|
||||||
|
$$ LANGUAGE plpythonu;
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_dataservices_client._DST_ConnectUserTable(
|
||||||
|
username text,
|
||||||
|
orgname text,
|
||||||
|
user_db_role text,
|
||||||
|
user_schema text,
|
||||||
|
dbname text,
|
||||||
|
table_name text
|
||||||
|
)RETURNS cdb_dataservices_client.ds_fdw_metadata AS $$
|
||||||
|
CONNECT cdb_dataservices_client._server_conn_str();
|
||||||
|
TARGET cdb_dataservices_server._DST_ConnectUserTable;
|
||||||
$$ LANGUAGE plproxy;
|
$$ LANGUAGE plproxy;
|
||||||
|
|
||||||
CREATE OR REPLACE FUNCTION cdb_dataservices_client._OBS_GetReturnMetadata(username text, orgname text, function_name text, params json)
|
CREATE OR REPLACE FUNCTION cdb_dataservices_client._DST_GetReturnMetadata(
|
||||||
RETURNS cdb_dataservices_client.ds_return_metadata AS $$
|
username text,
|
||||||
CONNECT _server_conn_str();
|
orgname text,
|
||||||
TARGET cdb_dataservices_server._OBS_GetReturnMetadata;
|
function_name text,
|
||||||
|
params json
|
||||||
|
) RETURNS cdb_dataservices_client.ds_return_metadata AS $$
|
||||||
|
CONNECT cdb_dataservices_client._server_conn_str();
|
||||||
|
TARGET cdb_dataservices_server._DST_GetReturnMetadata;
|
||||||
$$ LANGUAGE plproxy;
|
$$ LANGUAGE plproxy;
|
||||||
|
|
||||||
CREATE OR REPLACE FUNCTION cdb_dataservices_client._OBS_FetchJoinFdwTableData(username text, orgname text, table_schema text, table_name text, function_name text, params json)
|
CREATE OR REPLACE FUNCTION cdb_dataservices_client._DST_FetchJoinFdwTableData(
|
||||||
RETURNS SETOF record AS $$
|
username text,
|
||||||
CONNECT _server_conn_str();
|
orgname text,
|
||||||
TARGET cdb_dataservices_server._OBS_FetchJoinFdwTableData;
|
table_schema text,
|
||||||
|
table_name text,
|
||||||
|
function_name text,
|
||||||
|
params json
|
||||||
|
) RETURNS SETOF record AS $$
|
||||||
|
CONNECT cdb_dataservices_client._server_conn_str();
|
||||||
|
TARGET cdb_dataservices_server._DST_FetchJoinFdwTableData;
|
||||||
$$ LANGUAGE plproxy;
|
$$ LANGUAGE plproxy;
|
||||||
|
|
||||||
CREATE OR REPLACE FUNCTION cdb_dataservices_client._OBS_DisconnectUserTable(username text, orgname text, table_schema text, table_name text, server_name text)
|
CREATE OR REPLACE FUNCTION cdb_dataservices_client._DST_DisconnectUserTable(
|
||||||
RETURNS boolean AS $$
|
username text,
|
||||||
CONNECT _server_conn_str();
|
orgname text,
|
||||||
TARGET cdb_dataservices_server._OBS_DisconnectUserTable;
|
table_schema text,
|
||||||
|
table_name text,
|
||||||
|
server_name text
|
||||||
|
) RETURNS boolean AS $$
|
||||||
|
CONNECT cdb_dataservices_client._server_conn_str();
|
||||||
|
TARGET cdb_dataservices_server._DST_DisconnectUserTable;
|
||||||
$$ LANGUAGE plproxy;
|
$$ LANGUAGE plproxy;
|
||||||
|
|||||||
@@ -1,2 +1,2 @@
|
|||||||
GRANT EXECUTE ON FUNCTION cdb_dataservices_client._obs_augmenttable(table_name text, function_name text, params json) TO publicuser;
|
GRANT EXECUTE ON FUNCTION cdb_dataservices_client._DST_PrepareTableOBS_GetMeasure(output_table_name text, params json) TO publicuser;
|
||||||
GRANT EXECUTE ON FUNCTION cdb_dataservices_client._obs_gettable(table_name text, output_table_name text, function_name text, params json) TO publicuser;
|
GRANT EXECUTE ON FUNCTION cdb_dataservices_client._DST_PopulateTableOBS_GetMeasure(table_name text, output_table_name text, params json) TO publicuser;
|
||||||
|
|||||||
@@ -3,65 +3,63 @@ SET search_path TO public,cartodb,cdb_dataservices_client;
|
|||||||
CREATE TABLE my_table(cartodb_id int);
|
CREATE TABLE my_table(cartodb_id int);
|
||||||
INSERT INTO my_table (cartodb_id) VALUES (1);
|
INSERT INTO my_table (cartodb_id) VALUES (1);
|
||||||
-- Mock the server functions
|
-- Mock the server functions
|
||||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_ConnectUserTable(username text, orgname text, user_db_role text, input_schema text, dbname text, table_name text)
|
CREATE OR REPLACE FUNCTION cdb_dataservices_server._DST_ConnectUserTable(username text, orgname text, user_db_role text, input_schema text, dbname text, table_name text)
|
||||||
RETURNS cdb_dataservices_client.ds_fdw_metadata AS $$
|
RETURNS cdb_dataservices_client.ds_fdw_metadata AS $$
|
||||||
BEGIN
|
BEGIN
|
||||||
RETURN ('dummy_schema'::text, 'dummy_table'::text, 'dummy_server'::text);
|
RETURN ('dummy_schema'::text, 'dummy_table'::text, 'dummy_server'::text);
|
||||||
END;
|
END;
|
||||||
$$ LANGUAGE 'plpgsql';
|
$$ LANGUAGE 'plpgsql';
|
||||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_GetReturnMetadata(username text, orgname text, function_name text, params json)
|
CREATE OR REPLACE FUNCTION cdb_dataservices_server._DST_GetReturnMetadata(username text, orgname text, function_name text, params json)
|
||||||
RETURNS cdb_dataservices_client.ds_return_metadata AS $$
|
RETURNS cdb_dataservices_client.ds_return_metadata AS $$
|
||||||
BEGIN
|
BEGIN
|
||||||
RETURN (Array['total_pop'], Array['double precision']);
|
RETURN (Array['total_pop'], Array['double precision']);
|
||||||
END;
|
END;
|
||||||
$$ LANGUAGE 'plpgsql';
|
$$ LANGUAGE 'plpgsql';
|
||||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_FetchJoinFdwTableData(username text, orgname text, table_schema text, table_name text, function_name text, params json)
|
CREATE OR REPLACE FUNCTION cdb_dataservices_server._DST_FetchJoinFdwTableData(username text, orgname text, table_schema text, table_name text, function_name text, params json)
|
||||||
RETURNS RECORD AS $$
|
RETURNS RECORD AS $$
|
||||||
BEGIN
|
BEGIN
|
||||||
RETURN (23.4::double precision, 1::int);
|
RETURN (23.4::double precision, 1::int);
|
||||||
END;
|
END;
|
||||||
$$ LANGUAGE 'plpgsql';
|
$$ LANGUAGE 'plpgsql';
|
||||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_DisconnectUserTable(username text, orgname text, table_schema text, table_name text, servername text)
|
CREATE OR REPLACE FUNCTION cdb_dataservices_server._DST_DisconnectUserTable(username text, orgname text, table_schema text, table_name text, servername text)
|
||||||
RETURNS boolean AS $$
|
RETURNS boolean AS $$
|
||||||
BEGIN
|
BEGIN
|
||||||
RETURN true;
|
RETURN true;
|
||||||
END;
|
END;
|
||||||
$$ LANGUAGE 'plpgsql';
|
$$ LANGUAGE 'plpgsql';
|
||||||
-- Augment a table with the total_pop column
|
-- Create a sample user table
|
||||||
SELECT cdb_dataservices_client._OBS_AugmentTable('my_table', 'dummy', '{"dummy":"dummy"}'::json);
|
CREATE TABLE user_table (cartodb_id int, the_geom geometry);
|
||||||
_obs_augmenttable
|
INSERT INTO user_table(cartodb_id, the_geom) VALUES (1, '0101000020E6100000F74FC902E07D52C05FE24CC7654B4440');
|
||||||
-------------------
|
INSERT INTO user_table(cartodb_id, the_geom) VALUES (2, '0101000020E6100000F74FC902E07D52C05FE24CC7654B4440');
|
||||||
|
INSERT INTO user_table(cartodb_id, the_geom) VALUES (3, '0101000020E6100000F74FC902E07D52C05FE24CC7654B4440');
|
||||||
|
-- Prepare a table with the total_pop column
|
||||||
|
SELECT cdb_dataservices_client._DST_PrepareTableOBS_GetMeasure('my_table_dst', '{"dummy":"dummy"}'::json);
|
||||||
|
_dst_preparetableobs_getmeasure
|
||||||
|
---------------------------------
|
||||||
t
|
t
|
||||||
(1 row)
|
(1 row)
|
||||||
|
|
||||||
-- The results of the table should return the mocked value of 23.4 in the total_pop column
|
-- The table should now exist and be empty
|
||||||
SELECT * FROM my_table;
|
SELECT * FROM my_table_dst;
|
||||||
cartodb_id | total_pop
|
cartodb_id | the_geom | total_pop
|
||||||
------------+-----------
|
------------+----------+-----------
|
||||||
1 | 23.4
|
(0 rows)
|
||||||
(1 row)
|
|
||||||
|
|
||||||
-- Mock again the function for it to return a different value now
|
-- Populate the table with measurement data
|
||||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_FetchJoinFdwTableData(username text, orgname text, table_schema text, table_name text, function_name text, params json)
|
SELECT cdb_dataservices_client._DST_PopulateTableOBS_GetMeasure('user_table', 'my_table_dst', '{"dummy":"dummy"}'::json);
|
||||||
RETURNS RECORD AS $$
|
_dst_populatetableobs_getmeasure
|
||||||
BEGIN
|
----------------------------------
|
||||||
RETURN (577777.4::double precision, 1::int);
|
|
||||||
END;
|
|
||||||
$$ LANGUAGE 'plpgsql';
|
|
||||||
-- Augment a new table with total_pop
|
|
||||||
SELECT cdb_dataservices_client._OBS_GetTable('my_table', 'my_table_new', 'dummy', '{"dummy":"dummy"}'::json);
|
|
||||||
_obs_gettable
|
|
||||||
---------------
|
|
||||||
t
|
t
|
||||||
(1 row)
|
(1 row)
|
||||||
|
|
||||||
-- Check that the table contains the new value for total_pop and not the value already existent in the table
|
-- The table should now show the results
|
||||||
SELECT * FROM my_table_new;
|
SELECT * FROM my_table_dst;
|
||||||
total_pop | cartodb_id
|
cartodb_id | the_geom | total_pop
|
||||||
-----------+------------
|
------------+----------------------------------------------------+-----------
|
||||||
577777.4 | 1
|
1 | 0101000020E6100000F74FC902E07D52C05FE24CC7654B4440 | 23.4
|
||||||
(1 row)
|
2 | 0101000020E6100000F74FC902E07D52C05FE24CC7654B4440 |
|
||||||
|
3 | 0101000020E6100000F74FC902E07D52C05FE24CC7654B4440 |
|
||||||
|
(3 rows)
|
||||||
|
|
||||||
-- Clean tables
|
-- Clean tables
|
||||||
DROP TABLE my_table;
|
DROP TABLE my_table_dst;
|
||||||
DROP TABLE my_table_new;
|
|
||||||
|
|||||||
@@ -6,54 +6,51 @@ CREATE TABLE my_table(cartodb_id int);
|
|||||||
INSERT INTO my_table (cartodb_id) VALUES (1);
|
INSERT INTO my_table (cartodb_id) VALUES (1);
|
||||||
|
|
||||||
-- Mock the server functions
|
-- Mock the server functions
|
||||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_ConnectUserTable(username text, orgname text, user_db_role text, input_schema text, dbname text, table_name text)
|
CREATE OR REPLACE FUNCTION cdb_dataservices_server._DST_ConnectUserTable(username text, orgname text, user_db_role text, input_schema text, dbname text, table_name text)
|
||||||
RETURNS cdb_dataservices_client.ds_fdw_metadata AS $$
|
RETURNS cdb_dataservices_client.ds_fdw_metadata AS $$
|
||||||
BEGIN
|
BEGIN
|
||||||
RETURN ('dummy_schema'::text, 'dummy_table'::text, 'dummy_server'::text);
|
RETURN ('dummy_schema'::text, 'dummy_table'::text, 'dummy_server'::text);
|
||||||
END;
|
END;
|
||||||
$$ LANGUAGE 'plpgsql';
|
$$ LANGUAGE 'plpgsql';
|
||||||
|
|
||||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_GetReturnMetadata(username text, orgname text, function_name text, params json)
|
CREATE OR REPLACE FUNCTION cdb_dataservices_server._DST_GetReturnMetadata(username text, orgname text, function_name text, params json)
|
||||||
RETURNS cdb_dataservices_client.ds_return_metadata AS $$
|
RETURNS cdb_dataservices_client.ds_return_metadata AS $$
|
||||||
BEGIN
|
BEGIN
|
||||||
RETURN (Array['total_pop'], Array['double precision']);
|
RETURN (Array['total_pop'], Array['double precision']);
|
||||||
END;
|
END;
|
||||||
$$ LANGUAGE 'plpgsql';
|
$$ LANGUAGE 'plpgsql';
|
||||||
|
|
||||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_FetchJoinFdwTableData(username text, orgname text, table_schema text, table_name text, function_name text, params json)
|
CREATE OR REPLACE FUNCTION cdb_dataservices_server._DST_FetchJoinFdwTableData(username text, orgname text, table_schema text, table_name text, function_name text, params json)
|
||||||
RETURNS RECORD AS $$
|
RETURNS RECORD AS $$
|
||||||
BEGIN
|
BEGIN
|
||||||
RETURN (23.4::double precision, 1::int);
|
RETURN (23.4::double precision, 1::int);
|
||||||
END;
|
END;
|
||||||
$$ LANGUAGE 'plpgsql';
|
$$ LANGUAGE 'plpgsql';
|
||||||
|
|
||||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_DisconnectUserTable(username text, orgname text, table_schema text, table_name text, servername text)
|
CREATE OR REPLACE FUNCTION cdb_dataservices_server._DST_DisconnectUserTable(username text, orgname text, table_schema text, table_name text, servername text)
|
||||||
RETURNS boolean AS $$
|
RETURNS boolean AS $$
|
||||||
BEGIN
|
BEGIN
|
||||||
RETURN true;
|
RETURN true;
|
||||||
END;
|
END;
|
||||||
$$ LANGUAGE 'plpgsql';
|
$$ LANGUAGE 'plpgsql';
|
||||||
|
|
||||||
-- Augment a table with the total_pop column
|
-- Create a sample user table
|
||||||
SELECT cdb_dataservices_client._OBS_AugmentTable('my_table', 'dummy', '{"dummy":"dummy"}'::json);
|
CREATE TABLE user_table (cartodb_id int, the_geom geometry);
|
||||||
|
INSERT INTO user_table(cartodb_id, the_geom) VALUES (1, '0101000020E6100000F74FC902E07D52C05FE24CC7654B4440');
|
||||||
|
INSERT INTO user_table(cartodb_id, the_geom) VALUES (2, '0101000020E6100000F74FC902E07D52C05FE24CC7654B4440');
|
||||||
|
INSERT INTO user_table(cartodb_id, the_geom) VALUES (3, '0101000020E6100000F74FC902E07D52C05FE24CC7654B4440');
|
||||||
|
|
||||||
-- The results of the table should return the mocked value of 23.4 in the total_pop column
|
-- Prepare a table with the total_pop column
|
||||||
SELECT * FROM my_table;
|
SELECT cdb_dataservices_client._DST_PrepareTableOBS_GetMeasure('my_table_dst', '{"dummy":"dummy"}'::json);
|
||||||
|
|
||||||
-- Mock again the function for it to return a different value now
|
-- The table should now exist and be empty
|
||||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_FetchJoinFdwTableData(username text, orgname text, table_schema text, table_name text, function_name text, params json)
|
SELECT * FROM my_table_dst;
|
||||||
RETURNS RECORD AS $$
|
|
||||||
BEGIN
|
|
||||||
RETURN (577777.4::double precision, 1::int);
|
|
||||||
END;
|
|
||||||
$$ LANGUAGE 'plpgsql';
|
|
||||||
|
|
||||||
-- Augment a new table with total_pop
|
-- Populate the table with measurement data
|
||||||
SELECT cdb_dataservices_client._OBS_GetTable('my_table', 'my_table_new', 'dummy', '{"dummy":"dummy"}'::json);
|
SELECT cdb_dataservices_client._DST_PopulateTableOBS_GetMeasure('user_table', 'my_table_dst', '{"dummy":"dummy"}'::json);
|
||||||
|
|
||||||
-- Check that the table contains the new value for total_pop and not the value already existent in the table
|
-- The table should now show the results
|
||||||
SELECT * FROM my_table_new;
|
SELECT * FROM my_table_dst;
|
||||||
|
|
||||||
-- Clean tables
|
-- Clean tables
|
||||||
DROP TABLE my_table;
|
DROP TABLE my_table_dst;
|
||||||
DROP TABLE my_table_new;
|
|
||||||
69
server/extension/cdb_dataservices_server--0.15.1--0.16.0.sql
Normal file
69
server/extension/cdb_dataservices_server--0.15.1--0.16.0.sql
Normal file
@@ -0,0 +1,69 @@
|
|||||||
|
--DO NOT MODIFY THIS FILE, IT IS GENERATED AUTOMATICALLY FROM SOURCES
|
||||||
|
-- Complain if script is sourced in psql, rather than via CREATE EXTENSION
|
||||||
|
\echo Use "ALTER EXTENSION cdb_dataservices_server UPDATE TO '0.16.0'" to load this file. \quit
|
||||||
|
|
||||||
|
-- Here goes your code to upgrade/downgrade
|
||||||
|
|
||||||
|
-- This is done in order to avoid an undesired depedency on cartodb extension
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_dataservices_server.cdb_conf_getconf(input_key text)
|
||||||
|
RETURNS JSON AS $$
|
||||||
|
SELECT VALUE FROM cartodb.cdb_conf WHERE key = input_key;
|
||||||
|
$$ LANGUAGE SQL STABLE SECURITY DEFINER;
|
||||||
|
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_dataservices_server._cdb_mapzen_geocode_street_point(username TEXT, orgname TEXT, searchtext TEXT, city TEXT DEFAULT NULL, state_province TEXT DEFAULT NULL, country TEXT DEFAULT NULL)
|
||||||
|
RETURNS Geometry AS $$
|
||||||
|
import cartodb_services
|
||||||
|
cartodb_services.init(plpy, GD)
|
||||||
|
from cartodb_services.mapzen import MapzenGeocoder
|
||||||
|
from cartodb_services.mapzen.types import country_to_iso3
|
||||||
|
from cartodb_services.metrics import QuotaService
|
||||||
|
from cartodb_services.tools import Logger
|
||||||
|
from cartodb_services.refactor.tools.logger import LoggerConfigBuilder
|
||||||
|
from cartodb_services.refactor.service.mapzen_geocoder_config import MapzenGeocoderConfigBuilder
|
||||||
|
from cartodb_services.refactor.core.environment import ServerEnvironmentBuilder
|
||||||
|
from cartodb_services.refactor.backend.server_config import ServerConfigBackendFactory
|
||||||
|
from cartodb_services.refactor.backend.user_config import UserConfigBackendFactory
|
||||||
|
from cartodb_services.refactor.backend.org_config import OrgConfigBackendFactory
|
||||||
|
from cartodb_services.refactor.backend.redis_metrics_connection import RedisMetricsConnectionFactory
|
||||||
|
|
||||||
|
server_config_backend = ServerConfigBackendFactory().get()
|
||||||
|
environment = ServerEnvironmentBuilder(server_config_backend).get()
|
||||||
|
user_config_backend = UserConfigBackendFactory(username, environment, server_config_backend).get()
|
||||||
|
org_config_backend = OrgConfigBackendFactory(orgname, environment, server_config_backend).get()
|
||||||
|
|
||||||
|
logger_config = LoggerConfigBuilder(environment, server_config_backend).get()
|
||||||
|
logger = Logger(logger_config)
|
||||||
|
|
||||||
|
mapzen_geocoder_config = MapzenGeocoderConfigBuilder(server_config_backend, user_config_backend, org_config_backend, username, orgname).get()
|
||||||
|
|
||||||
|
redis_metrics_connection = RedisMetricsConnectionFactory(environment, server_config_backend).get()
|
||||||
|
|
||||||
|
quota_service = QuotaService(mapzen_geocoder_config, redis_metrics_connection)
|
||||||
|
if not quota_service.check_user_quota():
|
||||||
|
raise Exception('You have reached the limit of your quota')
|
||||||
|
|
||||||
|
try:
|
||||||
|
geocoder = MapzenGeocoder(mapzen_geocoder_config.mapzen_api_key, logger)
|
||||||
|
country_iso3 = None
|
||||||
|
if country:
|
||||||
|
country_iso3 = country_to_iso3(country)
|
||||||
|
coordinates = geocoder.geocode(searchtext=searchtext, city=city,
|
||||||
|
state_province=state_province,
|
||||||
|
country=country_iso3, search_type='address')
|
||||||
|
if coordinates:
|
||||||
|
quota_service.increment_success_service_use()
|
||||||
|
plan = plpy.prepare("SELECT ST_SetSRID(ST_MakePoint($1, $2), 4326); ", ["double precision", "double precision"])
|
||||||
|
point = plpy.execute(plan, [coordinates[0], coordinates[1]], 1)[0]
|
||||||
|
return point['st_setsrid']
|
||||||
|
else:
|
||||||
|
quota_service.increment_empty_service_use()
|
||||||
|
return None
|
||||||
|
except BaseException as e:
|
||||||
|
import sys
|
||||||
|
quota_service.increment_failed_service_use()
|
||||||
|
logger.error('Error trying to geocode street point using mapzen', sys.exc_info(), data={"username": username, "orgname": orgname})
|
||||||
|
raise Exception('Error trying to geocode street point using mapzen')
|
||||||
|
finally:
|
||||||
|
quota_service.increment_total_service_use()
|
||||||
|
$$ LANGUAGE plpythonu;
|
||||||
54
server/extension/cdb_dataservices_server--0.16.0--0.15.1.sql
Normal file
54
server/extension/cdb_dataservices_server--0.16.0--0.15.1.sql
Normal file
@@ -0,0 +1,54 @@
|
|||||||
|
--DO NOT MODIFY THIS FILE, IT IS GENERATED AUTOMATICALLY FROM SOURCES
|
||||||
|
-- Complain if script is sourced in psql, rather than via CREATE EXTENSION
|
||||||
|
\echo Use "ALTER EXTENSION cdb_dataservices_server UPDATE TO '0.15.1'" to load this file. \quit
|
||||||
|
|
||||||
|
-- Here goes your code to upgrade/downgrade
|
||||||
|
|
||||||
|
DROP FUNCTION IF EXISTS cdb_dataservices_server.cdb_conf_getconf(text);
|
||||||
|
|
||||||
|
-- Geocodes a street address given a searchtext and a state and/or country
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_dataservices_server.cdb_geocode_street_point(username TEXT, orgname TEXT, searchtext TEXT, city TEXT DEFAULT NULL, state_province TEXT DEFAULT NULL, country TEXT DEFAULT NULL)
|
||||||
|
RETURNS Geometry AS $$
|
||||||
|
import cartodb_services
|
||||||
|
cartodb_services.init(plpy, GD)
|
||||||
|
|
||||||
|
from cartodb_services.config.user import User
|
||||||
|
from cartodb_services.config.configs import ConfigsFactory
|
||||||
|
from cartodb_services.config.hires_geocoder_config import HiResGeocoderConfigFactory
|
||||||
|
from cartodb_services.request.request import RequestFactory
|
||||||
|
|
||||||
|
user = User(username, orgname)
|
||||||
|
configs = ConfigsFactory.get(user)
|
||||||
|
request = RequestFactory().create(user, configs, 'cdb_geocode_street_point')
|
||||||
|
|
||||||
|
# TODO change to hires_geocoder_config = HiResGeocoderConfigFactory.get(request)
|
||||||
|
hires_geocoder_config = HiResGeocoderConfigFactory(configs).get(user)
|
||||||
|
|
||||||
|
if hires_geocoder_config.provider == 'here':
|
||||||
|
here_plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_here_geocode_street_point($1, $2, $3, $4, $5, $6) as point; ", ["text", "text", "text", "text", "text", "text"])
|
||||||
|
return plpy.execute(here_plan, [username, orgname, searchtext, city, state_province, country], 1)[0]['point']
|
||||||
|
elif hires_geocoder_config.provider == 'google':
|
||||||
|
google_plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_google_geocode_street_point($1, $2, $3, $4, $5, $6) as point; ", ["text", "text", "text", "text", "text", "text"])
|
||||||
|
return plpy.execute(google_plan, [username, orgname, searchtext, city, state_province, country], 1)[0]['point']
|
||||||
|
elif hires_geocoder_config.provider == 'mapzen':
|
||||||
|
mapzen_plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_mapzen_geocode_street_point($1, $2, $3, $4, $5, $6) as point; ", ["text", "text", "text", "text", "text", "text"])
|
||||||
|
return plpy.execute(mapzen_plan, [username, orgname, searchtext, city, state_province, country], 1)[0]['point']
|
||||||
|
else:
|
||||||
|
raise Exception('Requested geocoder is not available')
|
||||||
|
$$ LANGUAGE plpythonu;
|
||||||
|
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_dataservices_server.cdb_here_geocode_street_point(username TEXT, orgname TEXT, searchtext TEXT, city TEXT DEFAULT NULL, state_province TEXT DEFAULT NULL, country TEXT DEFAULT NULL)
|
||||||
|
RETURNS Geometry AS $$
|
||||||
|
plpy.execute("SELECT cdb_dataservices_server._connect_to_redis('{0}')".format(username))
|
||||||
|
redis_conn = GD["redis_connection_{0}".format(username)]['redis_metrics_connection']
|
||||||
|
plpy.execute("SELECT cdb_dataservices_server._get_geocoder_config({0}, {1})".format(plpy.quote_nullable(username), plpy.quote_nullable(orgname)))
|
||||||
|
user_geocoder_config = GD["user_geocoder_config_{0}".format(username)]
|
||||||
|
|
||||||
|
if user_geocoder_config.heremaps_geocoder:
|
||||||
|
here_plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_here_geocode_street_point($1, $2, $3, $4, $5, $6) as point; ", ["text", "text", "text", "text", "text", "text"])
|
||||||
|
return plpy.execute(here_plan, [username, orgname, searchtext, city, state_province, country], 1)[0]['point']
|
||||||
|
else:
|
||||||
|
raise Exception('Here geocoder is not available for your account.')
|
||||||
|
|
||||||
|
$$ LANGUAGE plpythonu;
|
||||||
2411
server/extension/cdb_dataservices_server--0.16.0.sql
Normal file
2411
server/extension/cdb_dataservices_server--0.16.0.sql
Normal file
File diff suppressed because it is too large
Load Diff
@@ -1,5 +1,5 @@
|
|||||||
comment = 'CartoDB dataservices server extension'
|
comment = 'CartoDB dataservices server extension'
|
||||||
default_version = '0.14.2'
|
default_version = '0.16.0'
|
||||||
requires = 'plpythonu, plproxy, postgis, cdb_geocoder'
|
requires = 'plpythonu, plproxy, postgis, cdb_geocoder'
|
||||||
superuser = true
|
superuser = true
|
||||||
schema = cdb_dataservices_server
|
schema = cdb_dataservices_server
|
||||||
|
|||||||
@@ -0,0 +1,181 @@
|
|||||||
|
--DO NOT MODIFY THIS FILE, IT IS GENERATED AUTOMATICALLY FROM SOURCES
|
||||||
|
-- Complain if script is sourced in psql, rather than via CREATE EXTENSION
|
||||||
|
\echo Use "ALTER EXTENSION cdb_dataservices_server UPDATE TO '0.15.0'" to load this file. \quit
|
||||||
|
|
||||||
|
-- HERE goes your code to upgrade/downgrade
|
||||||
|
DROP FUNCTION IF EXISTS cdb_dataservices_server._get_geocoder_config(text, text);
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_dataservices_server._get_geocoder_config(username text, orgname text, provider text DEFAULT NULL)
|
||||||
|
RETURNS boolean AS $$
|
||||||
|
cache_key = "user_geocoder_config_{0}".format(username)
|
||||||
|
if cache_key in GD:
|
||||||
|
return False
|
||||||
|
else:
|
||||||
|
from cartodb_services.metrics import GeocoderConfig
|
||||||
|
plpy.execute("SELECT cdb_dataservices_server._connect_to_redis('{0}')".format(username))
|
||||||
|
redis_conn = GD["redis_connection_{0}".format(username)]['redis_metadata_connection']
|
||||||
|
geocoder_config = GeocoderConfig(redis_conn, plpy, username, orgname, provider)
|
||||||
|
GD[cache_key] = geocoder_config
|
||||||
|
return True
|
||||||
|
$$ LANGUAGE plpythonu SECURITY DEFINER;
|
||||||
|
|
||||||
|
---- cdb_geocode_namedplace_point(city_name text)
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_dataservices_server.cdb_geocode_namedplace_point(username text, orgname text, city_name text)
|
||||||
|
RETURNS Geometry AS $$
|
||||||
|
try:
|
||||||
|
mapzen_plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_mapzen_geocode_namedplace($1, $2, $3) as point;", ["text", "text", "text"])
|
||||||
|
return plpy.execute(mapzen_plan, [username, orgname, city_name])[0]['point']
|
||||||
|
except BaseException as e:
|
||||||
|
internal_plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_internal_geocode_namedplace($1, $2, $3) as point;", ["text", "text", "text"])
|
||||||
|
return plpy.execute(internal_plan, [username, orgname, city_name])[0]['point']
|
||||||
|
$$ LANGUAGE plpythonu;
|
||||||
|
|
||||||
|
---- cdb_geocode_namedplace_point(city_name text, country_name text)
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_dataservices_server.cdb_geocode_namedplace_point(username text, orgname text, city_name text, country_name text)
|
||||||
|
RETURNS Geometry AS $$
|
||||||
|
try:
|
||||||
|
mapzen_plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_mapzen_geocode_namedplace($1, $2, $3, NULL, $4) as point;", ["text", "text", "text", "text"])
|
||||||
|
return plpy.execute(mapzen_plan, [username, orgname, city_name, country_name])[0]['point']
|
||||||
|
except BaseException as e:
|
||||||
|
internal_plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_internal_geocode_namedplace($1, $2, $3, NULL, $4) as point;", ["text", "text", "text", "text"])
|
||||||
|
return plpy.execute(internal_plan, [username, orgname, city_name, country_name])[0]['point']
|
||||||
|
$$ LANGUAGE plpythonu;
|
||||||
|
|
||||||
|
---- cdb_geocode_namedplace_point(city_name text, admin1_name text, country_name text)
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_dataservices_server.cdb_geocode_namedplace_point(username text, orgname text, city_name text, admin1_name text, country_name text)
|
||||||
|
RETURNS Geometry AS $$
|
||||||
|
try:
|
||||||
|
mapzen_plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_mapzen_geocode_namedplace($1, $2, $3, $4, $5) as point;", ["text", "text", "text", "text", "text"])
|
||||||
|
return plpy.execute(mapzen_plan, [username, orgname, city_name, admin1_name, country_name])[0]['point']
|
||||||
|
except BaseException as e:
|
||||||
|
internal_plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_internal_geocode_namedplace($1, $2, $3, $4, $5) as point;", ["text", "text", "text", "text", "text"])
|
||||||
|
return plpy.execute(internal_plan, [username, orgname, city_name, admin1_name, country_name])[0]['point']
|
||||||
|
$$ LANGUAGE plpythonu;
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_dataservices_server._cdb_mapzen_geocode_namedplace(username text, orgname text, city_name text, admin1_name text DEFAULT NULL, country_name text DEFAULT NULL)
|
||||||
|
RETURNS Geometry AS $$
|
||||||
|
from cartodb_services.mapzen import MapzenGeocoder
|
||||||
|
from cartodb_services.mapzen.types import country_to_iso3
|
||||||
|
from cartodb_services.metrics import QuotaService
|
||||||
|
from cartodb_services.tools import Logger,LoggerConfig
|
||||||
|
|
||||||
|
plpy.execute("SELECT cdb_dataservices_server._connect_to_redis('{0}')".format(username))
|
||||||
|
redis_conn = GD["redis_connection_{0}".format(username)]['redis_metrics_connection']
|
||||||
|
plpy.execute("SELECT cdb_dataservices_server._get_geocoder_config({0}, {1}, {2})".format(plpy.quote_nullable(username), plpy.quote_nullable(orgname), plpy.quote_nullable('mapzen')))
|
||||||
|
user_geocoder_config = GD["user_geocoder_config_{0}".format(username)]
|
||||||
|
|
||||||
|
plpy.execute("SELECT cdb_dataservices_server._get_logger_config()")
|
||||||
|
logger_config = GD["logger_config"]
|
||||||
|
logger = Logger(logger_config)
|
||||||
|
quota_service = QuotaService(user_geocoder_config, redis_conn)
|
||||||
|
if not quota_service.check_user_quota():
|
||||||
|
raise Exception('You have reached the limit of your quota')
|
||||||
|
|
||||||
|
try:
|
||||||
|
geocoder = MapzenGeocoder(user_geocoder_config.mapzen_api_key, logger)
|
||||||
|
country_iso3 = None
|
||||||
|
if country_name:
|
||||||
|
country_iso3 = country_to_iso3(country_name)
|
||||||
|
coordinates = geocoder.geocode(searchtext=city_name, city=None,
|
||||||
|
state_province=admin1_name,
|
||||||
|
country=country_iso3, search_type='locality')
|
||||||
|
if coordinates:
|
||||||
|
quota_service.increment_success_service_use()
|
||||||
|
plan = plpy.prepare("SELECT ST_SetSRID(ST_MakePoint($1, $2), 4326); ", ["double precision", "double precision"])
|
||||||
|
point = plpy.execute(plan, [coordinates[0], coordinates[1]], 1)[0]
|
||||||
|
return point['st_setsrid']
|
||||||
|
else:
|
||||||
|
quota_service.increment_empty_service_use()
|
||||||
|
return None
|
||||||
|
except BaseException as e:
|
||||||
|
import sys
|
||||||
|
quota_service.increment_failed_service_use()
|
||||||
|
logger.error('Error trying to geocode city point using mapzen', sys.exc_info(), data={"username": username, "orgname": orgname})
|
||||||
|
raise Exception('Error trying to geocode city point using mapzen')
|
||||||
|
finally:
|
||||||
|
quota_service.increment_total_service_use()
|
||||||
|
$$ LANGUAGE plpythonu;
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_dataservices_server._cdb_internal_geocode_namedplace(username text, orgname text, city_name text, admin1_name text DEFAULT NULL, country_name text DEFAULT NULL)
|
||||||
|
RETURNS Geometry AS $$
|
||||||
|
from cartodb_services.metrics import QuotaService
|
||||||
|
from cartodb_services.metrics import InternalGeocoderConfig
|
||||||
|
from cartodb_services.tools import Logger,LoggerConfig
|
||||||
|
|
||||||
|
plpy.execute("SELECT cdb_dataservices_server._connect_to_redis('{0}')".format(username))
|
||||||
|
redis_conn = GD["redis_connection_{0}".format(username)]['redis_metrics_connection']
|
||||||
|
plpy.execute("SELECT cdb_dataservices_server._get_internal_geocoder_config({0}, {1})".format(plpy.quote_nullable(username), plpy.quote_nullable(orgname)))
|
||||||
|
user_geocoder_config = GD["user_internal_geocoder_config_{0}".format(username)]
|
||||||
|
|
||||||
|
plpy.execute("SELECT cdb_dataservices_server._get_logger_config()")
|
||||||
|
logger_config = GD["logger_config"]
|
||||||
|
logger = Logger(logger_config)
|
||||||
|
quota_service = QuotaService(user_geocoder_config, redis_conn)
|
||||||
|
try:
|
||||||
|
if admin1_name and country_name:
|
||||||
|
plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_geocode_namedplace_point(trim($1), trim($2), trim($3)) AS mypoint", ["text", "text", "text"])
|
||||||
|
rv = plpy.execute(plan, [city_name, admin1_name, country_name], 1)
|
||||||
|
elif country_name:
|
||||||
|
plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_geocode_namedplace_point(trim($1), trim($2)) AS mypoint", ["text", "text"])
|
||||||
|
rv = plpy.execute(plan, [city_name, country_name], 1)
|
||||||
|
else:
|
||||||
|
plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_geocode_namedplace_point(trim($1)) AS mypoint", ["text"])
|
||||||
|
rv = plpy.execute(plan, [city_name], 1)
|
||||||
|
result = rv[0]["mypoint"]
|
||||||
|
if result:
|
||||||
|
quota_service.increment_success_service_use()
|
||||||
|
return result
|
||||||
|
else:
|
||||||
|
quota_service.increment_empty_service_use()
|
||||||
|
return None
|
||||||
|
except BaseException as e:
|
||||||
|
import sys
|
||||||
|
quota_service.increment_failed_service_use()
|
||||||
|
logger.error('Error trying to geocode namedplace point', sys.exc_info(), data={"username": username, "orgname": orgname})
|
||||||
|
raise Exception('Error trying to geocode namedplace point')
|
||||||
|
finally:
|
||||||
|
quota_service.increment_total_service_use()
|
||||||
|
$$ LANGUAGE plpythonu;
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_dataservices_server._cdb_mapzen_geocode_street_point(username TEXT, orgname TEXT, searchtext TEXT, city TEXT DEFAULT NULL, state_province TEXT DEFAULT NULL, country TEXT DEFAULT NULL)
|
||||||
|
RETURNS Geometry AS $$
|
||||||
|
from cartodb_services.mapzen import MapzenGeocoder
|
||||||
|
from cartodb_services.mapzen.types import country_to_iso3
|
||||||
|
from cartodb_services.metrics import QuotaService
|
||||||
|
from cartodb_services.tools import Logger,LoggerConfig
|
||||||
|
|
||||||
|
redis_conn = GD["redis_connection_{0}".format(username)]['redis_metrics_connection']
|
||||||
|
user_geocoder_config = GD["user_geocoder_config_{0}".format(username)]
|
||||||
|
|
||||||
|
plpy.execute("SELECT cdb_dataservices_server._get_logger_config()")
|
||||||
|
logger_config = GD["logger_config"]
|
||||||
|
logger = Logger(logger_config)
|
||||||
|
quota_service = QuotaService(user_geocoder_config, redis_conn)
|
||||||
|
if not quota_service.check_user_quota():
|
||||||
|
raise Exception('You have reached the limit of your quota')
|
||||||
|
|
||||||
|
try:
|
||||||
|
geocoder = MapzenGeocoder(user_geocoder_config.mapzen_api_key, logger)
|
||||||
|
country_iso3 = None
|
||||||
|
if country:
|
||||||
|
country_iso3 = country_to_iso3(country)
|
||||||
|
coordinates = geocoder.geocode(searchtext=searchtext, city=city,
|
||||||
|
state_province=state_province,
|
||||||
|
country=country_iso3, search_type='address')
|
||||||
|
if coordinates:
|
||||||
|
quota_service.increment_success_service_use()
|
||||||
|
plan = plpy.prepare("SELECT ST_SetSRID(ST_MakePoint($1, $2), 4326); ", ["double precision", "double precision"])
|
||||||
|
point = plpy.execute(plan, [coordinates[0], coordinates[1]], 1)[0]
|
||||||
|
return point['st_setsrid']
|
||||||
|
else:
|
||||||
|
quota_service.increment_empty_service_use()
|
||||||
|
return None
|
||||||
|
except BaseException as e:
|
||||||
|
import sys
|
||||||
|
quota_service.increment_failed_service_use()
|
||||||
|
logger.error('Error trying to geocode street point using mapzen', sys.exc_info(), data={"username": username, "orgname": orgname})
|
||||||
|
raise Exception('Error trying to geocode street point using mapzen')
|
||||||
|
finally:
|
||||||
|
quota_service.increment_total_service_use()
|
||||||
|
$$ LANGUAGE plpythonu
|
||||||
|
|
||||||
@@ -0,0 +1,169 @@
|
|||||||
|
--DO NOT MODIFY THIS FILE, IT IS GENERATED AUTOMATICALLY FROM SOURCES
|
||||||
|
-- Complain if script is sourced in psql, rather than via CREATE EXTENSION
|
||||||
|
\echo Use "ALTER EXTENSION cdb_dataservices_server UPDATE TO '0.14.2'" to load this file. \quit
|
||||||
|
|
||||||
|
-- HERE goes your code to upgrade/downgrade
|
||||||
|
DROP FUNCTION IF EXISTS cdb_dataservices_server._get_geocoder_config(text, text, text);
|
||||||
|
DROP FUNCTION IF EXISTS cdb_dataservices_server._cdb_mapzen_geocode_namedplace(text, text, text, text, text);
|
||||||
|
DROP FUNCTION IF EXISTS cdb_dataservices_server._cdb_internal_geocode_namedplace(text, text, text, text, text);
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_dataservices_server._get_geocoder_config(username text, orgname text)
|
||||||
|
RETURNS boolean AS $$
|
||||||
|
cache_key = "user_geocoder_config_{0}".format(username)
|
||||||
|
if cache_key in GD:
|
||||||
|
return False
|
||||||
|
else:
|
||||||
|
from cartodb_services.metrics import GeocoderConfig
|
||||||
|
plpy.execute("SELECT cdb_dataservices_server._connect_to_redis('{0}')".format(username))
|
||||||
|
redis_conn = GD["redis_connection_{0}".format(username)]['redis_metadata_connection']
|
||||||
|
geocoder_config = GeocoderConfig(redis_conn, plpy, username, orgname)
|
||||||
|
GD[cache_key] = geocoder_config
|
||||||
|
return True
|
||||||
|
$$ LANGUAGE plpythonu SECURITY DEFINER;
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_dataservices_server.cdb_geocode_namedplace_point(username text, orgname text, city_name text)
|
||||||
|
RETURNS Geometry AS $$
|
||||||
|
from cartodb_services.metrics import QuotaService
|
||||||
|
from cartodb_services.metrics import InternalGeocoderConfig
|
||||||
|
from cartodb_services.tools import Logger,LoggerConfig
|
||||||
|
|
||||||
|
|
||||||
|
plpy.execute("SELECT cdb_dataservices_server._connect_to_redis('{0}')".format(username))
|
||||||
|
redis_conn = GD["redis_connection_{0}".format(username)]['redis_metrics_connection']
|
||||||
|
plpy.execute("SELECT cdb_dataservices_server._get_internal_geocoder_config({0}, {1})".format(plpy.quote_nullable(username), plpy.quote_nullable(orgname)))
|
||||||
|
user_geocoder_config = GD["user_internal_geocoder_config_{0}".format(username)]
|
||||||
|
|
||||||
|
plpy.execute("SELECT cdb_dataservices_server._get_logger_config()")
|
||||||
|
logger_config = GD["logger_config"]
|
||||||
|
logger = Logger(logger_config)
|
||||||
|
quota_service = QuotaService(user_geocoder_config, redis_conn)
|
||||||
|
try:
|
||||||
|
plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_geocode_namedplace_point(trim($1)) AS mypoint", ["text"])
|
||||||
|
rv = plpy.execute(plan, [city_name], 1)
|
||||||
|
result = rv[0]["mypoint"]
|
||||||
|
if result:
|
||||||
|
quota_service.increment_success_service_use()
|
||||||
|
return result
|
||||||
|
else:
|
||||||
|
quota_service.increment_empty_service_use()
|
||||||
|
return None
|
||||||
|
except BaseException as e:
|
||||||
|
import sys
|
||||||
|
quota_service.increment_failed_service_use()
|
||||||
|
logger.error('Error trying to geocode namedplace point', sys.exc_info(), data={"username": username, "orgname": orgname})
|
||||||
|
raise Exception('Error trying to geocode namedplace point')
|
||||||
|
finally:
|
||||||
|
quota_service.increment_total_service_use()
|
||||||
|
$$ LANGUAGE plpythonu;
|
||||||
|
|
||||||
|
---- cdb_geocode_namedplace_point(city_name text, country_name text)
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_dataservices_server.cdb_geocode_namedplace_point(username text, orgname text, city_name text, country_name text)
|
||||||
|
RETURNS Geometry AS $$
|
||||||
|
from cartodb_services.metrics import QuotaService
|
||||||
|
from cartodb_services.metrics import InternalGeocoderConfig
|
||||||
|
from cartodb_services.tools import Logger,LoggerConfig
|
||||||
|
|
||||||
|
plpy.execute("SELECT cdb_dataservices_server._connect_to_redis('{0}')".format(username))
|
||||||
|
redis_conn = GD["redis_connection_{0}".format(username)]['redis_metrics_connection']
|
||||||
|
plpy.execute("SELECT cdb_dataservices_server._get_internal_geocoder_config({0}, {1})".format(plpy.quote_nullable(username), plpy.quote_nullable(orgname)))
|
||||||
|
user_geocoder_config = GD["user_internal_geocoder_config_{0}".format(username)]
|
||||||
|
|
||||||
|
plpy.execute("SELECT cdb_dataservices_server._get_logger_config()")
|
||||||
|
logger_config = GD["logger_config"]
|
||||||
|
logger = Logger(logger_config)
|
||||||
|
quota_service = QuotaService(user_geocoder_config, redis_conn)
|
||||||
|
try:
|
||||||
|
plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_geocode_namedplace_point(trim($1), trim($2)) AS mypoint", ["text", "text"])
|
||||||
|
rv = plpy.execute(plan, [city_name, country_name], 1)
|
||||||
|
result = rv[0]["mypoint"]
|
||||||
|
if result:
|
||||||
|
quota_service.increment_success_service_use()
|
||||||
|
return result
|
||||||
|
else:
|
||||||
|
quota_service.increment_empty_service_use()
|
||||||
|
return None
|
||||||
|
except BaseException as e:
|
||||||
|
import sys
|
||||||
|
quota_service.increment_failed_service_use()
|
||||||
|
logger.error('Error trying to geocode namedplace point', sys.exc_info(), data={"username": username, "orgname": orgname})
|
||||||
|
raise Exception('Error trying to geocode namedplace point')
|
||||||
|
finally:
|
||||||
|
quota_service.increment_total_service_use()
|
||||||
|
$$ LANGUAGE plpythonu;
|
||||||
|
|
||||||
|
---- cdb_geocode_namedplace_point(city_name text, admin1_name text, country_name text)
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_dataservices_server.cdb_geocode_namedplace_point(username text, orgname text, city_name text, admin1_name text, country_name text)
|
||||||
|
RETURNS Geometry AS $$
|
||||||
|
from cartodb_services.metrics import QuotaService
|
||||||
|
from cartodb_services.metrics import InternalGeocoderConfig
|
||||||
|
from cartodb_services.tools import Logger,LoggerConfig
|
||||||
|
|
||||||
|
plpy.execute("SELECT cdb_dataservices_server._connect_to_redis('{0}')".format(username))
|
||||||
|
redis_conn = GD["redis_connection_{0}".format(username)]['redis_metrics_connection']
|
||||||
|
plpy.execute("SELECT cdb_dataservices_server._get_internal_geocoder_config({0}, {1})".format(plpy.quote_nullable(username), plpy.quote_nullable(orgname)))
|
||||||
|
user_geocoder_config = GD["user_internal_geocoder_config_{0}".format(username)]
|
||||||
|
|
||||||
|
plpy.execute("SELECT cdb_dataservices_server._get_logger_config()")
|
||||||
|
logger_config = GD["logger_config"]
|
||||||
|
logger = Logger(logger_config)
|
||||||
|
quota_service = QuotaService(user_geocoder_config, redis_conn)
|
||||||
|
try:
|
||||||
|
plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_geocode_namedplace_point(trim($1), trim($2), trim($3)) AS mypoint", ["text", "text", "text"])
|
||||||
|
rv = plpy.execute(plan, [city_name, admin1_name, country_name], 1)
|
||||||
|
result = rv[0]["mypoint"]
|
||||||
|
if result:
|
||||||
|
quota_service.increment_success_service_use()
|
||||||
|
return result
|
||||||
|
else:
|
||||||
|
quota_service.increment_empty_service_use()
|
||||||
|
return None
|
||||||
|
except BaseException as e:
|
||||||
|
import sys
|
||||||
|
quota_service.increment_failed_service_use()
|
||||||
|
logger.error('Error trying to geocode namedplace point', sys.exc_info(), data={"username": username, "orgname": orgname})
|
||||||
|
raise Exception('Error trying to geocode namedplace point')
|
||||||
|
finally:
|
||||||
|
quota_service.increment_total_service_use()
|
||||||
|
$$ LANGUAGE plpythonu;
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_dataservices_server._cdb_mapzen_geocode_street_point(username TEXT, orgname TEXT, searchtext TEXT, city TEXT DEFAULT NULL, state_province TEXT DEFAULT NULL, country TEXT DEFAULT NULL)
|
||||||
|
RETURNS Geometry AS $$
|
||||||
|
from cartodb_services.mapzen import MapzenGeocoder
|
||||||
|
from cartodb_services.mapzen.types import country_to_iso3
|
||||||
|
from cartodb_services.metrics import QuotaService
|
||||||
|
from cartodb_services.tools import Logger,LoggerConfig
|
||||||
|
|
||||||
|
redis_conn = GD["redis_connection_{0}".format(username)]['redis_metrics_connection']
|
||||||
|
user_geocoder_config = GD["user_geocoder_config_{0}".format(username)]
|
||||||
|
|
||||||
|
plpy.execute("SELECT cdb_dataservices_server._get_logger_config()")
|
||||||
|
logger_config = GD["logger_config"]
|
||||||
|
logger = Logger(logger_config)
|
||||||
|
quota_service = QuotaService(user_geocoder_config, redis_conn)
|
||||||
|
if not quota_service.check_user_quota():
|
||||||
|
raise Exception('You have reached the limit of your quota')
|
||||||
|
|
||||||
|
try:
|
||||||
|
geocoder = MapzenGeocoder(user_geocoder_config.mapzen_api_key, logger)
|
||||||
|
country_iso3 = None
|
||||||
|
if country:
|
||||||
|
country_iso3 = country_to_iso3(country)
|
||||||
|
coordinates = geocoder.geocode(searchtext=searchtext, city=city,
|
||||||
|
state_province=state_province,
|
||||||
|
country=country_iso3)
|
||||||
|
if coordinates:
|
||||||
|
quota_service.increment_success_service_use()
|
||||||
|
plan = plpy.prepare("SELECT ST_SetSRID(ST_MakePoint($1, $2), 4326); ", ["double precision", "double precision"])
|
||||||
|
point = plpy.execute(plan, [coordinates[0], coordinates[1]], 1)[0]
|
||||||
|
return point['st_setsrid']
|
||||||
|
else:
|
||||||
|
quota_service.increment_empty_service_use()
|
||||||
|
return None
|
||||||
|
except BaseException as e:
|
||||||
|
import sys
|
||||||
|
quota_service.increment_failed_service_use()
|
||||||
|
logger.error('Error trying to geocode street point using mapzen', sys.exc_info(), data={"username": username, "orgname": orgname})
|
||||||
|
raise Exception('Error trying to geocode street point using mapzen')
|
||||||
|
finally:
|
||||||
|
quota_service.increment_total_service_use()
|
||||||
|
$$ LANGUAGE plpythonu;
|
||||||
@@ -0,0 +1,44 @@
|
|||||||
|
--DO NOT MODIFY THIS FILE, IT IS GENERATED AUTOMATICALLY FROM SOURCES
|
||||||
|
-- Complain if script is sourced in psql, rather than via CREATE EXTENSION
|
||||||
|
\echo Use "ALTER EXTENSION cdb_dataservices_server UPDATE TO '0.15.1'" to load this file. \quit
|
||||||
|
|
||||||
|
-- HERE goes your code to upgrade/downgrade
|
||||||
|
|
||||||
|
DROP FUNCTION IF EXISTS cdb_dataservices_server._OBS_ConnectUserTable(text, text, text, text, text, text);
|
||||||
|
DROP FUNCTION IF EXISTS cdb_dataservices_server.__OBS_ConnectUserTable(text, text, text, text, text, text, text);
|
||||||
|
DROP FUNCTION IF EXISTS cdb_dataservices_server._OBS_GetReturnMetadata(text, text, text, json);
|
||||||
|
DROP FUNCTION IF EXISTS cdb_dataservices_server._OBS_FetchJoinFdwTableData(text, text, text, text, text, json);
|
||||||
|
DROP FUNCTION IF EXISTS cdb_dataservices_server._OBS_DisconnectUserTable(text, text, text, text, text);
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_dataservices_server._DST_ConnectUserTable(username text, orgname text, user_db_role text, input_schema text, dbname text, table_name text)
|
||||||
|
RETURNS cdb_dataservices_server.ds_fdw_metadata AS $$
|
||||||
|
host_addr = plpy.execute("SELECT split_part(inet_client_addr()::text, '/', 1) as user_host")[0]['user_host']
|
||||||
|
return plpy.execute("SELECT * FROM cdb_dataservices_server.__DST_ConnectUserTable({username}::text, {orgname}::text, {user_db_role}::text, {schema}::text, {dbname}::text, {host_addr}::text, {table_name}::text)"
|
||||||
|
.format(username=plpy.quote_nullable(username), orgname=plpy.quote_nullable(orgname), user_db_role=plpy.quote_literal(user_db_role), schema=plpy.quote_literal(input_schema), dbname=plpy.quote_literal(dbname), table_name=plpy.quote_literal(table_name), host_addr=plpy.quote_literal(host_addr))
|
||||||
|
)[0]
|
||||||
|
$$ LANGUAGE plpythonu;
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_dataservices_server.__DST_ConnectUserTable(username text, orgname text, user_db_role text, input_schema text, dbname text, host_addr text, table_name text)
|
||||||
|
RETURNS cdb_dataservices_server.ds_fdw_metadata AS $$
|
||||||
|
CONNECT cdb_dataservices_server._obs_server_conn_str(username, orgname);
|
||||||
|
TARGET cdb_observatory._OBS_ConnectUserTable;
|
||||||
|
$$ LANGUAGE plproxy;
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_dataservices_server._DST_GetReturnMetadata(username text, orgname text, function_name text, params json)
|
||||||
|
RETURNS cdb_dataservices_server.ds_return_metadata AS $$
|
||||||
|
CONNECT cdb_dataservices_server._obs_server_conn_str(username, orgname);
|
||||||
|
TARGET cdb_observatory._OBS_GetReturnMetadata;
|
||||||
|
$$ LANGUAGE plproxy;
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_dataservices_server._DST_FetchJoinFdwTableData(username text, orgname text, table_schema text, table_name text, function_name text, params json)
|
||||||
|
RETURNS SETOF record AS $$
|
||||||
|
CONNECT cdb_dataservices_server._obs_server_conn_str(username, orgname);
|
||||||
|
TARGET cdb_observatory._OBS_FetchJoinFdwTableData;
|
||||||
|
$$ LANGUAGE plproxy;
|
||||||
|
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_dataservices_server._DST_DisconnectUserTable(username text, orgname text, table_schema text, table_name text, servername text)
|
||||||
|
RETURNS boolean AS $$
|
||||||
|
CONNECT cdb_dataservices_server._obs_server_conn_str(username, orgname);
|
||||||
|
TARGET cdb_observatory._OBS_DisconnectUserTable;
|
||||||
|
$$ LANGUAGE plproxy;
|
||||||
@@ -0,0 +1,44 @@
|
|||||||
|
--DO NOT MODIFY THIS FILE, IT IS GENERATED AUTOMATICALLY FROM SOURCES
|
||||||
|
-- Complain if script is sourced in psql, rather than via CREATE EXTENSION
|
||||||
|
\echo Use "ALTER EXTENSION cdb_dataservices_server UPDATE TO '0.15.0'" to load this file. \quit
|
||||||
|
|
||||||
|
-- HERE goes your code to upgrade/downgrade
|
||||||
|
|
||||||
|
DROP FUNCTION IF EXISTS cdb_dataservices_server._DST_ConnectUserTable(text, text, text, text, text, text);
|
||||||
|
DROP FUNCTION IF EXISTS cdb_dataservices_server.__DST_ConnectUserTable(text, text, text, text, text, text, text);
|
||||||
|
DROP FUNCTION IF EXISTS cdb_dataservices_server._DST_GetReturnMetadata(text, text, text, json);
|
||||||
|
DROP FUNCTION IF EXISTS cdb_dataservices_server._DST_FetchJoinFdwTableData(text, text, text, text, text, json);
|
||||||
|
DROP FUNCTION IF EXISTS cdb_dataservices_server._DST_DisconnectUserTable(text, text, text, text, text);
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_ConnectUserTable(username text, orgname text, user_db_role text, input_schema text, dbname text, table_name text)
|
||||||
|
RETURNS cdb_dataservices_server.ds_fdw_metadata AS $$
|
||||||
|
host_addr = plpy.execute("SELECT split_part(inet_client_addr()::text, '/', 1) as user_host")[0]['user_host']
|
||||||
|
return plpy.execute("SELECT * FROM cdb_dataservices_server.__OBS_ConnectUserTable({username}::text, {orgname}::text, {user_db_role}::text, {schema}::text, {dbname}::text, {host_addr}::text, {table_name}::text)"
|
||||||
|
.format(username=plpy.quote_nullable(username), orgname=plpy.quote_nullable(orgname), user_db_role=plpy.quote_literal(user_db_role), schema=plpy.quote_literal(input_schema), dbname=plpy.quote_literal(dbname), table_name=plpy.quote_literal(table_name), host_addr=plpy.quote_literal(host_addr))
|
||||||
|
)[0]
|
||||||
|
$$ LANGUAGE plpythonu;
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_dataservices_server.__OBS_ConnectUserTable(username text, orgname text, user_db_role text, input_schema text, dbname text, host_addr text, table_name text)
|
||||||
|
RETURNS cdb_dataservices_server.ds_fdw_metadata AS $$
|
||||||
|
CONNECT cdb_dataservices_server._obs_server_conn_str(username, orgname);
|
||||||
|
TARGET cdb_observatory._OBS_ConnectUserTable;
|
||||||
|
$$ LANGUAGE plproxy;
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_GetReturnMetadata(username text, orgname text, function_name text, params json)
|
||||||
|
RETURNS cdb_dataservices_server.ds_return_metadata AS $$
|
||||||
|
CONNECT cdb_dataservices_server._obs_server_conn_str(username, orgname);
|
||||||
|
TARGET cdb_observatory._OBS_GetReturnMetadata;
|
||||||
|
$$ LANGUAGE plproxy;
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_FetchJoinFdwTableData(username text, orgname text, table_schema text, table_name text, function_name text, params json)
|
||||||
|
RETURNS SETOF record AS $$
|
||||||
|
CONNECT cdb_dataservices_server._obs_server_conn_str(username, orgname);
|
||||||
|
TARGET cdb_observatory._OBS_FetchJoinFdwTableData;
|
||||||
|
$$ LANGUAGE plproxy;
|
||||||
|
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_DisconnectUserTable(username text, orgname text, table_schema text, table_name text, servername text)
|
||||||
|
RETURNS boolean AS $$
|
||||||
|
CONNECT cdb_dataservices_server._obs_server_conn_str(username, orgname);
|
||||||
|
TARGET cdb_observatory._OBS_DisconnectUserTable;
|
||||||
|
$$ LANGUAGE plproxy;
|
||||||
2389
server/extension/old_versions/cdb_dataservices_server--0.15.1.sql
Normal file
2389
server/extension/old_versions/cdb_dataservices_server--0.15.1.sql
Normal file
File diff suppressed because it is too large
Load Diff
@@ -2,34 +2,34 @@ CREATE TYPE cdb_dataservices_server.ds_fdw_metadata as (schemaname text, tabname
|
|||||||
|
|
||||||
CREATE TYPE cdb_dataservices_server.ds_return_metadata as (colnames text[], coltypes text[]);
|
CREATE TYPE cdb_dataservices_server.ds_return_metadata as (colnames text[], coltypes text[]);
|
||||||
|
|
||||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_ConnectUserTable(username text, orgname text, user_db_role text, input_schema text, dbname text, table_name text)
|
CREATE OR REPLACE FUNCTION cdb_dataservices_server._DST_ConnectUserTable(username text, orgname text, user_db_role text, input_schema text, dbname text, table_name text)
|
||||||
RETURNS cdb_dataservices_server.ds_fdw_metadata AS $$
|
RETURNS cdb_dataservices_server.ds_fdw_metadata AS $$
|
||||||
host_addr = plpy.execute("SELECT split_part(inet_client_addr()::text, '/', 1) as user_host")[0]['user_host']
|
host_addr = plpy.execute("SELECT split_part(inet_client_addr()::text, '/', 1) as user_host")[0]['user_host']
|
||||||
return plpy.execute("SELECT * FROM cdb_dataservices_server.__OBS_ConnectUserTable({username}::text, {orgname}::text, {user_db_role}::text, {schema}::text, {dbname}::text, {host_addr}::text, {table_name}::text)"
|
return plpy.execute("SELECT * FROM cdb_dataservices_server.__DST_ConnectUserTable({username}::text, {orgname}::text, {user_db_role}::text, {schema}::text, {dbname}::text, {host_addr}::text, {table_name}::text)"
|
||||||
.format(username=plpy.quote_nullable(username), orgname=plpy.quote_nullable(orgname), user_db_role=plpy.quote_literal(user_db_role), schema=plpy.quote_literal(input_schema), dbname=plpy.quote_literal(dbname), table_name=plpy.quote_literal(table_name), host_addr=plpy.quote_literal(host_addr))
|
.format(username=plpy.quote_nullable(username), orgname=plpy.quote_nullable(orgname), user_db_role=plpy.quote_literal(user_db_role), schema=plpy.quote_literal(input_schema), dbname=plpy.quote_literal(dbname), table_name=plpy.quote_literal(table_name), host_addr=plpy.quote_literal(host_addr))
|
||||||
)[0]
|
)[0]
|
||||||
$$ LANGUAGE plpythonu;
|
$$ LANGUAGE plpythonu;
|
||||||
|
|
||||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server.__OBS_ConnectUserTable(username text, orgname text, user_db_role text, input_schema text, dbname text, host_addr text, table_name text)
|
CREATE OR REPLACE FUNCTION cdb_dataservices_server.__DST_ConnectUserTable(username text, orgname text, user_db_role text, input_schema text, dbname text, host_addr text, table_name text)
|
||||||
RETURNS cdb_dataservices_server.ds_fdw_metadata AS $$
|
RETURNS cdb_dataservices_server.ds_fdw_metadata AS $$
|
||||||
CONNECT cdb_dataservices_server._obs_server_conn_str(username, orgname);
|
CONNECT cdb_dataservices_server._obs_server_conn_str(username, orgname);
|
||||||
TARGET cdb_observatory._OBS_ConnectUserTable;
|
TARGET cdb_observatory._OBS_ConnectUserTable;
|
||||||
$$ LANGUAGE plproxy;
|
$$ LANGUAGE plproxy;
|
||||||
|
|
||||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_GetReturnMetadata(username text, orgname text, function_name text, params json)
|
CREATE OR REPLACE FUNCTION cdb_dataservices_server._DST_GetReturnMetadata(username text, orgname text, function_name text, params json)
|
||||||
RETURNS cdb_dataservices_server.ds_return_metadata AS $$
|
RETURNS cdb_dataservices_server.ds_return_metadata AS $$
|
||||||
CONNECT cdb_dataservices_server._obs_server_conn_str(username, orgname);
|
CONNECT cdb_dataservices_server._obs_server_conn_str(username, orgname);
|
||||||
TARGET cdb_observatory._OBS_GetReturnMetadata;
|
TARGET cdb_observatory._OBS_GetReturnMetadata;
|
||||||
$$ LANGUAGE plproxy;
|
$$ LANGUAGE plproxy;
|
||||||
|
|
||||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_FetchJoinFdwTableData(username text, orgname text, table_schema text, table_name text, function_name text, params json)
|
CREATE OR REPLACE FUNCTION cdb_dataservices_server._DST_FetchJoinFdwTableData(username text, orgname text, table_schema text, table_name text, function_name text, params json)
|
||||||
RETURNS SETOF record AS $$
|
RETURNS SETOF record AS $$
|
||||||
CONNECT cdb_dataservices_server._obs_server_conn_str(username, orgname);
|
CONNECT cdb_dataservices_server._obs_server_conn_str(username, orgname);
|
||||||
TARGET cdb_observatory._OBS_FetchJoinFdwTableData;
|
TARGET cdb_observatory._OBS_FetchJoinFdwTableData;
|
||||||
$$ LANGUAGE plproxy;
|
$$ LANGUAGE plproxy;
|
||||||
|
|
||||||
|
|
||||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_DisconnectUserTable(username text, orgname text, table_schema text, table_name text, servername text)
|
CREATE OR REPLACE FUNCTION cdb_dataservices_server._DST_DisconnectUserTable(username text, orgname text, table_schema text, table_name text, servername text)
|
||||||
RETURNS boolean AS $$
|
RETURNS boolean AS $$
|
||||||
CONNECT cdb_dataservices_server._obs_server_conn_str(username, orgname);
|
CONNECT cdb_dataservices_server._obs_server_conn_str(username, orgname);
|
||||||
TARGET cdb_observatory._OBS_DisconnectUserTable;
|
TARGET cdb_observatory._OBS_DisconnectUserTable;
|
||||||
|
|||||||
@@ -10,7 +10,13 @@ RETURNS boolean AS $$
|
|||||||
return True
|
return True
|
||||||
$$ LANGUAGE plpythonu SECURITY DEFINER;
|
$$ LANGUAGE plpythonu SECURITY DEFINER;
|
||||||
|
|
||||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server._get_geocoder_config(username text, orgname text)
|
-- This is done in order to avoid an undesired depedency on cartodb extension
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_dataservices_server.cdb_conf_getconf(input_key text)
|
||||||
|
RETURNS JSON AS $$
|
||||||
|
SELECT VALUE FROM cartodb.cdb_conf WHERE key = input_key;
|
||||||
|
$$ LANGUAGE SQL STABLE SECURITY DEFINER;
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_dataservices_server._get_geocoder_config(username text, orgname text, provider text DEFAULT NULL)
|
||||||
RETURNS boolean AS $$
|
RETURNS boolean AS $$
|
||||||
cache_key = "user_geocoder_config_{0}".format(username)
|
cache_key = "user_geocoder_config_{0}".format(username)
|
||||||
if cache_key in GD:
|
if cache_key in GD:
|
||||||
@@ -19,7 +25,7 @@ RETURNS boolean AS $$
|
|||||||
from cartodb_services.metrics import GeocoderConfig
|
from cartodb_services.metrics import GeocoderConfig
|
||||||
plpy.execute("SELECT cdb_dataservices_server._connect_to_redis('{0}')".format(username))
|
plpy.execute("SELECT cdb_dataservices_server._connect_to_redis('{0}')".format(username))
|
||||||
redis_conn = GD["redis_connection_{0}".format(username)]['redis_metadata_connection']
|
redis_conn = GD["redis_connection_{0}".format(username)]['redis_metadata_connection']
|
||||||
geocoder_config = GeocoderConfig(redis_conn, plpy, username, orgname)
|
geocoder_config = GeocoderConfig(redis_conn, plpy, username, orgname, provider)
|
||||||
GD[cache_key] = geocoder_config
|
GD[cache_key] = geocoder_config
|
||||||
return True
|
return True
|
||||||
$$ LANGUAGE plpythonu SECURITY DEFINER;
|
$$ LANGUAGE plpythonu SECURITY DEFINER;
|
||||||
|
|||||||
@@ -137,29 +137,44 @@ $$ LANGUAGE plpythonu;
|
|||||||
|
|
||||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server._cdb_mapzen_geocode_street_point(username TEXT, orgname TEXT, searchtext TEXT, city TEXT DEFAULT NULL, state_province TEXT DEFAULT NULL, country TEXT DEFAULT NULL)
|
CREATE OR REPLACE FUNCTION cdb_dataservices_server._cdb_mapzen_geocode_street_point(username TEXT, orgname TEXT, searchtext TEXT, city TEXT DEFAULT NULL, state_province TEXT DEFAULT NULL, country TEXT DEFAULT NULL)
|
||||||
RETURNS Geometry AS $$
|
RETURNS Geometry AS $$
|
||||||
|
import cartodb_services
|
||||||
|
cartodb_services.init(plpy, GD)
|
||||||
from cartodb_services.mapzen import MapzenGeocoder
|
from cartodb_services.mapzen import MapzenGeocoder
|
||||||
from cartodb_services.mapzen.types import country_to_iso3
|
from cartodb_services.mapzen.types import country_to_iso3
|
||||||
from cartodb_services.metrics import QuotaService
|
from cartodb_services.metrics import QuotaService
|
||||||
from cartodb_services.tools import Logger,LoggerConfig
|
from cartodb_services.tools import Logger
|
||||||
|
from cartodb_services.refactor.tools.logger import LoggerConfigBuilder
|
||||||
|
from cartodb_services.refactor.service.mapzen_geocoder_config import MapzenGeocoderConfigBuilder
|
||||||
|
from cartodb_services.refactor.core.environment import ServerEnvironmentBuilder
|
||||||
|
from cartodb_services.refactor.backend.server_config import ServerConfigBackendFactory
|
||||||
|
from cartodb_services.refactor.backend.user_config import UserConfigBackendFactory
|
||||||
|
from cartodb_services.refactor.backend.org_config import OrgConfigBackendFactory
|
||||||
|
from cartodb_services.refactor.backend.redis_metrics_connection import RedisMetricsConnectionFactory
|
||||||
|
|
||||||
redis_conn = GD["redis_connection_{0}".format(username)]['redis_metrics_connection']
|
server_config_backend = ServerConfigBackendFactory().get()
|
||||||
user_geocoder_config = GD["user_geocoder_config_{0}".format(username)]
|
environment = ServerEnvironmentBuilder(server_config_backend).get()
|
||||||
|
user_config_backend = UserConfigBackendFactory(username, environment, server_config_backend).get()
|
||||||
|
org_config_backend = OrgConfigBackendFactory(orgname, environment, server_config_backend).get()
|
||||||
|
|
||||||
plpy.execute("SELECT cdb_dataservices_server._get_logger_config()")
|
logger_config = LoggerConfigBuilder(environment, server_config_backend).get()
|
||||||
logger_config = GD["logger_config"]
|
|
||||||
logger = Logger(logger_config)
|
logger = Logger(logger_config)
|
||||||
quota_service = QuotaService(user_geocoder_config, redis_conn)
|
|
||||||
|
mapzen_geocoder_config = MapzenGeocoderConfigBuilder(server_config_backend, user_config_backend, org_config_backend, username, orgname).get()
|
||||||
|
|
||||||
|
redis_metrics_connection = RedisMetricsConnectionFactory(environment, server_config_backend).get()
|
||||||
|
|
||||||
|
quota_service = QuotaService(mapzen_geocoder_config, redis_metrics_connection)
|
||||||
if not quota_service.check_user_quota():
|
if not quota_service.check_user_quota():
|
||||||
raise Exception('You have reached the limit of your quota')
|
raise Exception('You have reached the limit of your quota')
|
||||||
|
|
||||||
try:
|
try:
|
||||||
geocoder = MapzenGeocoder(user_geocoder_config.mapzen_api_key, logger)
|
geocoder = MapzenGeocoder(mapzen_geocoder_config.mapzen_api_key, logger)
|
||||||
country_iso3 = None
|
country_iso3 = None
|
||||||
if country:
|
if country:
|
||||||
country_iso3 = country_to_iso3(country)
|
country_iso3 = country_to_iso3(country)
|
||||||
coordinates = geocoder.geocode(searchtext=searchtext, city=city,
|
coordinates = geocoder.geocode(searchtext=searchtext, city=city,
|
||||||
state_province=state_province,
|
state_province=state_province,
|
||||||
country=country_iso3)
|
country=country_iso3, search_type='address')
|
||||||
if coordinates:
|
if coordinates:
|
||||||
quota_service.increment_success_service_use()
|
quota_service.increment_success_service_use()
|
||||||
plan = plpy.prepare("SELECT ST_SetSRID(ST_MakePoint($1, $2), 4326); ", ["double precision", "double precision"])
|
plan = plpy.prepare("SELECT ST_SetSRID(ST_MakePoint($1, $2), 4326); ", ["double precision", "double precision"])
|
||||||
|
|||||||
@@ -1,76 +1,81 @@
|
|||||||
---- cdb_geocode_namedplace_point(city_name text)
|
---- cdb_geocode_namedplace_point(city_name text)
|
||||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server.cdb_geocode_namedplace_point(username text, orgname text, city_name text)
|
CREATE OR REPLACE FUNCTION cdb_dataservices_server.cdb_geocode_namedplace_point(username text, orgname text, city_name text)
|
||||||
RETURNS Geometry AS $$
|
RETURNS Geometry AS $$
|
||||||
from cartodb_services.metrics import QuotaService
|
|
||||||
from cartodb_services.metrics import InternalGeocoderConfig
|
|
||||||
from cartodb_services.tools import Logger,LoggerConfig
|
|
||||||
|
|
||||||
|
|
||||||
plpy.execute("SELECT cdb_dataservices_server._connect_to_redis('{0}')".format(username))
|
|
||||||
redis_conn = GD["redis_connection_{0}".format(username)]['redis_metrics_connection']
|
|
||||||
plpy.execute("SELECT cdb_dataservices_server._get_internal_geocoder_config({0}, {1})".format(plpy.quote_nullable(username), plpy.quote_nullable(orgname)))
|
|
||||||
user_geocoder_config = GD["user_internal_geocoder_config_{0}".format(username)]
|
|
||||||
|
|
||||||
plpy.execute("SELECT cdb_dataservices_server._get_logger_config()")
|
|
||||||
logger_config = GD["logger_config"]
|
|
||||||
logger = Logger(logger_config)
|
|
||||||
quota_service = QuotaService(user_geocoder_config, redis_conn)
|
|
||||||
try:
|
try:
|
||||||
plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_geocode_namedplace_point(trim($1)) AS mypoint", ["text"])
|
mapzen_plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_mapzen_geocode_namedplace($1, $2, $3) as point;", ["text", "text", "text"])
|
||||||
rv = plpy.execute(plan, [city_name], 1)
|
return plpy.execute(mapzen_plan, [username, orgname, city_name])[0]['point']
|
||||||
result = rv[0]["mypoint"]
|
|
||||||
if result:
|
|
||||||
quota_service.increment_success_service_use()
|
|
||||||
return result
|
|
||||||
else:
|
|
||||||
quota_service.increment_empty_service_use()
|
|
||||||
return None
|
|
||||||
except BaseException as e:
|
except BaseException as e:
|
||||||
import sys
|
internal_plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_internal_geocode_namedplace($1, $2, $3) as point;", ["text", "text", "text"])
|
||||||
quota_service.increment_failed_service_use()
|
return plpy.execute(internal_plan, [username, orgname, city_name])[0]['point']
|
||||||
logger.error('Error trying to geocode namedplace point', sys.exc_info(), data={"username": username, "orgname": orgname})
|
|
||||||
raise Exception('Error trying to geocode namedplace point')
|
|
||||||
finally:
|
|
||||||
quota_service.increment_total_service_use()
|
|
||||||
$$ LANGUAGE plpythonu;
|
$$ LANGUAGE plpythonu;
|
||||||
|
|
||||||
---- cdb_geocode_namedplace_point(city_name text, country_name text)
|
---- cdb_geocode_namedplace_point(city_name text, country_name text)
|
||||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server.cdb_geocode_namedplace_point(username text, orgname text, city_name text, country_name text)
|
CREATE OR REPLACE FUNCTION cdb_dataservices_server.cdb_geocode_namedplace_point(username text, orgname text, city_name text, country_name text)
|
||||||
RETURNS Geometry AS $$
|
RETURNS Geometry AS $$
|
||||||
|
try:
|
||||||
|
mapzen_plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_mapzen_geocode_namedplace($1, $2, $3, NULL, $4) as point;", ["text", "text", "text", "text"])
|
||||||
|
return plpy.execute(mapzen_plan, [username, orgname, city_name, country_name])[0]['point']
|
||||||
|
except BaseException as e:
|
||||||
|
internal_plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_internal_geocode_namedplace($1, $2, $3, NULL, $4) as point;", ["text", "text", "text", "text"])
|
||||||
|
return plpy.execute(internal_plan, [username, orgname, city_name, country_name])[0]['point']
|
||||||
|
$$ LANGUAGE plpythonu;
|
||||||
|
|
||||||
|
---- cdb_geocode_namedplace_point(city_name text, admin1_name text, country_name text)
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_dataservices_server.cdb_geocode_namedplace_point(username text, orgname text, city_name text, admin1_name text, country_name text)
|
||||||
|
RETURNS Geometry AS $$
|
||||||
|
try:
|
||||||
|
mapzen_plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_mapzen_geocode_namedplace($1, $2, $3, $4, $5) as point;", ["text", "text", "text", "text", "text"])
|
||||||
|
return plpy.execute(mapzen_plan, [username, orgname, city_name, admin1_name, country_name])[0]['point']
|
||||||
|
except BaseException as e:
|
||||||
|
internal_plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_internal_geocode_namedplace($1, $2, $3, $4, $5) as point;", ["text", "text", "text", "text", "text"])
|
||||||
|
return plpy.execute(internal_plan, [username, orgname, city_name, admin1_name, country_name])[0]['point']
|
||||||
|
$$ LANGUAGE plpythonu;
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_dataservices_server._cdb_mapzen_geocode_namedplace(username text, orgname text, city_name text, admin1_name text DEFAULT NULL, country_name text DEFAULT NULL)
|
||||||
|
RETURNS Geometry AS $$
|
||||||
|
from cartodb_services.mapzen import MapzenGeocoder
|
||||||
|
from cartodb_services.mapzen.types import country_to_iso3
|
||||||
from cartodb_services.metrics import QuotaService
|
from cartodb_services.metrics import QuotaService
|
||||||
from cartodb_services.metrics import InternalGeocoderConfig
|
|
||||||
from cartodb_services.tools import Logger,LoggerConfig
|
from cartodb_services.tools import Logger,LoggerConfig
|
||||||
|
|
||||||
plpy.execute("SELECT cdb_dataservices_server._connect_to_redis('{0}')".format(username))
|
plpy.execute("SELECT cdb_dataservices_server._connect_to_redis('{0}')".format(username))
|
||||||
redis_conn = GD["redis_connection_{0}".format(username)]['redis_metrics_connection']
|
redis_conn = GD["redis_connection_{0}".format(username)]['redis_metrics_connection']
|
||||||
plpy.execute("SELECT cdb_dataservices_server._get_internal_geocoder_config({0}, {1})".format(plpy.quote_nullable(username), plpy.quote_nullable(orgname)))
|
plpy.execute("SELECT cdb_dataservices_server._get_geocoder_config({0}, {1}, {2})".format(plpy.quote_nullable(username), plpy.quote_nullable(orgname), plpy.quote_nullable('mapzen')))
|
||||||
user_geocoder_config = GD["user_internal_geocoder_config_{0}".format(username)]
|
user_geocoder_config = GD["user_geocoder_config_{0}".format(username)]
|
||||||
|
|
||||||
plpy.execute("SELECT cdb_dataservices_server._get_logger_config()")
|
plpy.execute("SELECT cdb_dataservices_server._get_logger_config()")
|
||||||
logger_config = GD["logger_config"]
|
logger_config = GD["logger_config"]
|
||||||
logger = Logger(logger_config)
|
logger = Logger(logger_config)
|
||||||
quota_service = QuotaService(user_geocoder_config, redis_conn)
|
quota_service = QuotaService(user_geocoder_config, redis_conn)
|
||||||
|
if not quota_service.check_user_quota():
|
||||||
|
raise Exception('You have reached the limit of your quota')
|
||||||
|
|
||||||
try:
|
try:
|
||||||
plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_geocode_namedplace_point(trim($1), trim($2)) AS mypoint", ["text", "text"])
|
geocoder = MapzenGeocoder(user_geocoder_config.mapzen_api_key, logger)
|
||||||
rv = plpy.execute(plan, [city_name, country_name], 1)
|
country_iso3 = None
|
||||||
result = rv[0]["mypoint"]
|
if country_name:
|
||||||
if result:
|
country_iso3 = country_to_iso3(country_name)
|
||||||
|
coordinates = geocoder.geocode(searchtext=city_name, city=None,
|
||||||
|
state_province=admin1_name,
|
||||||
|
country=country_iso3, search_type='locality')
|
||||||
|
if coordinates:
|
||||||
quota_service.increment_success_service_use()
|
quota_service.increment_success_service_use()
|
||||||
return result
|
plan = plpy.prepare("SELECT ST_SetSRID(ST_MakePoint($1, $2), 4326); ", ["double precision", "double precision"])
|
||||||
|
point = plpy.execute(plan, [coordinates[0], coordinates[1]], 1)[0]
|
||||||
|
return point['st_setsrid']
|
||||||
else:
|
else:
|
||||||
quota_service.increment_empty_service_use()
|
quota_service.increment_empty_service_use()
|
||||||
return None
|
return None
|
||||||
except BaseException as e:
|
except BaseException as e:
|
||||||
import sys
|
import sys
|
||||||
quota_service.increment_failed_service_use()
|
quota_service.increment_failed_service_use()
|
||||||
logger.error('Error trying to geocode namedplace point', sys.exc_info(), data={"username": username, "orgname": orgname})
|
logger.error('Error trying to geocode city point using mapzen', sys.exc_info(), data={"username": username, "orgname": orgname})
|
||||||
raise Exception('Error trying to geocode namedplace point')
|
raise Exception('Error trying to geocode city point using mapzen')
|
||||||
finally:
|
finally:
|
||||||
quota_service.increment_total_service_use()
|
quota_service.increment_total_service_use()
|
||||||
$$ LANGUAGE plpythonu;
|
$$ LANGUAGE plpythonu;
|
||||||
|
|
||||||
---- cdb_geocode_namedplace_point(city_name text, admin1_name text, country_name text)
|
CREATE OR REPLACE FUNCTION cdb_dataservices_server._cdb_internal_geocode_namedplace(username text, orgname text, city_name text, admin1_name text DEFAULT NULL, country_name text DEFAULT NULL)
|
||||||
CREATE OR REPLACE FUNCTION cdb_dataservices_server.cdb_geocode_namedplace_point(username text, orgname text, city_name text, admin1_name text, country_name text)
|
|
||||||
RETURNS Geometry AS $$
|
RETURNS Geometry AS $$
|
||||||
from cartodb_services.metrics import QuotaService
|
from cartodb_services.metrics import QuotaService
|
||||||
from cartodb_services.metrics import InternalGeocoderConfig
|
from cartodb_services.metrics import InternalGeocoderConfig
|
||||||
@@ -86,8 +91,15 @@ RETURNS Geometry AS $$
|
|||||||
logger = Logger(logger_config)
|
logger = Logger(logger_config)
|
||||||
quota_service = QuotaService(user_geocoder_config, redis_conn)
|
quota_service = QuotaService(user_geocoder_config, redis_conn)
|
||||||
try:
|
try:
|
||||||
plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_geocode_namedplace_point(trim($1), trim($2), trim($3)) AS mypoint", ["text", "text", "text"])
|
if admin1_name and country_name:
|
||||||
rv = plpy.execute(plan, [city_name, admin1_name, country_name], 1)
|
plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_geocode_namedplace_point(trim($1), trim($2), trim($3)) AS mypoint", ["text", "text", "text"])
|
||||||
|
rv = plpy.execute(plan, [city_name, admin1_name, country_name], 1)
|
||||||
|
elif country_name:
|
||||||
|
plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_geocode_namedplace_point(trim($1), trim($2)) AS mypoint", ["text", "text"])
|
||||||
|
rv = plpy.execute(plan, [city_name, country_name], 1)
|
||||||
|
else:
|
||||||
|
plan = plpy.prepare("SELECT cdb_dataservices_server._cdb_geocode_namedplace_point(trim($1)) AS mypoint", ["text"])
|
||||||
|
rv = plpy.execute(plan, [city_name], 1)
|
||||||
result = rv[0]["mypoint"]
|
result = rv[0]["mypoint"]
|
||||||
if result:
|
if result:
|
||||||
quota_service.increment_success_service_use()
|
quota_service.increment_success_service_use()
|
||||||
|
|||||||
@@ -4,7 +4,6 @@ CREATE EXTENSION plpythonu;
|
|||||||
CREATE EXTENSION plproxy;
|
CREATE EXTENSION plproxy;
|
||||||
CREATE EXTENSION cartodb;
|
CREATE EXTENSION cartodb;
|
||||||
CREATE EXTENSION cdb_geocoder;
|
CREATE EXTENSION cdb_geocoder;
|
||||||
CREATE EXTENSION observatory VERSION 'dev';
|
|
||||||
-- Install the extension
|
-- Install the extension
|
||||||
CREATE EXTENSION cdb_dataservices_server;
|
CREATE EXTENSION cdb_dataservices_server;
|
||||||
-- Mock the redis server connection to point to this very test db
|
-- Mock the redis server connection to point to this very test db
|
||||||
|
|||||||
@@ -2,7 +2,7 @@ SELECT exists(SELECT *
|
|||||||
FROM pg_proc p
|
FROM pg_proc p
|
||||||
INNER JOIN pg_namespace ns ON (p.pronamespace = ns.oid)
|
INNER JOIN pg_namespace ns ON (p.pronamespace = ns.oid)
|
||||||
WHERE ns.nspname = 'cdb_dataservices_server'
|
WHERE ns.nspname = 'cdb_dataservices_server'
|
||||||
AND proname = '_obs_connectusertable'
|
AND proname = '_dst_connectusertable'
|
||||||
AND oidvectortypes(p.proargtypes) = 'text, text, text, text, text, text');
|
AND oidvectortypes(p.proargtypes) = 'text, text, text, text, text, text');
|
||||||
exists
|
exists
|
||||||
--------
|
--------
|
||||||
@@ -13,7 +13,7 @@ SELECT exists(SELECT *
|
|||||||
FROM pg_proc p
|
FROM pg_proc p
|
||||||
INNER JOIN pg_namespace ns ON (p.pronamespace = ns.oid)
|
INNER JOIN pg_namespace ns ON (p.pronamespace = ns.oid)
|
||||||
WHERE ns.nspname = 'cdb_dataservices_server'
|
WHERE ns.nspname = 'cdb_dataservices_server'
|
||||||
AND proname = '_obs_getreturnmetadata'
|
AND proname = '_dst_getreturnmetadata'
|
||||||
AND oidvectortypes(p.proargtypes) = 'text, text, text, json');
|
AND oidvectortypes(p.proargtypes) = 'text, text, text, json');
|
||||||
exists
|
exists
|
||||||
--------
|
--------
|
||||||
@@ -24,7 +24,7 @@ SELECT exists(SELECT *
|
|||||||
FROM pg_proc p
|
FROM pg_proc p
|
||||||
INNER JOIN pg_namespace ns ON (p.pronamespace = ns.oid)
|
INNER JOIN pg_namespace ns ON (p.pronamespace = ns.oid)
|
||||||
WHERE ns.nspname = 'cdb_dataservices_server'
|
WHERE ns.nspname = 'cdb_dataservices_server'
|
||||||
AND proname = '_obs_fetchjoinfdwtabledata'
|
AND proname = '_dst_fetchjoinfdwtabledata'
|
||||||
AND oidvectortypes(p.proargtypes) = 'text, text, text, text, text, json');
|
AND oidvectortypes(p.proargtypes) = 'text, text, text, text, text, json');
|
||||||
exists
|
exists
|
||||||
--------
|
--------
|
||||||
@@ -35,7 +35,7 @@ SELECT exists(SELECT *
|
|||||||
FROM pg_proc p
|
FROM pg_proc p
|
||||||
INNER JOIN pg_namespace ns ON (p.pronamespace = ns.oid)
|
INNER JOIN pg_namespace ns ON (p.pronamespace = ns.oid)
|
||||||
WHERE ns.nspname = 'cdb_dataservices_server'
|
WHERE ns.nspname = 'cdb_dataservices_server'
|
||||||
AND proname = '_obs_disconnectusertable'
|
AND proname = '_dst_disconnectusertable'
|
||||||
AND oidvectortypes(p.proargtypes) = 'text, text, text, text, text');
|
AND oidvectortypes(p.proargtypes) = 'text, text, text, text, text');
|
||||||
exists
|
exists
|
||||||
--------
|
--------
|
||||||
|
|||||||
@@ -27,7 +27,7 @@ INSERT INTO global_cities_alternates_limited (geoname_id, name, preferred, lower
|
|||||||
'POINT(0.6983 39.26787)',4326)
|
'POINT(0.6983 39.26787)',4326)
|
||||||
);
|
);
|
||||||
-- Insert dummy data into country decoder table
|
-- Insert dummy data into country decoder table
|
||||||
INSERT INTO country_decoder (synonyms, iso2) VALUES (Array['spain'], 'ES');
|
INSERT INTO country_decoder (synonyms, iso2) VALUES (Array['spain', 'Spain'], 'ES');
|
||||||
-- Insert dummy data into admin1 decoder table
|
-- Insert dummy data into admin1 decoder table
|
||||||
INSERT INTO admin1_decoder (admin1, synonyms, iso2) VALUES ('Valencia', Array['valencia', 'Valencia'], 'ES');
|
INSERT INTO admin1_decoder (admin1, synonyms, iso2) VALUES ('Valencia', Array['valencia', 'Valencia'], 'ES');
|
||||||
-- This should return the point inserted above
|
-- This should return the point inserted above
|
||||||
|
|||||||
@@ -4,7 +4,6 @@ CREATE EXTENSION plpythonu;
|
|||||||
CREATE EXTENSION plproxy;
|
CREATE EXTENSION plproxy;
|
||||||
CREATE EXTENSION cartodb;
|
CREATE EXTENSION cartodb;
|
||||||
CREATE EXTENSION cdb_geocoder;
|
CREATE EXTENSION cdb_geocoder;
|
||||||
CREATE EXTENSION observatory VERSION 'dev';
|
|
||||||
|
|
||||||
-- Install the extension
|
-- Install the extension
|
||||||
CREATE EXTENSION cdb_dataservices_server;
|
CREATE EXTENSION cdb_dataservices_server;
|
||||||
|
|||||||
@@ -2,27 +2,27 @@ SELECT exists(SELECT *
|
|||||||
FROM pg_proc p
|
FROM pg_proc p
|
||||||
INNER JOIN pg_namespace ns ON (p.pronamespace = ns.oid)
|
INNER JOIN pg_namespace ns ON (p.pronamespace = ns.oid)
|
||||||
WHERE ns.nspname = 'cdb_dataservices_server'
|
WHERE ns.nspname = 'cdb_dataservices_server'
|
||||||
AND proname = '_obs_connectusertable'
|
AND proname = '_dst_connectusertable'
|
||||||
AND oidvectortypes(p.proargtypes) = 'text, text, text, text, text, text');
|
AND oidvectortypes(p.proargtypes) = 'text, text, text, text, text, text');
|
||||||
|
|
||||||
SELECT exists(SELECT *
|
SELECT exists(SELECT *
|
||||||
FROM pg_proc p
|
FROM pg_proc p
|
||||||
INNER JOIN pg_namespace ns ON (p.pronamespace = ns.oid)
|
INNER JOIN pg_namespace ns ON (p.pronamespace = ns.oid)
|
||||||
WHERE ns.nspname = 'cdb_dataservices_server'
|
WHERE ns.nspname = 'cdb_dataservices_server'
|
||||||
AND proname = '_obs_getreturnmetadata'
|
AND proname = '_dst_getreturnmetadata'
|
||||||
AND oidvectortypes(p.proargtypes) = 'text, text, text, json');
|
AND oidvectortypes(p.proargtypes) = 'text, text, text, json');
|
||||||
|
|
||||||
SELECT exists(SELECT *
|
SELECT exists(SELECT *
|
||||||
FROM pg_proc p
|
FROM pg_proc p
|
||||||
INNER JOIN pg_namespace ns ON (p.pronamespace = ns.oid)
|
INNER JOIN pg_namespace ns ON (p.pronamespace = ns.oid)
|
||||||
WHERE ns.nspname = 'cdb_dataservices_server'
|
WHERE ns.nspname = 'cdb_dataservices_server'
|
||||||
AND proname = '_obs_fetchjoinfdwtabledata'
|
AND proname = '_dst_fetchjoinfdwtabledata'
|
||||||
AND oidvectortypes(p.proargtypes) = 'text, text, text, text, text, json');
|
AND oidvectortypes(p.proargtypes) = 'text, text, text, text, text, json');
|
||||||
|
|
||||||
SELECT exists(SELECT *
|
SELECT exists(SELECT *
|
||||||
FROM pg_proc p
|
FROM pg_proc p
|
||||||
INNER JOIN pg_namespace ns ON (p.pronamespace = ns.oid)
|
INNER JOIN pg_namespace ns ON (p.pronamespace = ns.oid)
|
||||||
WHERE ns.nspname = 'cdb_dataservices_server'
|
WHERE ns.nspname = 'cdb_dataservices_server'
|
||||||
AND proname = '_obs_disconnectusertable'
|
AND proname = '_dst_disconnectusertable'
|
||||||
AND oidvectortypes(p.proargtypes) = 'text, text, text, text, text');
|
AND oidvectortypes(p.proargtypes) = 'text, text, text, text, text');
|
||||||
|
|
||||||
|
|||||||
@@ -15,7 +15,7 @@ INSERT INTO global_cities_alternates_limited (geoname_id, name, preferred, lower
|
|||||||
);
|
);
|
||||||
|
|
||||||
-- Insert dummy data into country decoder table
|
-- Insert dummy data into country decoder table
|
||||||
INSERT INTO country_decoder (synonyms, iso2) VALUES (Array['spain'], 'ES');
|
INSERT INTO country_decoder (synonyms, iso2) VALUES (Array['spain', 'Spain'], 'ES');
|
||||||
|
|
||||||
-- Insert dummy data into admin1 decoder table
|
-- Insert dummy data into admin1 decoder table
|
||||||
INSERT INTO admin1_decoder (admin1, synonyms, iso2) VALUES ('Valencia', Array['valencia', 'Valencia'], 'ES');
|
INSERT INTO admin1_decoder (admin1, synonyms, iso2) VALUES ('Valencia', Array['valencia', 'Valencia'], 'ES');
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
# CartoDB dataservices API python module
|
# CARTO dataservices API python module
|
||||||
|
|
||||||
This directory contains the python library used by the server side of CARTO LDS (Location Data Services).
|
This directory contains the python library used by the server side of CARTO LDS (Location Data Services).
|
||||||
|
|
||||||
|
|||||||
@@ -0,0 +1,35 @@
|
|||||||
|
# NOTE: This init function must be called from plpythonu entry points to
|
||||||
|
# initialize cartodb_services module properly. E.g:
|
||||||
|
#
|
||||||
|
# CREATE OR REPLACE FUNCTION cdb_dataservices_server.cdb_isochrone(...)
|
||||||
|
# RETURNS SETOF cdb_dataservices_server.isoline AS $$
|
||||||
|
#
|
||||||
|
# import cartodb_services
|
||||||
|
# cartodb_services.init(plpy, GD)
|
||||||
|
#
|
||||||
|
# # rest of the code here
|
||||||
|
# cartodb_services.GD[key] = val
|
||||||
|
# cartodb_services.plpy.execute('SELECT * FROM ...')
|
||||||
|
#
|
||||||
|
# $$ LANGUAGE plpythonu;
|
||||||
|
|
||||||
|
plpy = None
|
||||||
|
GD = None
|
||||||
|
|
||||||
|
def init(_plpy, _GD):
|
||||||
|
global plpy
|
||||||
|
global GD
|
||||||
|
|
||||||
|
if plpy is None:
|
||||||
|
plpy = _plpy
|
||||||
|
|
||||||
|
if GD is None:
|
||||||
|
GD = _GD
|
||||||
|
|
||||||
|
def _reset():
|
||||||
|
# NOTE: just for testing
|
||||||
|
global plpy
|
||||||
|
global GD
|
||||||
|
|
||||||
|
plpy = None
|
||||||
|
GD = None
|
||||||
|
|||||||
@@ -19,3 +19,15 @@ class MalformedResult(Exception):
|
|||||||
class TimeoutException(Exception):
|
class TimeoutException(Exception):
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return repr('Timeout requesting to mapzen server')
|
return repr('Timeout requesting to mapzen server')
|
||||||
|
|
||||||
|
|
||||||
|
class ServiceException(Exception):
|
||||||
|
def __init__(self, message, response):
|
||||||
|
self.message = message
|
||||||
|
self.response = response
|
||||||
|
|
||||||
|
def response(self):
|
||||||
|
return self.response
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
return self.message
|
||||||
|
|||||||
@@ -2,7 +2,7 @@ import requests
|
|||||||
import json
|
import json
|
||||||
import re
|
import re
|
||||||
|
|
||||||
from exceptions import WrongParams, MalformedResult
|
from exceptions import WrongParams, MalformedResult, ServiceException
|
||||||
from qps import qps_retry
|
from qps import qps_retry
|
||||||
from cartodb_services.tools import Coordinate, PolyLine
|
from cartodb_services.tools import Coordinate, PolyLine
|
||||||
|
|
||||||
@@ -17,11 +17,12 @@ class MapzenGeocoder:
|
|||||||
self._url = base_url
|
self._url = base_url
|
||||||
self._logger = logger
|
self._logger = logger
|
||||||
|
|
||||||
@qps_retry
|
@qps_retry(qps=20)
|
||||||
def geocode(self, searchtext, city=None, state_province=None, country=None):
|
def geocode(self, searchtext, city=None, state_province=None,
|
||||||
|
country=None, search_type=None):
|
||||||
request_params = self._build_requests_parameters(searchtext, city,
|
request_params = self._build_requests_parameters(searchtext, city,
|
||||||
state_province,
|
state_province,
|
||||||
country)
|
country, search_type)
|
||||||
try:
|
try:
|
||||||
response = requests.get(self._url, params=request_params)
|
response = requests.get(self._url, params=request_params)
|
||||||
if response.status_code == requests.codes.ok:
|
if response.status_code == requests.codes.ok:
|
||||||
@@ -31,30 +32,32 @@ class MapzenGeocoder:
|
|||||||
else:
|
else:
|
||||||
self._logger.error('Error trying to geocode using mapzen',
|
self._logger.error('Error trying to geocode using mapzen',
|
||||||
data={"response_status": response.status_code,
|
data={"response_status": response.status_code,
|
||||||
"response_reason": response.reason,
|
"response_reason": response.reason,
|
||||||
"response_content": response.text,
|
"response_content": response.text,
|
||||||
"reponse_url": response.url,
|
"reponse_url": response.url,
|
||||||
"response_headers": response.headers,
|
"response_headers": response.headers,
|
||||||
"searchtext": searchtext,
|
"searchtext": searchtext,
|
||||||
"city": city, "country": country,
|
"city": city, "country": country,
|
||||||
"state_province": state_province })
|
"state_province": state_province})
|
||||||
raise Exception('Error trying to geocode {0} using mapzen'.format(searchtext))
|
raise ServiceException('Error trying to geocode {0} using mapzen'.format(searchtext),
|
||||||
|
response)
|
||||||
except requests.ConnectionError as e:
|
except requests.ConnectionError as e:
|
||||||
# Don't raise the exception to continue with the geocoding job
|
# Don't raise the exception to continue with the geocoding job
|
||||||
self._logger.error('Error connecting to Mapzen geocoding server',
|
self._logger.error('Error connecting to Mapzen geocoding server',
|
||||||
exception=e)
|
exception=e)
|
||||||
return []
|
return []
|
||||||
|
|
||||||
|
|
||||||
def _build_requests_parameters(self, searchtext, city=None,
|
def _build_requests_parameters(self, searchtext, city=None,
|
||||||
state_province=None, country=None):
|
state_province=None, country=None,
|
||||||
|
search_type=None):
|
||||||
request_params = {}
|
request_params = {}
|
||||||
search_string = self._build_search_text(searchtext.strip(),
|
search_string = self._build_search_text(searchtext.strip(),
|
||||||
city,
|
city,
|
||||||
state_province)
|
state_province)
|
||||||
request_params['text'] = search_string
|
request_params['text'] = search_string
|
||||||
request_params['layers'] = 'address'
|
|
||||||
request_params['api_key'] = self._app_key
|
request_params['api_key'] = self._app_key
|
||||||
|
if search_type:
|
||||||
|
request_params['layers'] = search_type
|
||||||
if country:
|
if country:
|
||||||
request_params['boundary.country'] = country
|
request_params['boundary.country'] = country
|
||||||
return request_params
|
return request_params
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
import requests
|
import requests
|
||||||
import json
|
import json
|
||||||
from qps import qps_retry
|
from qps import qps_retry
|
||||||
|
from exceptions import ServiceException
|
||||||
|
|
||||||
|
|
||||||
class MatrixClient:
|
class MatrixClient:
|
||||||
@@ -51,6 +52,6 @@ class MatrixClient:
|
|||||||
"response_headers": response.headers,
|
"response_headers": response.headers,
|
||||||
"locations": locations,
|
"locations": locations,
|
||||||
"costing": costing})
|
"costing": costing})
|
||||||
raise Exception('Error trying to get matrix distance from mapzen')
|
raise ServiceException("Error trying to get matrix distance from mapzen", response)
|
||||||
|
|
||||||
return response.json()
|
return response.json()
|
||||||
|
|||||||
@@ -4,18 +4,38 @@ from datetime import datetime
|
|||||||
from exceptions import TimeoutException
|
from exceptions import TimeoutException
|
||||||
|
|
||||||
DEFAULT_RETRY_TIMEOUT = 60
|
DEFAULT_RETRY_TIMEOUT = 60
|
||||||
|
DEFAULT_QUERIES_PER_SECOND = 10
|
||||||
|
|
||||||
|
def qps_retry(original_function=None,**options):
|
||||||
def qps_retry(f):
|
""" Query Per Second retry decorator
|
||||||
def wrapped_f(*args, **kw):
|
The intention of this decorator is to retry requests against third
|
||||||
return QPSService().call(f, *args, **kw)
|
party services that has QPS restriction.
|
||||||
return wrapped_f
|
Parameters:
|
||||||
|
- timeout: Maximum number of seconds to retry
|
||||||
|
- qps: Allowed queries per second. This parameter is used to
|
||||||
|
calculate the next time to retry the request
|
||||||
|
"""
|
||||||
|
if original_function is not None:
|
||||||
|
def wrapped_function(*args, **kwargs):
|
||||||
|
if 'timeout' in options:
|
||||||
|
timeout = options['timeout']
|
||||||
|
else:
|
||||||
|
timeout = DEFAULT_RETRY_TIMEOUT
|
||||||
|
if 'qps' in options:
|
||||||
|
qps = options['qps']
|
||||||
|
else:
|
||||||
|
qps = DEFAULT_QUERIES_PER_SECOND
|
||||||
|
return QPSService(retry_timeout=timeout, queries_per_second=qps).call(original_function, *args, **kwargs)
|
||||||
|
return wrapped_function
|
||||||
|
else:
|
||||||
|
def partial_wrapper(func):
|
||||||
|
return qps_retry(func, **options)
|
||||||
|
return partial_wrapper
|
||||||
|
|
||||||
|
|
||||||
class QPSService:
|
class QPSService:
|
||||||
|
|
||||||
def __init__(self, queries_per_second=10,
|
def __init__(self, queries_per_second, retry_timeout):
|
||||||
retry_timeout=DEFAULT_RETRY_TIMEOUT):
|
|
||||||
self._queries_per_second = queries_per_second
|
self._queries_per_second = queries_per_second
|
||||||
self._retry_timeout = retry_timeout
|
self._retry_timeout = retry_timeout
|
||||||
|
|
||||||
@@ -27,7 +47,7 @@ class QPSService:
|
|||||||
return fn(*args, **kwargs)
|
return fn(*args, **kwargs)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
response = getattr(e, 'response', None)
|
response = getattr(e, 'response', None)
|
||||||
if response and (response.status_code == 429):
|
if response is not None and (response.status_code == 429):
|
||||||
self.retry(start_time, attempt_number)
|
self.retry(start_time, attempt_number)
|
||||||
else:
|
else:
|
||||||
raise e
|
raise e
|
||||||
@@ -35,7 +55,7 @@ class QPSService:
|
|||||||
|
|
||||||
def retry(self, first_request_time, retry_count):
|
def retry(self, first_request_time, retry_count):
|
||||||
elapsed = datetime.now() - first_request_time
|
elapsed = datetime.now() - first_request_time
|
||||||
if elapsed.seconds > self._retry_timeout:
|
if elapsed.microseconds > (self._retry_timeout * 1000.0):
|
||||||
raise TimeoutException()
|
raise TimeoutException()
|
||||||
|
|
||||||
# inverse qps * (1.5 ^ i) is an increased sleep time of 1.5x per
|
# inverse qps * (1.5 ^ i) is an increased sleep time of 1.5x per
|
||||||
|
|||||||
@@ -2,7 +2,7 @@ import requests
|
|||||||
import json
|
import json
|
||||||
import re
|
import re
|
||||||
|
|
||||||
from exceptions import WrongParams, MalformedResult
|
from exceptions import WrongParams, MalformedResult, ServiceException
|
||||||
from qps import qps_retry
|
from qps import qps_retry
|
||||||
from cartodb_services.tools import Coordinate, PolyLine
|
from cartodb_services.tools import Coordinate, PolyLine
|
||||||
|
|
||||||
@@ -57,7 +57,7 @@ class MapzenRouting:
|
|||||||
"response_headers": response.headers,
|
"response_headers": response.headers,
|
||||||
"waypoints": waypoints, "mode": mode,
|
"waypoints": waypoints, "mode": mode,
|
||||||
"options": options})
|
"options": options})
|
||||||
raise Exception('Error trying to calculate route using Mapzen')
|
raise ServiceException('Error trying to calculate route using Mapzen', response)
|
||||||
|
|
||||||
def __parse_options(self, options):
|
def __parse_options(self, options):
|
||||||
return dict(option.split('=') for option in options)
|
return dict(option.split('=') for option in options)
|
||||||
|
|||||||
@@ -286,11 +286,11 @@ class GeocoderConfig(ServiceConfig):
|
|||||||
PERIOD_END_DATE = 'period_end_date'
|
PERIOD_END_DATE = 'period_end_date'
|
||||||
DEFAULT_PROVIDER = 'mapzen'
|
DEFAULT_PROVIDER = 'mapzen'
|
||||||
|
|
||||||
def __init__(self, redis_connection, db_conn, username, orgname=None):
|
def __init__(self, redis_connection, db_conn, username, orgname=None, forced_provider=None):
|
||||||
super(GeocoderConfig, self).__init__(redis_connection, db_conn,
|
super(GeocoderConfig, self).__init__(redis_connection, db_conn,
|
||||||
username, orgname)
|
username, orgname)
|
||||||
filtered_config = {key: self._redis_config[key] for key in self.GEOCODER_CONFIG_KEYS if key in self._redis_config.keys()}
|
filtered_config = {key: self._redis_config[key] for key in self.GEOCODER_CONFIG_KEYS if key in self._redis_config.keys()}
|
||||||
self.__parse_config(filtered_config, self._db_config)
|
self.__parse_config(filtered_config, self._db_config, forced_provider)
|
||||||
self.__check_config(filtered_config)
|
self.__check_config(filtered_config)
|
||||||
|
|
||||||
def __check_config(self, filtered_config):
|
def __check_config(self, filtered_config):
|
||||||
@@ -307,9 +307,12 @@ class GeocoderConfig(ServiceConfig):
|
|||||||
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def __parse_config(self, filtered_config, db_config):
|
def __parse_config(self, filtered_config, db_config, forced_provider):
|
||||||
self._geocoder_provider = filtered_config[self.GEOCODER_PROVIDER].lower()
|
if forced_provider:
|
||||||
if not self._geocoder_provider:
|
self._geocoder_provider = forced_provider
|
||||||
|
elif filtered_config[self.GEOCODER_PROVIDER].lower():
|
||||||
|
self._geocoder_provider = filtered_config[self.GEOCODER_PROVIDER].lower()
|
||||||
|
else:
|
||||||
self._geocoder_provider = self.DEFAULT_PROVIDER
|
self._geocoder_provider = self.DEFAULT_PROVIDER
|
||||||
self._geocoding_quota = float(filtered_config[self.QUOTA_KEY])
|
self._geocoding_quota = float(filtered_config[self.QUOTA_KEY])
|
||||||
self._period_end_date = date_parse(filtered_config[self.PERIOD_END_DATE])
|
self._period_end_date = date_parse(filtered_config[self.PERIOD_END_DATE])
|
||||||
|
|||||||
@@ -0,0 +1,24 @@
|
|||||||
|
from cartodb_services.refactor.storage.redis_connection_config import RedisMetadataConnectionConfigBuilder
|
||||||
|
from cartodb_services.refactor.storage.redis_connection import RedisConnectionBuilder
|
||||||
|
from cartodb_services.refactor.storage.redis_config import RedisOrgConfigStorageBuilder
|
||||||
|
|
||||||
|
class OrgConfigBackendFactory(object):
|
||||||
|
"""
|
||||||
|
This class abstracts the creation of an org configuration backend. It will return
|
||||||
|
an implementation of the ConfigBackendInterface appropriate to the org, depending
|
||||||
|
on the environment.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, orgname, environment, server_config_backend):
|
||||||
|
self._orgname = orgname
|
||||||
|
self._environment = environment
|
||||||
|
self._server_config_backend = server_config_backend
|
||||||
|
|
||||||
|
def get(self):
|
||||||
|
if self._environment.is_onpremise:
|
||||||
|
org_config_backend = self._server_config_backend
|
||||||
|
else:
|
||||||
|
redis_metadata_connection_config = RedisMetadataConnectionConfigBuilder(self._server_config_backend).get()
|
||||||
|
redis_metadata_connection = RedisConnectionBuilder(redis_metadata_connection_config).get()
|
||||||
|
org_config_backend = RedisOrgConfigStorageBuilder(redis_metadata_connection, self._orgname).get()
|
||||||
|
return org_config_backend
|
||||||
@@ -0,0 +1,17 @@
|
|||||||
|
from cartodb_services.refactor.tools.redis_mock import RedisConnectionMock
|
||||||
|
from cartodb_services.refactor.storage.redis_connection_config import RedisMetricsConnectionConfigBuilder
|
||||||
|
from cartodb_services.refactor.storage.redis_connection import RedisConnectionBuilder
|
||||||
|
|
||||||
|
class RedisMetricsConnectionFactory(object):
|
||||||
|
def __init__(self, environment, server_config_storage):
|
||||||
|
self._environment = environment
|
||||||
|
self._server_config_storage = server_config_storage
|
||||||
|
|
||||||
|
def get(self):
|
||||||
|
if self._environment.is_onpremise:
|
||||||
|
redis_metrics_connection = RedisConnectionMock()
|
||||||
|
else:
|
||||||
|
redis_metrics_connection_config = RedisMetricsConnectionConfigBuilder(self._server_config_storage).get()
|
||||||
|
redis_metrics_connection = RedisConnectionBuilder(redis_metrics_connection_config).get()
|
||||||
|
return redis_metrics_connection
|
||||||
|
|
||||||
@@ -0,0 +1,13 @@
|
|||||||
|
from cartodb_services.refactor.storage.server_config import InDbServerConfigStorage
|
||||||
|
|
||||||
|
|
||||||
|
class ServerConfigBackendFactory(object):
|
||||||
|
"""
|
||||||
|
This class creates a backend to retrieve server configurations (implementing the ConfigBackendInterface).
|
||||||
|
|
||||||
|
At this moment it will always return an InDbServerConfigStorage, but nothing prevents from changing the
|
||||||
|
implementation. To something that reads from a file, memory or whatever. It is mostly there to keep
|
||||||
|
the layers separated.
|
||||||
|
"""
|
||||||
|
def get(self):
|
||||||
|
return InDbServerConfigStorage()
|
||||||
@@ -0,0 +1,24 @@
|
|||||||
|
from cartodb_services.refactor.storage.redis_connection_config import RedisMetadataConnectionConfigBuilder
|
||||||
|
from cartodb_services.refactor.storage.redis_connection import RedisConnectionBuilder
|
||||||
|
from cartodb_services.refactor.storage.redis_config import RedisUserConfigStorageBuilder
|
||||||
|
|
||||||
|
class UserConfigBackendFactory(object):
|
||||||
|
"""
|
||||||
|
This class abstracts the creation of a user configuration backend. It will return
|
||||||
|
an implementation of the ConfigBackendInterface appropriate to the user, depending
|
||||||
|
on the environment.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, username, environment, server_config_backend):
|
||||||
|
self._username = username
|
||||||
|
self._environment = environment
|
||||||
|
self._server_config_backend = server_config_backend
|
||||||
|
|
||||||
|
def get(self):
|
||||||
|
if self._environment.is_onpremise:
|
||||||
|
user_config_backend = self._server_config_backend
|
||||||
|
else:
|
||||||
|
redis_metadata_connection_config = RedisMetadataConnectionConfigBuilder(self._server_config_backend).get()
|
||||||
|
redis_metadata_connection = RedisConnectionBuilder(redis_metadata_connection_config).get()
|
||||||
|
user_config_backend = RedisUserConfigStorageBuilder(redis_metadata_connection, self._username).get()
|
||||||
|
return user_config_backend
|
||||||
@@ -0,0 +1,2 @@
|
|||||||
|
class ConfigException(Exception):
|
||||||
|
pass
|
||||||
@@ -0,0 +1,57 @@
|
|||||||
|
class ServerEnvironment(object):
|
||||||
|
|
||||||
|
DEVELOPMENT = 'development'
|
||||||
|
STAGING = 'staging'
|
||||||
|
PRODUCTION = 'production'
|
||||||
|
ONPREMISE = 'onpremise'
|
||||||
|
|
||||||
|
VALID_ENVIRONMENTS = [
|
||||||
|
DEVELOPMENT,
|
||||||
|
STAGING,
|
||||||
|
PRODUCTION,
|
||||||
|
ONPREMISE
|
||||||
|
]
|
||||||
|
|
||||||
|
def __init__(self, environment_str):
|
||||||
|
assert environment_str in self.VALID_ENVIRONMENTS
|
||||||
|
self._environment_str = environment_str
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
return self._environment_str
|
||||||
|
|
||||||
|
@property
|
||||||
|
def is_development(self):
|
||||||
|
return self._environment_str == self.DEVELOPMENT
|
||||||
|
|
||||||
|
@property
|
||||||
|
def is_staging(self):
|
||||||
|
return self._environment_str == self.STAGING
|
||||||
|
|
||||||
|
@property
|
||||||
|
def is_production(self):
|
||||||
|
return self._environment_str == self.PRODUCTION
|
||||||
|
|
||||||
|
@property
|
||||||
|
def is_onpremise(self):
|
||||||
|
return self._environment_str == self.ONPREMISE
|
||||||
|
|
||||||
|
def __eq__(self, other):
|
||||||
|
return self._environment_str == other._environment_str
|
||||||
|
|
||||||
|
|
||||||
|
class ServerEnvironmentBuilder(object):
|
||||||
|
|
||||||
|
DEFAULT_ENVIRONMENT = ServerEnvironment.DEVELOPMENT
|
||||||
|
|
||||||
|
def __init__(self, server_config_storage):
|
||||||
|
self._server_config_storage = server_config_storage
|
||||||
|
|
||||||
|
def get(self):
|
||||||
|
server_config = self._server_config_storage.get('server_conf')
|
||||||
|
|
||||||
|
if not server_config or 'environment' not in server_config:
|
||||||
|
environment_str = self.DEFAULT_ENVIRONMENT
|
||||||
|
else:
|
||||||
|
environment_str = server_config['environment']
|
||||||
|
|
||||||
|
return ServerEnvironment(environment_str)
|
||||||
@@ -0,0 +1,11 @@
|
|||||||
|
import abc
|
||||||
|
|
||||||
|
class ConfigBackendInterface(object):
|
||||||
|
"""This is an interface that all config backends must abide to"""
|
||||||
|
|
||||||
|
__metaclass__ = abc.ABCMeta
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def get(self, key):
|
||||||
|
"""Return a value based on the key supplied from some storage"""
|
||||||
|
pass
|
||||||
@@ -0,0 +1,112 @@
|
|||||||
|
from dateutil.parser import parse as date_parse
|
||||||
|
|
||||||
|
class MapzenGeocoderConfig(object):
|
||||||
|
"""
|
||||||
|
Value object that represents the configuration needed to operate the mapzen service.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self,
|
||||||
|
geocoding_quota,
|
||||||
|
soft_geocoding_limit,
|
||||||
|
period_end_date,
|
||||||
|
cost_per_hit,
|
||||||
|
log_path,
|
||||||
|
mapzen_api_key,
|
||||||
|
username,
|
||||||
|
organization):
|
||||||
|
self._geocoding_quota = geocoding_quota
|
||||||
|
self._soft_geocoding_limit = soft_geocoding_limit
|
||||||
|
self._period_end_date = period_end_date
|
||||||
|
self._cost_per_hit = cost_per_hit
|
||||||
|
self._log_path = log_path
|
||||||
|
self._mapzen_api_key = mapzen_api_key
|
||||||
|
self._username = username
|
||||||
|
self._organization = organization
|
||||||
|
|
||||||
|
# Kind of generic properties. Note which ones are for actually running the
|
||||||
|
# service and which ones are needed for quota stuff.
|
||||||
|
@property
|
||||||
|
def service_type(self):
|
||||||
|
return 'geocoder_mapzen'
|
||||||
|
|
||||||
|
@property
|
||||||
|
def provider(self):
|
||||||
|
return 'mapzen'
|
||||||
|
|
||||||
|
@property
|
||||||
|
def is_high_resolution(self):
|
||||||
|
return True
|
||||||
|
|
||||||
|
@property
|
||||||
|
def geocoding_quota(self):
|
||||||
|
return self._geocoding_quota
|
||||||
|
|
||||||
|
@property
|
||||||
|
def soft_geocoding_limit(self):
|
||||||
|
return self._soft_geocoding_limit
|
||||||
|
|
||||||
|
@property
|
||||||
|
def period_end_date(self):
|
||||||
|
return self._period_end_date
|
||||||
|
|
||||||
|
@property
|
||||||
|
def cost_per_hit(self):
|
||||||
|
return self._cost_per_hit
|
||||||
|
|
||||||
|
# Server config, TODO: locate where this is actually used
|
||||||
|
@property
|
||||||
|
def log_path(self):
|
||||||
|
return self._log_path
|
||||||
|
|
||||||
|
# This is actually the specific one to run requests against the remote endpoitn
|
||||||
|
@property
|
||||||
|
def mapzen_api_key(self):
|
||||||
|
return self._mapzen_api_key
|
||||||
|
|
||||||
|
# These two identify the user
|
||||||
|
@property
|
||||||
|
def username(self):
|
||||||
|
return self._username
|
||||||
|
@property
|
||||||
|
def organization(self):
|
||||||
|
return self._organization
|
||||||
|
|
||||||
|
# TODO: for BW compat, remove
|
||||||
|
@property
|
||||||
|
def google_geocoder(self):
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
class MapzenGeocoderConfigBuilder(object):
|
||||||
|
|
||||||
|
def __init__(self, server_conf, user_conf, org_conf, username, orgname):
|
||||||
|
self._server_conf = server_conf
|
||||||
|
self._user_conf = user_conf
|
||||||
|
self._org_conf = org_conf
|
||||||
|
self._username = username
|
||||||
|
self._orgname = orgname
|
||||||
|
|
||||||
|
|
||||||
|
def get(self):
|
||||||
|
mapzen_server_conf = self._server_conf.get('mapzen_conf')
|
||||||
|
geocoding_quota = mapzen_server_conf['geocoder']['monthly_quota']
|
||||||
|
mapzen_api_key = mapzen_server_conf['geocoder']['api_key']
|
||||||
|
|
||||||
|
soft_geocoding_limit = self._user_conf.get('soft_geocoding_limit')
|
||||||
|
|
||||||
|
cost_per_hit=0
|
||||||
|
|
||||||
|
period_end_date_str = self._org_conf.get('period_end_date') or self._user_conf.get('period_end_date')
|
||||||
|
period_end_date = date_parse(period_end_date_str)
|
||||||
|
|
||||||
|
logger_conf = self._server_conf.get('logger_conf')
|
||||||
|
log_path = logger_conf['geocoder_log_path']
|
||||||
|
|
||||||
|
return MapzenGeocoderConfig(geocoding_quota,
|
||||||
|
soft_geocoding_limit,
|
||||||
|
period_end_date,
|
||||||
|
cost_per_hit,
|
||||||
|
log_path,
|
||||||
|
mapzen_api_key,
|
||||||
|
self._username,
|
||||||
|
self._orgname)
|
||||||
@@ -0,0 +1,12 @@
|
|||||||
|
from ..core.interfaces import ConfigBackendInterface
|
||||||
|
|
||||||
|
class InMemoryConfigStorage(ConfigBackendInterface):
|
||||||
|
|
||||||
|
def __init__(self, config_hash={}):
|
||||||
|
self._config_hash = config_hash
|
||||||
|
|
||||||
|
def get(self, key):
|
||||||
|
try:
|
||||||
|
return self._config_hash[key]
|
||||||
|
except KeyError:
|
||||||
|
return None
|
||||||
@@ -0,0 +1,6 @@
|
|||||||
|
from ..core.interfaces import ConfigBackendInterface
|
||||||
|
|
||||||
|
class NullConfigStorage(ConfigBackendInterface):
|
||||||
|
|
||||||
|
def get(self, key):
|
||||||
|
return None
|
||||||
@@ -0,0 +1,36 @@
|
|||||||
|
from ..core.interfaces import ConfigBackendInterface
|
||||||
|
from null_config import NullConfigStorage
|
||||||
|
|
||||||
|
|
||||||
|
class RedisConfigStorage(ConfigBackendInterface):
|
||||||
|
|
||||||
|
def __init__(self, connection, config_key):
|
||||||
|
self._connection = connection
|
||||||
|
self._config_key = config_key
|
||||||
|
self._data = None
|
||||||
|
|
||||||
|
def get(self, key):
|
||||||
|
if not self._data:
|
||||||
|
self._data = self._connection.hgetall(self._config_key)
|
||||||
|
return self._data[key]
|
||||||
|
|
||||||
|
|
||||||
|
class RedisUserConfigStorageBuilder(object):
|
||||||
|
def __init__(self, redis_connection, username):
|
||||||
|
self._redis_connection = redis_connection
|
||||||
|
self._username = username
|
||||||
|
|
||||||
|
def get(self):
|
||||||
|
return RedisConfigStorage(self._redis_connection, 'rails:users:{0}'.format(self._username))
|
||||||
|
|
||||||
|
|
||||||
|
class RedisOrgConfigStorageBuilder(object):
|
||||||
|
def __init__(self, redis_connection, orgname):
|
||||||
|
self._redis_connection = redis_connection
|
||||||
|
self._orgname = orgname
|
||||||
|
|
||||||
|
def get(self):
|
||||||
|
if self._orgname:
|
||||||
|
return RedisConfigStorage(self._redis_connection, 'rails:orgs:{0}'.format(self._orgname))
|
||||||
|
else:
|
||||||
|
return NullConfigStorage()
|
||||||
@@ -0,0 +1,22 @@
|
|||||||
|
from redis.sentinel import Sentinel
|
||||||
|
from redis import StrictRedis
|
||||||
|
|
||||||
|
class RedisConnectionBuilder():
    """Builds a Redis client from a RedisConnectionConfig-like object.

    Uses Sentinel discovery when a sentinel master id is configured,
    otherwise connects directly with StrictRedis.
    """

    def __init__(self, connection_config):
        self._config = connection_config

    def get(self):
        """Return a ready-to-use Redis client."""
        config = self._config
        if config.sentinel_id:
            # Ask the sentinel for the current master of the configured service.
            sentinel = Sentinel([(config.host, config.port)],
                                socket_timeout=config.timeout)
            return sentinel.master_for(config.sentinel_id,
                                       socket_timeout=config.timeout,
                                       db=config.db,
                                       retry_on_timeout=True)
        return StrictRedis(host=config.host, port=config.port,
                           db=config.db, retry_on_timeout=True,
                           socket_timeout=config.timeout)
|
||||||
@@ -0,0 +1,80 @@
|
|||||||
|
from cartodb_services.refactor.config.exceptions import ConfigException
|
||||||
|
from abc import ABCMeta, abstractmethod
|
||||||
|
|
||||||
|
|
||||||
|
class RedisConnectionConfig(object):
    """Value object holding the settings for one Redis connection."""

    def __init__(self, host, port, timeout, db, sentinel_id):
        self._host = host
        self._port = port
        self._timeout = timeout
        self._db = db
        self._sentinel_id = sentinel_id

    @property
    def host(self):
        """Redis (or sentinel) host name."""
        return self._host

    @property
    def port(self):
        """Redis (or sentinel) TCP port."""
        return self._port

    @property
    def timeout(self):
        """Socket timeout in seconds."""
        return self._timeout

    @property
    def db(self):
        """Redis database number."""
        return self._db

    @property
    def sentinel_id(self):
        """Sentinel master id, or None when connecting directly."""
        return self._sentinel_id
|
||||||
|
|
||||||
|
|
||||||
|
class RedisConnectionConfigBuilder(object):
    """Abstract builder turning a server config entry into a
    RedisConnectionConfig. Subclasses fix the config key to read."""

    __metaclass__ = ABCMeta

    DEFAULT_USER_DB = 5
    DEFAULT_TIMEOUT = 1.5  # seconds

    @abstractmethod
    def __init__(self, server_config_storage, config_key):
        self._server_config_storage = server_config_storage
        self._config_key = config_key

    def get(self):
        """Build the connection config from the stored entry.

        Raises ConfigException when the entry is missing altogether.
        """
        conf = self._server_config_storage.get(self._config_key)
        if conf is None:
            raise ConfigException("There is no redis configuration defined")

        # Absent or falsy (None/0/empty) optional entries fall back to the
        # class defaults; the sentinel id simply defaults to None.
        timeout = conf.get('timeout') or self.DEFAULT_TIMEOUT
        db = conf.get('redis_db') or self.DEFAULT_USER_DB
        return RedisConnectionConfig(conf['redis_host'],
                                     conf['redis_port'],
                                     timeout,
                                     db,
                                     conf.get('sentinel_master_id'))
|
||||||
|
|
||||||
|
|
||||||
|
class RedisMetadataConnectionConfigBuilder(RedisConnectionConfigBuilder):
    """Connection config builder for the metadata Redis instance."""

    def __init__(self, server_config_storage):
        # Metadata settings live under this fixed server config key.
        super(RedisMetadataConnectionConfigBuilder, self).__init__(
            server_config_storage, 'redis_metadata_config')
|
||||||
|
|
||||||
|
|
||||||
|
class RedisMetricsConnectionConfigBuilder(RedisConnectionConfigBuilder):
    """Connection config builder for the metrics Redis instance."""

    def __init__(self, server_config_storage):
        # Metrics settings live under this fixed server config key.
        super(RedisMetricsConnectionConfigBuilder, self).__init__(
            server_config_storage, 'redis_metrics_config')
|
||||||
@@ -0,0 +1,14 @@
|
|||||||
|
import json
|
||||||
|
import cartodb_services
|
||||||
|
from ..core.interfaces import ConfigBackendInterface
|
||||||
|
|
||||||
|
class InDbServerConfigStorage(ConfigBackendInterface):
    """Config backend that reads entries through an in-database function."""

    def get(self, key):
        """Fetch and JSON-decode the server config entry ``key``.

        Returns None when the entry is empty or missing.
        """
        # NOTE(review): key is interpolated straight into the SQL text;
        # callers must only pass trusted, internal config names.
        sql = "SELECT cdb_dataservices_server.cdb_conf_getconf('{0}') as conf".format(key)
        rows = cartodb_services.plpy.execute(sql, 1)
        raw = rows[0]['conf']
        return json.loads(raw) if raw else None
|
||||||
@@ -0,0 +1,52 @@
|
|||||||
|
from cartodb_services.refactor.config.exceptions import ConfigException
|
||||||
|
|
||||||
|
class LoggerConfig(object):
    """Value object with the settings needed to set up a Logger."""

    def __init__(self, server_environment, rollbar_api_key, log_file_path, min_log_level):
        self._server_environment = server_environment
        self._rollbar_api_key = rollbar_api_key
        self._log_file_path = log_file_path
        self._min_log_level = min_log_level

    @property
    def environment(self):
        """Server environment name (e.g. 'production')."""
        return self._server_environment

    @property
    def rollbar_api_key(self):
        """Rollbar API key, or None when not configured."""
        return self._rollbar_api_key

    @property
    def log_file_path(self):
        """Log file path, or None when not configured."""
        return self._log_file_path

    @property
    def min_log_level(self):
        """Minimum level name that should actually be logged."""
        return self._min_log_level
|
||||||
|
|
||||||
|
# TODO this needs tests
|
||||||
|
class LoggerConfigBuilder(object):
    """Builds a LoggerConfig from the 'logger_conf' server config entry."""

    # Fallback level when the stored config does not set one.
    DEFAULT_MIN_LOG_LEVEL = 'warning'

    def __init__(self, environment, server_config_storage):
        self._server_environment = environment
        self._server_config_storage = server_config_storage

    def get(self):
        """Build the logger configuration.

        Raises ConfigException when 'logger_conf' is missing or empty.
        """
        logger_conf = self._server_config_storage.get('logger_conf')
        if not logger_conf:
            raise ConfigException('Logger configuration missing')

        rollbar_api_key = self._get_value_or_none(logger_conf, 'rollbar_api_key')
        log_file_path = self._get_value_or_none(logger_conf, 'log_file_path')
        min_log_level = self._get_value_or_none(logger_conf, 'min_log_level') \
            or self.DEFAULT_MIN_LOG_LEVEL

        return LoggerConfig(str(self._server_environment), rollbar_api_key,
                            log_file_path, min_log_level)

    def _get_value_or_none(self, logger_conf, key):
        """Return logger_conf[key], or None when the key is absent."""
        # dict.get already implements lookup-with-default; no need for a
        # manual 'key in' check.
        return logger_conf.get(key)
|
||||||
@@ -0,0 +1,8 @@
|
|||||||
|
class RedisConnectionMock(object):
    """No-op stand-in for the Redis sorted-set calls used elsewhere."""

    def zscore(self, redis_prefix, day):
        """Pretend lookup of a sorted-set score; always yields None."""
        return None

    def zincrby(self, redis_prefix, day, amount):
        """Pretend increment of a sorted-set score; discards the update."""
        return None
|
||||||
@@ -1,4 +1,3 @@
|
|||||||
import plpy
|
|
||||||
import rollbar
|
import rollbar
|
||||||
import logging
|
import logging
|
||||||
import json
|
import json
|
||||||
@@ -6,7 +5,14 @@ import traceback
|
|||||||
import sys
|
import sys
|
||||||
# Monkey patch because plpython sys module doesn't have argv and rollbar
|
# Monkey patch because plpython sys module doesn't have argv and rollbar
|
||||||
# package use it
|
# package use it
|
||||||
sys.__dict__['argv'] = []
|
if 'argv' not in sys.__dict__:
|
||||||
|
sys.__dict__['argv'] = []
|
||||||
|
|
||||||
|
# Only can be imported when is called from PLPython
|
||||||
|
try:
|
||||||
|
import plpy
|
||||||
|
except ImportError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
class Logger:
|
class Logger:
|
||||||
@@ -30,30 +36,28 @@ class Logger:
|
|||||||
return
|
return
|
||||||
self._send_to_rollbar('debug', text, exception, data)
|
self._send_to_rollbar('debug', text, exception, data)
|
||||||
self._send_to_log_file('debug', text, exception, data)
|
self._send_to_log_file('debug', text, exception, data)
|
||||||
plpy.debug(text)
|
self._send_to_plpy('debug', text)
|
||||||
|
|
||||||
def info(self, text, exception=None, data={}):
|
def info(self, text, exception=None, data={}):
|
||||||
if not self._check_min_level('info'):
|
if not self._check_min_level('info'):
|
||||||
return
|
return
|
||||||
self._send_to_rollbar('info', text, exception, data)
|
self._send_to_rollbar('info', text, exception, data)
|
||||||
self._send_to_log_file('info', text, exception, data)
|
self._send_to_log_file('info', text, exception, data)
|
||||||
plpy.info(text)
|
self._send_to_plpy('info', text)
|
||||||
|
|
||||||
def warning(self, text, exception=None, data={}):
|
def warning(self, text, exception=None, data={}):
|
||||||
if not self._check_min_level('warning'):
|
if not self._check_min_level('warning'):
|
||||||
return
|
return
|
||||||
self._send_to_rollbar('warning', text, exception, data)
|
self._send_to_rollbar('warning', text, exception, data)
|
||||||
self._send_to_log_file('warning', text, exception, data)
|
self._send_to_log_file('warning', text, exception, data)
|
||||||
plpy.warning(text)
|
self._send_to_plpy('warning', text)
|
||||||
|
|
||||||
def error(self, text, exception=None, data={}):
|
def error(self, text, exception=None, data={}):
|
||||||
if not self._check_min_level('error'):
|
if not self._check_min_level('error'):
|
||||||
return
|
return
|
||||||
self._send_to_rollbar('error', text, exception, data)
|
self._send_to_rollbar('error', text, exception, data)
|
||||||
self._send_to_log_file('error', text, exception, data)
|
self._send_to_log_file('error', text, exception, data)
|
||||||
# Plpy.error and fatal raises exceptions and we only want to log an
|
self._send_to_plpy('error', text)
|
||||||
# error, exceptions should be raise explicitly
|
|
||||||
plpy.warning(text)
|
|
||||||
|
|
||||||
def _check_min_level(self, level):
|
def _check_min_level(self, level):
|
||||||
return True if self.LEVELS[level] >= self._min_level else False
|
return True if self.LEVELS[level] >= self._min_level else False
|
||||||
@@ -82,6 +86,19 @@ class Logger:
|
|||||||
elif level == 'error':
|
elif level == 'error':
|
||||||
self._file_logger.error(text, extra=extra_data)
|
self._file_logger.error(text, extra=extra_data)
|
||||||
|
|
||||||
|
def _send_to_plpy(self, level, text):
    """Forward a log message to PostgreSQL through plpy, when available."""
    if not self._check_plpy():
        return
    # plpy.error/plpy.fatal raise exceptions and we only want to log, so
    # 'error' is downgraded to a warning; exceptions should be raised
    # explicitly by the caller. Unknown levels are silently ignored.
    emit = {'debug': plpy.debug,
            'info': plpy.info,
            'warning': plpy.warning,
            'error': plpy.warning}.get(level)
    if emit is not None:
        emit(text)
|
||||||
|
|
||||||
def _parse_log_extra_data(self, exception, data):
|
def _parse_log_extra_data(self, exception, data):
|
||||||
extra_data = {}
|
extra_data = {}
|
||||||
if exception:
|
if exception:
|
||||||
@@ -118,6 +135,13 @@ class Logger:
|
|||||||
def _log_file_activated(self):
|
def _log_file_activated(self):
|
||||||
return True if self._config.log_file_path else False
|
return True if self._config.log_file_path else False
|
||||||
|
|
||||||
|
def _check_plpy(self):
|
||||||
|
try:
|
||||||
|
module = sys.modules['plpy']
|
||||||
|
return True
|
||||||
|
except KeyError:
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
class ConfigException(Exception):
|
class ConfigException(Exception):
|
||||||
pass
|
pass
|
||||||
|
|||||||
@@ -10,7 +10,7 @@ from setuptools import setup, find_packages
|
|||||||
setup(
|
setup(
|
||||||
name='cartodb_services',
|
name='cartodb_services',
|
||||||
|
|
||||||
version='0.7.4.2',
|
version='0.9.1',
|
||||||
|
|
||||||
description='CartoDB Services API Python Library',
|
description='CartoDB Services API Python Library',
|
||||||
|
|
||||||
|
|||||||
@@ -0,0 +1,47 @@
|
|||||||
|
from unittest import TestCase
|
||||||
|
from cartodb_services.refactor.core.environment import *
|
||||||
|
from nose.tools import raises
|
||||||
|
from cartodb_services.refactor.storage.mem_config import InMemoryConfigStorage
|
||||||
|
|
||||||
|
class TestServerEnvironment(TestCase):
    """Tests for the ServerEnvironment value object."""

    def test_can_be_a_valid_one(self):
        # Constructing with any of the known environments must not raise.
        env_dev = ServerEnvironment('development')
        env_staging = ServerEnvironment('staging')
        env_prod = ServerEnvironment('production')
        env_onpremise = ServerEnvironment('onpremise')

    @raises(AssertionError)
    def test_cannot_be_a_non_valid_one(self):
        env_whatever = ServerEnvironment('whatever')

    def test_is_on_premise_returns_true_when_onpremise(self):
        assert ServerEnvironment('onpremise').is_onpremise == True

    def test_is_on_premise_returns_false_when_any_other(self):
        # Renamed from *_returns_true_when_any_other: the assertions below
        # check for False.
        assert ServerEnvironment('development').is_onpremise == False
        assert ServerEnvironment('staging').is_onpremise == False
        assert ServerEnvironment('production').is_onpremise == False

    def test_equality(self):
        assert ServerEnvironment('development') == ServerEnvironment('development')
        # '!=' instead of the Python-2-only '<>' operator (removed in Python 3).
        assert ServerEnvironment('development') != ServerEnvironment('onpremise')
|
||||||
|
|
||||||
|
|
||||||
|
class TestServerEnvironmentBuilder(TestCase):
    """Tests for ServerEnvironmentBuilder."""

    def test_returns_env_according_to_configuration(self):
        storage = InMemoryConfigStorage({
            'server_conf': {
                'environment': 'staging'
            }
        })
        server_env = ServerEnvironmentBuilder(storage).get()
        assert server_env.is_staging == True

    def test_returns_default_when_no_server_conf(self):
        # Without a 'server_conf' entry the builder falls back to the
        # default environment.
        storage = InMemoryConfigStorage({})
        server_env = ServerEnvironmentBuilder(storage).get()

        assert server_env.is_development == True
        assert str(server_env) == ServerEnvironmentBuilder.DEFAULT_ENVIRONMENT
|
||||||
@@ -0,0 +1,12 @@
|
|||||||
|
from unittest import TestCase
|
||||||
|
from cartodb_services.refactor.storage.mem_config import InMemoryConfigStorage
|
||||||
|
|
||||||
|
class TestInMemoryConfigStorage(TestCase):
    """Tests for the in-memory config backend."""

    def test_can_provide_values_from_hash(self):
        server_config = InMemoryConfigStorage({'any_key': 'any_value'})
        assert server_config.get('any_key') == 'any_value'

    def test_gets_none_if_cannot_retrieve_key(self):
        server_config = InMemoryConfigStorage()
        # 'is None' rather than '== None' (PEP 8 identity comparison).
        assert server_config.get('any_non_existing_key') is None
|
||||||
@@ -0,0 +1,14 @@
|
|||||||
|
from unittest import TestCase
|
||||||
|
from cartodb_services.refactor.storage.null_config import NullConfigStorage
|
||||||
|
from cartodb_services.refactor.core.interfaces import ConfigBackendInterface
|
||||||
|
|
||||||
|
|
||||||
|
class TestNullConfigStorage(TestCase):
    """Tests for the null (always-empty) config backend."""

    def test_is_a_config_backend(self):
        storage = NullConfigStorage()
        assert isinstance(storage, ConfigBackendInterface)

    def test_returns_none_regardless_of_input(self):
        storage = NullConfigStorage()
        assert storage.get('whatever') is None
|
||||||
@@ -0,0 +1,77 @@
|
|||||||
|
from unittest import TestCase
|
||||||
|
from cartodb_services.refactor.storage.redis_config import *
|
||||||
|
from mockredis import MockRedis
|
||||||
|
from mock import Mock, MagicMock
|
||||||
|
from nose.tools import raises
|
||||||
|
|
||||||
|
|
||||||
|
class TestRedisConfigStorage(TestCase):
    """Tests for RedisConfigStorage, the Redis-hash-backed config reader."""

    CONFIG_HASH_KEY = 'mykey'

    def test_can_get_a_config_field(self):
        connection = MockRedis()
        connection.hset(self.CONFIG_HASH_KEY, 'field1', 42)
        redis_config = RedisConfigStorage(connection, self.CONFIG_HASH_KEY)

        value = redis_config.get('field1')
        assert type(value) == str # this is something to take into account, redis always returns strings
        assert value == '42'

    @raises(KeyError)
    def test_raises_an_exception_if_config_key_not_present(self):
        # The hash was never written: the lazy hgetall yields an empty
        # mapping and the field lookup raises KeyError.
        connection = MockRedis()
        redis_config = RedisConfigStorage(connection, self.CONFIG_HASH_KEY)
        redis_config.get('whatever_field')

    @raises(KeyError)
    def test_returns_nothing_if_field_not_present(self):
        # NOTE(review): despite the name, the expected behaviour asserted
        # here is a KeyError when the field is missing from an existing hash.
        connection = MockRedis()
        connection.hmset(self.CONFIG_HASH_KEY, {'field1': 42, 'field2': 43})
        redis_config = RedisConfigStorage(connection, self.CONFIG_HASH_KEY)
        redis_config.get('whatever_field')

    def test_it_reads_the_config_hash_just_once(self):
        # The whole hash must be fetched lazily and cached: two get()
        # calls, exactly one hgetall round-trip.
        connection = Mock()
        connection.hgetall = MagicMock(return_value={'field1': '42'})
        redis_config = RedisConfigStorage(connection, self.CONFIG_HASH_KEY)

        assert redis_config.get('field1') == '42'
        assert redis_config.get('field1') == '42'

        connection.hgetall.assert_called_once_with(self.CONFIG_HASH_KEY)
|
||||||
|
|
||||||
|
|
||||||
|
class TestRedisUserConfigStorageBuilder(TestCase):
    """Tests for RedisUserConfigStorageBuilder."""

    USERNAME = 'john'
    EXPECTED_REDIS_CONFIG_HASH_KEY = 'rails:users:john'

    def test_it_reads_the_correct_hash_key(self):
        connection = Mock()
        connection.hgetall = MagicMock(return_value={'an_user_config_field': 'nice'})

        # The builder must produce a storage bound to 'rails:users:<username>'.
        # (A dead, immediately-overwritten RedisConfigStorage assignment was
        # removed here.)
        redis_config = RedisUserConfigStorageBuilder(connection, self.USERNAME).get()
        assert redis_config.get('an_user_config_field') == 'nice'
        connection.hgetall.assert_called_once_with(self.EXPECTED_REDIS_CONFIG_HASH_KEY)
|
||||||
|
|
||||||
|
|
||||||
|
class TestRedisOrgConfigStorageBuilder(TestCase):
    """Tests for RedisOrgConfigStorageBuilder."""

    ORGNAME = 'smith'
    EXPECTED_REDIS_CONFIG_HASH_KEY = 'rails:orgs:smith'

    def test_it_reads_the_correct_hash_key(self):
        connection = Mock()
        connection.hgetall = MagicMock(return_value={'an_org_config_field': 'awesome'})

        # The builder must produce a storage bound to 'rails:orgs:<orgname>'.
        # (A dead, immediately-overwritten RedisConfigStorage assignment was
        # removed here.)
        redis_config = RedisOrgConfigStorageBuilder(connection, self.ORGNAME).get()
        assert redis_config.get('an_org_config_field') == 'awesome'
        connection.hgetall.assert_called_once_with(self.EXPECTED_REDIS_CONFIG_HASH_KEY)

    def test_it_returns_a_null_config_storage_if_theres_no_orgname(self):
        redis_config = RedisOrgConfigStorageBuilder(None, None).get()
        assert type(redis_config) == NullConfigStorage
        # 'is None' rather than '== None' (PEP 8 identity comparison).
        assert redis_config.get('whatever') is None
|
||||||
@@ -0,0 +1,115 @@
|
|||||||
|
from unittest import TestCase
|
||||||
|
from cartodb_services.refactor.storage.redis_connection_config import *
|
||||||
|
from cartodb_services.refactor.storage.mem_config import InMemoryConfigStorage
|
||||||
|
from cartodb_services.refactor.config.exceptions import ConfigException
|
||||||
|
|
||||||
|
class TestRedisConnectionConfig(TestCase):
    """Smoke test for the RedisConnectionConfig value object."""

    def test_config_holds_values(self):
        # Mostly for completeness: check constructor arguments round-trip
        # through the read-only properties.
        config = RedisConnectionConfig('myhost.com', 6379, 0.1, 5, None)

        assert config.host == 'myhost.com'
        assert config.port == 6379
        assert config.timeout == 0.1
        assert config.db == 5
        assert config.sentinel_id is None
|
||||||
|
|
||||||
|
|
||||||
|
class TestRedisConnectionConfigBuilder(TestCase):
    """Tests for the abstract RedisConnectionConfigBuilder."""

    def test_it_raises_exception_as_it_is_abstract(self):
        # Instantiating the abstract builder directly must fail.
        storage = InMemoryConfigStorage()
        self.assertRaises(TypeError, RedisConnectionConfigBuilder,
                          storage, 'whatever_key')
|
||||||
|
|
||||||
|
|
||||||
|
class TestRedisMetadataConnectionConfigBuilder(TestCase):
    """Tests for RedisMetadataConnectionConfigBuilder."""

    def _storage(self, timeout=0.2, db=3):
        """Build an in-memory server config with one metadata Redis entry.

        The three config tests only vary timeout/db, so the fixture is
        parameterized instead of repeated.
        """
        return InMemoryConfigStorage({
            'redis_metadata_config': {
                'redis_host': 'myhost.com',
                'redis_port': 6379,
                'timeout': timeout,
                'redis_db': db,
                'sentinel_master_id': None
            }
        })

    def test_it_raises_exception_if_config_is_missing(self):
        server_config_storage = InMemoryConfigStorage()
        config_builder = RedisMetadataConnectionConfigBuilder(server_config_storage)
        self.assertRaises(ConfigException, config_builder.get)

    def test_it_gets_a_valid_config_from_the_server_storage(self):
        config = RedisMetadataConnectionConfigBuilder(self._storage()).get()
        assert config.host == 'myhost.com'
        assert config.port == 6379
        assert config.timeout == 0.2
        assert config.db == 3
        assert config.sentinel_id is None

    def test_it_gets_a_default_timeout_if_none(self):
        config = RedisMetadataConnectionConfigBuilder(
            self._storage(timeout=None)).get()
        assert config.host == 'myhost.com'
        assert config.port == 6379
        assert config.timeout == RedisConnectionConfigBuilder.DEFAULT_TIMEOUT
        assert config.db == 3
        assert config.sentinel_id is None

    def test_it_gets_a_default_db_if_none(self):
        config = RedisMetadataConnectionConfigBuilder(
            self._storage(db=None)).get()
        assert config.host == 'myhost.com'
        assert config.port == 6379
        assert config.timeout == 0.2
        assert config.db == RedisConnectionConfigBuilder.DEFAULT_USER_DB
        assert config.sentinel_id is None
|
||||||
|
|
||||||
|
|
||||||
|
class TestRedisMetricsConnectionConfigBuilder(TestCase):
    """Tests for RedisMetricsConnectionConfigBuilder."""

    def test_it_gets_a_valid_config_from_the_server_storage(self):
        storage = InMemoryConfigStorage({
            'redis_metrics_config': {
                'redis_host': 'myhost.com',
                'redis_port': 6379,
                'timeout': 0.2,
                'redis_db': 3,
                'sentinel_master_id': 'some_master_id'
            }
        })
        config = RedisMetricsConnectionConfigBuilder(storage).get()

        assert config.host == 'myhost.com'
        assert config.port == 6379
        assert config.timeout == 0.2
        assert config.db == 3
        assert config.sentinel_id == 'some_master_id'

    def test_it_sets_absent_values_to_none_or_defaults(self):
        # Only host and port are given: timeout and db fall back to the
        # builder defaults, the sentinel id to None.
        storage = InMemoryConfigStorage({
            'redis_metrics_config': {
                'redis_host': 'myhost.com',
                'redis_port': 6379,
            }
        })
        config = RedisMetricsConnectionConfigBuilder(storage).get()

        assert config.host == 'myhost.com'
        assert config.port == 6379
        assert config.timeout == 1.5
        assert config.db == 5
        assert config.sentinel_id is None
|
||||||
@@ -0,0 +1,31 @@
|
|||||||
|
from unittest import TestCase
|
||||||
|
from mock import Mock, MagicMock
|
||||||
|
from nose.tools import raises
|
||||||
|
from cartodb_services.refactor.storage.server_config import *
|
||||||
|
import cartodb_services
|
||||||
|
|
||||||
|
class TestInDbServerConfigStorage(TestCase):
    """Tests for InDbServerConfigStorage, the plpy-backed config reader."""

    def setUp(self):
        # Install a mocked plpy into the cartodb_services globals so the
        # storage under test does not need a real PL/Python environment.
        self.plpy_mock = Mock()
        cartodb_services.init(self.plpy_mock, _GD={})

    def tearDown(self):
        # Undo the global init() so tests stay isolated from each other.
        cartodb_services._reset()

    def test_gets_configs_from_db(self):
        # The stored value is JSON-encoded ('"any_value"'), so get() must
        # decode it back to a plain string.
        self.plpy_mock.execute = MagicMock(return_value=[{'conf': '"any_value"'}])
        server_config = InDbServerConfigStorage()
        assert server_config.get('any_config') == 'any_value'
        self.plpy_mock.execute.assert_called_once_with("SELECT cdb_dataservices_server.cdb_conf_getconf('any_config') as conf", 1)

    def test_gets_none_if_cannot_retrieve_key(self):
        # A NULL 'conf' column means the key is absent: get() returns None.
        self.plpy_mock.execute = MagicMock(return_value=[{'conf': None}])
        server_config = InDbServerConfigStorage()
        assert server_config.get('any_non_existing_key') is None

    def test_deserializes_from_db_to_plain_dict(self):
        # JSON objects must be decoded into plain dicts.
        self.plpy_mock.execute = MagicMock(return_value=[{'conf': '{"environment": "testing"}'}])
        server_config = InDbServerConfigStorage()
        assert server_config.get('server_conf') == {'environment': 'testing'}
        self.plpy_mock.execute.assert_called_once_with("SELECT cdb_dataservices_server.cdb_conf_getconf('server_conf') as conf", 1)
|
||||||
@@ -6,7 +6,7 @@ import requests_mock
|
|||||||
from mock import Mock
|
from mock import Mock
|
||||||
|
|
||||||
from cartodb_services.mapzen import MapzenGeocoder
|
from cartodb_services.mapzen import MapzenGeocoder
|
||||||
from cartodb_services.mapzen.exceptions import MalformedResult
|
from cartodb_services.mapzen.exceptions import MalformedResult, TimeoutException
|
||||||
|
|
||||||
requests_mock.Mocker.TEST_PREFIX = 'test_'
|
requests_mock.Mocker.TEST_PREFIX = 'test_'
|
||||||
|
|
||||||
|
|||||||
33
server/lib/python/cartodb_services/test/test_qps.py
Normal file
33
server/lib/python/cartodb_services/test/test_qps.py
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
import test_helper
|
||||||
|
import requests
|
||||||
|
from unittest import TestCase
|
||||||
|
from nose.tools import assert_raises
|
||||||
|
from datetime import datetime, date
|
||||||
|
from cartodb_services.mapzen.qps import qps_retry
|
||||||
|
from cartodb_services.mapzen.exceptions import ServiceException, TimeoutException
|
||||||
|
import requests_mock
|
||||||
|
import mock
|
||||||
|
|
||||||
|
requests_mock.Mocker.TEST_PREFIX = 'test_'
|
||||||
|
|
||||||
|
@requests_mock.Mocker()
class TestQPS(TestCase):
    """Tests for the qps_retry decorator (queries-per-second throttling)."""

    QPS_ERROR_MESSAGE = "Queries per second exceeded: Queries exceeded (10 allowed)"

    def test_qps_timeout(self, req_mock):
        # A decorated method that always hits a 429 response must give up
        # with a TimeoutException once the (tiny) retry timeout elapses.
        class TestClass:
            @qps_retry(timeout=0.001, qps=100)
            def test(self):
                response = requests.get('http://localhost/test_qps')
                if response.status_code == 429:
                    raise ServiceException('Error 429', response)

        def _text_cb(request, context):
            # Always answer 429 so the decorator keeps retrying until the
            # timeout is exceeded.
            context.status_code = 429
            return self.QPS_ERROR_MESSAGE

        req_mock.register_uri('GET', 'http://localhost/test_qps',
                              text=_text_cb)
        with self.assertRaises(TimeoutException):
            c = TestClass()
            c.test()
|
||||||
Reference in New Issue
Block a user