Compare commits

..

50 Commits

Author SHA1 Message Date
Mario de Frutos
9a6f52d63b Merge pull request #517 from CartoDB/development
Release 0.33 server and 0.20 for python library
2018-08-27 12:19:07 +02:00
Mario de Frutos
48d82e025a Merge pull request #516 from CartoDB/remove_obs_snapshot_quota
Remove obs snapshot quota
2018-08-27 11:57:44 +02:00
Mario de Frutos
9398ec0524 Updated NEWS.md 2018-08-27 11:56:15 +02:00
Mario de Frutos
073c527a62 Update NEWS.md 2018-08-21 18:42:16 +02:00
Mario de Frutos
a8e96366a5 Bump for the python library version 2018-08-21 18:40:22 +02:00
Mario de Frutos
6cc3cda6e0 Remove obs_snapshot_config from python library 2018-08-21 18:28:55 +02:00
Mario de Frutos
0e95c5ff90 Now the snapshot functions use the obs_genral config 2018-08-21 18:25:12 +02:00
Mario de Frutos
fba933cc88 Remove obs_snapshot_config 2018-08-21 18:18:10 +02:00
Mario de Frutos
44da876b4c Version 0.33.0 initial commit 2018-08-21 18:15:29 +02:00
Alejandro Guirao Rodríguez
288e3a4077 Merge pull request #515 from CartoDB/development
carto-package.json
2018-08-20 10:35:32 +02:00
Juan Ignacio Sánchez Lara
821fc04d49 Merge pull request #514 from CartoDB/naming-fixes
Component change and versioning using more numbers
2018-08-20 10:17:17 +02:00
Alejandro Guirao Rodríguez
a563abb7ab Update carto-package.json 2018-08-17 10:58:56 +02:00
Alejandro Guirao Rodríguez
9c8647ebd4 Changes in component names and versions 2018-08-17 10:57:52 +02:00
Alejandro Guirao Rodríguez
61aaa0804f Changes in component names and versions 2018-08-17 10:56:07 +02:00
Alejandro Guirao Rodríguez
bd391f4bf4 Component change and versioning using more numbers 2018-08-17 10:54:24 +02:00
Juan Ignacio Sánchez Lara
2f4e3d6e05 Merge pull request #513 from CartoDB/development
Disable Mapbox Matrix API
2018-08-14 10:50:43 +02:00
Juan Ignacio Sánchez Lara
a025034d64 Merge pull request #512 from CartoDB/remove_mapbox_matrix_api_usage
Remove mapbox matrix api usage
2018-08-14 10:49:26 +02:00
Juan Ignacio Sánchez Lara
d9f647504a Disable Mapbox Matrix API usage at tests 2018-08-14 09:39:42 +02:00
Juan Ignacio Sánchez Lara
ed2e87f4ca Fix test fixture 2018-08-14 09:27:59 +02:00
Juan Ignacio Sánchez Lara
f6791d6ec8 Merge pull request #511 from CartoDB/dataservices_dependencies_specification
Dataservices dependencies specification
2018-08-13 17:13:32 +02:00
Juan Ignacio Sánchez Lara
c1c671755c Final carto-package.json format 2018-08-13 16:28:47 +02:00
Juan Ignacio Sánchez Lara
a32e90ea8a carto-package.json proposal 2018-08-02 12:59:45 +02:00
Juan Ignacio Sánchez Lara
a6bff9b8d2 Merge pull request #509 from CartoDB/development
Version `0.19.1` of the Python library
2018-07-25 11:27:43 +02:00
Juan Ignacio Sánchez Lara
075f602a7f WELL_KNOWN_SHAPE and WELL_KNOWN_LENGTH fixture update 2018-07-25 11:22:31 +02:00
Juan Ignacio Sánchez Lara
e69849fb86 Merge pull request #508 from CartoDB/fix_batch_geocoding_error_accounting
Fix batch geocoding error accounting
2018-07-25 11:04:11 +02:00
Juan Ignacio Sánchez Lara
11ec6075c3 Python library version 0.19.1 2018-07-24 11:59:29 +02:00
Juan Ignacio Sánchez Lara
c6720bf689 Better debug message 2018-07-24 11:57:54 +02:00
Juan Ignacio Sánchez Lara
3524ee1e24 Handle postprocessing error 2018-07-24 11:31:05 +02:00
Juan Ignacio Sánchez Lara
80dcde2db0 Log Mapbox unknown status 2018-07-23 22:00:16 +02:00
Juan Ignacio Sánchez Lara
fa3d7db5f8 Fix Google geocoder error handling 2018-07-23 21:54:37 +02:00
Juan Ignacio Sánchez Lara
d060ab3d41 Empty vs missing count detail 2018-07-23 19:11:23 +02:00
Juan Ignacio Sánchez Lara
3a5360c96c Refactor and fix for actual searches type 2018-07-23 19:10:35 +02:00
Juan Ignacio Sánchez Lara
fc75f1afc8 Google batch geocoder error handling 2018-07-23 18:41:47 +02:00
Juan Ignacio Sánchez Lara
4be3aa88fd Constant extraction refactor 2018-07-23 18:30:33 +02:00
Juan Ignacio Sánchez Lara
8162bff204 Serial geocoding error handling 2018-07-23 18:27:02 +02:00
Juan Ignacio Sánchez Lara
1b31c089ce Global error handling for batched geocoding 2018-07-23 18:16:40 +02:00
Juan Ignacio Sánchez Lara
faf9b7237b Adjustments on street level fixtures 2018-07-23 17:36:06 +02:00
Juan Ignacio Sánchez Lara
5d2303e1de Log a failed one if any 2018-07-23 17:23:08 +02:00
Juan Ignacio Sánchez Lara
07f5be9207 TomTom error handling 2018-07-23 17:17:38 +02:00
Juan Ignacio Sánchez Lara
bcb34d1cea Adjustments on street level fixtures 2018-07-23 15:57:32 +02:00
Juan Ignacio Sánchez Lara
c5d9db61e6 Mapbox error handling 2018-07-23 15:48:32 +02:00
Juan Ignacio Sánchez Lara
1ff512839d Fixes empty results count 2018-07-23 13:01:46 +02:00
Juan Ignacio Sánchez Lara
9a1b1e2832 Error count 2018-07-23 12:18:26 +02:00
Juan Ignacio Sánchez Lara
1cebbe7af0 Missing warning mock and fix for debug 2018-07-23 12:16:48 +02:00
Juan Ignacio Sánchez Lara
2862c80025 Proper empty count on bulk geocoding 2018-07-23 11:53:02 +02:00
Juan Ignacio Sánchez Lara
abbaf83e97 run_street_point_geocoder tests 2018-07-23 11:42:51 +02:00
Juan Ignacio Sánchez Lara
cd5e6510a6 In case of general error, failed service use should be incremented by searches length 2018-07-23 09:30:09 +02:00
Juan Ignacio Sánchez Lara
fd097724f1 In case of general error, total service use should be incremented by searches length 2018-07-19 21:17:09 +02:00
Juan Ignacio Sánchez Lara
96fbf3080a Base run_street_point_geocoder test 2018-07-19 21:08:07 +02:00
Juan Ignacio Sánchez Lara
0d490bbb19 Extract EMPTY_RESPONSE 2018-07-19 20:52:59 +02:00
33 changed files with 4666 additions and 215 deletions


@@ -1,6 +1,12 @@
Aug 27th, 2018
==============
* Version `0.33.0` of the server, and `0.20.0` of the Python library.
* Remove the obs_snapshot quota; the snapshot functions now use the obs_general quota
Jul 19th, 2018
==============
* Version `0.25.0` of the client, `0.32.0` of the server, and `0.19.0` of the Python library.
* Version `0.25.0` of the client, `0.32.0` of the server, and `0.19.1` of the Python library.
* Support for batch street-level geocoding.
May 7th, 2018

client/carto-package.json

@@ -0,0 +1,13 @@
{
"name": "dataservices-api-client-extension",
"current_version": {
"requires": {
"postgresql": "^10.0.0",
"postgis": "^2.4.0.0",
"carto_postgresql_ext": "^0.23.0"
},
"works_with": {
"dataservices-api-server-extension": "^0.32.0"
}
}
}
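
The new carto-package.json files describe which component versions are expected to work together using caret-style ranges. As a rough illustration only (the tooling that actually consumes these files is not part of this diff, and the range check below is a simplified stand-in, not CARTO's resolver), such an entry could be read and checked like this:

import json

def satisfies_caret(candidate, requirement):
    # Simplified caret check: same leading component and candidate not older
    # than the requirement. Real semver handling is more involved.
    req = [int(p) for p in requirement.lstrip('^').split('.')]
    ver = [int(p) for p in candidate.split('.')]
    return ver[0] == req[0] and ver >= req

with open('client/carto-package.json') as f:
    pkg = json.load(f)

works_with = pkg['current_version']['works_with']
print(satisfies_caret('0.32.0', works_with['dataservices-api-server-extension']))  # True
print(satisfies_caret('0.31.0', works_with['dataservices-api-server-extension']))  # False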


@@ -0,0 +1,14 @@
{
"name": "dataservices-api-server-extension",
"current_version": {
"requires": {
"postgresql": "^10.0.0",
"postgis": "^2.4.0.0",
"carto_postgresql_ext": "^0.23.0"
},
"works_with": {
"dataservices-api-server-python-lib": "^0.19.1",
"observatory-server-extension": "^1.9.0"
}
}
}


@@ -0,0 +1,204 @@
--DO NOT MODIFY THIS FILE, IT IS GENERATED AUTOMATICALLY FROM SOURCES
-- Complain if script is sourced in psql, rather than via CREATE EXTENSION
\echo Use "ALTER EXTENSION cdb_dataservices_server UPDATE TO '0.33.0'" to load this file. \quit
-- HERE goes your code to upgrade/downgrade
DROP FUNCTION IF EXISTS cdb_dataservices_server._get_obs_snapshot_config;
CREATE OR REPLACE FUNCTION cdb_dataservices_server._obs_server_conn_str(
username TEXT,
orgname TEXT)
RETURNS text AS $$
plpy.execute("SELECT cdb_dataservices_server._connect_to_redis('{0}')".format(username))
redis_conn = GD["redis_connection_{0}".format(username)]['redis_metrics_connection']
plpy.execute("SELECT cdb_dataservices_server._get_obs_config({0}, {1})".format(plpy.quote_nullable(username), plpy.quote_nullable(orgname)))
user_obs_config = GD["user_obs_config_{0}".format(username)]
return user_obs_config.connection_str
$$ LANGUAGE plpythonu STABLE PARALLEL RESTRICTED;
CREATE OR REPLACE FUNCTION cdb_dataservices_server.obs_get_demographic_snapshot(
username TEXT,
orgname TEXT,
geom geometry(Geometry, 4326),
time_span TEXT DEFAULT NULL,
geometry_level TEXT DEFAULT NULL)
RETURNS json AS $$
from cartodb_services.metrics import metrics
from cartodb_services.metrics import QuotaService
from cartodb_services.tools import Logger,LoggerConfig
import json
plpy.execute("SELECT cdb_dataservices_server._connect_to_redis('{0}')".format(username))
redis_conn = GD["redis_connection_{0}".format(username)]['redis_metrics_connection']
plpy.execute("SELECT cdb_dataservices_server._get_obs_config({0}, {1})".format(plpy.quote_nullable(username), plpy.quote_nullable(orgname)))
user_obs_config = GD["user_obs_config_{0}".format(username)]
plpy.execute("SELECT cdb_dataservices_server._get_logger_config()")
logger_config = GD["logger_config"]
logger = Logger(logger_config)
quota_service = QuotaService(user_obs_config, redis_conn)
if not quota_service.check_user_quota():
raise Exception('You have reached the limit of your quota')
with metrics('obs_getdemographicsnapshot', user_obs_config, logger):
try:
obs_plan = plpy.prepare("SELECT cdb_dataservices_server._OBS_GetDemographicSnapshotJSON($1, $2, $3, $4, $5) as snapshot;", ["text", "text", "geometry(Geometry, 4326)", "text", "text"])
result = plpy.execute(obs_plan, [username, orgname, geom, time_span, geometry_level])
if result:
quota_service.increment_success_service_use()
return result[0]['snapshot']
else:
quota_service.increment_empty_service_use()
return None
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to obs_get_demographic_snapshot', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to obs_get_demographic_snapshot')
finally:
quota_service.increment_total_service_use()
$$ LANGUAGE plpythonu STABLE PARALLEL RESTRICTED;
CREATE OR REPLACE FUNCTION cdb_dataservices_server.OBS_GetDemographicSnapshot(
username TEXT,
orgname TEXT,
geom geometry(Geometry, 4326),
time_span TEXT DEFAULT NULL,
geometry_level TEXT DEFAULT NULL)
RETURNS SETOF JSON AS $$
from cartodb_services.metrics import metrics
from cartodb_services.metrics import QuotaService
from cartodb_services.tools import Logger,LoggerConfig
plpy.execute("SELECT cdb_dataservices_server._connect_to_redis('{0}')".format(username))
redis_conn = GD["redis_connection_{0}".format(username)]['redis_metrics_connection']
plpy.execute("SELECT cdb_dataservices_server._get_obs_config({0}, {1})".format(plpy.quote_nullable(username), plpy.quote_nullable(orgname)))
user_obs_config = GD["user_obs_config_{0}".format(username)]
plpy.execute("SELECT cdb_dataservices_server._get_logger_config()")
logger_config = GD["logger_config"]
logger = Logger(logger_config)
quota_service = QuotaService(user_obs_config, redis_conn)
if not quota_service.check_user_quota():
raise Exception('You have reached the limit of your quota')
with metrics('obs_getdemographicsnapshot', user_obs_config, logger):
try:
obs_plan = plpy.prepare("SELECT cdb_dataservices_server._OBS_GetDemographicSnapshot($1, $2, $3, $4, $5) as snapshot;", ["text", "text", "geometry(Geometry, 4326)", "text", "text"])
result = plpy.execute(obs_plan, [username, orgname, geom, time_span, geometry_level])
if result:
resp = []
for element in result:
value = element['snapshot']
resp.append(value)
quota_service.increment_success_service_use()
return resp
else:
quota_service.increment_empty_service_use()
return []
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to obs_get_demographic_snapshot', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to obs_get_demographic_snapshot')
finally:
quota_service.increment_total_service_use()
$$ LANGUAGE plpythonu STABLE PARALLEL RESTRICTED;
CREATE OR REPLACE FUNCTION cdb_dataservices_server.obs_get_segment_snapshot(
username TEXT,
orgname TEXT,
geom geometry(Geometry, 4326),
geometry_level TEXT DEFAULT NULL)
RETURNS json AS $$
from cartodb_services.metrics import metrics
from cartodb_services.metrics import QuotaService
from cartodb_services.tools import Logger,LoggerConfig
import json
plpy.execute("SELECT cdb_dataservices_server._connect_to_redis('{0}')".format(username))
redis_conn = GD["redis_connection_{0}".format(username)]['redis_metrics_connection']
plpy.execute("SELECT cdb_dataservices_server._get_obs_config({0}, {1})".format(plpy.quote_nullable(username), plpy.quote_nullable(orgname)))
user_obs_config = GD["user_obs_config_{0}".format(username)]
plpy.execute("SELECT cdb_dataservices_server._get_logger_config()")
logger_config = GD["logger_config"]
logger = Logger(logger_config)
quota_service = QuotaService(user_obs_config, redis_conn)
if not quota_service.check_user_quota():
raise Exception('You have reached the limit of your quota')
with metrics('obs_getsegmentsnapshot', user_obs_config, logger):
try:
obs_plan = plpy.prepare("SELECT cdb_dataservices_server._OBS_GetSegmentSnapshotJSON($1, $2, $3, $4) as snapshot;", ["text", "text", "geometry(Geometry, 4326)", "text"])
result = plpy.execute(obs_plan, [username, orgname, geom, geometry_level])
if result:
quota_service.increment_success_service_use()
return result[0]['snapshot']
else:
quota_service.increment_empty_service_use()
return None
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to obs_get_segment_snapshot', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to obs_get_segment_snapshot')
finally:
quota_service.increment_total_service_use()
$$ LANGUAGE plpythonu STABLE PARALLEL RESTRICTED;
CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_GetSegmentSnapshot(
username TEXT,
orgname TEXT,
geom geometry(Geometry, 4326),
geometry_level TEXT DEFAULT NULL)
RETURNS SETOF json AS $$
CONNECT cdb_dataservices_server._obs_server_conn_str(username, orgname);
SELECT * FROM cdb_observatory.OBS_GetSegmentSnapshot(geom, geometry_level);
$$ LANGUAGE plproxy VOLATILE PARALLEL UNSAFE;
CREATE OR REPLACE FUNCTION cdb_dataservices_server.OBS_GetSegmentSnapshot(
username TEXT,
orgname TEXT,
geom geometry(Geometry, 4326),
geometry_level TEXT DEFAULT NULL)
RETURNS SETOF JSON AS $$
from cartodb_services.metrics import metrics
from cartodb_services.metrics import QuotaService
from cartodb_services.tools import Logger,LoggerConfig
plpy.execute("SELECT cdb_dataservices_server._connect_to_redis('{0}')".format(username))
redis_conn = GD["redis_connection_{0}".format(username)]['redis_metrics_connection']
plpy.execute("SELECT cdb_dataservices_server._get_obs_config({0}, {1})".format(plpy.quote_nullable(username), plpy.quote_nullable(orgname)))
user_obs_config = GD["user_obs_config_{0}".format(username)]
plpy.execute("SELECT cdb_dataservices_server._get_logger_config()")
logger_config = GD["logger_config"]
logger = Logger(logger_config)
quota_service = QuotaService(user_obs_config, redis_conn)
if not quota_service.check_user_quota():
raise Exception('You have reached the limit of your quota')
with metrics('obs_getsegmentsnapshot', user_obs_config, logger):
try:
obs_plan = plpy.prepare("SELECT * FROM cdb_dataservices_server._OBS_GetSegmentSnapshot($1, $2, $3, $4) as snapshot;", ["text", "text", "geometry(Geometry, 4326)", "text"])
result = plpy.execute(obs_plan, [username, orgname, geom, geometry_level])
if result:
resp = []
for element in result:
value = element['snapshot']
resp.append(value)
quota_service.increment_success_service_use()
return resp
else:
quota_service.increment_empty_service_use()
return []
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to OBS_GetSegmentSnapshot', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to OBS_GetSegmentSnapshot')
finally:
quota_service.increment_total_service_use()
$$ LANGUAGE plpythonu STABLE PARALLEL RESTRICTED;
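
Every snapshot function defined above follows the same quota-accounting pattern. A minimal plain-Python sketch of that pattern (outside plpythonu; quota_service, logger and do_request stand in for the real QuotaService, Logger and plpy call):

def call_with_quota_accounting(quota_service, logger, do_request):
    # Reject the call up front if the user is over quota.
    if not quota_service.check_user_quota():
        raise Exception('You have reached the limit of your quota')
    try:
        result = do_request()
        if result:
            quota_service.increment_success_service_use()
            return result
        else:
            quota_service.increment_empty_service_use()
            return None
    except BaseException:
        # Any failure counts against the failed bucket and is re-raised.
        quota_service.increment_failed_service_use()
        logger.error('Error calling the Data Observatory')
        raise
    finally:
        # The total counter is bumped no matter how the call ended.
        quota_service.increment_total_service_use()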


@@ -0,0 +1,216 @@
--DO NOT MODIFY THIS FILE, IT IS GENERATED AUTOMATICALLY FROM SOURCES
-- Complain if script is sourced in psql, rather than via CREATE EXTENSION
\echo Use "ALTER EXTENSION cdb_dataservices_server UPDATE TO '0.32.0'" to load this file. \quit
-- HERE goes your code to upgrade/downgrade
CREATE OR REPLACE FUNCTION cdb_dataservices_server._get_obs_snapshot_config(username text, orgname text)
RETURNS boolean AS $$
cache_key = "user_obs_snapshot_config_{0}".format(username)
if cache_key in GD:
return False
else:
from cartodb_services.metrics import ObservatorySnapshotConfig
plpy.execute("SELECT cdb_dataservices_server._connect_to_redis('{0}')".format(username))
redis_conn = GD["redis_connection_{0}".format(username)]['redis_metadata_connection']
obs_snapshot_config = ObservatorySnapshotConfig(redis_conn, plpy, username, orgname)
GD[cache_key] = obs_snapshot_config
return True
$$ LANGUAGE plpythonu SECURITY DEFINER STABLE PARALLEL RESTRICTED;
CREATE OR REPLACE FUNCTION cdb_dataservices_server._obs_server_conn_str(
username TEXT,
orgname TEXT)
RETURNS text AS $$
plpy.execute("SELECT cdb_dataservices_server._connect_to_redis('{0}')".format(username))
redis_conn = GD["redis_connection_{0}".format(username)]['redis_metrics_connection']
plpy.execute("SELECT cdb_dataservices_server._get_obs_snapshot_config({0}, {1})".format(plpy.quote_nullable(username), plpy.quote_nullable(orgname)))
user_obs_config = GD["user_obs_snapshot_config_{0}".format(username)]
return user_obs_config.connection_str
$$ LANGUAGE plpythonu STABLE PARALLEL RESTRICTED;
CREATE OR REPLACE FUNCTION cdb_dataservices_server.obs_get_demographic_snapshot(
username TEXT,
orgname TEXT,
geom geometry(Geometry, 4326),
time_span TEXT DEFAULT NULL,
geometry_level TEXT DEFAULT NULL)
RETURNS json AS $$
from cartodb_services.metrics import metrics
from cartodb_services.metrics import QuotaService
from cartodb_services.tools import Logger,LoggerConfig
import json
plpy.execute("SELECT cdb_dataservices_server._connect_to_redis('{0}')".format(username))
redis_conn = GD["redis_connection_{0}".format(username)]['redis_metrics_connection']
plpy.execute("SELECT cdb_dataservices_server._get_obs_snapshot_config({0}, {1})".format(plpy.quote_nullable(username), plpy.quote_nullable(orgname)))
user_obs_config = GD["user_obs_snapshot_config_{0}".format(username)]
plpy.execute("SELECT cdb_dataservices_server._get_logger_config()")
logger_config = GD["logger_config"]
logger = Logger(logger_config)
quota_service = QuotaService(user_obs_config, redis_conn)
if not quota_service.check_user_quota():
raise Exception('You have reached the limit of your quota')
with metrics('obs_getdemographicsnapshot', user_obs_config, logger):
try:
obs_plan = plpy.prepare("SELECT cdb_dataservices_server._OBS_GetDemographicSnapshotJSON($1, $2, $3, $4, $5) as snapshot;", ["text", "text", "geometry(Geometry, 4326)", "text", "text"])
result = plpy.execute(obs_plan, [username, orgname, geom, time_span, geometry_level])
if result:
quota_service.increment_success_service_use()
return result[0]['snapshot']
else:
quota_service.increment_empty_service_use()
return None
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to obs_get_demographic_snapshot', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to obs_get_demographic_snapshot')
finally:
quota_service.increment_total_service_use()
$$ LANGUAGE plpythonu STABLE PARALLEL RESTRICTED;
CREATE OR REPLACE FUNCTION cdb_dataservices_server.OBS_GetDemographicSnapshot(
username TEXT,
orgname TEXT,
geom geometry(Geometry, 4326),
time_span TEXT DEFAULT NULL,
geometry_level TEXT DEFAULT NULL)
RETURNS SETOF JSON AS $$
from cartodb_services.metrics import metrics
from cartodb_services.metrics import QuotaService
from cartodb_services.tools import Logger,LoggerConfig
plpy.execute("SELECT cdb_dataservices_server._connect_to_redis('{0}')".format(username))
redis_conn = GD["redis_connection_{0}".format(username)]['redis_metrics_connection']
plpy.execute("SELECT cdb_dataservices_server._get_obs_snapshot_config({0}, {1})".format(plpy.quote_nullable(username), plpy.quote_nullable(orgname)))
user_obs_config = GD["user_obs_snapshot_config_{0}".format(username)]
plpy.execute("SELECT cdb_dataservices_server._get_logger_config()")
logger_config = GD["logger_config"]
logger = Logger(logger_config)
quota_service = QuotaService(user_obs_config, redis_conn)
if not quota_service.check_user_quota():
raise Exception('You have reached the limit of your quota')
with metrics('obs_getdemographicsnapshot', user_obs_config, logger):
try:
obs_plan = plpy.prepare("SELECT cdb_dataservices_server._OBS_GetDemographicSnapshot($1, $2, $3, $4, $5) as snapshot;", ["text", "text", "geometry(Geometry, 4326)", "text", "text"])
result = plpy.execute(obs_plan, [username, orgname, geom, time_span, geometry_level])
if result:
resp = []
for element in result:
value = element['snapshot']
resp.append(value)
quota_service.increment_success_service_use()
return resp
else:
quota_service.increment_empty_service_use()
return []
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to obs_get_demographic_snapshot', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to obs_get_demographic_snapshot')
finally:
quota_service.increment_total_service_use()
$$ LANGUAGE plpythonu STABLE PARALLEL RESTRICTED;
CREATE OR REPLACE FUNCTION cdb_dataservices_server.obs_get_segment_snapshot(
username TEXT,
orgname TEXT,
geom geometry(Geometry, 4326),
geometry_level TEXT DEFAULT NULL)
RETURNS json AS $$
from cartodb_services.metrics import metrics
from cartodb_services.metrics import QuotaService
from cartodb_services.tools import Logger,LoggerConfig
import json
plpy.execute("SELECT cdb_dataservices_server._connect_to_redis('{0}')".format(username))
redis_conn = GD["redis_connection_{0}".format(username)]['redis_metrics_connection']
plpy.execute("SELECT cdb_dataservices_server._get_obs_snapshot_config({0}, {1})".format(plpy.quote_nullable(username), plpy.quote_nullable(orgname)))
user_obs_config = GD["user_obs_snapshot_config_{0}".format(username)]
plpy.execute("SELECT cdb_dataservices_server._get_logger_config()")
logger_config = GD["logger_config"]
logger = Logger(logger_config)
quota_service = QuotaService(user_obs_config, redis_conn)
if not quota_service.check_user_quota():
raise Exception('You have reached the limit of your quota')
with metrics('obs_getsegmentsnapshot', user_obs_config, logger):
try:
obs_plan = plpy.prepare("SELECT cdb_dataservices_server._OBS_GetSegmentSnapshotJSON($1, $2, $3, $4) as snapshot;", ["text", "text", "geometry(Geometry, 4326)", "text"])
result = plpy.execute(obs_plan, [username, orgname, geom, geometry_level])
if result:
quota_service.increment_success_service_use()
return result[0]['snapshot']
else:
quota_service.increment_empty_service_use()
return None
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to obs_get_segment_snapshot', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to obs_get_segment_snapshot')
finally:
quota_service.increment_total_service_use()
$$ LANGUAGE plpythonu STABLE PARALLEL RESTRICTED;
CREATE OR REPLACE FUNCTION cdb_dataservices_server._OBS_GetSegmentSnapshot(
username TEXT,
orgname TEXT,
geom geometry(Geometry, 4326),
geometry_level TEXT DEFAULT NULL)
RETURNS SETOF json AS $$
CONNECT cdb_dataservices_server._obs_server_conn_str(username, orgname);
SELECT * FROM cdb_observatory.OBS_GetSegmentSnapshot(geom, geometry_level);
$$ LANGUAGE plproxy VOLATILE PARALLEL UNSAFE;
CREATE OR REPLACE FUNCTION cdb_dataservices_server.OBS_GetSegmentSnapshot(
username TEXT,
orgname TEXT,
geom geometry(Geometry, 4326),
geometry_level TEXT DEFAULT NULL)
RETURNS SETOF JSON AS $$
from cartodb_services.metrics import metrics
from cartodb_services.metrics import QuotaService
from cartodb_services.tools import Logger,LoggerConfig
plpy.execute("SELECT cdb_dataservices_server._connect_to_redis('{0}')".format(username))
redis_conn = GD["redis_connection_{0}".format(username)]['redis_metrics_connection']
plpy.execute("SELECT cdb_dataservices_server._get_obs_snapshot_config({0}, {1})".format(plpy.quote_nullable(username), plpy.quote_nullable(orgname)))
user_obs_config = GD["user_obs_snapshot_config_{0}".format(username)]
plpy.execute("SELECT cdb_dataservices_server._get_logger_config()")
logger_config = GD["logger_config"]
logger = Logger(logger_config)
quota_service = QuotaService(user_obs_config, redis_conn)
if not quota_service.check_user_quota():
raise Exception('You have reached the limit of your quota')
with metrics('obs_getsegmentsnapshot', user_obs_config, logger):
try:
obs_plan = plpy.prepare("SELECT * FROM cdb_dataservices_server._OBS_GetSegmentSnapshot($1, $2, $3, $4) as snapshot;", ["text", "text", "geometry(Geometry, 4326)", "text"])
result = plpy.execute(obs_plan, [username, orgname, geom, geometry_level])
if result:
resp = []
for element in result:
value = element['snapshot']
resp.append(value)
quota_service.increment_success_service_use()
return resp
else:
quota_service.increment_empty_service_use()
return []
except BaseException as e:
import sys
quota_service.increment_failed_service_use()
logger.error('Error trying to OBS_GetSegmentSnapshot', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to OBS_GetSegmentSnapshot')
finally:
quota_service.increment_total_service_use()
$$ LANGUAGE plpythonu STABLE PARALLEL RESTRICTED;


@@ -1,5 +1,5 @@
comment = 'CartoDB dataservices server extension'
default_version = '0.32.0'
default_version = '0.33.0'
requires = 'plpythonu, plproxy, postgis, cdb_geocoder'
superuser = true
schema = cdb_dataservices_server

File diff suppressed because it is too large


@@ -10,8 +10,8 @@ CREATE OR REPLACE FUNCTION cdb_dataservices_server._obs_server_conn_str(
RETURNS text AS $$
plpy.execute("SELECT cdb_dataservices_server._connect_to_redis('{0}')".format(username))
redis_conn = GD["redis_connection_{0}".format(username)]['redis_metrics_connection']
plpy.execute("SELECT cdb_dataservices_server._get_obs_snapshot_config({0}, {1})".format(plpy.quote_nullable(username), plpy.quote_nullable(orgname)))
user_obs_config = GD["user_obs_snapshot_config_{0}".format(username)]
plpy.execute("SELECT cdb_dataservices_server._get_obs_config({0}, {1})".format(plpy.quote_nullable(username), plpy.quote_nullable(orgname)))
user_obs_config = GD["user_obs_config_{0}".format(username)]
return user_obs_config.connection_str
$$ LANGUAGE plpythonu STABLE PARALLEL RESTRICTED;
@@ -41,8 +41,8 @@ RETURNS json AS $$
plpy.execute("SELECT cdb_dataservices_server._connect_to_redis('{0}')".format(username))
redis_conn = GD["redis_connection_{0}".format(username)]['redis_metrics_connection']
plpy.execute("SELECT cdb_dataservices_server._get_obs_snapshot_config({0}, {1})".format(plpy.quote_nullable(username), plpy.quote_nullable(orgname)))
user_obs_config = GD["user_obs_snapshot_config_{0}".format(username)]
plpy.execute("SELECT cdb_dataservices_server._get_obs_config({0}, {1})".format(plpy.quote_nullable(username), plpy.quote_nullable(orgname)))
user_obs_config = GD["user_obs_config_{0}".format(username)]
plpy.execute("SELECT cdb_dataservices_server._get_logger_config()")
logger_config = GD["logger_config"]
@@ -94,8 +94,8 @@ RETURNS SETOF JSON AS $$
plpy.execute("SELECT cdb_dataservices_server._connect_to_redis('{0}')".format(username))
redis_conn = GD["redis_connection_{0}".format(username)]['redis_metrics_connection']
plpy.execute("SELECT cdb_dataservices_server._get_obs_snapshot_config({0}, {1})".format(plpy.quote_nullable(username), plpy.quote_nullable(orgname)))
user_obs_config = GD["user_obs_snapshot_config_{0}".format(username)]
plpy.execute("SELECT cdb_dataservices_server._get_obs_config({0}, {1})".format(plpy.quote_nullable(username), plpy.quote_nullable(orgname)))
user_obs_config = GD["user_obs_config_{0}".format(username)]
plpy.execute("SELECT cdb_dataservices_server._get_logger_config()")
logger_config = GD["logger_config"]
@@ -150,8 +150,8 @@ RETURNS json AS $$
plpy.execute("SELECT cdb_dataservices_server._connect_to_redis('{0}')".format(username))
redis_conn = GD["redis_connection_{0}".format(username)]['redis_metrics_connection']
plpy.execute("SELECT cdb_dataservices_server._get_obs_snapshot_config({0}, {1})".format(plpy.quote_nullable(username), plpy.quote_nullable(orgname)))
user_obs_config = GD["user_obs_snapshot_config_{0}".format(username)]
plpy.execute("SELECT cdb_dataservices_server._get_obs_config({0}, {1})".format(plpy.quote_nullable(username), plpy.quote_nullable(orgname)))
user_obs_config = GD["user_obs_config_{0}".format(username)]
plpy.execute("SELECT cdb_dataservices_server._get_logger_config()")
logger_config = GD["logger_config"]
@@ -201,8 +201,8 @@ RETURNS SETOF JSON AS $$
plpy.execute("SELECT cdb_dataservices_server._connect_to_redis('{0}')".format(username))
redis_conn = GD["redis_connection_{0}".format(username)]['redis_metrics_connection']
plpy.execute("SELECT cdb_dataservices_server._get_obs_snapshot_config({0}, {1})".format(plpy.quote_nullable(username), plpy.quote_nullable(orgname)))
user_obs_config = GD["user_obs_snapshot_config_{0}".format(username)]
plpy.execute("SELECT cdb_dataservices_server._get_obs_config({0}, {1})".format(plpy.quote_nullable(username), plpy.quote_nullable(orgname)))
user_obs_config = GD["user_obs_config_{0}".format(username)]
plpy.execute("SELECT cdb_dataservices_server._get_logger_config()")
logger_config = GD["logger_config"]


@@ -90,20 +90,6 @@ RETURNS boolean AS $$
return True
$$ LANGUAGE plpythonu SECURITY DEFINER;
CREATE OR REPLACE FUNCTION cdb_dataservices_server._get_obs_snapshot_config(username text, orgname text)
RETURNS boolean AS $$
cache_key = "user_obs_snapshot_config_{0}".format(username)
if cache_key in GD:
return False
else:
from cartodb_services.metrics import ObservatorySnapshotConfig
plpy.execute("SELECT cdb_dataservices_server._connect_to_redis('{0}')".format(username))
redis_conn = GD["redis_connection_{0}".format(username)]['redis_metadata_connection']
obs_snapshot_config = ObservatorySnapshotConfig(redis_conn, plpy, username, orgname)
GD[cache_key] = obs_snapshot_config
return True
$$ LANGUAGE plpythonu SECURITY DEFINER STABLE PARALLEL RESTRICTED;
CREATE OR REPLACE FUNCTION cdb_dataservices_server._get_obs_config(username text, orgname text)
RETURNS boolean AS $$
cache_key = "user_obs_config_{0}".format(username)


@@ -0,0 +1,8 @@
{
"name": "dataservices-api-server-python-lib",
"current_version": {
"requires": {
"python": "~2.7.0"
}
}
}


@@ -9,7 +9,6 @@ import json
PRECISION_PRECISE = 'precise'
PRECISION_INTERPOLATED = 'interpolated'
def geocoder_metadata(relevance, precision, match_types):
return {
'relevance': round(relevance, 2),
@@ -18,49 +17,104 @@ def geocoder_metadata(relevance, precision, match_types):
}
def geocoder_error_response(message):
return [[], {'error': message}]
# Single empty result
EMPTY_RESPONSE = [[], {}]
# HTTP 429 and related
TOO_MANY_REQUESTS_ERROR_RESPONSE = geocoder_error_response('Rate limit exceeded')
# Full empty _batch_geocode response
EMPTY_BATCH_RESPONSE = []
def compose_address(street, city=None, state=None, country=None):
return ', '.join(filter(None, [street, city, state, country]))
def run_street_point_geocoder(plpy, GD, geocoder, service_manager, username, orgname, searches):
def run_street_point_geocoder(plpy, GD, geocoder, service_manager, username, orgname, searches_string):
plpy.execute("SELECT cdb_dataservices_server._get_logger_config()")
logger_config = GD["logger_config"]
logger = Logger(logger_config)
success_count, failed_count, empty_count = 0, 0, 0
try:
searches = json.loads(searches_string)
except Exception as e:
logger.error('Parsing searches', exception=e, data={'searches': searches_string})
raise e
try:
service_manager.assert_within_limits(quota=False)
geocode_results = geocoder.bulk_geocode(searches=searches)
if geocode_results:
results = []
geocode_results = geocoder.bulk_geocode(searches)
results = []
a_failed_one = None
if not geocode_results == EMPTY_BATCH_RESPONSE:
for result in geocode_results:
if len(result) > 2:
metadata = json.dumps(result[2])
else:
logger.warning('Geocoding for {} without metadata'.format(username))
metadata = '{}'
metadata = result[2] if len(result) > 2 else {}
try:
if metadata.get('error', None):
results.append([result[0], None, json.dumps(metadata)])
a_failed_one = result
failed_count += 1
elif result[1] and len(result[1]) == 2:
plan = plpy.prepare("SELECT ST_SetSRID(ST_MakePoint($1, $2), 4326) as the_geom; ", ["double precision", "double precision"])
point = plpy.execute(plan, result[1], 1)[0]
results.append([result[0], point['the_geom'], json.dumps(metadata)])
success_count += 1
else:
results.append([result[0], None, json.dumps(metadata)])
empty_count += 1
except Exception as e:
import sys
logger.error("Error processing geocode", sys.exc_info(), data={"username": username, "orgname": orgname})
metadata['processing_error'] = 'Error: {}'.format(e.message)
results.append([result[0], None, json.dumps(metadata)])
failed_count += 1
if result[1] and len(result[1]) == 2:
plan = plpy.prepare("SELECT ST_SetSRID(ST_MakePoint($1, $2), 4326) as the_geom; ", ["double precision", "double precision"])
point = plpy.execute(plan, result[1], 1)[0]
results.append([result[0], point['the_geom'], metadata])
else:
results.append([result[0], None, metadata])
service_manager.quota_service.increment_success_service_use(len(results))
return results
missing_count = len(searches) - success_count - failed_count - empty_count
if a_failed_one:
logger.warning("failed geocoding",
data={
"username": username,
"orgname": orgname,
"failed": str(a_failed_one),
"success_count": success_count,
"empty_count": empty_count,
"missing_count": missing_count,
"failed_count": failed_count
})
else:
service_manager.quota_service.increment_empty_service_use(len(searches))
return []
logger.debug("finished geocoding",
data={
"username": username,
"orgname": orgname,
"success_count": success_count,
"empty_count": empty_count,
"missing_count": missing_count,
"failed_count": failed_count
})
service_manager.quota_service.increment_success_service_use(success_count)
service_manager.quota_service.increment_empty_service_use(empty_count + missing_count)
service_manager.quota_service.increment_failed_service_use(failed_count)
return results
except QuotaExceededException as qe:
logger.debug('QuotaExceededException at run_street_point_geocoder', qe,
data={"username": username, "orgname": orgname})
service_manager.quota_service.increment_failed_service_use(len(searches))
return []
except BaseException as e:
import sys
service_manager.quota_service.increment_failed_service_use()
service_manager.quota_service.increment_failed_service_use(len(searches))
service_manager.logger.error('Error trying to bulk geocode street point', sys.exc_info(), data={"username": username, "orgname": orgname})
raise Exception('Error trying to bulk geocode street')
finally:
service_manager.quota_service.increment_total_service_use()
service_manager.quota_service.increment_total_service_use(len(searches))
StreetGeocoderSearch = namedtuple('StreetGeocoderSearch', 'id address city state country')
@@ -79,20 +133,14 @@ class StreetPointBulkGeocoder:
SEARCH_KEYS = ['id', 'address', 'city', 'state', 'country']
def bulk_geocode(self, searches):
def bulk_geocode(self, decoded_searches):
"""
:param searches: array of StreetGeocoderSearch
:param decoded_searches: JSON array
:return: array of tuples with three elements:
* id
* latitude and longitude (array of two elements)
* empty array (future use: metadata)
"""
try:
decoded_searches = json.loads(searches)
except Exception as e:
self._logger.error('General error', exception=e)
raise e
street_geocoder_searches = []
for search in decoded_searches:
search_id, address, city, state, country = \
@@ -102,10 +150,19 @@ class StreetPointBulkGeocoder:
if len(street_geocoder_searches) > self.MAX_BATCH_SIZE:
raise Exception("Batch size can't be larger than {}".format(self.MAX_BATCH_SIZE))
if self._should_use_batch(street_geocoder_searches):
return self._batch_geocode(street_geocoder_searches)
else:
return self._serial_geocode(street_geocoder_searches)
try:
if self._should_use_batch(street_geocoder_searches):
return self._batch_geocode(street_geocoder_searches)
else:
return self._serial_geocode(street_geocoder_searches)
except Exception as e:
msg = "Error running geocode: {}".format(e)
self._logger.error(msg, e)
errors = [geocoder_error_response(msg)] * len(decoded_searches)
results = []
for s, r in zip(decoded_searches, errors):
results.append((s['id'], r[0], r[1]))
return results
def _batch_geocode(self, street_geocoder_searches):
raise NotImplementedError('Subclasses must implement _batch_geocode')
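
The reworked run_street_point_geocoder above classifies every row before touching the quota counters: a row whose metadata carries an 'error' key counts as failed, a row with a [lng, lat] pair counts as a success, anything else counts as empty, a per-row processing exception also lands in the failed bucket, and rows the provider never returned are treated as missing and billed together with the empty ones. A small arithmetic sketch of that accounting (counts only, using made-up numbers):

searches = 5                      # rows sent to bulk_geocode
success_count = 3                 # rows that produced a [lng, lat] pair
failed_count = 1                  # rows whose metadata contained an 'error'
empty_count = 1                   # rows returned without coordinates
missing_count = searches - success_count - failed_count - empty_count  # 0 here

# quota increments performed on the happy path:
# increment_success_service_use(success_count)              -> 3
# increment_empty_service_use(empty_count + missing_count)  -> 1
# increment_failed_service_use(failed_count)                -> 1
# increment_total_service_use(searches)                     -> 5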


@@ -1,7 +1,7 @@
from multiprocessing import Pool
from exceptions import MalformedResult
from cartodb_services import StreetPointBulkGeocoder
from cartodb_services.geocoder import compose_address
from cartodb_services.geocoder import compose_address, geocoder_error_response
from cartodb_services.google import GoogleMapsGeocoder
@@ -25,7 +25,11 @@ class GoogleMapsBulkGeocoder(GoogleMapsGeocoder, StreetPointBulkGeocoder):
results = []
for search in searches:
(cartodb_id, street, city, state, country) = search
lng_lat, metadata = self.geocode_meta(street, city, state, country)
try:
lng_lat, metadata = self.geocode_meta(street, city, state, country)
except Exception as e:
self._logger.error("Error geocoding", e)
lng_lat, metadata = geocoder_error_response("Error geocoding")
results.append((cartodb_id, lng_lat, metadata))
return results
@@ -49,14 +53,12 @@ class GoogleMapsBulkGeocoder(GoogleMapsGeocoder, StreetPointBulkGeocoder):
try:
lng_lat, metadata = self._process_results(bulk_result.get())
except Exception as e:
self._logger.error('Error at Google async_geocoder', e)
lng_lat, metadata = [[], {}]
msg = 'Error at Google async_geocoder'
self._logger.error(msg, e)
lng_lat, metadata = geocoder_error_response(msg)
results.append((cartodb_id, lng_lat, metadata))
return results
except KeyError as e:
self._logger.error('KeyError error', exception=e)
raise MalformedResult()
except Exception as e:
self._logger.error('General error', exception=e)
raise e


@@ -4,11 +4,10 @@
from urlparse import parse_qs
from exceptions import MalformedResult
from cartodb_services.geocoder import compose_address, geocoder_metadata, PRECISION_PRECISE, PRECISION_INTERPOLATED
from cartodb_services.geocoder import compose_address, geocoder_metadata, PRECISION_PRECISE, PRECISION_INTERPOLATED, EMPTY_RESPONSE
from cartodb_services.google.exceptions import InvalidGoogleCredentials
from client_factory import GoogleMapsClientFactory
EMPTY_RESPONSE = [[], {}]
PARTIAL_FACTOR = 0.8
RELEVANCE_BY_LOCATION_TYPE = {
'ROOFTOP': 1,


@@ -8,7 +8,7 @@ from collections import namedtuple
from requests.adapters import HTTPAdapter
from cartodb_services import StreetPointBulkGeocoder
from cartodb_services.here import HereMapsGeocoder
from cartodb_services.geocoder import geocoder_metadata
from cartodb_services.geocoder import geocoder_metadata, geocoder_error_response
from cartodb_services.metrics import Traceable
from cartodb_services.tools.exceptions import ServiceException
@@ -42,7 +42,11 @@ class HereMapsBulkGeocoder(HereMapsGeocoder, StreetPointBulkGeocoder):
results = []
for search in searches:
(search_id, address, city, state, country) = search
result = self.geocode_meta(searchtext=address, city=city, state=state, country=country)
try:
result = self.geocode_meta(searchtext=address, city=city, state=state, country=country)
except Exception as e:
self._logger.error("Error geocoding", e)
result = geocoder_error_response("Error geocoding")
results.append((search_id, result[0], result[1]))
return results


@@ -6,7 +6,7 @@ import requests
from requests.adapters import HTTPAdapter
from exceptions import *
from cartodb_services.geocoder import PRECISION_PRECISE, PRECISION_INTERPOLATED, geocoder_metadata
from cartodb_services.geocoder import PRECISION_PRECISE, PRECISION_INTERPOLATED, geocoder_metadata, EMPTY_RESPONSE
from cartodb_services.metrics import Traceable
class HereMapsGeocoder(Traceable):
@@ -90,7 +90,7 @@ class HereMapsGeocoder(Traceable):
if value and value.strip():
params[key] = value
if not params:
return [[], {}]
return EMPTY_RESPONSE
return self._execute_geocode(params)
def _execute_geocode(self, params):
@@ -102,7 +102,7 @@ class HereMapsGeocoder(Traceable):
return [self._extract_lng_lat_from_result(result),
self._extract_metadata_from_result(result)]
except IndexError:
return [[], {}]
return EMPTY_RESPONSE
except KeyError as e:
self._logger.error('params: {}'.format(params), e)
raise MalformedResult()
@@ -127,7 +127,7 @@ class HereMapsGeocoder(Traceable):
self._logger.warning('Error 4xx trying to geocode street using HERE',
data={"response": response.json(), "params":
params})
return []
return EMPTY_RESPONSE
else:
self._logger.error('Error trying to geocode street using HERE',
data={"response": response.json(), "params":


@@ -1,8 +1,6 @@
import json, requests, time
from requests.adapters import HTTPAdapter
import requests
from cartodb_services import StreetPointBulkGeocoder
from cartodb_services.mapbox import MapboxGeocoder
from cartodb_services.tools.exceptions import ServiceException
from iso3166 import countries
from cartodb_services.tools.country import country_to_iso3


@@ -5,7 +5,7 @@ Python client for the Mapbox Geocoder service.
import json
import requests
from mapbox import Geocoder
from cartodb_services.geocoder import PRECISION_PRECISE, PRECISION_INTERPOLATED, geocoder_metadata
from cartodb_services.geocoder import PRECISION_PRECISE, PRECISION_INTERPOLATED, geocoder_metadata, EMPTY_RESPONSE, EMPTY_BATCH_RESPONSE, TOO_MANY_REQUESTS_ERROR_RESPONSE, geocoder_error_response
from cartodb_services.metrics import Traceable
from cartodb_services.tools.exceptions import ServiceException
from cartodb_services.tools.qps import qps_retry
@@ -23,8 +23,6 @@ ENTRY_COORDINATES = 'coordinates'
ENTRY_TYPE = 'type'
TYPE_POINT = 'Point'
EMPTY_RESPONSE = [[], {}]
MATCH_TYPE_BY_MATCH_LEVEL = {
'poi': 'point_of_interest',
'poi.landmark': 'point_of_interest',
@@ -71,7 +69,7 @@ class MapboxGeocoder(Traceable):
result.append(EMPTY_RESPONSE)
return result
else:
return EMPTY_RESPONSE
return EMPTY_BATCH_RESPONSE
def _extract_lng_lat_from_feature(self, feature):
geometry = feature[ENTRY_GEOMETRY]
@@ -120,9 +118,17 @@ class MapboxGeocoder(Traceable):
:param city:
:param state_province:
:param country: Country ISO 3166 code
:return: [x, y] on success, [] on error
:return: [x, y] on success, raises ServiceException on error
"""
return self.geocode_meta(searchtext, city, state_province, country)[0]
response = self.geocode_meta(searchtext, city, state_province, country)
if response:
error_message = response[1].get('error', None)
if error_message:
raise ServiceException(error_message, None)
else:
return response[0]
else:
return EMPTY_RESPONSE
@qps_retry(qps=10)
def geocode_meta(self, searchtext, city=None, state_province=None,
@@ -140,7 +146,8 @@ class MapboxGeocoder(Traceable):
free_search = ', '.join(address)
return self.geocode_free_text_meta([free_search], country)[0]
response = self.geocode_free_text_meta([free_search], country)
return response[0] if response else EMPTY_RESPONSE
@qps_retry(qps=10)
def geocode_free_text_meta(self, free_searches, country=None):
@@ -154,28 +161,31 @@ class MapboxGeocoder(Traceable):
try:
free_search = ';'.join([self._escape(fs) for fs in free_searches])
response = self._geocoder.forward(address=free_search.decode('utf-8'),
limit=1,
country=country)
if response.status_code == requests.codes.ok:
return self._parse_geocoder_response(response.text)
elif response.status_code == requests.codes.too_many_requests:
return [TOO_MANY_REQUESTS_ERROR_RESPONSE] * len(free_searches)
elif response.status_code == requests.codes.bad_request:
return EMPTY_RESPONSE
return EMPTY_BATCH_RESPONSE
elif response.status_code == requests.codes.unprocessable_entity:
return EMPTY_RESPONSE
return EMPTY_BATCH_RESPONSE
else:
raise ServiceException(response.status_code, response)
msg = "Unkown status: {}".format(response.status_code)
self._logger.warning(msg, data={"searches": free_searches})
return [geocoder_error_response(msg)] * len(free_searches)
except requests.Timeout as te:
# In case of timeout we want to stop the job because the server
# could be down
self._logger.error('Timeout connecting to Mapbox geocoding server',
te)
raise ServiceException('Error geocoding {0} using Mapbox'.format(
free_search), None)
msg = 'Timeout connecting to Mapbox geocoding server'
self._logger.error(msg, te)
return [geocoder_error_response(msg)] * len(free_searches)
except requests.ConnectionError as ce:
# Don't raise the exception to continue with the geocoding job
self._logger.error('Error connecting to Mapbox geocoding server',
exception=ce)
return EMPTY_RESPONSE
return EMPTY_BATCH_RESPONSE
def _escape(self, free_search):
# Semicolon is used to separate batch geocoding; there's no documented

View File

@@ -1,4 +1,4 @@
from config import GeocoderConfig, IsolinesRoutingConfig, InternalGeocoderConfig, RoutingConfig, ConfigException, ObservatorySnapshotConfig, ObservatoryConfig
from config import GeocoderConfig, IsolinesRoutingConfig, InternalGeocoderConfig, RoutingConfig, ConfigException, ObservatoryConfig
from quota import QuotaService
from user import UserMetricsService
from log import metrics, MetricsDataGatherer, Traceable


@@ -86,28 +86,6 @@ class DataObservatoryConfig(ServiceConfig):
return 'data observatory'
class ObservatorySnapshotConfig(DataObservatoryConfig):
SOFT_LIMIT_KEY = 'soft_obs_snapshot_limit'
QUOTA_KEY = 'obs_snapshot_quota'
PERIOD_END_DATE = 'period_end_date'
def __init__(self, redis_connection, db_conn, username, orgname=None):
super(ObservatorySnapshotConfig, self).__init__(redis_connection, db_conn,
username, orgname)
self._period_end_date = date_parse(self._redis_config[self.PERIOD_END_DATE])
if self.SOFT_LIMIT_KEY in self._redis_config and self._redis_config[self.SOFT_LIMIT_KEY].lower() == 'true':
self._soft_limit = True
else:
self._soft_limit = False
self._monthly_quota = self._get_effective_monthly_quota(self.QUOTA_KEY)
self._connection_str = self._db_config.data_observatory_connection_str
@property
def service_type(self):
return 'obs_snapshot'
class ObservatoryConfig(DataObservatoryConfig):
SOFT_LIMIT_KEY = 'soft_obs_general_limit'
@@ -890,7 +868,6 @@ class ServicesRedisConfig:
QUOTA_KEY = 'geocoding_quota'
ISOLINES_QUOTA_KEY = 'here_isolines_quota'
ROUTING_QUOTA_KEY = 'mapzen_routing_quota'
OBS_SNAPSHOT_QUOTA_KEY = 'obs_snapshot_quota'
OBS_GENERAL_QUOTA_KEY = 'obs_general_quota'
PERIOD_END_DATE = 'period_end_date'
GEOCODER_PROVIDER_KEY = 'geocoder_provider'
@@ -934,8 +911,6 @@ class ServicesRedisConfig:
user_config[self.ISOLINES_QUOTA_KEY] = org_config[self.ISOLINES_QUOTA_KEY]
if self.ROUTING_QUOTA_KEY in org_config:
user_config[self.ROUTING_QUOTA_KEY] = org_config[self.ROUTING_QUOTA_KEY]
if self.OBS_SNAPSHOT_QUOTA_KEY in org_config:
user_config[self.OBS_SNAPSHOT_QUOTA_KEY] = org_config[self.OBS_SNAPSHOT_QUOTA_KEY]
if self.OBS_GENERAL_QUOTA_KEY in org_config:
user_config[self.OBS_GENERAL_QUOTA_KEY] = org_config[self.OBS_GENERAL_QUOTA_KEY]
if self.PERIOD_END_DATE in org_config:


@@ -1,6 +1,7 @@
import json, requests, time
from requests.adapters import HTTPAdapter
from cartodb_services import StreetPointBulkGeocoder
from cartodb_services.geocoder import geocoder_error_response
from cartodb_services.tomtom import TomTomGeocoder
from cartodb_services.tools.exceptions import ServiceException
@@ -43,13 +44,21 @@ class TomTomBulkGeocoder(TomTomGeocoder, StreetPointBulkGeocoder):
return results
def _batch_geocode(self, searches):
location = self._send_batch(searches)
full_results = self._download_results(location)
full_results = self._geocode_searches(searches)
results = []
for s, r in zip(searches, full_results):
results.append((s[0], r[0], r[1]))
return results
def _geocode_searches(self, searches):
try:
location = self._send_batch(searches)
return self._download_results(location)
except Exception as e:
msg = "Error running TomTom batch geocode: {}".format(e)
self._logger.error(msg, e)
return [geocoder_error_response(msg)] * len(searches)
def _send_batch(self, searches):
body = {'batchItems': [{'query': self._query(s)} for s in searches]}
request_params = {


@@ -5,7 +5,7 @@ import json
import requests
from uritemplate import URITemplate
from math import tanh
from cartodb_services.geocoder import PRECISION_PRECISE, PRECISION_INTERPOLATED, geocoder_metadata
from cartodb_services.geocoder import PRECISION_PRECISE, PRECISION_INTERPOLATED, geocoder_metadata, EMPTY_RESPONSE, geocoder_error_response
from cartodb_services.metrics import Traceable
from cartodb_services.tools.exceptions import ServiceException
from cartodb_services.tools.qps import qps_retry
@@ -20,7 +20,6 @@ ENTRY_RESULTS = 'results'
ENTRY_POSITION = 'position'
ENTRY_LON = 'lon'
ENTRY_LAT = 'lat'
EMPTY_RESPONSE = [[], {}]
SCORE_NORMALIZATION_FACTOR = 0.15
PRECISION_SCORE_THRESHOLD = 0.5
@@ -74,7 +73,12 @@ class TomTomGeocoder(Traceable):
@qps_retry(qps=5)
def geocode(self, searchtext, city=None, state_province=None,
country=None):
return self.geocode_meta(searchtext, city, state_province, country)[0]
response = self.geocode_meta(searchtext, city, state_province, country)
error_message = response[1].get('error', None)
if error_message:
raise ServiceException(error_message, None)
else:
return response[0]
@qps_retry(qps=5)
def geocode_meta(self, searchtext, city=None, state_province=None,
@@ -107,10 +111,9 @@ class TomTomGeocoder(Traceable):
except requests.Timeout as te:
# In case of timeout we want to stop the job because the server
# could be down
self._logger.error('Timeout connecting to TomTom geocoding server',
te)
raise ServiceException('Error geocoding {0} using TomTom'.format(
searchtext), None)
msg = 'Timeout connecting to TomTom geocoding server'
self._logger.error(msg, te)
return geocoder_error_response(msg)
except requests.ConnectionError as ce:
# Don't raise the exception to continue with the geocoding job
self._logger.error('Error connecting to TomTom geocoding server',
@@ -126,7 +129,9 @@ class TomTomGeocoder(Traceable):
return EMPTY_RESPONSE
else:
msg = 'Unknown response {}: {}'.format(str(status_code), text)
raise ServiceException(msg, None)
self._logger.warning('Error parsing TomTom geocoding response',
data={'msg': msg})
return geocoder_error_response(msg)
def _parse_geocoder_response(self, response):
json_response = json.loads(response) \


@@ -10,7 +10,7 @@ from setuptools import setup, find_packages
setup(
name='cartodb_services',
version='0.19.0',
version='0.20.0',
description='CartoDB Services API Python Library',


@@ -173,7 +173,8 @@ class TestGeocoderOrgConfig(TestCase):
class TestIsolinesUserConfig(TestCase):
ISOLINES_PROVIDERS = ['heremaps', 'mapzen', 'mapbox', 'tomtom']
# Don't test mapbox. See CartoDB/cartodb-management/issues/5199"
ISOLINES_PROVIDERS = ['heremaps', 'mapzen', 'tomtom']
def setUp(self):
self.redis_conn = MockRedis()
@@ -233,8 +234,8 @@ class TestIsolinesUserConfig(TestCase):
class TestIsolinesOrgConfig(TestCase):
ISOLINES_PROVIDERS = ['heremaps', 'mapzen', 'mapbox', 'tomtom']
# Don't test mapbox. See CartoDB/cartodb-management/issues/5199"
ISOLINES_PROVIDERS = ['heremaps', 'mapzen', 'tomtom']
def setUp(self):
self.redis_conn = MockRedis()
@@ -343,41 +344,7 @@ class TestDataObservatoryUserConfig(TestCase):
self.redis_conn = MockRedis()
plpy_mock_config()
def test_should_return_config_for_obs_snapshot(self):
yesterday = datetime.today() - timedelta(days=1)
build_redis_user_config(self.redis_conn, 'test_user', 'data_observatory',
quota=100, end_date=yesterday)
do_config = ObservatorySnapshotConfig(self.redis_conn, plpy_mock,
'test_user')
assert do_config.monthly_quota == 100
assert do_config.soft_limit is False
assert do_config.period_end_date.date() == yesterday.date()
def test_should_return_true_if_soft_limit_is_true_in_redis(self):
yesterday = datetime.today() - timedelta(days=1)
build_redis_user_config(self.redis_conn, 'test_user', 'data_observatory',
quota=0, soft_limit=True, end_date=yesterday)
do_config = ObservatorySnapshotConfig(self.redis_conn, plpy_mock,
'test_user')
assert do_config.soft_limit is True
def test_should_return_0_if_quota_is_0_in_redis(self):
yesterday = datetime.today() - timedelta(days=1)
build_redis_user_config(self.redis_conn, 'test_user', 'data_observatory',
quota=0, end_date=yesterday)
do_config = ObservatorySnapshotConfig(self.redis_conn, plpy_mock,
'test_user')
assert do_config.monthly_quota == 0
def test_should_return_0_if_quota_is_empty_in_redis(self):
yesterday = datetime.today() - timedelta(days=1)
build_redis_user_config(self.redis_conn, 'test_user', 'data_observatory',
quota='', end_date=yesterday)
do_config = ObservatorySnapshotConfig(self.redis_conn, plpy_mock,
'test_user')
assert do_config.monthly_quota == 0
def test_should_return_config_for_obs_snapshot(self):
def test_should_return_config_for_obs_config(self):
yesterday = datetime.today() - timedelta(days=1)
build_redis_user_config(self.redis_conn, 'test_user', 'data_observatory',
quota=100, end_date=yesterday)


@@ -52,11 +52,14 @@ class MockPlPy:
self._logged_queries = []
self._log_executed_queries = True
def warning(self, msg):
self.warnings.append(msg)
def notice(self, msg):
self.notices.append(msg)
def debug(self, msg):
self.notices.append(msg)
self.debugs.append(msg)
def info(self, msg):
self.infos.append(msg)
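
With the fix above, debug() messages land in their own debugs list instead of being mixed into notices. A minimal usage sketch (the import path and the no-argument constructor are assumptions, since neither appears in this hunk):

from mock_plpy import MockPlPy   # assumed module name, not shown in this diff

plpy = MockPlPy()                # assumes a no-argument constructor
plpy.warning('quota almost reached')
plpy.debug('geocoding row 42')

assert plpy.warnings == ['quota almost reached']
assert plpy.debugs == ['geocoding row 42']
assert plpy.notices == []        # debug() no longer lands in notices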


@@ -0,0 +1,211 @@
#!/usr/local/bin/python
# -*- coding: utf-8 -*-
import json
from unittest import TestCase
from mock import Mock, MagicMock
from nose.tools import assert_not_equal, assert_equal, assert_true
from cartodb_services.tools import QuotaExceededException
from cartodb_services.geocoder import run_street_point_geocoder, StreetGeocoderSearch
SEARCH_FIXTURES = {
'two': [
StreetGeocoderSearch(id=1, address='Paseo Zorrilla 1, Valladolid',
city=None, state=None, country=None),
StreetGeocoderSearch(id=2, address='Paseo Zorrilla 2, Valladolid',
city=None, state=None, country=None)
],
'wrong': [
StreetGeocoderSearch(id=100, address='deowpfjoepwjfopejwpofjewpojgf',
city=None, state=None, country=None),
],
'error': [
StreetGeocoderSearch(id=200, address=None, city=None, state=None,
country=None),
],
'broken_middle': [
StreetGeocoderSearch(id=301, address='Paseo Zorrilla 1, Valladolid',
city=None, state=None, country=None),
StreetGeocoderSearch(id=302, address='Marsopolis',
city=None, state=None, country=None),
StreetGeocoderSearch(id=303, address='Paseo Zorrilla 2, Valladolid',
city=None, state=None, country=None)
],
}
BULK_RESULTS_FIXTURES = {
'two': [
(1, [0, 0], {}),
(2, [0, 0], {}),
],
'wrong': [
(100, [], {})
],
'error': [
(200, [], {'error': 'Something wrong happened'})
],
'broken_middle': [
(301, [0, 0], {}),
(302, ['a', 'b'], {}),
(303, [0, 0], {}),
]
}
EXPECTED_RESULTS_FIXTURES = {
'two': [
[1, [0, 0], '{}'],
[2, [0, 0], '{}'],
],
'wrong': [
[100, None, '{}']
],
'error': [
[200, None, '{"error": "Something wrong happened"}']
],
'broken_middle': [
[301, [0, 0], '{}'],
[302, None, '{"processing_error": "Error: NO!"}'],
[303, [0, 0], '{}'],
]
}
class TestRunStreetPointGeocoder(TestCase):
def _run_geocoder(self, plpy=None, gd=None, geocoder=None,
service_manager=None, username=None, orgname=None,
searches=None):
return run_street_point_geocoder(
plpy if plpy else self.plpy_mock,
gd if gd else self.gd_mock,
geocoder if geocoder else self.geocoder_mock,
service_manager if service_manager else self.service_manager_mock,
username if username else 'any_username',
orgname if orgname else None,
json.dumps(searches) if searches else '[]')
def setUp(self):
point = [0,0]
self.plpy_mock = Mock()
self.plpy_mock.execute = MagicMock(return_value=[{'the_geom': point}])
self.logger_config_mock = MagicMock(min_log_level='debug',
log_file_path='/tmp/ptest.log',
rollbar_api_key=None)
self.gd_mock = {'logger_config': self.logger_config_mock}
self.geocoder_mock = Mock()
self.quota_service_mock = Mock()
self.service_manager_mock = Mock()
self.service_manager_mock.quota_service = self.quota_service_mock
self.service_manager_mock.assert_within_limits = MagicMock()
def test_count_increment_total_and_failed_service_use_on_error(self):
self.service_manager_mock.assert_within_limits = \
Mock(side_effect=Exception('Fail!'))
searches = []
with(self.assertRaises(BaseException)):
self._run_geocoder(service_manager=self.service_manager_mock,
searches=searches)
self.quota_service_mock.increment_total_service_use. \
assert_called_once_with(len(searches))
self.quota_service_mock.increment_failed_service_use. \
assert_called_once_with(len(searches))
def test_count_increment_failed_service_use_on_quota_error(self):
self.service_manager_mock.assert_within_limits = \
Mock(side_effect=QuotaExceededException())
searches = SEARCH_FIXTURES['two']
result = self._run_geocoder(service_manager=self.service_manager_mock,
searches=searches)
assert_equal(result, [])
self.quota_service_mock.increment_failed_service_use. \
assert_called_once_with(len(searches))
def test_increment_success_service_use_on_complete_response(self):
searches = SEARCH_FIXTURES['two']
results = [
(1, [0, 0], {}),
(2, [0, 0], {}),
]
expected_results = [
[1, [0, 0], '{}'],
[2, [0, 0], '{}'],
]
self.geocoder_mock.bulk_geocode = MagicMock(return_value=results)
result = self._run_geocoder(geocoder=self.geocoder_mock,
searches=searches)
assert_equal(result, expected_results)
self.quota_service_mock.increment_success_service_use. \
assert_called_once_with(len(results))
def test_increment_empty_service_use_on_complete_response(self):
searches = SEARCH_FIXTURES['two']
results = []
self.geocoder_mock.bulk_geocode = MagicMock(return_value=results)
result = self._run_geocoder(geocoder=self.geocoder_mock,
searches=searches)
assert_equal(result, results)
self.quota_service_mock.increment_empty_service_use. \
assert_called_once_with(len(searches))
def test_increment_mixed_empty_service_use_on_complete_response(self):
searches = SEARCH_FIXTURES['two'] + SEARCH_FIXTURES['wrong']
bulk_results = BULK_RESULTS_FIXTURES['two'] + BULK_RESULTS_FIXTURES['wrong']
self.geocoder_mock.bulk_geocode = MagicMock(return_value=bulk_results)
result = self._run_geocoder(geocoder=self.geocoder_mock,
searches=searches)
assert_equal(result, EXPECTED_RESULTS_FIXTURES['two'] + EXPECTED_RESULTS_FIXTURES['wrong'])
self.quota_service_mock.increment_success_service_use. \
assert_called_once_with(len(SEARCH_FIXTURES['two']))
self.quota_service_mock.increment_empty_service_use. \
assert_called_once_with(len(SEARCH_FIXTURES['wrong']))
def test_increment_mixed_error_service_use_on_complete_response(self):
searches = SEARCH_FIXTURES['two'] + SEARCH_FIXTURES['error']
bulk_results = BULK_RESULTS_FIXTURES['two'] + BULK_RESULTS_FIXTURES['error']
self.geocoder_mock.bulk_geocode = MagicMock(return_value=bulk_results)
result = self._run_geocoder(geocoder=self.geocoder_mock,
searches=searches)
assert_equal(result, EXPECTED_RESULTS_FIXTURES['two'] + EXPECTED_RESULTS_FIXTURES['error'])
self.quota_service_mock.increment_success_service_use. \
assert_called_once_with(len(SEARCH_FIXTURES['two']))
self.quota_service_mock.increment_failed_service_use. \
assert_called_once_with(len(SEARCH_FIXTURES['error']))
def test_controlled_failure_on_query_break(self):
searches = SEARCH_FIXTURES['broken_middle']
bulk_results = BULK_RESULTS_FIXTURES['broken_middle']
self.geocoder_mock.bulk_geocode = MagicMock(return_value=bulk_results)
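        # Simulate plpy.execute failing only for the middle search (id 302), whose fake coordinates are ['a', 'b'].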
def break_on_302(*args):
if len(args) == 3:
plan, values, limit = args
if values[0] == 'a':
raise Exception('NO!')
return [{'the_geom': [0,0]}]
self.plpy_mock.execute = break_on_302
result = self._run_geocoder(geocoder=self.geocoder_mock,
searches=searches)
assert_equal(result, EXPECTED_RESULTS_FIXTURES['broken_middle'])
self.quota_service_mock.increment_success_service_use. \
assert_called_once_with(2)
self.quota_service_mock.increment_failed_service_use. \
assert_called_once_with(1)

View File

@@ -28,9 +28,7 @@ def build_redis_user_config(redis_conn, username, service, quota=100,
redis_conn.hset(user_redis_name, 'mapzen_routing_quota', str(quota))
redis_conn.hset(user_redis_name, 'soft_mapzen_routing_limit', str(soft_limit).lower())
elif service is 'data_observatory':
redis_conn.hset(user_redis_name, 'obs_snapshot_quota', str(quota))
redis_conn.hset(user_redis_name, 'obs_general_quota', str(quota))
redis_conn.hset(user_redis_name, 'soft_obs_snapshot_limit', str(soft_limit).lower())
redis_conn.hset(user_redis_name, 'soft_obs_general_limit', str(soft_limit).lower())
redis_conn.hset(user_redis_name, 'google_maps_client_id', '')
@@ -57,7 +55,6 @@ def build_redis_org_config(redis_conn, orgname, service, quota=100,
redis_conn.hset(org_redis_name, 'mapzen_routing_quota', str(quota))
elif service is 'data_observatory':
if quota is not None:
redis_conn.hset(org_redis_name, 'obs_snapshot_quota', str(quota))
redis_conn.hset(org_redis_name, 'obs_general_quota', str(quota))
redis_conn.hset(org_redis_name, 'google_maps_client_id', '')

View File

@@ -3,15 +3,13 @@ from mock import Mock
from cartodb_services.mapbox.isolines import MapboxIsolines
from cartodb_services.mapbox.matrix_client import DEFAULT_PROFILE
from cartodb_services.mapbox.matrix_client import MapboxMatrixClient
from cartodb_services.mapbox.routing import MapboxRouting
from cartodb_services.tools import Coordinate
from cartodb_services.tools.coordinates import (validate_coordinates,
marshall_coordinates)
from credentials import mapbox_api_key
VALID_ORIGIN = Coordinate(-73.989, 40.733)
@unittest.skip("Stop using Matrix API. CartoDB/cartodb-management/issues/5199")
class MapboxIsolinesTestCase(unittest.TestCase):
def setUp(self):

View File

@@ -20,6 +20,7 @@ VALID_PROFILE = DEFAULT_PROFILE
INVALID_PROFILE = 'invalid_profile'
@unittest.skip("Stop using Matrix API. CartoDB/cartodb-management/issues/5199")
class MapboxMatrixTestCase(unittest.TestCase):
def setUp(self):
self.matrix_client = MapboxMatrixClient(token=mapbox_api_key(),

View File

@@ -181,28 +181,6 @@ class TestQuotaService(TestCase):
# Quick workaround so we don't take into account number of credits
# spent for users that have defined the quota.
# See https://github.com/CartoDB/bigmetadata/issues/215
def test_should_check_user_obs_snapshot_quota_correctly(self):
qs = self.__build_obs_snapshot_quota_service('test_user')
qs.increment_success_service_use()
assert qs.check_user_quota() is True
qs.increment_success_service_use(amount=100000)
assert qs.check_user_quota() is True
def test_should_check_org_obs_snapshot_quota_correctly(self):
qs = self.__build_obs_snapshot_quota_service('test_user',
orgname='testorg')
qs.increment_success_service_use()
assert qs.check_user_quota() is True
qs.increment_success_service_use(amount=100000)
assert qs.check_user_quota() is True
def test_should_check_user_obs_quota_correctly(self):
qs = self.__build_obs_snapshot_quota_service('test_user')
qs.increment_success_service_use()
assert qs.check_user_quota() is True
qs.increment_success_service_use(amount=100000)
assert qs.check_user_quota() is True
def test_should_check_org_obs_quota_correctly(self):
qs = self.__build_obs_quota_service('test_user',
orgname='testorg')
@@ -249,17 +227,6 @@ class TestQuotaService(TestCase):
username, orgname)
return QuotaService(isolines_config, redis_connection=self.redis_conn)
def __build_obs_snapshot_quota_service(self, username, quota=100,
provider='obs_snapshot',
orgname=None,
soft_limit=False,
end_date=datetime.today()):
self.__prepare_quota_service(username, 'data_observatory', quota,
None, orgname, soft_limit, end_date)
do_config = ObservatorySnapshotConfig(self.redis_conn, plpy_mock,
username, orgname)
return QuotaService(do_config, redis_connection=self.redis_conn)
def __build_obs_quota_service(self, username, quota=100,
provider='obs_general',
orgname=None,

View File

@@ -11,7 +11,7 @@ class TestStreetFunctionsSetUp(TestCase):
fixture_points = None
GOOGLE_POINTS = {
'Plaza Mayor, Valladolid': [-4.728252, 41.6517025],
'Plaza Mayor 1, Valladolid': [-4.728252, 41.6517025],
'Paseo Zorrilla, Valladolid': [-4.7404453, 41.6314339],
'1900 amphitheatre parkway': [-122.0875324, 37.4227968],
'1901 amphitheatre parkway': [-122.0885504, 37.4238657],
@@ -26,7 +26,7 @@ class TestStreetFunctionsSetUp(TestCase):
}
HERE_POINTS = {
'Plaza Mayor, Valladolid': [-4.72979, 41.65258],
'Plaza Mayor 1, Valladolid': [-4.729, 41.65258],
'Paseo Zorrilla, Valladolid': [-4.73869, 41.63817],
'1900 amphitheatre parkway': [-122.0879468, 37.4234763],
'1901 amphitheatre parkway': [-122.0879253, 37.4238725],
@@ -42,13 +42,13 @@ class TestStreetFunctionsSetUp(TestCase):
TOMTOM_POINTS = HERE_POINTS.copy()
TOMTOM_POINTS.update({
'Plaza Mayor, Valladolid': [-4.72183, 41.5826],
'Plaza Mayor 1, Valladolid': [-4.7286, 41.6523],
'Paseo Zorrilla, Valladolid': [-4.74031, 41.63181],
'Valladolid': [-4.72838, 41.6542],
'Valladolid, Spain': [-4.72838, 41.6542],
'Madrid': [-3.70035, 40.42028],
'Logroño, Spain': [-2.44998, 42.46592],
'Plaza España, Barcelona': [2.1497, 41.37516]
'Plaza España, Barcelona': [2.14856, 41.37516]
})
MAPBOX_POINTS = GOOGLE_POINTS.copy()
@@ -174,7 +174,7 @@ class TestBulkStreetFunctions(TestStreetFunctionsSetUp):
"FROM cdb_dataservices_client.cdb_bulk_geocode_street_point(" \
"'select 1 as cartodb_id, ''Spain'' as country, " \
"''Castilla y León'' as state, ''Valladolid'' as city, " \
"''Plaza Mayor'' as street " \
"''Plaza Mayor 1'' as street " \
"UNION " \
"select 2 as cartodb_id, ''Spain'' as country, " \
"''Castilla y León'' as state, ''Valladolid'' as city, " \
@@ -183,7 +183,7 @@ class TestBulkStreetFunctions(TestStreetFunctionsSetUp):
response = self._run_authenticated(query)
points_by_cartodb_id = {
1: self.fixture_points['Plaza Mayor, Valladolid'],
1: self.fixture_points['Plaza Mayor 1, Valladolid'],
2: self.fixture_points['Paseo Zorrilla, Valladolid']
}
self.assert_close_points(self._x_y_by_cartodb_id(response), points_by_cartodb_id)