Compare commits
2 Commits
bayesian_b
...
new-versio
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
8e972128eb | ||
|
|
cdd2d9e722 |
4
Makefile
4
Makefile
@@ -1,5 +1,5 @@
|
||||
EXT_DIR = pg
|
||||
PYP_DIR = python
|
||||
EXT_DIR = src/pg
|
||||
PYP_DIR = src/py
|
||||
|
||||
.PHONY: install
|
||||
.PHONY: run_tests
|
||||
|
||||
84
README.md
84
README.md
@@ -4,9 +4,87 @@ CartoDB Spatial Analysis extension for PostgreSQL.
|
||||
|
||||
## Code organization
|
||||
|
||||
* *pg* contains the PostgreSQL extension source code
|
||||
* *python* Python module
|
||||
* *doc* documentation
|
||||
* *src* source code
|
||||
* - *src/pg* contains the PostgreSQL extension source code
|
||||
* - *src/py* Python module source code
|
||||
* *release* released versions
|
||||
|
||||
## Requirements
|
||||
|
||||
* pip
|
||||
* pip, virtualenv, PostgreSQL
|
||||
|
||||
# Working Process
|
||||
|
||||
## Development
|
||||
|
||||
Work in `src/pg/sql`, `src/py/crankshaft`;
|
||||
use topic branch.
|
||||
|
||||
Update local installation with `sudo make install`
|
||||
(this will update the 'dev' version of the extension in 'src/pg/')
|
||||
|
||||
Run the tests with `PGUSER=postgres make test`
|
||||
|
||||
Update extension in working database with
|
||||
|
||||
* `ALTER EXTENSION crankshaft VERSION TO 'current';`
|
||||
`ALTER EXTENSION crankshaft VERSION TO 'dev';`
|
||||
|
||||
Note: we keep the current development version install as 'dev' always;
|
||||
we update through the 'current' alias to allow changing the extension
|
||||
contents but not the version identifier. This will fail if the
|
||||
changes involve incompatible function changes such as a different
|
||||
return type; in that case the offending function (or the whole extension)
|
||||
should be dropped manually before the update.
|
||||
|
||||
If the extension has not previously been installed in a database
|
||||
we can:
|
||||
|
||||
Add tests...
|
||||
|
||||
* `CREATE EXTENSION crankshaft WITH VERSION 'dev';`
|
||||
|
||||
Test
|
||||
|
||||
Commit, push, create PR, wait for CI tests, CR, ...
|
||||
|
||||
## Release
|
||||
|
||||
To release current development version
|
||||
(working directory should be clean in dev branch)
|
||||
|
||||
(process to be gradually automated)
|
||||
|
||||
For backwards compatible changes (no return value, num of arguments, etc. changes...)
|
||||
new version number increasing either patch level (no new functionality)
|
||||
or minor level (new functionality) => 'X.Y.Z'.
|
||||
Update version in src/pg/crankshaft.control
|
||||
Copy release/crankshaft--current.sql to release/crankshaft--X.Y.Z.sql
|
||||
Prepare incremental downgrade, upgrade scripts....
|
||||
|
||||
Python: ...
|
||||
|
||||
Install the new release
|
||||
|
||||
`make install-release`
|
||||
|
||||
Test the new release
|
||||
|
||||
`make test-release`
|
||||
|
||||
Push the release
|
||||
|
||||
Wait for CI tests
|
||||
|
||||
Merge into master
|
||||
|
||||
Deploy: install extension and python to production hosts,
|
||||
update extension in databases (limited to team users, data observatory, ...)
|
||||
|
||||
Release manager role: ...
|
||||
|
||||
.sql release scripts
|
||||
commit
|
||||
tests: staging....
|
||||
merge, tag, deploy...
|
||||
|
||||
3
pg/.gitignore
vendored
3
pg/.gitignore
vendored
@@ -1,3 +0,0 @@
|
||||
regression.diffs
|
||||
regression.out
|
||||
results/
|
||||
33
pg/Makefile
33
pg/Makefile
@@ -1,33 +0,0 @@
|
||||
# Makefile to generate the extension out of separate sql source files.
|
||||
# Once a version is released, it is not meant to be changed. E.g: once version 0.0.1 is out, it SHALL NOT be changed.
|
||||
|
||||
EXTENSION = crankshaft
|
||||
EXTVERSION = $(shell grep default_version $(EXTENSION).control | sed -e "s/default_version[[:space:]]*=[[:space:]]*'\([^']*\)'/\1/")
|
||||
|
||||
# The new version to be generated from templates
|
||||
NEW_EXTENSION_ARTIFACT = $(EXTENSION)--$(EXTVERSION).sql
|
||||
|
||||
# DATA is a special variable used by postgres build infrastructure
|
||||
# These are the files to be installed in the server shared dir,
|
||||
# for installation from scratch, upgrades and downgrades.
|
||||
# @see http://www.postgresql.org/docs/current/static/extend-pgxs.html
|
||||
DATA = $(NEW_EXTENSION_ARTIFACT)
|
||||
|
||||
SOURCES_DATA_DIR = sql/$(EXTVERSION)
|
||||
SOURCES_DATA = $(wildcard sql/$(EXTVERSION)/*.sql)
|
||||
|
||||
# The extension installation artifacts are stored in the base subdirectory
|
||||
$(NEW_EXTENSION_ARTIFACT): $(SOURCES_DATA)
|
||||
rm -f $@
|
||||
cat $(SOURCES_DATA_DIR)/*.sql >> $@
|
||||
|
||||
REGRESS = $(notdir $(basename $(wildcard test/$(EXTVERSION)/sql/*test.sql)))
|
||||
TEST_DIR = test/$(EXTVERSION)
|
||||
REGRESS_OPTS = --inputdir='$(TEST_DIR)' --outputdir='$(TEST_DIR)'
|
||||
|
||||
PG_CONFIG = pg_config
|
||||
PGXS := $(shell $(PG_CONFIG) --pgxs)
|
||||
include $(PGXS)
|
||||
|
||||
# This seems to be needed at least for PG 9.3.11
|
||||
all: $(DATA)
|
||||
@@ -1,195 +0,0 @@
|
||||
--DO NOT MODIFY THIS FILE, IT IS GENERATED AUTOMATICALLY FROM SOURCES
|
||||
-- Complain if script is sourced in psql, rather than via CREATE EXTENSION
|
||||
\echo Use "CREATE EXTENSION crankshaft" to load this file. \quit
|
||||
-- Internal function.
|
||||
-- Set the seeds of the RNGs (Random Number Generators)
|
||||
-- used internally.
|
||||
CREATE OR REPLACE FUNCTION
|
||||
_cdb_random_seeds (seed_value INTEGER) RETURNS VOID
|
||||
AS $$
|
||||
from crankshaft import random_seeds
|
||||
random_seeds.set_random_seeds(seed_value)
|
||||
$$ LANGUAGE plpythonu;
|
||||
-- Moran's I
|
||||
CREATE OR REPLACE FUNCTION
|
||||
cdb_moran_local (
|
||||
t TEXT,
|
||||
attr TEXT,
|
||||
significance float DEFAULT 0.05,
|
||||
num_ngbrs INT DEFAULT 5,
|
||||
permutations INT DEFAULT 99,
|
||||
geom_column TEXT DEFAULT 'the_geom',
|
||||
id_col TEXT DEFAULT 'cartodb_id',
|
||||
w_type TEXT DEFAULT 'knn')
|
||||
RETURNS TABLE (moran FLOAT, quads TEXT, significance FLOAT, ids INT)
|
||||
AS $$
|
||||
from crankshaft.clustering import moran_local
|
||||
# TODO: use named parameters or a dictionary
|
||||
return moran_local(t, attr, significance, num_ngbrs, permutations, geom_column, id_col, w_type)
|
||||
$$ LANGUAGE plpythonu;
|
||||
|
||||
-- Moran's I Local Rate
|
||||
CREATE OR REPLACE FUNCTION
|
||||
cdb_moran_local_rate(t TEXT,
|
||||
numerator TEXT,
|
||||
denominator TEXT,
|
||||
significance FLOAT DEFAULT 0.05,
|
||||
num_ngbrs INT DEFAULT 5,
|
||||
permutations INT DEFAULT 99,
|
||||
geom_column TEXT DEFAULT 'the_geom',
|
||||
id_col TEXT DEFAULT 'cartodb_id',
|
||||
w_type TEXT DEFAULT 'knn')
|
||||
RETURNS TABLE(moran FLOAT, quads TEXT, significance FLOAT, ids INT, y numeric)
|
||||
AS $$
|
||||
from crankshaft.clustering import moran_local_rate
|
||||
# TODO: use named parameters or a dictionary
|
||||
return moran_local_rate(t, numerator, denominator, significance, num_ngbrs, permutations, geom_column, id_col, w_type)
|
||||
$$ LANGUAGE plpythonu;
|
||||
-- Function by Stuart Lynn for a simple interpolation of a value
|
||||
-- from a polygon table over an arbitrary polygon
|
||||
-- (weighted by the area proportion overlapped)
|
||||
-- Areal weighting is a very simple form of areal interpolation.
|
||||
--
|
||||
-- Parameters:
|
||||
-- * geom a Polygon geometry which defines the area where a value will be
|
||||
-- estimated as the area-weighted sum of a given table/column
|
||||
-- * target_table_name table name of the table that provides the values
|
||||
-- * target_column column name of the column that provides the values
|
||||
-- * schema_name optional parameter to define the schema the target table
|
||||
-- belongs to, which is necessary if it's not in the search_path.
|
||||
-- Note that target_table_name should never include the schema in it.
|
||||
-- Return value:
|
||||
-- Areal-weighted interpolation of the column values over the geometry
|
||||
CREATE OR REPLACE
|
||||
FUNCTION cdb_overlap_sum(geom geometry, target_table_name text, target_column text, schema_name text DEFAULT NULL)
|
||||
RETURNS numeric AS
|
||||
$$
|
||||
DECLARE
|
||||
result numeric;
|
||||
qualified_name text;
|
||||
BEGIN
|
||||
IF schema_name IS NULL THEN
|
||||
qualified_name := Format('%I', target_table_name);
|
||||
ELSE
|
||||
qualified_name := Format('%I.%s', schema_name, target_table_name);
|
||||
END IF;
|
||||
EXECUTE Format('
|
||||
SELECT sum(%I*ST_Area(St_Intersection($1, a.the_geom))/ST_Area(a.the_geom))
|
||||
FROM %s AS a
|
||||
WHERE $1 && a.the_geom
|
||||
', target_column, qualified_name)
|
||||
USING geom
|
||||
INTO result;
|
||||
RETURN result;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
--
|
||||
-- Creates N points randomly distributed around the polygon
|
||||
--
|
||||
-- @param g - the geometry to be turned in to points
|
||||
--
|
||||
-- @param no_points - the number of points to generate
|
||||
--
|
||||
-- @params max_iter_per_point - the function generates points in the polygon's bounding box
|
||||
-- and discards points which don't lie in the polygon. max_iter_per_point specifies how many
|
||||
-- misses per point the function accepts before giving up.
|
||||
--
|
||||
-- Returns: Multipoint with the requested points
|
||||
CREATE OR REPLACE FUNCTION cdb_dot_density(geom geometry , no_points Integer, max_iter_per_point Integer DEFAULT 1000)
|
||||
RETURNS GEOMETRY AS $$
|
||||
DECLARE
|
||||
extent GEOMETRY;
|
||||
test_point Geometry;
|
||||
width NUMERIC;
|
||||
height NUMERIC;
|
||||
x0 NUMERIC;
|
||||
y0 NUMERIC;
|
||||
xp NUMERIC;
|
||||
yp NUMERIC;
|
||||
no_left INTEGER;
|
||||
remaining_iterations INTEGER;
|
||||
points GEOMETRY[];
|
||||
bbox_line GEOMETRY;
|
||||
intersection_line GEOMETRY;
|
||||
BEGIN
|
||||
extent := ST_Envelope(geom);
|
||||
width := ST_XMax(extent) - ST_XMIN(extent);
|
||||
height := ST_YMax(extent) - ST_YMIN(extent);
|
||||
x0 := ST_XMin(extent);
|
||||
y0 := ST_YMin(extent);
|
||||
no_left := no_points;
|
||||
|
||||
LOOP
|
||||
if(no_left=0) THEN
|
||||
EXIT;
|
||||
END IF;
|
||||
yp = y0 + height*random();
|
||||
bbox_line = ST_MakeLine(
|
||||
ST_SetSRID(ST_MakePoint(yp, x0),4326),
|
||||
ST_SetSRID(ST_MakePoint(yp, x0+width),4326)
|
||||
);
|
||||
intersection_line = ST_Intersection(bbox_line,geom);
|
||||
test_point = ST_LineInterpolatePoint(st_makeline(st_linemerge(intersection_line)),random());
|
||||
points := points || test_point;
|
||||
no_left = no_left - 1 ;
|
||||
END LOOP;
|
||||
RETURN ST_Collect(points);
|
||||
END;
|
||||
$$
|
||||
LANGUAGE plpgsql VOLATILE;
|
||||
CREATE OR REPLACE FUNCTION
|
||||
cdb_create_segment (
|
||||
segment_name TEXT,
|
||||
table_name TEXT,
|
||||
column_name TEXT,
|
||||
geoid_column TEXT DEFAULT 'geoid',
|
||||
census_table TEXT DEFAULT 'block_groups'
|
||||
)
|
||||
RETURNS NUMERIC
|
||||
AS $$
|
||||
from crankshaft.segmentation import create_segemnt
|
||||
# TODO: use named parameters or a dictionary
|
||||
return create_segment('table')
|
||||
$$ LANGUAGE plpythonu;
|
||||
|
||||
CREATE OR REPLACE FUNCTION
|
||||
cdb_predict_segment (
|
||||
segment_name TEXT,
|
||||
geoid_column TEXT DEFAULT 'geoid',
|
||||
census_table TEXT DEFAULT 'block_groups'
|
||||
)
|
||||
RETURNS TABLE(geoid TEXT, prediction NUMERIC)
|
||||
AS $$
|
||||
from crankshaft.segmentation import create_segemnt
|
||||
# TODO: use named parameters or a dictionary
|
||||
return create_segment('table')
|
||||
$$ LANGUAGE plpythonu;
|
||||
CREATE OR REPLACE FUNCTION
|
||||
cdb_adaptive_histogram (
|
||||
table_name TEXT,
|
||||
column_name TEXT
|
||||
)
|
||||
RETURNS TABLE (bin_start numeric,bin_end numeric,value numeric)
|
||||
|
||||
AS $$
|
||||
from crankshaft.bayesian_blocks import adaptive_histogram
|
||||
return adaptive_histogram(table_name,column_name)
|
||||
$$ LANGUAGE plpythonu;
|
||||
|
||||
CREATE OR REPLACE FUNCTION
|
||||
cdb_simple_test (
|
||||
)
|
||||
RETURNS NUMERIC
|
||||
|
||||
AS $$
|
||||
return 5
|
||||
$$ LANGUAGE plpythonu;
|
||||
-- Make sure by default there are no permissions for publicuser
|
||||
-- NOTE: this happens at extension creation time, as part of an implicit transaction.
|
||||
-- REVOKE ALL PRIVILEGES ON SCHEMA cdb_crankshaft FROM PUBLIC, publicuser CASCADE;
|
||||
|
||||
-- Grant permissions on the schema to publicuser (but just the schema)
|
||||
GRANT USAGE ON SCHEMA cdb_crankshaft TO publicuser;
|
||||
|
||||
-- Revoke execute permissions on all functions in the schema by default
|
||||
-- REVOKE EXECUTE ON ALL FUNCTIONS IN SCHEMA cdb_crankshaft FROM PUBLIC, publicuser;
|
||||
@@ -1,11 +0,0 @@
|
||||
CREATE OR REPLACE FUNCTION
|
||||
cdb_adaptive_histogram (
|
||||
table_name TEXT,
|
||||
column_name TEXT
|
||||
)
|
||||
RETURNS TABLE (bin_start numeric,bin_end numeric,value numeric)
|
||||
|
||||
AS $$
|
||||
from crankshaft.bayesian_blocks import adaptive_histogram
|
||||
return adaptive_histogram(table_name,column_name)
|
||||
$$ LANGUAGE plpythonu;
|
||||
@@ -1,6 +0,0 @@
|
||||
-- Install dependencies
|
||||
CREATE EXTENSION plpythonu;
|
||||
CREATE EXTENSION postgis;
|
||||
CREATE EXTENSION cartodb;
|
||||
-- Install the extension
|
||||
CREATE EXTENSION crankshaft;
|
||||
@@ -1,11 +0,0 @@
|
||||
# Install the package (needs root privileges)
|
||||
install:
|
||||
pip install ./crankshaft --upgrade
|
||||
|
||||
# Test from source code
|
||||
test:
|
||||
(cd crankshaft && nosetests test/)
|
||||
|
||||
# Test currently installed package
|
||||
testinstalled:
|
||||
nosetests crankshaft/test/
|
||||
@@ -1 +0,0 @@
|
||||
from bayesian_blocks import *
|
||||
@@ -1,84 +0,0 @@
|
||||
import plpy
|
||||
import numpy as np
|
||||
|
||||
|
||||
def adaptive_histogram(table_name,column_name):
|
||||
data = plpy.execute("select {column_name} from {table_name}".format(**locals()))
|
||||
|
||||
data = [float(d['count']) for d in data]
|
||||
plpy.notice(data)
|
||||
vals, bins = np.histogram( data, bins=_bayesian_blocks(data))
|
||||
return zip(vals,bins, bins[1:])
|
||||
|
||||
|
||||
def _bayesian_blocks(t):
|
||||
"""Bayesian Blocks Implementation
|
||||
|
||||
By Jake Vanderplas. License: BSD
|
||||
Based on algorithm outlined in http://adsabs.harvard.edu/abs/2012arXiv1207.5578S
|
||||
|
||||
Parameters
|
||||
----------
|
||||
t : ndarray, length N
|
||||
data to be histogrammed
|
||||
|
||||
Returns
|
||||
-------
|
||||
bins : ndarray
|
||||
array containing the (N+1) bin edges
|
||||
|
||||
Notes
|
||||
-----
|
||||
This is an incomplete implementation: it may fail for some
|
||||
datasets. Alternate fitness functions and prior forms can
|
||||
be found in the paper listed above.
|
||||
"""
|
||||
# copy and sort the array
|
||||
t = np.sort(t)
|
||||
N = t.size
|
||||
|
||||
# create length-(N + 1) array of cell edges
|
||||
edges = np.concatenate([t[:1],
|
||||
0.5 * (t[1:] + t[:-1]),
|
||||
t[-1:]])
|
||||
block_length = t[-1] - edges
|
||||
|
||||
# arrays needed for the iteration
|
||||
nn_vec = np.ones(N)
|
||||
best = np.zeros(N, dtype=float)
|
||||
last = np.zeros(N, dtype=int)
|
||||
|
||||
#-----------------------------------------------------------------
|
||||
# Start with first data cell; add one cell at each iteration
|
||||
#-----------------------------------------------------------------
|
||||
for K in range(N):
|
||||
# Compute the width and count of the final bin for all possible
|
||||
# locations of the K^th changepoint
|
||||
width = block_length[:K + 1] - block_length[K + 1]
|
||||
count_vec = np.cumsum(nn_vec[:K + 1][::-1])[::-1]
|
||||
|
||||
# evaluate fitness function for these possibilities
|
||||
fit_vec = count_vec * (np.log(count_vec) - np.log(width))
|
||||
fit_vec -= 4 # 4 comes from the prior on the number of changepoints
|
||||
fit_vec[1:] += best[:K]
|
||||
|
||||
# find the max of the fitness: this is the K^th changepoint
|
||||
i_max = np.argmax(fit_vec)
|
||||
last[K] = i_max
|
||||
best[K] = fit_vec[i_max]
|
||||
|
||||
#-----------------------------------------------------------------
|
||||
# Recover changepoints by iteratively peeling off the last block
|
||||
#-----------------------------------------------------------------
|
||||
change_points = np.zeros(N, dtype=int)
|
||||
i_cp = N
|
||||
ind = N
|
||||
while True:
|
||||
i_cp -= 1
|
||||
change_points[i_cp] = ind
|
||||
if ind == 0:
|
||||
break
|
||||
ind = last[ind - 1]
|
||||
change_points = change_points[i_cp:]
|
||||
|
||||
return edges[change_points]
|
||||
6
src/pg/.gitignore
vendored
Normal file
6
src/pg/.gitignore
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
regression.diffs
|
||||
regression.out
|
||||
results/
|
||||
crankshaft--dev.sql
|
||||
crankshaft--dev--current.sql
|
||||
crankshaft--current--dev.sql
|
||||
41
src/pg/Makefile
Normal file
41
src/pg/Makefile
Normal file
@@ -0,0 +1,41 @@
|
||||
# Generation of a new development version 'dev' (with an alias 'current' for
|
||||
# updating easily by upgrading to 'current', then 'dev')
|
||||
|
||||
# sudo make install -- generate the 'dev' version from current source
|
||||
# and make it available to PostgreSQL
|
||||
# PGUSER=postgres make installcheck -- test the 'dev' extension
|
||||
|
||||
EXTENSION = crankshaft
|
||||
|
||||
DATA = $(EXTENSION)--dev.sql \
|
||||
$(EXTENSION)--current--dev.sql \
|
||||
$(EXTENSION)--dev--current.sql
|
||||
|
||||
SOURCES_DATA_DIR = sql
|
||||
SOURCES_DATA = $(wildcard $(SOURCES_DATA_DIR)/*.sql)
|
||||
|
||||
$(DATA): $(SOURCES_DATA)
|
||||
cat $(SOURCES_DATA_DIR)/*.sql > $@
|
||||
|
||||
TEST_DIR = test
|
||||
REGRESS = $(notdir $(basename $(wildcard $(TEST_DIR)/sql/*test.sql)))
|
||||
REGRESS_OPTS = --inputdir='$(TEST_DIR)' --outputdir='$(TEST_DIR)'
|
||||
|
||||
PG_CONFIG = pg_config
|
||||
PGXS := $(shell $(PG_CONFIG) --pgxs)
|
||||
include $(PGXS)
|
||||
|
||||
# This seems to be needed at least for PG 9.3.11
|
||||
all: $(DATA)
|
||||
|
||||
# WIP: goals for releasing the extension...
|
||||
|
||||
EXTVERSION = $(shell grep default_version $(EXTENSION).control | sed -e "s/default_version[[:space:]]*=[[:space:]]*'\([^']*\)'/\1/")
|
||||
|
||||
../release/$(EXTENSION).control: $(EXTENSION).control
|
||||
cp $< $@
|
||||
|
||||
release: ../release/$(EXTENSION).control
|
||||
cp $(EXTENSION)--dev.sql $(EXTENSION)--$(EXTVERSION).sql
|
||||
# pending: create upgrade/downgrade scripts,
|
||||
# commit, push, tag....
|
||||
18
src/pg/sql/01_py.sql
Normal file
18
src/pg/sql/01_py.sql
Normal file
@@ -0,0 +1,18 @@
|
||||
-- Use the crankshaft python module
|
||||
CREATE OR REPLACE FUNCTION _cdb_crankshaft_activate_py()
|
||||
RETURNS VOID
|
||||
AS $$
|
||||
# activate virtualenv
|
||||
# TODO: parameterize with environment variables or something
|
||||
venv_path = '/home/ubuntu/crankshaft/src/py/dev'
|
||||
activate_path = venv_path + '/bin/activate_this.py'
|
||||
exec(open(activate_path).read(),
|
||||
dict(__file__=activate_path))
|
||||
|
||||
# import something from virtualenv
|
||||
# from crankshaft import random_seeds
|
||||
|
||||
# do some stuff
|
||||
# random_seeds.set_random_seeds(123)
|
||||
# plpy.notice('here we are')
|
||||
$$ LANGUAGE plpythonu;
|
||||
@@ -4,6 +4,7 @@
|
||||
CREATE OR REPLACE FUNCTION
|
||||
_cdb_random_seeds (seed_value INTEGER) RETURNS VOID
|
||||
AS $$
|
||||
plpy.execute('SELECT cdb_crankshaft._cdb_crankshaft_activate_py()')
|
||||
from crankshaft import random_seeds
|
||||
random_seeds.set_random_seeds(seed_value)
|
||||
$$ LANGUAGE plpythonu;
|
||||
@@ -11,6 +11,7 @@ CREATE OR REPLACE FUNCTION
|
||||
w_type TEXT DEFAULT 'knn')
|
||||
RETURNS TABLE (moran FLOAT, quads TEXT, significance FLOAT, ids INT)
|
||||
AS $$
|
||||
plpy.execute('SELECT cdb_crankshaft._cdb_crankshaft_activate_py()')
|
||||
from crankshaft.clustering import moran_local
|
||||
# TODO: use named parameters or a dictionary
|
||||
return moran_local(t, attr, significance, num_ngbrs, permutations, geom_column, id_col, w_type)
|
||||
@@ -29,6 +30,7 @@ CREATE OR REPLACE FUNCTION
|
||||
w_type TEXT DEFAULT 'knn')
|
||||
RETURNS TABLE(moran FLOAT, quads TEXT, significance FLOAT, ids INT, y numeric)
|
||||
AS $$
|
||||
plpy.execute('SELECT cdb_crankshaft._cdb_crankshaft_activate_py()')
|
||||
from crankshaft.clustering import moran_local_rate
|
||||
# TODO: use named parameters or a dictionary
|
||||
return moran_local_rate(t, numerator, denominator, significance, num_ngbrs, permutations, geom_column, id_col, w_type)
|
||||
@@ -3,4 +3,4 @@ CREATE EXTENSION plpythonu;
|
||||
CREATE EXTENSION postgis;
|
||||
CREATE EXTENSION cartodb;
|
||||
-- Install the extension
|
||||
CREATE EXTENSION crankshaft;
|
||||
CREATE EXTENSION crankshaft VERSION 'dev';
|
||||
@@ -4,4 +4,4 @@ CREATE EXTENSION postgis;
|
||||
CREATE EXTENSION cartodb;
|
||||
|
||||
-- Install the extension
|
||||
CREATE EXTENSION crankshaft;
|
||||
CREATE EXTENSION crankshaft VERSION 'dev';
|
||||
1
python/.gitignore → src/py/.gitignore
vendored
1
python/.gitignore → src/py/.gitignore
vendored
@@ -1 +1,2 @@
|
||||
*.pyc
|
||||
dev/
|
||||
9
src/py/Makefile
Normal file
9
src/py/Makefile
Normal file
@@ -0,0 +1,9 @@
|
||||
# Install the package locally for development
|
||||
install:
|
||||
virtualenv dev
|
||||
./dev/bin/pip install ./crankshaft --upgrade
|
||||
./dev/bin/pip install nose
|
||||
|
||||
# Test development install
|
||||
testinstalled:
|
||||
./dev/bin/nosetests crankshaft/test/
|
||||
@@ -1,3 +1,2 @@
|
||||
import random_seeds
|
||||
import clustering
|
||||
import bayesian_blocks
|
||||
@@ -40,7 +40,7 @@ setup(
|
||||
|
||||
# The choice of component versions is dictated by what's
|
||||
# provisioned in the production servers.
|
||||
install_requires=['pysal==1.11.0','numpy==1.10.1','scipy==0.17.0'],
|
||||
install_requires=['pysal==1.11.0','numpy==1.6.1','scipy==0.17.0'],
|
||||
|
||||
requires=['pysal', 'numpy'],
|
||||
|
||||
Reference in New Issue
Block a user