Compare commits
2 Commits
docker
...
population
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
c293781624 | ||
|
|
6fa726bcce |
@@ -12,7 +12,7 @@ name must be created.
|
|||||||
### Version numbers
|
### Version numbers
|
||||||
|
|
||||||
The version of both the SQL extension and the Python package shall
|
The version of both the SQL extension and the Python package shall
|
||||||
follow the [Semantic Versioning 2.0](http://semver.org/) guidelines:
|
follow the [Semantic Versioning 2.0](http://semver.org/) guidelines:
|
||||||
|
|
||||||
* When backwards incompatibility is introduced the major number is incremented
|
* When backwards incompatibility is introduced the major number is incremented
|
||||||
* When functionality is added (in a backwards-compatible manner) the minor number
|
* When functionality is added (in a backwards-compatible manner) the minor number
|
||||||
|
|||||||
187
Dockerfile
187
Dockerfile
@@ -1,187 +0,0 @@
|
|||||||
# PostgreSQL GIS stack
|
|
||||||
#
|
|
||||||
# This image includes the following tools
|
|
||||||
# - PostgreSQL 9.5
|
|
||||||
# - PostGIS 2.2 with raster, topology and sfcgal support
|
|
||||||
# - OGR Foreign Data Wrapper
|
|
||||||
# - PgRouting
|
|
||||||
# - PDAL master
|
|
||||||
# - PostgreSQL PointCloud version master
|
|
||||||
#
|
|
||||||
# Version 1.7
|
|
||||||
|
|
||||||
FROM phusion/baseimage
|
|
||||||
MAINTAINER Vincent Picavet, vincent.picavet@oslandia.com
|
|
||||||
|
|
||||||
# Set correct environment variables.
|
|
||||||
ENV HOME /root
|
|
||||||
|
|
||||||
# Regenerate SSH host keys. baseimage-docker does not contain any, so you
|
|
||||||
# have to do that yourself. You may also comment out this instruction; the
|
|
||||||
# init system will auto-generate one during boot.
|
|
||||||
RUN /etc/my_init.d/00_regen_ssh_host_keys.sh
|
|
||||||
|
|
||||||
# Use baseimage-docker's init system.
|
|
||||||
CMD ["/sbin/my_init"]
|
|
||||||
|
|
||||||
|
|
||||||
RUN apt-get update && apt-get install -y wget ca-certificates
|
|
||||||
|
|
||||||
# Use APT postgresql repositories for 9.5 version
|
|
||||||
RUN echo "deb http://apt.postgresql.org/pub/repos/apt/ wheezy-pgdg main 9.5" > /etc/apt/sources.list.d/pgdg.list && wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add -
|
|
||||||
|
|
||||||
# packages needed for compilation
|
|
||||||
RUN apt-get update
|
|
||||||
|
|
||||||
RUN apt-get install -y autoconf build-essential cmake docbook-mathml docbook-xsl libboost-dev libboost-thread-dev libboost-filesystem-dev libboost-system-dev libboost-iostreams-dev libboost-program-options-dev libboost-timer-dev libcunit1-dev libgdal-dev libgeos++-dev libgeotiff-dev libgmp-dev libjson0-dev libjson-c-dev liblas-dev libmpfr-dev libopenscenegraph-dev libpq-dev libproj-dev libxml2-dev postgresql-server-dev-9.5 xsltproc git build-essential wget
|
|
||||||
|
|
||||||
RUN add-apt-repository ppa:fkrull/deadsnakes &&\
|
|
||||||
apt-get update &&\
|
|
||||||
apt-get install -y python3.2
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# application packages
|
|
||||||
RUN apt-get install -y postgresql-9.5 postgresql-plpython-9.5
|
|
||||||
|
|
||||||
|
|
||||||
# Download and compile CGAL
|
|
||||||
RUN wget https://gforge.inria.fr/frs/download.php/file/32994/CGAL-4.3.tar.gz &&\
|
|
||||||
tar -xzf CGAL-4.3.tar.gz &&\
|
|
||||||
cd CGAL-4.3 &&\
|
|
||||||
mkdir build && cd build &&\
|
|
||||||
cmake .. &&\
|
|
||||||
make -j3 && make install
|
|
||||||
|
|
||||||
# orig sfcgal method
|
|
||||||
# download and compile SFCGAL
|
|
||||||
# RUN git clone https://github.com/Oslandia/SFCGAL.git
|
|
||||||
# RUN cd SFCGAL && cmake . && make -j3 && make install
|
|
||||||
# # cleanup
|
|
||||||
# RUN rm -Rf SFCGAL
|
|
||||||
|
|
||||||
# andrewxhill fix for stable sfcgal version
|
|
||||||
RUN wget https://github.com/Oslandia/SFCGAL/archive/v1.2.0.tar.gz
|
|
||||||
RUN tar -xzf v1.2.0.tar.gz
|
|
||||||
RUN cd SFCGAL-1.2.0 && cmake . && make -j 1 && make install
|
|
||||||
RUN rm -Rf v1.2.0.tar.gz SFCGAL-1.2.0
|
|
||||||
|
|
||||||
# download and install GEOS 3.5
|
|
||||||
RUN wget http://download.osgeo.org/geos/geos-3.5.0.tar.bz2 &&\
|
|
||||||
tar -xjf geos-3.5.0.tar.bz2 &&\
|
|
||||||
cd geos-3.5.0 &&\
|
|
||||||
./configure && make && make install &&\
|
|
||||||
cd .. && rm -Rf geos-3.5.0 geos-3.5.0.tar.bz2
|
|
||||||
|
|
||||||
# Download and compile PostGIS
|
|
||||||
RUN wget http://download.osgeo.org/postgis/source/postgis-2.2.0.tar.gz
|
|
||||||
RUN tar -xzf postgis-2.2.0.tar.gz
|
|
||||||
RUN cd postgis-2.2.0 && ./configure --with-sfcgal=/usr/local/bin/sfcgal-config --with-geos=/usr/local/bin/geos-config
|
|
||||||
RUN cd postgis-2.2.0 && make && make install
|
|
||||||
# cleanup
|
|
||||||
RUN rm -Rf postgis-2.2.0.tar.gz postgis-2.2.0
|
|
||||||
|
|
||||||
# Download and compile pgrouting
|
|
||||||
RUN git clone https://github.com/pgRouting/pgrouting.git &&\
|
|
||||||
cd pgrouting &&\
|
|
||||||
mkdir build && cd build &&\
|
|
||||||
cmake -DWITH_DOC=OFF -DWITH_DD=ON .. &&\
|
|
||||||
make -j3 && make install
|
|
||||||
# cleanup
|
|
||||||
RUN rm -Rf pgrouting
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# Download and compile ogr_fdw
|
|
||||||
RUN git clone https://github.com/pramsey/pgsql-ogr-fdw.git &&\
|
|
||||||
cd pgsql-ogr-fdw &&\
|
|
||||||
make && make install &&\
|
|
||||||
cd .. && rm -Rf pgsql-ogr-fdw
|
|
||||||
|
|
||||||
# Compile PDAL
|
|
||||||
RUN git clone https://github.com/PDAL/PDAL.git pdal
|
|
||||||
RUN mkdir PDAL-build && \
|
|
||||||
cd PDAL-build && \
|
|
||||||
cmake ../pdal && \
|
|
||||||
make -j3 && \
|
|
||||||
make install
|
|
||||||
# cleanup
|
|
||||||
RUN rm -Rf pdal && rm -Rf PDAL-build
|
|
||||||
|
|
||||||
# Compile PointCloud
|
|
||||||
RUN git clone https://github.com/pramsey/pointcloud.git
|
|
||||||
RUN cd pointcloud && ./autogen.sh && ./configure && make -j3 && make install
|
|
||||||
# cleanup
|
|
||||||
RUN rm -Rf pointcloud
|
|
||||||
|
|
||||||
|
|
||||||
RUN git clone https://github.com/CartoDB/cartodb-postgresql.git &&\
|
|
||||||
cd cartodb-postgresql &&\
|
|
||||||
make all install &&\
|
|
||||||
cd .. && rm -Rf cartodb-postgresql
|
|
||||||
|
|
||||||
# install pip
|
|
||||||
|
|
||||||
RUN apt-get -y install python-dev python-pip liblapack-dev gfortran libyaml-dev
|
|
||||||
|
|
||||||
RUN pip install numpy pandas scipy theano keras sklearn
|
|
||||||
RUN pip install pysal
|
|
||||||
|
|
||||||
# get compiled libraries recognized
|
|
||||||
RUN ldconfig
|
|
||||||
|
|
||||||
# clean packages
|
|
||||||
|
|
||||||
# all -dev packages
|
|
||||||
# RUN apt-get remove -y --purge autotools-dev libgeos-dev libgif-dev libgl1-mesa-dev libglu1-mesa-dev libgnutls-dev libgpg-error-dev libhdf4-alt-dev libhdf5-dev libicu-dev libidn11-dev libjasper-dev libjbig-dev libjpeg8-dev libjpeg-dev libjpeg-turbo8-dev libkrb5-dev libldap2-dev libltdl-dev liblzma-dev libmysqlclient-dev libnetcdf-dev libopenthreads-dev libp11-kit-dev libpng12-dev libpthread-stubs0-dev librtmp-dev libspatialite-dev libsqlite3-dev libssl-dev libstdc++-4.8-dev libtasn1-6-dev libtiff5-dev libwebp-dev libx11-dev libx11-xcb-dev libxau-dev libxcb1-dev libxcb-dri2-0-dev libxcb-dri3-dev libxcb-glx0-dev libxcb-present-dev libxcb-randr0-dev libxcb-render0-dev libxcb-shape0-dev libxcb-sync-dev libxcb-xfixes0-dev libxdamage-dev libxdmcp-dev libxerces-c-dev libxext-dev libxfixes-dev libxshmfence-dev libxxf86vm-dev linux-libc-dev manpages-dev mesa-common-dev libgcrypt11-dev unixodbc-dev uuid-dev x11proto-core-dev x11proto-damage-dev x11proto-dri2-dev x11proto-fixes-dev x11proto-gl-dev x11proto-input-dev x11proto-kb-dev x11proto-xext-dev x11proto-xf86vidmode-dev xtrans-dev zlib1g-dev
|
|
||||||
|
|
||||||
# installed packages
|
|
||||||
# RUN apt-get remove -y --purge autoconf build-essential cmake docbook-mathml docbook-xsl libboost-dev libboost-filesystem-dev libboost-timer-dev libcgal-dev libcunit1-dev libgdal-dev libgeos++-dev libgeotiff-dev libgmp-dev libjson0-dev libjson-c-dev liblas-dev libmpfr-dev libopenscenegraph-dev libpq-dev libproj-dev libxml2-dev postgresql-server-dev-9.5 xsltproc git build-essential wget
|
|
||||||
|
|
||||||
# additional compilation packages
|
|
||||||
# RUN apt-get remove -y --purge automake m4 make
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# ---------- SETUP --------------
|
|
||||||
|
|
||||||
# add a baseimage PostgreSQL init script
|
|
||||||
RUN mkdir /etc/service/postgresql
|
|
||||||
ADD postgresql.sh /etc/service/postgresql/run
|
|
||||||
|
|
||||||
# Adjust PostgreSQL configuration so that remote connections to the
|
|
||||||
# database are possible.
|
|
||||||
RUN echo "host all all 0.0.0.0/0 md5" >> /etc/postgresql/9.5/main/pg_hba.conf
|
|
||||||
|
|
||||||
# And add ``listen_addresses`` to ``/etc/postgresql/9.5/main/postgresql.conf``
|
|
||||||
RUN echo "listen_addresses='*'" >> /etc/postgresql/9.5/main/postgresql.conf
|
|
||||||
|
|
||||||
# Expose PostgreSQL
|
|
||||||
EXPOSE 5432
|
|
||||||
|
|
||||||
# Add VOLUMEs to allow backup of config, logs and databases
|
|
||||||
VOLUME ["/data", "/etc/postgresql", "/var/log/postgresql", "/var/lib/postgresql"]
|
|
||||||
|
|
||||||
# Add pip
|
|
||||||
|
|
||||||
|
|
||||||
# http://bugs.python.org/issue19846
|
|
||||||
# > At the moment, setting "LANG=C" on a Linux system *fundamentally breaks Python 3*, and that's not OK.
|
|
||||||
ENV LANG C.UTF-8
|
|
||||||
|
|
||||||
|
|
||||||
# add database setup upon image start
|
|
||||||
ADD pgpass /root/.pgpass
|
|
||||||
RUN chmod 700 /root/.pgpass
|
|
||||||
RUN mkdir -p /etc/my_init.d
|
|
||||||
ADD init_db_script.sh /etc/my_init.d/init_db_script.sh
|
|
||||||
ADD init_db.sh /root/init_db.sh
|
|
||||||
ADD run_tests.sh /root/run_tests.sh
|
|
||||||
ADD run_tests.sh /root/run_server.sh
|
|
||||||
|
|
||||||
# ---------- Final cleanup --------------
|
|
||||||
#
|
|
||||||
# Clean up APT when done.
|
|
||||||
# RUN apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
|
|
||||||
28
README.md
28
README.md
@@ -7,33 +7,7 @@ CartoDB Spatial Analysis extension for PostgreSQL.
|
|||||||
* *pg* contains the PostgreSQL extension source code
|
* *pg* contains the PostgreSQL extension source code
|
||||||
* *python* Python module
|
* *python* Python module
|
||||||
|
|
||||||
## Running with Docker
|
FIXME: should it be `./extension` and `./lib/python' ?
|
||||||
|
|
||||||
Crankshaft comes with a Dockerfile to build and run a sandboxed machine for testing
|
|
||||||
and development.
|
|
||||||
|
|
||||||
First you have to build the docker container
|
|
||||||
|
|
||||||
docker build -t crankshaft .
|
|
||||||
|
|
||||||
To run the pg tests run
|
|
||||||
|
|
||||||
docker run -it --rm -v $(pwd):/crankshaft crankshaft /root/run_tests.sh
|
|
||||||
|
|
||||||
if there are failures it will dump the reason to the screen.
|
|
||||||
|
|
||||||
To run a server you can develop on run
|
|
||||||
|
|
||||||
docker run -it --rm -v $(pwd):/crankshaft -p $(docker-machine ip default):5432:5432 /root/run_server.sh
|
|
||||||
|
|
||||||
and connect from your host using
|
|
||||||
|
|
||||||
psql -U pggis -h $(docker-machine ip default) -p 5432 -W
|
|
||||||
|
|
||||||
the password is pggis
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
## Requirements
|
## Requirements
|
||||||
|
|
||||||
|
|||||||
77
init_db.sh
77
init_db.sh
@@ -1,77 +0,0 @@
|
|||||||
#!/bin/bash
|
|
||||||
|
|
||||||
# wait for pg server to be ready
|
|
||||||
echo "Waiting for PostgreSQL to run..."
|
|
||||||
sleep 1
|
|
||||||
while ! /usr/bin/pg_isready -q
|
|
||||||
do
|
|
||||||
sleep 1
|
|
||||||
echo -n "."
|
|
||||||
done
|
|
||||||
|
|
||||||
# PostgreSQL running
|
|
||||||
echo "PostgreSQL running, initializing database."
|
|
||||||
|
|
||||||
# PostgreSQL user
|
|
||||||
#
|
|
||||||
# create postgresql user pggis
|
|
||||||
|
|
||||||
/sbin/setuser postgres /usr/bin/psql -c "CREATE USER pggis with SUPERUSER PASSWORD 'pggis';"
|
|
||||||
/sbin/setuser postgres /usr/bin/psql -c "CREATE role publicuser;"
|
|
||||||
# == Auto restore dumps ==
|
|
||||||
#
|
|
||||||
# If we find some postgresql dumps in /data/restore, then we load it
|
|
||||||
# in new databases
|
|
||||||
shopt -s nullglob
|
|
||||||
for f in /data/restore/*.backup
|
|
||||||
do
|
|
||||||
echo "Found database dump to restore : $f"
|
|
||||||
DBNAME=$(basename -s ".backup" "$f")
|
|
||||||
echo "Creating a new database $DBNAME.."
|
|
||||||
/usr/bin/psql -U pggis -h localhost -c "CREATE DATABASE $DBNAME WITH OWNER = pggis ENCODING = 'UTF8' TEMPLATE = template0 CONNECTION LIMIT = -1;" postgres
|
|
||||||
/usr/bin/psql -U pggis -h localhost -w -c "CREATE EXTENSION citext; CREATE EXTENSION pg_trgm; CREATE EXTENSION btree_gist; CREATE EXTENSION hstore; CREATE EXTENSION fuzzystrmatch; CREATE EXTENSION unaccent; CREATE EXTENSION postgres_fdw; CREATE EXTENSION pgcrypto; CREATE EXTENSION plpythonu; CREATE EXTENSION postgis; CREATE EXTENSION postgis_topology; CREATE EXTENSION pgrouting; CREATE EXTENSION pointcloud; CREATE EXTENSION pointcloud_postgis; CREATE EXTENSION postgis_sfcgal; drop type if exists texture; create type texture as (url text,uv float[][]);CREATE ROLE publicuser;" $DBNAME
|
|
||||||
|
|
||||||
# /usr/bin/psql -U pggis -h localhost -w -f /usr/share/postgresql/9.5/contrib/postgis-2.1/sfcgal.sql -d $DBNAME
|
|
||||||
|
|
||||||
|
|
||||||
echo "Restoring database $DBNAME.."
|
|
||||||
/usr/bin/pg_restore -U pggis -h localhost -d $DBNAME -w "$f"
|
|
||||||
|
|
||||||
echo "creating public user"
|
|
||||||
/usr/bin/psql -U pggis -h localhost -w -c "CREATE ROLE publicuser;"
|
|
||||||
|
|
||||||
echo "Restore done."
|
|
||||||
|
|
||||||
done
|
|
||||||
|
|
||||||
# == Auto restore SQL backups ==
|
|
||||||
#
|
|
||||||
# If we find some postgresql sql scripts /data/restore, then we load it
|
|
||||||
# in new databases
|
|
||||||
shopt -s nullglob
|
|
||||||
for f in /data/restore/*.sql
|
|
||||||
do
|
|
||||||
echo "Found database SQL dump to restore : $f"
|
|
||||||
DBNAME=$(basename -s ".sql" "$f")
|
|
||||||
echo "Creating a new database $DBNAME.."
|
|
||||||
/usr/bin/psql -U pggis -h localhost -c "CREATE DATABASE $DBNAME WITH OWNER = pggis ENCODING = 'UTF8' TEMPLATE = template0 CONNECTION LIMIT = -1;" postgres
|
|
||||||
/usr/bin/psql -U pggis -h localhost -w -c "CREATE EXTENSION citext; CREATE EXTENSION pg_trgm; CREATE EXTENSION btree_gist; CREATE EXTENSION hstore; CREATE EXTENSION fuzzystrmatch; CREATE EXTENSION unaccent; CREATE EXTENSION postgres_fdw; CREATE EXTENSION pgcrypto; CREATE EXTENSION plpythonu; CREATE EXTENSION postgis; CREATE EXTENSION postgis_topology; CREATE EXTENSION postgis_sfcgal; CREATE EXTENSION pgrouting; CREATE EXTENSION pointcloud; CREATE EXTENSION pointcloud_postgis; drop type if exists texture; create type texture as (url text,uv float[][]);" $DBNAME
|
|
||||||
# /usr/bin/psql -U pggis -h localhost -w -f /usr/share/postgresql/9.5/contrib/postgis-2.1/sfcgal.sql -d $DBNAME
|
|
||||||
echo "Restoring database $DBNAME.."
|
|
||||||
/usr/bin/psql -U pggis -h localhost -d $DBNAME -w -f "$f"
|
|
||||||
echo "Restore done."
|
|
||||||
done
|
|
||||||
|
|
||||||
# == create new database pggis ==
|
|
||||||
echo "Creating a new empty database..."
|
|
||||||
# create user and main database
|
|
||||||
/usr/bin/psql -U pggis -h localhost -c "CREATE DATABASE pggis WITH OWNER = pggis ENCODING = 'UTF8' TEMPLATE = template0 CONNECTION LIMIT = -1;" postgres
|
|
||||||
|
|
||||||
# activate all needed extension in pggis database
|
|
||||||
/usr/bin/psql -U pggis -h localhost -w -c "CREATE EXTENSION citext; CREATE EXTENSION pg_trgm; CREATE EXTENSION btree_gist; CREATE EXTENSION hstore; CREATE EXTENSION fuzzystrmatch; CREATE EXTENSION unaccent; CREATE EXTENSION postgres_fdw; CREATE EXTENSION pgcrypto; CREATE EXTENSION plpythonu; CREATE EXTENSION postgis; CREATE EXTENSION postgis_topology; CREATE EXTENSION postgis_sfcgal; CREATE EXTENSION pgrouting; CREATE EXTENSION pointcloud; CREATE EXTENSION pointcloud_postgis; drop type if exists texture;
|
|
||||||
create type texture as (url text,uv float[][]);" pggis
|
|
||||||
#/usr/bin/psql -U pggis -h localhost -w -f /usr/share/postgresql/9.5/contrib/postgis-2.1/sfcgal.sql -d pggis
|
|
||||||
|
|
||||||
echo "Database initialized. Connect from host with :"
|
|
||||||
echo "psql -h localhost -p <PORT> -U pggis -W pggis"
|
|
||||||
echo "Get <PORT> value with 'docker ps'"
|
|
||||||
@@ -1,3 +0,0 @@
|
|||||||
#!/bin/sh
|
|
||||||
# Script for my_init.d, so as to run database init without blocking
|
|
||||||
/root/init_db.sh &
|
|
||||||
@@ -28,6 +28,3 @@ REGRESS_OPTS = --inputdir='$(TEST_DIR)' --outputdir='$(TEST_DIR)'
|
|||||||
PG_CONFIG = pg_config
|
PG_CONFIG = pg_config
|
||||||
PGXS := $(shell $(PG_CONFIG) --pgxs)
|
PGXS := $(shell $(PG_CONFIG) --pgxs)
|
||||||
include $(PGXS)
|
include $(PGXS)
|
||||||
|
|
||||||
# This seems to be needed at least for PG 9.3.11
|
|
||||||
all: $(DATA)
|
|
||||||
|
|||||||
@@ -1,6 +1,3 @@
|
|||||||
--DO NOT MODIFY THIS FILE, IT IS GENERATED AUTOMATICALLY FROM SOURCES
|
|
||||||
-- Complain if script is sourced in psql, rather than via CREATE EXTENSION
|
|
||||||
\echo Use "CREATE EXTENSION crankshaft" to load this file. \quit
|
|
||||||
-- Internal function.
|
-- Internal function.
|
||||||
-- Set the seeds of the RNGs (Random Number Generators)
|
-- Set the seeds of the RNGs (Random Number Generators)
|
||||||
-- used internally.
|
-- used internally.
|
||||||
@@ -136,13 +133,4 @@ BEGIN
|
|||||||
RETURN ST_Collect(points);
|
RETURN ST_Collect(points);
|
||||||
END;
|
END;
|
||||||
$$
|
$$
|
||||||
LANGUAGE plpgsql VOLATILE;
|
LANGUAGE plpgsql VOLATILE
|
||||||
-- Make sure by default there are no permissions for publicuser
|
|
||||||
-- NOTE: this happens at extension creation time, as part of an implicit transaction.
|
|
||||||
-- REVOKE ALL PRIVILEGES ON SCHEMA cdb_crankshaft FROM PUBLIC, publicuser CASCADE;
|
|
||||||
|
|
||||||
-- Grant permissions on the schema to publicuser (but just the schema)
|
|
||||||
GRANT USAGE ON SCHEMA cdb_crankshaft TO publicuser;
|
|
||||||
|
|
||||||
-- Revoke execute permissions on all functions in the schema by default
|
|
||||||
-- REVOKE EXECUTE ON ALL FUNCTIONS IN SCHEMA cdb_crankshaft FROM PUBLIC, publicuser;
|
|
||||||
|
|||||||
@@ -1,3 +0,0 @@
|
|||||||
--DO NOT MODIFY THIS FILE, IT IS GENERATED AUTOMATICALLY FROM SOURCES
|
|
||||||
-- Complain if script is sourced in psql, rather than via CREATE EXTENSION
|
|
||||||
\echo Use "CREATE EXTENSION crankshaft" to load this file. \quit
|
|
||||||
@@ -51,4 +51,4 @@ BEGIN
|
|||||||
RETURN ST_Collect(points);
|
RETURN ST_Collect(points);
|
||||||
END;
|
END;
|
||||||
$$
|
$$
|
||||||
LANGUAGE plpgsql VOLATILE;
|
LANGUAGE plpgsql VOLATILE
|
||||||
|
|||||||
@@ -1,9 +0,0 @@
|
|||||||
-- Make sure by default there are no permissions for publicuser
|
|
||||||
-- NOTE: this happens at extension creation time, as part of an implicit transaction.
|
|
||||||
-- REVOKE ALL PRIVILEGES ON SCHEMA cdb_crankshaft FROM PUBLIC, publicuser CASCADE;
|
|
||||||
|
|
||||||
-- Grant permissions on the schema to publicuser (but just the schema)
|
|
||||||
GRANT USAGE ON SCHEMA cdb_crankshaft TO publicuser;
|
|
||||||
|
|
||||||
-- Revoke execute permissions on all functions in the schema by default
|
|
||||||
-- REVOKE EXECUTE ON ALL FUNCTIONS IN SCHEMA cdb_crankshaft FROM PUBLIC, publicuser;
|
|
||||||
138
pg/sql/0.0.1/population.sql
Normal file
138
pg/sql/0.0.1/population.sql
Normal file
@@ -0,0 +1,138 @@
|
|||||||
|
-- Function to obtain an estimate of the population living inside
|
||||||
|
-- an area (polygon) from the CartoDB Data Observatory
|
||||||
|
CREATE OR REPLACE FUNCTION cdb_population(area geometry)
|
||||||
|
RETURNS NUMERIC AS $$
|
||||||
|
DECLARE
|
||||||
|
georef_column TEXT;
|
||||||
|
table_id TEXT;
|
||||||
|
tag_value TEXT;
|
||||||
|
table_name TEXT;
|
||||||
|
column_name TEXT;
|
||||||
|
population NUMERIC;
|
||||||
|
BEGIN
|
||||||
|
|
||||||
|
-- Note: comments contain pseudo-code that should be implemented
|
||||||
|
|
||||||
|
-- Register metadata tables:
|
||||||
|
-- This would require super-user privileges
|
||||||
|
/*
|
||||||
|
SELECT cdb_add_remote_table('observatory', 'bmd_column_table');
|
||||||
|
SELECT cdb_add_remote_table('observatory', 'bmd_column_2_column');
|
||||||
|
SELECT cdb_add_remote_table('observatory', 'bmd_table');
|
||||||
|
SELECT cdb_add_remote_table('observatory', 'bmd_column_table');
|
||||||
|
SELECT cdb_add_remote_table('observatory', 'bmd_column_tag');
|
||||||
|
SELECT cdb_add_remote_table('observatory', 'bmd_tag');
|
||||||
|
*/
|
||||||
|
|
||||||
|
tag_value := 'population';
|
||||||
|
|
||||||
|
|
||||||
|
-- Determine the georef column id to be used: it must have type 'geometry',
|
||||||
|
-- the maximum weight.
|
||||||
|
-- TODO: in general, multiple columns with maximal weight could be found;
|
||||||
|
-- we should use the timespan of the table to disambiguate (choose the
|
||||||
|
-- most recent). Also a rank of geometry columns should be introduced to
|
||||||
|
-- select the greatest resolution available.
|
||||||
|
/*
|
||||||
|
WITH selected_tables AS (
|
||||||
|
-- Find tables that have population columns and cover the input area
|
||||||
|
SELECT tab.id AS id
|
||||||
|
FROM observatory.bmd_column col,
|
||||||
|
observatory.bmd_column_table coltab,
|
||||||
|
observatory.bmd_table tab,
|
||||||
|
observatory.bmd_tag tag,
|
||||||
|
observatory.bmd_column_tag coltag
|
||||||
|
WHERE coltab.column_id = col.id
|
||||||
|
AND coltab.table_id = tab.id
|
||||||
|
AND coltag.tag_id = tag.id
|
||||||
|
AND coltag.column_id = col.id
|
||||||
|
AND tag.name ILIKE tag_value
|
||||||
|
AND tab.id = table_id
|
||||||
|
AND tab.bounds && area;
|
||||||
|
)
|
||||||
|
SELECT
|
||||||
|
FROM bmd_column col
|
||||||
|
JOIN bmd_table tab ON col.table_id = tab.id
|
||||||
|
WHERE type = 'geometry'
|
||||||
|
AND tab.id IN (selected_tables)
|
||||||
|
ORDER BY weight DESC LIMIT 1;
|
||||||
|
*/
|
||||||
|
georef_column := '"us.census.tiger".block_group_2013';
|
||||||
|
|
||||||
|
-- Now we will query the metadata to find which actual tables correspond
|
||||||
|
-- to this datasource and resolution/timespan
|
||||||
|
-- and choose the 'parent' or more general of them.
|
||||||
|
/*
|
||||||
|
SELECT from_table_geoid.id data_table_id
|
||||||
|
FROM observatory.bmd_column_table from_column_table_geoid,
|
||||||
|
observatory.bmd_column_table to_column_table_geoid,
|
||||||
|
observatory.bmd_column_2_column rel,
|
||||||
|
observatory.bmd_column_table to_column_table_geom,
|
||||||
|
observatory.bmd_table from_table_geoid,
|
||||||
|
observatory.bmd_table to_table_geoid,
|
||||||
|
observatory.bmd_table to_table_geom
|
||||||
|
WHERE from_column_table_geoid.column_id = to_column_table_geoid.column_id
|
||||||
|
AND to_column_table_geoid.column_id = rel.from_id
|
||||||
|
AND rel.reltype = 'geom_ref'
|
||||||
|
AND rel.to_id = to_column_table_geom.column_id
|
||||||
|
AND to_column_table_geom.column_id = georef_column
|
||||||
|
AND from_table_geoid.id = from_column_table_geoid.table_id
|
||||||
|
AND to_table_geoid.id = to_column_table_geoid.table_id
|
||||||
|
AND to_table_geom.id = to_column_table_geom.table_id
|
||||||
|
AND from_table_geoid.bounds && area
|
||||||
|
ORDER by from_table_geoid.timespan desc
|
||||||
|
INTO table_id;
|
||||||
|
*/
|
||||||
|
table_id := '"us.census.acs".extract_2013_5yr_block_group';
|
||||||
|
|
||||||
|
-- Next will fetch the columns of that table that are tagged as population:
|
||||||
|
-- and get the more general one (not having a parent or denominator)
|
||||||
|
/*
|
||||||
|
WITH column_ids AS (
|
||||||
|
SELECT col.id AS id
|
||||||
|
FROM observatory.bmd_column col,
|
||||||
|
observatory.bmd_column_table coltab,
|
||||||
|
observatory.bmd_table tab,
|
||||||
|
observatory.bmd_tag tag,
|
||||||
|
observatory.bmd_column_tag coltag
|
||||||
|
WHERE coltab.column_id = col.id
|
||||||
|
AND coltab.table_id = tab.id
|
||||||
|
AND coltag.tag_id = tag.id
|
||||||
|
AND coltag.column_id = col.id
|
||||||
|
AND tag.name ILIKE tag_value
|
||||||
|
AND tab.id = table_id;
|
||||||
|
),
|
||||||
|
excluded_column_ids AS (
|
||||||
|
SELECT from_id AS id
|
||||||
|
FROM observatory.bmd_column_2_column
|
||||||
|
WHERE from_id in (column_ids)
|
||||||
|
AND reltype in ('parent', 'denominator')
|
||||||
|
AND to_id in (column_ids)
|
||||||
|
),
|
||||||
|
SELECT bmd_table.tablename, bmd_column_table.colname
|
||||||
|
FROM observatory.bmd_column_table,
|
||||||
|
observatory.bmd_table
|
||||||
|
WHERE bmd_column_table.table_id = bmd_table.id
|
||||||
|
AND bmd_column_table.column_id IN (column_ids)
|
||||||
|
AND NOT bmd_column_table.column_id IN (exclude_column_ids)
|
||||||
|
INTO (table_name, column_name);
|
||||||
|
*/
|
||||||
|
table_name := 'us_census_acs2013_5yr_block_group';
|
||||||
|
column_name := 'total_pop';
|
||||||
|
|
||||||
|
-- Register the foreign table
|
||||||
|
-- This would require super-user privileges
|
||||||
|
-- SELECT cdb_add_remote_table('observatory', table_name);
|
||||||
|
|
||||||
|
-- Perform the query
|
||||||
|
SELECT cdb_crankshaft.cdb_overlap_sum(
|
||||||
|
area,
|
||||||
|
table_name,
|
||||||
|
column_name,
|
||||||
|
schema_name := 'observatory')
|
||||||
|
INTO population;
|
||||||
|
|
||||||
|
RETURN population;
|
||||||
|
END;
|
||||||
|
$$
|
||||||
|
LANGUAGE plpgsql VOLATILE
|
||||||
@@ -1,18 +0,0 @@
|
|||||||
SELECT cdb_crankshaft._cdb_random_seeds(1234);
|
|
||||||
|
|
||||||
-- Use regular user role
|
|
||||||
SET ROLE test_regular_user;
|
|
||||||
|
|
||||||
-- Add to the search path the schema
|
|
||||||
SET search_path TO public,cartodb,cdb_crankshaft;
|
|
||||||
|
|
||||||
-- Exercise public functions
|
|
||||||
SELECT ppoints.code, m.quads
|
|
||||||
FROM ppoints
|
|
||||||
JOIN cdb_moran_local('ppoints', 'value') m
|
|
||||||
ON ppoints.cartodb_id = m.ids
|
|
||||||
ORDER BY ppoints.code;
|
|
||||||
SELECT round(cdb_overlap_sum(
|
|
||||||
'0106000020E61000000100000001030000000100000004000000FFFFFFFFFF3604C09A0B9ECEC42E444000000000C060FBBF30C7FD70E01D44400000000040AD02C06481F1C8CD034440FFFFFFFFFF3604C09A0B9ECEC42E4440'::geometry,
|
|
||||||
'values', 'value'
|
|
||||||
), 2);
|
|
||||||
@@ -1,5 +0,0 @@
|
|||||||
#!/bin/sh
|
|
||||||
# `/sbin/setuser postgres` runs the given command as the user `postgres`.
|
|
||||||
# If you omit that part, the command will be run as root.
|
|
||||||
rm -rf /etc/ssl/private-copy; mkdir /etc/ssl/private-copy; mv /etc/ssl/private/* /etc/ssl/private-copy/; rm -r /etc/ssl/private; mv /etc/ssl/private-copy /etc/ssl/private; chmod -R 0700 /etc/ssl/private; chown -R postgres /etc/ssl/private
|
|
||||||
exec /sbin/setuser postgres /usr/lib/postgresql/9.5/bin/postgres -D /var/lib/postgresql/9.5/main -c config_file=/etc/postgresql/9.5/main/postgresql.conf >> /var/log/postgresql.log 2>&1
|
|
||||||
@@ -10,7 +10,7 @@ from setuptools import setup, find_packages
|
|||||||
setup(
|
setup(
|
||||||
name='crankshaft',
|
name='crankshaft',
|
||||||
|
|
||||||
version='0.0.1',
|
version='0.0.01',
|
||||||
|
|
||||||
description='CartoDB Spatial Analysis Python Library',
|
description='CartoDB Spatial Analysis Python Library',
|
||||||
|
|
||||||
|
|||||||
@@ -1,14 +0,0 @@
|
|||||||
#!/bin/bash
|
|
||||||
/sbin/my_init &
|
|
||||||
|
|
||||||
echo "Waiting for PostgreSQL to run..."
|
|
||||||
sleep 1
|
|
||||||
while ! /usr/bin/pg_isready -q
|
|
||||||
do
|
|
||||||
sleep 1
|
|
||||||
echo -n "."
|
|
||||||
done
|
|
||||||
|
|
||||||
cd /crankshaft/pg
|
|
||||||
make install
|
|
||||||
fg
|
|
||||||
23
run_tests.sh
23
run_tests.sh
@@ -1,23 +0,0 @@
|
|||||||
#!/bin/bash
|
|
||||||
/sbin/my_init &
|
|
||||||
|
|
||||||
echo "Waiting for PostgreSQL to run..."
|
|
||||||
sleep 1
|
|
||||||
while ! /usr/bin/pg_isready -q
|
|
||||||
do
|
|
||||||
sleep 1
|
|
||||||
echo -n "."
|
|
||||||
done
|
|
||||||
|
|
||||||
cd /crankshaft/pg
|
|
||||||
make install
|
|
||||||
PGUSER=pggis PGPASSWORD=pggis PGHOST=localhost make installcheck
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
if [ "$?" -eq "0" ]
|
|
||||||
then
|
|
||||||
echo "PASSED"
|
|
||||||
else
|
|
||||||
cat /crankshaft/pg/test/0.0.1/regression.diffs
|
|
||||||
fi
|
|
||||||
Reference in New Issue
Block a user