#!/usr/bin/env python3
# This script is designed to load quasi-static data into a PostGIS database
# for rendering maps. It differs from the usual scripts to do this in that it
# is designed to take its configuration from a file rather than be a series of
# shell commands, and consideration has been given to doing the loading the
# right way.
#
# Some implicit assumptions are
# - Time spent querying (rendering) the data is more valuable than the
#   one-time cost of loading it
# - The script will not be running multiple times in parallel. This is
#   unlikely in practice because the script will typically be called daily
#   or less often, not minutely.
# - Usage patterns will be similar to typical map rendering
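#
# For reference, a minimal configuration file might look like this
# (illustrative names and URL; the keys match the lookups performed below):
#
#   settings:
#     data_dir: data
#     database: gis
#     temp_schema: loading
#     schema: public
#     metadata_table: external_data
#   sources:
#     water_polygons:
#       url: https://example.com/water-polygons.zip
#       archive:
#         format: zip
#         files:
#           - water_polygons.shp
#           - water_polygons.dbf
#           - water_polygons.shx
#           - water_polygons.prj
#       file: water_polygons.shp
#       ogropts:
#         - "-t_srs"
#         - "EPSG:3857"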
import yaml
import os
import re
import argparse
import shutil
# modules for getting data
import zipfile
import requests
import io
# modules for converting and postgres loading
import subprocess
import psycopg2
import logging
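

# The metadata table stores, for each loaded table, the HTTP Last-Modified
# value from the most recent successful download; later runs send it back as
# If-Modified-Since so unchanged sources can be skipped.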
def database_setup(conn, temp_schema, schema, metadata_table):
    with conn.cursor() as cur:
        cur.execute('''CREATE TABLE IF NOT EXISTS "{schema}"."{metadata_table}" (name text primary key, last_modified text);'''
                    .format(schema=schema, metadata_table=metadata_table))
    conn.commit()


class Table:
    def __init__(self, name, conn, temp_schema, schema, metadata_table):
        self._name = name
        self._conn = conn
        self._temp_schema = temp_schema
        self._dst_schema = schema
        self._metadata_table = metadata_table

    # Clean up the temporary schema in preparation for loading
    def clean_temp(self):
        with self._conn.cursor() as cur:
            cur.execute('''DROP TABLE IF EXISTS "{temp_schema}"."{name}"'''
                        .format(name=self._name, temp_schema=self._temp_schema))
        self._conn.commit()

    # Get the last modified date from the metadata table
    def last_modified(self):
        with self._conn.cursor() as cur:
            cur.execute('''SELECT last_modified FROM "{schema}"."{metadata_table}" WHERE name = %s'''
                        .format(schema=self._dst_schema, metadata_table=self._metadata_table),
                        [self._name])
            results = cur.fetchone()
            if results is not None:
                return results[0]

    def index(self):
        with self._conn.cursor() as cur:
            # ogr creates an ogc_fid column we don't need
            cur.execute('''ALTER TABLE "{temp_schema}"."{name}" DROP COLUMN ogc_fid;'''
                        .format(name=self._name, temp_schema=self._temp_schema))
            # Sorting static tables helps performance and reduces size from
            # the column drop above; see osm2pgsql for why this particular
            # geohash invocation
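            # CLUSTER physically rewrites the table in geohash order, grouping
            # nearby features on the same disk pages; the geohash index exists
            # only to drive that rewrite, so it is dropped afterwards.
            # fillfactor=100 packs the GiST index tightly, since the table is
            # read-only once loaded.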
            cur.execute('''
                CREATE INDEX "{name}_geohash"
                    ON "{temp_schema}"."{name}"
                    (ST_GeoHash(ST_Transform(ST_Envelope(way),4326),10) COLLATE "C");
                CLUSTER "{temp_schema}"."{name}" USING "{name}_geohash";
                DROP INDEX "{temp_schema}"."{name}_geohash";
                CREATE INDEX ON "{temp_schema}"."{name}" USING GIST (way) WITH (fillfactor=100);
                ANALYZE "{temp_schema}"."{name}";
            '''.format(name=self._name, temp_schema=self._temp_schema))
        self._conn.commit()
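
    # Swap the newly loaded table into the destination schema and update its
    # metadata row, all in one transaction so readers never see the table
    # missing or half-replaced.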
    def replace(self, new_last_modified):
        with self._conn.cursor() as cur:
            cur.execute('''BEGIN;''')
            cur.execute('''
                DROP TABLE IF EXISTS "{schema}"."{name}";
                ALTER TABLE "{temp_schema}"."{name}" SET SCHEMA "{schema}";
            '''.format(name=self._name, temp_schema=self._temp_schema, schema=self._dst_schema))
            # Check whether the metadata table already has a row for this
            # table, then insert or update accordingly
            cur.execute('''SELECT 1 FROM "{schema}"."{metadata_table}" WHERE name = %s'''
                        .format(schema=self._dst_schema, metadata_table=self._metadata_table),
                        [self._name])
            if cur.rowcount == 0:
                cur.execute('''INSERT INTO "{schema}"."{metadata_table}" (name, last_modified) VALUES (%s, %s)'''
                            .format(schema=self._dst_schema, metadata_table=self._metadata_table),
                            [self._name, new_last_modified])
            else:
                cur.execute('''UPDATE "{schema}"."{metadata_table}" SET last_modified = %s WHERE name = %s'''
                            .format(schema=self._dst_schema, metadata_table=self._metadata_table),
                            [new_last_modified, self._name])
        self._conn.commit()


def main():
    # parse options
    parser = argparse.ArgumentParser(description="Load external data into a database")
    parser.add_argument("-f", "--force", action="store_true", help="Download new data, even if not required")
    parser.add_argument("-c", "--config", action="store", default="external-data.yml", help="Name of configuration file (default external-data.yml)")
    parser.add_argument("-D", "--data", action="store", help="Override data download directory")
    parser.add_argument("-d", "--database", action="store", help="Override database name to connect to")
    parser.add_argument("-H", "--host", action="store", help="Override database server host or socket directory")
    parser.add_argument("-p", "--port", action="store", help="Override database server port")
    parser.add_argument("-U", "--username", action="store", help="Override database user name")
    parser.add_argument("-v", "--verbose", action="store_true", help="Be more verbose. Overrides -q")
    parser.add_argument("-q", "--quiet", action="store_true", help="Only report serious problems")
    opts = parser.parse_args()

    if opts.verbose:
        logging.basicConfig(level=logging.DEBUG)
    elif opts.quiet:
        logging.basicConfig(level=logging.WARNING)
    else:
        logging.basicConfig(level=logging.INFO)

    with open(opts.config) as config_file:
        config = yaml.safe_load(config_file)
        data_dir = opts.data or config["settings"]["data_dir"]
        os.makedirs(data_dir, exist_ok=True)

        # If a DB option is specified neither on the command line nor in the
        # config file, it stays None and libpq picks its own default
        database = opts.database or config["settings"].get("database")
        host = opts.host or config["settings"].get("host")
        port = opts.port or config["settings"].get("port")
        user = opts.username or config["settings"].get("username")

        with requests.Session() as s, \
                psycopg2.connect(database=database,
                                 host=host,
                                 port=port,
                                 user=user) as conn:
            s.headers.update({'User-Agent': 'get-external-data.py/meddo'})

            # DB setup
            database_setup(conn, config["settings"]["temp_schema"],
                           config["settings"]["schema"],
                           config["settings"]["metadata_table"])

            for name, source in config["sources"].items():
                logging.info("Checking table {}".format(name))
                # Don't attempt to handle strange names
                # Even if there was code to escape them properly here, you
                # don't want them in a style, with all the quoting headaches
                # they would cause
                if not re.match('''^[a-zA-Z0-9_]+$''', name):
                    raise RuntimeError("Only ASCII alphanumeric table names are supported")

                workingdir = os.path.join(data_dir, name)
                # Clean up anything left over from an aborted run
                shutil.rmtree(workingdir, ignore_errors=True)
                os.makedirs(workingdir, exist_ok=True)

                this_table = Table(name, conn,
                                   config["settings"]["temp_schema"],
                                   config["settings"]["schema"],
                                   config["settings"]["metadata_table"])
                this_table.clean_temp()

                if not opts.force:
                    headers = {'If-Modified-Since': this_table.last_modified()}
                else:
                    headers = {}
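                # requests drops any header whose value is None, so if the
                # metadata table has no entry yet the request is sent without
                # If-Modified-Since. A 304 Not Modified reply is not an HTTP
                # error, so raise_for_status() passes and the status check
                # below skips the reload.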
                download = s.get(source["url"], headers=headers)
                download.raise_for_status()

                if download.status_code == 200:
                    if "Last-Modified" in download.headers:
                        new_last_modified = download.headers["Last-Modified"]
                    else:
                        new_last_modified = None
                    if "archive" in source and source["archive"]["format"] == "zip":
                        zipf = zipfile.ZipFile(io.BytesIO(download.content))
                        for member in source["archive"]["files"]:
                            zipf.extract(member, workingdir)

                    ogrpg = "PG:dbname={}".format(database)
                    if port is not None:
                        ogrpg = ogrpg + " port={}".format(port)
                    if user is not None:
                        ogrpg = ogrpg + " user={}".format(user)
                    if host is not None:
                        ogrpg = ogrpg + " host={}".format(host)

                    ogrcommand = ["ogr2ogr",
                                  '-f', 'PostgreSQL',
                                  '-lco', 'GEOMETRY_NAME=way',
                                  '-lco', 'SPATIAL_INDEX=FALSE',
                                  '-lco', 'EXTRACT_SCHEMA_FROM_LAYER_NAME=YES',
                                  '-nln', "{}.{}".format(config["settings"]["temp_schema"], name)]
                    if "ogropts" in source:
                        ogrcommand += source["ogropts"]
                    ogrcommand += [ogrpg, os.path.join(workingdir, source["file"])]
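                    # For a hypothetical shapefile source this assembles
                    # something like (illustrative values):
                    #   ogr2ogr -f PostgreSQL -lco GEOMETRY_NAME=way
                    #       -lco SPATIAL_INDEX=FALSE
                    #       -lco EXTRACT_SCHEMA_FROM_LAYER_NAME=YES
                    #       -nln loading.water_polygons -t_srs EPSG:3857
                    #       "PG:dbname=gis" data/water_polygons/water_polygons.shp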
                    logging.debug("running {}".format(subprocess.list2cmdline(ogrcommand)))

                    # ogr2ogr failures raise CalledProcessError, which needs
                    # to be caught
                    try:
                        subprocess.check_output(ogrcommand, stderr=subprocess.PIPE, universal_newlines=True)
                    except subprocess.CalledProcessError as e:
                        # Add more detail on stdout for the logs
                        logging.critical("ogr2ogr returned {} with layer {}".format(e.returncode, name))
                        logging.critical("Command line was {}".format(subprocess.list2cmdline(e.cmd)))
                        logging.critical("Output was\n{}".format(e.output))
                        raise RuntimeError("ogr2ogr error when loading table {}".format(name))

                    this_table.index()
                    this_table.replace(new_last_modified)
                else:
                    logging.info("Table {} did not require updating".format(name))


if __name__ == '__main__':
    main()
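
# Typical invocation (illustrative database name and paths):
#   ./get-external-data.py -c external-data.yml -d gis -D /srv/external-data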