This repository has been archived by the owner on Sep 6, 2022. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy path05_inclusion_area_geometries.py
150 lines (134 loc) · 6.07 KB
/
05_inclusion_area_geometries.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
# Script:  05_inclusion_area_geometries.py
# Purpose: Create ABS and non-ABS linkage tables using 2016 data sourced from ABS
#
# Parcel address points are already associated with Meshblock in the sample_point_feature table
# Further linkage with the abs_linkage table (actually, a reduced version of the existing mb_dwellings)
# facilitates aggregation to ABS area units such as SA1, SA2, SA3, SA4.
#
# The non-ABS linkage table associated points with the suburb and LGA in which they are located, according
# to ABS sourced spatial features.
#
# Regarding linkage of addresses with non-ABS structures, while the ABS provides some
# correspondence tables between areas, (e.g. SA2 2016 to LGA 2016) this will not be as accurate
# for our purposes as taking an address point location and evaluating the polygon it intersects.
# There are pitfalls in this approach (e.g. if a point lies exactly on a boundary), however
# this is par for the course when generalising unique units into aggregate categories
# (ie. points to averages, sums or variances within contiguous areas).
#
# Author: Carl Higgs
# Date: 20180710
# Import required modules
import subprocess as sp # for executing external commands (e.g. pgsql2shp or ogr2ogr)
import numpy
import time
import psycopg2
from progressor import progressor
from sqlalchemy import create_engine
from script_running_log import script_running_log
# Import custom variables for National Liveability indicator process
from _project_setup import *
# simple timer for log file
start = time.time()
script = os.path.basename(sys.argv[0])
# INPUT PARAMETERS
# NOTE(review): `engine` is created here but never used in this script —
# presumably retained for parity with sibling scripts; confirm before removing.
# (create_engine is lazy, so no connection is actually opened here.)
engine = create_engine("postgresql://{user}:{pwd}@{host}/{db}".format(user = db_user,
                                                                      pwd = db_pwd,
                                                                      host = db_host,
                                                                      db = db))
# OUTPUT PROCESS
# Task description recorded in progress messages and the completion log.
# Fixed: removed a dead earlier assignment of `task` which was overwritten
# here before ever being read.
task = '\nCreate inclusion area geometries... '
print("Commencing task: {} at {}".format(task,time.strftime("%Y%m%d-%H%M%S")))
# connect to the PostgreSQL server; this psycopg2 connection/cursor pair
# is used for all SQL executed below
conn = psycopg2.connect(dbname=db, user=db_user, password=db_pwd)
curs = conn.cursor()
# Create area tables
# For each analysis region (e.g. SA1, SA2, ...), aggregate mesh-block level
# rows from area_linkage into an "included area" geometry table: urban mesh
# blocks inside the study region with at least one dwelling and a non-null
# IRSD (disadvantage) score.
print(" - Analysis region tables... ")
for area in analysis_regions:
    area_id = df_regions.loc[area,'id']
    abbrev = df_regions.loc[area,'abbreviation']
    # Fixed: removed the stray trailing comma after print(...) — a Python 2
    # "print x," holdover that in Python 3 merely built and discarded a tuple.
    print(" - {} ({})... ".format(area,area_id))
    if area == 'sa1':
        # At SA1 level, also report the suburb(s) and LGA(s) the area spans.
        additional_fields = '''
        string_agg(distinct(ssc_name_2016),',') AS suburb,
        string_agg(distinct(lga_name_2016), ', ') AS lga,
        '''
    else:
        additional_fields = ''
    sql = '''
    -- DROP TABLE IF EXISTS {boundary_schema}.area_{abbrev}_included;
    CREATE TABLE IF NOT EXISTS {boundary_schema}.area_{abbrev}_included AS
    SELECT {area_id},
           {additional_fields}
           SUM(dwelling) AS dwellings,
           SUM(person) AS persons,
           SUM(area_ha) AS area_ha,
           ST_Union(geom) AS geom
    FROM area_linkage
    WHERE irsd_score IS NOT NULL
      AND dwelling > 0
      AND urban = 'urban'
      AND study_region IS TRUE
    GROUP BY {area_id}
    ORDER BY {area_id} ASC;
    CREATE INDEX IF NOT EXISTS id_area_{abbrev}_included ON {boundary_schema}.area_{abbrev}_included ({area_id});
    CREATE INDEX IF NOT EXISTS gix_area_{abbrev}_included ON {boundary_schema}.area_{abbrev}_included USING GIST (geom);
    '''.format(area_id = area_id,
               abbrev = abbrev,
               boundary_schema = boundary_schema,
               additional_fields = additional_fields)
    curs.execute(sql)
    conn.commit()
    print(" area_{abbrev}_included created.".format(abbrev = abbrev))
print(" - SOS region tables")
create_study_region_tables = '''
-- DROP TABLE IF EXISTS {boundary_schema}.study_region_all_sos;
CREATE TABLE IF NOT EXISTS {boundary_schema}.study_region_all_sos AS
SELECT sos_name_2016,
SUM(dwelling) AS dwelling,
SUM(person) AS person,
SUM(area_ha) AS area_ha,
ST_Union(geom) geom
FROM area_linkage
WHERE study_region IS TRUE
GROUP BY sos_name_2016;
CREATE UNIQUE INDEX IF NOT EXISTS ix_study_region_all_sos ON {boundary_schema}.study_region_all_sos (sos_name_2016);
CREATE INDEX IF NOT EXISTS gix_study_region_all_sos ON {boundary_schema}.study_region_all_sos USING GIST (geom);
-- DROP TABLE IF EXISTS {boundary_schema}.study_region_urban;
CREATE TABLE IF NOT EXISTS {boundary_schema}.study_region_urban AS
SELECT urban,
SUM(dwelling) AS dwelling,
SUM(person) AS person,
SUM(area_ha) AS area_ha,
ST_Union(geom) geom
FROM area_linkage
WHERE study_region IS TRUE
GROUP BY urban;
CREATE UNIQUE INDEX IF NOT EXISTS ix_study_region_urban ON {boundary_schema}.study_region_urban (urban);
CREATE INDEX IF NOT EXISTS gix_study_region_urban ON {boundary_schema}.study_region_urban USING GIST (geom);
'''.format(region = region.lower(), year = year,boundary_schema=boundary_schema)
curs.execute(create_study_region_tables)
conn.commit()
# Tag each sample point with the 2016 mesh block code of the area_linkage
# polygon it intersects (idempotent: column added only if absent).
# NOTE(review): a point lying exactly on a shared boundary may intersect more
# than one mesh block; in that case which match wins is not deterministic —
# confirm this is acceptable (the header comment acknowledges this pitfall).
sql = '''
ALTER TABLE {sample_point_feature} ADD COLUMN IF NOT EXISTS mb_code_2016 text;
UPDATE {sample_point_feature} p set mb_code_2016 = m.mb_code_2016
FROM area_linkage AS m
WHERE ST_Intersects(p.geom, m.geom);
'''.format(sample_point_feature = sample_point_feature)
curs.execute(sql)
conn.commit()
print(" - SOS indexed by parcel")
create_parcel_sos = '''
-- DROP TABLE IF EXISTS parcel_sos;
CREATE TABLE IF NOT EXISTS parcel_sos AS
SELECT a.{id},
sos_name_2016
FROM {sample_point_feature} a LEFT JOIN area_linkage b ON a.mb_code_2016 = b.mb_code_2016;
CREATE UNIQUE INDEX IF NOT EXISTS parcel_sos_idx ON parcel_sos ({id});
'''.format(id = points_id,sample_point_feature = sample_point_feature)
curs.execute(create_parcel_sos)
conn.commit()
# output to completion log
script_running_log(script, task, start, locale)
# clean up
# Fixed: close the cursor explicitly before the connection rather than
# relying on it being released implicitly when the connection closes.
curs.close()
conn.close()