This repository has been archived by the owner on Sep 6, 2022. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy path04_create_service_areas.py
237 lines (202 loc) · 10.3 KB
/
04_create_service_areas.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
# Script: createSausageBuffer_Loop.py
# Purpose: This script creates service areas for a set of input distances
# Carl Higgs, 2019-20
#
# For each distance in ``service_areas`` the script:
#   1. creates (if absent) a PostGIS table {point_schema}.nh{distance}m keyed
#      on the sample point id;
#   2. solves an ArcGIS Network Analyst service-area problem ("sausage
#      buffer": true lines travelled from each point along the road network);
#   3. dissolves the solved lines per facility, re-joins the facility name,
#      then buffers each line geometry in PostGIS and inserts area statistics
#      (sqm, sqkm, ha) plus geometry;
#   4. spatially indexes the per-distance table.
# Finally all per-distance tables are left-joined into a combined
# {schema}.service_areas table, one row per sample point.
#
# Requires ArcGIS with the Network Analyst extension (Python 2 / ArcMap-era
# arcpy.mapping API) and a PostGIS database; configuration (db credentials,
# service_areas, points_id, srid, tolerance, etc.) comes from _project_setup.
import arcpy, arcinfo
import glob
import time
import psycopg2
import numpy as np
from shutil import copytree,rmtree,ignore_patterns
from progressor import progressor
from sqlalchemy import create_engine
from script_running_log import script_running_log
# Import custom variables for National Liveability indicator process
from _project_setup import *
# simple timer for log file
start = time.time()
script = os.path.basename(sys.argv[0])
task = 'create service areas ({}) for locations in {} based on road network'.format(', '.join([str(x) for x in service_areas]),full_locale)
print("Commencing task: {} at {}".format(task,time.strftime("%Y%m%d-%H%M%S")))
schema=point_schema
# ArcGIS environment settings
arcpy.env.workspace = gdb_path
# Specify points
points = sample_point_feature
# denominator: total point count (available for progress reporting)
denominator = int(arcpy.GetCount_management(points).getOutput(0))
# temp --- using SSD copies to save write/read time and avoid conflicts
if not os.path.exists(temp):
    os.makedirs(temp)
# initiate postgresql connection (psycopg2 cursor is used for the per-row
# inserts; the SQLAlchemy engine for DDL and table-existence checks)
conn = psycopg2.connect(database=db, user=db_user, password=db_pwd)
curs = conn.cursor()
engine = create_engine("postgresql://{user}:{pwd}@{host}/{db}".format(user = db_user,
                                                                      pwd  = db_pwd,
                                                                      host = db_host,
                                                                      db   = db),
                       use_native_hstore=False)
temp_gdb = os.path.join(temp,"scratch_{}".format(db))
# create project specific folder in temp dir for scratch.gdb, if not exists
if not os.path.exists(temp_gdb):
    os.makedirs(temp_gdb)
arcpy.env.scratchWorkspace = temp_gdb
arcpy.env.qualifiedFieldNames = False
arcpy.env.overwriteOutput = True
arcpy.CheckOutExtension('Network')
arcpy.MakeFeatureLayer_management(points, "points")
print("Processing service areas...")
for distance in service_areas:
    print(" - {}m... ".format(distance)),
    table = "nh{}m".format(distance)
    if engine.has_table(table, schema=point_schema):
        print("Aleady exists; skipping.")
    else:
        # Per-distance output table: one row per point, with buffer area
        # statistics and the buffered geometry.
        createTable_sausageBuffer = '''
          CREATE TABLE IF NOT EXISTS {point_schema}.{table}
            ({id} {type} PRIMARY KEY,
             area_sqm   double precision,
             area_sqkm  double precision,
             area_ha    double precision,
             geom geometry);
          '''.format(point_schema = point_schema,
                     table = table,
                     id = points_id.lower(),
                     type = points_id_type)
        # create output spatial feature in Postgresql
        engine.execute(createTable_sausageBuffer)
        # preparatory set up
        # Process: Make Service Area Layer
        # TRUE_LINES + OVERLAP + NO_POLYS: we want the raw travelled network
        # lines per facility (buffered later in PostGIS), not polygons.
        outSAResultObject = arcpy.MakeServiceAreaLayer_na(in_network_dataset = in_network_dataset,
                                  out_network_analysis_layer = os.path.join(arcpy.env.scratchGDB,"ServiceArea"),
                                  impedance_attribute = "Length",
                                  travel_from_to = "TRAVEL_FROM",
                                  default_break_values = distance,
                                  line_type="TRUE_LINES",
                                  overlap="OVERLAP",
                                  polygon_type="NO_POLYS",
                                  lines_source_fields="NO_LINES_SOURCE_FIELDS",
                                  hierarchy="NO_HIERARCHY")
        outNALayer = outSAResultObject.getOutput(0)
        #Get the names of all the sublayers within the service area layer.
        subLayerNames = arcpy.na.GetNAClassNames(outNALayer)
        #Store the layer names that we will use later
        facilitiesLayerName = subLayerNames["Facilities"]
        linesLayerName = subLayerNames["SALines"]
        linesSubLayer = arcpy.mapping.ListLayers(outNALayer,linesLayerName)[0]
        facilitiesSubLayer = arcpy.mapping.ListLayers(outNALayer,facilitiesLayerName)[0]
        fcLines  = os.path.join(arcpy.env.scratchGDB,"Lines")
        # Process: Add Locations (load all sample points as facilities,
        # snapping within `tolerance` metres of network edges)
        arcpy.AddLocations_na(in_network_analysis_layer = os.path.join(arcpy.env.scratchGDB,"ServiceArea"),
                    sub_layer                      = facilitiesLayerName,
                    in_table                       = "points",
                    field_mappings                 = "Name {} #".format(points_id),
                    search_tolerance               = "{} Meters".format(tolerance),
                    search_criteria                = "{} SHAPE;{} NONE".format(network_edges,network_junctions),
                    append                         = "CLEAR",
                    snap_to_position_along_network = "NO_SNAP",
                    exclude_restricted_elements    = "INCLUDE",
                    search_query                   = "{} #;{} #".format(network_edges,network_junctions))
        place = "after AddLocations"
        # Process: Solve (skip invalid locations; continue on solver errors)
        arcpy.Solve_na(in_network_analysis_layer = os.path.join(arcpy.env.scratchGDB,"ServiceArea"), ignore_invalids = "SKIP",terminate_on_solve_error = "CONTINUE")
        place = "after Solve_na"
        # Dissolve linesLayerName: merge each facility's travelled lines into
        # one multipart feature
        # field_names = [f.name for f in arcpy.ListFields(linesSubLayer)]
        arcpy.Dissolve_management(in_features=linesSubLayer,
                                  out_feature_class=fcLines,
                                  dissolve_field="FacilityID",
                                  statistics_fields="",
                                  multi_part="MULTI_PART",
                                  unsplit_lines="DISSOLVE_LINES")
        place = "after Dissolve"
        # Process: Join Field (recover the point id, stored as the facility
        # Name, via FacilityID -> ObjectId join)
        arcpy.MakeFeatureLayer_management(fcLines, "tempLayer")
        place = "after MakeFeatureLayer of TempLayer"
        arcpy.AddJoin_management(in_layer_or_view = "tempLayer",
                                 in_field    = "FacilityID",
                                 join_table  = facilitiesSubLayer,
                                 join_field  = "ObjectId")
        place = "after AddJoin"
        # write output line features within chunk to Postgresql spatial feature
        # Need to parse WKT output slightly (Postgresql doesn't take this M-values nonsense)
        with arcpy.da.SearchCursor("tempLayer",['Facilities.Name','Shape@WKT']) as cursor:
            for row in cursor:
                # NOTE: .encode('utf-8') on str implies Python 2 semantics here
                id =  row[0].encode('utf-8')
                wkt = row[1].encode('utf-8').replace(' NAN','').replace(' M ','')
                # NOTE(review): values are interpolated into SQL via
                # str.format; id/wkt originate from this project's own
                # network analysis output, but parameterized queries
                # (curs.execute(sql, params)) would be safer.
                sql = '''
                      INSERT INTO {point_schema}.{table}
                      SELECT
                        '{id}',
                        b.area_sqm,
                        b.area_sqm/1000000 AS area_sqkm,
                        b.area_sqm/10000 AS area_ha,
                        b.geom
                      FROM (SELECT ST_Area(geom) AS area_sqm,
                                   geom
                            FROM (SELECT ST_Buffer(ST_SnapToGrid(ST_GeometryFromText('{wkt}',
                                                                                     {srid}),
                                                                 {snap_to_grid}),
                                                   {line_buffer}) AS geom
                                   ) a
                            ) b ;
                '''.format(point_schema = point_schema,
                           table  = table,
                           id     = id  ,
                           wkt    = wkt ,
                           srid   = srid ,
                           snap_to_grid = snap_to_grid ,
                           line_buffer = line_buffer
                           )
                curs.execute(sql)
                place = "after curs.execute insert sausage buffer"
                conn.commit()
                place = "after conn.commit for insert sausage buffer"
        # clean up
        arcpy.Delete_management("tempLayer")
        arcpy.Delete_management(fcLines)
        # Create sausage buffer spatial index
        engine.execute("CREATE INDEX IF NOT EXISTS {table}_gix ON {point_schema}.{table} USING GIST (geom);".format(point_schema = point_schema, table = table))
        if distance==1600:
            # Create summary table of parcel id and area
            print("  - Creating summary table of points with no 1600m buffer (if any)... "),
            sql = '''
            CREATE TABLE IF NOT EXISTS {validation_schema}.no_nh_1600m AS
            SELECT * FROM {sample_point_feature}
            WHERE {points_id} NOT IN (SELECT {points_id} FROM {point_schema}.{table});
            '''.format(sample_point_feature = sample_point_feature,
                       point_schema = point_schema,
                       validation_schema = validation_schema,
                       points_id=points_id,
                       table=table)
            engine.execute(sql)
        print("Processed.")
arcpy.Delete_management("points")
arcpy.CheckInExtension('Network')
conn.close()
try:
    # NOTE(review): this glob ('scratch_{study_region}_*.gdb') does not match
    # the temp_gdb directory created above ('scratch_{db}', no extension) —
    # confirm the intended cleanup target.
    for gdb in glob.glob(os.path.join(temp,"scratch_{}_*.gdb".format(study_region))):
        arcpy.Delete_management(gdb)
except Exception:
    # Best-effort cleanup: lock files left by ArcGIS can block deletion.
    print("FRIENDLY REMINDER!!! Remember to delete temp gdbs to save space!")
    print("(there may be lock files preventing automatic deletion.)")
# Create combined service areas table: one row per point with the area (ha)
# of each distance's service area, left-joined so points lacking a buffer at
# some distance still appear (with NULLs).
areas_sql = []
from_sql = []
for distance in service_areas:
    table = 'nh{}m'.format(distance)
    areas_sql = areas_sql+['''{schema}.{table}.area_ha AS {table}_ha'''.format(schema=schema,table=table)]
    from_sql = from_sql+['''LEFT JOIN {schema}.{table} ON p.{points_id} = {schema}.{table}.{points_id}'''.format(schema=schema,table=table,sample_point_feature=sample_point_feature,points_id=points_id)]
sql = '''
CREATE TABLE IF NOT EXISTS {schema}.service_areas AS
SELECT p.{points_id},
       {areas_sql}
FROM {sample_point_feature} p
     {from_sql}
'''.format(schema = schema,
           points_id=points_id,
           sample_point_feature=sample_point_feature,
           areas_sql=',\n'.join(areas_sql),
           from_sql=' \n'.join(from_sql))
engine.execute(sql)
engine.dispose()
# output to completion log
script_running_log(script, task, start, locale)