########################################################################################
## Copyright 2017 Esri
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
## http://www.apache.org/licenses/LICENSE-2.0
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
########################################################################################
'''Module implementing utility tools that help in publishing network analysis services.'''
import os
import logging
import ConfigParser
import fnmatch
import json
import urlparse
import urllib2
import io
import uuid
import base64
import locale
import collections
import shutil
import pprint
import time
import xml.dom.minidom as DOM
try:
    import cPickle as pickle
except ImportError:
    import pickle
import arcpy
import nas
# module level attributes
TIME_UNITS = ('Minutes', 'Hours', 'Days', 'Seconds')
class CreateSupportingFiles(object):
'''class containing the execution logic'''
##constants
#Keywords for restriction usage values
RESTRICTION_USAGE_VALUES = {
"-1.0": "PROHIBITED",
"5.0": "AVOID_HIGH",
"2.0": "AVOID_MEDIUM",
"1.3": "AVOID_LOW",
"0.5" : "PREFER_MEDIUM",
"0.8" : "PREFER_LOW",
"0.2" : "PREFER_HIGH"
}
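    # The keys above are restriction usage coefficients formatted to one decimal place.
    # A minimal lookup sketch (the -1 input value is illustrative only), mirroring how
    # _getTravelModes() normalizes numeric restriction usage values before substituting
    # the keyword:
    #
    #   param_value = "{0:.1f}".format(-1)                                # -> "-1.0"
    #   keyword = RESTRICTION_USAGE_VALUES.get(param_value, param_value)  # -> "PROHIBITED"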
TOOL_LIMITS = {
"FindClosestFacilities": collections.OrderedDict((
("maximumFeaturesAffectedByPointBarriers", None),
("maximumFeaturesAffectedByLineBarriers", None),
("maximumFeaturesAffectedByPolygonBarriers", None),
("maximumFacilities", None),
("maximumFacilitiesToFind", None),
("maximumIncidents", None),
("forceHierarchyBeyondDistance", None),
("forceHierarchyBeyondDistanceUnits", "Miles"),
)),
"FindRoutes" : collections.OrderedDict((
("maximumFeaturesAffectedByPointBarriers", None),
("maximumFeaturesAffectedByLineBarriers", None),
("maximumFeaturesAffectedByPolygonBarriers", None),
("maximumStops", None),
("maximumStopsPerRoute", None),
("forceHierarchyBeyondDistance", None),
("forceHierarchyBeyondDistanceUnits", "Miles")
)),
"GenerateOriginDestinationCostMatrix": collections.OrderedDict((
("maximumFeaturesAffectedByPointBarriers", None),
("maximumFeaturesAffectedByLineBarriers", None),
("maximumFeaturesAffectedByPolygonBarriers", None),
("maximumOrigins", None),
("maximumDestinations", None),
("forceHierarchyBeyondDistance", None),
("forceHierarchyBeyondDistanceUnits", "Miles")
)),
"GenerateServiceAreas": collections.OrderedDict((
("maximumFeaturesAffectedByPointBarriers", None),
("maximumFeaturesAffectedByLineBarriers", None),
("maximumFeaturesAffectedByPolygonBarriers", None),
("maximumFacilities", None),
("maximumNumberOfBreaks", None),
("maximumBreakTimeValue", None),
("maximumBreakTimeValueUnits", "Minutes"),
("maximumBreakDistanceValue", None),
("maximumBreakDistanceValueUnits", "Miles"),
("forceHierarchyBeyondBreakTimeValue", None),
("forceHierarchyBeyondBreakTimeValueUnits", "Minutes"),
("forceHierarchyBeyondBreakDistanceValue", None),
("forceHierarchyBeyondBreakDistanceValueUnits", "Miles"),
)),
"SolveLocationAllocation": collections.OrderedDict((
("maximumFeaturesAffectedByPointBarriers", None),
("maximumFeaturesAffectedByLineBarriers", None),
("maximumFeaturesAffectedByPolygonBarriers", None),
("maximumFacilities", None),
("maximumFacilitiesToFind", None),
("maximumDemandPoints", None),
("forceHierarchyBeyondDistance", None),
("forceHierarchyBeyondDistanceUnits", "Miles"),
)),
"SolveVehicleRoutingProblem": collections.OrderedDict((
("maximumFeaturesAffectedByPointBarriers", None),
("maximumFeaturesAffectedByLineBarriers", None),
("maximumFeaturesAffectedByPolygonBarriers", None),
("maximumOrders", None),
("maximumRoutes", None),
("maximumOrdersPerRoute", None),
("forceHierarchyBeyondDistance", None),
("forceHierarchyBeyondDistanceUnits", "Miles"),
)),
"EditVehicleRoutingProblem": collections.OrderedDict((
("maximumFeaturesAffectedByPointBarriers", None),
("maximumFeaturesAffectedByLineBarriers", None),
("maximumFeaturesAffectedByPolygonBarriers", None),
("maximumOrders", None),
("maximumRoutes", None),
("maximumOrdersPerRoute", None),
("forceHierarchyBeyondDistance", None),
("forceHierarchyBeyondDistanceUnits", "Miles"),
)),
}
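    # TOOL_LIMITS maps each geoprocessing tool name to the limits it honors. A value of None
    # means no explicit limit value is supplied; the service_limits tool parameter can override
    # these defaults, and the "*Units" entries carry the units the limit values are expressed in.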
TIME_UNITS = ('Minutes', 'Hours', 'Days', 'Seconds')
class NetworkDatasetAttributes(object):
        '''Store info about network dataset attributes such as default restrictions, time costs,
        distance costs, and the default impedance attribute'''
def __init__(self, nds_desc, populate_attribute_parameters=True):
'''Derive info from network dataset attributes'''
time_costs = []
distance_costs = []
other_costs = []
count = 0
self.defaultRestrictionAttributes = []
self.restrictions = []
self.timeCosts = {}
self.distanceCosts = {}
self.otherCosts = {}
self.defaultImpedanceAttribute = ""
self.attributeParameters = {}
attributes = nds_desc.attributes
for attribute in attributes:
usage_type = attribute.usageType
name = attribute.name
unit = attribute.units
use_by_default = attribute.useByDefault
if usage_type == "Restriction":
if use_by_default:
self.defaultRestrictionAttributes.append(name)
self.restrictions.append(name)
elif usage_type == "Cost":
#Determine if it is time based or distance based
if unit in CreateSupportingFiles.TIME_UNITS:
time_costs.append((name, unit))
if use_by_default:
self.defaultImpedanceAttribute = name
elif unit.lower() == "unknown":
other_costs.append((name, unit))
else:
distance_costs.append((name, unit))
if use_by_default:
self.defaultImpedanceAttribute = name
else:
pass
#populate all the attribute parameters and their default values.
#Store this in a dict with key of row id and value as a list
if populate_attribute_parameters:
parameter_count = attribute.parameterCount
if parameter_count:
for i in range(parameter_count):
param_name = getattr(attribute, "parameterName" + str(i))
param_default_value = None
if hasattr(attribute, "parameterDefaultValue" + str(i)):
param_default_value = str(getattr(attribute, "parameterDefaultValue" + str(i)))
if param_name.upper() == "RESTRICTION USAGE" and param_default_value in CreateSupportingFiles.RESTRICTION_USAGE_VALUES:
param_default_value = CreateSupportingFiles.RESTRICTION_USAGE_VALUES[param_default_value]
count += 1
self.attributeParameters[count] = (name, param_name, param_default_value)
            # If the network dataset does not define a default cost attribute, fall back in this order:
            # - first time-based cost attribute
            # - first distance-based cost attribute
            # - first cost attribute with unknown units
if not self.defaultImpedanceAttribute:
if time_costs:
self.defaultImpedanceAttribute = time_costs[0][0]
elif distance_costs:
self.defaultImpedanceAttribute = distance_costs[0][0]
elif other_costs:
self.defaultImpedanceAttribute = other_costs[0][0]
else:
self.defaultImpedanceAttribute = ""
#Return costs as dicts of name: unit
self.timeCosts = dict(time_costs)
self.distanceCosts = dict(distance_costs)
self.otherCosts = dict(other_costs)
return
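        # A minimal usage sketch, assuming a hypothetical file geodatabase network dataset path:
        #
        #   nds_desc = arcpy.Describe(r"C:\Data\Network.gdb\Routing\Routing_ND")
        #   nds_attrs = CreateSupportingFiles.NetworkDatasetAttributes(nds_desc)
        #   print nds_attrs.defaultImpedanceAttribute, nds_attrs.timeCosts, nds_attrs.restrictions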
def __init__(self, *args, **kwargs):
'''Constructor'''
#Initialize instance attributes
self.logger = nas.Logger(nas.LOG_LEVEL)
self.templateNDS = None
self.templateNDSDesc = None
self.templateNDSTravelModes = None
self.travelModesJSON = None
if self.logger.DEBUG:
for arg_name in sorted(kwargs):
self.logger.debug(u"{0}: {1}".format(arg_name, kwargs[arg_name]))
#Store tool parameter values as instance attributes
self.networkDatasets = kwargs.get("network_datasets", None)
if self.networkDatasets:
self.networkDatasets = nas.strip_quotes(self.networkDatasets.split(";"))
self.templateNDS = self.networkDatasets[0]
self.templateNDSDesc = arcpy.Describe(self.templateNDS)
self.supportingFilesFolder = kwargs.get("supporting_files_folder", None)
self.localizedTravelModesFolder = kwargs.get("localized_travel_modes_folder", None)
self.serviceLimits = kwargs.get("service_limits", None)
#Initialize derived outputs
self.ndsPropertiesFile = os.path.join(self.supportingFilesFolder, "NetworkDatasetProperties.ini")
self.travelModesFile = os.path.join(self.supportingFilesFolder, "DefaultTravelModes.json")
self.localizedTravelModesFile = os.path.join(self.supportingFilesFolder, "DefaultTravelModesLocalized.json")
self.toolInfoFile = os.path.join(self.supportingFilesFolder, "ToolInfo.json")
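        # A minimal invocation sketch (the dataset and folder paths are hypothetical); this mirrors
        # how PublishRoutingServices.execute() drives the tool further below:
        #
        #   tool = CreateSupportingFiles(network_datasets=r"C:\Data\Network.gdb\Routing\Routing_ND",
        #                                supporting_files_folder=r"C:\NDSupportingFiles")
        #   tool.execute()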
def execute(self):
'''Main execution logic'''
#Create the network dataset properties file
parser = ConfigParser.SafeConfigParser()
#Add a new section with property names and values for each network dataset
for network in self.networkDatasets:
network_props = self._getNetworkProperties(network)
parser.add_section(network)
for prop in sorted(network_props):
parser.set(network, prop, network_props[prop])
#Write the properties to a ini file
self.logger.info(u"Writing network dataset properties to {0}".format(self.ndsPropertiesFile))
with open(self.ndsPropertiesFile, "w", 0) as config_file:
parser.write(config_file)
#Store the default travel modes
self._getTravelModes()
#Store network dataset description as JSON
template_nds_description = self._getNDSDescription()
#Get service limits
service_limits = self._getServiceLimits()
#Write tool info with network dataset description and service limits to a json file
tool_info_json = {
"networkDataset" : template_nds_description,
"serviceLimits": service_limits,
}
        #Save the tool info to a file
self.logger.info(u"Saving tool info to {0}".format(self.toolInfoFile))
self._saveJSONToFile(self.toolInfoFile, tool_info_json)
def _saveJSONToFile(self, file_path, json_content):
'''Write out the json content to a file'''
with io.open(file_path, "wb") as json_fp:
json_fp.write(json.dumps(json_content, encoding="utf-8", sort_keys=True, indent=2,
ensure_ascii=False).encode("utf-8"))
json_fp.write("\n")
def _getNetworkProperties(self, network):
'''Populate a dict containing properties for the network dataset'''
property_names = ("time_attribute", "time_attribute_units", "distance_attribute",
"distance_attribute_units", "restrictions", "default_restrictions",
"attribute_parameter_values", "feature_locator_where_clause", "Extent",
"travel_modes", "default_custom_travel_mode", "walk_time_attribute",
"walk_time_attribute_units", "truck_time_attribute", "truck_time_attribute_units",
"non_walking_restrictions", "walking_restriction", "trucking_restriction",
"time_neutral_attribute", "time_neutral_attribute_units")
populate_attribute_parameters = True
esmp_travel_mode_names = ("DRIVING TIME", "DRIVING DISTANCE", "TRUCKING TIME", "TRUCKING DISTANCE",
"WALKING TIME", "WALKING DISTANCE", "RURAL DRIVING TIME", "RURAL DRIVING DISTANCE")
default_time_attr = ""
default_distance_attr = ""
default_impedance_attr = ""
default_travel_mode_name = ""
default_travel_mode = None
default_restriction_attrs = []
time_costs = {}
distance_costs = {}
restrictions = []
enable_hierarchy = False
hierarchy = 0
attribute_parameters = {}
network_properties = dict.fromkeys(property_names)
nds_desc = arcpy.Describe(network)
nds_type = nds_desc.networkType
is_sdc_nds = (nds_type == 'SDC')
nds_travel_modes = {k.upper() : v for k,v in arcpy.na.GetTravelModes(nds_desc.catalogPath).iteritems()}
# Store travel mode names grouped by type
nds_travel_mode_types = {}
for k,v in nds_travel_modes.iteritems():
travel_mode_type = nds_travel_mode_types.setdefault(v.type, [])
travel_mode_type.append(k)
#Build a list of restriction, time and distance cost attributes
#Get default attributes for geodatabase network datasets.
nds_attributes = self.NetworkDatasetAttributes(nds_desc, populate_attribute_parameters)
default_impedance_attr = nds_attributes.defaultImpedanceAttribute
default_restriction_attrs = nds_attributes.defaultRestrictionAttributes
time_costs = nds_attributes.timeCosts
distance_costs = nds_attributes.distanceCosts
restrictions = nds_attributes.restrictions
attribute_parameters = nds_attributes.attributeParameters
        #Set the default time and distance attributes based on the default travel mode. If a default travel mode
        #is not set, use the default cost attribute. If even a default cost attribute is not set, use the first
        #time-based cost attribute in alphabetical order and the last distance-based cost attribute.
first_time_cost_attribute = sorted(time_costs.keys())[0]
default_travel_mode_name = nds_desc.defaultTravelModeName
# If a default travel mode is not defined, use the first travel mode as default
if not default_travel_mode_name:
default_travel_mode_name = sorted(nds_travel_modes.keys())[0]
if default_travel_mode_name and default_travel_mode_name.upper() in nds_travel_modes:
default_travel_mode = nds_travel_modes[default_travel_mode_name.upper()]
default_time_attr = default_travel_mode.timeAttributeName
default_distance_attr = default_travel_mode.distanceAttributeName
elif default_impedance_attr in time_costs:
default_time_attr = default_impedance_attr
elif default_impedance_attr in distance_costs:
default_distance_attr = default_impedance_attr
if default_time_attr == "":
#if there is no default use the first one in the list
default_time_attr = first_time_cost_attribute
if default_distance_attr == "":
#Use the last one in case a default is not set
default_distance_attr = sorted(distance_costs.keys())[-1]
network_properties["time_attribute"] = default_time_attr
network_properties["time_attribute_units"] = time_costs[default_time_attr]
        #Set the walk time and truck travel time attributes and their units. If attributes named WalkTime and
        #TruckTravelTime do not exist, use the time attribute from the first travel mode of type WALK or TRUCK.
        #If neither exists, fall back to the first time cost attribute.
if "WalkTime" in time_costs:
walk_time_attribute = "WalkTime"
elif nds_travel_mode_types.get("WALK", None):
first_walk_type_travel_mode = sorted(nds_travel_mode_types["WALK"])[0]
walk_time_attribute = nds_travel_modes[first_walk_type_travel_mode].timeAttributeName
else:
walk_time_attribute = first_time_cost_attribute
network_properties["walk_time_attribute"] = walk_time_attribute
network_properties["walk_time_attribute_units"] = time_costs[walk_time_attribute]
if "TruckTravelTime" in time_costs:
truck_time_attribute = "TruckTravelTime"
elif nds_travel_mode_types.get("TRUCK", None):
first_truck_type_travel_mode = sorted(nds_travel_mode_types["TRUCK"])[0]
truck_time_attribute = nds_travel_modes[first_truck_type_travel_mode].timeAttributeName
else:
truck_time_attribute = first_time_cost_attribute
network_properties["truck_time_attribute"] = truck_time_attribute
network_properties["truck_time_attribute_units"] = time_costs[truck_time_attribute]
time_neutral_attribute = "Minutes" if "Minutes" in time_costs else first_time_cost_attribute
network_properties["time_neutral_attribute"] = time_neutral_attribute
network_properties["time_neutral_attribute_units"] = time_costs[time_neutral_attribute]
network_properties["distance_attribute"] = default_distance_attr
network_properties["distance_attribute_units"] = distance_costs[default_distance_attr]
#Set complete restrictions, default restrictions and non-walking restrictions
network_properties["restrictions"] = ";".join(restrictions)
network_properties["default_restrictions"] = ";".join(default_restriction_attrs)
network_properties["non_walking_restrictions"] = ";".join(fnmatch.filter(restrictions, "Driving*"))
walking_restriction = "Walking" if "Walking" in restrictions else ""
trucking_restriction = "Driving a Truck" if "Driving a Truck" in restrictions else ""
network_properties["walking_restriction"] = walking_restriction
network_properties["trucking_restriction"] = trucking_restriction
#Set attribute parameters
if populate_attribute_parameters and attribute_parameters:
network_properties["attribute_parameter_values"] = pickle.dumps(attribute_parameters)
#Update the feature locator where clause
if is_sdc_nds:
source_names = ["SDC Edge Source"]
else:
all_source_names = [source.name for source in nds_desc.sources]
turn_source_names = [turn_source.name for turn_source in nds_desc.turnSources]
source_names = list(set(all_source_names) - set(turn_source_names))
search_query = [('"' + source_name + '"', "#") for source_name in source_names]
search_query = [" ".join(s) for s in search_query]
network_properties["feature_locator_where_clause"] = ";".join(search_query)
#store the extent
extent = nds_desc.Extent
extent_coords = (str(extent.XMin),str(extent.YMin), str(extent.XMax),
str(extent.YMax))
network_properties["Extent"] = pickle.dumps(extent_coords)
#Store the travel modes in a dict with key as a two value tuple (travel mode type, isModeTimeBased)
#and value as travel mode name
travel_modes = {}
for travel_mode_name in nds_travel_modes:
nds_travel_mode = json.loads(unicode(nds_travel_modes[travel_mode_name]))
travel_mode_impedance = nds_travel_mode["impedanceAttributeName"]
is_impedance_time_based = None
if travel_mode_impedance in time_costs:
is_impedance_time_based = True
elif travel_mode_impedance in distance_costs:
is_impedance_time_based = False
else:
continue
if travel_mode_name in esmp_travel_mode_names:
travel_mode_type = travel_mode_name.split(" ")[0]
else:
travel_mode_type = travel_mode_name
travel_modes[(travel_mode_type, is_impedance_time_based)] = travel_mode_name
network_properties["travel_modes"] = pickle.dumps(travel_modes)
#store the travel mode that is used to set the custom travel mode settings parameters
#default_custom_travel_mode_name = "Driving Time"
default_custom_travel_mode_name = "DRIVING TIME"
default_custom_travel_mode = nds_travel_modes.get(default_custom_travel_mode_name, default_travel_mode)
if default_custom_travel_mode:
default_custom_travel_mode = unicode(default_custom_travel_mode)
network_properties["default_custom_travel_mode"] = default_custom_travel_mode
return network_properties
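        # A minimal sketch of how a consumer could read the pickled values back out of the
        # NetworkDatasetProperties.ini written by execute() (variable names are hypothetical):
        #
        #   parser = ConfigParser.SafeConfigParser()
        #   parser.read(nds_properties_file)
        #   extent_coords = pickle.loads(parser.get(network, "Extent"))
        #   travel_modes = pickle.loads(parser.get(network, "travel_modes"))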
def _getTravelModes(self):
"""Save travel modes used by default for all the routing services."""
#Store the travel mode name and travel mode ID mappings
TRAVEL_MODE_IDS = {
"Driving Time": "FEgifRtFndKNcJMJ",
"Driving Distance" : "iKjmHuBSIqdEfOVr",
"Trucking Time" : "ZzzRtYcPLjXFBKwr",
"Trucking Distance" : "UBaNfFWeKcrRVYIo",
"Walking Time" : "caFAgoThrvUpkFBW",
"Walking Distance" : "yFuMFwIYblqKEefX",
"Rural Driving Time" : "NmNhNDUwZmE1YTlj",
"Rural Driving Distance" : "Yzk3NjI1NTU5NjVj",
}
#File name that contains translations
LOCALIZED_FILE_NAME = "DefaultTravelModeNamesAndDescriptions.json"
BOM = u"\ufeff"
default_travel_mode_name = u""
default_travel_mode_id = ""
#Get all the travel modes defined in the template network dataset
self.templateNDSTravelModes = arcpy.na.GetTravelModes(self.templateNDSDesc.catalogPath)
nds_attributes = self.templateNDSDesc.attributes
        #Get the default travel mode. If the network dataset does not define a default travel mode, prefer
        #"Driving Time" if it exists, otherwise use any travel mode as the default.
if hasattr(self.templateNDSDesc, "defaultTravelModeName"):
default_travel_mode_name = self.templateNDSDesc.defaultTravelModeName
if not default_travel_mode_name:
if "Driving Time" in self.templateNDSTravelModes:
default_travel_mode_name = "Driving Time"
else:
default_travel_mode_name = self.templateNDSTravelModes.iterkeys().next()
#Modify the attribute parameters stored with network dataset travel modes so that they store only those
#attribute parameters that are relevant to restriction and time and distance attributes used in the travel mode
travel_modes = {}
for travel_mode_name in self.templateNDSTravelModes:
            #Generate a new id if the travel mode name is not an esmp travel mode.
travel_mode_id = TRAVEL_MODE_IDS.get(travel_mode_name, base64.b64encode(uuid.uuid4().hex)[0:16])
travel_mode = json.loads(unicode(self.templateNDSTravelModes[travel_mode_name]), encoding="utf-8")
attribute_parameters = travel_mode.get("attributeParameterValues", [])
applicable_attributes = travel_mode.get("restrictionAttributeNames", []) + [travel_mode.get("timeAttributeName", ""),
travel_mode.get("distanceAttributeName", "") ]
            applicable_attribute_parameters = []
for attr_param in attribute_parameters:
if attr_param["attributeName"] in applicable_attributes:
#Use string keyword for the restriction usage value
if attr_param["parameterName"].upper() == "RESTRICTION USAGE":
param_value = "{0:.1f}".format(attr_param["value"])
attr_param["value"] = self.RESTRICTION_USAGE_VALUES.get(param_value, param_value)
applicable_attribute_parameters.append(attr_param)
travel_mode["attributeParameterValues"] = applicable_attribute_parameters
travel_mode["id"] = travel_mode_id
travel_modes[travel_mode_id] = travel_mode
#Determine the default travel mode id
if travel_mode_name == default_travel_mode_name:
default_travel_mode_id = travel_mode_id
#Prepare the json to be written to the output file.
self.travelModesJSON = {
"supportedTravelModes": travel_modes.values(),
"defaultTravelMode": default_travel_mode_id,
}
#Save the JSON descriptions to a file
self.logger.info(u"Saving travel modes to {0}".format(self.travelModesFile))
self._saveJSONToFile(self.travelModesFile, self.travelModesJSON)
#Get localized travel mode names and descriptions
localized_travel_modes = {}
if self.localizedTravelModesFolder:
            #Walk the folder looking for the localized travel mode files
for root, dirs, files in os.walk(self.localizedTravelModesFolder):
for filename in files:
if filename == LOCALIZED_FILE_NAME:
with io.open(os.path.join(root,filename), "r", encoding="utf-8") as fp:
localized_travel_mode_str = fp.read()
if localized_travel_mode_str.startswith(BOM):
localized_travel_mode_str = localized_travel_mode_str.lstrip(BOM)
localized_travel_modes[os.path.basename(root)] = json.loads(localized_travel_mode_str, "utf-8")
#Save the localized travel modes to a new file
self.logger.info(u"Saving localized travel modes to {0}".format(self.localizedTravelModesFile))
self._saveJSONToFile(self.localizedTravelModesFile, localized_travel_modes)
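        # The resulting DefaultTravelModes.json has roughly this shape (entries abbreviated here):
        #
        #   {
        #     "defaultTravelMode": "FEgifRtFndKNcJMJ",
        #     "supportedTravelModes": [{"id": "...", "attributeParameterValues": [...], ...}]
        #   }
        #
        # DefaultTravelModesLocalized.json maps each locale folder name to the translated travel
        # mode names and descriptions found in that folder.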
def _getNDSDescription(self):
'''Store the description of the template network dataset as a dict in JSON'''
attribute_parameter_values = []
attribute_parameter_prop_names = ("attributeName", "parameterName", "parameterType", "value")
nds_attributes = []
nds_attribute_prop_names = ("name", "dataType", "units", "usageType", "parameterNames",
"restrictionUsageParameterName", "trafficSupport")
nds_traffic_support_type = "NONE"
default_cost_attribute = ""
default_restrictions = []
#Get network dataset traffic support type
if hasattr(self.templateNDSDesc, "trafficSupportType"):
nds_traffic_support_type = self.templateNDSDesc.trafficSupportType
else:
#Calculate based on other properties
supports_historical_traffic = self.templateNDSDesc.supportsHistoricalTrafficData
supports_live_traffic = self.templateNDSDesc.supportsLiveTrafficData
if supports_historical_traffic:
if supports_live_traffic:
live_traffic_data = self.templateNDSDesc.liveTrafficData
if live_traffic_data.trafficFeedLocation:
nds_traffic_support_type = "HISTORICAL_AND_LIVE"
else:
nds_traffic_support_type = "HISTORICAL"
else:
nds_traffic_support_type = "HISTORICAL"
#Get information about network dataset attributes including attribute parameter values
for nds_attribute in self.templateNDSDesc.attributes:
nds_attribute_name = nds_attribute.name
nds_attribute_traffic_support_type = self._getNDSAttributeTrafficSupportType(nds_attribute,
nds_traffic_support_type)
nds_attribute_parameter_names = []
nds_attribute_restriction_usage_parameter_name = None
parameter_count = nds_attribute.parameterCount
if parameter_count:
for i in range(parameter_count):
param_name = getattr(nds_attribute, "parameterName" + str(i))
nds_attribute_parameter_names.append(param_name)
param_data_type = getattr(nds_attribute, "parameterType" + str(i))
param_usage_type = getattr(nds_attribute, "parameterUsageType" + str(i))
if param_usage_type.lower() == "restriction":
nds_attribute_restriction_usage_parameter_name = param_name
param_default_value = None
if hasattr(nds_attribute, "parameterDefaultValue" + str(i)):
param_default_value = str(getattr(nds_attribute, "parameterDefaultValue" + str(i)))
#if param_name.upper() == "RESTRICTION USAGE" and param_default_value in self.RESTRICTION_USAGE_VALUES:
if nds_attribute_restriction_usage_parameter_name and param_default_value in self.RESTRICTION_USAGE_VALUES:
param_default_value = self.RESTRICTION_USAGE_VALUES[param_default_value]
attribute_parameter_values.append(dict(zip(attribute_parameter_prop_names,
(nds_attribute_name, param_name, param_data_type,
param_default_value)
)
)
)
nds_attributes.append(dict(zip(nds_attribute_prop_names,
(nds_attribute_name, nds_attribute.dataType, nds_attribute.units,
nds_attribute.usageType, nds_attribute_parameter_names,
nds_attribute_restriction_usage_parameter_name,
nds_attribute_traffic_support_type)
)
)
)
#Get default cost and restriction attributes
nds_attrs = self.NetworkDatasetAttributes(self.templateNDSDesc, False)
default_cost_attribute = nds_attrs.defaultImpedanceAttribute
default_restrictions = nds_attrs.defaultRestrictionAttributes
network_dataset_description = {
"attributeParameterValues": attribute_parameter_values,
"defaultCostAttribute" : default_cost_attribute,
"defaultRestrictions" : default_restrictions,
"networkAttributes": nds_attributes,
"supportedTravelModes": self.travelModesJSON.get("supportedTravelModes", []),
"trafficSupport": nds_traffic_support_type,
}
return network_dataset_description
def _getNDSAttributeTrafficSupportType(self, nds_attribute, nds_traffic_support_type):
'''Calculates traffic support type for a network dataset attribute'''
attr_traffic_support_type = "NONE"
if hasattr(nds_attribute, "trafficSupportType"):
attr_traffic_support_type = nds_attribute.trafficSupportType
else:
#Traffic support type for an attribute with NetworkEdgeTraffic evaluator is equal to the traffic support
#type of the network dataset.
evaluator_count = nds_attribute.evaluatorCount
if evaluator_count:
for i in range(evaluator_count):
evaluator_type = getattr(nds_attribute, "evaluatorType{0}".format(i))
if evaluator_type.lower() == "networkedgetraffic":
attr_traffic_support_type = nds_traffic_support_type
break
return attr_traffic_support_type
def _getServiceLimits(self):
'''return service limits for each tool within a GP service'''
tool_limits = {}
if self.serviceLimits:
for limit in self.serviceLimits.split(";"):
tool_name, limit_name, limit_value = limit.split(" ")
if limit_value == "#":
limit_value = None
else:
try:
limit_value = nas.str_to_float(limit_value)
except Exception as ex:
pass
tool_limits.setdefault(tool_name, dict())
tool_limits[tool_name][limit_name] = limit_value
else:
            #if service limits are not specified, assume all limits are None
tool_limits = self.TOOL_LIMITS
service_names = nas.NetworkAnalysisService.SERVICE_NAMES
service_limits = dict.fromkeys(service_names, {})
for service_name in service_names:
service_limits[service_name] = {tool_name: tool_limits[tool_name]
for tool_name in service_names[service_name]}
return service_limits
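        # The service_limits parameter is parsed as semicolon-delimited "tool limit value" triplets,
        # for example (values are illustrative only):
        #
        #   "FindRoutes maximumStops 10000;GenerateServiceAreas maximumFacilities 1000"
        #
        # A value of "#" means the limit is left unset (stored as None).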
class PublishRoutingServices(object):
'''class containing the execution logic'''
def __init__(self, *args, **kwargs):
'''constructor'''
#Initialize instance attributes
self.siteAdminToken = {}
self.agsConnectionFile = None
self.serviceMapDocument = None
self.supportingFilesFolder = None
self.owningSystemUrl = ""
self.ignoreSSLErrors = False
self.tokenReferrer = None
self.publishingToolsToolbox = None
#Write messages to a file and as GP messages
log_file = os.path.join(kwargs["service_definition_folder"], "PublishRoutingServices.log")
self.logger = nas.Logger(nas.LOG_LEVEL, log_file)
#if self.logger.DEBUG:
self.logger.debug("Input parameter values")
for arg_name in sorted(kwargs):
if arg_name == "password":
if kwargs[arg_name]:
self.logger.debug(u"{0}: {1}".format(arg_name, "********"))
else:
self.logger.debug(u"{0}: {1}".format(arg_name, kwargs[arg_name]))
else:
self.logger.debug(u"{0}: {1}".format(arg_name, kwargs[arg_name]))
#Store tool parameter values as instance attributes
self.networkDatasets = kwargs.get("network_dataset", None)
if self.networkDatasets:
self.networkDatasets = nas.strip_quotes(self.networkDatasets.split(";"))
self.templateNDS = self.networkDatasets[0]
self.templateNDSDescribe = arcpy.Describe(self.templateNDS)
self.serverUrl = kwargs.get("server_url", None)
self.userName = kwargs.get("user_name", None)
self.password = kwargs.get("password", None)
self.serverDataFolderPath = kwargs.get("server_data_folder_path", None)
self.serviceDefinitionFolder = kwargs.get("service_definition_folder", None)
#Initialize derived outputs
self.networkAnalysisMapService = ""
self.networkAnalysisUtilitiesGeoprocessingService = ""
self.networkAnalysisGeoprocessingService = ""
self.networkAnalysisSyncGeoprocessingService = ""
def execute(self):
'''Main execution logic'''
ROUTING_SERVICE_FOLDER_NAME = "Routing"
ROUTING_SERVICE_FOLDER_DESC = "Contains services used to perform network analysis."
DATA_STORE_ITEM_NAME = "RoutingData"
SUPPORTING_FILES_FOLDER_NAME = "NDSupportingFiles"
NA_MAP_SERVICE_NAME = "NetworkAnalysis"
        NA_MAP_SERVICE_SUMMARY = "Supports visualizing historical traffic and performing route, closest facility, and service area network analyses in synchronous execution mode."
NA_MAP_SERVICE_TAGS = "route, closest facility, service area, traffic"
TRAFFIC_LAYER_MIN_SCALE = 100000
NA_GP_SERVICE_NAME = "NetworkAnalysis"
        NA_GP_SERVICE_SUMMARY = "Performs route, closest facility, service area, location-allocation, and vehicle routing problem analyses in asynchronous execution mode."
NA_GP_SERVICE_TAGS = "route, closest facility, service area, location-allocation, vehicle routing problem, vrp"
NAUTILS_GP_SERVICE_NAME = "NetworkAnalysisUtilities"
        NAUTILS_GP_SERVICE_SUMMARY = "Contains tools that provide auxiliary information for working with network analysis services available with your portal."
NAUTILS_GP_SERVICE_TAGS = "travel modes, tool info, network description"
NASYNC_GP_SERVICE_NAME = "NetworkAnalysisSync"
        NASYNC_GP_SERVICE_SUMMARY = "Performs vehicle routing problem analysis in synchronous execution mode."
NASYNC_GP_SERVICE_TAGS = "vehicle routing problem, vrp"
arcpy.CheckOutExtension("network")
#Get a site admin token
self._getAdminToken()
        #Check if the provided user has administrator privilege
try:
admin_info_url = "{0}/admin/info".format(self.serverUrl)
admin_info_response = nas.make_http_request(admin_info_url, self.siteAdminToken, referer=self.tokenReferrer,
ignore_ssl_errors=self.ignoreSSLErrors)
except urllib2.HTTPError as ex:
            #The admin info request can fail if the server URL is a web adaptor URL that is not authorized for
            #server admin access.
if ex.code == 403:
self.logger.error("Administrative access is disabled at {0}".format(self.serverUrl))
else:
self.logger.error("The following HTTP error occurred when trying to fetch {0}".format(admin_info_url))
self.logger.error("{0}: {1}".format(ex.code, ex.reason))
raise arcpy.ExecuteError
else:
self.userName = admin_info_response.get("loggedInUser", self.userName).split("::")[-1]
user_privilege = admin_info_response.get("loggedInUserPrivilege", "")
            if user_privilege not in ("ADMINISTER",):
self.logger.error("User {0} does not have administrator privilege".format(self.userName))
raise arcpy.ExecuteError
# Check if the network dataset can be successfully used for publishing routing services
try:
analyze_nds = AnalyzeNetworkDataset(self.templateNDSDescribe)
analyze_nds.execute()
except Exception as ex:
self.logger.exception(u"Failed to analyze the network dataset.")
raise arcpy.ExecuteError
# Write any warnings from network dataset analyzer
if analyze_nds.analyzeSucceedeed:
analyzer_warnings = analyze_nds.analyzeMessages["warnings"]
if analyzer_warnings:
self.logger.info("The following warnings were returned when analyzing the input network dataset:")
for msg in analyzer_warnings:
self.logger.warning(msg)
else:
# fail since analyzer returned errors
self.logger.info("The following errors were returned when analyzing the input network dataset:")
for msg in analyze_nds.analyzeMessages["errors"]:
self.logger.error(msg)
raise arcpy.ExecuteError
        #Create an AGS file with server connection info
ags_connection_file_name = "server.ags"
self.agsConnectionFile = os.path.join(self.serviceDefinitionFolder, ags_connection_file_name)
#For federated servers, create the connection file using the signed in user credentials by not passing
#an explicit user name and password.
if self.owningSystemUrl:
user_name = None
password = None
else:
user_name = self.userName
password = self.password
arcpy.mapping.CreateGISServerConnectionFile("ADMINISTER_GIS_SERVICES", self.serviceDefinitionFolder,
ags_connection_file_name, "{0}/admin".format(self.serverUrl),
"ARCGIS_SERVER", True, None, user_name, password, True)
#Check if file gdb containing the network dataset is accessible on the server
nds_filegdb_name = os.path.basename(os.path.dirname(os.path.dirname(self.templateNDSDescribe.catalogPath)))
server_nds_filegdb = u"{}/{}".format(self.serverDataFolderPath, nds_filegdb_name)
self.publishingToolsToolbox = "{0};{1}".format(self.agsConnectionFile, "System/PublishingTools")
arcpy.ImportToolbox(self.publishingToolsToolbox)
validate_data_store_result = arcpy.ValidateServerDataStore_PublishingTools(server_nds_filegdb, "FILE_SHARE")
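        #ValidateServerDataStore runs as a server geoprocessing job; poll until the arcpy Result
        #status reaches 4 (succeeded) or higher (a failure state) before reading its output.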
while validate_data_store_result.status < 4:
time.sleep(1)
if validate_data_store_result.getOutput(0).lower() == "false":
#Return an error as the Server Data Folder Path is invalid
self.logger.error("GIS Server cannot access the file geodatabase containing the network dataset at {}".format(validate_data_store_result.getInput(0)))
self.logger.error(u"Invalid value, {}, for the Server Data Folder Path parameter.".format(self.serverDataFolderPath))
self.logger.error("The value for the Server Data Folder Path parameter must be the folder on the GIS Server containing the file geodatabase that stores your network dataset.")
raise arcpy.ExecuteError
else:
self.logger.debug(u"File geodatabase containing the network dataset on GIS Server: {}".format(validate_data_store_result.getInput(0)))
#Check if service folder name Routing exists. If not create it
#Get a list of existing service folders
services_root_url = "{0}/admin/services".format(self.serverUrl)
services_root_response = nas.make_http_request(services_root_url, self.siteAdminToken,
referer=self.tokenReferrer, ignore_ssl_errors=self.ignoreSSLErrors)
service_folders = services_root_response.get("folders", [])
if ROUTING_SERVICE_FOLDER_NAME in service_folders:
#Fail if any of the services already exist
routing_service_folder_response = nas.make_http_request("{0}/{1}".format(services_root_url,
ROUTING_SERVICE_FOLDER_NAME),
self.siteAdminToken, referer=self.tokenReferrer,
ignore_ssl_errors=self.ignoreSSLErrors)
expected_service_names = (
NA_MAP_SERVICE_NAME + ".MapServer",
NA_GP_SERVICE_NAME + ".GPServer",
NAUTILS_GP_SERVICE_NAME + ".GPServer",
NASYNC_GP_SERVICE_NAME + ".GPServer",
)
service_exists = False
for service in routing_service_folder_response.get("services", []):
service_name = service.get("serviceName", "")
service_type = service.get("type", "")
if u"{0}.{1}".format(service_name, service_type) in expected_service_names:
self.logger.error(u"A {0} with name {1} already exists in {2} folder".format(service_type.replace("Server", " service"),
service_name,
ROUTING_SERVICE_FOLDER_NAME))
service_exists = True
if service_exists:
raise arcpy.ExecuteError
else:
self.logger.info("Using existing {0} service folder to publish services".format(ROUTING_SERVICE_FOLDER_NAME))
else:
service_folder_create_params = dict(self.siteAdminToken)
service_folder_create_params["folderName"] = ROUTING_SERVICE_FOLDER_NAME
service_folder_create_params["description"] = ROUTING_SERVICE_FOLDER_DESC
service_folder_create_response = nas.make_http_request("{0}/createFolder".format(services_root_url),
service_folder_create_params,
referer=self.tokenReferrer,
ignore_ssl_errors=self.ignoreSSLErrors)
if service_folder_create_response.get("status", "") == "success":
self.logger.info("Successfully created {0} service folder".format(ROUTING_SERVICE_FOLDER_NAME))
else:
self.logger.error("Failed to create {0} service folder".format(ROUTING_SERVICE_FOLDER_NAME))
raise arcpy.ExecuteError
#Register the folder containing the network dataset in the data store
#If the data store item already exists, remove it
existing_data_store_items = arcpy.ListDataStoreItems(self.agsConnectionFile, "FOLDER")
for item in existing_data_store_items:
if item[0].lower() == DATA_STORE_ITEM_NAME.lower():
arcpy.RemoveDataStoreItem(self.agsConnectionFile, "FOLDER", DATA_STORE_ITEM_NAME)
break
nds_folder = os.path.dirname(os.path.dirname(os.path.dirname(self.templateNDSDescribe.catalogPath)))
data_store_item_status = arcpy.AddDataStoreItem(self.agsConnectionFile, "FOLDER", DATA_STORE_ITEM_NAME,
self.serverDataFolderPath, nds_folder)
if data_store_item_status.lower() == "success":
self.logger.info("Successfully added {0} entry in the server data store".format(DATA_STORE_ITEM_NAME))
else:
self.logger.error("Failed to add {0} entry in the server data store".format(DATA_STORE_ITEM_NAME))
self.logger.error(data_store_item_status)
raise arcpy.ExecuteError
#Create a folder to store supporting files. If the folder exists, delete it
supporting_files_folder = os.path.join(arcpy.env.scratchFolder, SUPPORTING_FILES_FOLDER_NAME)
if os.path.exists(supporting_files_folder):
try:
shutil.rmtree(supporting_files_folder)
except Exception as ex:
self.logger.exception(u"Failed to delete {0} folder".format(supporting_files_folder))
raise arcpy.ExecuteError
os.mkdir(supporting_files_folder)
self.supportingFilesFolder = supporting_files_folder
#Create supporting files
create_supporting_files = CreateSupportingFiles(network_datasets=";".join(self.networkDatasets),
supporting_files_folder=supporting_files_folder)
try:
self.logger.info(u"Creating supporting files at {0}".format(supporting_files_folder))
create_supporting_files.execute()
except Exception as ex:
self.logger.exception(u"Failed to create supporting files in {0}".format(supporting_files_folder))
raise arcpy.ExecuteError
##Publish NetworkAnalysis Map service
#Create map document with NA layers that is published as a map service
self.serviceMapDocument = os.path.join(self.serviceDefinitionFolder, NA_MAP_SERVICE_NAME + ".mxd")
self.logger.info(u"Creating map document used to publish network analysis map service at {0}".format(self.serviceMapDocument))
mxd = arcpy.mapping.MapDocument("CURRENT")
data_frame = arcpy.mapping.ListDataFrames(mxd)[0]
        #Make sure the mxd contains only network dataset layers and that those layers are visible
for lyr in arcpy.mapping.ListLayers(mxd, "*", data_frame):
if lyr.name in self.networkDatasets:
lyr.visible = True
lyr.minScale = TRAFFIC_LAYER_MIN_SCALE
else:
arcpy.mapping.RemoveLayer(data_frame, lyr)
        #Determine the cost attribute that is used by default for the network dataset. If no cost attribute is
        #marked as use-by-default, the for-else below falls back to the name of the last attribute iterated.
for nds_attr in self.templateNDSDescribe.attributes:
if nds_attr.usageType == "Cost" and nds_attr.useByDefault:
default_cost_attribute = nds_attr.name
break
else:
default_cost_attribute = nds_attr.name
#Create Closest Facility network analysis layer
output_cf_layer = arcpy.na.MakeClosestFacilityLayer(self.templateNDS, "ClosestFacility", default_cost_attribute,
output_path_shape="TRUE_LINES_WITHOUT_MEASURES").getOutput(0)
output_cf_layer.visible = False
arcpy.mapping.AddLayer(data_frame, output_cf_layer, "TOP")
#Create Service Area network analysis layer
output_sa_layer = arcpy.na.MakeServiceAreaLayer(self.templateNDS, "ServiceArea", default_cost_attribute,
hierarchy=False).getOutput(0)
output_sa_layer.visible = False
arcpy.mapping.AddLayer(data_frame, output_sa_layer, "TOP")
#Create Route Network analysis layer
output_route_layer = arcpy.na.MakeRouteLayer(self.templateNDS, "Route", default_cost_attribute,
output_path_shape="TRUE_LINES_WITHOUT_MEASURES").getOutput(0)
output_route_layer.visible = False
arcpy.mapping.AddLayer(data_frame, output_route_layer, "TOP")
#Save the map document
mxd.summary = NA_MAP_SERVICE_SUMMARY
mxd.tags = NA_MAP_SERVICE_TAGS
mxd.saveACopy(self.serviceMapDocument)
#Create a sd draft
na_map_service_sddraft = os.path.join(self.serviceDefinitionFolder, NA_MAP_SERVICE_NAME + "_NAServer.sddraft")
sddraft_msgs = arcpy.mapping.CreateMapSDDraft(arcpy.mapping.MapDocument(self.serviceMapDocument),
na_map_service_sddraft, NA_MAP_SERVICE_NAME,
"FROM_CONNECTION_FILE", self.agsConnectionFile,
folder_name=ROUTING_SERVICE_FOLDER_NAME)
self.logger.debug(u"Analyzer messages when analyzing {0}".format(na_map_service_sddraft))
self.logger.debug(pprint.pformat(sddraft_msgs, indent=2))
#modify SD draft to disable KMLServer and enable NAServer SOE
doc = DOM.parse(na_map_service_sddraft)
type_names = doc.getElementsByTagName('TypeName')
for type_name in type_names:
# Get the TypeName we want to disable.
if type_name.firstChild.data == "KmlServer":
extension = type_name.parentNode
for ext_element in extension.childNodes:
# Disable SOE.
if ext_element.tagName == 'Enabled':
ext_element.firstChild.data = 'false'
elif type_name.firstChild.data == "NAServer":
extension = type_name.parentNode
for ext_element in extension.childNodes:
# Enable SOE.
if ext_element.tagName == 'Enabled':
ext_element.firstChild.data = 'true'
with open(na_map_service_sddraft, "w") as sddraft_fp:
doc.writexml(sddraft_fp)
#Publish SD draft as SD
if self.logger.DEBUG:
shutil.copy2(na_map_service_sddraft, na_map_service_sddraft + ".xml")
na_map_service_sd = os.path.join(self.serviceDefinitionFolder, NA_MAP_SERVICE_NAME + "_NAServer.sd")
self.logger.info("Creating network analysis map service definition at {0}".format(na_map_service_sd))