-
Notifications
You must be signed in to change notification settings - Fork 3
/
Copy pathMapObjectsFunctions.py
214 lines (177 loc) · 9.8 KB
/
MapObjectsFunctions.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Nov 6 13:17:06 2020
@author: Santiago
"""
def Object_Detection(bbox, values, client_id, max_score, min_score, token, layers, filename, path):
    """Query the Mapillary v3 object-detections API for one bounding box and
    write every returned feature to ``<path>/<filename>.geojson``.

    Parameters
    ----------
    bbox : str
        Bounding box string passed straight into the API URL.
    values : str
        Comma-separated object values to request ('' requests all values).
    client_id : str
        Mapillary API client id.
    max_score, min_score : float
        Detection-score bounds forwarded to the API.
    token : str
        Bearer token authorizing access to subscription data.
    layers : str
        'segmentations', 'trafficsigns', or anything else (falls back to
        the 'instances' endpoint, matching the original behavior).
    filename : str
        Output file name without the '.geojson' extension.
    path : str
        Directory the GeoJSON file is written into.

    Returns
    -------
    None.  The result is written to disk as a side effect.
    """
    import json, requests, os
    # accumulate every page of results into a single FeatureCollection
    output = {"type": "FeatureCollection", "features": []}
    # the header carries the token that authorizes subscription data access
    header = {"Authorization": "Bearer {}".format(token)}
    # pick the endpoint for the requested layer; anything unrecognized maps
    # to 'instances', exactly as the original if/elif/else chain did
    endpoint = layers if layers in ('segmentations', 'trafficsigns') else 'instances'
    # NOTE: the original URL contained '&&' (an empty query parameter);
    # fixed to a single '&' here
    url = ('https://a.mapillary.com/v3/object_detections/{}'
           '?client_id={}&bbox={}&values={}&min_score={}&max_score={}'
           '&per_page=1000').format(endpoint, client_id, bbox, values,
                                    min_score, max_score)
    # print the URL so we can preview it
    print(url)
    # send the API request with the header and no timeout
    r = requests.get(url, timeout=None, headers=header)
    # if the call fails, keep trying until it succeeds with a 200 status code
    # (best-effort retry loop, preserved from the original design)
    while r.status_code != 200:
        r = requests.get(url, timeout=None, headers=header)
    # get the response as JSON and count how many features were found
    data = r.json()
    data_length = len(data['features'])
    print(data_length)
    output['features'].extend(data['features'])
    # a full page (1000 features, the per_page maximum) signals a next page
    while data_length == 1000:
        link = r.links['next']['url']
        r = requests.get(link, timeout=None, headers=header)
        while r.status_code != 200:
            # BUG FIX: retry the paginated 'link', not the first-page 'url' —
            # the original re-fetched page one here, corrupting pagination
            # whenever a later page request failed
            r = requests.get(link, timeout=None, headers=header)
        data = r.json()
        output['features'].extend(data['features'])
        # print the running total of features found so far
        print("Total features: {}".format(len(output['features'])))
        # still 1000 features on the last call means another page follows
        data_length = len(data['features'])
    with open(os.path.join(path, '{}.geojson'.format(filename)), 'w') as outfile:
        print('DONE')
        json.dump(output, outfile)
"""
Created on Wed Dec 23 2020
@author: gregoriiv
"""
# function returns objects from Mapillary API for given shapefile
# --variables-- #
# path - path to shapefile
# fact - factor for disaggregation boundingbox of shapefile in degrees (ex. 0.02)
# values - reference values you'd like here, separate multiple by comma, leave empty quotes to get all values from Mapillary docs
# (ex. 'regulatory--priority-road--g1') https://www.mapillary.com/developer/api-documentation/#traffic-signs
# max_score/min_score - (ex. 0/1) percentage of object we are looking for in each image in Mapillary database
# token - token from Mapillary API for authorized request
# layers - choose 'trafficsigns' or 'instances' or 'segmentations'
def MapillaryObjFromStudyArea(path, fact, values, client_id, min_score, max_score, token, layers, dir_feat):
    """Download Mapillary objects for an entire study-area shapefile.

    The shapefile's bounding box is split into a grid of smaller bboxes
    (via ``StudyArea2Bboxes.Shp2Bbox``); each bbox is queried with
    ``Object_Detection`` into a temporary 'request.geojson', whose features
    are merged into the cumulative '<values>.geojson' result file.

    Parameters
    ----------
    path : str
        Path to the study-area shapefile (without extension).
    fact : float
        Disaggregation factor for the bounding-box grid, in degrees (e.g. 0.02).
    values : str
        Object value(s) to request; also used as the result file name.
    client_id : str
        Mapillary API client id.
    min_score, max_score : float
        Detection-score bounds.
    token : str
        Bearer token for authorized requests.
    layers : str
        'trafficsigns', 'instances' or 'segmentations'.
    dir_feat : str
        Directory where the result GeoJSON is written.
    """
    import json
    import os
    import time  # kept for the optional rate-limit sleep below
    import StudyArea2Bboxes
    # the 'values' string doubles as the result file name
    filename_fin = values
    # start from an empty FeatureCollection on disk
    output_result = {"type": "FeatureCollection", "features": []}
    with open(os.path.join(dir_feat, filename_fin + '.geojson'), 'w') as outfile:
        json.dump(output_result, outfile)
    # disaggregate the shapefile's bbox into a grid of bboxes
    bboxes = StudyArea2Bboxes.Shp2Bbox(path, fact)
    # temporary file name used for every single-bbox request
    filename = 'request'
    for bb in bboxes:
        # fetch objects for this bbox into request.geojson
        Object_Detection(bb, values, client_id, max_score, min_score, token, layers, filename, dir_feat)
        with open(os.path.join(dir_feat, filename + '.geojson')) as r:
            request = json.load(r)
        # re-read the result file each iteration so partial progress is
        # persisted on disk even if a later bbox request crashes
        with open(os.path.join(dir_feat, filename_fin + '.geojson')) as res:
            result = json.load(res)
        # append features from request.geojson to the cumulative result
        result['features'].extend(request['features'])
        with open(os.path.join(dir_feat, filename_fin + '.geojson'), 'w') as outfile:
            json.dump(result, outfile)
        print(filename_fin + '.UPDATED')
        # the temporary request file is no longer needed
        os.remove(os.path.join(dir_feat, filename + '.geojson'))
        # optional pause between bboxes to be gentle on the API
        #time.sleep(60) #60sec
    # fixed idiom: the original did 'return print(...)', which returns None
    # from print; print and return None explicitly
    print('DONE')
# function returns a geojson with objects (from images) for each set of values requested, for a given shapefile
def MapillaryMultiObjectsRequest(client_id, token):
    """Run Mapillary object requests for every study area and object group
    declared in 'mapil_request_config.yaml'.

    For each area the function creates 'data/<area>/objects/', downloads each
    object value with ``MapillaryObjFromStudyArea``, merges the per-value
    results into one GeoJSON per group (category), clips the merged objects
    to the study-area polygon with geopandas, and writes a
    'metadata_objects.txt' summary alongside the results.

    Parameters
    ----------
    client_id : str
        Mapillary API client id.
    token : str
        Bearer token for authorized requests.
    """
    import json, os
    import yaml
    import geopandas
    import time
    # import request parameters from the config yaml
    with open('mapil_request_config.yaml') as m:
        config = yaml.safe_load(m)
    var = config['VARIABLES_SET']
    for area in var:
        # create 'data/<area>/objects' directories if they do not exist
        dir_main = os.path.join('data', area)
        if not os.path.exists(dir_main):
            os.mkdir(dir_main)
        dir_feat = os.path.join(dir_main, 'objects')
        if not os.path.exists(dir_feat):
            os.mkdir(dir_feat)
        # per-area variables from the config yaml
        fact = var[area]['fact']
        path = var[area]['path']
        max_score = var[area]['max_score']
        min_score = var[area]['min_score']
        # seed the metadata report; SRID lines are inserted later at fixed
        # positions (index 1 after the area line, index 6 before 'Object set:')
        metadata = ["Study area: " + area + "\n",
                    "Datetime of request: " + time.strftime(r"%Y-%m-%d %H:%M:%S", time.localtime()) + "\n",
                    "Path to study area shape file: " + path + "\n",
                    "Factor of studyarea bounding box deaggregation: " + str(fact) + "\n",
                    "Minimal score: " + str(min_score) + "\n",
                    "Maximal score: " + str(max_score) + "\n",
                    "Object set: " + "\n"]
        # one result file per group (category) of features
        for v_cat in var[area]['custom_object_set']:
            metadata.append("- " + str(v_cat) + ": " + str(var[area]['custom_object_set'][v_cat]) + "\n")
            if len(var[area]['custom_object_set'][v_cat]) > 0:
                # start the category result as an empty FeatureCollection
                # (consistency fix: os.path.join instead of '/'-concatenation)
                output_val_cat = {"type": "FeatureCollection", "features": []}
                with open(os.path.join(dir_feat, v_cat + '.geojson'), 'w') as outfile:
                    json.dump(output_val_cat, outfile)
                # request each feature value in the group and merge it in
                for object_ in var[area]['custom_object_set'][v_cat]:
                    MapillaryObjFromStudyArea(path, fact, object_, client_id, min_score, max_score, token, v_cat, dir_feat)
                    # open the per-value request file written above
                    with open(os.path.join(dir_feat, object_ + '.geojson')) as r:
                        request = json.load(r)
                    # open the cumulative category result file
                    with open(os.path.join(dir_feat, v_cat + '.geojson')) as res:
                        result = json.load(res)
                    # append features from the request to the category result
                    for f in request['features']:
                        result['features'].append(f)
                    with open(os.path.join(dir_feat, v_cat + '.geojson'), 'w') as outfile:
                        json.dump(result, outfile)
                    print(v_cat + '.UPDATED')
                    # the per-value file has been merged; remove it
                    os.remove(os.path.join(dir_feat, object_ + '.geojson'))
                # clip objects to the study area polygon with geopandas
                categ_gjson = geopandas.read_file(os.path.join(dir_feat, v_cat + '.geojson'))
                st_area = geopandas.read_file(path + '.shp')
                # record each SRID once in the metadata report
                if ("Objects SRID: " + str(categ_gjson.crs) + "\n") not in metadata:
                    metadata.insert(6, "Objects SRID: " + str(categ_gjson.crs) + "\n")
                if ("Study area SRID: " + str(st_area.crs) + "\n") not in metadata:
                    metadata.insert(1, "Study area SRID: " + str(st_area.crs) + "\n")
                # keep only objects strictly within the study-area geometry
                categ_mask = categ_gjson.within(st_area.loc[0, 'geometry'])
                categ_cut = categ_gjson[categ_mask]
                if categ_cut.empty:
                    # nothing survived the clip; keep the (unclipped) file
                    # as-is — preserved from the original behavior
                    metadata.append(" Number of objects: 0" + "\n")
                else:
                    # replace the merged file with the clipped version
                    os.remove(os.path.join(dir_feat, v_cat + '.geojson'))
                    metadata.append(" Number of objects: " + str(len(categ_cut)) + "\n")
                    categ_cut.to_file(os.path.join(dir_feat, v_cat + '.geojson'), driver='GeoJSON')
            else:
                # empty group in the config: record a zero count
                metadata.append(" Number of objects: 0" + "\n")
        # write the metadata report for this area
        with open(os.path.join(dir_feat, 'metadata_objects.txt'), 'w') as mo:
            mo.writelines(metadata)