# collection.mk
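#
# a typical run (assuming CONFIG_URL and COLLECTION_NAME are set by the including
# makefile or the environment):
#
#   make collect      # fetch the resources listed in collection/endpoint.csv
#   make collection   # rebuild the collection index (log.csv and resource.csv)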
.PHONY: \
	collect\
	collection\
	commit-collection\
	clobber-today
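
# default locations and URLs, each overridable from the environment or command line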
ifeq ($(COLLECTION_CONFIG_URL),)
COLLECTION_CONFIG_URL=$(CONFIG_URL)collection/$(COLLECTION_NAME)/
endif

ifeq ($(COLLECTION_DIR),)
COLLECTION_DIR=collection/
endif

ifeq ($(RESOURCE_DIR),)
RESOURCE_DIR=$(COLLECTION_DIR)resource/
endif

ifeq ($(DATASTORE_URL),)
DATASTORE_URL=https://files.planning.data.gov.uk/
endif

# data sources
SOURCE_CSV=$(COLLECTION_DIR)source.csv
ENDPOINT_CSV=$(COLLECTION_DIR)endpoint.csv
OLD_RESOURCE_CSV=$(COLLECTION_DIR)old-resource.csv

ifeq ($(COLLECTION_CONFIG_FILES),)
COLLECTION_CONFIG_FILES=\
	$(SOURCE_CSV)\
	$(ENDPOINT_CSV)\
	$(OLD_RESOURCE_CSV)
endif

# collection log
LOG_DIR=$(COLLECTION_DIR)log/
LOG_FILES_TODAY:=$(LOG_DIR)$(shell date +%Y-%m-%d)/

# collection index
COLLECTION_INDEX=\
	$(COLLECTION_DIR)/log.csv\
	$(COLLECTION_DIR)/resource.csv

# collection URL
ifneq ($(COLLECTION),)
COLLECTION_URL=\
	$(DATASTORE_URL)$(COLLECTION)-collection/collection
else
COLLECTION_URL=\
	$(DATASTORE_URL)$(REPOSITORY)/collection
endif
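
# download the collection's log.csv and resource.csv index files, either from the
# public datastore or, when COLLECTION_DATASET_BUCKET_NAME is set, from the S3 bucket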
init::
ifeq ($(COLLECTION_DATASET_BUCKET_NAME),)
	$(eval LOG_STATUS_CODE := $(shell curl -I -o /dev/null -s -w "%{http_code}" '$(COLLECTION_URL)/log.csv'))
	$(eval RESOURCE_STATUS_CODE = $(shell curl -I -o /dev/null -s -w "%{http_code}" '$(COLLECTION_URL)/resource.csv'))
	@if [ $(LOG_STATUS_CODE) -ne 403 ] && [ $(RESOURCE_STATUS_CODE) -ne 403 ]; then \
		echo 'Downloading log.csv and resource.csv from $(COLLECTION_URL)'; \
		curl -qfsL '$(COLLECTION_URL)/log.csv' > $(COLLECTION_DIR)log.csv; \
		curl -qfsL '$(COLLECTION_URL)/resource.csv' > $(COLLECTION_DIR)resource.csv; \
	else \
		echo 'Unable to locate log.csv and resource.csv'; \
	fi
else ifeq ($(REGENERATE_LOG_OVERRIDE),True)
	echo 'Syncing log files to local';
	aws s3 sync s3://$(COLLECTION_DATASET_BUCKET_NAME)/$(REPOSITORY)/$(LOG_DIR) $(LOG_DIR) --only-show-errors;
else
	aws s3 cp s3://$(COLLECTION_DATASET_BUCKET_NAME)/$(REPOSITORY)/collection/log.csv collection/log.csv --only-show-errors;
	aws s3 cp s3://$(COLLECTION_DATASET_BUCKET_NAME)/$(REPOSITORY)/collection/resource.csv collection/resource.csv --only-show-errors;
endif
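
# the two passes of a collection run: collect new resources, then rebuild the index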
first-pass:: collect
second-pass:: collection
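
# fetch each endpoint listed in endpoint.csv, saving new resources under $(RESOURCE_DIR)
# and recording the result in the collection log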
collect:: $(COLLECTION_CONFIG_FILES)
	@mkdir -p $(RESOURCE_DIR)
	digital-land ${DIGITAL_LAND_OPTS} collect $(ENDPOINT_CSV) --collection-dir $(COLLECTION_DIR)
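
# write the collection index out as CSV (log.csv and resource.csv)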
collection::
	digital-land ${DIGITAL_LAND_OPTS} collection-save-csv --collection-dir $(COLLECTION_DIR)
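
# remove today's collection logs and the index files so the collection can be re-run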
clobber-today::
	rm -rf $(LOG_FILES_TODAY) $(COLLECTION_INDEX)
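
# refresh this makefile from $(MAKERULES_URL)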
makerules::
	curl -qfsL '$(MAKERULES_URL)collection.mk' > makerules/collection.mk
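
# pull previously collected resources and logs down from the S3 collection bucket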
load-resources::
	aws s3 sync s3://$(COLLECTION_DATASET_BUCKET_NAME)/$(REPOSITORY)/$(RESOURCE_DIR) $(RESOURCE_DIR) --no-progress

load-logs::
	aws s3 sync s3://$(COLLECTION_DATASET_BUCKET_NAME)/$(REPOSITORY)/$(COLLECTION_DIR)log $(COLLECTION_DIR)log --no-progress
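
# push collected resources, logs, and the collection CSV files up to the S3 bucket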
save-resources::
	aws s3 sync $(RESOURCE_DIR) s3://$(COLLECTION_DATASET_BUCKET_NAME)/$(REPOSITORY)/$(RESOURCE_DIR) --no-progress

save-logs::
	aws s3 sync $(COLLECTION_DIR)log s3://$(COLLECTION_DATASET_BUCKET_NAME)/$(REPOSITORY)/$(COLLECTION_DIR)log --no-progress

save-collection::
	aws s3 cp $(COLLECTION_DIR)log.csv s3://$(COLLECTION_DATASET_BUCKET_NAME)/$(REPOSITORY)/$(COLLECTION_DIR) --no-progress
	aws s3 cp $(COLLECTION_DIR)resource.csv s3://$(COLLECTION_DATASET_BUCKET_NAME)/$(REPOSITORY)/$(COLLECTION_DIR) --no-progress
	aws s3 cp $(COLLECTION_DIR)source.csv s3://$(COLLECTION_DATASET_BUCKET_NAME)/$(REPOSITORY)/$(COLLECTION_DIR) --no-progress
	aws s3 cp $(COLLECTION_DIR)endpoint.csv s3://$(COLLECTION_DATASET_BUCKET_NAME)/$(REPOSITORY)/$(COLLECTION_DIR) --no-progress
ifneq ($(wildcard $(COLLECTION_DIR)old-resource.csv),)
	aws s3 cp $(COLLECTION_DIR)old-resource.csv s3://$(COLLECTION_DATASET_BUCKET_NAME)/$(REPOSITORY)/$(COLLECTION_DIR) --no-progress
endif
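
# fetch an individual resource on demand: from the S3 bucket when
# COLLECTION_DATASET_BUCKET_NAME is set, otherwise from the public datastore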
collection/resource/%:
	@mkdir -p collection/resource/
ifeq ($(COLLECTION_DATASET_BUCKET_NAME),)
	curl -qfsL '$(DATASTORE_URL)$(REPOSITORY)/$(RESOURCE_DIR)$(notdir $@)' > $@
else
	aws s3 cp s3://$(COLLECTION_DATASET_BUCKET_NAME)/$(REPOSITORY)/$(RESOURCE_DIR)$(notdir $@) $@ --no-progress
endif

collection/$(COLLECTION)/resource/%:
	@mkdir -p collection/$(COLLECTION)/resource
ifeq ($(COLLECTION_DATASET_BUCKET_NAME),)
	curl -qfsL '$(DATASTORE_URL)$(REPOSITORY)/$(RESOURCE_DIR)$(notdir $@)' > $@
else
	aws s3 cp s3://$(COLLECTION_DATASET_BUCKET_NAME)/$(REPOSITORY)/$(RESOURCE_DIR)$(notdir $@) $@ --no-progress
endif
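
# fetch the collection configuration CSVs (source.csv, endpoint.csv, old-resource.csv);
# the timestamp query parameter defeats any cached copy of the configuration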
collection/%.csv:
	@mkdir -p $(COLLECTION_DIR)
ifeq ($(COLLECTION_DATASET_BUCKET_NAME),)
	curl -qfsL '$(COLLECTION_CONFIG_URL)$(notdir $@)?version=$(shell date +%s)' > $@
else
	aws s3 cp s3://$(COLLECTION_DATASET_BUCKET_NAME)/config/$(COLLECTION_DIR)$(COLLECTION_NAME)/$(notdir $@) $@ --no-progress
endif
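
# config ensures the configuration CSVs are present; clean removes them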
config:: $(COLLECTION_CONFIG_FILES)

clean::
	rm -f $(COLLECTION_CONFIG_FILES)