# apifiles3.py
import hashlib
import logging

import boto3
from botocore.exceptions import ClientError

from config import (
    AWS_BUCKET_NAME,
    TARIC_FILES_FOLDER,
    TARIC_FILES_INDEX,
    STREAM_CHUNK_SIZE,
    S3_ENDPOINT_URL,
)

logger = logging.getLogger("taricapi.files3")

# cached boto3 S3 client (created lazily on first use)
s3_client = None


# AWS S3 session
def session():
    # return the cached S3 client, creating it on first call
    global s3_client  # pylint: disable=W0603
    if s3_client is not None:
        return s3_client
    try:
        s3_client = boto3.client("s3", endpoint_url=S3_ENDPOINT_URL)
    except ClientError:
        logger.error("Error connecting to AWS S3")
        return None
    return s3_client
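

# Usage note: S3_ENDPOINT_URL lets the client target any S3-compatible
# store (e.g. MinIO or localstack) instead of AWS, which is useful for
# local testing. A minimal sketch (hypothetical caller):
#
#   s3 = session()
#   if s3 is not None:
#       s3.list_buckets()
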
# -------------------------------------------------
# File Handling functions
# Note these are to provide a level of abstraction
# - e.g. database or S3 could be physical locations
# -------------------------------------------------


# generic file functions
def modification_date(filepath):
    response = session().get_object(Bucket=AWS_BUCKET_NAME, Key=filepath)
    md = response["Metadata"]
    if "modified" in md:
        modtime = md["modified"]
    else:
        # fall back to the S3 LastModified timestamp,
        # truncated to seconds (YYYY-MM-DDTHH:MM:SS)
        modtime = response["LastModified"].isoformat()[:19]
    return modtime


def find(list_, key, value):
    # return the index of the first dict in list_ whose key equals value, or -1
    for i, dic in enumerate(list_):
        if dic[key] == value:
            return i
    return -1


def file_exists(filename):
    try:
        session().get_object(Bucket=AWS_BUCKET_NAME, Key=filename)
        return True
    except session().exceptions.NoSuchKey:
        return False
    except ClientError as e:
        # returns None (not False) on unexpected errors, so callers can
        # distinguish "definitely missing" from "could not check"
        logger.error("Error occurred in get_object for %s: %s", filename, e)
        return None


def get_file(filepath):
    try:
        response = session().get_object(Bucket=AWS_BUCKET_NAME, Key=filepath)
        return response["Body"]
    except ClientError as e:
        logger.error("Error opening %s: %s", filepath, e)
        return None


def get_file_size(filepath):
    try:
        response = session().get_object(Bucket=AWS_BUCKET_NAME, Key=filepath)
        return response["ContentLength"]
    except ClientError as e:
        logger.error("Error opening %s: %s", filepath, e)
        return None


def read_file(filepath):
    # drain the streaming generator to return the whole object as bytes
    return b"".join(stream_file(filepath))


def stream_file(filepath):
    # note: as a generator, nothing runs (and NoSuchKey is not raised)
    # until the first chunk is requested
    try:
        obj = session().get_object(Bucket=AWS_BUCKET_NAME, Key=filepath)
    except session().exceptions.NoSuchKey as e:
        logger.error("Error opening %s: %s", filepath, e)
        raise
    else:
        # yield the object body in STREAM_CHUNK_SIZE chunks
        while True:
            chunk = obj["Body"].read(STREAM_CHUNK_SIZE)
            if chunk:
                yield chunk
            else:
                break


def write_file(filepath, jsoncontent):
    session().put_object(Body=jsoncontent, Bucket=AWS_BUCKET_NAME, Key=filepath)


def create_multipart_upload(filename):
    resp = session().create_multipart_upload(Bucket=AWS_BUCKET_NAME, Key=filename)
    logger.debug("%s", resp)
    return resp["UploadId"]


def upload_part(filename, uploadid, partnumber, bodypart):
    resp = session().upload_part(
        Bucket=AWS_BUCKET_NAME,
        Key=filename,
        UploadId=uploadid,
        PartNumber=partnumber,
        Body=bodypart,
    )
    # the ETag of each part is needed later to complete the upload
    return {"ETag": resp["ETag"], "PartNumber": partnumber}


def complete_multipart_upload(filename, uploadid, parts):
    # S3 requires the full list of uploaded parts (ETag + PartNumber pairs,
    # as returned by upload_part) to assemble the final object
    session().complete_multipart_upload(
        Bucket=AWS_BUCKET_NAME,
        Key=filename,
        UploadId=uploadid,
        MultipartUpload={"Parts": parts},
    )


def abort_multipart_upload(filename, uploadid):
    session().abort_multipart_upload(
        Bucket=AWS_BUCKET_NAME, Key=filename, UploadId=uploadid
    )
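

# A minimal sketch of the multipart flow (hypothetical caller; "chunks" is
# assumed to be an iterable of byte strings, and S3 requires every part
# except the last to be at least 5 MiB):
#
#   uploadid = create_multipart_upload(filename)
#   try:
#       parts = [
#           upload_part(filename, uploadid, n, chunk)
#           for n, chunk in enumerate(chunks, start=1)
#       ]
#       complete_multipart_upload(filename, uploadid, parts)
#   except Exception:
#       abort_multipart_upload(filename, uploadid)
#       raise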


def get_file_list(prefix):
    if prefix is None:
        prefix = TARIC_FILES_FOLDER
    # a single list call returns at most 1000 keys, so paginate to fetch all
    paginator = session().get_paginator("list_objects_v2")
    contents = []
    for page in paginator.paginate(Bucket=AWS_BUCKET_NAME, Prefix=prefix):
        contents.extend(page.get("Contents", []))
    return contents


def sha512(filepath):
    # hash the file in chunks so large files are not read fully into memory
    hash_sha512 = hashlib.sha512()
    for chunk in stream_file(filepath):
        hash_sha512.update(chunk)
    return hash_sha512.hexdigest()
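

# Example (hypothetical "seq" and "expected_sha512"): verifying a stored
# Taric file against a digest recorded elsewhere, e.g. in the index file:
#
#   if sha512(get_taric_filepath(seq)) != expected_sha512:
#       logger.error("Digest mismatch for %s", seq)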


# Taric file specific functions
def get_taric_filepath(seq):
    return TARIC_FILES_FOLDER + "/" + seq + ".xml"


def get_temp_taric_filepath(seq):
    return TARIC_FILES_FOLDER + "/TEMP_" + seq + ".xml"


def get_taric_index_file():
    return TARIC_FILES_INDEX


def stream_taric_file(seq):
    filepath = get_taric_filepath(seq)
    if not file_exists(filepath):
        return None
    return stream_file(filepath)


def save_temp_taric_file(file, seq):
    filename = get_temp_taric_filepath(seq)
    write_file(filename, file)
    return filename


def remove_temp_taric_file(seq):
    filename = get_temp_taric_filepath(seq)
    logger.debug("Removing file %s", filename)
    session().delete_object(Bucket=AWS_BUCKET_NAME, Key=filename)


def remove_taric_file(seq):
    """In ordinary operation taric files are not removed, but occasionally
    this is required.
    """
    filename = get_taric_filepath(seq)
    logger.debug("Removing file %s", filename)
    session().delete_object(Bucket=AWS_BUCKET_NAME, Key=filename)


def rename_file(fromname, toname):
    # AWS S3 has no rename - have to copy & delete
    logger.debug("Renaming file from %s to %s", fromname, toname)
    session().copy_object(
        Bucket=AWS_BUCKET_NAME,
        CopySource={"Bucket": AWS_BUCKET_NAME, "Key": fromname},
        Key=toname,
        MetadataDirective="COPY",
    )
    session().delete_object(Bucket=AWS_BUCKET_NAME, Key=fromname)


def rename_taric_file(seq, filetime):
    # AWS S3 has no rename - have to copy & delete
    logger.debug("Renaming temp file to %s", get_taric_filepath(seq))
    if filetime is not None:
        logger.debug("Setting Metadata modified to %s", filetime)
        session().copy_object(
            Bucket=AWS_BUCKET_NAME,
            CopySource={"Bucket": AWS_BUCKET_NAME, "Key": get_temp_taric_filepath(seq)},
            Key=get_taric_filepath(seq),
            Metadata={"modified": filetime},
            MetadataDirective="REPLACE",
        )
    else:
        session().copy_object(
            Bucket=AWS_BUCKET_NAME,
            CopySource={"Bucket": AWS_BUCKET_NAME, "Key": get_temp_taric_filepath(seq)},
            Key=get_taric_filepath(seq),
            MetadataDirective="REPLACE",
        )
    session().delete_object(Bucket=AWS_BUCKET_NAME, Key=get_temp_taric_filepath(seq))
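

# A minimal sketch of the intended publish flow (hypothetical caller):
# write to a TEMP_ key first, then promote it via copy+delete, stamping
# the "modified" metadata on the way:
#
#   save_temp_taric_file(xml_bytes, seq)
#   rename_taric_file(seq, "2020-01-01T00:00:00")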