inference.py

import numpy as np
import tensorflow as tf
# imagePath = '/media/jai/Local drive A/tmp/imagenet/image1.jpg'
# modelFullPath = '/media/jai/Local drive A/tmp/imagenet/output_graph.pb'
# labelsFullPath = '/media/jai/Local drive A/tmp/imagenet/output_labels.txt'
imagePath = './a.png'
modelFullPath = './classify.pb'
labelsFullPath = './labels.txt'
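
# The three paths above point to the input image, the frozen model graph (.pb)
# and a text file with one class label per line (e.g. the output_graph.pb and
# output_labels.txt referenced in the commented-out paths); adjust them to the
# local setup.
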
def create_graph():
    """Creates a graph from the saved GraphDef file and imports it into the default graph."""
    # Reads the serialized graph_def from the .pb file and imports it.
    with tf.gfile.FastGFile(modelFullPath, 'rb') as f:
        graph_def = tf.GraphDef()
        graph_def.ParseFromString(f.read())
        _ = tf.import_graph_def(graph_def, name='')
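
# NOTE (editor's assumption): the tensor names used below, 'softmax:0' for the
# output and 'DecodeJpeg/contents:0' for the image input, depend on how
# classify.pb was exported. Graphs produced by TensorFlow's retrain.py, for
# example, usually expose the output under 'final_result:0' instead. If the
# names do not match, the operations in the imported graph can be listed with:
#
#   create_graph()
#   for op in tf.get_default_graph().get_operations():
#       print(op.name)
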
def run_inference_on_image():
    """Loads the graph, runs the image through it and returns the top prediction's label."""
    answer = None

    if not tf.gfile.Exists(imagePath):
        tf.logging.fatal('File does not exist %s', imagePath)
        return answer

    image_data = tf.gfile.FastGFile(imagePath, 'rb').read()

    # Creates graph from saved GraphDef.
    create_graph()

    with tf.Session() as sess:
        softmax_tensor = sess.graph.get_tensor_by_name('softmax:0')
        predictions = sess.run(softmax_tensor,
                               {'DecodeJpeg/contents:0': image_data})
        predictions = np.squeeze(predictions)

        top_k = predictions.argsort()[-1:][::-1]  # Index of the single highest-scoring class

        # Read the label file in text mode so the entries are plain strings.
        with open(labelsFullPath, 'r') as f:
            lines = f.readlines()
        labels = [w.rstrip('\n') for w in lines]

        for node_id in top_k:
            human_string = labels[node_id]   # label of the top prediction
            score = predictions[node_id]     # its softmax score

        answer = labels[top_k[0]]
        return answer
def run_inference_on_image2():
    """Same as run_inference_on_image(), but assumes the graph has already been
    imported into the default graph (e.g. by an earlier call to create_graph())."""
    answer = None

    if not tf.gfile.Exists(imagePath):
        tf.logging.fatal('File does not exist %s', imagePath)
        return answer

    image_data = tf.gfile.FastGFile(imagePath, 'rb').read()

    # Note: create_graph() is intentionally not called here; the 'softmax:0'
    # lookup below will fail unless the graph is already loaded.
    with tf.Session() as sess:
        softmax_tensor = sess.graph.get_tensor_by_name('softmax:0')
        predictions = sess.run(softmax_tensor,
                               {'DecodeJpeg/contents:0': image_data})
        predictions = np.squeeze(predictions)

        top_k = predictions.argsort()[-1:][::-1]  # Index of the single highest-scoring class

        # Read the label file in text mode so the entries are plain strings.
        with open(labelsFullPath, 'r') as f:
            lines = f.readlines()
        labels = [w.rstrip('\n') for w in lines]

        for node_id in top_k:
            human_string = labels[node_id]   # label of the top prediction
            score = predictions[node_id]     # its softmax score

        answer = labels[top_k[0]]
        return answer
if __name__ == '__main__':
    print(run_inference_on_image())
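
# NOTE (editor's assumption): tf.gfile, tf.GraphDef and tf.Session are
# TensorFlow 1.x APIs. If this script is run under TensorFlow 2.x, one possible
# (untested here) adaptation is to import the compatibility layer instead of
# the top-level package:
#
#   import tensorflow.compat.v1 as tf
#   tf.disable_v2_behavior()
#
# with the rest of the script left unchanged.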