create_tf_records.py
import os
import io
import glob
import hashlib
import pandas as pd
import xml.etree.ElementTree as ET
import tensorflow as tf
import random
from PIL import Image
from object_detection.utils import dataset_util
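# This script converts Pascal VOC-style XML annotations into a single TFRecord
# file for the TensorFlow Object Detection API (TF 1.x), attaching a per-object
# weight ('image/object/weight') intended to be read by the API as a
# ground-truth weight.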
# Define the class names and their weights
class_names = ['class_1', 'class_2', ...]
class_weights = [1.0, 1.0, ...]
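# Optional sanity check (a minimal sketch, not in the original script): the
# per-object weights below are looked up by index, so the two lists above must
# stay aligned once the placeholders are filled in.
assert len(class_names) == len(class_weights), 'class_names and class_weights must have the same length'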
def create_example(xml_file):
    tree = ET.parse(xml_file)
    root = tree.getroot()
    image_name = root.find('filename').text
    image_path = root.find('path').text
    file_name = image_name.encode('utf8')
    size = root.find('size')
    # IMPORTANT: assumes "size" to be defined as [width, height] and not [height, width]
    width = int(size[0].text)
    height = int(size[1].text)
    xmin = []
    ymin = []
    xmax = []
    ymax = []
    classes = []
    classes_text = []
    truncated = []
    poses = []
    difficult_obj = []
    weights = []  # per-object weights, looked up from class_weights below
    for member in root.findall('object'):
        # IMPORTANT: assumes "bbox" (member[4]) to be defined as [xmin, ymin, xmax, ymax]
        xmin.append(float(member[4][0].text) / width)
        ymin.append(float(member[4][1].text) / height)
        xmax.append(float(member[4][2].text) / width)
        ymax.append(float(member[4][3].text) / height)
        difficult_obj.append(0)
        class_name = member[0].text
        class_id = class_names.index(class_name)
        weights.append(class_weights[class_id])
        if class_name == 'class_1':
            classes_text.append('class_1'.encode('utf8'))
            classes.append(1)
        elif class_name == 'class_2':
            classes_text.append('class_2'.encode('utf8'))
            classes.append(2)
        # add further elif branches here for any additional entries in class_names
        else:
            print('E: class not recognized!')
        truncated.append(0)
        poses.append('Unspecified'.encode('utf8'))
    full_path = image_path
    with tf.gfile.GFile(full_path, 'rb') as fid:
        encoded_jpg = fid.read()
    encoded_jpg_io = io.BytesIO(encoded_jpg)
    image = Image.open(encoded_jpg_io)
    if image.format != 'JPEG':
        raise ValueError('Image format not JPEG')
    key = hashlib.sha256(encoded_jpg).hexdigest()
    # create the TFRecord Example
    example = tf.train.Example(features=tf.train.Features(feature={
        'image/height': dataset_util.int64_feature(height),
        'image/width': dataset_util.int64_feature(width),
        'image/filename': dataset_util.bytes_feature(file_name),
        'image/source_id': dataset_util.bytes_feature(file_name),
        'image/key/sha256': dataset_util.bytes_feature(key.encode('utf8')),
        'image/encoded': dataset_util.bytes_feature(encoded_jpg),
        'image/format': dataset_util.bytes_feature('jpeg'.encode('utf8')),
        'image/object/bbox/xmin': dataset_util.float_list_feature(xmin),
        'image/object/bbox/xmax': dataset_util.float_list_feature(xmax),
        'image/object/bbox/ymin': dataset_util.float_list_feature(ymin),
        'image/object/bbox/ymax': dataset_util.float_list_feature(ymax),
        'image/object/class/text': dataset_util.bytes_list_feature(classes_text),
        'image/object/class/label': dataset_util.int64_list_feature(classes),
        'image/object/difficult': dataset_util.int64_list_feature(difficult_obj),
        'image/object/truncated': dataset_util.int64_list_feature(truncated),
        'image/object/view': dataset_util.bytes_list_feature(poses),
        'image/object/weight': dataset_util.float_list_feature(weights)
    }))
    return example
def main(_):
    weighted_tf_records_output = 'name_of_records_file.record'  # output file
    annotations_path = '/path/to/annotations/folder/*.xml'  # input annotations
    writer_train = tf.python_io.TFRecordWriter(weighted_tf_records_output)
    filename_list = tf.train.match_filenames_once(annotations_path)
    init = (tf.global_variables_initializer(), tf.local_variables_initializer())
    sess = tf.Session()
    sess.run(init)
    xml_files = sess.run(filename_list)  # avoid shadowing the built-in `list`
    random.shuffle(xml_files)
    for xml_file in xml_files:
        print('-> Processing {}'.format(xml_file))
        example = create_example(xml_file)
        writer_train.write(example.SerializeToString())
    writer_train.close()
    print('-> Successfully converted dataset to TFRecord.')


if __name__ == '__main__':
    tf.app.run()
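

# Verification sketch (not part of the original script): after running, the
# records can be read back with the TF 1.x API to confirm that the per-object
# weights were written. 'name_of_records_file.record' is the output file
# configured in main() above.
#
#   import tensorflow as tf
#   for serialized in tf.python_io.tf_record_iterator('name_of_records_file.record'):
#       example = tf.train.Example.FromString(serialized)
#       feats = example.features.feature
#       print(feats['image/filename'].bytes_list.value,
#             feats['image/object/weight'].float_list.value)
#       break  # inspect only the first record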