# main.py
import random

import click
import numpy as np
import torch


def seed(config=None):
    # Seed every random number generator and force deterministic cuDNN
    # behaviour so that runs are reproducible
    if config is None:
        import config
    torch.cuda.manual_seed_all(config.seed)  # needed when using multi-GPU
    torch.manual_seed(config.seed)
    torch.cuda.manual_seed(config.seed)
    np.random.seed(config.seed)
    random.seed(config.seed)
    torch.backends.cudnn.benchmark = False
    torch.backends.cudnn.deterministic = True
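
# A minimal optional sketch for stricter reproducibility, assuming PyTorch >= 1.8 and
# that torch.utils.data.DataLoader workers are used elsewhere in the project; this is
# not part of the original seeding routine:
#   torch.use_deterministic_algorithms(True)
#   DataLoader(dataset, worker_init_fn=lambda worker_id: np.random.seed(config.seed + worker_id))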


@click.group()
def main():
    seed()
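
# The commands below are registered on the `main` click group. Assuming this file is
# the project entry point, `python main.py --help` lists them (click adds --help
# automatically).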


@main.command()
def train_synth():
    """
    Train the model with strong supervision on the SynthText dataset
    :return: None
    """
    from train_synth import train
    train.main()
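
# Example invocation (illustrative; depending on the installed click version the
# command may be spelled train-synth instead of train_synth):
#   python main.py train_synth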


@main.command()
@click.option('-model', '--model', help='Path to Model', required=True)
def test_synth(model):
    """
    Test the model trained with strong supervision on the SynthText dataset
    :param model: Path to trained model
    :return: None
    """
    from train_synth import test
    test.main(model)
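
# Example invocation (illustrative; 'model.pkl' is a placeholder path):
#   python main.py test_synth --model model.pkl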


@main.command()
@click.option('-model', '--model', help='Path to Model trained on SYNTH', required=True)
@click.option('-iter', '--iterations', help='Number of Iterations to do', required=True)
def weak_supervision(model, iterations):
    """
    Train with weak supervision on the ICDAR 2013 dataset
    :param model: Path to the model pre-trained on SynthText using the train_synth command
    :param iterations: Number of iterations to train on ICDAR 2013
    :return: None
    """
    from train_weak_supervision.__init__ import get_initial_model_optimizer, generate_target, train, save_model, test
    import config

    # ToDo - Check the effect of reusing the SynthText optimizer state vs. starting from a fresh optimizer
    model, optimizer = get_initial_model_optimizer(model)
    print('Number of parameters in the model:', sum(p.numel() for p in model.parameters() if p.requires_grad))

    """
    Steps -
        1) Generate the targets using the current model
        2) Test the model on ICDAR 2013
        3) Fine-tune the model on ICDAR 2013 using weak supervision
        4) Save the intermediate model and repeat steps 1-4 for the next iteration
        5) Save the final model
    """
    for iteration in range(config.start_iteration, int(iterations)):

        if iteration not in config.skip_iterations:
            print('Generating for iteration:', iteration)
            generate_target(model, iteration)

        print('Testing for iteration:', iteration)
        f_score_test, precision_test, recall_test = test(model, iteration)
        print(
            'Test Results for iteration:', iteration,
            ' | F-score: ', f_score_test,
            ' | Precision: ', precision_test,
            ' | Recall: ', recall_test
        )

        print('Fine-tuning for iteration:', iteration)
        model, optimizer, loss, accuracy = train(model, optimizer, iteration)

        print('Saving for iteration:', iteration)
        save_model(model, optimizer, 'intermediate', iteration, loss=loss, accuracy=accuracy)

    save_model(model, optimizer, 'final')
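
# Example invocation (illustrative values; 'synth_model.pkl' is a placeholder path and
# the command may be spelled weak-supervision depending on the click version):
#   python main.py weak_supervision --model synth_model.pkl --iterations 10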


@main.command()
@click.option('-model', '--model', help='Path to Model trained on SYNTH', required=True)
@click.option('-folder', '--folder', help='Path to the image folder', required=True)
def synthesize(model, folder):
    """
    Generate heatmaps, bounding boxes and json annotations for every image in a folder
    :param model: Path to the trained model
    :param folder: Path to the image folder
    :return: None
    """
    from train_synth import synthesize

    # Both options are required, so click guarantees they are provided.
    # All outputs are written next to the image folder.
    base_path = '/'.join(folder.split('/')[:-1])

    print('Will generate the Affinity Heatmap at:', base_path + '/affinity_heatmap')
    print('Will generate the Character Heatmap at:', base_path + '/character_heatmap')
    print('Will generate the Word Bbox at:', base_path + '/word_bbox')
    print('Will generate the Character Bbox at:', base_path + '/character_bbox')
    print('Will generate the Affinity Bbox at:', base_path + '/affinity_bbox')
    print('Will generate the json annotations at:', base_path + '/json_annotations')

    synthesize.main(
        folder,
        model_path=model,
        base_path_character=base_path + '/character_heatmap',
        base_path_affinity=base_path + '/affinity_heatmap',
        base_path_bbox=base_path + '/word_bbox',
        base_path_char=base_path + '/character_bbox',
        base_path_aff=base_path + '/affinity_bbox',
        base_path_json=base_path + '/json_annotations',
    )
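
# Example invocation (illustrative paths): outputs such as word_bbox and
# json_annotations are written next to the given image folder.
#   python main.py synthesize --model model.pkl --folder ./input/images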


@main.command()
@click.option('-dataset', '--dataset', help='Name of the dataset you want to pre-process (IC13, IC15)', required=True)
def pre_process(dataset):
    """
    Pre-process the given dataset (IC13 or IC15) and generate the target json annotations
    :param dataset: Name of the dataset
    :return: None
    """
    valid_choice = ['ic13', 'ic15']
    if dataset.lower() not in valid_choice:
        print('Invalid dataset:', dataset.lower(), '- currently available:', valid_choice)
        exit()

    if dataset.lower() == 'ic13':
        import config
        if (
                config.dataset_pre_process['ic13']['train']['target_json_path'] is None or
                config.dataset_pre_process['ic13']['train']['target_folder_path'] is None or
                config.dataset_pre_process['ic13']['test']['target_json_path'] is None or
                config.dataset_pre_process['ic13']['test']['target_folder_path'] is None):
            print(
                'Change the config.py file. '
                'Add the path to the output json file and the target folder path. Detailed instructions in ReadMe.md')
        else:
            from src.utils.data_structure_ic13 import icdar2013_test, icdar2013_train
            icdar2013_test(
                config.dataset_pre_process['ic13']['test']['target_folder_path'],
                config.dataset_pre_process['ic13']['test']['target_json_path']
            )
            icdar2013_train(
                config.dataset_pre_process['ic13']['train']['target_folder_path'],
                config.dataset_pre_process['ic13']['train']['target_json_path']
            )

    elif dataset.lower() == 'ic15':
        import config
        if (
                config.dataset_pre_process['ic15']['train']['target_json_path'] is None or
                config.dataset_pre_process['ic15']['train']['target_folder_path'] is None or
                config.dataset_pre_process['ic15']['test']['target_json_path'] is None or
                config.dataset_pre_process['ic15']['test']['target_folder_path'] is None):
            print(
                'Change the config.py file. '
                'Add the path to the output json file and the target folder path. Detailed instructions in ReadMe.md')
        else:
            from src.utils.data_structure_ic15 import icdar2015_test, icdar2015_train
            icdar2015_test(
                config.dataset_pre_process['ic15']['test']['target_folder_path'],
                config.dataset_pre_process['ic15']['test']['target_json_path']
            )
            icdar2015_train(
                config.dataset_pre_process['ic15']['train']['target_folder_path'],
                config.dataset_pre_process['ic15']['train']['target_json_path']
            )
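
# Example invocation (the target paths must first be filled in under dataset_pre_process
# in config.py, as described in the ReadMe; the command may be spelled pre-process
# depending on the click version):
#   python main.py pre_process --dataset IC13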


if __name__ == "__main__":
    main()