Commit 085508d

[PaddleV3] Fix some of the TensorFlow models that depend on paddle.fluid, part 1 (#1069)
* [Fix] version check
* [Fix] tensorflow version and initializer
* [Fix] tensorflow albert
* [Fix] tensorflow inceptionresnetv2
* [Fix] tensorflow inceptionv3
* [Fix] tensorflow inceptionv4
* [Fix] tensorflow mnasneta1
* [Fix] tensorflow mtcnn onet
* [Fix] tensorflow mtcnn pnet
* [Fix] tensorflow mtcnn rnet
* [Fix] tensorflow mobilenetv1
* [Fix] tensorflow decoder eager
* [Fix] tensorflow mobilenetv2
* [Fix] tensorflow mobilenetv3 large
* [Update] black.list
1 parent 9a98187 commit 085508d

File tree

25 files changed: +34 -95 lines changed

test_benchmark/TensorFlow/AlBert/deploy_infer.py
Lines changed: 0 additions & 1 deletion

@@ -4,7 +4,6 @@

 import numpy as np
 import paddle
-import paddle.fluid as fluid
 from paddle.inference import Config
 from paddle.inference import create_predictor
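
For context, the deploy_infer.py scripts in this commit drive inference through the paddle.inference predictor API (Config / create_predictor), so the deleted fluid import was unused. Below is a minimal sketch of that predictor pattern; the model paths and the input shape are illustrative assumptions, not values taken from these scripts.

import numpy as np
from paddle.inference import Config, create_predictor

# Build a predictor from an exported inference model (paths are assumptions).
config = Config("pd_model/inference_model/model.pdmodel",
                "pd_model/inference_model/model.pdiparams")
config.disable_gpu()  # run on CPU

predictor = create_predictor(config)

# Feed a dummy input through the first input handle (shape is assumed).
input_name = predictor.get_input_names()[0]
input_handle = predictor.get_input_handle(input_name)
data = np.random.rand(1, 3, 224, 224).astype("float32")
input_handle.reshape(list(data.shape))
input_handle.copy_from_cpu(data)

predictor.run()

# Fetch the first output back to host memory.
output_name = predictor.get_output_names()[0]
output_handle = predictor.get_output_handle(output_name)
result = output_handle.copy_to_cpu()
print(result.shape)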

test_benchmark/TensorFlow/AlBert/pd_infer.py
Lines changed: 2 additions & 6 deletions

@@ -1,5 +1,4 @@
 import paddle
-import paddle.fluid as fluid
 import numpy
 import sys
 import pickle
@@ -18,11 +17,8 @@

 # test dygraph
 [inference_program, feed_target_names,
- fetch_targets] = fluid.io.load_inference_model(
-     dirname="pd_model_dygraph/inference_model/",
-     executor=exe,
-     model_filename="model.pdmodel",
-     params_filename="model.pdiparams")
+ fetch_targets] = paddle.static.load_inference_model(
+     path_prefix="pd_model_dygraph/inference_model/model", executor=exe)

 result = exe.run(inference_program,
                  feed={
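
Every pd_infer.py change in this commit follows the same migration: the removed fluid.io.load_inference_model(dirname=..., model_filename=..., params_filename=...) call becomes paddle.static.load_inference_model(path_prefix=..., executor=...), where path_prefix names the model files without their .pdmodel/.pdiparams extensions. Below is a minimal end-to-end sketch of the new call; the input shape is an assumption for illustration.

import numpy as np
import paddle

paddle.enable_static()
exe = paddle.static.Executor(paddle.CPUPlace())

# Loads <path_prefix>.pdmodel and <path_prefix>.pdiparams in one call.
[inference_program, feed_target_names, fetch_targets] = \
    paddle.static.load_inference_model(
        path_prefix="pd_model_dygraph/inference_model/model", executor=exe)

data = np.random.rand(1, 3, 224, 224).astype("float32")  # assumed input shape
result = exe.run(inference_program,
                 feed={feed_target_names[0]: data},
                 fetch_list=fetch_targets)
print(result[0].shape)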

test_benchmark/TensorFlow/InceptionResNetV2/deploy_infer.py
Lines changed: 0 additions & 1 deletion

@@ -4,7 +4,6 @@

 import numpy as np
 import paddle
-import paddle.fluid as fluid
 from paddle.inference import Config
 from paddle.inference import create_predictor

test_benchmark/TensorFlow/InceptionResNetV2/pd_infer.py
Lines changed: 2 additions & 6 deletions

@@ -1,5 +1,4 @@
 import paddle
-import paddle.fluid as fluid
 import numpy as np
 import sys
 import os
@@ -11,11 +10,8 @@
 exe = paddle.static.Executor(paddle.CPUPlace())

 # test dygraph
-[prog, inputs, outputs] = fluid.io.load_inference_model(
-    dirname="pd_model_dygraph/inference_model/",
-    executor=exe,
-    model_filename="model.pdmodel",
-    params_filename="model.pdiparams")
+[prog, inputs, outputs] = paddle.static.load_inference_model(
+    path_prefix="pd_model_dygraph/inference_model/model", executor=exe)
 data = np.load('../dataset/InceptionResNetV2/input.npy')
 result = exe.run(prog, feed={inputs[0]: data}, fetch_list=outputs)

test_benchmark/TensorFlow/InceptionV3/deploy_infer.py
Lines changed: 0 additions & 1 deletion

@@ -4,7 +4,6 @@

 import numpy as np
 import paddle
-import paddle.fluid as fluid
 from paddle.inference import Config
 from paddle.inference import create_predictor

test_benchmark/TensorFlow/InceptionV3/pd_infer.py
Lines changed: 3 additions & 6 deletions

@@ -1,6 +1,5 @@
 from __future__ import print_function
 import paddle
-import paddle.fluid as fluid
 import sys
 import os
 import numpy as np
@@ -17,11 +16,9 @@

 # test dygrah
 [inference_program, feed_target_names,
- fetch_targets] = fluid.io.load_inference_model(
-     dirname="pd_model_dygraph/inference_model/",
-     executor=exe,
-     model_filename="model.pdmodel",
-     params_filename="model.pdiparams")
+ fetch_targets] = paddle.static.load_inference_model(
+     path_prefix="pd_model_dygraph/inference_model/model", executor=exe)
+
 result = exe.run(inference_program,
                  feed={feed_target_names[0]: data},
                  fetch_list=fetch_targets)

test_benchmark/TensorFlow/InceptionV4/deploy_infer.py
Lines changed: 0 additions & 1 deletion

@@ -4,7 +4,6 @@

 import numpy as np
 import paddle
-import paddle.fluid as fluid
 from paddle.inference import Config
 from paddle.inference import create_predictor

test_benchmark/TensorFlow/InceptionV4/pd_infer.py
Lines changed: 2 additions & 6 deletions

@@ -1,5 +1,4 @@
 import paddle
-import paddle.fluid as fluid
 import numpy as np
 import sys
 import os
@@ -11,11 +10,8 @@
 exe = paddle.static.Executor(paddle.CPUPlace())

 # test dygraph
-[prog, inputs, outputs] = fluid.io.load_inference_model(
-    dirname="pd_model_dygraph/inference_model/",
-    executor=exe,
-    model_filename="model.pdmodel",
-    params_filename="model.pdiparams")
+[prog, inputs, outputs] = paddle.static.load_inference_model(
+    path_prefix="pd_model_dygraph/inference_model/model", executor=exe)
 data = np.load('../dataset/InceptionV4/input.npy')
 result = exe.run(prog, feed={inputs[0]: data}, fetch_list=outputs)

test_benchmark/TensorFlow/MNASNetA1/deploy_infer.py
Lines changed: 0 additions & 1 deletion

@@ -4,7 +4,6 @@

 import numpy as np
 import paddle
-import paddle.fluid as fluid
 from paddle.inference import Config
 from paddle.inference import create_predictor

test_benchmark/TensorFlow/MNASNetA1/pd_infer.py
Lines changed: 2 additions & 6 deletions

@@ -1,5 +1,4 @@
 import paddle
-import paddle.fluid as fluid
 import numpy as np
 import sys
 import os
@@ -11,11 +10,8 @@
 exe = paddle.static.Executor(paddle.CPUPlace())

 # test dygraph
-[prog, inputs, outputs] = fluid.io.load_inference_model(
-    dirname="pd_model_dygraph/inference_model/",
-    executor=exe,
-    model_filename="model.pdmodel",
-    params_filename="model.pdiparams")
+[prog, inputs, outputs] = paddle.static.load_inference_model(
+    path_prefix="pd_model_dygraph/inference_model/model", executor=exe)
 data = np.load('../dataset/MNASNetA1/input.npy')
 result = exe.run(prog, feed={inputs[0]: data}, fetch_list=outputs)
