|
18 | 18 | import paddle |
19 | 19 | import yaml |
20 | 20 |
|
21 | | -from paddleseg.cvlibs import Config |
22 | | -from paddleseg.utils import logger |
| 21 | +from paddleseg.cvlibs import Config, SegBuilder |
| 22 | +from paddleseg.utils import logger, utils |
| 23 | +from paddleseg.deploy.export import WrappedModel |
23 | 24 |
|
24 | 25 |
|
def parse_args():
    """Parse command-line arguments for exporting an inference model.

    Returns:
        argparse.Namespace: Parsed arguments with fields `config`,
            `model_path`, `save_dir`, `input_shape` and `output_op`.
    """
    parser = argparse.ArgumentParser(description='Export Inference Model.')
    parser.add_argument("--config", help="The path of config file.", type=str)
    parser.add_argument(
        '--model_path',
        help='The path of trained weights for exporting inference model',
        type=str)
    parser.add_argument(
        '--save_dir',
        help='The directory for saving the exported inference model',
        type=str,
        default='./output/inference_model')
    parser.add_argument(
        "--input_shape",
        nargs='+',
        help="Export the model with fixed input shape, e.g., `--input_shape 1 3 1024 1024`.",
        type=int,
        default=None)
    parser.add_argument(
        '--output_op',
        choices=['argmax', 'softmax', 'none'],
        default="argmax",
        # NOTE: trailing spaces keep the implicitly concatenated strings
        # readable; the logit layout is N*C*H*W (batch first), not H*C*H*W.
        help="Select the op to be appended to the last of inference model, default: argmax. "
        "In PaddleSeg, the output of a trained model is logit (N*C*H*W). We can apply the "
        "argmax or softmax op to the logit according to the actual situation.")

    return parser.parse_args()
57 | 53 |
|
58 | 54 |
|
class SavedSegmentationNet(paddle.nn.Layer):
    """Wrap a segmentation network and append a post-processing op.

    The wrapped network's raw logit outputs are transformed by either an
    `argmax` (class-id map, int32) or a `softmax` (per-class probabilities)
    along the channel axis before being returned.
    """

    def __init__(self, net, output_op):
        super().__init__()
        self.net = net
        self.output_op = output_op
        assert output_op in ['argmax', 'softmax'], \
            "output_op should in ['argmax', 'softmax']"

    def forward(self, x):
        # Select the post-processing transform once, then apply it to
        # every output tensor produced by the underlying network.
        if self.output_op == 'argmax':
            transform = lambda t: paddle.argmax(t, axis=1, dtype='int32')
        else:
            transform = lambda t: paddle.nn.functional.softmax(t, axis=1)
        return [transform(out) for out in self.net(x)]
79 | | - |
def main(args):
    """Export a trained segmentation model as a static inference model.

    Builds the model from the config file, optionally restores trained
    weights, appends the requested output op, converts the model to a
    static graph via `paddle.jit.to_static`, and saves it together with a
    `deploy.yaml` describing the deployment configuration.

    Args:
        args (argparse.Namespace): Parsed command-line arguments; see
            `parse_args` for the available options.

    Raises:
        ValueError: If no config file is given, or the config file has no
            `val_dataset` section.
    """
    # Use explicit raises instead of `assert` so validation survives
    # running under `python -O` (asserts are stripped there).
    if args.config is None:
        raise ValueError(
            'No configuration file specified, please set --config')
    cfg = Config(args.config)
    builder = SegBuilder(cfg)

    utils.show_env_info()
    utils.show_cfg_info(cfg)
    # Signal to PaddleSeg components that we are in export mode.
    os.environ['PADDLESEG_EXPORT_STAGE'] = 'True'

    # Build the model and optionally load trained weights.
    model = builder.model
    if args.model_path is not None:
        state_dict = paddle.load(args.model_path)
        model.set_dict(state_dict)
        logger.info('Loaded trained params successfully.')
    if args.output_op != 'none':
        # Append argmax/softmax to the raw logit outputs.
        model = WrappedModel(model, args.output_op)

    # Default to a fully dynamic NCHW input unless a fixed shape is given.
    shape = [None, 3, None, None] if args.input_shape is None \
        else args.input_shape
    input_spec = [paddle.static.InputSpec(shape=shape, dtype='float32')]
    model.eval()
    model = paddle.jit.to_static(model, input_spec=input_spec)
    paddle.jit.save(model, os.path.join(args.save_dir, 'model'))

    # Save deploy.yaml describing how to run the exported model.
    # `not val_dataset_cfg` also rejects None, not only an empty dict.
    val_dataset_cfg = cfg.val_dataset_cfg
    if not val_dataset_cfg:
        raise ValueError('No val_dataset specified in the configuration file.')
    transforms = val_dataset_cfg.get('transforms', None)
    # argmax emits class ids (int32); softmax/none keep float outputs.
    output_dtype = 'int32' if args.output_op == 'argmax' else 'float32'

    # TODO add test config
    deploy_info = {
        'Deploy': {
            'model': 'model.pdmodel',
            'params': 'model.pdiparams',
            'transforms': transforms,
            'input_shape': shape,
            'output_op': args.output_op,
            'output_dtype': output_dtype
        }
    }
    msg = '\n---------------Deploy Information---------------\n'
    msg += str(yaml.dump(deploy_info))
    logger.info(msg)

    yml_file = os.path.join(args.save_dir, 'deploy.yaml')
    with open(yml_file, 'w') as file:
        yaml.dump(deploy_info, file)

    logger.info(f'The inference model is saved in {args.save_dir}')
136 | 107 |
|
|
0 commit comments