Commit de33175f by BoxuanXu

fix converter try except bug

1 parent a72aa6a4
Showing with 14 additions and 8 deletions
@@ -401,17 +401,21 @@ def test():
     logging.info(load_graph(sym))
 def load_checkpoint(params, network_struct):
-    symbol = mx.sym.load(network_struct)
-    save_dict = mx.nd.load(params)
-    arg_params = {}
-    aux_params = {}
-    for k, v in save_dict.items():
+    try:
+        symbol = mx.sym.load(network_struct)
+        save_dict = mx.nd.load(params)
+        arg_params = {}
+        aux_params = {}
+        for k, v in save_dict.items():
             tp, name = k.split(':', 1)
             if tp == 'arg':
                 arg_params[name] = v
             if tp == 'aux':
                 aux_params[name] = v
-    return (symbol, arg_params, aux_params)
+        return (symbol, arg_params, aux_params)
+    except Exception, e:
+        logging.info('model load failed!!!')
+        return (None, None, None)
 #function created by xuboxuan@20170807
 #if __name__ == '__main__':
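
The patched loader now reports failure through its return value instead of letting the exception escape to the caller. For reference, a minimal self-contained sketch of the same function in Python 3 syntax (`except Exception as e:` in place of the Python 2 `except Exception, e:` used in the diff), with the imports it relies on; behavior is otherwise unchanged:

    import logging
    import mxnet as mx

    def load_checkpoint(params, network_struct):
        """Load an MXNet checkpoint; return (None, None, None) if anything fails."""
        try:
            symbol = mx.sym.load(network_struct)   # network definition (.json)
            save_dict = mx.nd.load(params)         # weights (.params)
            arg_params = {}
            aux_params = {}
            for k, v in save_dict.items():
                # checkpoint keys look like 'arg:fc1_weight' or 'aux:bn0_moving_mean'
                tp, name = k.split(':', 1)
                if tp == 'arg':
                    arg_params[name] = v
                if tp == 'aux':
                    aux_params[name] = v
            return (symbol, arg_params, aux_params)
        except Exception as e:
            logging.info('model load failed: %s', e)
            return (None, None, None)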
@@ -420,12 +424,14 @@ def Run_Converter(model_param,model_json,seetanet_model):
     #parser.add_argument('--model_param',type=str,default = None)
     #parser.add_argument('--model_json',type=str,default = None)
     #args = parser.parse_args()
-    #model_param = "wKgB6Fmo2w2ASRqpBky2APcM8zs.params"
-    #model_json = "wKgB6Vmo2w2AXrJbAAGjO2NrZLE75.json"
+    #model_param = "wKgB7Vm2iBaALVPdENbR4G1D6sc.params"
+    #model_json = "wKgB6lm2iBaAcq2PAAANvOLMPRI82.json"
     #seetanet_model = "model_test"
     try:
         sym, arg_params, aux_params = \
             load_checkpoint(model_param, model_json)
+        if sym is None or arg_params is None or aux_params is None:
+            return None
         graph = load_graph(model_json)
         converter = Converter(graph, arg_params, aux_params)
         logging.info('start to convert model parameters')
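
With the new None-check, Run_Converter bails out before touching the graph when the checkpoint cannot be read. A hedged usage sketch of the call site (the argument order follows the signature in the hunk header; the sample file names are taken from the commented-out block above; the function's return value on success is not visible in this excerpt):

    # hypothetical driver code, not part of the commit
    model_param = "wKgB7Vm2iBaALVPdENbR4G1D6sc.params"
    model_json = "wKgB6lm2iBaAcq2PAAANvOLMPRI82.json"
    seetanet_model = "model_test"

    result = Run_Converter(model_param, model_json, seetanet_model)
    if result is None:
        logging.info('conversion skipped: checkpoint could not be loaded')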