diff --git a/src/lib/models/model.py b/src/lib/models/model.py
index fbc906e..643660e 100644
--- a/src/lib/models/model.py
+++ b/src/lib/models/model.py
@@ -54,7 +54,7 @@ def load_model(model, model_path, optimizer=None, resume=False,
       if state_dict[k].shape != model_state_dict[k].shape:
         print('Skip loading parameter {}, required shape{}, '\
               'loaded shape{}. {}'.format(
-          k, model_state_dict[k].shape, state_dict[k].shape) + msg)
+          k, model_state_dict[k].shape, state_dict[k].shape, msg))
         state_dict[k] = model_state_dict[k]
     else:
       print('Drop parameter {}.'.format(k) + msg)
diff --git a/src/lib/models/networks/dlav0.py b/src/lib/models/networks/dlav0.py
index 92fdbcf..3ff343c 100644
--- a/src/lib/models/networks/dlav0.py
+++ b/src/lib/models/networks/dlav0.py
@@ -639,7 +639,7 @@ def dla169up(classes, pretrained_base=None, **kwargs):
   return model
 '''
 
-def get_pose_net(num_layers, heads, add_conv=256, down_ratio=4):
+def get_pose_net(num_layers, heads, head_conv=256, down_ratio=4):
   model = DLASeg('dla{}'.format(num_layers), heads,
                  pretrained=True,
                  down_ratio=down_ratio,
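
Context for the first hunk: the old print call fills a template with four '{}' placeholders using only three .format() arguments and then concatenates msg outside the call, so the skip-parameter branch raises IndexError instead of printing the warning. The sketch below (not part of the repo; the sample values for k, the shapes, and msg are made up for illustration) reproduces the failure and the fixed call:

# Standalone sketch of the format-string fix in load_model.
# k, required_shape, loaded_shape and msg are hypothetical sample values.
k = 'hm.weight'
required_shape = (80, 256, 1, 1)
loaded_shape = (20, 256, 1, 1)
msg = 'If you see this, your model does not fully load the pre-trained weights.'

# Old code: four placeholders, three arguments -> IndexError before '+ msg' runs.
try:
    print('Skip loading parameter {}, required shape{}, '
          'loaded shape{}. {}'.format(k, required_shape, loaded_shape) + msg)
except IndexError as e:
    print('old code fails:', e)

# Fixed code: msg is passed as the fourth format argument.
print('Skip loading parameter {}, required shape{}, '
      'loaded shape{}. {}'.format(k, required_shape, loaded_shape, msg))

The second hunk renames the add_conv parameter of dlav0.py's get_pose_net to head_conv, matching the signature of the other backbones; the model factory passes head_conv as a keyword argument, so the old name would raise a TypeError (unexpected keyword argument) when this backbone is selected.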