diff --git a/MTVulnerability/experiments/attacks/__init__.py b/MTVulnerability/experiments/attacks/__init__.py
index 01e4da0..a632e0e 100644
--- a/MTVulnerability/experiments/attacks/__init__.py
+++ b/MTVulnerability/experiments/attacks/__init__.py
@@ -39,16 +39,17 @@ def load_model(args):
     epoch = args.epoch
 
-    path_model = os.path.join(args.model_root, "savecheckpoint","checkpoint_{epoch}.pth.tar").format(
-        arch=args.arch,
-        dataset=args.dataset,
-        train="".join(train),
-        aux="".join(aux),
-        test="".join(test),
-        epoch=epoch
-    )
-
-    path_model= path_model.replace("\r","")
+    # path_model = os.path.join(args.model_root, "savecheckpoint","checkpoint_{epoch}.pth.tar").format(
+    #     arch=args.arch,
+    #     dataset=args.dataset,
+    #     train="".join(train),
+    #     aux="".join(aux),
+    #     test="".join(test),
+    #     epoch=epoch
+    # )
+
+    # path_model= path_model.replace("\r","")
+    path_model = args.model_root
 
     if args.dataset == "taskonomy" and args.arch == "resnet18":
         model = resnet18_taskonomy(pretrained=False, tasks=args.test_task_set)
@@ -96,7 +97,7 @@ def load_model(args):
         for k in ks:
             state[k[7:]] = state.pop(k)
 
-    model.load_state_dict(state) # , strict=False
+    model.load_state_dict(state, strict=False) # , strict=False
     model = torch.nn.DataParallel(model)
     if torch.cuda.is_available():
         model.cuda()
diff --git a/MTVulnerability/utils/xception_taskonomy_new.py b/MTVulnerability/utils/xception_taskonomy_new.py
index 36531f4..f7cb521 100644
--- a/MTVulnerability/utils/xception_taskonomy_new.py
+++ b/MTVulnerability/utils/xception_taskonomy_new.py
@@ -320,7 +320,7 @@ def __init__(self,size=1, tasks=None,num_classes=None, ozan=False,half_sized_out
         if tasks is not None:
             task_to_params = {
-                'autoencoder': {'output_channels': 3},
+                'autoencoder': {'output_chanels': 3},
                 'class_object' : {'output_chanels' : 0, 'num_classes':1000},
                 'class_places' : {'output_chanels' : 0, 'num_classes':63},
                 'depth_eucln' : {'output_chanels' : 1},
@@ -343,7 +343,7 @@ def __init__(self,size=1, tasks=None,num_classes=None, ozan=False,half_sized_out
             self.final_conv_bn = nn.BatchNorm2d(512)
 
             for task in tasks:
-                output_channels = task_to_params.get(task).get("output_channels", 0)
+                output_channels = task_to_params.get(task).get("output_chanels", 0)
                 nb_classes = task_to_params.get(task).get("num_classes", num_classes)
                 decoder = Decoder(output_channels, nb_classes,half_sized_output=half_sized_output)
                 self.task_to_decoder[task] = decoder
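
For reference, a minimal sketch of the checkpoint-loading pattern that the first hunk ends up with: the path is taken directly from args.model_root, any "module." prefix left by DataParallel training is stripped, and the state dict is loaded with strict=False. The helper name load_checkpoint and the assumption that the checkpoint is a dict containing a "state_dict" entry are hypothetical, not part of the repository.

```python
import torch

def load_checkpoint(model, path_model):
    # Hypothetical sketch; mirrors the modified load_model flow.
    checkpoint = torch.load(path_model, map_location="cpu")
    # Assumes the checkpoint is a dict that may wrap the weights in "state_dict".
    state = checkpoint.get("state_dict", checkpoint)
    # Strip a leading "module." prefix left over from DataParallel training.
    state = {k[len("module."):] if k.startswith("module.") else k: v
             for k, v in state.items()}
    # Non-strict load tolerates decoder heads absent from the checkpoint.
    model.load_state_dict(state, strict=False)
    model = torch.nn.DataParallel(model)
    if torch.cuda.is_available():
        model.cuda()
    return model
```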