Ajout de plus de contrôle/vision sur les optimizers

This commit is contained in:
Harle, Antoine (Contracteur) 2019-12-09 13:49:57 -05:00
parent d1ee0c632e
commit 41c7273241
3 changed files with 49 additions and 23 deletions

View file

@ -65,16 +65,28 @@ else:
if __name__ == "__main__":
tasks={
#'classic',
'aug_dataset',
'classic',
#'aug_dataset',
#'aug_model'
}
n_inner_iter = 1
epochs = 150
epochs = 100
dataug_epoch_start=0
optim_param={
'Meta':{
'optim':'Adam',
'lr':1e-2, #1e-2
},
'Inner':{
'optim': 'SGD',
'lr':1e-2, #1e-2
'momentum':0.9, #0.9
}
}
model = LeNet(3,10)
#model = LeNet(3,10)
#model = MobileNetV2(num_classes=10)
model = ResNet(num_classes=10)
#model = WideResNet(num_classes=10, wrn_size=32)
#### Classic ####
@ -83,14 +95,14 @@ if __name__ == "__main__":
model = model.to(device)
print("{} on {} for {} epochs".format(str(model), device_name, epochs))
log= train_classic(model=model, epochs=epochs, print_freq=1)
log= train_classic(model=model, opt_param=optim_param, epochs=epochs, print_freq=1)
#log= train_classic_higher(model=model, epochs=epochs)
exec_time=time.process_time() - t0
####
print('-'*9)
times = [x["time"] for x in log]
out = {"Accuracy": max([x["acc"] for x in log]), "Time": (np.mean(times),np.std(times), exec_time), "Device": device_name, "Log": log}
out = {"Accuracy": max([x["acc"] for x in log]), "Time": (np.mean(times),np.std(times), exec_time), 'Optimizer': optim_param['Inner'], "Device": device_name, "Log": log}
print(str(model),": acc", out["Accuracy"], "in:", out["Time"][0], "+/-", out["Time"][1])
filename = "{}-{} epochs".format(str(model),epochs)
with open("res/log/%s.json" % filename, "w+") as f:
@ -123,7 +135,7 @@ if __name__ == "__main__":
##log= train_classic_higher(model=model, epochs=epochs)
data_train_aug = AugmentedDatasetV2("./data", train=True, download=download_data, transform=transform, subset=(0,int(len(data_train)/2)))
data_train_aug.augement_data(aug_copy=10)
data_train_aug.augement_data(aug_copy=1)
print(data_train_aug)
unsup_ratio = 5
dl_unsup = torch.utils.data.DataLoader(data_train_aug, batch_size=BATCH_SIZE*unsup_ratio, shuffle=True)
@ -135,13 +147,13 @@ if __name__ == "__main__":
model = model.to(device)
print("{} on {} for {} epochs".format(str(model), device_name, epochs))
log= train_UDA(model=model, dl_unsup=dl_unsup, epochs=epochs, print_freq=10)
log= train_UDA(model=model, dl_unsup=dl_unsup, epochs=epochs, opt_param=optim_param, print_freq=10)
exec_time=time.process_time() - t0
####
print('-'*9)
times = [x["time"] for x in log]
out = {"Accuracy": max([x["acc"] for x in log]), "Time": (np.mean(times),np.std(times), exec_time), "Device": device_name, "Log": log}
out = {"Accuracy": max([x["acc"] for x in log]), "Time": (np.mean(times),np.std(times), exec_time), 'Optimizer': optim_param['Inner'], "Device": device_name, "Log": log}
print(str(model),": acc", out["Accuracy"], "in:", out["Time"][0], "+/-", out["Time"][1])
filename = "{}-{}-{} epochs".format(str(data_train_aug),str(model),epochs)
with open("res/log/%s.json" % filename, "w+") as f:
@ -164,13 +176,20 @@ if __name__ == "__main__":
#aug_model = Augmented_model(RandAug(TF_dict=tf_dict, N_TF=2), model).to(device)
print("{} on {} for {} epochs - {} inner_it".format(str(aug_model), device_name, epochs, n_inner_iter))
log= run_dist_dataugV2(model=aug_model, epochs=epochs, inner_it=n_inner_iter, dataug_epoch_start=dataug_epoch_start, print_freq=10, KLdiv=False, loss_patience=None)
log= run_dist_dataugV2(model=aug_model,
epochs=epochs,
inner_it=n_inner_iter,
dataug_epoch_start=dataug_epoch_start,
opt_param=optim_param,
print_freq=10,
KLdiv=True,
loss_patience=None)
exec_time=time.process_time() - t0
####
print('-'*9)
times = [x["time"] for x in log]
out = {"Accuracy": max([x["acc"] for x in log]), "Time": (np.mean(times),np.std(times), exec_time), "Device": device_name, "Param_names": aug_model.TF_names(), "Log": log}
out = {"Accuracy": max([x["acc"] for x in log]), "Time": (np.mean(times),np.std(times), exec_time), 'Optimizer': optim_param, "Device": device_name, "Log": log}
print(str(aug_model),": acc", out["Accuracy"], "in:", out["Time"][0], "+/-", out["Time"][1])
filename = "{}-{} epochs (dataug:{})- {} in_it".format(str(aug_model),epochs,dataug_epoch_start,n_inner_iter)
with open("res/log/%s.json" % filename, "w+") as f: