From fcd0217d54723bea97d7264c72da0de30ebe4e87 Mon Sep 17 00:00:00 2001
From: "Harle, Antoine (Contracteur)"
Date: Mon, 3 Feb 2020 11:21:54 -0500
Subject: [PATCH] F1 par classes + plot OK

---
 higher/smart_aug/compare_res.py | 29 +++++++++++++++++++++++++++--
 higher/smart_aug/datasets.py    |  6 +++---
 higher/smart_aug/test_dataug.py | 24 ++++++++++++++++--------
 higher/smart_aug/train_utils.py |  6 ++++--
 higher/smart_aug/utils.py       | 14 +++++++-------
 5 files changed, 57 insertions(+), 22 deletions(-)

diff --git a/higher/smart_aug/compare_res.py b/higher/smart_aug/compare_res.py
index 25d5e6b..28a537d 100755
--- a/higher/smart_aug/compare_res.py
+++ b/higher/smart_aug/compare_res.py
@@ -4,7 +4,7 @@ if __name__ == "__main__":
 
     #'''
     files=[
-        "res/log/Aug_mod(Data_augV5(Mix0.8-23TFx4-Mag)-LeNet)-100 epochs (dataug:0)- 1 in_it.json",
+        "../res/log/Aug_mod(Data_augV5(Mix0.8-3TFx2-MagFx)-resnet18)-2 epochs (dataug:0)- 1 in_it.json",
         #"res/brutus-tests/log/Aug_mod(Data_augV5(Uniform-14TFx3-MagFxSh)-LeNet)-150epochs(dataug:0)-10in_it-0.json",
         #"res/brutus-tests/log/Aug_mod(Data_augV5(Uniform-14TFx3-MagFxSh)-LeNet)-150epochs(dataug:0)-10in_it-1.json",
         #"res/brutus-tests/log/Aug_mod(Data_augV5(Uniform-14TFx3-MagFxSh)-LeNet)-150epochs(dataug:0)-10in_it-2.json",
@@ -15,7 +15,7 @@ if __name__ == "__main__":
         #legend+=str(idx)+'-'+file+'\n'
         with open(file) as json_file:
             data = json.load(json_file)
-            plot_resV2(data['Log'], fig_name=file.replace('.json','').replace('log/',''), param_names=data['Param_names'])
+            plot_resV2(data['Log'], fig_name=file.replace("/log","").replace(".json",""), param_names=data['Param_names'])
             #plot_TF_influence(data['Log'], param_names=data['Param_names'])
     #'''
     ## Loss , Acc, Proba = f(epoch) ##
@@ -92,4 +92,29 @@ if __name__ == "__main__":
             print(idx, data['Accuracy'])
 
     print(files[0], np.mean(accs), np.std(accs), np.mean(times))
+    '''
+
+    '''
+    inner_its = [1]
+    dist_mix = [0]#[0.5, 0.8, 1.0] #Uniform
+    N_seq_TF= [4, 3, 2]
+    nb_run= 3
+
+    for n_inner_iter in inner_its:
+        for n_tf in N_seq_TF:
+            for dist in dist_mix:
+
+                #files = ["../res/brutus-tests2/log/Aug_mod(Data_augV5(Mix%.1f-14TFx%d-MagFxSh)-ResNet18)-150 epochs (dataug:0)- 1 in_it-%s.json"%(dist, n_tf, str(run)) for run in range(nb_run)]
+                files = ["../res/brutus-tests2/log/Aug_mod(Data_augV5(Uniform-14TFx%d-MagFxSh)-ResNet18)-150 epochs (dataug:0)- 1 in_it-%s.json"%(n_tf, str(run)) for run in range(nb_run)]
+                accs = []
+                times = []
+                for idx, file in enumerate(files):
+                    #legend+=str(idx)+'-'+file+'\n'
+                    with open(file) as json_file:
+                        data = json.load(json_file)
+                        accs.append(data['Accuracy'])
+                        times.append(data['Time'][0])
+                        print(idx, data['Accuracy'])
+
+                print(files[0], 'acc', np.mean(accs), '+-',np.std(accs), ',t', np.mean(times))
     '''
\ No newline at end of file
diff --git a/higher/smart_aug/datasets.py b/higher/smart_aug/datasets.py
index 1d70a67..963d96b 100755
--- a/higher/smart_aug/datasets.py
+++ b/higher/smart_aug/datasets.py
@@ -13,7 +13,7 @@ TEST_SIZE = BATCH_SIZE
 #TEST_SIZE = 10000 #legerement +Rapide / + Consomation memoire !
 
 #Wether to download data.
-download_data=True
+download_data=False
 #Number of worker to use.
 num_workers=2 #4
 #Pin GPU memory
@@ -46,7 +46,7 @@ transform_train = torchvision.transforms.Compose([
 #data_test = torchvision.datasets.MNIST("../data", train=False, download=True, transform=transform)
 #CIFAR
 data_train = torchvision.datasets.CIFAR10("../data", train=True, download=download_data, transform=transform_train)
-data_val = torchvision.datasets.CIFAR10("../data", train=True, download=download_data, transform=transform)
+#data_val = torchvision.datasets.CIFAR10("../data", train=True, download=download_data, transform=transform)
 data_test = torchvision.datasets.CIFAR10("../data", train=False, download=download_data, transform=transform)
 
 train_subset_indices=range(int(len(data_train)/2))
@@ -55,5 +55,5 @@ val_subset_indices=range(int(len(data_train)/2),len(data_train))
 #val_subset_indices=range(BATCH_SIZE*10, BATCH_SIZE*20)
 
 dl_train = torch.utils.data.DataLoader(data_train, batch_size=BATCH_SIZE, shuffle=False, sampler=SubsetRandomSampler(train_subset_indices), num_workers=num_workers, pin_memory=pin_memory)
-dl_val = torch.utils.data.DataLoader(data_val, batch_size=BATCH_SIZE, shuffle=False, sampler=SubsetRandomSampler(val_subset_indices), num_workers=num_workers, pin_memory=pin_memory)
+dl_val = torch.utils.data.DataLoader(data_train, batch_size=BATCH_SIZE, shuffle=False, sampler=SubsetRandomSampler(val_subset_indices), num_workers=num_workers, pin_memory=pin_memory)
 dl_test = torch.utils.data.DataLoader(data_test, batch_size=TEST_SIZE, shuffle=False, num_workers=num_workers, pin_memory=pin_memory)
diff --git a/higher/smart_aug/test_dataug.py b/higher/smart_aug/test_dataug.py
index 7bb5033..560636d 100755
--- a/higher/smart_aug/test_dataug.py
+++ b/higher/smart_aug/test_dataug.py
@@ -79,7 +79,7 @@ if __name__ == "__main__":
     }
     #Parameters
     n_inner_iter = 1
-    epochs = 2
+    epochs = 10
     dataug_epoch_start=0
     optim_param={
         'Meta':{
@@ -108,7 +108,7 @@ if __name__ == "__main__":
 
 
         print("{} on {} for {} epochs".format(model_name, device_name, epochs))
-        log= train_classic(model=model, opt_param=optim_param, epochs=epochs, print_freq=5)
+        log= train_classic(model=model, opt_param=optim_param, epochs=epochs, print_freq=1)
         #log= train_classic_higher(model=model, epochs=epochs)
 
         exec_time=time.process_time() - t0
@@ -116,13 +116,21 @@ if __name__ == "__main__":
         print('-'*9)
         times = [x["time"] for x in log]
        out = {"Accuracy": max([x["acc"] for x in log]), "Time": (np.mean(times),np.std(times), exec_time), 'Optimizer': optim_param['Inner'], "Device": device_name, "Log": log}
-        print(str(model),": acc", out["Accuracy"], "in:", out["Time"][0], "+/-", out["Time"][1])
+        print(model_name,": acc", out["Accuracy"], "in:", out["Time"][0], "+/-", out["Time"][1])
         filename = "{}-{} epochs".format(model_name,epochs)
         with open("../res/log/%s.json" % filename, "w+") as f:
-            json.dump(out, f, indent=True)
-            print('Log :\"',f.name, '\" saved !')
+            try:
+                json.dump(out, f, indent=True)
+                print('Log :\"',f.name, '\" saved !')
+            except:
+                print("Failed to save logs :",f.name)
+                print(sys.exc_info()[1])
 
-        #plot_res(log, fig_name="../res/"+filename)
+        try:
+            plot_resV2(log, fig_name="../res/"+filename)
+        except:
+            print("Failed to plot res")
+            print(sys.exc_info()[1])
 
         print('Execution Time : %.00f '%(exec_time))
         print('-'*9)
@@ -160,12 +168,12 @@ if __name__ == "__main__":
                 print('Log :\"',f.name, '\" saved !')
             except:
                 print("Failed to save logs :",f.name)
-                print(sys.exc_info()[0])
+                print(sys.exc_info()[1])
 
         try:
             plot_resV2(log, fig_name="../res/"+filename, param_names=aug_model.TF_names())
         except:
             print("Failed to plot res")
-            print(sys.exc_info()[0])
+            print(sys.exc_info()[1])
         print('Execution Time : %.00f '%(exec_time))
         print('-'*9)
\ No newline at end of file
diff --git a/higher/smart_aug/train_utils.py b/higher/smart_aug/train_utils.py
index 3711677..28aa04a 100755
--- a/higher/smart_aug/train_utils.py
+++ b/higher/smart_aug/train_utils.py
@@ -52,7 +52,7 @@ def test(model):
     #from sklearn.metrics import f1_score
     #f1 = f1_score(labels.data.to('cpu'), predicted.data.to('cpu'), average="macro")
 
-    return accuracy, confmat.f1_metric(average="macro")
+    return accuracy, confmat.f1_metric(average=None)
 
 def compute_vaLoss(model, dl_it, dl):
     """Evaluate a model on a batch of data.
@@ -167,7 +167,7 @@ def train_classic(model, opt_param, epochs=1, print_freq=1):
         tf = time.process_time()
 
         val_loss = compute_vaLoss(model=model, dl_it=dl_val_it, dl=dl_val)
-        accuracy, _ =test(model)
+        accuracy, f1 =test(model)
         model.train()
 
         #### Print ####
@@ -177,6 +177,7 @@ def train_classic(model, opt_param, epochs=1, print_freq=1):
             print('Time : %.00f'%(tf - t0))
             print('Train loss :',loss.item(), '/ val loss', val_loss.item())
             print('Accuracy :', accuracy)
+            print('F1 :', f1.data)
 
         #### Log ####
         data={
@@ -184,6 +185,7 @@ def train_classic(model, opt_param, epochs=1, print_freq=1):
             "train_loss": loss.item(),
             "val_loss": val_loss.item(),
            "acc": accuracy,
+            "f1": f1.cpu().numpy().tolist(),
 
             "time": tf - t0,
             "param": None,
diff --git a/higher/smart_aug/utils.py b/higher/smart_aug/utils.py
index 41326b7..9dedfdc 100755
--- a/higher/smart_aug/utils.py
+++ b/higher/smart_aug/utils.py
@@ -115,15 +115,15 @@ def plot_resV2(log, fig_name='res', param_names=None):
 
     ax[1, 0].plot(epochs,[x["acc"] for x in log], label='Acc')
 
     if "f1" in log[0].keys():
-        ax[1, 0].plot(epochs,[x["f1"]*100 for x in log], label='F1')
-        '''
+        #ax[1, 0].plot(epochs,[x["f1"]*100 for x in log], label='F1')
+        #'''
         #print(log[0]["f1"])
-        if len(log[0]["f1"])==1:
-            ax[1, 0].plot(epochs,[x["f1"]*100 for x in log], label='F1')
-        else:
+        if isinstance(log[0]["f1"], list):
             for c in range(len(log[0]["f1"])):
-                ax[1, 0].plot(epochs,[x["f1"][c]*100 for x in log], label='F1-'+str(c))
-        '''
+                ax[1, 0].plot(epochs,[x["f1"][c]*100 for x in log], label='F1-'+str(c), ls='--')
+        else:
+            ax[1, 0].plot(epochs,[x["f1"]*100 for x in log], label='F1', ls='--')
+        #'''
 
     ax[1, 0].legend()