Fix LeNet Tensorflow
@@ -39,8 +39,9 @@ data_test = torchvision.datasets.CIFAR10(
 )
+#'''
 train_subset_indices=range(int(len(data_train)/2))
-#train_subset_indices=range(BATCH_SIZE*10)
 val_subset_indices=range(int(len(data_train)/2),len(data_train))
+#train_subset_indices=range(BATCH_SIZE*10)
+#val_subset_indices=range(BATCH_SIZE*10, BATCH_SIZE*20)
 
 dl_train = torch.utils.data.DataLoader(data_train, batch_size=BATCH_SIZE, shuffle=False, sampler=SubsetRandomSampler(train_subset_indices))
 dl_val = torch.utils.data.DataLoader(data_train, batch_size=BATCH_SIZE, shuffle=False, sampler=SubsetRandomSampler(val_subset_indices))
-
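The hunk above keeps a single CIFAR-10 train set and splits it in half: the first half feeds dl_train, the second half dl_val, each drawn through a SubsetRandomSampler (which is why shuffle stays False; a DataLoader cannot combine shuffle=True with an explicit sampler). A minimal self-contained sketch of the same split, assuming a BATCH_SIZE constant and a plain ToTensor() transform in place of whatever pipeline the script defines elsewhere:

import torchvision
import torchvision.transforms as T
from torch.utils.data import DataLoader, SubsetRandomSampler

BATCH_SIZE = 300          # assumption: the real value is set earlier in the script
transform = T.ToTensor()  # assumption: the script may use a richer transform pipeline

data_train = torchvision.datasets.CIFAR10(root='./data', train=True,
                                          download=True, transform=transform)

# First half of the official train split -> training, second half -> validation.
half = int(len(data_train) / 2)
train_subset_indices = range(half)
val_subset_indices = range(half, len(data_train))

# shuffle stays False because the samplers already randomize within each subset.
dl_train = DataLoader(data_train, batch_size=BATCH_SIZE, shuffle=False,
                      sampler=SubsetRandomSampler(train_subset_indices))
dl_val = DataLoader(data_train, batch_size=BATCH_SIZE, shuffle=False,
                    sampler=SubsetRandomSampler(val_subset_indices))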
(Binary image changes in this commit: 14 new image files added, 133-332 KiB each, and one existing 43 KiB image updated.)
@@ -6,11 +6,11 @@ from train_utils import *
 tf_names = [
 ## Geometric TF ##
 'Identity',
-'FlipUD',
+#'FlipUD',
 'FlipLR',
 'Rotate',
-'TranslateX',
-'TranslateY',
+#'TranslateX',
+#'TranslateY',
 'ShearX',
 'ShearY',
 
@@ -20,7 +20,7 @@ tf_names = [
 'Brightness',
 'Sharpness',
 'Posterize',
-'Solarize', #=> Image in [0,1] #Not optimal for batches
+#'Solarize', #=> Image in [0,1] #Not optimal for batches
 
 #Not functional
 #'Auto_Contrast', #Not optimal for batches (very slow)
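These two hunks only edit the tf_names whitelist; the names are resolved into actual transform functions later, through the tf_dict comprehension that appears in a hunk further down (tf_dict = {k: TF.TF_dict[k] for k in tf_names}). A small sketch of that selection step, using placeholder identity callables instead of the project's real TF.TF_dict entries:

# Placeholder registry standing in for TF.TF_dict: name -> transform callable.
# The real entries are the project's batched tensor transforms; identity
# functions are used here purely for illustration.
TF_dict = {
    'Identity': lambda x: x,
    'FlipLR':   lambda x: x,
    'Rotate':   lambda x: x,
    'Solarize': lambda x: x,  # present in the registry but disabled in tf_names
}

tf_names = ['Identity', 'FlipLR', 'Rotate']  # commented-out names are simply absent

# Same comprehension as in the script: keep only the whitelisted transforms.
tf_dict = {k: TF_dict[k] for k in tf_names}

assert 'Solarize' not in tf_dict   # disabled transforms never reach the model
assert list(tf_dict) == tf_names   # insertion order of tf_names is preserved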
@@ -37,14 +37,14 @@ else:
 ##########################################
 if __name__ == "__main__":
 
-n_inner_iter = 10
-epochs = 200
+n_inner_iter = 0
+epochs = 100
 dataug_epoch_start=0
 
 #### Classic ####
 '''
-#model = LeNet(3,10).to(device)
-model = WideResNet(num_classes=10, wrn_size=16).to(device)
+model = LeNet(3,10).to(device)
+#model = WideResNet(num_classes=10, wrn_size=16).to(device)
 #model = Augmented_model(Data_augV3(mix_dist=0.0), LeNet(3,10)).to(device)
 #model.augment(mode=False)
 
@@ -68,11 +68,11 @@ if __name__ == "__main__":
 t0 = time.process_time()
 tf_dict = {k: TF.TF_dict[k] for k in tf_names}
 #tf_dict = TF.TF_dict
-aug_model = Augmented_model(Data_augV5(TF_dict=tf_dict, N_TF=2, mix_dist=0.5, fixed_mag=False, shared_mag=True), LeNet(3,10)).to(device)
-#aug_model = Augmented_model(Data_augV4(TF_dict=tf_dict, N_TF=2, mix_dist=0.0), WideResNet(num_classes=10, wrn_size=160)).to(device)
+aug_model = Augmented_model(Data_augV5(TF_dict=tf_dict, N_TF=2, mix_dist=0.5, fixed_mag=True, shared_mag=True), LeNet(3,10)).to(device)
+#aug_model = Augmented_model(Data_augV5(TF_dict=tf_dict, N_TF=2, mix_dist=0.5, fixed_mag=True, shared_mag=True), WideResNet(num_classes=10, wrn_size=160)).to(device)
 print(str(aug_model), 'on', device_name)
 #run_simple_dataug(inner_it=n_inner_iter, epochs=epochs)
-log= run_dist_dataugV2(model=aug_model, epochs=epochs, inner_it=n_inner_iter, dataug_epoch_start=dataug_epoch_start, print_freq=1, loss_patience=10)
+log= run_dist_dataugV2(model=aug_model, epochs=epochs, inner_it=n_inner_iter, dataug_epoch_start=dataug_epoch_start, print_freq=10, loss_patience=None)
 
 ####
 print('-'*9)
@@ -91,16 +91,16 @@ if __name__ == "__main__":
 '''
 #### TF tests ####
 #'''
-res_folder="res/brutus-tests/"
-epochs= 150
-inner_its = [0, 1, 10]
+res_folder="res/good_TF_tests/"
+epochs= 100
+inner_its = [0, 10]
 dist_mix = [0.0, 0.5]
 dataug_epoch_starts= [0]
 tf_dict = {k: TF.TF_dict[k] for k in tf_names}
 TF_nb = [len(tf_dict)] #range(10,len(TF.TF_dict)+1) #[len(TF.TF_dict)]
-N_seq_TF= [1,2,3,4]#[2, 3, 4, 6]
-mag_setup = [(True,True), (False,True), (False, False)]
-nb_run= 3
+N_seq_TF= [1]#[1, 2, 3, 4]
+mag_setup = [(True,True)]#[(True,True), (False,True), (False, False)]
+nb_run= 1
 
 try:
 os.mkdir(res_folder)
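The lists set up in this last hunk (inner_its, dist_mix, dataug_epoch_starts, TF_nb, N_seq_TF, mag_setup, nb_run) read as the axes of the benchmark grid that the rest of the __main__ block, outside this diff, presumably iterates over; mag_setup looks like (fixed_mag, shared_mag) pairs given the Data_augV5 arguments in the earlier hunk, though that pairing is an inference. A hedged sketch of how such a sweep could be driven, with a hypothetical run_one_config helper standing in for the actual Augmented_model / run_dist_dataugV2 call:

import itertools

# Axes copied from the values set in the hunk above (TF_nb omitted for brevity).
inner_its = [0, 10]
dist_mix = [0.0, 0.5]
dataug_epoch_starts = [0]
N_seq_TF = [1]
mag_setup = [(True, True)]   # assumed to mean (fixed_mag, shared_mag)
nb_run = 1

def run_one_config(inner_it, dist, start, n_tf, fixed_mag, shared_mag, run):
    # Hypothetical stand-in: the real script would build Data_augV5 /
    # Augmented_model with these settings and call run_dist_dataugV2.
    print(f"inner_it={inner_it} dist_mix={dist} start={start} "
          f"N_TF={n_tf} fixed_mag={fixed_mag} shared_mag={shared_mag} run={run}")

for inner_it, dist, start, n_tf, (fixed_mag, shared_mag) in itertools.product(
        inner_its, dist_mix, dataug_epoch_starts, N_seq_TF, mag_setup):
    for run in range(nb_run):
        run_one_config(inner_it, dist, start, n_tf, fixed_mag, shared_mag, run)

The try: around os.mkdir(res_folder) presumably just tolerates an already-existing results directory; os.makedirs(res_folder, exist_ok=True) is the more direct idiom if that is the intent.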