mirror of
https://github.com/AntoineHX/smart_augmentation.git
synced 2025-05-04 12:10:45 +02:00
Tests consommation mémoire/temps + méthode KL divergence (UDA)
This commit is contained in:
parent
b60610d9a7
commit
d68034eec1
5 changed files with 214 additions and 37 deletions
|
@ -9,6 +9,16 @@ from torchviz import make_dot
|
|||
import torch
|
||||
import torch.nn.functional as F
|
||||
|
||||
import time
|
||||
|
||||
class timer():
    """Simple stopwatch measuring elapsed time between successive calls.

    Create an instance, then call exec_time() to obtain the number of
    seconds elapsed since construction (or since the previous call to
    exec_time()); each call resets the reference point.
    """
    def __init__(self):
        # Use a monotonic, high-resolution clock for interval timing.
        # time.time() (the original choice) is wall-clock time: NTP or
        # manual clock adjustments could yield wrong or negative deltas.
        self._start_time = time.perf_counter()

    def exec_time(self):
        """Return seconds elapsed since the last call (or construction) and reset the timer.

        Returns:
            float: non-negative elapsed time in seconds.
        """
        end = time.perf_counter()
        res = end - self._start_time
        self._start_time = end
        return res
|
||||
|
||||
def print_graph(PyTorch_obj, fig_name='graph'):
|
||||
graph=make_dot(PyTorch_obj) #Loss give the whole graph
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue