Commit 512a4b09 authored by Mano Brabant's avatar Mano Brabant
Browse files

First version of the benchmarking tool

parent a6ece072
......@@ -10,9 +10,10 @@ def parse():
# #name for user #name for program #type #default values #explanation sentences
parser.add_argument('--input_dir', dest='input_dir', type=str, default='./PyTorchCheckpoint/', help='directory of saved checkpoints for denoising operation or retraining')
parser.add_argument('--output_dir', dest='output_dir', type=str, default=None, help='directory of saved checkpoints for denoising operation or retraining')
parser.add_argument('--train_dir', dest='train_dir', type=str, default='./Holography/DATABASE/', help='directory of training database')
parser.add_argument('--eval_dir', dest='eval_dir', type=str, default='./Holography/DATABASE/', help='directory of training database')
parser.add_argument('--train_dir', dest='train_dir', type=str, default='./Holography/HOLODEEPmat/DATABASE/', help='directory of training database')
parser.add_argument('--eval_dir', dest='eval_dir', type=str, default='./Holography/HOLODEEPmat/DATABASE/', help='directory of evaluation database')
parser.add_argument('--test_dir', dest='test_dir', type=str, default='./Holography/DATAEVAL/DATAEVAL/', help='directory of testing database')
parser.add_argument('--save_test_dir', dest='save_test_dir', type=str, default='./TestImages/', help='directory where results of de-noising operation will be saved')
......@@ -26,50 +27,51 @@ def parse():
parser.add_argument('--test_patterns', dest='test_patterns', type=int, nargs='+', default=(1, 2, 3, 4, 5), help='patterns used for testing')
parser.add_argument('--test_noises', dest='test_noises', type=str, default="0-1-1.5-2-2.5", help='noise levels used for testing ')
parser.add_argument('--clean_train', dest='clean_train', type=str, default='data1/img_clean_train_1-2-3-4-5_0-1-1.5-2-2.5_two_50_50_384.npy', help='filepath of noise free file for training')
parser.add_argument('--noisy_train', dest='noisy_train', type=str, default='data1/img_noisy_train_1-2-3-4-5_0-1-1.5-2-2.5_two_50_50_384.npy', help='filepath of noisy file for training')
parser.add_argument('--clean_train', dest='clean_train', type=str, default='data1/img_clean_train_1_0_two_50_50_3.npy', help='filepath of noise free file for training')
parser.add_argument('--noisy_train', dest='noisy_train', type=str, default='data1/img_noisy_train_1_0_two_50_50_3.npy', help='filepath of noisy file for training')
parser.add_argument('--clean_eval', dest='clean_eval', type=str, default='data1/img_clean_train_1-2-3_0-1-1.5two.npy', help='filepath of noise free file for eval')
parser.add_argument('--noisy_eval', dest='noisy_eval', type=str, default='data1/img_noisy_train_1-2-3_0-1-1.5two.npy', help='filepath of noisy file for eval')
parser.add_argument('--num_epochs', dest='num_epochs', type=int, default=200, help='number of epochs to train')
parser.add_argument('--D', dest='D', type=int, default=4, help='number of dilated convolutional layer (resBlock)')
parser.add_argument('--C', dest='C', type=int, default=64, help='kernel size of convolutional layer')
parser.add_argument('--plot', dest='plot', action='store_true', help='plot loss during training')
parser.add_argument('--lr', dest='lr', type=float, default=1e-3, help='learning rate for training')
parser.add_argument('--train_image_size', dest='train_image_size',type=int, nargs='+', default=(50, 50), help='size of train images')
parser.add_argument('--eval_image_size', dest='eval_image_size', type=int, nargs='+', default=(1024, 1024), help='size of eval images')
parser.add_argument('--test_image_size', dest='test_image_size', type=int, nargs='+', default=(1024, 1024), help='size of test images')
parser.add_argument('--image_mode', dest='image_mode', type=int, default=1, help='1 or 3 (black&white or RGB)')
parser.add_argument('--batch_size', dest='batch_size', type=int, default=384, help="")
parser.add_argument('--epoch', dest='epoch', type=int, default=None, help='epoch\'s number from which we going to retrain')
parser.add_argument('--test_mode', dest='test_mode', action='store_true', help='testing phase')
parser.add_argument('--tsf', dest='tsf', action='store_true', help='add if code in tensorflow')
parser.add_argument('--graph', dest='graph', action='store_true', help='add if graph is visible')
parser.add_argument('--graph_fin', dest='graph_fin', action='store_true', help='add if graph is visible during training')
# Tensorflow arguments
parser.add_argument('--use_gpu', dest='use_gpu', type=int, default=1, help='gpu flag, 1 for GPU and 0 for CPU')
parser.add_argument('--checkpoint_dir', dest='ckpt_dir', type=str, default='./checkpoint', help='models are saved here')
parser.add_argument('--ckpt_dir', dest='ckpt_dir', type=str, default='./checkpoint', help='models are saved here')
parser.add_argument('--sample_dir', dest='sample_dir', type=str, default='./sample', help='sample are saved here')
#parser.add_argument('--test_dir', dest='test_dir', default='./test', help='test sample are saved here')
parser.add_argument('--params', dest='params', type=str, default='', help='hyper parameters')
parser.add_argument('--test_noisy_img', dest='noisy_img', type=str, help='path of the noisy image for testing')
parser.add_argument('--test_noisy_key', dest='noisy_key', type=str, help='key for noisy matlab image for testing')
parser.add_argument('--test_clean_img', dest='clean_img', type=str, help='path of the clean image for testing')
parser.add_argument('--test_clean_key', dest='clean_key', type=str, help='key for clean matlab image for testing')
parser.add_argument('--test_flip', dest='flip', type=bool, default=False, help='option for upside down flip of noisy (and clean) test image')
#parser.add_argument('--test_ckpt_index', dest='ckpt_index', type=str, default='', help='name and directory of the checkpoint that will be restored.')
parser.add_argument('--save_dir', dest='save_dir', type=str, default='./data1/', help='dir of patches')
......@@ -88,12 +90,12 @@ def parse():
parser.add_argument('--patch_per_image', dest='patch_per_image', type=int, default=384)
parser.add_argument('--noise_src_dir', dest='noise_src_dir', default="./chemin/")
parser.add_argument('--clean_src_dir', dest='clean_src_dir', default="./chemin/")
parser.add_argument('--perform_validation', dest='perform_validation', action="store_true")
parser.add_argument('--scales', dest='scales', type=int, nargs='+', default=[1], help='size of test images')
parser.add_argument('--scales', dest='scales', type=int, nargs='+', default=(1), help='size of test images')
parser.add_argument('--originalsize', dest='originalsize', type=int, nargs='+', default=(1024, 1024), help='size of test images')
return parser.parse_args()
......
import numpy
import time
import datetime
import itertools
import subprocess
import os
import fnmatch
from statistics import mean
import csv
from argument import *
from utils import *
import os
# NOTE(review): presumably set to work around an MKL threading-layer clash
# when numpy and other native libraries are mixed — confirm it is still needed.
os.environ['MKL_THREADING_LAYER'] = 'GNU'
import random
random.seed(10)  # fixed seed so successive benchmark runs are reproducible
# Default command-line arguments (from argument.parse) used as the base
# configuration that the benchmark csv file can override.
valid_args = vars(parse())
class Report(object):
    """
    This class represents a report for a benchmark.

    It collects the psnr/std results written by the test phase
    (one ``*.res`` file per test image under ``<input_dir>/Test/``)
    and aggregates them into averages.
    """

    def __init__(self, benchmark):
        """
        This constructor creates a new report for a benchmark.

        :param benchmark: The benchmark from which a report will be made
        """
        self.res_psnr = []  # psnr value parsed from each *.res file
        self.res_std = []   # std value parsed from each *.res file
        self.benchmark = benchmark

    def make_report(self):
        """
        Walk the benchmark's Test directory, parse every ``*.res`` file
        and accumulate the psnr/std values, then print their averages.
        """
        input_dir = self.benchmark.getInputDir() + "/Test/"
        print(input_dir)

        for root, _dirs, files in os.walk(input_dir):
            for file in fnmatch.filter(files, "*.res"):
                print(file)
                with open(os.path.join(root, file), "r") as f:
                    # Split the file content once, instead of once per metric
                    # as the previous version did.
                    lines = f.read().split('\n')
                # A .res file contains one "psnr... : <value>" line and one
                # "std... : <value>" line.
                psnr_line = next(line for line in lines if line.startswith('psnr'))
                std_line = next(line for line in lines if line.startswith('std'))
                self.res_psnr.append(float(psnr_line.split(':')[1]))
                self.res_std.append(float(std_line.split(':')[1]))

        print(input_dir)
        # mean() raises StatisticsError on an empty list, so only print the
        # averages when at least one result file was found.
        if self.res_psnr:
            print("Average psnr : ", mean(self.res_psnr))
            print("Average std : ", mean(self.res_std))
        else:
            print("No .res file found in", input_dir)
        print("\n\n\n\n\n")

    def toCSV(self):
        """
        Write the averaged results back into the benchmark's csv file.
        """
        if len(self.res_psnr) == 0:
            self.make_report()

        with open(self.benchmark.file_path, 'r', newline='') as csvfile:
            reader = csv.reader(csvfile, delimiter=';')
            mylist = list(reader)

        print(mylist)
        print(mylist[0])
        print(mylist[2])
        # NOTE(review): the target cells are hard-coded — they assume a fixed
        # layout of the csv template (res_brut.csv); confirm against that file.
        mylist[0][0] = self.benchmark.input_dir
        mylist[2][1] = "1"
        mylist[3][1] = "1"
        mylist[4][1] = mean(self.res_std)
        mylist[4][2] = mean(self.res_psnr)

        with open(self.benchmark.file_path, 'w', newline='') as csvfile:
            spamwriter = csv.writer(csvfile, delimiter=';',
                                    quotechar='|', quoting=csv.QUOTE_MINIMAL)
            spamwriter.writerows(mylist)
class Benchmark(object):
    """
    This class is a tool for making benchmarks.

    The benchmark configuration is read from a csv file; each run
    creates a timestamped working directory, launches the data
    generation, the training and the testing as subprocesses, and
    finally writes a summary of the results back into the csv file.
    """

    def __init__(self, file_path):
        """
        This constructor creates a new benchmark for a csv file.

        :param file_path: The path to the csv file
        """
        # NOTE(review): the ":" characters produced by %H:%M:%S are not valid
        # in Windows paths — confirm this tool only runs on Unix.
        self.input_dir = 'Benchmark/benchmark_{}'.format(datetime.datetime.now().strftime("%d_%m_%Y-%H:%M:%S"))
        self.file_path = file_path
        self.params = dict()

    def getParam(self):
        """
        This method gets the changing param from the csv file.

        The command-line defaults (valid_args) are copied first, then any
        csv row whose first cell names a known argument overrides it.
        """
        for arg in valid_args:
            if isinstance(valid_args[arg], tuple):
                # Tuple defaults (e.g. pattern numbers) are flattened to a
                # space-separated string so they can go back on a command line.
                # NOTE(review): list defaults (e.g. --scales) are NOT flattened
                # here — confirm that is intended.
                self.params[arg] = ' '.join([str(i) for i in valid_args[arg]])
            else:
                self.params[arg] = valid_args[arg]

        with open(self.file_path, 'r', newline='') as csvfile:
            spamreader = csv.reader(csvfile, delimiter=';', quotechar='|')
            for row in spamreader:
                if len(row) >= 2 and row[0] in valid_args:
                    self.params[row[0]] = row[1]

        # The working directories are imposed by the benchmark itself.
        self.params['input_dir'] = self.input_dir
        self.params['save_test_dir'] = '.'
        print(self.params)

    def getNbTest(self):
        """
        This method returns the number of tests launched by the benchmark.
        """
        # Only one configuration is supported for the moment.
        return 1

    def getInputDir(self):
        """
        This method returns the benchmark working directory.
        """
        return '{}'.format(self.input_dir)

    def toString(self):
        """
        This method returns a representation of the benchmark.
        """
        return """
        Benchmark :
        {}
        """.format(self.params)

    def get_params_string(self):
        """
        This method joins in a string all params from the csv file.

        Every usable parameter becomes ``--name value``; parameters whose
        value is None, '' or False have no command-line form and yield an
        empty slot instead (so the join output keeps its historic shape).
        """
        # NOTE(review): v != False also excludes integer 0 (e.g. --use_gpu 0),
        # and a True value would be rendered as "--flag True", which a
        # store_true option does not accept — confirm both are intended.
        parts = []
        for k, v in self.params.items():
            if v is not None and v != '' and v != False:
                parts.append("--" + k + " " + str(v))
            else:
                parts.append("")
        return ' '.join(parts)

    def launch_benchmark_data(self):
        """
        This method launches the creation of the data configuration for
        the different tests (just one for the moment).

        After the patch-generation subprocess finishes, the training and
        evaluation .npy file names it produced are reconstructed with
        extract_sess_name and stored in the params.
        """
        process = []
        list_params = self.get_params_string()

        cmd = 'python3 generate_patches_holo_fromMAT.py {} &'.format(list_params)

        p = subprocess.Popen(cmd, shell=True,
                             stdin=subprocess.PIPE,
                             stdout=subprocess.PIPE)
        process.append(p)

        # Fix: communicate() is called exactly once per process; the previous
        # code called it a second time on already-closed pipes, which can
        # raise "ValueError: I/O operation on closed file".
        exit_codes = [proc.communicate() for proc in process]

        sess_name = extract_sess_name(tuple(self.params['train_patterns'].split(" ")), self.params['train_noises'], self.params['phase_type'], self.params['stride'], self.params['patch_size'], self.params['patch_per_image']).replace(' ', '')
        self.params['clean_train'] = os.path.join(self.params['save_dir'], "img_clean_train_" + sess_name + ".npy")
        self.params['noisy_train'] = os.path.join(self.params['save_dir'], "img_noisy_train_" + sess_name + ".npy")

        sess_name = extract_sess_name(tuple(self.params['eval_patterns'].split(" ")), self.params['eval_noises'], self.params['phase_type'], self.params['stride'], self.params['patch_size'], self.params['patch_per_image']).replace(' ', '')
        self.params['clean_eval'] = os.path.join(self.params['save_dir'], "img_clean_train_" + sess_name + ".npy")
        self.params['noisy_eval'] = os.path.join(self.params['save_dir'], "img_noisy_train_" + sess_name + ".npy")

        print(self.params)
        #print("exit_codes :", exit_codes)

    def launch_benchmark_training(self):
        """
        This method launches the training for the different configurations
        (just one for the moment).
        """
        process = []
        list_params = self.get_params_string()

        # NOTE(review): list_params may itself contain --num_epochs, which
        # argparse would let override the leading "--num_epochs 1" — confirm
        # which value is meant to win.
        cmd = 'python3 main_holo.py --num_epochs 1 --output_dir {} {} &'.format('/', list_params)

        p = subprocess.Popen(cmd, shell=True,
                             stdin=subprocess.PIPE,
                             stdout=subprocess.PIPE)
        process.append(p)

        # communicate() waits for the subprocess and is called exactly once
        # (see launch_benchmark_data for the rationale).
        exit_codes = [proc.communicate() for proc in process]
        #print("exit_codes :", exit_codes)

    def launch_benchmark_testing(self):
        """
        This method launches the tests for the different configurations
        (just one for the moment).
        """
        process = []
        save_test_dir = '.'
        list_params = self.get_params_string()

        cmd = 'python3 main_holo.py --test_mode --save_test_dir {} {} &'.format(save_test_dir, list_params)
        print("Testing CMD : ", cmd)

        p = subprocess.Popen(cmd, shell=True,
                             stdin=subprocess.PIPE,
                             stdout=subprocess.PIPE)
        process.append(p)

        # communicate() waits for the subprocess and is called exactly once.
        exit_codes = [proc.communicate() for proc in process]
        #print("exit_codes :", exit_codes)

    def summaryRes(self):
        """
        This method makes a summary for the benchmark.
        """
        report = Report(self)
        report.make_report()
        report.toCSV()

    def launch_benchmark(self):
        """
        This method launches the benchmark process
        that has been configured by the csv file.
        """
        self.getParam()

        os.makedirs(self.input_dir)
        print(self.toString())
        # Keep a copy of the configuration next to the results.
        with open('{}/config_benchmark.txt'.format(self.input_dir), "w+") as f:
            print(self.toString(), file=f)

        timeData = time.time()
        self.launch_benchmark_data()
        print("Time elapsed configuring the data : ", time.time() - timeData)

        timeElapsed = time.time()
        self.launch_benchmark_training()
        print("Time elapsed training : ", time.time() - timeElapsed)

        timeElapsed = time.time()
        self.launch_benchmark_testing()
        print("Time elapsed testing : ", time.time() - timeElapsed)

        self.summaryRes()
if __name__ == '__main__':
    # Run the whole benchmark pipeline and report the total wall-clock time.
    start = time.time()

    benchmark = Benchmark('res_brut.csv')
    print("Number of test : ", benchmark.getNbTest())
    benchmark.launch_benchmark()

    print("Time elapsed : ", time.time() - start)
......@@ -59,11 +59,11 @@ parser = argparse.ArgumentParser(description='')
#parser.add_argument('--noise_src_dir', dest='noise_src_dir', default='/lium/raid01_c/tahon/holography/HOLODEEP', help='dir of noisy data')
#parser.add_argument('--train_image', dest='train_patterns', default=hparams.train_patterns, help='patterns of images for training')
#parser.add_argument('--train_noise', dest='train_noise', default=hparams.train_noise, help='noise values for training images')
parser.add_argument('--save_dir', dest='save_dir', default='./data1', help='dir of patches')
#parser.add_argument('--save_dir', dest='save_dir', default='./data1', help='dir of patches')
#parser.add_argument('--patch_size', dest='pat_size', type=int, default=hparams.patch_size, help='patch size')#50 for RGB and 70 for grayscale
#parser.add_argument('--stride', dest='stride', type=int, default=hparams.stride, help='stride')
#parser.add_argument('--step', dest='step', type=int, default=hparams.step, help='step')
parser.add_argument('--params', dest='params', type=str, default='', help='hyper parameters')
#parser.add_argument('--params', dest='params', type=str, default='', help='hyper parameters')
# check output arguments
#parser.add_argument('--from_file', dest='from_file', default="./data/img_clean_pats.npy", help='get pic from file')
#parser.add_argument('--num_pic', dest='num_pic', type=int, default=10, help='number of pic to pick')
......@@ -87,7 +87,7 @@ def generate_patches(isDebug=True):
#ipdb.set_trace()
print("number of clean training data {0} and noisy {1}".format( len(cleanmat), len(noisymat)))
scales = 1 #et on ne le bouge pas !!!! hparams.scales #old version [1, 0.9, 0.8, 0.7]
if args.patch_size > args.originalsize[0]:
sys.exit('patch size > size of original size of images')
......@@ -104,11 +104,11 @@ def generate_patches(isDebug=True):
else:
numPatches = nb_final_patch
print ("total patches = %d , batch size = %d, total batches = %d" % (numPatches, args.batch_size, numPatches / args.batch_size))
# data matrix 4-D
cleaninputs = np.zeros((numPatches, args.patch_size, args.patch_size, 1))
noisyinputs = np.zeros((numPatches, args.patch_size, args.patch_size, 1))
print("Shape of input (including noisy) : ", cleaninputs.shape)
print("Shape of input (including noisy) : ", cleaninputs.shape)
#ipdb.set_trace()
cpt_img_scale = 0
# generate patches
......@@ -116,7 +116,7 @@ def generate_patches(isDebug=True):
cleanimg = cleanmat[i] ##import matlab image img = loadmat(filepaths[i]) ? TO CHECK
#noisyimg = Image.open(noisyfilepaths[i]).convert('L') # convert RGB to gray, no need to convert: grayscale
noisyimg = noisymat[i] ##import matlab image img = loadmat(filepaths[i]) ? TO CHECK
#for s in range(len(scales)):
......@@ -126,7 +126,7 @@ def generate_patches(isDebug=True):
# img_s = np.reshape(np.array(img_s, dtype="uint8"), (img_s.size[0], img_s.size[1], 1)) # extend one dimension
# noisyimg_s = noisyimg.resize(newsize, resample=PIL.Image.BICUBIC)
# noisyimg_s = np.reshape(np.array(noisyimg_s, dtype="uint8"), (noisyimg_s.size[0], noisyimg_s.size[1], 1)) # extend one dimension
# for j in range(DATA_AUG_TIMES):
# im_h, im_w, _ = img_s.shape
cpt = 0
......@@ -165,7 +165,7 @@ def generate_patches(isDebug=True):
print('Nb of patches added for padding to batch size: ', to_pad)
cleaninputs[-to_pad:, :, :, :] = cleaninputs[:to_pad, :, :, :]
noisyinputs[-to_pad:, :, :, :] = noisyinputs[:to_pad, :, :, :]
#check input images
#import matplotlib.pyplot as plt
#plt.imsave('test0_clean', inputs[0,: ,:,0], cmap = 'Greys')
......
......@@ -22,11 +22,11 @@ def save_clean_pred_rad(args, exp, clean_pred_rad, noisy, clean, nom_img = "Nois
nom_img (str, optional) : The saving name for the result
"""
save_name = os.path.join(args.save_test_dir, os.path.basename(os.path.normpath(args.input_dir)))
save_name = os.path.join(args.save_test_dir, args.input_dir, "Test")
if not os.path.exists(save_name):
os.makedirs(save_name)
save_images(os.path.join(save_name , '%s-noisy.tiff' % (nom_img)), noisy)
save_images(os.path.join(save_name , '%s-clean.tiff' % (nom_img)), clean)
......@@ -38,7 +38,7 @@ def save_clean_pred_rad(args, exp, clean_pred_rad, noisy, clean, nom_img = "Nois
epoch = exp.epoch
psnr = cal_psnr(rad_to_flat(clean_pred_rad), rad_to_flat(clean))
std = cal_std_phase(clean_pred_rad, clean)
print("\n")
print("image : ", nom_img)
print("epoch : ", epoch)
......@@ -61,14 +61,16 @@ def evaluate_on_HOLODEEP(args, exp):
Arguments:
args (ArgumentParser) : The different info used to do and save the de-noising operations
exp (Experiment) : The de-noising model
"""
patterns = args.test_patterns
noises = args.test_noises
clean, noisy = from_DATABASE(args.eval_dir, noises, patterns, True)
clean = np.array(clean)
noisy = np.array(noisy)
......@@ -79,7 +81,8 @@ def evaluate_on_HOLODEEP(args, exp):
std = cal_std_phase(clean_pred_rad, clean[i])
running_std += std
print("On the patterns : ", patterns)
print("With noise : ", noises)
print("average_std : ", running_std/noisy.shape[0])
......@@ -89,22 +92,23 @@ def evaluate_on_HOLODEEP(args, exp):
def evaluate_on_DATAEVAL(args, exp):
"""This method is used to run an evaluation on the three test images
Arguments:
args (ArgumentParser) : The different info used to do and save the de-noising operations
exp (Experiment) : The model used to do the de-noising operation
"""
dir_name = args.test_dir
#nameList = ["DATA_1_Phase_Type1_2_0.25_1.5_4_50.mat", "DATA_20_Phase_Type4_2_0.25_2.5_4_100.mat", "VibPhaseDATA.mat"]
nameList = get_files(pathlib.Path(dir_name), '.*.mat')
dataList = []
for name in nameList:
dataList.append(( load_test_data(name, key = "Phaseb", flipupdown = True),
dataList.append(( load_test_data(name, key = "Phaseb", flipupdown = True),
load_test_data(name, key = "Phase", flipupdown = True)))
for idx, (noisy, clean) in enumerate(dataList):
denoise_img(args, noisy, clean, os.path.basename(nameList[idx]), exp)
......@@ -125,6 +129,7 @@ def denoise_img(args, noisy, clean, name, exp):
clean_pred_rad = noisy
nb_iteration = args.nb_iteration
for j in range(nb_iteration):
clean_pred_rad = denoising_single_image(args, clean_pred_rad, exp)
......@@ -142,14 +147,27 @@ def denoising_single_image(args, noisy, exp):
noisy (numpy.array) : The image to denoise
exp (Experiment) : The model used to denoise
"""
noisyPy = noisy.reshape(1, args.image_mode, args.test_image_size[0], args.test_image_size[1])
noisyPy_cos = torch.Tensor(normalize_data(noisyPy, 'cos', None))
noisyPy_sin = torch.Tensor(normalize_data(noisyPy, 'sin', None))
clean_pred_cos = exp.test(noisyPy_cos).detach().cpu().numpy()
clean_pred_sin = exp.test(noisyPy_sin).detach().cpu().numpy()
#clean_pred_cos = exp.test(noisyPy_cos).detach().cpu().numpy()
#clean_pred_sin = exp.test(noisyPy_sin).detach().cpu().numpy()