Commit bde612fd authored by Noury Robin's avatar Noury Robin
Browse files

Message de validation

parent a9b51b74
......@@ -81,7 +81,7 @@ def generate_patches(isDebug=True):
print(hparams_debug_string())
#filepaths = [x for x in src_path.glob('*.tiff')] #('*.mat')
#noisyfilepaths = [x for x in noisy_path.glob('*.tiff')] #('*.mat')
cleanmat, noisymat = from_DATABASE(hparams.eval_dir, hparams.train_noise, hparams.train_patterns)
cleanmat, noisymat = from_DATABASE(hparams.train_dir, hparams.train_noise, hparams.train_patterns)
#ipdb.set_trace()
print("number of clean training data {0} and noisy {1}".format( len(cleanmat), len(noisymat)))
scales = 1 #et on ne le bouge pas !!!! hparams.scales #old version [1, 0.9, 0.8, 0.7]
......@@ -171,7 +171,7 @@ def generate_patches(isDebug=True):
print('shape of inputs: ', cleaninputs.shape)
print('amplitude of inputs: ', np.max(cleaninputs), np.min(cleaninputs))
sess_name = extract_sess_name(hparams.train_patterns, hparams.train_noise, hparams.phase_type, hparams.stride, hparams.patch_size, hparams.patch_per_image)
sess_name = extract_sess_name(hparams.train_noise, hparams.train_noise, hparams.phase_type, hparams.stride, hparams.patch_size, hparams.patch_per_image)
if not os.path.exists(args.save_dir):
os.mkdir(args.save_dir)
np.save(os.path.join(args.save_dir, "img_clean_train_" + sess_name), cleaninputs)
......
......@@ -36,10 +36,10 @@ __status__ = "Production"
# Default hyperparameters:
hparams = tf.contrib.training.HParams(
#to train on HOLODEEP tiff images
noise_src_dir = '/info/etu/slbm/e161513/dncnn-tensorflow-holography/DB_Train2/',
clean_src_dir = '/info/etu/slbm/e161513/dncnn-tensorflow-holography/DB_Train2/',
noise_src_dir = '/info/etu/slbm/e161513/dncnn-tensorflow-holography/DB_Train2',
clean_src_dir = '/info/etu/slbm/e161513/dncnn-tensorflow-holography/DB_Train2',
eval_dir = '/info/etu/slbm/e161513/dncnn-tensorflow-holography/HOLODEEPmat/',
train_dir='/info/etu/slbm/e161513/dncnn-tensorflow-holography/DB_Train2/',
train_dir='/info/etu/slbm/e161513/dncnn-tensorflow-holography/DB_Train12/',
#to train on matlab images
#eval_dir = '/lium/raid01_c/tahon/holography/HOLODEEPmat/',
#to train on natural images
......@@ -53,8 +53,9 @@ hparams = tf.contrib.training.HParams(
originalsize = (1024,1024), #1024 for matlab database, 128 for holodeep database, 180 for natural images
phase_type = 'two', #keep phase between -pi and pi (phi), convert into cosinus (cos) or sinus (sin)
#select images for training
train_patterns = [1, 2, 3, 4, 5,6,7], #number of images from 1 to 5
train_noise = '1-1.5-2-2.5-3', # [0.5, 1, 1.5, 2, 2.5,3],
#train_patterns = [1, 2, 3, 4, 5,6,7], #number of images from 1 to 5
train_patterns = [x+1 for x in range(300)],
train_noise ='3', # [0.5, 1, 1.5, 2, 2.5,3],
#select images for evaluation (during training)
eval_patterns = [1, 2, 3, 4, 5],
eval_noise = '0-1-1.5-2-2.5',
......
......@@ -68,7 +68,7 @@ hparams.parse(args.params)
def denoiser_train(denoiser, lr):
#avec load_data les images sont déjà normalisée par 255.0
sess_name = extract_sess_name(hparams.train_patterns, hparams.train_noise, hparams.phase_type, hparams.stride, hparams.patch_size, hparams.patch_per_image)
sess_name = extract_sess_name(hparams.train_noise, hparams.train_noise, hparams.phase_type, hparams.stride, hparams.patch_size, hparams.patch_per_image)
#for training with natural images
#sess_name = 'natural_phi'
print('session name: ', sess_name)
......
......@@ -240,7 +240,7 @@ class denoiser(object):
#self.evaluate(iter_num, eval_data, sample_dir=sample_dir, summary_merged=summary_psnr, summary_writer=writer, sess_name=sess_name, phase_type=phase_type, nb_layers=nb_layers) # eval_data value range is 0-255
for epoch in range(start_epoch, epoch):
#np.random.shuffle(data) #no shuffle for the moment
#shuffle target and source synchronously with random permutation at each epoch.
#shuffle target and source synchronously with random permutation at each epoch.
ind = np.random.permutation(numPatch)
data_clean = data_clean[ind, :,:,:]
data_noisy = data_noisy[ind, :,:,:]
......
#!/bin/bash
#SBATCH -p gpu
#SBATCH --gres gpu:1
#SBATCH --mem 40G
#SBATCH --gres gpu:rtx6000:1
#SBATCH --mail-type=ALL
#SBATCH --mail-user=robin.noury.etu@univ-lemans.fr
#SBATCH -o test_debruitage.out
#SBATCH --time 20-00
#SBATCH --time 40-00
#noisyImg=$1
#cleanImg=$2
#runTest=/lium/raid01_c/tahon/holography/checkpoints/run-test2020-04-12_12\:14\:29.082341/
......@@ -17,22 +16,21 @@ for num in 1 2 3 4 5; do
noisyImg=./HOLODEEPmat/Pattern$num/MFH_$lambda/NoisyPhase.mat
#noisyImg=/lium/raid01_c/tahon/holography/HOLODEEPmat/PATTERN$num/MFH_$lambda/run-test2020-04-12_12\:14\:29.082341/run-test2020-04-12_12\:14\:29.082341/NoisyPhase.mat-27000.mat-27000.mat
cleanImg=./HOLODEEPmat/Pattern$num/PhaseDATA.mat
echo $noisyImg >> TEST-TRAIN_1.res
python main_holo.py --test_noisy_img $noisyImg --test_noisy_key 'NoisyPhase' --test_clean_img $cleanImg --test_clean_key 'Phase' --test_flip False --test_ckpt_index $runTest >> TEST-TRAIN_1.res
echo $noisyImg >> TEST-TRAIN_2_315.res
python main_holo.py --test_noisy_img $noisyImg --test_noisy_key 'NoisyPhase' --test_clean_img $cleanImg --test_clean_key 'Phase' --test_flip False --test_ckpt_index $runTest >> TEST-TRAIN_2_315.res
done
done
test1=./DATAEVAL/DATA_1_Phase_Type1_2_0.25_1.5_4_50.mat
test2=./DATAEVAL/DATA_20_Phase_Type4_2_0.25_2.5_4_100.mat
test3=./DATAEVAL/VibPhaseDATA.mat
test4=./Base_exp_test_double_impact/Temps_200/NoisyPhase.mat
keyNoisy='Phaseb'
keyClean='Phase'
echo $test1
python main_holo.py --test_noisy_img $test1 --test_noisy_key $keyNoisy --test_clean_img $test1 --test_clean_key $keyClean --test_flip False --test_ckpt_index $runTest >> TEST-TRAIN_1.res
python main_holo.py --test_noisy_img $test1 --test_noisy_key $keyNoisy --test_clean_img $test1 --test_clean_key $keyClean --test_flip False --test_ckpt_index $runTest >> TEST-TRAIN_2_315.res
echo $test2
python main_holo.py --test_noisy_img $test2 --test_noisy_key $keyNoisy --test_clean_img $test2 --test_clean_key $keyClean --test_flip False --test_ckpt_index $runTest >> TEST-TRAIN_1.res
python main_holo.py --test_noisy_img $test2 --test_noisy_key $keyNoisy --test_clean_img $test2 --test_clean_key $keyClean --test_flip False --test_ckpt_index $runTest >> TEST-TRAIN_2_315.res
echo $test3
python main_holo.py --test_noisy_img $test3 --test_noisy_key $keyNoisy --test_clean_img $test3 --test_clean_key $keyClean --test_flip False --test_ckpt_index $runTest >> TEST-TRAIN_1.res
echo $test4
python main_holo.py --test_noisy_img $test4 --test_noisy_key $keyNoisy --test_flip False --test_ckpt_index $runTest >> TEST-TRAIN_1.res
python main_holo.py --test_noisy_img $test3 --test_noisy_key $keyNoisy --test_clean_img $test3 --test_clean_key $keyClean --test_flip False --test_ckpt_index $runTest >> TEST-TRAIN_2_315.res
#!/bin/bash
#SBATCH -p gpu
#SBATCH --gres gpu:1
#SBATCH --mem 40G
#SBATCH --mem 80G
#SBATCH --mail-type=ALL
#SBATCH --mail-user=robin.noury.etu@univ-lemans.fr
#SBATCH -o holo_two_train_noise0-1.5.out
#SBATCH --time 20-00
#SBATCH -o holo_train12.out
#SBATCH --time 50-00
source activate hologaphy
#python generate_patches_holo_fromMAT.py --params "phase_type=two"
......
# -*- coding: utf-8 -*-
#
# This file is part of DnCnn4Holo.
#
# Adapted from https://github.com/wbhu/DnCNN-tensorflow by Hu Wenbo
#
# DnCnn4Holo is a python script for phase image denoising.
# Home page: https://git-lium.univ-lemans.fr/tahon/dncnn-tensorflow-holography
#
# DnCnn4Holo is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 3 of the License,
# or (at your option) any later version.
#
# DnCnn4Holo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with DnCnn4Holo. If not, see <http://www.gnu.org/licenses/>.
"""
Copyright 2019-2020 Marie Tahon
:mod:`utils.py` definition of util function for DnCnn4Holo
"""
import gc
import os
import sys
import re
import pathlib
import numpy as np
import tensorflow as tf
from PIL import Image
from scipy.io import loadmat, savemat
from glob import glob
#import ipdb
__license__ = "LGPL"
__author__ = "Marie Tahon"
__copyright__ = "Copyright 2019-2020 Marie Tahon"
__maintainer__ = "Marie Tahon"
__email__ = "marie.tahon@univ-lemans.fr"
__status__ = "Production"
#__docformat__ = 'reStructuredText'
def extract_sess_name(lp, ln, pt, stride, ps, np):
    """Build the session name used to tag checkpoints and saved patch files.

    :param lp: list of training pattern numbers (joined with '-').
    :param ln: training noise levels, already a '-'-separated string.
    :param pt: phase type ('phi', 'cos', 'sin' or 'two').
    :param stride: patch extraction stride.
    :param ps: patch size.
    :param np: number of patches per image (note: shadows the numpy alias, kept for interface compatibility).
    :return: the pieces joined with underscores, e.g. '1-2-3_1-1.5_two_50_50_384'.
    """
    pattern_part = '-'.join(str(p) for p in lp)
    return '_'.join([pattern_part, ln, pt, str(stride), str(ps), str(np)])
def get_files(path, regexp):
    """Recursively collect the files under *path* whose basename matches *regexp*.

    :param path: pathlib.Path of the directory to walk.
    :param regexp: regular expression tested with re.match against each file name.
    :return: sorted list of pathlib.Path objects for the matching files.
    """
    list_files = []
    for root, dirs, files in os.walk(path):
        for name in files:
            if re.match(regexp, name):
                # Join against the directory that actually contains the file.
                # The original joined against the top-level *path*, which
                # produced non-existent paths for files found in sub-directories.
                list_files.append(pathlib.Path(root).joinpath(name))
    return sorted(list_files)
def from_NATURAL(dir_noise, dir_clean, path_only):
    """Collect the natural-image (png) training set from two directories.

    :param dir_noise: directory of the noisy png images.
    :param dir_clean: directory of the clean png images.
    :param path_only: when True return only the file lists; otherwise load
        every image as a grayscale (1, H, W, 1) numpy array.
    :return: (clean, noisy) — either path lists or image-array lists.
    """
    print(dir_noise, dir_clean)
    regExp = '.*.png'
    noisy_files = get_files(pathlib.Path(dir_noise), regExp)
    clean_files = get_files(pathlib.Path(dir_clean), regExp)
    if path_only:
        # caller only wants the filenames, not the pixel data
        return clean_files, noisy_files

    def _load(files):
        # read each file as 8-bit grayscale, shaped (1, H, W, 1)
        images = []
        for f in files:
            im = Image.open(f).convert('L')
            images.append(np.array(im).reshape(1, im.size[1], im.size[0], 1))
        return images

    return _load(clean_files), _load(noisy_files)
def from_HOLODEEP(dir_noise, dir_clean, noise_eval, img_eval, path_only):
    """Select (and optionally load) the HOLODEEP tiff files for the requested
    patterns and noise levels.

    :param dir_noise: directory of the noisy tiff images.
    :param dir_clean: directory of the clean tiff images.
    :param noise_eval: '-'-separated noise levels, e.g. '1-1.5-2'.
    :param img_eval: iterable of pattern numbers (keys of *pattern* below).
    :param path_only: when True return only the file lists; otherwise load the
        images as grayscale (1, H, W, 1) numpy arrays.
    :return: (clean, noisy) — path lists or image-array lists.
    """
    # pattern number -> (prefix, suffix) fragments of the MFH file-name code
    pattern = {1: ('0','1'), 2: ('0','2'), 3: ('0','3'), 4:('73', '1'), 5:('100','1')}
    # noise level '1.5' is spelled '1p5' in the file names
    nois_pat = [str(n).replace('.','p') for n in noise_eval.split('-')]
    regExp = 'MFH2('
    for p in img_eval:
        for n in nois_pat:
            regExp += pattern[p][0] + n + '2' + pattern[p][1] + '|'
    # raw string: '\d' was an invalid escape sequence in the original plain
    # string literal (DeprecationWarning, SyntaxWarning on newer Pythons)
    regExp = regExp[:-1] + r')_\d.*.tiff'
    print(regExp)
    select_noisy = get_files(pathlib.Path(dir_noise), regExp)
    select_clean = get_files(pathlib.Path(dir_clean), regExp)
    print('selected noisy / clean files:', len(select_noisy), len(select_clean))
    if path_only:
        # return only the filenames, not the images
        return select_clean, select_noisy
    else:
        # load every selected image as a (1, H, W, 1) array
        data_clean = []
        for file in select_clean:
            im = Image.open(file).convert('L')
            data_clean.append(np.array(im).reshape(1, im.size[1], im.size[0], 1))
        data_noisy = []
        for file in select_noisy:
            im = Image.open(file).convert('L')
            data_noisy.append(np.array(im).reshape(1, im.size[1], im.size[0], 1))
        return data_clean, data_noisy
def from_DATABASE(dir_data, noise_eval, img_eval, flipupdown = False):
    """Load paired clean/noisy matlab phase images from the HOLODEEP database.

    NOTE(review): *dir_data* is concatenated directly with 'Pattern<p>/', so it
    is assumed to end with a '/' — confirm with the hparams defaults.

    :param dir_data: root directory of the database (trailing slash expected).
    :param noise_eval: '-'-separated noise levels, e.g. '0-1-1.5'.
    :param img_eval: iterable of pattern numbers.
    :param flipupdown: forwarded to loadMAT_flip to flip images vertically.
    :return: (clean, noisy) lists of (1, H, W, 1) arrays; the same clean image
        is repeated once per noise level so the two lists stay aligned.
    """
    noise_levels = [str(n) for n in noise_eval.split('-')]
    select_noisy = []
    select_clean = []
    for p in img_eval:
        pat = dir_data + 'Pattern' + str(p) + '/'
        for n in noise_levels:
            select_noisy.append(pat + 'MFH_' + n + '/NoisyPhase.mat')
            select_clean.append(pat + 'PhaseDATA.mat')
    clean = []
    for file in select_clean:
        print('clean eval data: ', file)
        clean.append(loadMAT_flip(file, 'Phase', flipupdown))
    noisy = []
    for file in select_noisy:
        print('noisy eval data: ', file)
        noisy.append(loadMAT_flip(file, 'NoisyPhase', flipupdown))
    return clean, noisy
def loadMAT_flip(file, key, flipupdown):
    """Load a 2-D array from a matlab .mat file and optionally flip it vertically.

    :param file: path of the .mat file.
    :param key: matlab variable name to extract; exits with an error listing
        the available keys when it is missing.
    :param flipupdown: when True, flip the image upside down before reshaping.
    :return: the image reshaped to (1, dim1, dim0, 1).
    """
    s = loadmat(file)
    if key in s:
        im = np.array(s[key])
    else:
        print('Existing keys are: ', s.keys())
        sys.exit('Key error when loading matlab file')
    if flipupdown:
        # np.flipud returns a new array; the original discarded the result,
        # so the requested flip silently never happened.
        im = np.flipud(im)
    # NOTE(review): shape[1] and shape[0] are swapped here. Harmless for the
    # square images this project uses, but a reshape (not a transpose) would
    # scramble non-square data — confirm intent before changing.
    return im.reshape(1, im.shape[1], im.shape[0], 1)
def loadIM_flip(file, key, flipupdown):
    """Load a grayscale image file and map its 0-255 pixel values onto phase
    values in [-pi, pi).

    NOTE(review): *key* and *flipupdown* are accepted but never used here,
    unlike in loadMAT_flip — confirm whether flipping was intended.

    :return: the phase image reshaped to (1, dim1, dim0, 1).
    """
    phase = np.array(Image.open(file).convert('L'))
    phase = (phase * np.pi / 128.0) - np.pi
    print(phase.min(), phase.max())
    return phase.reshape(1, phase.shape[1], phase.shape[0], 1)
def wrap_phase(x):
    """Wrap phase value(s) *x* (radians) into the half-open interval [-pi, pi)."""
    two_pi = 2 * np.pi
    return np.mod(x + np.pi, two_pi) - np.pi
def phase_to_image(data, name):
    """Save a raw-phase array as an 8-bit tiff image.

    :param data: phase array of shape (1, H, W, 1) — TODO confirm, inferred
        from the data[0,:,:,0] indexing below.
    :param name: output file path.
    """
    data = wrap_phase(data)    # fold raw phase into [-pi, pi)
    data = min_max_norm(data)  # rescale to [0, 1]
    # np.clip returns a new array; the original discarded the result so the
    # clipping never applied (redundant after min-max scaling, but fixed here).
    data = np.clip(data, 0, 1)
    print(data.min(), data.max())
    data = (data * 255).astype('uint8')  # format the data as image pixels
    from PIL import Image
    im = Image.fromarray(data[0, :, :, 0])
    im.save(name, 'tiff')
def data_augmentation(image, mode):
    """Apply one of eight deterministic flip/rotation augmentations.

    mode 0: identity          mode 4: rotate 180
    mode 1: flip up-down      mode 5: rotate 180 + flip up-down
    mode 2: rotate 90         mode 6: rotate 270
    mode 3: rotate 90 + flip  mode 7: rotate 270 + flip up-down

    Any other mode returns None (same as the original fall-through).
    """
    transforms = {
        0: lambda im: im,
        1: np.flipud,
        2: np.rot90,
        3: lambda im: np.flipud(np.rot90(im)),
        4: lambda im: np.rot90(im, k=2),
        5: lambda im: np.flipud(np.rot90(im, k=2)),
        6: lambda im: np.rot90(im, k=3),
        7: lambda im: np.flipud(np.rot90(im, k=3)),
    }
    if mode in transforms:
        return transforms[mode](image)
def min_max_norm(X):
    """Linearly rescale X so its minimum maps to 0 and its maximum to 1."""
    lo, hi = X.min(), X.max()
    return (X - lo) / (hi - lo)
def norm_to_sincos(X):
    """Map values from the [0, 1] range onto the [-1, 1] sin/cos range."""
    return X * 2 - 1
def norm_to_phase(X):
    """Scale normalized values by 2*pi to recover phase radians.

    NOTE(review): the original comments mention an alternative mapping that
    also shifted by -pi; only the plain 2*pi scaling is active here.
    """
    return X * (2 * np.pi)
def phase_to_norm(X):
    """Map phase values from [-pi, pi] onto the normalized [0, 1] range."""
    return (X + np.pi) / (2 * np.pi)
def sincos_to_norm(X):
    """Map sin/cos values from [-1, 1] onto the normalized [0, 1] range."""
    return (X + 1) / 2
def normalize_data(data, phase_type, rdm, phase_augmentation = False):
    """Convert raw-phase patches according to *phase_type*.

    :param data: patch array of shape (N, H, W, 1).
    :param phase_type: 'phi' (identity), 'cos', 'sin', or 'two' (per-patch
        random choice between cos and sin driven by *rdm*).
    :param rdm: for 'two' without augmentation, an iterable of 0/1 draws
        (0 selects cos, anything else sin); ignored otherwise.
    :param phase_augmentation: for 'two', expand each patch into four
        variants: cos, sin, cos of the transpose, sin of the transpose.
        NOTE(review): the original comment called the last two "flipud" but
        the code transposes — behavior preserved as-is.
    :return: the converted array; exits on an unknown phase_type.
    """
    if phase_type == 'phi':
        return data
    if phase_type == 'cos':
        return np.cos(data)
    if phase_type == 'sin':
        return np.sin(data)
    if phase_type == 'two' and phase_augmentation == False:
        out = np.zeros(shape=data.shape)
        nb_cos = 0
        for k, draw in enumerate(rdm):
            if draw == 0:
                out[k, :, :, :] = np.cos(data[k, :, :, :])
                nb_cos += 1
            else:
                out[k, :, :, :] = np.sin(data[k, :, :, :])
        print('Nb of cos files :', nb_cos)
        return out
    if phase_type == 'two' and phase_augmentation == True:
        numPatch = data.shape[0]
        out = np.zeros(shape=(numPatch * 4, data.shape[1], data.shape[2], data.shape[3]))
        for k in range(numPatch):
            patch = data[k, :, :, 0]
            out[k, :, :, 0] = np.cos(patch)
            out[numPatch + k, :, :, 0] = np.sin(patch)
            out[2 * numPatch + k, :, :, 0] = np.cos(np.transpose(patch))
            out[3 * numPatch + k, :, :, 0] = np.sin(np.transpose(patch))
        print('nb of cos / sin / cos + flipud / sin + flipud: ', numPatch)
        return out
    print('[!] phase type not exists (phi|cos|sin|two)')
    sys.exit()
class train_data():
    """Context manager that loads clean/noisy training patches from .npy files,
    normalizes them according to *phase_type*, and shuffles both arrays with a
    shared permutation so pairs stay aligned.

    Usage: ``with train_data(...) as (clean, noisy): ...``
    """

    def __init__(self, filepath='./data/image_clean_patches_train.npy', noisyfilepath='./data/image_noisy_patches_train.npy', phase_type='two'):
        # validate both paths up-front; exit early with a message otherwise
        self.filepath = filepath
        assert '.npy' in filepath
        if not os.path.exists(filepath):
            print("[!] Clean data file not exists")
            sys.exit(1)
        self.noisyfilepath = noisyfilepath
        assert '.npy' in noisyfilepath
        if not os.path.exists(noisyfilepath):
            print("[!] Noisy data file not exists")
            sys.exit()
        self.phase_type = phase_type

    def __enter__(self):
        print("[*] Loading data...")
        clean_raw = np.load(self.filepath).astype(np.float32)
        noisy_raw = np.load(self.noisyfilepath).astype(np.float32)
        if self.phase_type == 'two':
            # One cos/sin draw per patch. The original computed
            # len(filepath) (a NameError: 'filepath' is not defined here) and
            # then clobbered rdm with np.random.randn(1, 2,) before the noisy
            # normalization, de-synchronizing the clean/noisy cos-sin choices.
            # Using one draw array for both keeps the pairs consistent.
            rdm = np.random.randint(0, 2, clean_raw.shape[0])
        else:
            rdm = None
        clean = normalize_data(clean_raw, self.phase_type, rdm)
        noisy = normalize_data(noisy_raw, self.phase_type, rdm)
        print(clean.shape)
        # one shared permutation keeps clean[i] paired with noisy[i]
        idx = np.random.permutation(clean.shape[0])
        self.clean = clean[idx, :, :, :]
        self.noisy = noisy[idx, :, :, :]
        print("[*] Load successfully...")
        return self.clean, self.noisy

    def __exit__(self, type, value, trace):
        # release the (large) patch arrays eagerly
        del self.clean
        del self.noisy
        gc.collect()
        print("In __exit__()")
def load_train_data(filepath='./data/image_clean_patches_train.npy', noisyfilepath='./data/image_noisy_patches_train.npy', phase_type = 'two'):
    """Load clean/noisy training patch arrays and shuffle them in unison.

    :param filepath: .npy file of clean patches; exits if missing.
    :param noisyfilepath: .npy file of noisy patches; exits if missing.
    :param phase_type: accepted for interface compatibility, not used here
        (normalization was moved out of this function — see the train_data class).
    :return: (clean, noisy) arrays permuted with the same random order so
        clean[i] stays paired with noisy[i].
    """
    assert '.npy' in filepath
    if not os.path.exists(filepath):
        print("[!] Clean data file not exists")
        sys.exit(1)
    assert '.npy' in noisyfilepath
    if not os.path.exists(noisyfilepath):
        print("[!] Noisy data file not exists")
        sys.exit()
    print("[*] Loading data...")
    clean = np.load(filepath)
    noisy = np.load(noisyfilepath)
    # one shared permutation keeps the clean/noisy pairs aligned
    order = np.random.permutation(clean.shape[0])
    print("[*] Load successfully...")
    return clean[order, :, :, :], noisy[order, :, :, :]
def load_test_data(file, key, flipupdown = False):
    """Load one test image, dispatching on the file extension: '.mat' files go
    through loadMAT_flip, anything else through loadIM_flip."""
    ext = os.path.splitext(file)[1]
    if ext == '.mat':
        return loadMAT_flip(file, key, flipupdown)
    return loadIM_flip(file, key, flipupdown)
def load_eval_data(dir_data, noise_eval, img_eval):
    """Load the evaluation set from the matlab database, flipped upside down.

    Thin wrapper around from_DATABASE with flipupdown forced to True; returns
    the (clean, noisy) lists unchanged.
    """
    return from_DATABASE(dir_data, noise_eval, img_eval, flipupdown=True)
def load_images(filelist, noisyfilelist, phase_type):
    """Load paired clean/noisy grayscale images as (1, H, W, 1) uint8 arrays.

    NOTE(review): the guard below only fails when NEITHER argument is a list
    ('or', not 'and') — confirm the intended check.

    :param phase_type: accepted for interface compatibility, unused here.
    """
    if not (isinstance(filelist, list) or isinstance(noisyfilelist, list)):
        exit('Problem with evaluation file list')

    def _read(paths):
        out = []
        for p in paths:
            im = Image.open(p).convert('L')
            out.append(np.array(im).reshape(1, im.size[1], im.size[0], 1))
        return out

    return _read(filelist), _read(noisyfilelist)
def save_images(filepath, ground_truth, noisy_image=np.array([]), clean_image=np.array([])):
    """Write the ground truth — optionally concatenated along axis 1 with the
    noisy and denoised images — to *filepath* via phase_to_image.

    NOTE(review): the first branch keeps ground_truth alone when it contains
    no non-zero pixel (`not ground_truth.any()`); this looks inverted but is
    preserved as-is — confirm intent.
    """
    if not ground_truth.any():
        cat_image = ground_truth
    elif noisy_image.size == 0 and clean_image.size == 0:
        # nothing to compare against: save the ground truth alone
        cat_image = ground_truth
    else:
        cat_image = np.concatenate([ground_truth, noisy_image, clean_image], axis=1)
    phase_to_image(cat_image, filepath)
def save_MAT_images(filepath, values):
    """Save a (1, H, W, 1) prediction array to a matlab file under the key
    'NoisyPhase' (so iterations can be re-run on predicted images).

    :param filepath: output path, written verbatim (appendmat=False keeps
        scipy from appending a '.mat' suffix).
    """
    img = values.reshape(values.shape[1], values.shape[2])
    print(img.shape)
    savemat(filepath, {'NoisyPhase': img}, appendmat=False)
def cal_psnr(im1, im2):
    """Peak signal-to-noise ratio between two images.

    Assumes pixel values in the 0-255 range (uint8 convention).
    The np.float alias was removed in NumPy 1.20+; np.float64 is the
    equivalent explicit dtype.
    """
    mse = ((im1.astype(np.float64) - im2.astype(np.float64)) ** 2).mean()
    psnr = 10 * np.log10(255 ** 2 / mse)
    return psnr
def cal_std_phase(im1, im2):
    """Standard deviation of the wrapped phase difference between two phase
    images (both expected in the [-pi, pi] range)."""
    # np.angle(exp(1j * d)) folds the raw difference d back into [-pi, pi]
    wrapped_diff = np.angle(np.exp(1j * (im1 - im2)))
    return np.std(wrapped_diff)
def tf_psnr(im1, im2):
    """PSNR as a TensorFlow op, assuming pixel values in the [0, 1] range."""
    mse = tf.losses.mean_squared_error(labels=im2, predictions=im1)
    # log10(x) = ln(x) / ln(10); TF1 only exposes the natural log
    return 10.0 * (tf.log(1 / mse) / tf.log(10.0))
# -*- coding: utf-8 -*-
#
# This file is part of DnCnn4Holo.
#
# Adapted from https://github.com/wbhu/DnCNN-tensorflow by Hu Wenbo
#
# DnCnn4Holo is a python script for phase image denoising.
# Home page: https://git-lium.univ-lemans.fr/tahon/dncnn-tensorflow-holography
#
# DnCnn4Holo is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 3 of the License,
# or (at your option) any later version.
#
# DnCnn4Holo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with DnCnn4Holo. If not, see <http://www.gnu.org/licenses/>.
"""
Copyright 2019-2020 Marie Tahon
:mod:`utils.py` definition of util function for DnCnn4Holo
"""
import gc
import os
import sys
import re
import math
import mat73
import pathlib
import numpy as np
import tensorflow as tf
import h5py
from PIL import Image
from scipy.io import loadmat, savemat
from glob import glob
#import ipdb
__license__ = "LGPL"
__author__ = "Marie Tahon"
__copyright__ = "Copyright 2019-2020 Marie Tahon"
__maintainer__ = "Marie Tahon"
__email__ = "marie.tahon@univ-lemans.fr"
__status__ = "Production"
#__docformat__ = 'reStructuredText'
def extract_sess_name(lp, ln, pt, stride, ps, np):