Commit 3f76cf40 authored by Marie Tahon's avatar Marie Tahon

minor changes

parents b50c73d4 d348c754
# Home page: https://git-lium.univ-lemans.fr/tahon/dncnn-tensorflow-holography
#
# Adapted from https://github.com/wbhu/DnCNN-tensorflow by Hu Wenbo
#
# DnCnn4Holo is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 3 of the License,
# or (at your option) any later version.
#
# DnCnn4Holo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with DnCnn4Holo. If not, see <http://www.gnu.org/licenses/>.
"""
Copyright 2019-2020 Marie Tahon
:mod:`hparams.py` lists the modifiable parameters for patch generation and model training
"""
import tensorflow as tf
__license__ = "LGPL"
__author__ = "Marie Tahon"
__copyright__ = "Copyright 2019-2020 Marie Tahon"
__maintainer__ = "Marie Tahon"
__email__ = "marie.tahon@univ-lemans.fr"
__status__ = "Production"
#__docformat__ = 'reStructuredText'
# Default hyperparameters:
hparams = tf.contrib.training.HParams(
#to train on HOLODEEP tiff images
noise_src_dir = '/info/etu/m1/s160128/Documents/M1/DnCnn/Portage-reseau-de-neurones-de-Keras-vers-PyTorch/dncnn-tensorflow-holography-master/Holography/DATABASE/',
clean_src_dir = '/info/etu/m1/s160128/Documents/M1/DnCnn/Portage-reseau-de-neurones-de-Keras-vers-PyTorch/dncnn-tensorflow-holography-master/Holography/DATABASE/',
eval_dir = '/info/etu/m1/s160128/Documents/M1/DnCnn/Portage-reseau-de-neurones-de-Keras-vers-PyTorch/dncnn-tensorflow-holography-master/Holography/DATABASE/',
#to train on matlab images
#eval_dir = '/lium/raid01_c/tahon/holography/HOLODEEPmat/',
#to train on natural images
#noise_src_dir = '/lium/raid01_c/tahon/holography/NATURAL/noisy',
#clean_src_dir = '/lium/raid01_c/tahon/holography/NATURAL/original',
#eval_dir = '/lium/raid01_c/tahon/holography/HOLODEEPmat/',
#test_dir = 'lium/raid01_c/tahon/holography/TEST/',
phase = 'train', #train or test phase
#image
isDebug = False, #True, #if True, create only 10 patches
originalsize = (1024,1024), #1024 for matlab database, 128 for holodeep database, 180 for natural images
phase_type = 'two', #keep phase between -pi and pi (phi), or convert it into cosine (cos) or sine (sin)
#select images for training
train_patterns = [1, 2, 3, 4, 5], #number of images from 1 to 5
train_noise = '0-1-1.5-2-2.5', #[0, 1, 1.5, 2, 2.5],
#select images for evaluation (during training)
eval_patterns = [1, 2, 3, 4, 5],
eval_noise = '0-1-1.5-2-2.5',
#select images for testing
test_patterns = [1, 2, 3, 4, 5],
test_noise = '0-1-1.5-2-2.5',
noise_type = 'spkl', #type of noise: speckle or gaussian (spkl|gauss)
sigma = 25, #noise level for gaussian denoising
#Training
nb_layers = 4,#original number is 16
batch_size = 128,#128
patch_per_image = 384, #384, #9 for 180*180 images (NATURAL); Silvio used 384 for 1024*1024 images (MATLAB)
patch_size = 50, #Silvio used 50
epoch = 10,#2000
lr = 0.0005, # learning rate
stride = 50, # spatial step for cropping images (value from the initial script: 10)
step = 0, #initial spatial step for cropping
scales = [1], #[1, 0.9, 0.8, 0.7] # scale for data augmentation
chosenIteration = '' #chosen iteration to load for training or testing
)
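# Sketch (not in the original file): the noise-level fields above (train_noise,
# eval_noise, test_noise) are dash-separated strings, and the commented
# alternative '[0, 1, 1.5, 2, 2.5]' suggests they are parsed into floats
# elsewhere in the repository. A minimal, hypothetical helper could look like:
def parse_noise_levels(noise_str):
    """Split a dash-separated noise string such as '0-1-1.5-2-2.5' into floats."""
    return [float(level) for level in noise_str.split('-')]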
def hparams_debug_string():
    values = hparams.values()
    hp = [' %s: %s' % (name, values[name]) for name in sorted(values)]
    return 'Hyperparameters:\n' + '\n'.join(hp)
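# Example (illustrative sketch): overriding and inspecting these hyperparameters
# at run time with the tf.contrib.training.HParams API used above. The
# 'batch_size=64,lr=0.001' override string is only an assumption for illustration.
if __name__ == '__main__':
    hparams.parse('batch_size=64,lr=0.001')  # override defaults from a comma-separated string
    print(hparams_debug_string())  # print all hyperparameters sorted by name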
#-*- coding: utf-8 -*-
#
# This file is part of DnCnn4Holo.
@@ -37,7 +36,6 @@ import sys
import re
import pathlib
import numpy as np
#import tensorflow as tf
from PIL import Image
from scipy.io import loadmat, savemat
from glob import glob