Commit 025ecbab authored by Loïc Barrault's avatar Loïc Barrault
Browse files

text classif with FFNN

parent c6d3dd6a
%!TEX root = m2_trad_neuronale.tex
% (c) 2010 School of Infomatics
%
% v0.1, sviglas
%
% This file provides a beamer theme for the School of Informatics
%
% The LaTeX source may not be the cleanest ever, but it will do for
% now.
\ProvidesPackage{beamerthemeinformatics}
%%
%% Corporate colour palette (rgb fractions in [0,1]).
%% The edin* colours come from the Edinburgh corporate identity;
%% liumlightgray is a light grey used for bars and block titles.
%%
\definecolor{edinblue}{rgb}{0,0.2,0.37}
\definecolor{edinred}{rgb}{0.76,0,0.26}
\definecolor{edinorange}{rgb}{0.76,0.365,0.141}
\definecolor{edinmagenta}{rgb}{0.529,0,0.357}
\definecolor{edingreen}{rgb}{0.008,0.286,0.188}
\definecolor{liumlightgray}{rgb}{0.9,0.9,0.9}
%% for the presentation
\mode<presentation>
%% Boolean feature switches (\newif); each is toggled by the
%% corresponding package option declared further below.
\newif\ifbeamer@logoseparator
\newif\ifbeamer@author
\newif\ifbeamer@slidecount
\newif\ifbeamer@nonav
\newif\ifbeamer@secheadings
%% Institute flags: the institute options are mutually exclusive, so at
%% most one of these is true; "empty" means no institute logo at all.
\newif\ifbeamer@inst@cisa
\newif\ifbeamer@inst@esi
\newif\ifbeamer@inst@ianc
\newif\ifbeamer@inst@iccs
\newif\ifbeamer@inst@icsa
\newif\ifbeamer@inst@ilsi
\newif\ifbeamer@inst@ipab
\newif\ifbeamer@inst@lfcs
\newif\ifbeamer@inst@inspace
\newif\ifbeamer@inst@empty
%% Structure defaults: every optional feature is off unless requested.
\beamer@logoseparatorfalse
\beamer@authorfalse
\beamer@slidecountfalse
\beamer@nonavfalse
\beamer@secheadingsfalse
%% Institute default: no institute selected ("empty" state).
\beamer@inst@cisafalse
\beamer@inst@esifalse
\beamer@inst@iancfalse
\beamer@inst@iccsfalse
\beamer@inst@icsafalse
\beamer@inst@ilsifalse
\beamer@inst@ipabfalse
\beamer@inst@lfcsfalse
\beamer@inst@inspacefalse
\beamer@inst@emptytrue
%% declare the various options for the package
%% logoseparator and secheadings are mutually exclusive headline styles
%% (a plain separator bar vs. a section/subsection strip), so each one
%% explicitly switches the other off.
\DeclareOptionBeamer{logoseparator}{
\beamer@logoseparatortrue
\beamer@secheadingsfalse
}
\DeclareOptionBeamer{secheadings}{
\beamer@logoseparatorfalse
\beamer@secheadingstrue
}
%% author: show \insertshortauthor in the footline
\DeclareOptionBeamer{author}{\beamer@authortrue}
%% slidecount: show "slide i of n" in the footline
\DeclareOptionBeamer{slidecount}{\beamer@slidecounttrue}
%% nonav: suppress beamer's navigation symbols
\DeclareOptionBeamer{nonav}{\beamer@nonavtrue}
%% Institute options.
%% Institutes are mutually exclusive: selecting one must clear every
%% other institute flag, including the "empty" default.  The original
%% code repeated the ten \beamer@inst@...false assignments in every
%% single option; the two helpers below factor that out.
%%
%% \informatics@clearinsts : reset all institute flags (incl. empty).
\def\informatics@clearinsts{%
\beamer@inst@cisafalse
\beamer@inst@esifalse
\beamer@inst@iancfalse
\beamer@inst@iccsfalse
\beamer@inst@icsafalse
\beamer@inst@ilsifalse
\beamer@inst@ipabfalse
\beamer@inst@lfcsfalse
\beamer@inst@inspacefalse
\beamer@inst@emptyfalse
}
%% \informatics@selectinst{<name>} : clear all flags, then enable the
%% flag \beamer@inst@<name> via \csname.  Behaves exactly like the
%% former hand-written option bodies.
\def\informatics@selectinst#1{%
\informatics@clearinsts
\csname beamer@inst@#1true\endcsname
}
%% One declaration per institute; the option name doubles as the flag
%% suffix and (in \setlogochoice) as the logo file prefix "<name>-long".
\DeclareOptionBeamer{cisa}{\informatics@selectinst{cisa}}
\DeclareOptionBeamer{esi}{\informatics@selectinst{esi}}
\DeclareOptionBeamer{ianc}{\informatics@selectinst{ianc}}
\DeclareOptionBeamer{iccs}{\informatics@selectinst{iccs}}
\DeclareOptionBeamer{icsa}{\informatics@selectinst{icsa}}
\DeclareOptionBeamer{ilsi}{\informatics@selectinst{ilsi}}
\DeclareOptionBeamer{ipab}{\informatics@selectinst{ipab}}
\DeclareOptionBeamer{lfcs}{\informatics@selectinst{lfcs}}
\DeclareOptionBeamer{inspace}{\informatics@selectinst{inspace}}
%% \setlogochoice: typesets the logo of the currently selected
%% institute followed by 5ex of padding; produces nothing in the
%% default "empty" state.  The institute options are mutually
%% exclusive, so at most one logo is emitted.  Used in the headline's
%% right-hand beamercolorbox.
%% The trailing "%" signs are deliberate: without them each line end
%% inside the definition becomes an interword space that leaks into
%% the headline box and shifts the logo.
\def\setlogochoice{%
\ifbeamer@inst@cisa
\includegraphics[height=5ex]{cisa-long}%
\hspace*{5ex}%
\fi
\ifbeamer@inst@esi
\includegraphics[height=5ex]{esi-long}%
\hspace*{5ex}%
\fi
\ifbeamer@inst@ianc
\includegraphics[height=5ex]{ianc-long}%
\hspace*{5ex}%
\fi
\ifbeamer@inst@iccs
\includegraphics[height=5ex]{iccs-long}%
\hspace*{5ex}%
\fi
\ifbeamer@inst@icsa
\includegraphics[height=5ex]{icsa-long}%
\hspace*{5ex}%
\fi
\ifbeamer@inst@ilsi
\includegraphics[height=5ex]{ilsi-long}%
\hspace*{5ex}%
\fi
\ifbeamer@inst@ipab
\includegraphics[height=5ex]{ipab-long}%
\hspace*{5ex}%
\fi
\ifbeamer@inst@lfcs
\includegraphics[height=5ex]{lfcs-long}%
\hspace*{5ex}%
\fi
\ifbeamer@inst@inspace
\includegraphics[height=5ex]{inspace-long}%
\hspace*{5ex}%
\fi
}
%% parse the options
\ProcessOptionsBeamer
%%
%% Use balls for bullets (circle/triangle variants kept for reference)
%%
%\setbeamertemplate{items}[circle]
%\setbeamertemplate{items}[triangle]
\setbeamertemplate{items}[ball]
%%
%% Now set colors/fonts for the various elements of the theme
%%
%% background color is white
\setbeamercolor{background canvas}{bg=white}
%% normal text is black (the edinblue variant is kept commented out)
%\setbeamercolor{normal text}{fg=edinblue}
\setbeamercolor{normal text}{fg=black}
%% "bar" colours the thin separator rules used in the headline
\setbeamercolor{bar}{bg=edinblue,fg=liumlightgray}
%% alerted text is in corporate red
\setbeamercolor{alerted text}{fg=edinred}
%% structure elements (itemize marks etc.) are black
\setbeamercolor{structure}{fg=black}
%% title is typeset in corporate red, bold, and roman
\setbeamercolor{title}{fg=edinred}
\setbeamerfont{title}{series=\bfseries,family=\rmfamily}
%% frametitle is typeset in a large roman font, corporate red
\setbeamerfont{frametitle}{size=\large,family=\rmfamily} %fg=edinred
\setbeamercolor{frametitle}{fg=edinred}
%% smaller font sizes for institute and date
\setbeamerfont{institute}{size=\small}
\setbeamerfont{date}{size=\scriptsize}
%% rounded and shadowed boxes for the various blocks
\setbeamertemplate{blocks}[rounded][shadow=true]
%% normal size for block titles
\setbeamerfont{block title}{size={}}
%% standard blocks: black on light grey, body tinted from the title bg
\setbeamercolor{block title}{fg=black,bg=liumlightgray}
\setbeamercolor{block body}{parent=normal text,use=block
title,bg=block title.bg!20!bg}
%% alerted blocks are in orange from the muted palette
\setbeamercolor{block title alerted}{fg=liumlightgray,bg=edinorange}
\setbeamercolor{block body alerted}{parent=normal text,use=block
title alerted,bg=liumlightgray}
%% example blocks are in green from the muted palette
%% (fixed: "liumlighthgray" was a typo for the defined colour
%% "liumlightgray" and made xcolor fail with an undefined colour)
\setbeamercolor{block title example}{fg=liumlightgray,bg=edingreen}
\setbeamercolor{block body example}{parent=normal text,use=block
title example,bg=block title example.bg!20!bg}
%% section/subsection strip used by the "secheadings" headline style
\setbeamercolor{section in head/foot}{bg=liumlightgray,fg=edinred}
\setbeamercolor{section name}{bg=liumlightgray,fg=edinred}
\setbeamerfont{section title}{size=\Huge,family=\rmfamily}
\setbeamercolor{subsection in head/foot}{use=section in
head/foot,bg=liumlightgray,fg=section in head/foot.fg!70}
%% suppress navigation symbols if requested
\ifbeamer@nonav
\setbeamertemplate{navigation symbols}{}
\fi
%% frame title: just the title text with a 1pt vertical offset;
%% colour and size come from the frametitle settings above.
%% The commented-out lines are leftover manual-placement experiments.
\setbeamertemplate{frametitle} {
\vbox{ \vskip1pt \insertframetitle}\par
% \hspace*{-0.045\paperwidth}
% \vspace{-.5cm}
}
%% headline
%% The headline contains the university and institute logos:
%%   left half  = Informatics logo,
%%   right half = institute logo chosen via \setlogochoice.
%% Depending on the package options it is followed by a thin separator
%% bar (logoseparator) or by two bars enclosing a section/subsection
%% strip (secheadings).  The two styles are mutually exclusive.
\defbeamertemplate*{headline}{}{
\vspace{0.05cm}
\hbox{
\begin{beamercolorbox}[wd=.5\paperwidth,ht=5ex,dp=1ex,left]{}%
\hspace*{3ex}
\includegraphics[height=5ex]{informatics}
\end{beamercolorbox}%
\begin{beamercolorbox}[wd=.5\paperwidth,ht=5ex,dp=1ex,right]{}%
\setlogochoice
\end{beamercolorbox}
}%
% logoseparator style: a single 0.5mm rule under the logos
\ifbeamer@logoseparator
\hbox{
\begin{beamercolorbox}[wd=1.02\paperwidth,ht=0.5mm]{bar}%
\end{beamercolorbox}
}%
\fi
% secheadings style: rule, section/subsection strip, rule
\ifbeamer@secheadings
\hbox{
\begin{beamercolorbox}[wd=1.02\paperwidth,ht=0.5mm]{bar}%
\end{beamercolorbox}
}%
\hbox{
\begin{beamercolorbox}[wd=.5\paperwidth,ht=2.25ex,dp=1ex,right]{section
in head/foot}%
\insertsectionhead
\hspace*{2ex}
\end{beamercolorbox}
\begin{beamercolorbox}[wd=.5\paperwidth,ht=2.25ex,dp=1ex,left]{subsection
in head/foot}%
\hspace*{2ex}
\insertsubsectionhead
\end{beamercolorbox}
}%
\hbox{
\begin{beamercolorbox}[wd=1.02\paperwidth,ht=0.5mm]{bar}%
\end{beamercolorbox}
}%
\fi
}
%% footline
%% footline displays the lab's URL by default and, if the
%% corresponding options have been set, it also displays the author
%% (left third) and the slide count (middle third).
\defbeamertemplate*{footline}{}{
\leavevmode%
\hbox{%
\begin{beamercolorbox}[wd=.333333\paperwidth,ht=2.25ex,dp=1ex,left]{}%
\ifbeamer@author
\hspace*{2ex}
\insertshortauthor
\fi
\end{beamercolorbox}%
\begin{beamercolorbox}[wd=.333333\paperwidth,ht=2.25ex,dp=1ex,center]{}%
\ifbeamer@slidecount
slide \insertframenumber{} of \inserttotalframenumber
\fi
\end{beamercolorbox}%
\begin{beamercolorbox}[wd=.333333\paperwidth,ht=2.25ex,dp=1ex,right]{}%
www-lium.univ-lemans.fr
\hspace*{2ex}
\end{beamercolorbox}}%
\vskip0pt%
}
%% leave presentation mode: everything after this applies to all modes
\mode
<all>
\ No newline at end of file
%% Driver for the "text classification with FFNN" lecture slides:
%% a 16:9 beamer presentation using the informatics theme.
\documentclass[aspectratio=169,t, xcolor=table]{beamer}
%\documentclass[handout,t]{beamer}
% Legacy notes for producing printed handouts from older decks:
% pdf2ps cm_parole.pdf;
% intro, codage + DTW: psselect -p 1-35,51-61-63 cm_parole.ps > cm_parol_poly.ps
% DTW alone: psselect -p 1-19,35-47 cm_parole.ps > cm_parole_poly.ps
% decode psselect -p1-47,51-74 cm_parole.ps > cm_parole_poly.ps
% psnup -4 -H96mm -W128mm -m15mm -b6mm cm_parole_poly.ps cm_parole_poly.ps4
%
%\usepackage{pgfpages}
%\pgfpagelayout{4 on 1}{a4paper,landscape}
\mode<presentation>
{
%\usetheme{PaloAlto}
% \usetheme{Hannover}
\usetheme{informatics}
\useoutertheme{infolines}
% \setbeamercovered{transparent} % or whatever (possibly just delete it)
}
\setbeamertemplate{navigation symbols}{} % remove navigation symbols
\usefonttheme[onlymath]{serif}
\setlength{\extrarowheight}{3pt}
%\usepackage{xspace}
% Shared course macros (project-local files, one directory up)
\input ../macros.tex
\input ../macros_en.tex
\input ../macros_beamer.tex
\input ../mycolors.tex
%\TPshowboxestrue % show textpos boxes while drafting; comment out when done
\TPshowboxesfalse % uncomment to make the textpos boxes disappear
\textblockorigin{10mm}{10mm} % origin of textblock positions
% This is only inserted into the PDF information catalog. Can be left out.
\subject{Feed Forward neural networks \\
Text classification and word vectors}
\title[]{Feed Forward neural networks \\ \_\_\_\_ \\
Text classification and word vectors}
\author[]{Loïc Barrault}
\institute[University of Sheffield]
{
l.barrault@sheffield.ac.uk \\
University of Sheffield\\
}
%\date{09 janvier 2017}
\date{}
% If you have a file called "university-logo-filename.xxx", where xxx
% is a graphic format that can be processed by latex or pdflatex,
% resp., then you can add a logo as follows:
%\pgfdeclareimage[height=0.5cm]{limsi-logo}{limsilogo}
%\logo{\pgfuseimage{limsi-logo}}
%\logo{\includegraphics[height=0.5cm]{limsilogo}}
%\logo{\epsfbox{limsilogo.eps}}
% Delete this, if you do not want the table of contents to pop up at
% the beginning of each subsection:
%\AtBeginSubsection[]
%{
% \begin{frame}<beamer>
% \frametitle{Outline}
% \tableofcontents[currentsection,currentsubsection]
% \end{frame}
%}
% If you wish to uncover everything in a step-wise fashion, uncomment
% the following command:
%\beamerdefaultoverlayspecification{<+->}
% "conclusion" environment shares the theorem counter
\newtheorem{conclusion}[theorem]{Conclusions}
\begin{document}
%% title slide
\begin{frame}
\titlepage
\end{frame}
%% all lecture content lives in a separate input file
\input{text_classification_ffnn_en.tex}
% and kill the abominable icon
\setbeamertemplate{bibliography item}{}
%% references, split over as many frames as needed
\begin{frame}[allowframebreaks]
\frametitle{References}
% \bibliographystyle{amsalpha}
\bibliographystyle{apalike}
% \bibliographystyle{plain}
\bibliography{refs}
\end{frame}
\end{document}
% Bibliography for the FFNN / text-classification lecture (refs.bib).
% Entries below are unchanged; comment lines merely label them.
% Deep Learning textbook (general DL background).
@book{Goodfellow-et-al-2016,
title={Deep Learning},
author={Ian Goodfellow and Yoshua Bengio and Aaron Courville},
publisher={MIT Press},
note={\url{http://www.deeplearningbook.org}},
year={2016}
}
% word2vec: CBOW / skip-gram word embeddings.
@misc{mikolov2013,
title = {Efficient Estimation of Word Representations in Vector Space},
author = {Tomas Mikolov and Kai Chen and Greg S. Corrado and Jeffrey Dean},
year = {2013},
URL = {http://arxiv.org/abs/1301.3781}
}
% Survey of deep learning for named entity recognition.
@article{Li2018,
abstract = {Named entity recognition (NER) is the task to identify text spans that mention named entities, and to classify them into predefined categories such as person, location, organization etc. NER serves as the basis for a variety of natural language applications such as question answering, text summarization, and machine translation. Although early NER systems are successful in producing decent recognition accuracy, they often require much human effort in carefully designing rules or features. In recent years, deep learning, empowered by continuous real-valued vector representations and semantic composition through nonlinear processing, has been employed in NER systems, yielding stat-of-the-art performance. In this paper, we provide a comprehensive review on existing deep learning techniques for NER. We first introduce NER resources, including tagged NER corpora and off-the-shelf NER tools. Then, we systematically categorize existing works based on a taxonomy along three axes: distributed representations for input, context encoder, and tag decoder. Next, we survey the most representative methods for recent applied techniques of deep learning in new NER problem settings and applications. Finally, we present readers with the challenges faced by NER systems and outline future directions in this area.},
archivePrefix = {arXiv},
arxivId = {1812.09449},
author = {Li, Jing and Sun, Aixin and Han, Jianglei and Li, Chenliang},
eprint = {1812.09449},
file = {:Users/loicbarrault/Library/Application Support/Mendeley Desktop/Downloaded/Li et al. - 2018 - A Survey on Deep Learning for Named Entity Recognition.pdf:pdf},
mendeley-groups = {NER},
month = {dec},
title = {{A Survey on Deep Learning for Named Entity Recognition}},
url = {http://arxiv.org/abs/1812.09449},
year = {2018}
}
% The Transformer architecture.
@inproceedings{Vaswani2017,
abstract = {The dominant sequence transduction models are based on complex recurrent or convolutional neural networks that include an encoder and a decoder. The best performing models also connect the encoder and decoder through an attention mechanism. We propose a new simple network architecture, the Transformer, based solely on attention mechanisms, dispensing with recurrence and convolutions entirely. Experiments on two machine translation tasks show these models to be superior in quality while being more parallelizable and requiring significantly less time to train. Our model achieves 28.4 BLEU on the WMT 2014 English-to-German translation task, improving over the existing best results, including ensembles, by over 2 BLEU. On the WMT 2014 English-to-French translation task, our model establishes a new single-model state-of-the-art BLEU score of 41.0 after training for 3.5 days on eight GPUs, a small fraction of the training costs of the best models from the literature.},
archivePrefix = {arXiv},
arxivId = {1706.03762},
author = {Vaswani, Ashish and Shazeer, Noam and Parmar, Niki and Uszkoreit, Jakob and Jones, Llion and Gomez, Aidan N. and Kaiser, {\L}ukasz and Polosukhin, Illia},
booktitle = {Advances in Neural Information Processing Systems},
eprint = {1706.03762},
issn = {10495258},
mendeley-groups = {LanguageModelling},
title = {{Attention is all you need}},
year = {2017}
}
% Long Short-Term Memory (original LSTM paper).
@article{Hochreiter1997,
abstract = {Learning to store information over extended time intervals by recurrent backpropagation takes a very long time, mostly because of insufficient, decaying error backflow. We briefly review Hochreiter's (1991) analysis of this problem, then address it by introducing a novel, efficient, gradient-based method called long short-term memory (LSTM). Truncating the gradient where this does not do harm, LSTM can learn to bridge minimal time lags in excess of 1000 discrete-time steps by enforcing constant error flow through constant error carousels within special units. Multiplicative gate units learn to open and close access to the constant error flow. LSTM is local in space and time; its computational complexity per time step and weight is O(1). Our experiments with artificial data involve local, distributed, real-valued, and noisy pattern representations. In comparisons with real-time recurrent learning, back propagation through time, recurrent cascade correlation, Elman nets, and neural sequence chunking, LSTM leads to many more successful runs, and learns much faster. LSTM also solves complex, artificial long-time-lag tasks that have never been solved by previous recurrent network algorithms.},
author = {Hochreiter, Sepp and Schmidhuber, J{\"{u}}rgen},
doi = {10.1162/neco.1997.9.8.1735},
issn = {08997667},
journal = {Neural Computation},
mendeley-groups = {ML},
pmid = {9377276},
title = {{Long Short-Term Memory}},
year = {1997}
}
% RNN encoder-decoder for SMT (GRU paper).
@inproceedings{Cho2014,
abstract = {In this paper, we propose a novel neural network model called RNN Encoder- Decoder that consists of two recurrent neural networks (RNN). One RNN encodes a sequence of symbols into a fixedlength vector representation, and the other decodes the representation into another sequence of symbols. The encoder and decoder of the proposed model are jointly trained to maximize the conditional probability of a target sequence given a source sequence. The performance of a statistical machine translation system is empirically found to improve by using the conditional probabilities of phrase pairs computed by the RNN Encoder-Decoder as an additional feature in the existing log-linear model. Qualitatively, we show that the proposed model learns a semantically and syntactically meaningful representation of linguistic phrases.},
archivePrefix = {arXiv},
arxivId = {1406.1078},
author = {Cho, Kyunghyun and {Van Merri{\"{e}}nboer}, Bart and Gulcehre, Caglar and Bahdanau, Dzmitry and Bougares, Fethi and Schwenk, Holger and Bengio, Yoshua},
booktitle = {EMNLP 2014 - 2014 Conference on Empirical Methods in Natural Language Processing, Proceedings of the Conference},
doi = {10.3115/v1/d14-1179},
eprint = {1406.1078},
isbn = {9781937284961},
mendeley-groups = {NMT},
title = {{Learning phrase representations using RNN encoder-decoder for statistical machine translation}},
year = {2014}
}
This diff is collapsed.
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment