
\documentclass[12pt,letterpaper]{article}
\usepackage[utf8]{inputenc}
\usepackage{graphicx}
\usepackage{times}
\usepackage{mlapa}

\begin{document}
\title{Generating and Exploiting Perturbed Training Data for Deep Architectures}
\author{The IFT6266 Gang}
\date{April 2010, Technical Report, Dept. IRO, U. Montreal}

\maketitle

\begin{abstract}

\end{abstract}

\section{Introduction}

\section{Perturbation and Transformation of Character Images}

\subsection{Affine Transformations}
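For reference, and independently of the particular parameter ranges used in the experiments (which are not specified here), a generic affine transformation maps pixel coordinates $(x,y)$ to
\begin{equation}
\left( \begin{array}{c} x' \\ y' \end{array} \right)
=
\left( \begin{array}{cc} a & b \\ d & e \end{array} \right)
\left( \begin{array}{c} x \\ y \end{array} \right)
+
\left( \begin{array}{c} c \\ f \end{array} \right),
\end{equation}
so that rotation, scaling, shearing and translation are all obtained by sampling the six parameters $a,\ldots,f$ in appropriate ranges and resampling the image through the resulting map.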
\subsection{Adding Slant}
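Slant can be seen as a special case of the above: a horizontal shear that displaces each row of the image proportionally to its vertical position. With a slant parameter $s$ (whose sampling range is not specified here), one common convention maps a pixel at $(x,y)$ to
\begin{equation}
(x', y') = (x + s\,y,\; y).
\end{equation}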
\subsection{Local Elastic Deformations}
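As a purely illustrative sketch (the displacement distribution, smoothing width and amplitude actually used may differ), a local elastic deformation can be obtained by smoothing a random per-pixel displacement field with a Gaussian filter and resampling the image through it. The function name \texttt{elastic\_deform} and its default parameters below are placeholders:
\begin{verbatim}
import numpy as np
from scipy.ndimage import gaussian_filter, map_coordinates

def elastic_deform(image, alpha=8.0, sigma=3.0, rng=None):
    """Apply a random local elastic deformation to a 2-D grayscale image.

    A per-pixel displacement field is drawn uniformly in [-1, 1],
    smoothed with a Gaussian filter of width `sigma`, scaled by
    `alpha`, and used to resample the image.
    """
    rng = np.random.default_rng() if rng is None else rng
    dx = gaussian_filter(rng.uniform(-1, 1, image.shape), sigma) * alpha
    dy = gaussian_filter(rng.uniform(-1, 1, image.shape), sigma) * alpha
    ys, xs = np.meshgrid(np.arange(image.shape[0]),
                         np.arange(image.shape[1]), indexing='ij')
    return map_coordinates(image, np.array([ys + dy, xs + dx]),
                           order=1, mode='reflect')
\end{verbatim}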
\subsection{Changing Thickness}
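One simple way to vary stroke thickness, shown here only as a sketch of the general idea (the structuring elements and amounts used in practice may differ), is grayscale morphological dilation or erosion:
\begin{verbatim}
from scipy.ndimage import grey_dilation, grey_erosion

def change_thickness(image, size=2, thicken=True):
    """Thicken (dilate) or thin (erode) character strokes using a
    grayscale morphological operation with a size x size window."""
    op = grey_dilation if thicken else grey_erosion
    return op(image, size=(size, size))
\end{verbatim}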
\subsection{Occlusion}
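The sketch below occludes a random rectangle with a constant value; the occluders actually used (for instance pieces of other images) may be richer, so the function and its parameters should be read as placeholders:
\begin{verbatim}
import numpy as np

def occlude(image, max_frac=0.4, fill=0.0, rng=None):
    """Replace a random rectangle, covering at most `max_frac` of each
    dimension, by the constant value `fill`."""
    rng = np.random.default_rng() if rng is None else rng
    h, w = image.shape
    oh = rng.integers(1, max(2, int(h * max_frac)))
    ow = rng.integers(1, max(2, int(w * max_frac)))
    top = rng.integers(0, h - oh + 1)
    left = rng.integers(0, w - ow + 1)
    out = image.copy()
    out[top:top + oh, left:left + ow] = fill
    return out
\end{verbatim}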
\subsection{Background Images}
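A minimal way to composite a character over a natural-image background, assuming grayscale images in $[0,1]$ with ink encoded as high values (the blending rule used for the actual datasets may differ), is a pixel-wise maximum with a random crop of the background:
\begin{verbatim}
import numpy as np

def add_background(char_image, background, rng=None):
    """Composite a character over a random crop of a (larger)
    background image by taking the pixel-wise maximum, which keeps
    the strokes visible when ink is encoded as high values."""
    rng = np.random.default_rng() if rng is None else rng
    h, w = char_image.shape
    top = rng.integers(0, background.shape[0] - h + 1)
    left = rng.integers(0, background.shape[1] - w + 1)
    patch = background[top:top + h, left:left + w]
    return np.maximum(char_image, patch)
\end{verbatim}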
\subsection{Salt and Pepper Noise}
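Salt-and-pepper noise sets a random fraction of the pixels to the extreme intensities; a straightforward sketch, with a placeholder corruption probability, is:
\begin{verbatim}
import numpy as np

def salt_and_pepper(image, prob=0.1, rng=None):
    """Set a fraction `prob` of the pixels to pure black or pure white
    (each with probability prob / 2), leaving the others untouched."""
    rng = np.random.default_rng() if rng is None else rng
    u = rng.uniform(size=image.shape)
    out = image.copy()
    out[u < prob / 2] = 0.0                    # pepper
    out[(u >= prob / 2) & (u < prob)] = 1.0    # salt
    return out
\end{verbatim}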
\subsection{Spatially Gaussian Noise}
\subsection{Color and Contrast Changes}
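As an example of the kind of photometric change involved (the actual contrast and polarity policies are not specified here), contrast can be rescaled around the mid-gray level and clipped back to the valid range:
\begin{verbatim}
import numpy as np

def change_contrast(image, factor):
    """Rescale intensities around mid-gray 0.5 by `factor` (factor < 1
    lowers contrast, factor > 1 raises it) and clip to [0, 1]."""
    return np.clip(0.5 + factor * (image - 0.5), 0.0, 1.0)
\end{verbatim}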


\section{Learning Algorithms for Deep Architectures}

\section{Experimental Setup}

\subsection{Training Datasets}

\subsubsection{Data Sources}

\begin{itemize}
\item {\bf NIST}
\item {\bf Fonts}
\item {\bf Captchas}
\item {\bf OCR data}
\end{itemize}

\subsubsection{Data Sets}
\begin{itemize}
\item {\bf NIST}
\item {\bf P07}
\item {\bf NISTP} {\em do not use PNIST but NISTP, to remain politically correct...}
\end{itemize}

\subsection{Models and their Hyperparameters}

\subsubsection{Multi-Layer Perceptrons (MLP)}
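Recall the standard formulation of a one-hidden-layer MLP with a softmax output (the depth and nonlinearity actually used may vary): with input $x$, weight matrices $W^{(1)}, W^{(2)}$, biases $b^{(1)}, b^{(2)}$ and an elementwise nonlinearity such as $\tanh$,
\begin{equation}
P(Y = i \mid x) = \mathrm{softmax}_i\left( b^{(2)} + W^{(2)} \tanh\left( b^{(1)} + W^{(1)} x \right) \right),
\end{equation}
trained by stochastic gradient descent on the negative log-likelihood $-\log P(Y = y \mid x)$. The number of hidden units and the learning rate are among the hyperparameters referred to in this subsection.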

\subsubsection{Stacked Denoising Auto-Encoders (SDAE)}
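Again for completeness, each layer of an SDAE is pre-trained as a denoising auto-encoder: a clean input $x$ is stochastically corrupted into $\tilde{x}$ (for instance by zeroing a random fraction of its components), then encoded and decoded, and the parameters are trained to make the reconstruction close to the clean input. One standard instantiation, with sigmoid $s$ and a cross-entropy reconstruction loss (the corruption process and loss used in the experiments may differ), is
\begin{eqnarray}
h & = & s(W \tilde{x} + b), \\
\hat{x} & = & s(W' h + b'), \\
L(x,\hat{x}) & = & - \sum_k \left[ x_k \log \hat{x}_k + (1-x_k) \log (1-\hat{x}_k) \right].
\end{eqnarray}
Layers are pre-trained one on top of the other on unlabeled inputs, after which a supervised output layer is added and the whole network is fine-tuned by gradient descent; the corruption level, number of layers and layer sizes are among the hyperparameters of this subsection.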

\section{Experimental Results}

\subsection{SDAE vs MLP}

\subsection{Perturbed Training Data More Helpful for SDAE}

\subsection{Training with More Classes than Necessary}

\section{Conclusions}

\bibliography{strings,ml}
\bibliographystyle{mlapa}

\end{document}