From: couchot
Date: Mon, 19 Sep 2011 15:35:36 +0000 (+0200)
Subject: correction bib
X-Git-Url: https://bilbo.iut-bm.univ-fcomte.fr/and/gitweb/chaos1.git/commitdiff_plain/a1c7d633b750e7417e0e331d9f2df61936c76de7

correction bib
---

diff --git a/chaos-paper.bib b/chaos-paper.bib
index ff93415..54fd84d 100644
--- a/chaos-paper.bib
+++ b/chaos-paper.bib
@@ -108,7 +108,6 @@ year = 1998
        Xin Zhou},
  title = {A Novel Wavelet Image Watermarking Scheme Combined with
        Chaos Sequence and Neural Network},
- booktitle = {ISNN (2)},
  year = {2004},
  pages = {663-668},
  ee = {http://springerlink.metapress.com/openurl.asp?genre=article{\&}issn=0302-9743{\&}volume=3174{\&}spage=663},
@@ -120,15 +119,13 @@ year = 1998
  editor = {Fuliang Yin and Jun Wang and Chengan Guo},
- title = {Advances in Neural Networks - ISNN 2004, International Symposium
+ title = {Advances in Neural Networks - International Symposium
        on Neural Networks, Dalian, China, August 19-21, 2004,
        Proceedings, Part II},
- booktitle = {ISNN (2)},
  publisher = {Springer},
  series = {Lecture Notes in Computer Science},
  volume = {3174},
  year = {2004},
- isbn = {3-540-22843-8},
  bibsource = {DBLP, http://dblp.uni-trier.de}
 }
@@ -214,7 +211,6 @@ keywords = "Image encryption"
  booktitle = {Proceedings of the 2009 international joint conference on Neural Networks},
  series = {IJCNN'09},
  year = {2009},
- isbn = {978-1-4244-3549-4},
  location = {Atlanta, Georgia, USA},
  pages = {2723--2728},
  numpages = {6},
@@ -243,7 +239,7 @@ number={2},
 pages={ 587 - 590},
 keywords={ authentication; chaos based spread spectrum image steganography; chaotic encryption; chaotic modulation; covert communication; digital security schemes; home-office environment; in-band captioning; large-scale proliferation; tamperproofing; wireless products; chaotic communication; cryptography; data encapsulation; image processing; message authentication; modulation; spread spectrum communication;},
 doi={10.1109/TCE.2004.1309431},
-ISSN={0098-3063},}
+}
 
 @article{Zhang2005759,
 title = "An image encryption approach based on chaotic maps",
@@ -269,9 +265,8 @@ note = "{US} Patent 2,632,058, March 17 1953,(filed November 13 1947)"}
 @article{10.1109/CIMSiM.2010.36,
 author = {Jiri Holoska and Zuzana Oplatkova and Ivan Zelinka and Roman Senkerik},
 title = {Comparison between Neural Network Steganalysis and Linear Classification Method Stegdetect},
-journal ={Computational Intelligence, Modelling and Simulation, International Conference on},
+journal ={Computational Intelligence, Modelling and Simulation, International Conference on.},
 volume = {0},
-isbn = {978-0-7695-4262-1},
 year = {2010},
 pages = {15-20},
 doi = {http://doi.ieeecomputersociety.org/10.1109/CIMSiM.2010.36},
@@ -288,8 +283,8 @@ volume={},
 number={},
 pages={3352 -3357},
 keywords={Fisher linear discriminant;JPEG images;discrete cosine transforms;expanded Markov features;feature reduction;feature selection;polynomial fitting;principal component analysis;singular value decomposition;steganalysis;Markov processes;discrete cosine transforms;image coding;principal component analysis;singular value decomposition;steganography;},
-doi={10.1109/IJCNN.2008.4634274},
-ISSN={1098-7576},}
+doi={10.1109/IJCNN.2008.4634274}
+}
 
 @INPROCEEDINGS{guyeux10ter,
 author = {Bahi, Jacques and Guyeux, Christophe},
@@ -409,7 +404,6 @@ author = {Liu Shaohui and Yao Hongxun and Gao Wen},
 title = {Neural network based steganalysis in still images},
 journal ={Multimedia and Expo, IEEE International Conference on},
 volume = {2},
-isbn = {0-7803-7965-9},
 year = {2003},
 pages = {509-512},
 doi = {http://doi.ieeecomputersociety.org/10.1109/ICME.2003.1221665},

diff --git a/main.tex b/main.tex
index b72eb6d..8e7f34b 100644
--- a/main.tex
+++ b/main.tex
@@ -58,6 +58,11 @@ IUT de Belfort-Montb\'eliard, BP 527, \\
 \date{\today}
 
 \newcommand{\CG}[1]{\begin{color}{red}\textit{#1}\end{color}}
+\newcommand{\JFC}[1]{\begin{color}{blue}\textit{#1}\end{color}}
+
+
+
+
 
 \begin{abstract}
 %% Text of abstract
@@ -134,7 +139,9 @@ which is usually assessed through the computation of the Lyapunov
 exponent. An alternative approach is to consider a well-known neural
 network architecture: the MultiLayer Perceptron (MLP). These networks
 are suitable to model nonlinear relationships between data, due to
-their universal approximator capacity. Thus, this kind of network can
+their universal approximator capacity.
+\JFC{Michel, can you give a reference for this?}
+Thus, this kind of network can
 be trained to model a physical phenomenon known to be chaotic, such as
 Chua's circuit \cite{dalkiran10}. Sometimes, a neural network that is
 built by combining transfer functions and initial conditions that are both
@@ -508,7 +515,9 @@ Fig.~\ref{Fig:perceptron}).
 
 The behavior of the neural network is such that when the initial state
 is $x^0~\in~\mathds{B}^n$ and a sequence $(S^t)^{t \in \Nats}$ is
-given as outside input, then the sequence of successive published
+given as outside input,
+\JFC{say more about the outside world}
+then the sequence of successive published
 output vectors $\left(x^t\right)^{t \in \mathds{N}^{\ast}}$ is exactly
 the one produced by the chaotic iterations formally described in
 Eq.~(\ref{eq:CIs}). This means that, mathematically, if we use similar
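The hunk above leans on Eq.~(\ref{eq:CIs}), which lies outside this diff. As a reading aid, here is a sketch of the usual single-component chaotic-iteration update, consistent with how $x^t$, $S^t$, and $f$ are used in the surrounding text; the exact equation in main.tex may differ in notation:

% Reading aid, not part of the patch: a plausible form of Eq. (eq:CIs).
% At each time step t, only the component designated by S^t is updated;
% every other component of x^t is copied unchanged.
\begin{equation}
x^{t+1}_i =
  \begin{cases}
    x^t_i               & \text{if } i \neq S^t,\\
    f\left(x^t\right)_i & \text{if } i = S^t,
  \end{cases}
\qquad i \in \llbracket 1;n \rrbracket.
\end{equation}

Under this reading, the MLP performs exactly one such update per iteration, which is why its published output vectors $\left(x^t\right)$ coincide with the chaotic iterations.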
@@ -539,8 +548,8 @@ without any convincing mathematical proof. We propose an approach to
 overcome this drawback for a particular category of multilayer
 perceptrons defined below, and for Devaney's formulation of chaos.
 In spite of this restriction, we think that this approach can be
-extended to a large variety of neural networks. We plan to study a
-generalization of this approach in a future work.
+extended to a large variety of neural networks.
+
 
 We consider a multilayer perceptron of the following form: inputs
 are $n$ binary digits and one integer value, while outputs are $n$
@@ -556,6 +565,7 @@ connection to an input one.
  compute the new output one $\left(x^{t+1}_1,\dots,x^{t+1}_n\right)$.
  While the remaining input receives a new integer value $S^t \in
  \llbracket1;n\rrbracket$, which is provided by the outside world.
+\JFC{say more about the outside world}
 \end{itemize}
 
 The topological behavior of these particular neural networks can be
@@ -563,11 +573,15 @@ proven to be chaotic through the following process. Firstly, we denote by
 $F: \llbracket 1;n \rrbracket \times \mathds{B}^n \rightarrow
 \mathds{B}^n$ the function that maps the value
 $\left(s,\left(x_1,\dots,x_n\right)\right) \in \llbracket 1;n
-\rrbracket \times \mathds{B}^n$ into the value
+\rrbracket \times \mathds{B}^n$
+\JFC{here, this should be $S^t$ and not $s$, shouldn't it?}
+into the value
 $\left(y_1,\dots,y_n\right) \in \mathds{B}^n$, where
 $\left(y_1,\dots,y_n\right)$ is the response of the neural
 network after the initialization of its input layer with
-$\left(s,\left(x_1,\dots, x_n\right)\right)$. Secondly, we define $f:
+$\left(s,\left(x_1,\dots, x_n\right)\right)$.
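The vectorial negation $f_0$ is invoked in the next hunks without being restated in this diff. For self-containedness, its standard definition (every Boolean coordinate is flipped) is:

% Reading aid, not part of the patch: the standard definition of the
% vectorial negation f_0 named below.
\begin{equation}
f_0 : \mathds{B}^n \rightarrow \mathds{B}^n, \qquad
f_0\left(x_1,\dots,x_n\right) = \left(\overline{x_1},\dots,\overline{x_n}\right).
\end{equation}

With $f = f_0$, each chaotic-iteration step negates exactly the component selected by $S^t$; this is the case that Ref.~\cite{gfb10:ip} proves expansive and topologically mixing.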
+\JFC{here, this should be $S^t$ and not $s$, shouldn't it?}
+Secondly, we define $f:
 \mathds{B}^n \rightarrow \mathds{B}^n$ such that
 $f\left(x_1,x_2,\dots,x_n\right)$ is equal to
 \begin{equation}
@@ -614,9 +628,12 @@ if and only if, for any pair of disjoint open sets $U$, $V \neq
 \emptyset$, we can find some $n_0 \in \mathds{N}$ such that for any
 $n \geq n_0$, we have $f^n(U) \cap V \neq \emptyset$.
 \end{definition}
+\JFC{Give an intuitive meaning to these definitions}
+
 
-As proven in Ref.~\cite{gfb10:ip}, chaotic iterations are expansive
-and topologically mixing when $f$ is the vectorial negation $f_0$.
+It has been proven in Ref.~\cite{gfb10:ip} that chaotic iterations
+are expansive and topologically mixing when $f$ is the
+vectorial negation $f_0$.
 Consequently, these properties are inherited by the CI-MLP($f_0$)
 recurrent neural network previously presented, which induces greater
 unpredictability. Any difference in the initial value of the input
@@ -624,8 +641,10 @@ layer is in particular magnified up to the expansivity
 constant.
 
 Let us then focus on the consequences for a neural network to be chaotic
-according to Devaney's definition. First of all, the topological
-transitivity property implies indecomposability.
+according to Devaney's definition. Intuitively, the topological
+transitivity property implies indecomposability, which is formally defined
+as follows:
+
 \begin{definition} \label{def10}
 A dynamical system $\left( \mathcal{X}, f\right)$ is
@@ -784,7 +803,9 @@ such functions into a model amenable to be learned by an ANN.
 
 This section presents how (not) chaotic iterations of $G_f$ are
 translated into another model more suited to artificial neural
-networks. Formally, input and output vectors are pairs~$((S^t)^{t \in
+networks.
+\JFC{elaborate on ``more suited''}
+Formally, input and output vectors are pairs~$((S^t)^{t \in
 \Nats},x)$ and $\left(\sigma((S^t)^{t \in
 \Nats}),F_{f}(S^0,x)\right)$ as defined in~Eq.~(\ref{eq:Gf}).
@@ -965,7 +986,7 @@ configuration is always expressed as a natural number, whereas in the
 first one the number of inputs follows the increase of the boolean
 vectors coding configurations. In this latter case, the coding gives
 finer information on the configuration evolution.
-
+\JFC{I did not understand the previous paragraph. It should be reworked.}
 \begin{table}[b]
 \caption{Prediction success rates for configurations expressed with Gray code}
 \label{tab2}
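The paragraph flagged in the last hunk contrasts two ways of feeding a configuration to the network: as a boolean vector on $n$ inputs, or as one natural number via its Gray code. A worked instance, under the editorial assumption that the standard reflected binary code $g(k) = k \oplus (k \gg 1)$ is meant (the diff does not show the paper's exact coding):

% Reading aid, not part of the patch; assumes g(k) = k XOR (k >> 1).
% For n = 3, the configuration (1,0,1), read as the code word 101_2,
% can be presented either on three boolean inputs, or as the single
% natural number 6, since
%   g(6) = 110_2 XOR 011_2 = 101_2.

The worked values are illustrative only; in the first coding the number of inputs grows with $n$, while in the second a single integer input stands for the whole configuration.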