From: Jean-François Couchot
Date: Tue, 20 Sep 2011 08:49:19 +0000 (+0200)
Subject: Merge branch 'master' of ssh://bilbo.iut-bm.univ-fcomte.fr/chaos1
X-Git-Url: https://bilbo.iut-bm.univ-fcomte.fr/and/gitweb/chaos1.git/commitdiff_plain/4045f651ca554dd4d507dc7cc2e3d2a0c1ffc0b7?hp=c19bb6552bea63fedad0f1be9990c57a813a67bf

Merge branch 'master' of ssh://bilbo.iut-bm.univ-fcomte.fr/chaos1
---

diff --git a/chaos-paper.bib b/chaos-paper.bib
index ff93415..54fd84d 100644
--- a/chaos-paper.bib
+++ b/chaos-paper.bib
@@ -108,7 +108,6 @@ year = 1998
                Xin Zhou},
   title = {A Novel Wavelet Image Watermarking Scheme Combined with Chaos
                Sequence and Neural Network},
-  booktitle = {ISNN (2)},
   year = {2004},
   pages = {663-668},
   ee = {http://springerlink.metapress.com/openurl.asp?genre=article{\&}issn=0302-9743{\&}volume=3174{\&}spage=663},
@@ -120,15 +119,13 @@ year = 1998
   editor = {Fuliang Yin and Jun Wang and Chengan Guo},
-  title = {Advances in Neural Networks - ISNN 2004, International Symposium
+  title = {Advances in Neural Networks - International Symposium
               on Neural Networks, Dalian, China, August 19-21, 2004, Proceedings, Part II},
-  booktitle = {ISNN (2)},
   publisher = {Springer},
   series = {Lecture Notes in Computer Science},
   volume = {3174},
   year = {2004},
-  isbn = {3-540-22843-8},
   bibsource = {DBLP, http://dblp.uni-trier.de}
 }
@@ -214,7 +211,6 @@ keywords = "Image encryption"
 booktitle = {Proceedings of the 2009 international joint conference on Neural Networks},
 series = {IJCNN'09},
 year = {2009},
-isbn = {978-1-4244-3549-4},
 location = {Atlanta, Georgia, USA},
 pages = {2723--2728},
 numpages = {6},
@@ -243,7 +239,7 @@ number={2},
 pages={587-590},
 keywords={authentication; chaos based spread spectrum image steganography; chaotic encryption; chaotic modulation; covert communication; digital security schemes; home-office environment; in-band captioning; large-scale proliferation; tamperproofing; wireless products; chaotic communication; cryptography; data encapsulation; image processing; message authentication; modulation; spread spectrum communication;},
 doi={10.1109/TCE.2004.1309431},
-ISSN={0098-3063},}
+}
 
 @article{Zhang2005759,
 title = "An image encryption approach based on chaotic maps",
@@ -269,9 +265,8 @@ note = "{US} Patent 2,632,058, March 17, 1953 (filed November 13, 1947)"}
 @article{10.1109/CIMSiM.2010.36,
 author = {Jiri Holoska and Zuzana Oplatkova and Ivan Zelinka and Roman Senkerik},
 title = {Comparison between Neural Network Steganalysis and Linear Classification Method Stegdetect},
 journal ={Computational Intelligence, Modelling and Simulation, International Conference on},
 volume = {0},
-isbn = {978-0-7695-4262-1},
 year = {2010},
 pages = {15-20},
 doi = {http://doi.ieeecomputersociety.org/10.1109/CIMSiM.2010.36},
@@ -288,8 +283,8 @@ volume={},
 number={},
 pages={3352-3357},
 keywords={Fisher linear discriminant;JPEG images;discrete cosine transforms;expanded Markov features;feature reduction;feature selection;polynomial fitting;principal component analysis;singular value decomposition;steganalysis;Markov processes;discrete cosine transforms;image coding;principal component analysis;singular value decomposition;steganography;},
-doi={10.1109/IJCNN.2008.4634274},
-ISSN={1098-7576},}
+doi={10.1109/IJCNN.2008.4634274}
+}
 
 @INPROCEEDINGS{guyeux10ter,
   author = {Bahi, Jacques and Guyeux, Christophe},
@@ -409,7 +404,6 @@ author = {Liu Shaohui and Yao Hongxun and Gao Wen},
 title = {Neural network based steganalysis in still images},
 journal ={Multimedia and Expo, IEEE International Conference on},
 volume = {2},
-isbn = {0-7803-7965-9},
 year = {2003},
 pages = {509-512},
 doi = {http://doi.ieeecomputersociety.org/10.1109/ICME.2003.1221665},

diff --git a/main.tex b/main.tex
index 545524f..8e7f34b 100644
--- a/main.tex
+++ b/main.tex
@@ -58,6 +58,11 @@ IUT de Belfort-Montb\'eliard, BP 527, \\
 \date{\today}
 
 \newcommand{\CG}[1]{\begin{color}{red}\textit{#1}\end{color}}
+\newcommand{\JFC}[1]{\begin{color}{blue}\textit{#1}\end{color}}
+
+
+
+
 \begin{abstract}
 %% Text of abstract
@@ -107,6 +112,9 @@ is far more difficult than non chaotic behaviors.
 \section{Introduction}
 \label{S1}
 
+REVIEW THE WHOLE INTRO AND THE ABSTRACT in light of asynchronous, chaotic
+
+
 Several research works have proposed or run chaotic neural networks
 in recent years. The complex dynamics of such networks lead
 to various potential application areas: associative
@@ -131,7 +139,9 @@ which is usually assessed through the computation of the Lyapunov exponent.
 An alternative approach is to consider a well-known neural network
 architecture: the MultiLayer Perceptron (MLP). These networks are
 suitable for modeling nonlinear relationships between data, due to
-their universal approximator capacity. Thus, this kind of networks can
+their universal approximation capacity.
+\JFC{Michel, can you give a reference for this?}
+Thus, this kind of network can
 be trained to model a physical phenomenon known to be chaotic such as
 Chua's circuit \cite{dalkiran10}. Sometimes, a neural network which is
 built by combining transfer functions and initial conditions that are both
@@ -146,7 +156,7 @@ using a theoretical framework based on Devaney's definition of chaos
 qualitative and quantitative tools to evaluate the complex behavior
 of a dynamical system: ergodicity, expansivity, and so on. More
 precisely, in this paper, which is an extension of a previous work
-\cite{bgs11:ip}, we establish the equivalence between chaotic
+\cite{bgs11:ip}, we establish the equivalence between asynchronous
 iterations and a class of globally recurrent MLP.
 The investigation of the converse problem is the second contribution:
 we indeed study the ability for
@@ -193,7 +203,10 @@ iterations and Devaney's chaos is finally presented at the end of this
 section.
 
 In what follows and for any function $f$, $f^n$ means the composition
-$f \circ f \circ \hdots \circ f$ ($n$ times).
+$f \circ f \circ \hdots \circ f$ ($n$ times), and an \emph{iteration}
+of a \emph{dynamical system} is the step that consists in
+updating the global state $x^t$ at time $t$ with respect to a function
+$f$, so that $x^{t+1} = f(x^t)$.
 
 \subsection{Devaney's chaotic dynamical systems}
 
@@ -229,11 +242,11 @@ point dependence on initial conditions.
 
 Topological transitivity is checked when, for any point, any
 neighborhood of its future evolution eventually overlaps with any other
-given region. More precisely, 
+given region. More precisely,
 
 \begin{definition} \label{def2}
 A continuous function $f$ on a topological space $(\mathcal{X},\tau)$
-is defined to be {\bf topologically transitive} if for any pair of
+is defined to be {\emph{topologically transitive}} if, for any pair of
 open sets $U, V \subset \mathcal{X}$, there exists $k \in \mathds{N}^{\ast}$
@@ -248,12 +261,12 @@ On the contrary, a dense set of periodic points is an element of
 regularity that a chaotic dynamical system has to exhibit.
 \begin{definition} \label{def3}
-A point $x$ is called a {\bf periodic point} for $f$ of period~$n \in
+A point $x$ is called a {\emph{periodic point}} for $f$ of period~$n \in
 \mathds{N}^{\ast}$ if $f^{n}(x)=x$.
 \end{definition}
 
 \begin{definition} \label{def4}
-$f$ is said to be {\bf regular} on $(\mathcal{X},\tau)$ if the set of
+$f$ is said to be {\emph{regular}} on $(\mathcal{X},\tau)$ if the set of
 periodic points for $f$ is dense in $\mathcal{X}$ (for any $x \in
 \mathcal{X}$, we can find at least one periodic point in any of its
 neighborhoods).
@@ -265,23 +278,27 @@ in a completely different manner, leading to unpredictability for the whole
 system. Then,
 
 \begin{definition} \label{sensitivity}
-$f$ has {\bf sensitive dependence on initial conditions} if there
+$f$ has {\emph{sensitive dependence on initial conditions}} if there
 exists $\delta >0$ such that, for any $x\in \mathcal{X}$ and any
 neighborhood $V$ of $x$, there exist $y\in V$ and $n > 0$ such that
 $d\left(f^{n}(x), f^{n}(y)\right) >\delta $. The value $\delta$ is called the
-  {\bf constant of sensitivity} of $f$.
+  {\emph{constant of sensitivity}} of $f$.
 \end{definition}
 
 Finally,
 
 \begin{definition} \label{def5}
-The dynamical system that iterates $f$ is {\bf chaotic according to Devaney}
+The dynamical system that iterates $f$ is {\emph{chaotic according to Devaney}}
 on $(\mathcal{X},\tau)$ if $f$ is regular, topologically transitive,
 and has sensitive dependence on its initial conditions.
 \end{definition}
 
-Let us notice that for a metric space the last condition follows from
-the two first ones~\cite{Banks92}.
+In what follows, iterations are said to be \emph{chaotic according to
+Devaney} when the corresponding dynamical system is chaotic according
+to Devaney.
+
+
+%Let us notice that for a metric space the last condition follows from
+%the two first ones~\cite{Banks92}.
 
 \subsection{Asynchronous Iterations}
 
@@ -326,7 +343,17 @@ In the sequel, the {\it strategy} $S=(S^{t})^{t \in \Nats}$ is the
 sequence defining which component to update at time $t$ and $S^{t}$
 denotes its $t$-th term.
 This iteration scheme that only modifies one element at each iteration
-is clasically refered as \emph{asynchronous iterations}.
+is classically referred to as \emph{asynchronous iterations}.
+More precisely, we have here
+$$
+x^{t+1}_i = \left\{
+\begin{array}{l}
+  f_i(x^t) \textrm{ if $S^t = i$} \\
+  x_i^t \textrm{ otherwise}
+  \end{array}
+\right.
+$$
+
 The next section shows the link between asynchronous iterations and
 Devaney's Chaos.
 
@@ -352,7 +379,8 @@ $F_{f}: \llbracket1;n\rrbracket\times \mathds{B}^{n} \rightarrow
 \right.
 \end{equation}
 
-\noindent With such a notation, configurations are defined for times
+\noindent With such a notation, the configurations
+obtained asynchronously are defined for times
 $t=0,1,2,\ldots$ by:
 \begin{equation}\label{eq:sync}
 \left\{\begin{array}{l}
@@ -376,16 +404,20 @@ X^{k+1}& = & G_{f}(X^{k})\\
 \label{eq:Gf}
 \end{equation}
 where $\sigma$ is the function that removes the first term of the
-strategy ({\it i.e.},~$S^0$). This definition means that only one
-component of the system is updated at an iteration: the $S^t$-th
-element. But it can be extended by considering subsets for $S^t$.
+strategy ({\it i.e.},~$S^0$).
+This definition links asynchronous iterations with
+classical iterations of a dynamical system.
+%means that only one
+%component of the system is updated at an iteration: the $S^t$-th
+%element. But it can be extended by considering subsets for $S^t$.
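
As a quick illustration of the update rule added above, here is a minimal
Python sketch (an aside, not part of the paper's sources; components are
0-indexed here, whereas the paper numbers them from 1 to $n$):

    # Asynchronous iterations: x^{t+1}_i = f_i(x^t) if S^t = i, else x^t_i.
    def async_iterate(f, x0, strategy):
        x = list(x0)
        trajectory = [tuple(x)]
        for s in strategy:             # s plays the role of S^t
            x[s] = f(tuple(x))[s]      # only the s-th component is updated
            trajectory.append(tuple(x))
        return trajectory

    # The vectorial negation f_0 used later in the paper.
    f0 = lambda x: tuple(1 - b for b in x)
    print(async_iterate(f0, (0, 0, 0), [0, 2, 1]))
    # [(0, 0, 0), (1, 0, 0), (1, 0, 1), (1, 1, 1)]

Each step evaluates $f$ on the whole state but commits only the component
selected by the strategy, which is exactly the scheme of the displayed
equation.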
-The {\bf distance} $d$ between two points $(S,x)$ and
-$(\check{S},\check{x})\in \mathcal{X} = \llbracket1;n\rrbracket^\Nats
-\times \Bool^{n}$ is defined by
+To study the topological properties of these iterations, we are then led to
+introduce a {\emph{distance}} $d$ between two points $(S,x)$ and
+$(\check{S},\check{x})\in \mathcal{X} = \llbracket1;n\rrbracket^\Nats
+\times \Bool^{n}$. It is defined by
 \begin{equation}
 d((S,x);(\check{S},\check{x}))=d_{e}(x,\check{x})+d_{s}(S,\check{S})
 \enspace ,
@@ -401,8 +433,7 @@ d_{s}(S,\check{S})=\frac{9}{2n}\sum_{t=0}^{\infty
 }\frac{|S^{t}-\check{S}^{t}|}{10^{t+1}} \in [0 ; 1]
 \enspace .
 \end{equation}
-This distance is defined to reflect the following
-information. Firstly, the more two systems have different components,
+Notice first that the more two systems differ in their components,
 the larger the distance between them is. Secondly, two systems with
 similar components and strategies, which have the same starting terms,
 must induce only a small distance. The proposed distance fulfills
@@ -421,10 +452,23 @@ Let $f:\Bool^n\to\Bool^n$. Iterations of $G_f$ are chaotic according
 to Devaney if and only if $\Gamma(f)$ is strongly connected.
 \end{theorem}
 
-Checking if a graph is strongly connected is not difficult. For
-example, consider the function $f_1\left(x_1,\dots,x_n\right)=\left(
-\overline{x_1},x_1,x_2,\dots,x_{n-1}\right)$. As $\Gamma(f_1)$ is
-obviously strongly connected, then $G_{f_1}$ is a chaotic map.
+Checking whether a graph is strongly connected is not difficult
+(using Tarjan's algorithm, for instance).
+Let a strategy $S$ and a function $f$ be given such that
+$\Gamma(f)$ is strongly connected.
+In that case, the iterations of the function $G_f$ defined in
+Eq.~(\ref{eq:Gf}) are chaotic according to Devaney.
+
+
+Let us then consider two functions $f_0$ and $f_1$, both in
+$\Bool^n\to\Bool^n$, that are used throughout this article.
+The former is the vectorial negation, \textit{i.e.},
+$f_{0}(x_{1},\dots,x_{n}) =(\overline{x_{1}},\dots,\overline{x_{n}})$.
+The latter is $f_1\left(x_1,\dots,x_n\right)=\left(
+\overline{x_1},x_1,x_2,\dots,x_{n-1}\right)$.
+It is not hard to see that $\Gamma(f_0)$ and $\Gamma(f_1)$ are
+both strongly connected, so the iterations of $G_{f_0}$ and of
+$G_{f_1}$ are chaotic according to Devaney.
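
Since the theorem reduces the chaoticity of $G_f$ to the strong
connectivity of $\Gamma(f)$, the check is easy to automate for small $n$.
Below is a minimal Python sketch (an aside, not part of the paper's
sources; it enumerates $\mathds{B}^n$ exhaustively and uses a simple
two-pass reachability test rather than Tarjan's algorithm):

    from itertools import product

    def iteration_graph(f, n):
        # Gamma(f): vertices are the configurations of B^n; for each i there
        # is an edge from x to x with its i-th component replaced by f(x)_i.
        adj = {}
        for x in product((0, 1), repeat=n):
            succ = set()
            for i in range(n):
                y = list(x)
                y[i] = f(x)[i]
                succ.add(tuple(y))
            adj[x] = succ
        return adj

    def strongly_connected(adj):
        # Strongly connected iff every vertex is reachable from an arbitrary
        # root and the root is reachable from every vertex (reversed graph).
        def reachable(graph, root):
            seen, stack = {root}, [root]
            while stack:
                for v in graph[stack.pop()]:
                    if v not in seen:
                        seen.add(v)
                        stack.append(v)
            return seen
        root = next(iter(adj))
        rev = {u: set() for u in adj}
        for u, succs in adj.items():
            for v in succs:
                rev[v].add(u)
        return len(reachable(adj, root)) == len(adj) == len(reachable(rev, root))

    f0 = lambda x: tuple(1 - b for b in x)   # vectorial negation
    f1 = lambda x: (1 - x[0],) + x[:-1]      # the second example function
    print(strongly_connected(iteration_graph(f0, 3)))  # True
    print(strongly_connected(iteration_graph(f1, 3)))  # True

The exhaustive construction is exponential in $n$, which is harmless here
since the point is only to certify small examples such as $f_0$ and $f_1$.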
 With this material, we are now able to build a first chaotic neural
 network, as defined in Devaney's formulation.
 
@@ -432,14 +476,13 @@ network, as defined in Devaney's formulation.
 \section{A chaotic neural network in the sense of Devaney}
 \label{S2}
 
-Let us firstly introduce the vectorial negation
-$f_{0}(x_{1},\dots,x_{n}) =(\overline{x_{1}},\dots,\overline{x_{n}})$,
-which is such that $\Gamma(f_0)$ is strongly connected. Considering
-the map $F_{f_0}:\llbracket 1; n \rrbracket \times \mathds{B}^n \to
-\mathds{B}^n$ associated to the vectorial negation, we can build a
-multilayer perceptron neural network modeling $F_{f_0}$. Hence, for
-all inputs $(s,x) \in \llbracket 1;n\rrbracket \times \mathds{B}^n$,
-the output layer will produce $F_{f_0}(s,x)$. It is then possible to
+Firstly, let us build a
+multilayer perceptron neural network modeling
+$F_{f_0}:\llbracket 1; n \rrbracket \times \mathds{B}^n \to
+\mathds{B}^n$ associated with the vectorial negation.
+More precisely, for all inputs
+$(s,x) \in \llbracket 1;n\rrbracket \times \mathds{B}^n$,
+the output layer produces $F_{f_0}(s,x)$. It is then possible to
 link the output layer and the input one, in order to model the
 dependence between two successive iterations. As a result we obtain a
 global recurrent neural network that behaves as follows (see
 Fig.~\ref{Fig:perceptron}).
 
@@ -472,14 +515,16 @@ Fig.~\ref{Fig:perceptron}).
 
 The behavior of the neural network is such that when the initial state is
 $x^0~\in~\mathds{B}^n$ and a sequence $(S^t)^{t \in \Nats}$ is
-given as outside input, then the sequence of successive published
+given as outside input,
+\JFC{say more about the outside world}
+ then the sequence of successive published
 output vectors $\left(x^t\right)^{t \in \mathds{N}^{\ast}}$ is exactly
 the one produced by the chaotic iterations formally described in
 Eq.~(\ref{eq:CIs}). This means that, mathematically, if we use similar
 input vectors, they both generate the same successive outputs
 $\left(x^t\right)^{t \in \mathds{N}^{\ast}}$, and therefore that they
 are equivalent reformulations of the iterations of $G_{f_0}$ in
-$\mathcal{X}$. Finally, since the proposed neural network is build to
+$\mathcal{X}$. Finally, since the proposed neural network is built to
 model the behavior of $G_{f_0}$, which is chaotic according to
 Devaney's definition of chaos, we can conclude that the network is
 also chaotic in this sense.
 
@@ -503,8 +548,8 @@ without any convincing mathematical proof. We propose an approach to
 overcome this drawback for a particular category of multilayer
 perceptrons defined below, and for Devaney's formulation of chaos. In
 spite of this restriction, we think that this approach can be
-extended to a large variety of neural networks. We plan to study a
-generalization of this approach in a future work.
+extended to a large variety of neural networks.
+
 
 We consider a multilayer perceptron of the following form: inputs
 are $n$ binary digits and one integer value, while outputs are $n$
@@ -520,6 +565,7 @@ connection to an input one.
   compute the new output one $\left(x^{t+1}_1,\dots,x^{t+1}_n\right)$.
   While the remaining input receives a new integer value $S^t \in
   \llbracket1;n\rrbracket$, which is provided by the outside world.
+\JFC{say more about the outside world}
 \end{itemize}
 
 The topological behavior of these particular neural networks can be
@@ -527,11 +573,15 @@ proven to be chaotic through the following process. Firstly, we denote by
 $F: \llbracket 1;n \rrbracket \times \mathds{B}^n \rightarrow
 \mathds{B}^n$ the function that maps the value
 $\left(s,\left(x_1,\dots,x_n\right)\right) \in \llbracket 1;n
-\rrbracket \times \mathds{B}^n$ into the value
+\rrbracket \times \mathds{B}^n$
+\JFC{here, this should be $S^t$ and not $s$, no?}
+ into the value
 $\left(y_1,\dots,y_n\right) \in \mathds{B}^n$, where
 $\left(y_1,\dots,y_n\right)$ is the response of the neural
 network after the initialization of its input layer with
-$\left(s,\left(x_1,\dots, x_n\right)\right)$. Secondly, we define $f:
+$\left(s,\left(x_1,\dots, x_n\right)\right)$.
+\JFC{here, this should be $S^t$ and not $s$, no?}
+Secondly, we define $f:
 \mathds{B}^n \rightarrow \mathds{B}^n$ such that
 $f\left(x_1,x_2,\dots,x_n\right)$ is equal to
 \begin{equation}
@@ -567,20 +617,23 @@ Let us first recall two fundamental definitions from the mathematical
 theory of chaos.
 
 \begin{definition} \label{def8}
-A function $f$ is said to be {\bf expansive} if $\exists
+A function $f$ is said to be {\emph{expansive}} if $\exists
 \varepsilon>0$, $\forall x \neq y$, $\exists n \in \mathds{N}$ such
 that $d\left(f^n(x),f^n(y)\right) \geq \varepsilon$.
 \end{definition}
 
 \begin{definition} \label{def9}
-A discrete dynamical system is said to be {\bf topologically mixing}
+A discrete dynamical system is said to be {\emph{topologically mixing}}
 if and only if, for any pair of disjoint open sets $U, V \neq
 \emptyset$, we can find some $n_0 \in \mathds{N}$ such that for any $n$,
 $n\geq n_0$, we have $f^n(U) \cap V \neq \emptyset$.
 \end{definition}
 
+\JFC{Give a meaning to these definitions}
+
-As proven in Ref.~\cite{gfb10:ip}, chaotic iterations are expansive
-and topologically mixing when $f$ is the vectorial negation $f_0$.
+It has been proven in Ref.~\cite{gfb10:ip} that chaotic iterations
+are expansive and topologically mixing when $f$ is the
+vectorial negation $f_0$.
 Consequently, these properties are inherited by the CI-MLP($f_0$)
 recurrent neural network previously presented, which induces
 greater unpredictability. Any difference in the initial value of the input
@@ -588,12 +641,14 @@ layer is in particular magnified up to the expansivity
 constant.
 
 Let us then focus on the consequences for a neural network to be chaotic
-according to Devaney's definition. First of all, the topological
-transitivity property implies indecomposability.
+according to Devaney's definition. Intuitively, the topological
+transitivity property implies indecomposability, which is formally defined
+as follows:
+
 \begin{definition} \label{def10}
-A dynamical system $\left( \mathcal{X}, f\right)$ is {\bf
-not decomposable} if it is not the union of two closed sets $A, B
+A dynamical system $\left( \mathcal{X}, f\right)$ is
+{\emph{not decomposable}} if it is not the union of two closed sets $A, B
 \subset \mathcal{X}$ such that $f(A) \subset A, f(B) \subset B$.
 \end{definition}
@@ -603,8 +658,8 @@ strongly connected. Moreover, under this hypothesis CI-MLPs($f$) are
 strongly transitive:
 
 \begin{definition} \label{def11}
-A dynamical system $\left( \mathcal{X}, f\right)$ is {\bf strongly
-transitive} if $\forall x,y \in \mathcal{X}$, $\forall r>0$, $\exists
+A dynamical system $\left( \mathcal{X}, f\right)$ is {\emph{strongly
+transitive}} if $\forall x,y \in \mathcal{X}$, $\forall r>0$, $\exists
 z \in \mathcal{X}$, $d(z,x)~\leq~r \Rightarrow \exists n \in
 \mathds{N}^{\ast}$, $f^n(z)=y$.
 \end{definition}
@@ -748,7 +803,9 @@ such functions into a model amenable to being learned by an ANN.
 
 This section presents how (not) chaotic iterations of $G_f$ are
 translated into another model more suited to artificial neural
-networks. Formally, input and output vectors are pairs~$((S^t)^{t \in
+networks.
+\JFC{elaborate on ``more suited''}
+Formally, input and output vectors are pairs~$((S^t)^{t \in
 \Nats},x)$ and $\left(\sigma((S^t)^{t \in
 \Nats}),F_{f}(S^0,x)\right)$ as defined in~Eq.~(\ref{eq:Gf}).
 
@@ -929,7 +986,7 @@ configuration is always expressed as a natural number, whereas in the
 first one the number of inputs follows the increase of the boolean
 vectors coding configurations. In this latter case, the coding gives
 finer information on the configuration evolution.
-
+\JFC{I did not understand the previous paragraph. It should be reworked.}
 \begin{table}[b]
 \caption{Prediction success rates for configurations expressed with Gray code}
 \label{tab2}
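
To make the shape of these training pairs concrete, here is a minimal
Python sketch (an aside, not part of the paper's sources; a finite prefix
stands in for the infinite strategy $(S^t)^{t \in \Nats}$, and components
are 0-indexed):

    # One ANN training pair: from ((S^t), x) to (sigma((S^t)), F_f(S^0, x)),
    # where sigma drops the first term of the strategy.
    def training_pair(f, strategy, x):
        s = strategy[0]                # S^0
        y = list(x)
        y[s] = f(tuple(x))[s]          # F_f(S^0, x): update component S^0 only
        return (strategy, x), (strategy[1:], tuple(y))

    f0 = lambda x: tuple(1 - b for b in x)
    print(training_pair(f0, (1, 0, 2), (0, 1, 0)))
    # (((1, 0, 2), (0, 1, 0)), ((0, 2), (0, 0, 0)))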