From bda6e7ef79c89806bf219192f386105c428e1134 Mon Sep 17 00:00:00 2001
From: raphael couturier
Date: Tue, 7 Oct 2014 10:14:24 +0200
Subject: [PATCH] new

---
 paper.tex | 24 +++++++++++++++++-------
 1 file changed, 17 insertions(+), 7 deletions(-)

diff --git a/paper.tex b/paper.tex
index 23bb18b..acb46bb 100644
--- a/paper.tex
+++ b/paper.tex
@@ -644,12 +644,13 @@ appropriate than a direct method in a parallel context.
 \Input $A$ (sparse matrix), $b$ (right-hand side)
 \Output $x$ (solution vector)\vspace{0.2cm}
 \State Set the initial guess $x^0$
-\For {$k=1,2,3,\ldots$ until convergence} \label{algo:conv}
+\For {$k=1,2,3,\ldots$ until convergence (error$<\epsilon$)} \label{algo:conv}
 \State $x^k=Solve(A,b,x^{k-1},m)$ \label{algo:solve}
+\State Retrieve the error from the inner solver
 \State $S_{k~mod~s}=x^k$ \label{algo:store}
-\If {$k$ mod $s=0$ {\bf and} not convergence}
+\If {$k$ mod $s=0$ {\bf and} error$>\epsilon$}
 \State $R=AS$ \Comment{compute dense matrix}
-\State Solve least-squares problem $\underset{\alpha\in\mathbb{R}^{s}}{min}\|b-R\alpha\|_2$
+\State Solve least-squares problem $\underset{\alpha\in\mathbb{R}^{s}}{\min}\|b-R\alpha\|_2$ \label{algo:}
 \State $x^k=S\alpha$ \Comment{compute new solution}
 \EndIf
 \EndFor
@@ -660,10 +661,19 @@ appropriate than a direct method in a parallel context.
 Algorithm~\ref{algo:01} summarizes the principle of our method. The outer
 iteration is inside the for loop. Line~\ref{algo:solve}, the Krylov method is
 called for a maximum of $m$ iterations. In practice, we suggest to choose $m$
-equals to the restart number of the GMRES like method. Line~\ref{algo:store},
-$S_{k~ mod~ s}=x^k$ consists in copying the solution $x_k$ into the column $k~
-mod~ s$ of the matrix $S$. After the minimization, the matrix $S$ is reused with
-the new values of the residuals.
+equal to the restart number of the GMRES-like method. Moreover, a tolerance
+threshold must be specified for the solver. In practice, this threshold must be
+much smaller than the convergence threshold of the TSARM algorithm
+(i.e., $\epsilon$). Line~\ref{algo:store}, $S_{k~ mod~ s}=x^k$ consists of
+copying the solution $x^k$ into the column $k~ mod~ s$ of the matrix $S$. After
+the minimization, the matrix $S$ is reused with the new values of the residuals.
 % à continuer Line
+
+To summarize, the important parameters of TSARM are:
+\begin{itemize}
+\item $\epsilon$: the threshold used to stop the method
+\item $m$: the maximum number of iterations of the inner Krylov method
+\item $s$: the number of outer iterations before applying the minimization step
+\end{itemize}
 %%%*********************************************************
 %%%*********************************************************
--
2.39.5
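
The hunks above only patch the pseudocode and its discussion. As a reading aid, the following minimal Python/SciPy sketch shows the same two-stage structure: $m$ iterations of a restarted GMRES as the inner solver, the last $s$ iterates stored as columns of $S$, and a least-squares minimization $\min_\alpha \|b - AS\alpha\|_2$ every $s$ outer iterations. The function name tsarm, the choice of scipy.sparse.linalg.gmres as the inner solver, the parameter defaults, and the test problem are illustrative assumptions, not the paper's actual implementation.

# tsarm_sketch.py -- illustrative only; not the authors' implementation.
import numpy as np
import scipy.sparse as sp
import scipy.sparse.linalg as spla


def tsarm(A, b, eps=1e-8, m=30, s=10, max_outer=1000):
    """Two-stage sketch: m restarted-GMRES iterations per outer step, plus a
    least-squares minimization over the last s iterates every s outer steps
    (parameter names follow the pseudocode above)."""
    n = b.shape[0]
    x = np.zeros(n)               # initial guess x^0
    S = np.zeros((n, s))          # column k mod s stores the iterate x^k

    for k in range(1, max_outer + 1):
        # Inner stage: at most m iterations of restarted GMRES.
        x, _ = spla.gmres(A, b, x0=x, restart=m, maxiter=1)
        error = np.linalg.norm(b - A @ x)   # retrieve the current error
        S[:, k % s] = x                     # S_{k mod s} = x^k

        if k % s == 0 and error > eps:
            # Minimization stage: R = A S, then min_alpha ||b - R alpha||_2.
            R = np.asarray(A @ S)
            alpha, *_ = np.linalg.lstsq(R, b, rcond=None)
            x = S @ alpha                   # new solution x^k = S alpha

        if error < eps:                     # outer convergence test
            return x
    return x


if __name__ == "__main__":
    # Small 1-D Poisson test problem (tridiagonal, symmetric positive definite).
    n = 200
    A = sp.diags([-1.0, 2.0, -1.0], [-1, 0, 1], shape=(n, n), format="csr")
    b = np.ones(n)
    x = tsarm(A, b, eps=1e-8, m=10, s=5)
    print("residual norm:", np.linalg.norm(b - A @ x))

Calling gmres with restart=m and maxiter=1 makes each call perform at most m inner iterations, mirroring Solve(A, b, x^{k-1}, m) in the pseudocode; in a real setting the inner solver's own tolerance would be set well below $\epsilon$, as the patched text recommends.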