From ed256d49ed3a06f1ca791baf04262e8775091cd0 Mon Sep 17 00:00:00 2001
From: couturie
Date: Wed, 8 Jan 2014 16:03:48 +0100
Subject: [PATCH] addition

---
 biblio.bib       | 10 +++++++++-
 krylov_multi.tex | 14 +++++++++-----
 2 files changed, 18 insertions(+), 6 deletions(-)

diff --git a/biblio.bib b/biblio.bib
index d1f3b00..55c4a93 100644
--- a/biblio.bib
+++ b/biblio.bib
@@ -72,4 +72,12 @@
 pages={380--391},
 year={2008},
 publisher={Elsevier}
-}
\ No newline at end of file
+}
+
+@TechReport{prace-multi,
+  author      = {Nick Brown and J. Mark Bull and Iain Bethune},
+  title       = {Solving Large Sparse Linear Systems using Asynchronous Multisplitting},
+  institution = {PRACE White Paper No. WP84},
+  year        = {2013},
+}
+
diff --git a/krylov_multi.tex b/krylov_multi.tex
index 963ce3e..40380d0 100644
--- a/krylov_multi.tex
+++ b/krylov_multi.tex
@@ -108,15 +108,15 @@ Jacobi method.
 
 A general framework for studying parallel multisplitting has been presented in
 \cite{o1985multi} by O'Leary and White. Convergence conditions are given for
 the most general case. Many authors improved multisplitting algorithms by proposing
-for example a asynchronous version \cite{bru1995parallel} and convergence
-condition \cite{bai1999block,bahi2000asynchronous} in this case or other
-two-stage algorithms~\cite{frommer1992h,bru1995parallel}
+for example an asynchronous version \cite{bru1995parallel} and convergence
+conditions \cite{bai1999block,bahi2000asynchronous} in this case or other
+two-stage algorithms~\cite{frommer1992h,bru1995parallel}.
 
 In \cite{huang1993krylov}, the authors proposed a parallel multisplitting
 algorithm in which all the tasks except one are devoted to solve a sub-block
 of the splitting and to send their local solution to the first task which is
 in charge to combine the vectors at each iteration. These vectors form a Krylov
-basis for which the first tasks minimize the error function over the basis to
+basis for which the first task minimizes the error function over the basis to
 increase the convergence, then the other tasks receive the update solution
 until convergence of the global system.
@@ -127,7 +127,11 @@
 of multisplitting algorithms that take benefit from multisplitting algorithms
 to solve large scale linear systems. Inner solvers could be based on scalar
 direct method with the LU method or scalar iterative one with GMRES.
-
+In~\cite{prace-multi}, the authors have proposed a parallel multisplitting
+algorithm in which large blocks are solved using a GMRES solver. The authors
+performed large scale experiments on up to 32,768 cores and concluded that an
+asynchronous multisplitting algorithm could be more efficient than traditional
+solvers on exascale architectures with hundreds of thousands of cores.
 
 %%%%%%%%%%%%%%%%%%%%%%%%
-- 
2.39.5
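
As a sketch of the scheme described in the paragraph on \cite{huang1993krylov} above, and assuming the standard multisplitting notation (the splittings A = M_\ell - N_\ell, the weighting matrices E_\ell, and the matrix S^k of local solutions below are generic symbols, not taken from the cited papers), the iteration and the minimization over the Krylov-like basis could be written as follows:

% Classical multisplitting iteration with L splittings A = M_\ell - N_\ell and
% nonnegative diagonal weighting matrices E_\ell summing to the identity
% (assumed notation, not reproduced from the cited papers):
\[
  x^{k+1} = \sum_{\ell=1}^{L} E_\ell \left( M_\ell^{-1} N_\ell x^{k} + M_\ell^{-1} b \right),
  \qquad \sum_{\ell=1}^{L} E_\ell = I .
\]
% Krylov-accelerated variant: the local solutions y_\ell^{k} are gathered as the
% columns of S^{k} = [ y_1^{k} \; \cdots \; y_L^{k} ] and the first task
% minimizes the residual over this basis before broadcasting the update:
\[
  \alpha^{k} = \arg\min_{\alpha} \left\| b - A S^{k} \alpha \right\|_2 ,
  \qquad x^{k+1} = S^{k} \alpha^{k} .
\]

In this form the combination step reduces to an L-dimensional least-squares problem, small enough for the single combining task mentioned in the text while the other tasks carry on with the sub-block solves.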