Skip to content

Instantly share code, notes, and snippets.

@MisterDA
Last active November 28, 2019 14:31
Show Gist options
  • Select an option

  • Save MisterDA/31d044c502fe0cea737c8758383ac8fe to your computer and use it in GitHub Desktop.

Select an option

Save MisterDA/31d044c502fe0cea737c8758383ac8fe to your computer and use it in GitHub Desktop.
MPRI 2.33.1 — Consensus in multi-agent systems
# Created by https://www.gitignore.io/api/latex
# Edit at https://www.gitignore.io/?templates=latex
### LaTeX ###
## Core latex/pdflatex auxiliary files:
*.aux
*.lof
*.log
*.lot
*.fls
*.out
*.toc
*.fmt
*.fot
*.cb
*.cb2
.*.lb
## Intermediate documents:
*.dvi
*.xdv
*-converted-to.*
# these rules might exclude image files for figures etc.
# *.ps
# *.eps
# *.pdf
## Generated if empty string is given at "Please type another file name for output:"
.pdf
## Bibliography auxiliary files (bibtex/biblatex/biber):
*.bbl
*.bcf
*.blg
*-blx.aux
*-blx.bib
*.run.xml
## Build tool auxiliary files:
*.fdb_latexmk
*.synctex
*.synctex(busy)
*.synctex.gz
*.synctex.gz(busy)
*.pdfsync
## Build tool directories for auxiliary files
# latexrun
latex.out/
## Auxiliary and intermediate files from other packages:
# algorithms
*.alg
*.loa
# achemso
acs-*.bib
# amsthm
*.thm
# beamer
*.nav
*.pre
*.snm
*.vrb
# changes
*.soc
# comment
*.cut
# cprotect
*.cpt
# elsarticle (documentclass of Elsevier journals)
*.spl
# endnotes
*.ent
# fixme
*.lox
# feynmf/feynmp
*.mf
*.mp
*.t[1-9]
*.t[1-9][0-9]
*.tfm
#(r)(e)ledmac/(r)(e)ledpar
*.end
*.?end
*.[1-9]
*.[1-9][0-9]
*.[1-9][0-9][0-9]
*.[1-9]R
*.[1-9][0-9]R
*.[1-9][0-9][0-9]R
*.eledsec[1-9]
*.eledsec[1-9]R
*.eledsec[1-9][0-9]
*.eledsec[1-9][0-9]R
*.eledsec[1-9][0-9][0-9]
*.eledsec[1-9][0-9][0-9]R
# glossaries
*.acn
*.acr
*.glg
*.glo
*.gls
*.glsdefs
# uncomment this for glossaries-extra (will ignore makeindex's style files!)
# *.ist
# gnuplottex
*-gnuplottex-*
# gregoriotex
*.gaux
*.gtex
# htlatex
*.4ct
*.4tc
*.idv
*.lg
*.trc
*.xref
# hyperref
*.brf
# knitr
*-concordance.tex
# TODO Comment the next line if you want to keep your tikz graphics files
*.tikz
*-tikzDictionary
# listings
*.lol
# luatexja-ruby
*.ltjruby
# makeidx
*.idx
*.ilg
*.ind
# minitoc
*.maf
*.mlf
*.mlt
*.mtc[0-9]*
*.slf[0-9]*
*.slt[0-9]*
*.stc[0-9]*
# minted
_minted*
*.pyg
# morewrites
*.mw
# nomencl
*.nlg
*.nlo
*.nls
# pax
*.pax
# pdfpcnotes
*.pdfpc
# sagetex
*.sagetex.sage
*.sagetex.py
*.sagetex.scmd
# scrwfile
*.wrt
# sympy
*.sout
*.sympy
sympy-plots-for-*.tex/
# pdfcomment
*.upa
*.upb
# pythontex
*.pytxcode
pythontex-files-*/
# tcolorbox
*.listing
# thmtools
*.loe
# TikZ & PGF
*.dpth
*.md5
*.auxlock
# todonotes
*.tdo
# vhistory
*.hst
*.ver
# easy-todo
*.lod
# xcolor
*.xcp
# xmpincl
*.xmpi
# xindy
*.xdy
# xypic precompiled matrices
*.xyc
# endfloat
*.ttt
*.fff
# Latexian
TSWLatexianTemp*
## Editors:
# WinEdt
*.bak
*.sav
# Texpad
.texpadtmp
# LyX
*.lyx~
# Kile
*.backup
# KBibTeX
*~[0-9]*
# auto folder when using emacs and auctex
./auto/*
*.el
# expex forward references with \gathertags
*-tags.tex
# standalone packages
*.sta
### LaTeX Patch ###
# glossaries
*.glstex
# End of https://www.gitignore.io/api/latex
# latexmk configuration for building the course notes (doc.tex).
$pdf_mode = 5; # XeLaTeX — mode 5 builds the PDF via xelatex (xdv -> xdvipdfmx)
# Extra file extensions removed by `latexmk -c` (biblatex/beamer auxiliaries).
$clean_ext = "bbl run.xml nav log snm";
# Viewer command launched by `latexmk -pv`.
$pdf_previewer = 'start evince';
# Same as running `latexmk -pvc`: watch the sources and rebuild on change.
$preview_continuous_mode = 1;
# Default build target when latexmk is invoked without a file argument.
@default_files = ('doc.tex');
@inproceedings{DBLP:conf/stacs/SantoroW89,
author = {Nicola Santoro and
Peter Widmayer},
title = {Time is Not a Healer},
booktitle = {{STACS}},
series = {Lecture Notes in Computer Science},
volume = {349},
pages = {304--313},
publisher = {Springer},
year = {1989}
}
@article{DBLP:journals/siamco/CaoMA08a,
author = {Ming Cao and
A. Stephen Morse and
Brian D. O. Anderson},
title = {Reaching a Consensus in a Dynamically Changing Environment: Convergence
Rates, Measurement Delays, and Asynchronous Events},
journal = {{SIAM} J. Control and Optimization},
volume = {47},
number = {2},
pages = {601--623},
year = {2008}
}
@article{DBLP:journals/tac/Moreau05,
author = {Luc Moreau},
title = {Stability of multiagent systems with time-dependent communication
links},
journal = {{IEEE} Trans. Automat. Contr.},
volume = {50},
number = {2},
pages = {169--182},
year = {2005}
}
@article{DBLP:journals/jacm/FischerLP85,
author = {Michael J. Fischer and
Nancy A. Lynch and
Mike Paterson},
title = {Impossibility of Distributed Consensus with One Faulty Process},
journal = {J. {ACM}},
volume = {32},
number = {2},
pages = {374--382},
year = {1985}
}
@article{DBLP:journals/cacm/Chazelle12,
author = {Bernard Chazelle},
title = {Natural algorithms and influence systems},
journal = {Commun. {ACM}},
volume = {55},
number = {12},
pages = {101--110},
year = {2012}
}
@book{DBLP:books/mk/Lynch96,
author = {Nancy A. Lynch},
title = {Distributed Algorithms},
publisher = {Morgan Kaufmann},
year = {1996}
}
@book{sternberg2010dynamical,
title={Dynamical systems},
author={Sternberg, Shlomo},
year={2010},
publisher={Courier Corporation}
}
@book{bertsekas1989parallel,
title={Parallel and distributed computation: numerical methods},
author={Bertsekas, Dimitri P and Tsitsiklis, John N},
volume={23},
year={1989},
publisher={Prentice hall Englewood Cliffs, NJ}
}
\documentclass[12pt,a4paper]{article}
\usepackage[includeheadfoot,margin=1cm]{geometry} % twocolumn,margin=1cm
\usepackage{fontspec}
\usepackage{polyglossia}
\setmainlanguage{english}
\setotherlanguage{french}
\usepackage{amssymb,amsfonts,amsthm}
\usepackage{mathtools}
\usepackage{mathbbol}
\usepackage{braket}
\usepackage{interval}
\usepackage{csquotes}
\usepackage{hyperxmp}
\usepackage{hyperref}
\usepackage{enumitem}
\usepackage{multicol}
\usepackage{tikz}
\usetikzlibrary{shapes,decorations,arrows,calc,arrows.meta,fit,positioning}
\usepackage{fancyhdr}
\usepackage[type={CC}, modifier={by-sa}, version={4.0}]{doclicense}
\usepackage[backend=biber, style=numeric]{biblatex}
\bibliography{doc.bib}
\setlist{nolistsep}
\theoremstyle{definition}\newtheorem{theorem}{Theorem}[section]
\theoremstyle{definition}\newtheorem{corollary}{Corollary}[theorem]
\theoremstyle{definition}\newtheorem{lemma}{Lemma}[theorem]
\theoremstyle{definition}\newtheorem{definition}{Definition}[section]
\theoremstyle{definition}\newtheorem{proposition}{Proposition}[section]
\theoremstyle{remark}\newtheorem*{remark}{Remark}
\DeclarePairedDelimiter\abs{\lvert}{\rvert}% Absolute Value
\DeclarePairedDelimiter\norm{\lVert}{\rVert}% Norm
\newcommand{\mli}[1]{\mathit{#1}}% Multi-letter identifier
\DeclareMathOperator{\ts}{ts}% timestamp
\DeclareMathOperator{\In}{In}% incoming neighbour set
\title{\href{https://wikimpri.dptinfo.ens-cachan.fr/doku.php?id=cours:c-2-33-1}{MPRI
2.33.1
--- Consensus in multi-agent systems}\footnote{based on the
course given by
\href{http://www.lix.polytechnique.fr/~charron/}{Bernadette
Charron-Bost}, 2019.}}
\author{Antonin Décimo}
\begin{document}
\maketitle
\section{Introduction}
\begin{enumerate}
\item synchronous multi-agent systems, dynamic topology. Round \(t
=\) instant \(t\).
\item anonymous + (a)-synchronous systems with fixed topology and
some (benign) failures.
\end{enumerate}
\section{Computing model with synchronized rounds}
\begin{definition}{Incoming neighbours} \\
An agent \(i\) has a set of incoming neighbours at round \(t\):
\(\forall i \, \forall t \quad \abs{\In(t)} \geq 1\).
\end{definition}
Assuming that there will be at most \(1\) crash, that we have a
completely asynchronous system, and that there is a condition to end
the receiving phase at round \(t\), and that each agent will never
block, then one can implement synchronized rounds.
We want to weaken the predicate on the hypothesis. Weak failures mean
that we must disallow weak condition on the order set. What is the
type of rounds one can construct with synchronous, fixed topology and
at most \(f\) crashes?
\begin{definition}{Dynamic Graph} \\
A dynamic graph \(\mathbb{G}\) is a sequence of graphs indexed by a
time instant.
\[\mathbb{G}={\left(\mathbb{G}(t)\right)}_{t \in {\mathbb{N}}^*}\]
We assume that \(\forall t \in \mathbb{N}^*, \mathbb{G}\) contains a
self-loop at each node.
\end{definition}
\begin{itemize}
\item a complete topology;
\item at most \(f\) failures;
\item \(\forall i, \abs{\In_i(t)} \geq n - f\).
\end{itemize}
\begin{tikzpicture}
\node (C0) {Initial global state \(C_0\)};
\node (C1) [right=of C0] {\(C_1\)};
\node (CX) [right=of C1] {\(j \in \In_i(t) \Leftrightarrow
\norm{x_j(t-1) - x_i(t-1)} \leq R\)};
\path[->] (C0) edge node[above] {\(\mathbb{G}(1)\)} (C1);
\path[->] (C1) edge node[above] {\(\mathbb{G}(2)\)} (CX);
\end{tikzpicture}
Where \(R\) is the incidence radius.
\begin{remark}
Synchronized rounds do not mean synchronous systems.
\end{remark}
\begin{definition}{Distributed algorithms (in this model)}
\begin{itemize}
\item an algorithm \(A_i\) for each agent \(i\);
\item \(Q_i^0 \subseteq Q_i\), \(\mathcal{\mathbb{A}}\).
\item
\begin{description}
\item[sending function] \(S_i : Q_i \dots\);
\item[transition function] \(T_i : Q_i \times \mathcal{M}^0, (s, m_1
\dots m_k \mapsto s')\).
\end{description}
The type of the function depends on the local knowledge.
\item a configuration \(i \to C_i(t)\), representing the state of
agent \(i\) at the end of round \(t\);\\
\(A={(A_i)}_{i \in V}\)\\
Applying the dynamic graph to a configuration yields a new
configuration: \(\mathbb{G}(t).C(t-1) = C(t)\).
\end{itemize}
\end{definition}
\begin{definition}{Execution of \(A\)} \\
Determined by the initial config \(C_0\) and a dynamic graph
\(\mathbb{G}\) which gives an infinite sequence of configurations.
\end{definition}
This model of execution implicitly assumes (synchronous) starts at
round \(1\).\\
Let \(C_0\) be an initial configuration and \(\mathcal{G}\) a set of
dynamic graphs. This produces a tree \(T\) with finite number of
branches but infinite depth. We can apply König's Lemma. Let \(T\)
be a tree of only finite forks. If \(T\) is infinite then \(T\)
contains an infinite number of branches.
\section{Directed and dynamic graphs}
\begin{definition}{Digraph} \(G=(V,E)\)
\begin{description}
\item[strongly connected] \(\forall u,v \in V, \exists \text{ path } u
\to v \wedge \exists \text{ path } v \to u\);
\item[root] \(\mli{Roots}(G) = \set{i \in V \mid \forall j \in V,
\exists \text{ path } i \to j}\);
\item[rooted] \(G\) is rooted iff \(\mli{Roots}(G) \neq \emptyset\);
\item[central root] \(\mli{CRoots(G)} = \set{i \in V \mid \forall j
\in V (i, j) \in E}\);
\item[product] \(G_1=(V,E_1), G_2=(V, E_2)\)\\
\(G_1 \circ G_2 = (V, \set{ij \mid \exists k \in V, ik \in E_1
\wedge kj \in E_2}) \neq G_2 \circ G_1\) (a priori).\\
With self-loops, \(E_1 \cup E_2 \subseteq E\).
\end{description}
\end{definition}
\begin{definition}{Dynamic Graph}
\begin{align*}
\mathbb{G}(t&:t') = \mathbb{G}(t) \circ \dots \circ
\mathbb{G}(t')\\
t &> t' = A(t) \dots A(t') \text{ left product of stochastic matrices}\\
t' &= t = \mathbb{G}(t)
\end{align*}
\(j \in \In_i(t:t')\), \(j\) was heard by \(i\) during \(\interval{t}{t'}\).
\end{definition}
\begin{definition}
Let \(\phi\) be a predicate on digraphs.
\begin{itemize}
\item \(\mathbb{G}\) is \emph{continuously} \(\phi\) if \(\forall t
\in \mathbb{N}^* \quad G(t) \models \phi\);
\item \(\mathbb{G}\) satisfies \(\phi\) with delay \(b \in
\mathbb{N}^*\) iff \(\forall t \in \mathbb{N}^* \quad G(t:t+b-1)
\models \phi\) (it then also satisfies \(\phi\) with any delay \(b' \geq b\));
\item \(\mathbb{G}\) satisfies \(\phi\) with bounded delay iff
\(\exists b \in \mathbb{N}^*\) such that \(\mathbb{G}\) satisfies
\(\phi\) with delay \(b\);
\item \(\mathbb{G}\) satisfies \(\phi\) with finite delay iff
\(\forall t \in \mathbb{N}^* \quad \exists b_t \quad
\mathbb{G}(t:t+b_t-1) \models \phi\).
\end{itemize}
\end{definition}
\section{Consensus Problems}
\begin{align*}
(A_i) &= A & i &\to X_i &\text{algorithm}\\
&\mathcal{V} & i &\to v_i \in \mathcal{V} &\text{values}
\end{align*}
\begin{definition}{Decision consensus}
\(X_i\) initialized to \(\bot \notin \mathcal{V}\). Every execution
of \(A\) satisfies:
\begin{description}
\item[termination] \(\forall i \in V \quad \exists t \quad X_i(t)
\neq \bot\)
\item[irrevocability] \(\forall i \in V \quad \forall t, t' \quad
X_i(t) \neq \bot \wedge X_i(t') \neq \bot \Rightarrow X_i(t) =
X_i(t')\)
\item[agreement] \(\forall i, j \in V^2 \quad \forall t, t' \quad
X_i(t) \neq \bot \wedge X_j(t') \neq \bot \Rightarrow X_i(t) =
X_j(t')\)
\item[validity] \(\forall i \in V \quad \forall t \quad X_i(t) \neq
\bot \Rightarrow \exists j \in V \quad X_i(t) = v_j\)
\end{description}
\end{definition}
\begin{definition}{Stabilizing consensus}
\(X_i\) initialized to \(v_i\)
\begin{description}
\item[stabilizing] \(\forall i \in V \quad (X_i(t))\) stationary
\item[agreement] \(\forall i,j \in V^2 \quad \overline{\lim}X_i =
\overline{\lim}X_j\)
\item[validity] \(\forall i \in V \quad \exists j \in V \quad
\overline{\lim}X_i=v_j\)
\end{description}
\end{definition}
\begin{definition}{Asymptotic (approximate) consensus}
\(\mathcal{V} = \mathbb{R}\)
\begin{description}
\item[convergence] \(\forall i \in V \quad \exists \lim
X_i(t)\)
\item[agreement] same as stabilizing consensus
\item[validity] \(\forall i \in V \quad \overline{\lim}X_i
\in \braket{v_1 \dots v_n}\)
\end{description}
\end{definition}
\(f:\mathcal{V}^n \rightarrow \mathcal{V}\)
\(f\)-constraint is somewhat related to the consensus problem.
\(f\)-\textbf{validity} \(\forall i \in V \quad \lim X_i=f(v_1 \dots
v_n)\)
\(f\) is the most frequent value.
\section{The Santoro \& Widmayer theorem}
\begin{theorem}
Let \(\mathcal{G}\) be a set of dynamic graphs such that
\begin{enumerate}
\item \(\forall i \in V \quad \forall \mathbb{G} \in \mathcal{G}
\quad \forall t \in \mathbb{N}^* \quad \exists G_i\) where \(i\)
is asynchronous such that \(\mathbb{H} \in \mathcal{G}\) is
defined by \(\mathbb{H}(s)=
\begin{cases}
\mathbb{G}(s) & 1 \leq s \leq t\\
G_i & \forall s > t
\end{cases}\)
\item \(\forall \mathbb{G}, \mathbb{G}' \in \mathcal{G}\) with
\(d(\mathbb{G}, \mathbb{G}') = \frac{1}{2^t}\) and \((i, j) \in
E(\mathbb{G}'(t))\),
\(\exists \mathbb{H} \in \mathcal{G}\) such that \(d(\mathbb{G},
\mathbb{H}) = \frac{1}{2^{t-1}}\) and \(E(\mathbb{H}(t)) = E(\mathbb{G}(t))
\cup \set{(i,j)}\)
\end{enumerate}
There is no algorithm solving decision consensus with
\(\mathcal{G}\) (even if \(\mathcal{V}=\set{1,0}\)).
\end{theorem}
\begin{proof}
Assume there is an algorithm \(A\) solving decision consensus.
\begin{lemma}
There is an initial bivalent configuration.
\end{lemma}
\begin{proof}
\end{proof}
\end{proof}
\begin{lemma}
If an agent \(i\) decides \(v\) at phase \(\phi_0\), then \(\forall
\phi \geq \phi_0 \abs{V(\phi)}>\frac{n}{2}\). Moreover \(\forall j
\in V(\phi), y_j(\phi)=v\).
\end{lemma}
\begin{proof}
Proof by induction on \(\phi \geq \phi_0\).
\begin{enumerate}
\item \(\phi = \phi_0\). A strict majority of agents \(k\) updates
both \(ts_k\) and \(y_k\) with \(\phi_0\) and \(v\) resp.\ at
round \(4\phi_0 - 2\).
\item Assume lemma holds at phases \(\phi, \dots, \phi_0\) and
consider the phase \(\phi+1\). Let \(V(\phi) = \set{j \in V \mid
\ts_j(\phi) \geq \phi_0}\).
\end{enumerate}
\end{proof}
\section{Randomized Consensus}
SN gives an impossibility result (FLP). But what if we add a random
oracle which gives some random info to each agent \(i\)? There are no
global random oracle. Existing proofs assume an \emph{oblivious
adversary}. We must prove termination: for each infinite execution,
each agent eventually decides. What is an execution?
\((C_0, \mathbb{G}, \mli{oracle})\) (initial configuration, dynamic
graph, oracle). The adversary is the dynamic graph. But why is it
oblivious? Because the adversary may use the random oracle. We are
not using this here, the dynamic graph and the output of the random
oracle are independent.
Let's consider the binary case: \(\mathcal{V}=\set{0, 1}\).
Agent \(i\): \(y_i\in \mathcal{V}\), init \(v_i\).
\(x_i \in \mathcal{V} \cup \set{\bot}\), init \(\bot\).
\(\mli{vote}_i \in \mathcal{V}\cup\set{?}\), init \(?\).
At \(t=2\phi - 1\). \(S_i\): send \(\braket{y_i}\) to all. \(T_i\): if
all received \(\braket{y} = v\) then \(\mli{vote}_i \coloneqq v\) else
\(\mli{vote}_i \coloneqq ?\).
At \(t = 2\phi\). \(S_i\): send \(\braket{\mli{vote}}_i\) to
all. \(T_i\): if received at least one \(v \neq ?\) then \(y_i = v \)
else \(y_i = \mli{random}\). If received only vote \(\neq ?\) then at
\(4(\phi+1)-2\), \(x_i\) is set to this vote. \(\mli{vote}_i = ?\).
\begin{theorem}
Any execution of BenO algo with a non-split dynamic graph satisfies
validity, agreement, irrevocability, and all agents eventually
decide with probability equal to 1 (Las Vegas algorithm).
\end{theorem}
\begin{proof}
Proof of termination. The dynamic graph is fixed to
\(\mathbb{G}(1), \dots, \mathbb{G}(t), \dots\). From the initial
configuration \(C_0\) we go to \(y_i(1) = y_j(1)\) with probability
\(\geq 1-p\), and to \(\neg(y_i(1) = y_j(1))\) with probability
\(\leq p\). At phase two we branch between \(y_i(2) = y_j(2)\) and
\(\neg(y_i(2) = y_j(2))\) with the same probabilities. We have
\(1 - p \geq \frac{1}{2^n}\).
\[\Pr{[\neg(y_i(1)=y_j(1)) \wedge \dots \wedge \neg(y_i(\phi) =
y_j(\phi))]} \leq p^\phi\]
\[\Pr{[\text{an agent never decides}]} = 0\]
\end{proof}
\section{Decisional Consensus with Byzantine Behaviours}
Complete network: \(\mathbb{G}(t) = K, \forall t\). At most \(f\) agents
are byzantine. An agent is byzantine (faulty) if it does not follow its
sending and/or transition functions at some point. This does not include
benign failures; here values are actually falsified. At that
point, the problem is to solve consensus with decision. Obviously, if
you do not modify the specifications, consensus is solvable. Why?
Agreement (all agents decide the same value). As soon as an agent is
byzantine, this spec is not solvable. We need to change the spec;
it's not evident to obtain something relevant.
Comportement byzantin: il peut avoir un comportement qui dévie
complètement de l'algorithme. Cadre des blockchains: sens très peu
défini qui ne correspond pas à cette définition.
\paragraph{Validity} Very problematic in general. Consider an
apparent weaker condition: \enquote{if all agents have the same
initial value \(v\), then \(v\) is the sole possible decision
value}. It's not so weak, in the sense that with
\(\mathcal{V}=\set{0,1}\), the two validity conditions are equal. In
this chapter, we'll just focus on the binary case. In the context of
byzantine failures, we'll use \enquote{if all \emph{non-faulty} agents
have the same initial value \(v\), then \(v\) is the sole possible
decision value \emph{for a non-faulty agent}}.
\paragraph{Agreement and irrevocability} The agreement property is
restricted to non-faulty processes: \enquote{if \(i\) and \(j\) are
non faulty agents that decide \(v\) and \(v'\) respectively, then
\(v=v'\)}.
\paragraph{Termination} Any non-faulty agent eventually makes a
decision.
\begin{theorem} Impossibility result established by Shostak, Pease,
and Lamport 1983 (maybe). There is no algorithm solving the
byzantine consensus with decision problem in a system (with a
complete network) of \(n\) agents and \(f\geq \frac{n}{3}\) agents
that may be faulty.
\end{theorem}
\begin{proof} Proof by Nancy Lynch.
There is a proof for the case \(n=3\) and \(f=1\), and then there is
a reduction to this case.
\begin{lemma}
\(n=3\) and \(f=1\). Suppose that there exists an algo \(A\)
which solves consensus and that tolerates a byzantine agent.
\(V=\set{i,j,k}\). \(A=(A_i,A_j,A_k)\). Consider
\(G=(V \cup V', E(G))\), \(V'=\set{i', j', k'}\). We have
duplicated the nodes.
% 19097
Define algorithm \(B\) with
\[B=(A_i, A_j, A_k, A''_i = B_{i'}, A''_j = B_{j'}, A''_k =
B_{k'})\]
The algorithm does not use identifiers. What about the runs of
\(B\)? What does it do? \emph{a priori \(B\) is not an algorithm
for consensus}.
\begin{tikzpicture}
\draw (0, 0) node[anchor=north]{\(i\)}
-- (0, 1) node[anchor=north]{\(j\)}
-- (0.5, 1.11) node[anchor=south]{\(k\)}
-- cycle;
\end{tikzpicture}
Let \(\beta\) be an execution of \(B\), with value \(0\) assigned
to \(i, j, k\) and value \(1\) assigned to \(i', j', k'\). The
agents \(i, j', k'\) are not byzantine. Consider the edge
\((i, j)\) in this execution. From the viewpoint of both \(i\)
and \(j\), \(\beta\) is similar (indistinguishable) to
\(\alpha_1\) the execution of \(A\) in which the node \(k\) is
byzantine, and \(\mli{init}(i) = \mli{init}(j) = 0\). Then,
\(\beta \sim_{i, j} \alpha_1\) and \(i\) and \(j\) eventually
decide \(0\) in \(\alpha_1\) and thus in \(\beta\).
\(\beta \sim_{i', j'}\) oups, missed that.
\end{lemma}
Consider \(n \leq 3f\). By contradiction, assume there exists an
algorithm \(A\) for \(n\) agents. It is sufficient to prove the
result for a complete graph. The set of agents can be partitioned:
\(V = V_1 \sqcup V_2 \sqcup V_3, \quad \abs{V_i} \leq f, i \in
\set{1, 2, 3}\). Let us consider a set of agents with identifiers
\(\set{1, 2, 3}\). Group all variables from subset
\(V_i, i \in \set{1, 2, 3}\) in a vector. Then, a message between
two agents is a matrix. Then, the sending function \(S_i(s[i_1, \dots,
i_k], j) = M[(s[u], v)]\), and \(T_i(s, [], [], []) = \dots\). The
resulting algorithm \(A'\) over \(\set{1, 2, 3}\) solves decision
consensus and tolerates \(1\) byzantine agent.
\end{proof}
\begin{remark} In blockchain, the consensus problem to solve is much
weaker, because it is not irrevocable. The theorem does not hold
for blockchain consensus.
\end{remark}
\section{Exponential Information Gathering algorithm}
Dolev \enquote{The Byzantine Generals Strike Again}. The variant
presented here is given by Fischer, Lynch and Merritt \enquote{Easy
Impossibility Proofs}.
In the context of byzantine behaviours, one needs to track the
information path. In the dynamic topology, we just consider the set
of values, and each agent increases round by round its knowledge by
considering its previous knowledge and the values it has just
received. This is not sufficient in the context of byzantine
behaviours.
In the general case of a complete topology \(\mathbb{G}(t) = G\),
suppose that we have two agents \(i\) and \(j\). The agent \(i\)
eventually receives a message from \(j\). We want to record the path
the message took between them. We add in the message a stack
containing the identifiers of all encountered nodes.
We make the hypothesis that:
\begin{enumerate}
\item each agent has an identifier;
\item identifiers are mutually known;
\item each agent knows the identifiers of its incoming neighbours at
each round.
\end{enumerate}
\begin{theorem}{Menger}
\[\mli{nconn}(G) = \min_{i \neq j} \max \set{k \mid \exists\, k
\text{ pairwise node-disjoint paths } i \rightarrow j}\]
Also true for edge-connectivity and directed graphs. Sadly, for
dynamic graphs, the \enquote{natural} extension does not hold
anymore (Kempe, Kleinberg, Kumar).
\end{theorem}
\begin{theorem}{Dolev}
Consensus with decision is solvable with \(f\) byzantine agents iff
\(n > 3f \wedge \mli{nodeconn}(G) > 2f\).
node-connectivity: maximal number of nodes that can be removed
without losing the connectivity of the graph.
\end{theorem}
\begin{proof}
Sketch. \(\mathbb{G}(t) = K\). \(\mli{nconn}(K)=n-1\). \(V=\set{1,
2, \dots, n}\). Each agent holds the directed tree
\(T=T_{n,f}\). \(T\) has \(f+1\) levels.
Agent \(i\). \(\mli{val}_i(\epsilon) := v_i\).
For round \(t\), \(1\leq t \leq f+1\).\\
\(S_i\): send \(\braket{x, v}\) if for all \(x\) such that \(i
\notin x, \mli{val}_i(x)=v\) and \(\abs{x}=t-1\).\\
\(T_i\): (if receive a unique \(\braket{x, v}\) from agent \(j\) with
\(\abs{x}=t-1\), \(v \in \mathcal{V}\) and \(j \notin x\) then
\(\mli{val}_i(x.j) := v\) else \(\mli{val}_i(x.j) := 0\))\\
if \(t = f+1\) then (if \(\abs{y}=f+1\) then \(\mli{newval}_i(y) :=
\mli{val}_i(y)\) else \(\mli{newval}_i(y) := \begin{cases} v &
\text{ if } v \text{ is a majority value amongst }
\mli{newval}_i(y.k) \\
0 & \text{ if such a value does not exist}\end{cases}\))\\
if \(t=f+1\) then \(x_i=\mli{newval}_i(\epsilon)\).
\begin{lemma}\label{dolev_lemma_1}
Consider an execution of the algorithm, in which the set of
byzantine agents is called \(B\), then \(\abs{B} \leq f\). If
\(i, j\) are non-byzantine (\(\notin B\)), then if \(x\) is a node
of \(T\) with \(\mli{last}(x) \notin B\). After round
\(\abs{x}, \mli{val}_i(x) = \mli{val}_j(x)\).
\end{lemma}
\begin{proof}
\(x=y.k\) with \(k\notin B\). At round \(\abs{x}\), the agent
\(k\) sends \(\braket{y, v}\) to all with \(v\in
\set{0,1}\). \(k \notin y\).
Both \(i\) and \(j\) label \(x=y.k\) with \(v\), so
\(\mli{val}_i(x)=\mli{val}_j(x)=v\).
\end{proof}
\begin{lemma}\label{dolev_lemma_2}
At the end of round \(f+1\), for any node \(x\) in \(T\) such that
\(\mli{last}(x) \notin B\), there exists \(v\in \mathcal{V}\) such
that
\(\forall i \notin B, \quad v = \mli{val}_i(x) =
\mli{newval}_i(x)\).
\end{lemma}
\begin{proof}
By backward induction on \(\abs{x}\).
\begin{enumerate}
\item if \(\abs{x}=f + 1\). By Lemma~\ref{dolev_lemma_1},
\(\exists v \in \mathcal{V} \quad \forall i \notin B \quad v =
\mli{val}_i(x)\) and if
\(i \notin B, \mli{newval}_i(x) = \mli{val}_i(x)\)
\item \(\abs{x} \leq f\). Apply Lemma~\ref{dolev_lemma_3}.
\(\mli{newval}_i(x.k)=\mli{val}_i(x.k)=\mli{val}_k(x)=\mli{val}_l(x)\)
(from Lemma~\ref{dolev_lemma_1}). Let
\(1 \leq \abs{x} \leq f\), \(x\) has a majority of children
\(x.k\) with \(k \notin B\). By Lemma~\ref{dolev_lemma_1},
\(\forall i, j \notin B \quad \mli{val}_i(x) = \mli{val}_j(x) =
v\). If \(k \notin B\), \(k\) sends \(\braket{x, v}\) to agent
\(i\) at round \(\abs{x}+1\) and if
\(i \notin B \quad \mli{val}_i(x.k) = v\). By induction
\(\forall k \notin B \quad \mli{newval}_i(x.k)=v\). By the code
of \(i\), if \(i \notin B\), \(\mli{newval}_i(x)=v\).
\end{enumerate}
\end{proof}
\begin{lemma}\label{dolev_lemma_3}
If \(1\leq \abs{x} \leq f\) then \(x\) has a strict majority of
children \(x.k\) with \(k \notin B\).
\end{lemma}
\begin{proof}
\(x\) has \(n-\abs{x} \geq n - f > 2f\) children.
\end{proof}
\end{proof}
\begin{proposition}
\(\alpha\) satisfies validity.
\end{proposition}
\begin{proof}
\(\forall i \notin B \quad \mli{val}_i(\epsilon)=v\).
oups, missed that.
\end{proof}
\section{Synchronization}
\subsection{(Asymptotic) Consensus, approximate agreement}
\(i \in V\), \(v_i \in \mathcal{V} = \mathbb{R}^{d}\) (here \(d=1\)),
\(x_i \in \mathcal{V} = \mathbb{R}^{d}\). \(x_i(t)\), \(x_i(0)=v_i\).
\begin{description}
\item[convergence] \(\forall i \in V, \quad \lim x_i(t) = x^*_i\)
\item[agreement] \(\forall i, j \in V^2 \quad x^*_i = x^*_j\)
\item[validity] \(\forall i \in V \quad x^*_i \in \braket{v_k, k \in V}\)
\end{description}
\(x_i(t) \in \braket{v_k, k \in V} \forall t\)\\
\(x_i(t) \in \braket{x_k(t-1), k \in V}\)\\
\(x_i(t) \in \braket{x_k(t-1), k \in \In_i(t)}\).
The update rule \begin{equation}x_i(t) = \sum_{k \in
\In_i(t)}A_{ik}(t)x_k(t-1)\end{equation}. We have for the weights
\(A_{ik}(t) \geq 0\) and \(\sum_{k \in \In_i(t)}A_{ik}(t) = 1\).
Those algorithms are \enquote{memoryless}, which makes them poorer
candidates for modelling synchronization processes.
The matrix of all weights for all agents is denoted
\(A(t)={(A_{ik_j}(t))}_{i \text{ agent }, k_j \in \In_i(t)}\), and is
line-stochastic. There is a correspondence between \(G(A(t))\) the
graph induced by \(A\) and \({\mathbb{G}(t)}^{-1}\) (graph with
reversed edges). We call \enquote{influence graph} \(G(A(t))\) and
the \enquote{communication graph} \({\mathbb{G}(t)}^{-1}\). In
general we want \(G(A(t)) = {\mathbb{G}(t)}^{-1}\).
As \(i \in \In_i(t)\), we always have that \(A_{ii}(t) > 0\).
\subsection{The Equal Neighbour (EN) algorithm}
\[A_{ik}(t) = \frac{1}{\abs{\In_i(t)}}\]
An agent averages all the values it receives.
\subsection{The Fixed Weight (FW) algorithm}
\[\forall i \in V \quad \exists q_i \in \mathbb{N}^* \quad \forall t
\in \mathbb{N}^* \quad q_i \geq \abs{\In_i(t)}\]
Assume that the agent \(i\) knows \(q_i\) (the collection is a
parameter of the algorithm).
\[A_{ik}(t) =
\begin{cases}
\frac{1}{q_i} & \text{ if } k \neq i\\
1-\frac{\abs{\In_i(t)} - 1}{q_i} & \text{ if } k = i
\end{cases}
\qquad k \in \In_i(t)\]
Note that \(0 < 1-\frac{\abs{\In_i(t)} - 1}{q_i} \leq 1\).
Those algorithms are nice because in the case of symmetric
bidirectional graphs, Perron vectors are easy to compute. The
starting point is the Perron-Frobenius theorem.
\subsection{Averaging Algorithm}
An algorithm that uses the update rule 1 is called \emph{averaging
algorithm}. An averaging algorithm \(A\) is called \(\alpha\)-safe
(with \(\alpha \in \interval[open left]{0}{1}\)) if
\(\forall t, \forall i \in V, \forall k \in V \quad A_{ik}(t) \in
\set{0} \cup \interval{\alpha}{1}\).
EN is \(\frac{1}{n}\)-safe. FW is \(\frac{1}{q}\)-safe with \(q =
\max\set{q_i | i \in V}\).
Intuitively, \(\alpha\)-safeness means that the values won't go too
near the local convex hull.
\begin{proposition}
If the algorithm \(A\) is an \(\alpha\)-safe averaging algorithm,
then
\[\forall i \in V, \quad \forall t \in \mathbb{N}^* \quad \alpha
M_i(t-1) + (1 - \alpha) m_i(t-1) \leq x_i(t) \leq \alpha
m_i(t-1)+(1-\alpha)M_i(t-1)\] with \(m_i(t-1) = \min\set{x_j(t-1)
| j \in \In_i(t)}\) and \(M_i(t-1)=\max\set{}\).
\end{proposition}
\begin{proof}
\begin{align*}
x_i(t) &= \sum^n_{k=1} A_{ik}(t) x_k(t-1) \\
&= A_{ii} + M_i + \sum_{k \neq i} A_{ik} x_k(t-1) \\
&\geq A_{i{i}^+} M_i + m_i \sum_{k \neq {i}^+} A_{ik} \\
&= A_{ii} + (M_i - m_i) + m_i \\
&\geq \alpha M_i + (1-\alpha) m_i
\end{align*}
\end{proof}
Beware of Bernard Chazelle.
\begin{proposition}
If \(A\) is an algo such that \(\forall i \in V, \forall t \in
\mathbb{N}\) holds proposition 1, then \(A\) is an averaging
algorithm (obvious) that is \(\frac{\alpha}{n}\)-safe.
\end{proposition}
\begin{proof}
\[S^\alpha_n = \set{(\alpha_1, \dots, \alpha_n) \in
\interval{\frac{\alpha}{n}}{1}}\]
Note that
\(S^\alpha_n=(\frac{\alpha}{n}, \dots, \frac{\alpha}{n})+(1-\alpha)
S^0_n\). Let \(\alpha_i \in \interval{\frac{\alpha}{n}}{1}\), and
\(\beta_i=\alpha_i-\frac{\alpha}{n} \geq 0\) so the above equality
holds.
\[x=(x_1, \dots, x_n) \in \mathbb{R}^n, \quad x_1 \leq \dots \leq x_n\]
\[S^\alpha_n x = \set{\alpha = \alpha_1 x_1 + \cdots + \alpha_n x_n
| (\alpha_1, \dots, \alpha_n) \in S^\alpha_n}\]
\[a\in S^\alpha_n x \]
\[\bar{x} = \frac{x_1+ \cdots +x_n}{n}\]
\[a=\alpha \bar{x} + (1-\alpha)(\alpha_1x_1 + \cdots + \alpha_n
x_n), \quad \alpha_i \in S^0_n \quad \geq \alpha \bar{x} + (1-\alpha)
x_1\]
\[S^\alpha_n x = \interval{\alpha \bar{x} + (1-\alpha)x_1}{\alpha
\bar{x} + (1-\alpha)x_n} \supseteq \interval{(1-\alpha)x_1 +
\alpha x_n}{\alpha x_1 + (1-\alpha)x_n}\]
So every \(y \in \interval{(1-\alpha)x_1 + \alpha x
_n}{(1-\alpha)x_n + \alpha x_1}\) is \(y \in S^\alpha_n x\).
\end{proof}
\[x(t)=\begin{bmatrix}x_1(t)\\x_n(t)\end{bmatrix}=A(t)\begin{bmatrix}x_1(t-1)\\ x_n(t-1)\end{bmatrix}\]
\[x(t) = A(t) x(t-1) = A(t)A(t-1)x(t-2) = {\underbrace{A(t) \dots
A(1)}_{\mathclap{A(1:t)}}} x(0)\]
\[\lim_{t\to +\infty} A(1:t) = \mathbb{1} \Pi^\intercal\]
\begin{itemize}
\item The sum of each line of a line-stochastic matrix equals 1.
\item The set of all stochastic matrices is compact.
\item If \(A, B\) are stochastics then \(AB\) is stochastic.
\item The limit, if it exists, is stochastic.
\item For every stochastic matrix, \(A\mathbb{1}=\mathbb{1}\).
\item The spectral ray \(\rho(A)\leq 1\), with
\(\rho(A)=\max\set{\abs{\lambda} | \lambda \in Sp(A)}\).
\end{itemize}
\section{Perron-Frobenius}
\begin{theorem}{Perron-Frobenius}
A very general result in convex analysis.
Let \(A\) be a non-negative matrix in \(\mathbb{R}^{n\times n}\)
that is irreducible (\(G(A)\) is strongly connected).
\begin{itemize}
\item \(\rho(A) = \max_{\lambda \in \mli{Sp}(A)}\abs{\lambda}\) is
an eigenvalue if \(A\)
\item \(\rho(A)\) has geometric and algebraic multiplicity one.\\
There exists an eigenvector associated to \(\rho(A)\) whose
coefficients are all positive.
\item If \(A\) is primitive (gcd of cycles length in \(G(A)\) is
\(1\)) then every eigenvalue \(\lambda\) of \(A\) other than
\(\rho(A)\) satisfies \(\abs{\lambda} < \rho(A)\).
\end{itemize}
If \(A\) is a stochastic matrix? Then \(A \mathbb{1} = \mathbb{1}\)
and \(\rho(A) = 1\).
\end{theorem}
\(A^\intercal\) is not stochastic.
\(\mli{Sp}(A) = \mli{Sp}(A^\intercal)\).
\(G(A^\intercal)={G(A)}^{-1}\). \(1\) is an eigenvalue of
\(A^\intercal\) of multiplicity~\(1\).
The Perron vector \(\Pi(A)\) of \(A\) is the unique positive
eigenvector of \(A^\intercal\) such that
\(A^\intercal \Pi(A) = \Pi(A)\) with \(\sum \Pi_i = 1\).
\[\forall i \in V = [n], \quad \sum_{k=1}^n \Pi_k A_{ik} = \Pi_i\]
For asymptotic consensus:
\begin{align*}
x(t) &= A(1:t) x(0) \\
x(t) &= A^t x(0) \\
\lim_{t\to +\infty} A(1:t) &= A(1:\infty) =
\begin{pmatrix}
\Pi_1 & \cdots & \Pi_n \\
\Pi_1 & \cdots & \Pi_n \\
\vdots \\
\Pi_1 & \cdots & \Pi_n \\
\end{pmatrix}
= \mathbb{1} \Pi^\intercal
\end{align*}
\begin{align*}
x\in \mathbb{R}^n &= \Delta \oplus \ker\Pi^\intercal \\
&= \mathbb{R}\mathbb{1} \oplus \Delta^{\bot_\Pi} \\
x &= a \mathbb{1} + y
\end{align*}
\begin{align*}
x(0) &= a\mathbb{1}+y(0) \\
x(1) &= a\mathbb{1}+Ay(0) \\
\vdots \\
x(t) &= a\mathbb{1}+A^{t} y(0)
\end{align*}
Cao, Morse, Anderson; Moreau
\(A\) stochastic matrix.
\begin{align*}
\lambda(A) &= 1 - \min_{i_1, i_2} \sum^n_{k=1} \min(A_{i_1 k}, A_{i_2
k}) \in \interval{0}{1} \\
\delta(A) &=\max_{k, i_1, i_2} \abs{A_{i_1 k} - A_{i_2 k}} \in
\interval{0}{1}
\end{align*}
\enquote{Dobrushin coefficient}.
\begin{align*}
N(A) &= \sup_{x\notin \Delta} \frac{N(Ax)}{N(x)} =
\sup_{N(x)=1}N(Ax) = \max_{N(x)=1} N(Ax) \\
N(x) &= \max_i (x_i) - \min_i(x_i) \\
N(A) &= \lambda(A)
\end{align*}
\begin{lemma}
There exists \(S \subseteq [n]\) such that \(N(A)=N(A \rho_S)\).
\[\rho_S = \sum_{i\in S}\rho_i\]
\end{lemma}
\begin{proof}
Let \(x\) st \(N(x)=1\) and \(N(A x) = N(A)\).
too much stuff.
\end{proof}
\begin{proposition}
For any stochastic matrix \(A\), \(N(A) = \lambda(A) \geq \delta(A)\).
\end{proposition}
\begin{proof}
\begin{itemize}
\item
\begin{align*}
N(A) = N(A \rho_S) = f_{i_2}-f_{i_1} &= \sum_{j \in S} A_{i_2 j} -
\sum_{j \in S} A_{i_1 j} \\
&= 1 - \sum_{j\notin S} A_{i_2 j} - \sum_{j \in S} A_{i_1 j} \\
&\geq 1 - \sum^n_{j=1} \min(A_{i_1 j}, A_{i_2 j})
\end{align*}
missing something.
\item \[\tilde{S} = \set{j \in [n] | A_{i_2 j} \geq A_{i_1 j}}\]
missing something.
\end{itemize}
\end{proof}
\begin{proposition}
Let \(A\) be a stochastic matrix with a positive column.
\[N(A) \leq 1 - \alpha_j < 1\]
\[N(A) = 1 - \min_{i_1, i_2} \sum^n_{k=1} \min(A_{i_1 k}, A_{i_2
k}) \leq 1 - \min_{i_1, i_2} \min(A_{i_1 j}, A_{i_2 j}) = 1 - \alpha_j\]
\end{proposition}
\begin{proposition}
\(x(t)=A(1:t)x(0)\) et il existe \({(t_k)}_{k\in \mathbb{N}}\)
croissante avec \(t_0 = 0\) et il existe
\(\alpha \in \interval[open right]{0}{1}\) tels que
\(N(A(t_k:t_{k+1}-1)) \leq 1 - \alpha\). Alors \((x(t))\) est une
suite convergente de limite \(x^* \in \mathbb{R}\mathbb{1}\).
\end{proposition}
\section{Consensus stabilisant}
\(A(1), \dots, A(t), \dots\) suite de matrices stochastiques. Soit
\(t_0 \in \mathbb{N}^*, j \in V, \quad S_j(t)=\set{i \in V |
A_{ij}(t_0:t)>0}\). Cela revient à considérer la \(j\)-ème colonne
de la matrice cumulative \(A(t_0:t)\). En termes distribués, cela
signifie que \(i\) a été atteint par une information provenant de
\(j\) dans \(\interval{t_0}{t}\) (relation talk-to).
\(\alpha_j(t)=\min\set{A_{ij}(t_0,t) | i \in S_j(t)}\).
raté un truc.
\begin{lemma} \(j \in S_j(t_0)\) car \(A_{j j}(t_0) > 0\) \end{lemma}
\begin{lemma} \(S_j(t) \subseteq S_j(t+1)\) \end{lemma}
\begin{lemma}\label{thm_cma_3} \(S_j(t)=S_j(t+1) \Leftrightarrow S_j(t)\) n'a pas d'arc
sortant dans \(\mathbb{G}(t+1)\).
\end{lemma}
\begin{proof}
\begin{align*}
S_j(t) \subsetneq S_j(t+1) &\Leftrightarrow \exists i \in S_j(t+1)
\setminus S_j(t) \\
&\Leftrightarrow \exists
i \begin{cases}
A_{i j}(t_0:t) = 0 \\
\exists k \in S_j(t) \quad A_{i
k}(t+1) > 0
\end{cases} \\
&\Leftrightarrow
\begin{cases}
\exists k \in S_j(t), \quad \exists
i \notin S_j(t) \\
A_{i k}(t+1) > 0
\end{cases}
\end{align*}
\[\sum^n_{k=1} A_{i k}(t+1) A_{k j}(t_0:t) = \sum_{k\in S_j(t)}A_{i
k}(t+1) A_{k j}(t_0:t)\]
\end{proof}
\begin{lemma} \(\alpha_j(t+1) \geq \alpha \alpha_j(t)\) \end{lemma}
\begin{proof}
\begin{align*}
A_{i j}(t_0:t+1) &= \sum^n_{k=1} A_{i k}(t+1) A_{k j}(t_0:t) \\
&= \sum_{k \in S_j(t)} A_{i k}(t+1) A_{k j}(t_0:t) \\
&\geq {\underbrace{A_{i l}(t+1)}_{\geq \alpha}} {\underbrace{A_{l j}(t_0:t)}_{\geq \alpha_j(t)}}
\end{align*}
\[\exists l \in S_j(t) \quad (l, i)\in E(\mathbb{G}(t+1))\]
\end{proof}
\begin{theorem}{\cite{DBLP:journals/siamco/CaoMA08a}} Tout algorithme de
moyenne qui est \(\alpha\)-sûr résout le consensus asymptotique
si le graphe dynamique est continuement enraciné.
\end{theorem}
\begin{proof}
\begin{align*}
N(x(t)) &\leq N(A(1:t)) N(x(0)) \\
&\leq {\underbrace{
{(1-\alpha^{{(n-1)}^2})}^{\frac{t}{{(n-1)}^2}}}_{0}}
N(x(0))
\end{align*}
\end{proof}
\begin{lemma} \(N(A(t_0:t_0+{(n-1)}^{2}-1)) \leq 1 -
\alpha^{{(n-1)}^2}\) \end{lemma}
\begin{proof}
On se place à \(t_0\). Le graphe est enraciné, on choisit une racine
\(j_0\). Ou bien \(S_j(t_0) = V, j=j(t_{0+1}), j \in S_j(t_0)\) donc
toute la colonne est strictement positive, ou il y a des trous, et
il y a un élément à l'extérieur de \(S_j(t_0)\) qui correspond à un
trou. Comme le graphe est enraciné, \(S_j(t_0)\) a un voisin
sortant. D'après le lemme~\ref{thm_cma_3}, \(A_{i j}(t_0:t_{0+1}) >
0\). Au bout de \(n^2\) fois, il y a une colonne positive.
\end{proof}
\begin{theorem}{\cite{DBLP:journals/siamco/CaoMA08a}} Tout algorithme de
moyenne qui est \(\alpha\)-sûr résout le consensus asymptotique
si le graphe dynamique est enraciné avec délai \(B\).
\end{theorem}
\[x_i(t+1) = \sum^n_{k=1} A_{i k}(t+1)x_k(t)\]
Ça reste toujours vrai en multipliant par \(B\), avec des tailles de
paquets de \(B{(n-1)}^2\). Donc ce théorème est vrai dans
\(\bigcup_{B \in \mathbb{N}^*}\mathcal{G}_{B \text{ enraciné}}\).
\begin{definition}
\(\mathcal{G}\) de type finiment engendré (on peut le voir comme
ensemble de mots infinis dont les symboles de l'alphabet sont tous
les graphes possibles \(G_1, \dots, G_n\) sur un ensemble de nœuds
\(V\)).
\end{definition}
\begin{corollary}
Un algorithme de moyenne \(\alpha\)-sûr résout le consensus
asymptotique dans un modèle de réseau finiment engendré ssi tous ses
générateurs sont enracinés.
\end{corollary}
Supposons que l'on a une chaîne. Le th dit que l'on peut résoudre le
consensus asymptotique dessus. Le nœud de départ ne reçoit pas
d'information du nœud d'arrivée, or le consensus est possible. Que se
passe-t-il? En fait le nœud va utiliser sa position stratégique dans
le réseau pour devenir le maître \enquote{physique} et va, grâce à un
algorithme de moyenne, imposer sa valeur à tous. En faisant la moyenne
à chaque itération des valeurs reçues, les nœuds convergent. Le
théorème c'est cette idée, plus le dynamisme.
Malheureusement, le temps de convergence est exponentiel.
\begin{align*}
N(x(t)) &< \varepsilon \\
\frac{t}{n^2}\log(1-\alpha^{n^2}) &< \log \varepsilon \\
\Leftarrow \frac{t}{n^2}\alpha^{n^2} > \log(\frac{1}{\varepsilon})
&\Leftarrow t > n^2 \alpha^{-n^2}\log(\frac{1}{\varepsilon})
\end{align*}
Dans le cas de Equal Neighbour, on est à
\(n^{n^2+2}\log(\frac{1}{\varepsilon})\). Sur le graphe papillon, EN
converge en \(\Omega(3^n)\). Si le graphe est bidirectionnel, on peut
arriver avec EN à un temps de convergence en \(O(n^3)\).
\begin{lemma}
Si \(S_j(t)=S_j(t+1)\) (\(S_j(t)\) n'a pas d'arc sortant dans le
graphe suivant) et si cet ensemble n'a pas d'arc entrant dans
\(\mathbb{G}(t+1)\) alors \(\alpha_j(t)=\alpha_j(t+1)\).
\end{lemma}
\begin{theorem}{\cite{DBLP:journals/tac/Moreau05}}
Tout algorithme de moyenne \(\alpha\)-sûr résout le consensus
asymptotique si le graphe dynamique est fortement connexe avec délai
fini et continuement bidirectionnel (dans chaque composante
ultimement fortement connexe).
\end{theorem}
Ce théorème n'est pas du tout un théorème de résolubilité. La
modélisation à base d'algorithmes de moyenne très simples explique les
phénomènes de synchronisation, à partir du moment que les interactions
sont bidirectionnelles.
\begin{proof}
Soit \(i \in S_j(t) = S_j(t+1)\).
\begin{align*}
A_{i j}(t_0:t+1) &= \sum^n_{k=1}A_{i k}(t+1)A_{k j}(t_0:t)\\
&= \sum_{k \in S_j(t)} A_{i k}(t+1)
{\underbrace{A_{k j}(t_0:t)}_{\geq \alpha_j(t)}} \\
&\geq \alpha_j(t) \sum_{k \in S_j(t)} A_{i
k}(t+1) \\
&= \alpha_j(t) {\underbrace{\left( \sum^n_{k=1} A_{i k}(t+1)\right)}_{=1}}
\end{align*}
À \(t_0\),
\(\exists t_1 \geq t_0 \quad N(A(t_0:t_1)) \leq
1-\alpha^{n^2}\). Soit \(\varepsilon > 0, \quad \exists l \in
\mathbb{N}^* \quad {(1-\alpha^{n^2})}^l < \varepsilon\).
\[\theta_1=1 \rightsquigarrow
\begin{aligned}
\exists \theta_2 > \theta_1 &\quad N(A(\theta_1:\theta_{2}-1))
< 1-\alpha^{n^2} \\
\exists \theta_3 > \theta_2 &\quad N(A(\theta_2:\theta_{3}-1))
< 1-\alpha^{n^2}
\end{aligned} \]
\[ \theta_1, \dots, \theta_l \quad N(A(1:\theta_l)) <
{(1-\alpha^{n^2})}^l < \varepsilon \]
\end{proof}
\begin{definition} Un graphe \(G\) est semi-simple ssi toute
composante connexe de \(G\) est fortement connexe. Par exemple, les
graphes bidirectionnels et les graphes eulériens.
\end{definition}
Note. \(\mathbb{G}(1), \dots, \mathbb{G}(t), \dots\). \(G_\infty = (V,
E_\infty) \quad V(C_i)=V_i\).\\
Note. Les deux théorèmes ne sont valides qu'avec des autoboucles.\\
Question. Le théorème de Moreau est-il valide sans bidirectionnalité?\\
\begin{multicols}{2}
\begin{tikzpicture}
\def \n {3}
\def \radius {1cm}
\def \margin {8} % margin in angles, depends on the radius
\foreach \s in {1,...,\n}
{
\node[draw, circle] at ({360/\n * (\s - 1)}:\radius) {$\s$};
\draw[->, >=latex] ({360/\n * (\s - 1)+\margin}:\radius)
arc ({360/\n * (\s - 1)+\margin}:{360/\n * (\s)-\margin}:\radius);
}
\end{tikzpicture}
\[
\begin{pmatrix}
\sum\varepsilon_k\\
\sum\varepsilon_k\\
1-\sum\varepsilon_k
\end{pmatrix}
\rightarrow
\begin{pmatrix}
< \frac{1}{2}\\
< \frac{1}{2}\\
> \frac{1}{2}
\end{pmatrix}
\]
\end{multicols}
\[
\begin{pmatrix} 0 \\ 0 \\ 1 \end{pmatrix}
\rightarrow^A_{t_{0}+1} \rightarrow \dots \rightarrow
\begin{pmatrix} \varepsilon_1+\varepsilon_2\\
\varepsilon_1+\varepsilon_2+\varepsilon_3\\
\varepsilon_1-\varepsilon_2-\varepsilon_3
\end{pmatrix}
\]
\[A_{i j}(t) = \begin{cases}
\frac{1}{q_i} & \text{si } j \in N^+_i(t),\ i \neq j
\quad (\text{avec } q_i \geq d_i(t)) \\
1 - \frac{d_i(t)-1}{q_i} & \text{si } i = j \\
0 & \text{sinon}
\end{cases}
\]
\printbibliography{}
\nocite{*}
\end{document}
% Local Variables:
% ispell-local-dictionary: "british"
% TeX-master: t
% End:
# Build the document. latexmk finds the main .tex file and reruns
# (pdf)latex/biber as many times as needed (recipe lines must be
# tab-indented; restored here).
all:
	latexmk

# Full clean: -C removes all generated files including the final PDF
# (-CA is the legacy spelling of -C).  Also remove Emacs preview
# regions, stray logs and AUCTeX's auto/ directory.
clean:
	latexmk -CA
	$(RM) -r _region_.* *.log auto

# Neither target creates a file of its own name.
.PHONY: all clean
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment