\documentclass[a4paper,12pt]{article}
\usepackage[utf8]{inputenc}
\usepackage[english]{babel}
\usepackage{graphicx}
\usepackage{amsmath}
\usepackage{hyperref}
\hypersetup{
colorlinks=false,
pdfborder={0 0 0},
}
\title{snakes}
\author{Alessandro Gentilini\thanks{alessandro.gentilini@gmail.com}}
\DeclareMathOperator*{\argmax}{arg\,max}
\DeclareMathOperator*{\argmin}{arg\,min}
\begin{document}
\maketitle
\begin{abstract}
Notes on maximum a posteriori inference for the snakes model.
\end{abstract}
\section{Inference for Snakes}
The likelihood is formula (17.3)
\begin{equation}
Pr(\mathbf{x}\vert\mathbf{W})\propto \prod_{n=1}^N \exp{[-(\text{dist}[\mathbf{x},\mathbf{w}_n])^2]}
\end{equation}
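As a computational aside (not part of the original derivation): assuming that $\text{dist}[\mathbf{x},\mathbf{w}_n]$ denotes the distance from the landmark $\mathbf{w}_n$ to the nearest edge in the image $\mathbf{x}$, this term can be evaluated by reading a precomputed distance transform of an edge map at the landmark positions. A minimal Python/OpenCV sketch (the Canny thresholds and the array layout of the landmarks are illustrative choices, not prescribed by the text):
\begin{verbatim}
import cv2
import numpy as np

def edge_distance_map(gray):
    # Distance from every pixel to the nearest Canny edge
    # (assumed meaning of dist[x, w_n]).
    edges = cv2.Canny(gray, 50, 150)        # edge pixels = 255
    inverted = cv2.bitwise_not(edges)       # edge pixels = 0
    return cv2.distanceTransform(inverted, cv2.DIST_L2, 3)

def log_likelihood(dist_map, W):
    # Sum of -(dist[x, w_n])^2 over the landmarks W,
    # an N x 2 integer array of (row, col) positions.
    d = dist_map[W[:, 0], W[:, 1]]
    return float(np.sum(-d ** 2))
\end{verbatim}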
The prior is formula (17.4)
\begin{equation}
Pr(\mathbf{W})\propto \prod_{n=1}^N \exp{[\alpha \text{space}[\mathbf{w},n]+\beta \text{curve}[\mathbf{w},n]]}
\end{equation}
By Bayes' rule, and since the evidence $Pr(\mathbf{x})$ does not depend on $\mathbf{W}$, the maximum a posteriori inference is
\begin{equation}
\argmax_{\mathbf{W}}[Pr(\mathbf{W}\vert\mathbf{x})] =
\argmax_{\mathbf{W}}[Pr(\mathbf{x}\vert\mathbf{W})Pr(\mathbf{W})]
\end{equation}
Since $\log$ is monotonically increasing I can write
\begin{equation}
\begin{split}
\argmax_{\mathbf{W}}[Pr(\mathbf{W}\vert\mathbf{x})] & =
\argmax_{\mathbf{W}}[\log{[Pr(\mathbf{W}\vert\mathbf{x})]}] \\
& = \argmax_{\mathbf{W}}[\log{[Pr(\mathbf{x}\vert\mathbf{W})Pr(\mathbf{W})]}] \\
& = \argmax_{\mathbf{W}}[\log{[Pr(\mathbf{x}\vert\mathbf{W})]}+\log{[Pr(\mathbf{W})]}]
\end{split}
\end{equation}
As for the likelihood, I can write:
\begin{equation}
\begin{split}
\log{[Pr(\mathbf{x}\vert\mathbf{W})]} & \propto \log{[\prod_{n=1}^N \exp{[-(\text{dist}[\mathbf{x},\mathbf{w}_n])^2]}]} \\
& =\sum_{n=1}^N \log{[\exp{[-(\text{dist}[\mathbf{x},\mathbf{w}_n])^2]}]} \\
& =\sum_{n=1}^N -(\text{dist}[\mathbf{x},\mathbf{w}_n])^2
\end{split}
\end{equation}
And as for the prior, I can write:
\begin{equation}
\begin{split}
\log{[Pr(\mathbf{W})]} & \propto \log{[\prod_{n=1}^N \exp{[\alpha \text{space}[\mathbf{w},n]+\beta \text{curve}[\mathbf{w},n]]}]} \\
& = \sum_{n=1}^N \log{[\exp{[\alpha \text{space}[\mathbf{w},n]+\beta \text{curve}[\mathbf{w},n]]}]} \\
& = \sum_{n=1}^N \alpha \text{space}[\mathbf{w},n]+\beta \text{curve}[\mathbf{w},n]
\end{split}
\end{equation}
with $\alpha\geq0$ and $\beta\geq0$.\\
Finally, the inference is:
\begin{equation}
\begin{split}
\argmax_{\mathbf{W}}[\log{[Pr(\mathbf{W}\vert\mathbf{x})]}] = \argmax_{\mathbf{W}}[\sum_{n=1}^N -(\text{dist}[\mathbf{x},\mathbf{w}_n])^2+\alpha \text{space}[\mathbf{w},n]+\beta \text{curve}[\mathbf{w},n]]
\end{split}
\end{equation}
The spacing term is formula (17.5)
\begin{equation}
\text{space}[\mathbf{w},n]= - \left(\frac{\sum_{m=1}^N \sqrt{\left( \mathbf{w}_{m}-\mathbf{w}_{m-1} \right)^T\left( \mathbf{w}_{m}-\mathbf{w}_{m-1} \right)}}{N} - \sqrt{\left( \mathbf{w}_{n}-\mathbf{w}_{n-1} \right)^T\left( \mathbf{w}_{n}-\mathbf{w}_{n-1} \right)}\right)^2
\end{equation}
and it is always nonpositive because it is the negative of a squared quantity; it attains its maximum value of zero when the $n$-th segment length equals the mean segment length.\\
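For illustration only (not part of the original text), here is a minimal Python sketch of this spacing term, assuming a closed contour so that the index $n-1$ wraps around and that the landmarks are stored as an $N\times 2$ array:
\begin{verbatim}
import numpy as np

def space_term(W, n):
    # space[w, n]: negative squared deviation of the n-th segment
    # length from the mean segment length.  W is an N x 2 array of
    # landmark positions; a closed contour (wrap-around) is assumed.
    P = W.astype(float)
    diffs = P - np.roll(P, 1, axis=0)            # row m holds w_m - w_{m-1}
    lengths = np.sqrt((diffs ** 2).sum(axis=1))  # segment lengths
    return -(lengths.mean() - lengths[n]) ** 2
\end{verbatim}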
The curvature term is formula (17.6)
\begin{equation}
\text{curve}[\mathbf{w},n]= - \left(\mathbf{w}_{n-1}-2\mathbf{w}_{n}+\mathbf{w}_{n+1}\right)^T\left(\mathbf{w}_{n-1}-2\mathbf{w}_{n}+\mathbf{w}_{n+1}\right)
\end{equation}
and it is always nonpositive because of the leading minus and because $\mathbf{x}^T\mathbf{x}=\|\mathbf{x}\|^2\geq0$, the squared $\ell^2$ norm. For example, three collinear, equally spaced points give $\mathbf{w}_{n-1}-2\mathbf{w}_{n}+\mathbf{w}_{n+1}=\mathbf{0}$, so the term attains its maximum value of zero on locally straight, evenly sampled contours.\\
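Again for illustration only, a matching Python sketch of the curvature term, under the same closed-contour (wrap-around) assumption:
\begin{verbatim}
import numpy as np

def curve_term(W, n):
    # curve[w, n]: negative squared norm of the discrete second
    # difference w_{n-1} - 2 w_n + w_{n+1} (wrap-around indexing).
    N = len(W)
    a = W[(n - 1) % N].astype(float)
    b = W[n].astype(float)
    c = W[(n + 1) % N].astype(float)
    second_diff = a - 2 * b + c
    return -float(second_diff @ second_diff)
\end{verbatim}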
With these new definitions
\begin{equation}
\begin{split}
\text{d}[\mathbf{x},\mathbf{w}_n]&=(\text{dist}[\mathbf{x},\mathbf{w}_n])^2\\
\text{s}[\mathbf{w},n]&=-\alpha\text{space}[\mathbf{w},n]\\
\text{c}[\mathbf{w},n]&=-\beta\text{curve}[\mathbf{w},n]
\end{split}
\end{equation}
we have that $\text{d}[\mathbf{x},\mathbf{w}_n]$, $\text{s}[\mathbf{w},n]$ and $\text{c}[\mathbf{w},n]$ are all nonnegative
and the inference formula can be rewritten as
\begin{equation}
\argmax_{\mathbf{W}}[\log{[Pr(\mathbf{W}\vert\mathbf{x})]}] = \argmin_{\mathbf{W}}[\sum_{n=1}^N \text{d}[\mathbf{x},\mathbf{w}_n]+\text{s}[\mathbf{w},n]+\text{c}[\mathbf{w},n]]
\end{equation}
since $-(\text{dist}[\mathbf{x},\mathbf{w}_n])^2+\alpha \text{space}[\mathbf{w},n]+\beta \text{curve}[\mathbf{w},n]=-(\text{d}[\mathbf{x},\mathbf{w}_n]+\text{s}[\mathbf{w},n]+\text{c}[\mathbf{w},n])$, and maximizing a quantity is equivalent to minimizing its negation. The argument of $\argmin$ is now a sum of nonnegative terms.
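As a closing illustration (not part of the derivation), one possible way to minimize this nonnegative sum is a greedy coordinate-wise search that repeatedly moves each landmark to the lowest-energy position inside a small neighbourhood. The sketch below reuses the \texttt{space\_term} and \texttt{curve\_term} sketches given above and expects the distance map produced by \texttt{edge\_distance\_map}; the values of $\alpha$, $\beta$, the search radius and the number of sweeps are illustrative.
\begin{verbatim}
import numpy as np

def energy(dist_map, W, alpha, beta):
    # sum_n d + s + c  with  d = dist^2, s = -alpha*space, c = -beta*curve.
    total = 0.0
    for n in range(len(W)):
        d = float(dist_map[W[n, 0], W[n, 1]]) ** 2
        s = -alpha * space_term(W, n)
        c = -beta * curve_term(W, n)
        total += d + s + c
    return total

def greedy_snake(dist_map, W, alpha=1.0, beta=1.0, radius=2, sweeps=50):
    # Greedy coordinate-wise search: move each landmark to the best
    # position in a (2*radius+1)^2 neighbourhood; stop after a sweep
    # with no change.  W is an N x 2 integer array of (row, col).
    W = W.copy()
    h, w = dist_map.shape
    for _ in range(sweeps):
        moved = False
        for n in range(len(W)):
            start = W[n].copy()
            best_pos = start.copy()
            best_e = energy(dist_map, W, alpha, beta)
            for dr in range(-radius, radius + 1):
                for dc in range(-radius, radius + 1):
                    r = int(np.clip(start[0] + dr, 0, h - 1))
                    c = int(np.clip(start[1] + dc, 0, w - 1))
                    W[n] = (r, c)
                    e = energy(dist_map, W, alpha, beta)
                    if e < best_e:
                        best_e, best_pos = e, np.array([r, c])
            W[n] = best_pos
            if not np.array_equal(best_pos, start):
                moved = True
        if not moved:
            break
    return W
\end{verbatim}
A coordinate-wise neighbourhood search is only one option; because each landmark interacts only with its neighbours through the spacing and curvature terms, dynamic programming over a set of candidate positions is another common choice.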
\end{document}