-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy path: riemannian_sgd.py
61 lines (53 loc) · 2.91 KB
/
riemannian_sgd.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Riemannian stochastic gradient descent module.
"""
# --------------------------------------------------------------------------- #
# MODULE HISTORY #
# --------------------------------------------------------------------------- #
# Version 1
# Date 2021-07-28
# Author LH John, E Fridgeirsson
# Note Original version
#
# --------------------------------------------------------------------------- #
# SYSTEM IMPORTS #
# --------------------------------------------------------------------------- #
# --------------------------------------------------------------------------- #
# OTHER IMPORTS #
# --------------------------------------------------------------------------- #
import torch
# --------------------------------------------------------------------------- #
# OWN IMPORTS #
# --------------------------------------------------------------------------- #
from shared.math import grad, expm
# --------------------------------------------------------------------------- #
# META DATA #
# --------------------------------------------------------------------------- #
__status__ = 'Development'
# --------------------------------------------------------------------------- #
# CONSTANTS #
# --------------------------------------------------------------------------- #
# --------------------------------------------------------------------------- #
# GLOBAL VARIABLES #
# --------------------------------------------------------------------------- #
# --------------------------------------------------------------------------- #
# CLASS DEFINITION #
# --------------------------------------------------------------------------- #
class RiemannianSGD(torch.optim.Optimizer):
    """
    Riemannian stochastic gradient descent to train Poincaré embeddings.

    Each step applies the update ``p <- expm(p, -lr * grad(p))``: the
    Riemannian gradient (``shared.math.grad``) is scaled by the negative
    learning rate and mapped back onto the manifold via the exponential
    map (``shared.math.expm``).
    """
    def __init__(self, params, lr=0.3):
        """Set up the optimizer.

        :param params: iterable of parameters (or parameter groups) to
            optimize, as expected by ``torch.optim.Optimizer``.
        :param lr: learning rate stored in the per-group defaults
            (default 0.3, matching the previous hard-coded value).
        """
        # Store lr in defaults (standard torch.optim pattern) instead of
        # hard-coding it inside step(); passing {} made the rate fixed.
        super(RiemannianSGD, self).__init__(params, dict(lr=lr))

    def step(self, lr=None):
        """Perform a single optimization step.

        :param lr: optional learning-rate override for this step; when
            ``None``, the group's default learning rate is used.
        """
        for group in self.param_groups:
            # Per-call override wins; otherwise fall back to group default.
            step_lr = group['lr'] if lr is None else lr
            for p in group['params']:
                if p.grad is None:
                    continue
                # Riemannian gradient of p (project helper; presumably a
                # rescaling of the Euclidean gradient — confirm in
                # shared.math).
                d_p = grad(p)
                # Non-in-place multiply: the original mul_() mutated the
                # returned tensor, which would clobber p.grad if grad()
                # returns a view of it.
                d_p = d_p.mul(-step_lr)
                # Retract the update back onto the Poincaré ball via the
                # exponential map.
                p.data.copy_(expm(p.data, d_p))
# --------------------------------------------------------------------------- #
# END OF FILE #
# --------------------------------------------------------------------------- #