Merge branch 'main' into hypergraphs
bda82 authored Oct 26, 2023
2 parents dfa7b92 + f355128 commit 012b0f1
Showing 10 changed files with 285 additions and 14 deletions.
139 changes: 139 additions & 0 deletions .dockerignore
@@ -0,0 +1,139 @@
/data_validation/
.DS_Store
/venv/
/.idea/
/.git/
/docker-compose.yml
*.ipynb_checkpoints
__pycache__
/cache/

# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
.python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/
10 changes: 6 additions & 4 deletions .gitignore
@@ -1,11 +1,13 @@
data_validation
.DS_Store
-venv
-.idea
-.git
+/venv/
+/.idea/
+/.git/
+/docker-compose.yml
*.ipynb_checkpoints
__pycache__
.gitignore
+/cache/

# Byte-compiled / optimized / DLL files
__pycache__/
@@ -135,4 +137,4 @@ venv.bak/
dmypy.json

# Pyre type checker
-.pyre/
\ No newline at end of file
+.pyre/
50 changes: 50 additions & 0 deletions Dockerfile
@@ -0,0 +1,50 @@
FROM nvidia/cuda:11.6.2-devel-ubuntu20.04
ENV DEBIAN_FRONTEND=noninteractive
WORKDIR /app

# Install required packages
RUN set -xe \
&& apt-get -y update \
&& apt-get install -fyqq software-properties-common curl build-essential git libaio-dev llvm-10 clang wget \
&& apt-get -y update \
&& add-apt-repository universe \
&& apt-get -y update \
&& apt-get -fyqq install python3.9-full python3.9-dev python3-pip \
&& apt-get clean

# Let's upgrade pip first
RUN set -xe \
&& python3.9 -m pip install --upgrade pip

# Install python packages
RUN set -xe \
# PyTorch MUST BE installed first
&& python3.9 -m pip install \
'torch==1.12.1+cu116' \
-f https://download.pytorch.org/whl/torch_stable.html \
# And only then all other dependencies
&& python3.9 -m pip install \
'torch-geometric==2.0.4' \
'torch-sparse==0.6.15+pt112cu116' \
'torch-scatter==2.1.0+pt112cu116' \
-f https://data.pyg.org/whl/torch-1.12.1+cu116.html

# Install Jupyter
EXPOSE 8888
RUN set -xe \
&& python3.9 -m pip install jupyter \
&& jupyter notebook --generate-config \
&& echo "c.ServerApp.allow_origin = '*'" >> /root/.jupyter/jupyter_notebook_config.py \
&& echo "c.ServerApp.allow_remote_access = True" >> /root/.jupyter/jupyter_notebook_config.py \
# passwd('admin','sha1')
&& echo "c.NotebookApp.password = u'sha1:fd40b23609dd:882af6cdf722657245be6f4abd9b641a84ef9c2a'" >> /root/.jupyter/jupyter_notebook_config.py

# Install requirements
COPY requirements.txt ./
RUN set -xe \
&& python3.9 -m pip install --no-cache-dir -r requirements.txt

# Copy project files
COPY . .

ENTRYPOINT ["/app/entrypoint.sh"]
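
The install order in the Dockerfile matters: the `torch-sparse` and `torch-scatter` wheels are compiled against one exact torch+CUDA combination (here 1.12.1+cu116), so PyTorch has to be installed before they are resolved, and a mismatch only surfaces at import time. A quick sanity check that can be run inside the container (an illustrative sketch, not part of the commit):

```python
import torch

# The pinned wheels only work if these match the versions they were
# built against; in this image that should be 1.12.1+cu116 / 11.6.
print(torch.__version__)   # e.g. "1.12.1+cu116"
print(torch.version.cuda)  # e.g. "11.6"

import torch_scatter  # typically fails with an undefined-symbol error on a mismatch
```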
53 changes: 53 additions & 0 deletions README_DOCKER.md
@@ -0,0 +1,53 @@
# Docker Devbox

Requirements:

* Docker
* Docker Compose
* Docker Nvidia Runtime

## How to run

Copy the example docker-compose file:

```shell
cp docker-compose.dist.yml docker-compose.yml
```

Build the image:

```shell
docker-compose build
```

Start the container:

```shell
docker-compose up -d
```

## How to use inside a container

Log in to the container:

```shell
docker-compose exec app bash
```

Use the python3.9 interpreter to run your tasks, e.g. the tutorials:

```shell
# Prepare environment
cd tutorials
ln -s ../stable_gnn

# Run graph classification task
python3.9 graph_classification.py

# Run node classification task
python3.9 node_classification.py
```

## How to use from an IDE like PyCharm

Use `Add New Interpreter` from the PyCharm menu, then select `Docker Compose`.
19 changes: 19 additions & 0 deletions docker-compose.dist.yml
@@ -0,0 +1,19 @@
version: "3.9"

services:

app:
restart: "unless-stopped"
build:
context: .
volumes:
- ./:/app
ports:
- "127.0.0.1:8888:8888"
deploy:
resources:
reservations:
devices:
- driver: nvidia
count: 1
capabilities: [ gpu ]
8 changes: 8 additions & 0 deletions entrypoint.sh
@@ -0,0 +1,8 @@
#!/bin/bash

JN_PORT=${PORT:-8888}
if [ -z "$JN_IP" ]; then
JN_IP=$(hostname -I | awk '{print $1}')
fi

jupyter notebook --allow-root --ip="$JN_IP" --port="$JN_PORT" --no-browser
8 changes: 4 additions & 4 deletions requirements.txt
@@ -1,4 +1,4 @@
-bamt
-optuna
-pgmpy
-pandas
+bamt==1.1.44
+optuna==2.10.1
+pgmpy==0.1.20
+pandas==1.5.2
4 changes: 2 additions & 2 deletions stable_gnn/embedding/model_train_embeddings.py
@@ -123,7 +123,7 @@ def _objective(self, trial: Trial) -> Tensor:

loss_to_train = {}
for name in self.loss:
-    if type(self.loss[name]) == list:
+    if isinstance(self.loss[name], list):
        if len(self.loss[name]) == 3:
            var = trial.suggest_int(
                name,
@@ -141,7 +141,7 @@ def _objective(self, trial: Trial) -> Tensor:
    else:
        loss_to_train[name] = self.loss[name]

-    if name == "q" and type(self.loss[name]) == list:
+    if name == "q" and isinstance(self.loss[name], list):
        var_5 = trial.suggest_categorical("p", self.loss["p"])
        var_4 = trial.suggest_categorical("q", self.loss[name])
        if var_4 > 1:
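
The switch from `type(x) == list` to `isinstance(x, list)` is the idiomatic form: an exact-type comparison rejects subclasses, while `isinstance` accepts them. A minimal standalone illustration (the `LossSchedule` class is hypothetical, not from this repo):

```python
class LossSchedule(list):
    """Hypothetical list subclass, e.g. a search-space entry with metadata."""

schedule = LossSchedule([0.25, 0.5, 4])

print(type(schedule) == list)      # False: exact-type check rejects the subclass
print(isinstance(schedule, list))  # True: isinstance accepts subclasses
```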
6 changes: 3 additions & 3 deletions stable_gnn/model_gc.py
@@ -1,11 +1,11 @@
from typing import List, Optional, Tuple

-import bamt.Networks as Nets
+import bamt.networks as Nets
import numpy as np
import pandas as pd
import torch
import torch.nn.functional as F
-from bamt.Preprocessors import Preprocessor
+from bamt.preprocessors import Preprocessor
from pgmpy.estimators import K2Score
from sklearn import preprocessing
from torch import device
@@ -215,7 +215,7 @@ def _data_eigen_exctractor(self, dataset: List[Graph]) -> pd.DataFrame:

    return data_bamt

-def _bayesian_network_build(self, data_bamt: pd.DataFrame) -> Nets.HybridBN:
+def _bayesian_network_build(self, data_bamt: pd.DataFrame):
    # weight search for bamt
    for col in data_bamt.columns[: len(data_bamt.columns)]:
        data_bamt[col] = data_bamt[col].astype(float)
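
These renames track bamt's move to lowercase module names (`bamt.Networks` → `bamt.networks`, `bamt.Preprocessors` → `bamt.preprocessors`), which lines up with the `bamt==1.1.44` pin in requirements.txt. If the code ever has to run against both old and new bamt releases, a guarded import is one option (a sketch under that assumption, not part of the commit):

```python
try:
    # newer bamt releases expose lowercase module names
    import bamt.networks as Nets
    from bamt.preprocessors import Preprocessor
except ImportError:
    # older releases used CamelCase module names
    import bamt.Networks as Nets
    from bamt.Preprocessors import Preprocessor
```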
2 changes: 1 addition & 1 deletion stable_gnn/pipelines/graph_classification_pipeline.py
@@ -111,7 +111,7 @@ def test(self, model: Module, loader: DataLoader) -> Tuple[float, float]:

y_true_list = y_true.cpu().tolist()
y_pred_list = y_pred.squeeze().tolist()
-if type(y_pred_list) != list:
+if not isinstance(y_pred_list, list):
    y_pred_list = [y_pred_list]
accs_micro.append(f1_score(y_true_list, y_pred_list, average="micro"))
accs_macro.append(f1_score(y_true_list, y_pred_list, average="macro"))
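
The wrap is needed because `Tensor.tolist()` returns a bare Python scalar when the tensor is zero-dimensional, which is exactly what `squeeze()` produces for a single-prediction batch, and `f1_score` expects a sequence. A small standalone illustration (assumes only `torch` is installed):

```python
import torch

batch = torch.tensor([[0.7]])      # a batch holding a single prediction
value = batch.squeeze().tolist()   # squeeze() yields a 0-dim tensor,
print(type(value))                 # so tolist() returns <class 'float'>

# hence the guard in the diff above before passing to f1_score
if not isinstance(value, list):
    value = [value]
```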
