From 9c0a260b5cae4a7aaeb95f4d181d4e26a1101d14 Mon Sep 17 00:00:00 2001 From: Bagus Tris Atmaja Date: Mon, 27 May 2024 18:20:38 +0900 Subject: [PATCH] add device_id param --- ini_file.md | 4 +++- nkululeko/models/model_tuned.py | 3 ++- tests/exp_emodb_finetune.ini | 2 +- 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/ini_file.md b/ini_file.md index fe0a6f9..c405f93 100644 --- a/ini_file.md +++ b/ini_file.md @@ -299,8 +299,10 @@ * batch_size = 8 * **num_workers**: Number of parallel processes for neural nets * num_workers = 5 -* **device**: For torch/huggingface models: select your GPU if you have one +* **device**: For torch/huggingface models: select your GPU if you have one. Values are either "cpu" or "cuda". * device = cpu +* **device_id**: For torch/huggingface models: select which GPU(s) to use if you have multiple. Values are GPU ids (e.g. 0, 1, or both as "0,1"). + * device_id = 0 * **patience**: Number of epochs to wait if the result gets better (for early stopping) * patience = 5 * **pretrained_model**: Base model for finetuning/transfer learning. Variants of wav2vec2, Hubert, and WavLM are tested to work. 
diff --git a/nkululeko/models/model_tuned.py b/nkululeko/models/model_tuned.py index 25159f9..f71291a 100644 --- a/nkululeko/models/model_tuned.py +++ b/nkululeko/models/model_tuned.py @@ -43,10 +43,11 @@ def __init__(self, df_train, df_test, feats_train, feats_test): # device = self.util.config_val("MODEL", "device", "cpu") self.device = "cuda" if torch.cuda.is_available() else "cpu" self.batch_size = int(self.util.config_val("MODEL", "batch_size", "8")) + self.device_id = self.util.config_val("MODEL", "device_id", "0") if self.device != "cpu": self.util.debug(f"running on device {self.device}") os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID" - os.environ["CUDA_VISIBLE_DEVICES"] = "0" # self.device + os.environ["CUDA_VISIBLE_DEVICES"] = self.device_id # self.device self.df_train, self.df_test = df_train, df_test self.epoch_num = int(self.util.config_val("EXP", "epochs", 1)) diff --git a/tests/exp_emodb_finetune.ini b/tests/exp_emodb_finetune.ini index 0e867b9..b2ab343 100644 --- a/tests/exp_emodb_finetune.ini +++ b/tests/exp_emodb_finetune.ini @@ -1,6 +1,6 @@ [EXP] root = ./tests/results/ -name = test_pretrain +name = test_pretrain_1 runs = 1 epochs = 10 save = True