diff --git a/src/settings.json b/src/settings.json
index 533aa4e..b5645df 100644
--- a/src/settings.json
+++ b/src/settings.json
@@ -8,7 +8,7 @@
     "minCompressLength": 1024
   },
   "tensorflow": {
-    "bestModel": 16
+    "bestModel": 22
  },
   "debug": {
     "request": false,
diff --git a/training/unet/model.py b/training/unet/model.py
index 75b1ecf..bfca832 100644
--- a/training/unet/model.py
+++ b/training/unet/model.py
@@ -15,7 +15,7 @@ def FindModel(hp: HyperParameters):
     kernel_initializer: str = hp.Choice('kernel_initializer', ['he_normal']) # type: ignore
     kernel_size = hp.Choice('kernel_size', values=[3])
     dropout: int = hp.Float('dropout_rate', 0.1, 0.5, step=0.1) # type: ignore
-    filter: int = hp.Choice('filter', values=[4, 8, 16, 32]) # type: ignore
+    filter: int = hp.Choice('filter', values=[4, 8, 16]) # type: ignore
     input = Input(shape=(512 ,320, 3))
 
     def down_block(x, filters: int, dropout_prob: float = 0, use_maxpool=True):
@@ -76,10 +76,10 @@ def LoaderModel():
     input = Input(shape=(512, 320, 3))
     loss = 'BinaryCrossentropy'
     optimizer = 'Adam'
-    learning_rate = 0.01
+    learning_rate = 0.001
     kernel_size = 3
-    dropout = 0.1
-    filter = 8
+    dropout = 0.2
+    filter = 16
     activation = 'relu'
     kernel_initializer = 'he_normal'
 
diff --git a/training/unet/training.py b/training/unet/training.py
index 4d70eda..c35dcd1 100644
--- a/training/unet/training.py
+++ b/training/unet/training.py
@@ -246,4 +246,4 @@ def on_epoch_end(self, epoch, logs=None):
 
         print(f'Modelo salvo: {datetime.now().strftime("%a, %d %b %Y %H:%M:%S GMT")}')
 
-asyncio.run(runTraining())
\ No newline at end of file
+asyncio.run(runTraining())