From 33e8296ade141142c8eb15f2776a04fb7fa92960 Mon Sep 17 00:00:00 2001 From: Flippchen <91947480+Flippchen@users.noreply.github.com> Date: Sat, 6 Jan 2024 22:01:23 +0100 Subject: [PATCH 01/14] Chore: Deleted ToDo --- training/train.py | 1 - 1 file changed, 1 deletion(-) diff --git a/training/train.py b/training/train.py index 04e9aa1..a56f8b5 100644 --- a/training/train.py +++ b/training/train.py @@ -131,4 +131,3 @@ # Save model model.save(f"{save_path}{name}.h5") -# TODO: Different data augmentation (vertical, ..), Augmentation before training From 6bc2d0a83eb20d7a183ef55cda6405abf2532f7b Mon Sep 17 00:00:00 2001 From: Flippchen <91947480+Flippchen@users.noreply.github.com> Date: Tue, 6 Feb 2024 22:01:23 +0100 Subject: [PATCH 02/14] Feature: Add random seed --- training/train.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/training/train.py b/training/train.py index a56f8b5..05f80e2 100644 --- a/training/train.py +++ b/training/train.py @@ -8,6 +8,7 @@ from keras.regularizers import l1_l2 from keras.callbacks import EarlyStopping, ModelCheckpoint import os +import random # Ignore warnings import warnings @@ -28,6 +29,8 @@ # Set to True to load trained model load_model = False load_path = "../models/all_model_variants/efficientnet-old-head-model-variants.h5" +# Set seed for reproducibility +random_seed = True # Config path_addon = get_data_path_addon(model_type) config = { @@ -35,6 +38,7 @@ "batch_size": 32, "img_height": img_height, "img_width": img_width, + "seed": random.randint(0, 1000) if random_seed else 123 } # Load dataset and classes From 32120aaa0da3bea4a5a81a7b6719d5152e693e71 Mon Sep 17 00:00:00 2001 From: Flippchen <91947480+Flippchen@users.noreply.github.com> Date: Tue, 6 Feb 2024 22:01:23 +0100 Subject: [PATCH 03/14] Chore: Deleted comment --- utilities/tools.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/utilities/tools.py b/utilities/tools.py index e16fa7c..359b721 100644 --- a/utilities/tools.py +++ b/utilities/tools.py @@ -19,14 
+19,6 @@ def load_dataset(path: str, batch_size: int, img_height: int, img_width: int) -> :return: Tuple of train, val Dataset and Class names """ data_dir = pathlib.Path(path) - # if "more_classes" in path: - # image_count = len(list(data_dir.glob('*/*/*.jpg'))) - # else: - # image_count = len(list(data_dir.glob('*/*/*/*.jpg'))) - - # print("Image count:", image_count) - # cars = list(data_dir.glob('*/*/*/*.jpg')) - # PIL.Image.open(str(cars[0])) train_ds = tf.keras.utils.image_dataset_from_directory( data_dir, validation_split=0.2, From 680c869ec140b333a1797c52c02a86ec8af2f6ca Mon Sep 17 00:00:00 2001 From: Flippchen <91947480+Flippchen@users.noreply.github.com> Date: Wed, 7 Feb 2024 11:01:23 +0100 Subject: [PATCH 04/14] Feature: Added more augmentation --- utilities/tools.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/utilities/tools.py b/utilities/tools.py index 359b721..8aef7d9 100644 --- a/utilities/tools.py +++ b/utilities/tools.py @@ -159,12 +159,13 @@ def create_augmentation_layer(img_height: int, img_width: int) -> keras.Sequenti """ return keras.Sequential( [ - layers.RandomFlip("horizontal", + layers.RandomFlip("vertical", input_shape=(img_height, img_width, 3)), - layers.RandomRotation(0.1), + layers.RandomRotation(0.2), layers.RandomZoom(0.1), + layers.RandomContrast(0.1), ] ) From ff06820081307570453344bcd3b0f1fda08b6ca5 Mon Sep 17 00:00:00 2001 From: Flippchen <91947480+Flippchen@users.noreply.github.com> Date: Wed, 7 Feb 2024 11:01:23 +0100 Subject: [PATCH 05/14] Chore: Deleted comment --- utilities/tools.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/utilities/tools.py b/utilities/tools.py index 8aef7d9..a93766a 100644 --- a/utilities/tools.py +++ b/utilities/tools.py @@ -90,12 +90,6 @@ def load_image_subset(path: str, batch_size: int, img_height: int, img_width: in :return: Subset of Dataset """ data_dir = pathlib.Path(path) - # if "more_classes" in path: - # image_count = 
len(list(data_dir.glob('*/*/*.jpg'))) - # else: - # image_count = len(list(data_dir.glob('*/*/*/*.jpg'))) - - # print("Image count:", image_count) data = tf.keras.utils.image_dataset_from_directory( data_dir, From 2ae3409888e7f5a04e0da5a512acbf5d929f8475 Mon Sep 17 00:00:00 2001 From: Flippchen <91947480+Flippchen@users.noreply.github.com> Date: Wed, 7 Feb 2024 11:01:23 +0100 Subject: [PATCH 06/14] Feature: Added seed --- utilities/tools.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/utilities/tools.py b/utilities/tools.py index a93766a..33e72fd 100644 --- a/utilities/tools.py +++ b/utilities/tools.py @@ -10,7 +10,7 @@ import logging -def load_dataset(path: str, batch_size: int, img_height: int, img_width: int) -> tuple[tf.data.Dataset, tf.data.Dataset, list]: +def load_dataset(path: str, batch_size: int, img_height: int, img_width: int, seed: int) -> tuple[tf.data.Dataset, tf.data.Dataset, list]: """ :param path: Path to the Dataset folder :param batch_size: Integer which defines how many Images are in one Batch @@ -23,7 +23,7 @@ def load_dataset(path: str, batch_size: int, img_height: int, img_width: int) -> data_dir, validation_split=0.2, subset="training", - seed=123, + seed=seed, image_size=(img_height, img_width), batch_size=batch_size) @@ -31,7 +31,7 @@ def load_dataset(path: str, batch_size: int, img_height: int, img_width: int) -> data_dir, validation_split=0.2, subset="validation", - seed=123, + seed=seed, image_size=(img_height, img_width), batch_size=batch_size) From 9c1b52f371acf3058cdfcd1577ba6513d8b88fd1 Mon Sep 17 00:00:00 2001 From: Flippchen <91947480+Flippchen@users.noreply.github.com> Date: Wed, 7 Feb 2024 11:01:23 +0100 Subject: [PATCH 07/14] Feature: Changed optim --- training/train.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/training/train.py b/training/train.py index 05f80e2..edea9f7 100644 --- a/training/train.py +++ b/training/train.py @@ -4,7 +4,7 @@ from keras.applications import 
EfficientNetV2B1 from utilities.tools import * from utilities.discord_callback import DiscordCallback -from keras.optimizers import Adam +from keras.optimizers import AdamW from keras.regularizers import l1_l2 from keras.callbacks import EarlyStopping, ModelCheckpoint import os @@ -88,7 +88,7 @@ ]) if not load_model else keras.models.load_model(load_path) # Define optimizer -optimizer = Adam(lr=0.001, beta_1=0.9, beta_2=0.999, epsilon=1e-08) +optimizer = AdamW(lr=0.001, beta_1=0.9, beta_2=0.999, epsilon=1e-08) # Define learning rate scheduler initial_learning_rate = 0.001 From c402873e6cddb4eb401ffabbba2e16f9cca53045 Mon Sep 17 00:00:00 2001 From: Flippchen <91947480+Flippchen@users.noreply.github.com> Date: Wed, 7 Feb 2024 12:01:23 +0100 Subject: [PATCH 08/14] Feature: Changed optim --- training/train.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/training/train.py b/training/train.py index edea9f7..724c9bf 100644 --- a/training/train.py +++ b/training/train.py @@ -88,7 +88,7 @@ ]) if not load_model else keras.models.load_model(load_path) # Define optimizer -optimizer = AdamW(lr=0.001, beta_1=0.9, beta_2=0.999, epsilon=1e-08) +optimizer = AdamW(lr=0.001, beta_1=0.9, beta_2=0.999, epsilon=1e-08, use_ema=True) # Define learning rate scheduler initial_learning_rate = 0.001 From 52024b20f191f83d52d2ddbe4c864c1efc02c3a4 Mon Sep 17 00:00:00 2001 From: Flippchen <91947480+Flippchen@users.noreply.github.com> Date: Wed, 7 Feb 2024 12:01:23 +0100 Subject: [PATCH 09/14] Fix: Bug in wrong calculation of loss --- training/train.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/training/train.py b/training/train.py index 724c9bf..825b179 100644 --- a/training/train.py +++ b/training/train.py @@ -102,7 +102,7 @@ # Compile model model.compile(optimizer=optimizer, - loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True), + loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=False), metrics=['accuracy']) 
model.summary() From df05e45efafb6e8bc0280a2080f85d39e3f9ade3 Mon Sep 17 00:00:00 2001 From: Flippchen <91947480+Flippchen@users.noreply.github.com> Date: Thu, 8 Feb 2024 12:01:23 +0100 Subject: [PATCH 10/14] Chore: Deleted not used codeblock --- training/train.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/training/train.py b/training/train.py index 825b179..9686065 100644 --- a/training/train.py +++ b/training/train.py @@ -53,11 +53,6 @@ train_ds = train_ds.cache().shuffle(1000).prefetch(buffer_size=AUTOTUNE) val_ds = val_ds.cache().prefetch(buffer_size=AUTOTUNE) -# Normalize the data -normalization_layer = layers.Rescaling(1. / 255) -normalized_ds = train_ds.map(lambda x, y: (normalization_layer(x), y)) -image_batch, labels_batch = next(iter(normalized_ds)) - # Create data augmentation layer and show augmented batch data_augmentation = create_augmentation_layer(img_height, img_width) show_augmented_batch(train_ds, data_augmentation) From b52c613394a80c4ca5788328ac34434b7cc2114c Mon Sep 17 00:00:00 2001 From: Flippchen <91947480+Flippchen@users.noreply.github.com> Date: Thu, 8 Feb 2024 12:01:23 +0100 Subject: [PATCH 11/14] Chore: Change lr scheduler --- training/train.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/training/train.py b/training/train.py index 9686065..aaac8d5 100644 --- a/training/train.py +++ b/training/train.py @@ -87,7 +87,7 @@ # Define learning rate scheduler initial_learning_rate = 0.001 -lr_decay_steps = 1000 +lr_decay_steps = 10 lr_decay_rate = 0.96 lr_scheduler = tf.keras.optimizers.schedules.ExponentialDecay( initial_learning_rate, From fe90abec82e855cd11f3cb4f77fcbcf052751278 Mon Sep 17 00:00:00 2001 From: Flippchen <91947480+Flippchen@users.noreply.github.com> Date: Thu, 8 Feb 2024 20:01:23 +0100 Subject: [PATCH 12/14] Fix: Test not running --- tests/test_tools.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/tests/test_tools.py b/tests/test_tools.py index 32e9c7c..b647878 100644 
--- a/tests/test_tools.py +++ b/tests/test_tools.py @@ -21,7 +21,7 @@ def test_load_dataset(tmp_path): p2 = d2 / f"img{i + 1}.jpg" p2.write_text("fake image data") - train_ds, val_ds, class_names = load_dataset(str(d), 2, 32, 32) + train_ds, val_ds, class_names = load_dataset(str(d), 2, 32, 32, 123) assert len(train_ds) == 4 assert len(val_ds) == 1 @@ -30,10 +30,13 @@ def test_load_dataset(tmp_path): def test_create_augmentation_layer(): data_augmentation = create_augmentation_layer(32, 32) - assert len(data_augmentation.layers) == 3 + assert len(data_augmentation.layers) == 5 assert isinstance(data_augmentation.layers[0], tf.keras.layers.RandomFlip) assert isinstance(data_augmentation.layers[1], tf.keras.layers.RandomRotation) assert isinstance(data_augmentation.layers[2], tf.keras.layers.RandomZoom) + assert isinstance(data_augmentation.layers[3], tf.keras.layers.RandomContrast) + assert isinstance(data_augmentation.layers[4], tf.keras.layers.GaussianNoise) + def test_get_data_path_addon(): From b2231337af648397d117b4a2ede3576f6f28fe32 Mon Sep 17 00:00:00 2001 From: Flippchen <91947480+Flippchen@users.noreply.github.com> Date: Thu, 8 Feb 2024 20:01:23 +0100 Subject: [PATCH 13/14] Fix: Test not running --- tests/test_tools.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/test_tools.py b/tests/test_tools.py index b647878..18fffd2 100644 --- a/tests/test_tools.py +++ b/tests/test_tools.py @@ -38,7 +38,6 @@ def test_create_augmentation_layer(): assert isinstance(data_augmentation.layers[4], tf.keras.layers.GaussianNoise) - def test_get_data_path_addon(): assert get_data_path_addon("car_type") == "Porsche" assert get_data_path_addon("all_specific_model_variants") == "Porsche_more_classes" From 0720f69b9c0e7feab6a3a54dcb5cfd3dc6a3e848 Mon Sep 17 00:00:00 2001 From: Flippchen <91947480+Flippchen@users.noreply.github.com> Date: Thu, 8 Feb 2024 20:01:23 +0100 Subject: [PATCH 14/14] Feature: More augmentation --- utilities/tools.py | 1 + 1 file changed, 1 
insertion(+) diff --git a/utilities/tools.py b/utilities/tools.py index 34cd29a..3fa8af0 100644 --- a/utilities/tools.py +++ b/utilities/tools.py @@ -161,6 +161,7 @@ def create_augmentation_layer(img_height: int, img_width: int) -> keras.Sequenti layers.RandomRotation(0.2), layers.RandomZoom(0.1), layers.RandomContrast(0.1), + layers.GaussianNoise(0.1) ] )