-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathcancer.py
132 lines (115 loc) · 3.24 KB
/
cancer.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
import tensorflow as tf
from tensorflow.keras.applications import MobileNetV2
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.layers import Dense, GlobalAveragePooling2D, Dropout
from tensorflow.keras.models import Model
import os
# Set random seed for reproducibility of weight init / shuffling in TF ops
tf.random.set_seed(42)
# Parameters (hyperparameters shared by the data pipeline and training below)
IMG_SIZE = 224 # MobileNetV2 default input size
BATCH_SIZE = 32  # images per gradient step
EPOCHS = 18  # epochs for the initial frozen-backbone phase
NUM_CLASSES = 5  # lung/colon histology categories in the dataset
def create_model(num_classes=NUM_CLASSES, img_size=IMG_SIZE):
    """Build a transfer-learning classifier on a frozen MobileNetV2 backbone.

    Args:
        num_classes: Number of softmax output classes. Defaults to the
            module-level NUM_CLASSES, so existing zero-argument callers
            are unaffected.
        img_size: Square input image size in pixels. Defaults to IMG_SIZE.

    Returns:
        An uncompiled tf.keras Model mapping (img_size, img_size, 3) images
        to class probabilities over num_classes.
    """
    # ImageNet-pretrained feature extractor without its classification head.
    base_model = MobileNetV2(
        weights='imagenet',
        include_top=False,
        input_shape=(img_size, img_size, 3)
    )
    # Freeze the backbone so only the new head trains in the first phase.
    base_model.trainable = False

    # Custom classification head: global pooling -> two dense blocks with
    # dropout for regularization -> softmax over the target classes.
    x = base_model.output
    x = GlobalAveragePooling2D()(x)
    x = Dense(1024, activation='relu')(x)
    x = Dropout(0.5)(x)
    x = Dense(512, activation='relu')(x)
    x = Dropout(0.3)(x)
    predictions = Dense(num_classes, activation='softmax')(x)

    model = Model(inputs=base_model.input, outputs=predictions)
    return model
# Data augmentation for training. MobileNetV2's ImageNet weights were trained
# on inputs scaled to [-1, 1]; use the model's own preprocess_input rather
# than a plain 1/255 rescale (which yields [0, 1] and mismatches the
# pretrained weights).
train_datagen = ImageDataGenerator(
    preprocessing_function=tf.keras.applications.mobilenet_v2.preprocess_input,
    rotation_range=20,
    width_shift_range=0.2,
    height_shift_range=0.2,
    horizontal_flip=True,
    fill_mode='nearest',
    validation_split=0.2
)
# Validation data gets the same preprocessing but no augmentation.
val_datagen = ImageDataGenerator(
    preprocessing_function=tf.keras.applications.mobilenet_v2.preprocess_input,
    validation_split=0.2
)
# Training split: 80% of the images under lung_colon_image_set/
# (one subdirectory per class; labels are one-hot via class_mode='categorical').
train_generator = train_datagen.flow_from_directory(
    'lung_colon_image_set',
    target_size=(IMG_SIZE, IMG_SIZE),
    batch_size=BATCH_SIZE,
    class_mode='categorical',
    subset='training'
)
# Validation split: remaining 20%, selected by the matching validation_split.
validation_generator = val_datagen.flow_from_directory(
    'lung_colon_image_set',
    target_size=(IMG_SIZE, IMG_SIZE),
    batch_size=BATCH_SIZE,
    class_mode='categorical',
    subset='validation'
)
# Build the classifier and compile it for the initial, frozen-backbone phase.
model = create_model()
model.compile(
    optimizer=tf.keras.optimizers.Adam(learning_rate=0.001),
    loss='categorical_crossentropy',
    metrics=['accuracy']
)

# Training callbacks: snapshot the best weights by validation accuracy,
# stop early once validation accuracy stops improving, and shrink the
# learning rate when validation loss stalls.
checkpoint_cb = tf.keras.callbacks.ModelCheckpoint(
    'best_model.h5',
    monitor='val_accuracy',
    save_best_only=True,
    mode='max'
)
early_stop_cb = tf.keras.callbacks.EarlyStopping(
    monitor='val_accuracy',
    patience=5,
    restore_best_weights=True
)
reduce_lr_cb = tf.keras.callbacks.ReduceLROnPlateau(
    monitor='val_loss',
    factor=0.2,
    patience=3,
    min_lr=1e-6
)
callbacks = [checkpoint_cb, early_stop_cb, reduce_lr_cb]
# Phase 1: train only the new classification head on the frozen backbone.
history = model.fit(
    train_generator,
    epochs=EPOCHS,
    validation_data=validation_generator,
    callbacks=callbacks
)

# Phase 2: fine-tuning. Unfreeze the last 20 layers of the network — note
# this slice covers the custom head plus the top of the MobileNetV2
# backbone. BatchNormalization layers are deliberately kept frozen: letting
# their moving statistics update during fine-tuning destabilizes the
# pretrained features (per the Keras transfer-learning guide).
for layer in model.layers[-20:]:
    if not isinstance(layer, tf.keras.layers.BatchNormalization):
        layer.trainable = True

# Recompile with a much lower learning rate so the newly unfrozen
# pretrained weights are only nudged, not overwritten.
model.compile(
    optimizer=tf.keras.optimizers.Adam(learning_rate=1e-5),
    loss='categorical_crossentropy',
    metrics=['accuracy']
)

# Continue training with fine-tuning enabled (same callbacks, so the best
# fine-tuned weights also land in best_model.h5).
history_fine = model.fit(
    train_generator,
    epochs=10,
    validation_data=validation_generator,
    callbacks=callbacks
)

# Persist the final model; the best-by-val-accuracy snapshot is already
# saved separately by the checkpoint callback.
model.save('final_model.h5')