# -*- coding: utf-8 -*-
"""Skripsi ResNet-50_Tomato Leaf Disease Classification_FINAL.ipynb

Automatically generated by Colab.

Original file is located at
    https://colab.research.google.com/drive/1DCYce_7Aik3nvrZQM-_YWCoyeyZBu4KH

# Libraries
"""

!nvidia-smi

!pip install tensorflow
!pip install tensorflowjs

!pip list

import tensorflow as tf
import numpy as np
# import tensorflowjs as tfjs
from tensorflow.keras.applications.resnet50 import ResNet50, preprocess_input
import matplotlib.pyplot as plt
from tensorflow.keras import layers
from tensorflow.keras.utils import plot_model
from tensorflow.keras.utils import image_dataset_from_directory
from tensorflow.keras import mixed_precision

print("TensorFlow version:", tf.__version__)
print("GPU detected:", tf.config.list_physical_devices('GPU'))
mixed_precision.set_global_policy('mixed_float16')

!nvcc --version

# from google.colab import drive
# drive.mount('/content/drive')

# !unzip "/content/drive/MyDrive/daun tomat/tomato_leaf_disease_dataset.zip" -d "/content"
# !unzip "/content/drive/MyDrive/daun tomat/tomato_leaf_disease_dataset_nobg.zip" -d "/content"

!unzip 'tomato_leaf_disease_dataset.zip'
"""# Read dataset"""
|
|
|
|
dataset_dir = '/content/tomato_leaf_disease_dataset'
|
|
|
|
image_size = (224, 224)
|
|
|
|
train_dataset = image_dataset_from_directory(
|
|
dataset_dir,
|
|
batch_size=32,
|
|
image_size=image_size,
|
|
seed=42,
|
|
validation_split=0.2,
|
|
subset="training",
|
|
)
|
|
|
|
val_dataset = image_dataset_from_directory(
|
|
dataset_dir,
|
|
batch_size=32,
|
|
image_size=image_size,
|
|
seed=42,
|
|
validation_split=0.2,
|
|
subset="validation",
|
|
)
|
|
|
|
classes = train_dataset.class_names
|
|
|
|
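
# Optional sanity check (not part of the original notebook): count the images per class
# straight from the directory layout, assuming one subfolder per class as required by
# image_dataset_from_directory.
import os

for class_name in classes:
    class_dir = os.path.join(dataset_dir, class_name)
    print(f"{class_name}: {len(os.listdir(class_dir))} files")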
"""# Show some of dataset"""
|
|
|
|
plt.figure(figsize=(15, 20))
|
|
for images, labels in train_dataset.take(1):
|
|
for i in range(16):
|
|
plt.subplot(8, 4, i + 1)
|
|
plt.imshow(images[i]/255)
|
|
plt.title(f"{classes[labels[i].numpy()]}")
|
|
plt.axis(False);
|
|
|
|
"""# Cache"""
|
|
|
|
train_ds = train_dataset.cache().prefetch(tf.data.AUTOTUNE)
|
|
val_ds = val_dataset.cache().prefetch(tf.data.AUTOTUNE)
|
|
|
|
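
# Optional check (not part of the original notebook): how many batches each split yields
# after the 80/20 validation split.
print("Train batches:", tf.data.experimental.cardinality(train_ds).numpy())
print("Val batches:", tf.data.experimental.cardinality(val_ds).numpy())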
"""#Model"""
|
|
|
|
resnet = ResNet50(include_top=False, weights='imagenet')
|
|
resnet.trainable = False
|
|
|
|
inputs = layers.Input(shape=(224, 224, 3))
|
|
x = preprocess_input(inputs)
|
|
x = resnet(inputs, training=False)
|
|
x = layers.GlobalAveragePooling2D()(x)
|
|
x = layers.Dropout(0.25)(x)
|
|
outputs = layers.Dense(
|
|
len(classes),
|
|
activation='softmax'
|
|
)(x)
|
|
|
|
|
|
model = tf.keras.Model(inputs=inputs, outputs=outputs)
|
|
|
|
model.summary()
|
|
|
|
plot_model(model, show_shapes=True, show_layer_activations=True, dpi=70)
|
|
|
|
"""# Set Loss Function and Optimizer"""
|
|
|
|
model.compile(optimizer='adam', loss=tf.keras.losses.SparseCategoricalCrossentropy(), metrics=[tf.keras.metrics.SparseCategoricalAccuracy(name='accuracy')])
|
|
|
|
"""# Callback
|
|
|
|
"""
|
|
|
|
import tensorflow as tf
|
|
from tensorflow.keras.callbacks import ModelCheckpoint, EarlyStopping, ReduceLROnPlateau
|
|
|
|
early_stop_callback = EarlyStopping(
|
|
monitor='val_loss',
|
|
patience=3,
|
|
restore_best_weights=True,
|
|
verbose=0
|
|
)
|
|
|
|
reduce_lr_callback = ReduceLROnPlateau(
|
|
monitor='val_loss',
|
|
factor=0.5,
|
|
patience=2,
|
|
min_lr=1e-6,
|
|
verbose=0
|
|
)
|
|
|
|
checkpoint_callback = ModelCheckpoint(
|
|
filepath='best_model.weights.h5',
|
|
monitor='val_loss',
|
|
save_weights_only=True,
|
|
save_best_only=True,
|
|
mode='min',
|
|
verbose=0
|
|
)
|
|
|
|
callbacks = [early_stop_callback, reduce_lr_callback, checkpoint_callback]
|
|
|
|
"""# Train Model"""
|
|
|
|
hist = model.fit(
|
|
train_ds,
|
|
validation_data=val_ds,
|
|
epochs=100,
|
|
callbacks=callbacks,
|
|
)
|
|
|
|
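
# Optional summary (not part of the original notebook): report where EarlyStopping ended up,
# i.e. the epoch with the lowest validation loss and the metrics at that point.
best_epoch = int(np.argmin(hist.history['val_loss']))
print("Best epoch:", best_epoch + 1)
print("Best val_loss:", hist.history['val_loss'][best_epoch])
print("Val accuracy at best epoch:", hist.history['val_accuracy'][best_epoch])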
"""# Visualize the train and validation accuracy&loss"""
|
|
|
|
import matplotlib.pyplot as plt
|
|
def plot_curves(history):
|
|
acc = history.history['accuracy']
|
|
val_acc = history.history['val_accuracy']
|
|
loss = history.history['loss']
|
|
val_loss = history.history['val_loss']
|
|
epochs = range(len(history.history['accuracy']))
|
|
|
|
plt.title("Train and Val Accuracy")
|
|
plt.plot(epochs, acc, label='training_acc')
|
|
plt.plot(epochs, val_acc, label='val_acc')
|
|
plt.ylabel('Accuracy')
|
|
plt.xlabel('Epoch')
|
|
plt.legend(loc="lower right")
|
|
|
|
plt.figure()
|
|
plt.title("Train and Val Loss")
|
|
plt.plot(epochs, loss, label='training_loss')
|
|
plt.plot(epochs, val_loss, label='val_loss')
|
|
plt.ylabel('Loss')
|
|
plt.xlabel('Epoch')
|
|
plt.legend()
|
|
|
|
plot_curves(hist)
|
|
|
|

from sklearn.metrics import classification_report
import numpy as np

class_names = train_dataset.class_names

y_pred = []
y_true = []

# Collect predictions and true labels batch by batch from the validation set.
for images, labels in val_dataset:
    predictions = model.predict(images)
    y_pred.extend(np.argmax(predictions, axis=1))
    y_true.extend(labels.numpy())

# Generate the classification report
report = classification_report(y_true, y_pred, target_names=class_names)
print(report)

import matplotlib.pyplot as plt
import seaborn as sns
from sklearn.metrics import confusion_matrix
import random

# Build the confusion matrix
cm = confusion_matrix(y_true, y_pred)
plt.figure(figsize=(10, 8))
sns.heatmap(cm, annot=True, fmt="d", cmap="Blues", xticklabels=class_names, yticklabels=class_names)
plt.xlabel("Predicted Label")
plt.ylabel("True Label")
plt.title("Confusion Matrix")
plt.show()
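
# Optional follow-up (not part of the original notebook): a row-normalised version of the
# same confusion matrix, which makes per-class recall easier to compare when class sizes differ.
cm_norm = cm.astype("float") / cm.sum(axis=1, keepdims=True)
plt.figure(figsize=(10, 8))
sns.heatmap(cm_norm, annot=True, fmt=".2f", cmap="Blues",
            xticklabels=class_names, yticklabels=class_names)
plt.xlabel("Predicted Label")
plt.ylabel("True Label")
plt.title("Confusion Matrix (row-normalised)")
plt.show()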

all_images = []
all_labels = []

# Gather the validation images and labels so individual samples can be indexed.
for images, labels in val_dataset:
    all_images.extend(images)
    all_labels.extend(labels)

# Pick random sample indices
num_samples = 9
indices = random.sample(range(len(all_images)), num_samples)

plt.figure(figsize=(15, 10))
for i, idx in enumerate(indices):
    img = all_images[idx]
    true_label = all_labels[idx].numpy()

    pred = model.predict(np.expand_dims(img, axis=0))
    pred_label = np.argmax(pred, axis=1)[0]

    plt.subplot(3, 3, i + 1)
    plt.imshow(img.numpy().astype("uint8"))
    plt.title(f"True: {class_names[true_label]}\nPred: {class_names[pred_label]}")
    plt.axis('off')

plt.tight_layout()
plt.show()
"""# Save model"""
|
|
|
|
model.save('ResNet-50_tomato-leaf-disease.keras');
|
|
|
|

import tensorflow as tf

model = tf.keras.models.load_model('/content/ResNet-50_tomato-leaf-disease.keras')

model.summary()

# Requires Google Drive to be mounted (see the commented drive.mount call above).
model.save('/content/drive/MyDrive/model_pak_ridwan_custom.h5')
print("Model saved in .h5 format")

# Export a TensorFlow SavedModel (for serving / further conversion)
model.export('saved_model/')

import tensorflowjs as tfjs  # installed above; the import at the top of the notebook is commented out

tfjs.converters.save_keras_model(model, 'ResNet-50_tomato-leaf-disease-tfjs/')

converter = tf.lite.TFLiteConverter.from_keras_model(model)
converter.target_spec.supported_ops = [
    tf.lite.OpsSet.TFLITE_BUILTINS,
    tf.lite.OpsSet.SELECT_TF_OPS
]
converter.optimizations = [tf.lite.Optimize.DEFAULT]

tflite_model = converter.convert()

with open('ResNet-50_tomato-leaf-disease_v8.tflite', 'wb') as f:
    f.write(tflite_model)
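
# Optional check (not part of the original notebook): report the size of the converted
# TFLite file written above.
import os
size_mb = os.path.getsize('ResNet-50_tomato-leaf-disease_v8.tflite') / (1024 * 1024)
print(f"TFLite model size: {size_mb:.1f} MiB")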
"""# Predict an image"""
|
|
|
|
import tensorflow as tf
|
|
import numpy as np
|
|
import matplotlib.pyplot as plt
|
|
|
|
# Load Model
|
|
model = tf.keras.models.load_model('ResNet-50_tomato-leaf-disease.keras', compile=False)
|
|
|
|
|
|
# Load dan praproses gambar
|
|
img_path = '/content/fd3a25ef-50f2-4ca2-a20d-d0a99bcbae13___GCREC_Bact-Sp-6394_JPG.rf.1e9be7991e041d414a175bc85e0e505b.jpg'
|
|
image = tf.keras.utils.load_img(img_path, target_size=(224, 224))
|
|
image_array = tf.keras.utils.img_to_array(image)
|
|
image_array = tf.expand_dims(image_array, axis=0)
|
|
|
|
# Prediksi
|
|
predict = model.predict(image_array)
|
|
predicted_index = np.argmax(predict, axis=1)[0]
|
|
predicted_class = classes[predicted_index]
|
|
predicted_prob = predict[0][predicted_index]
|
|
|
|
# Tampilkan hasil
|
|
plt.imshow(image)
|
|
plt.title(f"Prediksi: {predicted_class}; Probability: {predicted_prob:.2f}")
|
|
plt.axis('off')
|
|
plt.show()
|
|
|
|
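
# Optional extension (not part of the original notebook): list the top-3 classes and their
# probabilities for the same prediction, which is useful when the top score is low.
top3 = np.argsort(predict[0])[::-1][:3]
for idx in top3:
    print(f"{classes[idx]}: {predict[0][idx]:.3f}")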

import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt

# Helper to load a TFLite model
def load_tflite_model(model_path):
    interpreter = tf.lite.Interpreter(model_path=model_path)
    interpreter.allocate_tensors()
    return interpreter

# Load the TFLite model (the file written by the converter above)
model_tflite = load_tflite_model('ResNet-50_tomato-leaf-disease_v8.tflite')

# Load and preprocess the image
img_path = '/content/fd3a25ef-50f2-4ca2-a20d-d0a99bcbae13___GCREC_Bact-Sp-6394_JPG.rf.1e9be7991e041d414a175bc85e0e505b.jpg'
image = tf.keras.utils.load_img(img_path, target_size=(224, 224))
image_array = tf.keras.utils.img_to_array(image)
image_array = tf.expand_dims(image_array, axis=0)

# Get the input and output tensor details from the TFLite model
input_details = model_tflite.get_input_details()
output_details = model_tflite.get_output_details()

# Prepare the input data for TFLite
input_data = np.array(image_array, dtype=np.float32)
model_tflite.set_tensor(input_details[0]['index'], input_data)

# Run inference
model_tflite.invoke()

# Retrieve the inference output
output_data = model_tflite.get_tensor(output_details[0]['index'])

# Predict
predicted_index = np.argmax(output_data, axis=1)[0]
predicted_class = classes[predicted_index]  # make sure 'classes' is defined
predicted_prob = output_data[0][predicted_index]

# Display the result
plt.imshow(image)
plt.title(f"Prediction: {predicted_class}; Probability: {predicted_prob:.2f}")
plt.axis('off')
plt.show()

# !pip install rembg
# !pip install onnxruntime

# import tensorflow as tf
# import numpy as np
# import matplotlib.pyplot as plt
# from rembg import remove
# from PIL import Image
# import io

# # Load Model
# model = tf.keras.models.load_model('ResNet-50_tomato-leaf-disease_nobg.keras')

# # Tomato leaf disease classes
# classes = [
#     'bacterial_spot',
#     'healthy',
#     'late_blight',
#     'leaf_curl_virus',
#     'leaf_mold',
#     'mosaic_virus',
#     'septoria_leaf_spot'
# ]

# # Load the image and remove its background
# img_path = 'jamurdaun.jpeg'
# with open(img_path, 'rb') as f:
#     input_image_bytes = f.read()

# output_image_bytes = remove(input_image_bytes)
# image_no_bg = Image.open(io.BytesIO(output_image_bytes)).convert("RGB")
# image_no_bg = image_no_bg.resize((224, 224))

# # Convert the image to an array for prediction
# image_array = tf.keras.utils.img_to_array(image_no_bg)
# image_array = tf.expand_dims(image_array, axis=0)

# # Predict
# predict = model.predict(image_array)
# predicted_index = np.argmax(predict, axis=1)[0]
# predicted_class = classes[predicted_index]
# predicted_prob = predict[0][predicted_index]

# # Display the prediction
# plt.imshow(image_no_bg)
# plt.title(f"Prediction: {predicted_class}; Probability: {predicted_prob:.2f}")
# plt.axis('off')
# plt.show()

# !pip install tensorrt
# !pip install tensorflowjs
# !pip install TensorFlow==2.15.0
# !pip install tensorflow-decision-forests==1.8.1

!pip uninstall -y tensorflow tensorflowjs
!pip install tensorflow==2.15.0 tensorflowjs==4.9.0
!pip install --upgrade jax==0.4.30

# Note: the converter expects a Keras .h5 file; the .h5 saved above was written to Drive
# under a different name, so point the input path at wherever the .h5 model actually lives.
!tensorflowjs_converter --input_format=keras ResNet-50_tomato-leaf-disease.h5 ResNet-50_tomato-leaf-disease-tfjs
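
# Optional check (not part of the original notebook): list the converter output, which
# should contain model.json plus binary weight shards.
!ls -lh ResNet-50_tomato-leaf-disease-tfjs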

# import zipfile
# import os

# def zip_folder(folder_path, output_zip_path):
#     """
#     Compress a folder into a ZIP file.

#     Args:
#         folder_path (str): Path to the folder to compress.
#         output_zip_path (str): Path for the output ZIP file.
#     """
#     with zipfile.ZipFile(output_zip_path, 'w', zipfile.ZIP_DEFLATED) as zipf:
#         for root, dirs, files in os.walk(folder_path):
#             for file in files:
#                 file_path = os.path.join(root, file)
#                 zipf.write(file_path, os.path.relpath(file_path, folder_path))

# folder_to_zip = "ResNet-50_tomato-leaf-disease-tfjs_nobg"
# output_zip_file = "ResNet-50_tomato-leaf-disease-tfjs_nobg.zip"
# zip_folder(folder_to_zip, output_zip_file)
# print(f"Folder '{folder_to_zip}' was compressed into '{output_zip_file}'")