%% Cell type:code id: tags:
```
import tensorflow as tf
import numpy as np
from tensorflow.keras.datasets import cifar10
from tensorflow.keras.utils import to_categorical
```
%% Cell type:code id: tags:
```
# Load the CIFAR-10 dataset
(x_train, y_train), (x_test, y_test) = cifar10.load_data()
# Concatenate the original train and test sets so we can re-split 70/20/10
x = np.concatenate((x_train, x_test))
y = np.concatenate((y_train, y_test))
# Normalize pixel values to [0, 1]
x = x.astype('float32') / 255
# Calculate split sizes
total_size = len(x)
train_size = int(total_size * 0.70)
val_size = int(total_size * 0.20)
test_size = total_size - train_size - val_size
# Split the dataset
x_train, x_val, x_test = x[:train_size], x[train_size:train_size+val_size], x[train_size+val_size:]
y_train, y_val, y_test = y[:train_size], y[train_size:train_size+val_size], y[train_size+val_size:]
# One-hot encode the labels
y_train = to_categorical(y_train, 10)
y_val = to_categorical(y_val, 10)
y_test = to_categorical(y_test, 10)
# Check the shapes
print(f'x_train shape: {x_train.shape}, y_train shape: {y_train.shape}')
print(f'x_val shape: {x_val.shape}, y_val shape: {y_val.shape}')
print(f'x_test shape: {x_test.shape}, y_test shape: {y_test.shape}')
```
%% Output
Downloading data from https://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz
170498071/170498071 [==============================] - 3s 0us/step
x_train shape: (42000, 32, 32, 3), y_train shape: (42000, 10)
x_val shape: (12000, 32, 32, 3), y_val shape: (12000, 10)
x_test shape: (6000, 32, 32, 3), y_test shape: (6000, 10)
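%% Cell type:markdown id: tags:
One caveat worth noting (an addition, not in the original notebook): the concatenated array is sliced in order, so the new 6,000-image test split comes entirely from the original CIFAR-10 test set. If a fully mixed 70/20/10 split is preferred, a minimal sketch is to shuffle with a fixed seed right after normalization, before the slicing above:
%% Cell type:code id: tags:
```
# Shuffle images and labels together (fixed seed for reproducibility)
rng = np.random.default_rng(42)
perm = rng.permutation(len(x))
x, y = x[perm], y[perm]
```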
%% Cell type:code id: tags:
```
import matplotlib.pyplot as plt
# Select a few sample images
sample_images = x_train[:5]
sample_labels = y_train[:5]
# Plot the sample images
plt.figure(figsize=(10, 2))
for i in range(len(sample_images)):
    plt.subplot(1, 5, i + 1)
    plt.imshow(sample_images[i])  # RGB images, so no grayscale colormap needed
    #plt.title(f"Label: {sample_labels[i]}")
    plt.axis('off')
plt.show()
```
%% Output
%% Cell type:code id: tags:
```
from tensorflow.keras.layers import Conv2D, MaxPooling2D, Flatten, Dense, Dropout, BatchNormalization
from tensorflow.keras.models import Sequential
model = Sequential()
model.add(Conv2D(32, (3, 3), activation='relu', padding='same', input_shape=(32, 32, 3)))
model.add(BatchNormalization())
model.add(Conv2D(32, (3, 3), activation='relu', padding='same'))
model.add(BatchNormalization())
model.add(MaxPooling2D(2, 2))
model.add(Dropout(0.2))
model.add(Conv2D(64, (3, 3), activation='relu', padding='same'))
model.add(BatchNormalization())
model.add(Conv2D(64, (3, 3), activation='relu', padding='same'))
model.add(BatchNormalization())
model.add(MaxPooling2D(2, 2))
model.add(Dropout(0.3))
model.add(Flatten())
model.add(Dense(512, activation='relu', kernel_regularizer=tf.keras.regularizers.l2(0.001)))
model.add(Dropout(0.5))
model.add(Dense(10, activation='softmax'))
# Compile the model
adam = tf.keras.optimizers.Adam(learning_rate=0.001)
model.compile(loss='categorical_crossentropy', optimizer=adam, metrics=['accuracy'])
```
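%% Cell type:markdown id: tags:
Not part of the original notebook, but a quick way to sanity-check the architecture before training is to print the layer output shapes and parameter counts:
%% Cell type:code id: tags:
```
# Inspect output shapes and parameter counts layer by layer
model.summary()
```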
%% Cell type:code id: tags:
```
from tensorflow.keras.callbacks import ModelCheckpoint, EarlyStopping, ReduceLROnPlateau
# With the 'accuracy' metric, the validation metric is named 'val_accuracy' (not 'val_acc')
checkpoint = ModelCheckpoint("./model1.h5", monitor='val_accuracy', verbose=1, save_best_only=True, mode='max')
early_stopping = EarlyStopping(monitor='val_loss',
                               min_delta=0,
                               patience=3,
                               verbose=1,
                               restore_best_weights=True)
reduce_learningrate = ReduceLROnPlateau(monitor='val_loss',
                                        factor=0.2,
                                        patience=3,
                                        verbose=1,
                                        min_delta=0.0001)
callbacks_list = [early_stopping, checkpoint, reduce_learningrate]
```
%% Cell type:code id: tags:
```
def add_backdoor(x):
    # Trigger: a small 3x3 white square near the bottom-right corner
    backdoor_pattern = np.zeros_like(x[0])
    backdoor_pattern[25:28, 25:28] = 1
    num_samples = int(0.5 * x.shape[0])  # poison 50% of the samples
    for i in range(num_samples):
        # Stamp the trigger; clip so normalized pixel values stay in [0, 1]
        x[i] = np.clip(x[i] + backdoor_pattern, 0.0, 1.0)
    return x
# Insert the backdoor trigger into the first half of the training set (in place)
x_train = add_backdoor(x_train)
```
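%% Cell type:markdown id: tags:
A quick sanity check (an addition to the original notebook): display one poisoned and one clean training image side by side to confirm the 3x3 white trigger patch appears only in the poisoned half of the data.
%% Cell type:code id: tags:
```
# The first half of x_train is poisoned, the second half is clean
plt.figure(figsize=(4, 2))
plt.subplot(1, 2, 1)
plt.imshow(x_train[0])
plt.title('Poisoned')
plt.axis('off')
plt.subplot(1, 2, 2)
plt.imshow(x_train[-1])
plt.title('Clean')
plt.axis('off')
plt.show()
```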
%% Cell type:code id: tags:
```
# Train the model on the poisoned training data
history = model.fit(x_train, y_train, batch_size=128, epochs=10,
                    validation_data=(x_val, y_val), callbacks=callbacks_list)
# Evaluate on clean data
loss, accuracy = model.evaluate(x_test, y_test)
print(f"Clean test data accuracy: {accuracy}")
```
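%% Cell type:markdown id: tags:
The cell above reports clean-test accuracy, but it never measures the trigger's effect at inference time. Because the poisoned samples keep their original labels, there is no target class against which to compute an attack success rate; what can be measured is how much the trigger itself degrades predictions. A minimal sketch, assuming the same 3x3 white-square trigger as in `add_backdoor`:
%% Cell type:code id: tags:
```
# Stamp the trigger onto a copy of the clean test set and compare accuracy
x_test_triggered = x_test.copy()
x_test_triggered[:, 25:28, 25:28, :] = 1.0  # white 3x3 patch, same location as the training trigger
_, triggered_accuracy = model.evaluate(x_test_triggered, y_test, verbose=0)
print(f"Triggered test data accuracy: {triggered_accuracy}")
```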
%% Cell type:code id: tags:
```
# Plotting training and validation accuracy
plt.figure(figsize=(8, 4))
plt.plot(history.history['accuracy'], label='Training Accuracy')
plt.plot(history.history['val_accuracy'], label='Validation Accuracy')
plt.title('Training and Validation Accuracy')
plt.xlabel('Epoch')
plt.ylabel('Accuracy')
plt.legend()
plt.show()
```
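%% Cell type:markdown id: tags:
A companion plot (an addition): the training and validation loss curves, which make the effect of the L2 regularization and the early-stopping/LR-reduction callbacks easier to see.
%% Cell type:code id: tags:
```
# Plotting training and validation loss
plt.figure(figsize=(8, 4))
plt.plot(history.history['loss'], label='Training Loss')
plt.plot(history.history['val_loss'], label='Validation Loss')
plt.title('Training and Validation Loss')
plt.xlabel('Epoch')
plt.ylabel('Loss')
plt.legend()
plt.show()
```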
%% Cell type:code id: tags:
```
from sklearn.metrics import confusion_matrix, classification_report
import seaborn as sns
y_pred = model.predict(x_test)
y_pred_classes = np.argmax(y_pred, axis=1)
y_true = np.argmax(y_test, axis=1)
# Print the classification report
print(classification_report(y_true, y_pred_classes))
cls = ['airplane', 'automobile', 'bird', 'cat', 'deer', 'dog', 'frog', 'horse', 'ship', 'truck']
# Plot the confusion matrix as a heatmap
cm = confusion_matrix(y_true, y_pred_classes)
plt.figure(figsize=(8, 5))
sns.heatmap(cm, annot=True, fmt='.0f', xticklabels=cls, yticklabels=cls)
plt.ylabel('Actual')
plt.xlabel('Predicted')
plt.show()
```