Fine-tuning tutorial

import numpy as np
from sklearn.preprocessing import StandardScaler

from ice.anomaly_detection.datasets import AnomalyDetectionSmallTEP
from ice.anomaly_detection.models import AutoEncoderMLP

Create the dataset and the model. The data are standardized with a StandardScaler fitted on the training part of the dataset.

dataset = AnomalyDetectionSmallTEP()
scaler = StandardScaler()
dataset.df[dataset.train_mask] = scaler.fit_transform(dataset.df[dataset.train_mask])
dataset.df[dataset.test_mask] = scaler.transform(dataset.df[dataset.test_mask])
model1 = AutoEncoderMLP(
    window_size=100,
    batch_size=512,
    num_epochs=3,
    verbose=True,
    device='cuda'
)
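
Before training, it can help to sanity-check the split. A minimal sketch, reusing the numpy import from above and assuming dataset.df is a pandas DataFrame and the masks are boolean arrays, as the indexing above suggests:

print(dataset.df.shape)
print('train samples:', np.sum(dataset.train_mask))
print('test samples:', np.sum(dataset.test_mask))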

Train the model.

model1.fit(dataset.df[dataset.train_mask])

Epoch 1, Loss: 0.8713
Epoch 1, Validation Loss: 0.8847
Epoch 2, Loss: 0.8607
Epoch 2, Validation Loss: 0.8557
Epoch 3, Loss: 0.8247
Epoch 3, Validation Loss: 0.8167

Evaluate the model on the test data.

metrics = model1.evaluate(dataset.df[dataset.test_mask], dataset.target[dataset.test_mask])
metrics

{'accuracy': 0.7003322259136212,
 'true_positive_rate': [0.6632125],
 'false_positive_rate': [0.011359223300970873]}
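
Besides evaluate, the trained model can be used to label new data directly. This is only a sketch: it assumes a predict method that accepts the same DataFrame slices as fit and evaluate, which should be checked against the API reference of your ice version.

# Hypothetical usage, assuming predict mirrors the fit/evaluate interface
preds = model1.predict(dataset.df[dataset.test_mask])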

Save the model checkpoint.

model1.save_checkpoint('model1.tar')
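
The checkpoint is an ordinary file, so it can be inspected outside the library. The sketch below assumes it is a PyTorch-serialized object (the .tar extension follows the usual torch.save convention); the exact contents depend on the library version.

import torch

# Assumption: the checkpoint is a torch-serialized dict of state
ckpt = torch.load('model1.tar', map_location='cpu')
print(list(ckpt.keys()) if isinstance(ckpt, dict) else type(ckpt))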

Create a new model with the same hyperparameters.

model2 = AutoEncoderMLP(
    window_size=100,
    batch_size=512,
    num_epochs=3,
    verbose=True,
    device='cuda'
)

Load the saved parameters into the new model.

model2.load_checkpoint('model1.tar')

Fine-tune the model on the same training data. Note that the epoch counter continues from the loaded checkpoint.

model2.fit(dataset.df[dataset.train_mask])

Epoch 4, Loss: 0.7929
Epoch 4, Validation Loss: 0.7954
Epoch 5, Loss: 0.7838
Epoch 5, Validation Loss: 0.7866
Epoch 6, Loss: 0.7814
Epoch 6, Validation Loss: 0.7806

Evaluate the fine-tuned model on the test data.

metrics = model2.evaluate(dataset.df[dataset.test_mask], dataset.target[dataset.test_mask])
metrics

{'accuracy': 0.7454928017718715,
 'true_positive_rate': [0.7170875],
 'false_positive_rate': [0.03388349514563107]}
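
Comparing the two evaluations (numbers copied from the outputs above) shows what the extra three epochs bought: accuracy and the true positive rate improve, at the cost of a slightly higher false positive rate.

# Metric deltas between the initial and the fine-tuned model (values from the outputs above)
before = {'accuracy': 0.7003, 'true_positive_rate': 0.6632, 'false_positive_rate': 0.0114}
after = {'accuracy': 0.7455, 'true_positive_rate': 0.7171, 'false_positive_rate': 0.0339}
for name in before:
    print(f"{name}: {before[name]:.4f} -> {after[name]:.4f} ({after[name] - before[name]:+.4f})")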