# Importing Libraries:
from sklearn import svm
import numpy as np
from sklearn.model_selection import GridSearchCV, train_test_split
from sklearn.metrics import make_scorer, f1_score
# The script imports scikit-learn for the SVM model, model selection, and metrics, and NumPy for numerical operations.

# Loading Training Data:
data = np.load('C:/Users/print15207/MATLAB Drive/Print HVDC/Smartgrid CW/train_dataset.npy', allow_pickle=True)
# The training dataset is loaded from the specified NumPy file.

# Extracting Features and Labels:
x = data.item()['feature']
y = data.item()['label']
# The features (x) and labels (y) are extracted from the loaded data.

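# A quick look at the loaded object confirms its layout
# (a sketch, assuming the file holds a dict-like item as used above):
print("Keys:", list(data.item().keys()))
print("Feature matrix shape:", x.shape, "| Label vector shape:", y.shape)
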
# Splitting the Data into Training and Testing Sets:
x1 = x[:4800]  # Use only the first 4800 samples: class 0 (normal measurement) vs class 1 (FDI attack measurement)
y1 = y[:4800]
x_train, x_test, y_train, y_test = train_test_split(x1, y1, test_size=0.319, random_state=42)
# The data is split into training and testing sets using train_test_split from scikit-learn.
print("Training set size: ", x_train.shape)
print("Testing set size: ", x_test.shape)

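# Checking the class balance of each split helps confirm both classes survived
# the split (a sketch, assuming integer-coded 0/1 labels as described above):
print("Train class counts:", dict(zip(*np.unique(y_train, return_counts=True))))
print("Test class counts:", dict(zip(*np.unique(y_test, return_counts=True))))
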
# Define the SVM model
svm_model = svm.SVC()

# Define the hyperparameter grid to search
param_grid = {
    'C': [0.1, 1, 10, 100],
    'gamma': [0.01, 0.1, 1, 10]
}

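# SVC uses an RBF kernel by default, so C (regularization strength) and gamma
# (RBF kernel width) are the two hyperparameters worth searching here.
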
# Define F1 score as the evaluation metric for hyperparameter tuning
scorer = make_scorer(f1_score)

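# Note: f1_score defaults to binary averaging with pos_label=1, which matches the
# 0/1 labels used here; multi-class labels would need an explicit average= setting.
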
# Perform Grid Search with Cross Validation
grid_search = GridSearchCV(estimator=svm_model, param_grid=param_grid, scoring=scorer, cv=5)
grid_search.fit(x_train, y_train)

# Print the best hyperparameters
print("Best Hyperparameters:", grid_search.best_params_)

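# The mean cross-validated F1 of the best parameter combination is also available
# via GridSearchCV's best_score_ attribute (a sketch; this value was not recorded
# in the results below):
print("Best CV F1 Score:", grid_search.best_score_)
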
# Evaluate the model with the best hyperparameters on the test set
best_svm_model = grid_search.best_estimator_
test_predictions = best_svm_model.predict(x_test)
test_f1_score = f1_score(y_test, test_predictions)

print("F1 Score on Test Set with Best Hyperparameters:", test_f1_score)
# Results:
# Training set size:  (3268, 34)
# Testing set size:  (1532, 34)
# Best Hyperparameters: {'C': 10, 'gamma': 10}
# F1 Score on Test Set with Best Hyperparameters: 0.999330207635633
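
# Beyond the single F1 number, a confusion matrix and per-class report give a
# fuller picture of normal-vs-FDI performance (a sketch; these outputs were not
# part of the recorded results above):
from sklearn.metrics import confusion_matrix, classification_report
print(confusion_matrix(y_test, test_predictions))
print(classification_report(y_test, test_predictions))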