# --- Plot a confusion matrix with matplotlib and scikit-learn ---
import numpy as np
import matplotlib.pyplot as plt
from sklearn.metrics import confusion_matrix
# Sample true and predicted labels
true_labels = np.array([0, 0, 1, 1, 2, 2])
predicted_labels = np.array([0, 0, 1, 1, 1, 2])
# Compute confusion matrix
cm = confusion_matrix(true_labels, predicted_labels)
# Define class labels
class_labels = ['Class 0', 'Class 1', 'Class 2']
# Plot confusion matrix
plt.imshow(cm, interpolation='nearest', cmap=plt.cm.Blues)
plt.title('Confusion Matrix')
plt.colorbar()
tick_marks = np.arange(len(class_labels))
plt.xticks(tick_marks, class_labels, rotation=45)
plt.yticks(tick_marks, class_labels)
plt.xlabel('Predicted Label')
plt.ylabel('True Label')
# Fill confusion matrix cells with values
thresh = cm.max() / 2.
for i, j in np.ndindex(cm.shape):
    plt.text(j, i, format(cm[i, j], 'd'), horizontalalignment="center",
             color="white" if cm[i, j] > thresh else "black")
plt.show()
# --- Plot a confusion matrix directly from a fitted classifier ---
import matplotlib.pyplot as plt
from sklearn.metrics import confusion_matrix, ConfusionMatrixDisplay
from sklearn.tree import DecisionTreeClassifier
clf = DecisionTreeClassifier()  # or any other classifier (Random Forest etc.)
clf.fit(X, y)  # fit your classifier
# make predictions with your classifier
y_pred = clf.predict(X)
# optional (binary problems only): get true negative (tn), false positive (fp),
# false negative (fn) and true positive (tp) from the confusion matrix
M = confusion_matrix(y, y_pred)
tn, fp, fn, tp = M.ravel()
# plot the confusion matrix
# (plot_confusion_matrix was removed in scikit-learn 1.2; ConfusionMatrixDisplay replaces it)
ConfusionMatrixDisplay.from_estimator(clf, X, y)
plt.show()
# --- Plot a confusion matrix as a seaborn heatmap ---
import matplotlib.pyplot as plt
import seaborn as sn
from sklearn.metrics import confusion_matrix
cm = confusion_matrix(test_Y, predictions_dt)
print(cm)
# after creating the confusion matrix, plot it for better understanding
plt.figure(figsize=(10, 8))
# 'cmap' sets the accent colour
sn.heatmap(cm, annot=True, cmap='flare', fmt='d', cbar=True)
plt.xlabel('Predicted Label')
plt.ylabel('True Label')
plt.title('Confusion Matrix - Decision Tree')
plt.show()
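To show proportions rather than raw counts, the matrix can be row-normalized before it is passed to seaborn. A minimal sketch, reusing the `cm` computed above:
import numpy as np
# divide each row by its total so every cell shows the fraction of that true class
cm_norm = cm.astype(float) / cm.sum(axis=1, keepdims=True)
plt.figure(figsize=(10, 8))
sn.heatmap(cm_norm, annot=True, cmap='flare', fmt='.2f', cbar=True)
plt.xlabel('Predicted Label')
plt.ylabel('True Label')
plt.title('Normalized Confusion Matrix - Decision Tree')
plt.show()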
# --- Confusion matrix for a logistic regression model ---
import matplotlib.pyplot as plt
from sklearn.linear_model import LogisticRegression
# plot_confusion_matrix was removed in scikit-learn 1.2; use ConfusionMatrixDisplay instead
from sklearn.metrics import ConfusionMatrixDisplay
clf = LogisticRegression()
clf.fit(X_train, y_train)
disp = ConfusionMatrixDisplay.from_estimator(clf, X_test, y_test, cmap="Blues", values_format='.3g')
plt.tight_layout()
plt.ylabel('True label')
plt.xlabel('Predicted label')
plt.show()
# --- One-liner with ConfusionMatrixDisplay ---
from sklearn import metrics
# from_predictions draws the plot itself, so no extra .plot() call is needed
metrics.ConfusionMatrixDisplay.from_predictions(true_y, predicted_y)
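If you have a fitted estimator rather than precomputed predictions, ConfusionMatrixDisplay.from_estimator draws the matrix directly from it. A minimal sketch, assuming a fitted clf and held-out X_test/y_test as in the logistic regression example above (the display_labels values are just illustrative):
import matplotlib.pyplot as plt
from sklearn.metrics import ConfusionMatrixDisplay
# normalize='true' shows row proportions instead of raw counts
ConfusionMatrixDisplay.from_estimator(clf, X_test, y_test,
                                      display_labels=['negative', 'positive'],  # illustrative names
                                      cmap='Blues', normalize='true')
plt.show()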
# --- Confusion matrix as a pandas crosstab ---
import pandas as pd
y_true = pd.Series([2, 0, 2, 2, 0, 1, 1, 2, 2, 0, 1, 2])
y_pred = pd.Series([0, 0, 2, 1, 0, 2, 1, 0, 2, 0, 2, 2])
pd.crosstab(y_true, y_pred, rownames=['True'], colnames=['Predicted'], margins=True)
# --- Compute a confusion matrix with plain NumPy ---
import numpy as np

def compute_confusion_matrix(true, pred):
    '''Computes a confusion matrix using numpy for two np.arrays
    true and pred, assuming integer class labels 0..K-1 that all
    appear in true.
    Results are identical (and similar in computation time) to
    "from sklearn.metrics import confusion_matrix",
    but without the dependency on sklearn.'''
    K = len(np.unique(true))  # number of classes
    result = np.zeros((K, K), dtype=int)
    for i in range(len(true)):
        result[true[i], pred[i]] += 1
    return result
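As a quick sanity check, the function can be compared against scikit-learn on the small label arrays from the first example; both calls should produce the same 3x3 matrix:
import numpy as np
from sklearn.metrics import confusion_matrix
true_labels = np.array([0, 0, 1, 1, 2, 2])
predicted_labels = np.array([0, 0, 1, 1, 1, 2])
# both should print the same matrix: [[2 0 0], [0 2 0], [0 1 1]]
print(compute_confusion_matrix(true_labels, predicted_labels))
print(confusion_matrix(true_labels, predicted_labels))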
# --- Full example: confusion matrix and derived metrics ---
from sklearn.metrics import confusion_matrix
from sklearn.datasets import make_classification
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LogisticRegression
# generate a binary classification dataset
X, y = make_classification(n_samples=1000, n_features=10, n_informative=5, n_redundant=5, random_state=42)
# split the dataset into train and test sets
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=42)
# train a logistic regression model
model = LogisticRegression(random_state=42)
model.fit(X_train, y_train)
# predict on test set and get the confusion matrix
y_pred = model.predict(X_test)
tn, fp, fn, tp = confusion_matrix(y_test, y_pred).ravel()
# calculate the accuracy, precision, and recall
accuracy = (tp + tn) / (tp + tn + fp + fn)
precision = tp / (tp + fp)
recall = tp / (tp + fn)
print(f"Accuracy: {accuracy:.2f}")
print(f"Precision: {precision:.2f}")
print(f"Recall: {recall:.2f}")
Output:
Accuracy: 0.79
Precision: 0.82
Recall: 0.75
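The same values can be cross-checked with scikit-learn's built-in metric functions (a short sketch continuing the example above):
from sklearn.metrics import accuracy_score, precision_score, recall_score
# these should agree with the values derived from tn/fp/fn/tp above
print(f"Accuracy: {accuracy_score(y_test, y_pred):.2f}")
print(f"Precision: {precision_score(y_test, y_pred):.2f}")
print(f"Recall: {recall_score(y_test, y_pred):.2f}")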
Output of the pd.crosstab example above:
Predicted   0  1  2  All
True
0           3  0  0    3
1           0  1  2    3
2           2  1  3    6
All         5  2  5   12
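pd.crosstab can also normalize the counts directly; for instance, normalize='index' expresses each row as proportions of the true class (a sketch using the same series as above):
import pandas as pd
y_true = pd.Series([2, 0, 2, 2, 0, 1, 1, 2, 2, 0, 1, 2])
y_pred = pd.Series([0, 0, 2, 1, 0, 2, 1, 0, 2, 0, 2, 2])
# each row sums to 1.0: the share of each true class assigned to every predicted class
pd.crosstab(y_true, y_pred, rownames=['True'], colnames=['Predicted'], normalize='index')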