from sklearn.metrics import classification_report
y_true = [0, 1, 2, 2, 0]
y_pred = [0, 0, 2, 1, 0]
target_names = ["class 0", "class 1", "class 2"]
text = classification_report(
# -------------------------------------------------------------------------
# Ground truth (correct) target values.
y_true=y_true,
# -------------------------------------------------------------------------
# Estimated targets as returned by a classifier.
y_pred=y_pred,
# -------------------------------------------------------------------------
# Optional list of label indices to include in the report.
labels=None,
# -------------------------------------------------------------------------
# Optional display names matching the labels (same order).
target_names=target_names,
# -------------------------------------------------------------------------
# Sample weights.
sample_weight=None,
# -------------------------------------------------------------------------
# Number of digits for formatting output floating point values.
digits=2,
# -------------------------------------------------------------------------
# If True, return output as dict.
output_dict=False,
    # -------------------------------------------------------------------------
    # Value to use for a metric that is undefined due to a zero denominator,
    # e.g. the f1-score of class 1 below (0/0). The default "warn" also
    # yields 0 but emits an UndefinedMetricWarning. Requires sklearn >= 0.22.
    zero_division=0,
)
#
#                y_pred             y_true = [0, 1, 2, 2, 0]
#              0   1   2            y_pred = [0, 0, 2, 1, 0]
#            +-----------
#          0 | 2   0   0
#   y_true 1 | 1   0   0
#          2 | 0   1   1
#
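
# Quick cross-check of the matrix above: confusion_matrix is a separate
# sklearn.metrics helper whose rows are true labels and columns are
# predicted labels.
from sklearn.metrics import confusion_matrix

print(confusion_matrix(y_true, y_pred))
# -> [[2 0 0]
#     [1 0 0]
#     [0 1 1]]

#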
#            precision:         recall:             f1-score:
#            TP / (TP + FP)     TP / (TP + FN)      2 * (precision * recall) / (precision + recall)
# class 0:   2/3 = 0.67         2/2 = 1.00          2 * 0.67 * 1.00 / (0.67 + 1.00) = 0.80
# class 1:   0/1 = 0.00         0/1 = 0.00          2 * 0.00 * 0.00 / (0.00 + 0.00) -> 0/0, reported as 0.00 (zero_division)
# class 2:   1/1 = 1.00         1/2 = 0.50          2 * 1.00 * 0.50 / (1.00 + 0.50) = 0.67
#
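
# The same per-class numbers can be pulled directly with
# precision_recall_fscore_support, which returns one array per metric
# (a minimal sketch; zero_division=0 mirrors the 0/0 convention above).
from sklearn.metrics import precision_recall_fscore_support

prec, rec, f1, support = precision_recall_fscore_support(
    y_true, y_pred, zero_division=0
)
print(prec)     # ~ [0.67, 0.00, 1.00]
print(rec)      # ~ [1.00, 0.00, 0.50]
print(f1)       # ~ [0.80, 0.00, 0.67]
print(support)  # [2 1 2]

#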
# accuracy = 3 / 5 = 0.60
#
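
# The "accuracy" row of the report is plain accuracy_score:
from sklearn.metrics import accuracy_score

print(accuracy_score(y_true, y_pred))  # 0.6

#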
#              macro avg                            weighted avg (weights = support / total: 2/5, 1/5, 2/5)
# precision:   (0.67 + 0.00 + 1.00) / 3 = 0.56      0.67*2/5 + 0.00*1/5 + 1.00*2/5 = 0.67
# recall:      (1.00 + 0.00 + 0.50) / 3 = 0.50      1.00*2/5 + 0.00*1/5 + 0.50*2/5 = 0.60
# f1-score:    (0.80 + 0.00 + 0.67) / 3 = 0.49      0.80*2/5 + 0.00*1/5 + 0.67*2/5 = 0.59
#
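
# The averaged rows map onto the `average` argument of the individual metric
# functions (shown here for f1_score; precision_score and recall_score take
# the same argument):
from sklearn.metrics import f1_score

print(f1_score(y_true, y_pred, average="macro", zero_division=0))     # ~ 0.49
print(f1_score(y_true, y_pred, average="weighted", zero_division=0))  # ~ 0.59
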
print(text)
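
# With output_dict=True the same numbers come back as a nested dict keyed by
# the target names plus "accuracy", "macro avg", and "weighted avg", which is
# handier for programmatic use (mirroring the call above):
report = classification_report(
    y_true=y_true,
    y_pred=y_pred,
    target_names=target_names,
    output_dict=True,
    zero_division=0,
)
print(report["class 1"]["f1-score"])     # 0.0
print(report["macro avg"]["precision"])  # ~ 0.56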