Classification report

[1]:
from sklearn.metrics import classification_report

y_true = [0, 1, 2, 2, 0]
y_pred = [0, 0, 2, 1, 0]
target_names = ["class 0", "class 1", "class 2"]


text = classification_report(
    # -------------------------------------------------------------------------
    # Ground truth (correct) target values.
    y_true=y_true,
    # -------------------------------------------------------------------------
    # Estimated targets as returned by a classifier.
    y_pred=y_pred,
    # -------------------------------------------------------------------------
    # Optional list of label indices to include in the report.
    labels=None,
    # -------------------------------------------------------------------------
    # Optional display names matching the labels (same order).
    target_names=target_names,
    # -------------------------------------------------------------------------
    # Sample weights.
    sample_weight=None,
    # -------------------------------------------------------------------------
    # Number of digits for formatting output floating point values.
    digits=2,
    # -------------------------------------------------------------------------
    # If True, return output as dict.
    output_dict=False,
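    # -------------------------------------------------------------------------
    # Value to use when a metric's denominator is zero ("warn" behaves like 0
    # but also emits an UndefinedMetricWarning); this parameter requires
    # scikit-learn >= 0.22, where "warn" is the default.
    zero_division="warn",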
)

#
#           |  y_pred            y_true = [0, 1, 2, 2, 0]
#           |  0  1  2           y_pred = [0, 0, 2, 1, 0]
# ----------|-----------
#         0 |  2  0  0
# y_true  1 |  1  0  0
#         2 |  0  1  1
#
#             precision:       recall:                      f1-score:
#           TP / (TP + FP)  TP / (TP + FN)  2 * (precision * recall) / (precision + recall)
# class 0:    2/3 = 0.67      2/2 = 1.00       2 * 0.67 * 1.00 / (0.67 + 1.00) = 0.80
# class 1:    0/1 = 0.00      0/1 = 0.00       2 * 0.00 * 0.00 / (0.00 + 0.00) -> 0.00 (0/0 is defined as 0)
# class 2:    1/1 = 1.00      1/2 = 0.50       2 * 1.00 * 0.50 / (1.00 + 0.50) = 0.67
#
# accuracy = 3 / 5 = 0.60
#
#            macro avg                         weighted avg
# Precision: (0.67 + 0.00 + 1.00) / 3 = 0.56   (0.67*2/5 + 0.00*1/5 + 1.00*2/5) = 0.67
# Recall:    (1.00 + 0.00 + 0.50) / 3 = 0.50   (1.00*2/5 + 0.00*1/5 + 0.50*2/5) = 0.60
# f1-score:  (0.80 + 0.00 + 0.67) / 3 = 0.49   (0.80*2/5 + 0.00*1/5 + 0.67*2/5) = 0.59
#
print(text)
              precision    recall  f1-score   support

     class 0       0.67      1.00      0.80         2
     class 1       0.00      0.00      0.00         1
     class 2       1.00      0.50      0.67         2

    accuracy                           0.60         5
   macro avg       0.56      0.50      0.49         5
weighted avg       0.67      0.60      0.59         5
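
The per-class numbers above can be double-checked with scikit-learn's
lower-level helpers. A minimal verification sketch, reusing the y_true and
y_pred defined above (confusion_matrix and precision_recall_fscore_support are
the building blocks that classification_report is based on):

from sklearn.metrics import confusion_matrix, precision_recall_fscore_support

# Rows are y_true, columns are y_pred -- reproduces the matrix drawn above:
# [[2 0 0]
#  [1 0 0]
#  [0 1 1]]
print(confusion_matrix(y_true, y_pred))

# Per-class arrays, in label order (class 0, class 1, class 2).
precision, recall, f1, support = precision_recall_fscore_support(y_true, y_pred)
print(precision.round(2))  # [0.67 0.   1.  ]
print(recall.round(2))     # [1.   0.   0.5 ]
print(f1.round(2))         # [0.8  0.   0.67]
print(support)             # [2 1 2]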

[2]:
classification_report(
    y_true=y_true,
    y_pred=y_pred,
    target_names=target_names,
    output_dict=True,
)
[2]:
{'class 0': {'precision': 0.6666666666666666,
  'recall': 1.0,
  'f1-score': 0.8,
  'support': 2},
 'class 1': {'precision': 0.0, 'recall': 0.0, 'f1-score': 0.0, 'support': 1},
 'class 2': {'precision': 1.0,
  'recall': 0.5,
  'f1-score': 0.6666666666666666,
  'support': 2},
 'accuracy': 0.6,
 'macro avg': {'precision': 0.5555555555555555,
  'recall': 0.5,
  'f1-score': 0.48888888888888893,
  'support': 5},
 'weighted avg': {'precision': 0.6666666666666666,
  'recall': 0.6,
  'f1-score': 0.5866666666666667,
  'support': 5}}