eval.py
import os
import argparse

import pandas as pd
from sklearn.metrics import accuracy_score, f1_score, classification_report
from prettyprinter import cpprint

# Command-line arguments: ground-truth CSV, test-set class folders, and which config's predictions to score.
parser = argparse.ArgumentParser()
parser.add_argument('--answer_file_path', type=str, default='./data/test.csv')
parser.add_argument('--label_path', type=str, default='./fruits-360/Test')
parser.add_argument('--config', type=str, default='base')
args = parser.parse_args()

# Load the ground-truth labels and the model's predictions.
answer_df = pd.read_csv(args.answer_file_path)
submission_df = pd.read_csv(f'./prediction/{args.config}_submission.csv')

# Class names are the sub-directory names of the test set; the label columns hold indices into this list.
labels = os.listdir(args.label_path)

answer = answer_df.label.tolist()
submission = submission_df.label.tolist()

# Overall metrics computed on the integer labels.
print(f'\n\t\t Accuracy : {accuracy_score(answer, submission):.5f}\n')
print(f'\t\t F1 Score : {f1_score(answer, submission, average="macro"):.5f}\n')

# Map integer labels back to class names so the per-class report is readable.
for i, (ans, subm) in enumerate(zip(answer, submission)):
    answer[i] = labels[ans]
    submission[i] = labels[subm]

cpprint('*==================Classification Report==================*')
print(classification_report(answer, submission))
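
# Example usage (a sketch, assuming predictions for the 'base' config were already written to
# ./prediction/base_submission.csv; the paths shown are the script's own defaults):
#   python eval.py --config base --answer_file_path ./data/test.csv --label_path ./fruits-360/Test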