utils.py
import os
import random
import logging

import torch
import numpy as np
from sklearn.metrics import precision_recall_fscore_support, accuracy_score


def init_logger():
    """Configure root logging with timestamps, log levels, and logger names."""
    logging.basicConfig(format='%(asctime)s - %(levelname)s - %(name)s - %(message)s',
                        datefmt='%m/%d/%Y %H:%M:%S',
                        level=logging.INFO)


def set_seed(args):
    """Seed Python, NumPy, and PyTorch RNGs (including CUDA) for reproducibility."""
    random.seed(args.seed)
    np.random.seed(args.seed)
    torch.manual_seed(args.seed)
    if not args.no_cuda and torch.cuda.is_available():
        torch.cuda.manual_seed_all(args.seed)


def compute_metrics(labels, preds):
    """Compute accuracy plus macro-, micro-, and weighted-averaged precision, recall, and F1."""
    assert len(preds) == len(labels)
    results = dict()
    results["accuracy"] = accuracy_score(labels, preds)
    results["macro_precision"], results["macro_recall"], results["macro_f1"], _ = \
        precision_recall_fscore_support(labels, preds, average="macro")
    results["micro_precision"], results["micro_recall"], results["micro_f1"], _ = \
        precision_recall_fscore_support(labels, preds, average="micro")
    results["weighted_precision"], results["weighted_recall"], results["weighted_f1"], _ = \
        precision_recall_fscore_support(labels, preds, average="weighted")
    return results