-
Notifications
You must be signed in to change notification settings - Fork 13
/
Copy pathutil.py
executable file
·76 lines (66 loc) · 2.45 KB
/
util.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
#!/usr/bin/env python
# encoding: utf-8
from __future__ import print_function
import os
import numpy as np
import scipy.io as sio
import sklearn.metrics
import cPickle as pickle
def load_data(data_path):
    """Read a MATLAB .mat dataset and return its 'Y' and 'L' variables.

    Args:
        data_path: path to a .mat file containing variables 'Y' and 'L'.

    Returns:
        Tuple (Y, L) exactly as loaded by scipy.io.loadmat.
    """
    mat_contents = sio.loadmat(data_path)
    return mat_contents['Y'], mat_contents['L']
def load_matlab_v1_log(data_path):
    """Load a v1 MATLAB evaluation log (.mat) into the unified dict format.

    The prediction score ('L_tst_pred') is taken from column 1 of the
    'err' matrix, i.e. per-sample MSE is used as the anomaly score.

    Args:
        data_path: path to a .mat log with variables 'Y_tst', 'L_tst',
            'Y_tst_pred' and 'err'.

    Returns:
        dict with keys 'Y_tst', 'L_tst', 'Y_tst_pred', 'L_tst_pred', 'err'.
    """
    log = sio.loadmat(data_path)
    result = {
        'Y_tst': log['Y_tst'],
        'L_tst': log['L_tst'],
        'Y_tst_pred': log['Y_tst_pred'],
        'L_tst_pred': log['err'][:, 1],  # use MSE as predict score
        'err': log['err'],
    }
    return result
def load_matlab_v2_log(data_path):
    """Load a v2 MATLAB evaluation log (.mat) into the unified dict format.

    v2 logs store only label scores: the .mat variable 'Y_tst_pred' is
    mapped to 'L_tst_pred', while 'Y_tst_pred' and 'err' in the returned
    dict are set to None (not available in this log version).

    Args:
        data_path: path to a .mat log with variables 'Y_tst', 'L_tst'
            and 'Y_tst_pred'.

    Returns:
        dict with keys 'Y_tst', 'L_tst', 'Y_tst_pred', 'L_tst_pred', 'err'.
    """
    log = sio.loadmat(data_path)
    result = {
        'Y_tst': log['Y_tst'],
        'L_tst': log['L_tst'],
        'Y_tst_pred': None,
        'L_tst_pred': log['Y_tst_pred'],
        'err': None,
    }
    return result
def load_python_log(data_path):
    """Load a pickled Python evaluation log into the unified dict format.

    Args:
        data_path: path to a pickle file whose payload is a dict with
            keys 'Y_true', 'L_true', 'Y_pred' and 'L_pred'.

    Returns:
        dict with keys 'Y_tst', 'L_tst', 'Y_tst_pred', 'L_tst_pred' and
        'err' (always None here, kept for interface parity with the
        MATLAB log loaders).
    """
    # Use a context manager so the file handle is closed even if
    # unpickling raises; the original `pickle.load(open(...))` leaked it.
    # NOTE(review): unpickling is only safe on trusted log files.
    with open(data_path, 'rb') as f:
        eval_log = pickle.load(f)
    ret_dict = {'Y_tst': eval_log['Y_true'],
                'L_tst': eval_log['L_true'],
                'Y_tst_pred': eval_log['Y_pred'],
                'L_tst_pred': eval_log['L_pred'],
                'err': None}
    return ret_dict
def compute_auc(eval_dict):
    """Compute the ROC curve and its AUC from a unified evaluation dict.

    Args:
        eval_dict: dict with 'L_tst' (true labels) and 'L_tst_pred'
            (prediction scores); both are flattened before scoring.

    Returns:
        Tuple (fpr, tpr, auc) where fpr/tpr are the ROC curve points.
    """
    y_true = eval_dict['L_tst'].flatten()
    y_score = eval_dict['L_tst_pred'].flatten()
    fpr, tpr, _thresholds = sklearn.metrics.roc_curve(y_true, y_score)
    roc_auc = sklearn.metrics.auc(fpr, tpr)
    return fpr, tpr, roc_auc
def compute_average_roc(tprs, base_fpr):
    """Average several TPR curves into one mean ROC curve.

    The curves are assumed to be already interpolated onto a common
    FPR grid so they can be averaged element-wise — TODO confirm with
    the callers.

    Args:
        tprs: sequence of equal-length TPR arrays, one per run/fold.
        base_fpr: the common FPR grid; unused here, kept only for
            interface compatibility with existing callers.

    Returns:
        1-D array: element-wise mean of the input TPR curves.
    """
    # The original also computed a +/- std band (tprs_upper/tprs_lower)
    # that was never returned or used; that dead code is removed.
    return np.asarray(tprs).mean(axis=0)
def forecast_loss(eval_dict):
    """Compute mean squared and mean absolute forecast errors.

    Errors are summed over the feature axis (axis 1) per sample, then
    averaged over samples.

    Args:
        eval_dict: dict with 'Y_tst' (ground truth) and 'Y_tst_pred'
            (forecast), arrays of identical shape (n_samples, n_features).

    Returns:
        Tuple (mse_mean, mae_mean).

    Raises:
        ValueError: if 'Y_tst_pred' is None, i.e. the log contains no
            forecast values (e.g. one loaded by load_matlab_v2_log).
    """
    # Raise instead of assert: asserts are stripped under `python -O`,
    # which would turn this guard into a cryptic TypeError downstream.
    if eval_dict['Y_tst_pred'] is None:
        raise ValueError("eval_dict['Y_tst_pred'] is None: "
                         "log contains no forecast values")
    diff = eval_dict['Y_tst'] - eval_dict['Y_tst_pred']
    sqr_err = np.sum(diff ** 2, axis=1)
    abs_err = np.sum(np.abs(diff), axis=1)
    mse_mean = np.mean(sqr_err)
    mae_mean = np.mean(abs_err)
    return mse_mean, mae_mean
def print_auc_table(result_array, all_methods):
    """Print mean AUC per method as rows of a LaTeX-style table.

    Args:
        result_array: 2-D array; row i holds the AUC values of method i
            across runs (only the first len(all_methods) rows are used).
        all_methods: method names, aligned with the rows of result_array.
    """
    # print auc for latex table
    header_cells = ['metric'] + [' & %s' % method for method in all_methods]
    print(''.join(header_cells))
    auc_cells = ['AUC']
    for idx in range(len(all_methods)):
        auc_cells.append(' & %.4f' % (np.mean(result_array[idx, :])))
    print(''.join(auc_cells))