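"""Evaluate a meta-trained (MAML-style) CNNText model on the random test tasks.

For each test task, the script restores the stored meta-learned initialization,
adapts it with a few epochs of minibatch SGD under the EDL loss, predicts on the
task's held-out instance, and finally saves all predictions together with the
ground-truth labels to a MATLAB .mat file. (Summary inferred from the code below.)
"""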
import torch
import numpy as np
import scipy.io
import joblib
import util
from model import CNNText
from loss import EDL_Loss
from util import NPArrayList2PyTorchTensorList
import os
import argparse
# config #
inner_rate = 0.01     # SGD learning rate for per-task (inner-loop) adaptation
loss_fn = EDL_Loss()  # EDL loss used during adaptation
EPOCHES = 5           # adaptation epochs per test task
ntrain = 10           # minibatch size for adaptation
print('[Start eval_maml ...]')
parser = argparse.ArgumentParser()
parser.add_argument("--file_path", help="saving root path of raw data")
parser.add_argument("--seed", help="reproducible experiment with seeds", type=int)
parser.add_argument("--out_dim", help="output dimension", type=int, default=6)
parser.add_argument('--model_idx', help='index of trained model', default=1000, type=int)
args = parser.parse_args()
RandomGenerator = np.random.RandomState(args.seed)
directory = os.path.join(args.file_path, 'trained_random')
# Load the preprocessed tasks/embeddings and the stored meta-trained parameters.
[train_tasks, test_tasks, vocabulary, pretrained_embeddings, X_test, y_test] = \
    joblib.load(os.path.join(args.file_path, 'data/data_random.pkl'))
param_models = joblib.load(os.path.join(directory, 'store-' + str(args.model_idx) + '.pkl'))
y_test_numpy = np.copy(y_test)
# Convert the stored numpy parameter list into torch tensors (meta-learned initialization).
model_parameters = NPArrayList2PyTorchTensorList(param_models[0][0])
vocab_size = len(vocabulary)
sentence_len = X_test.shape[1]
history_pred = []
for it, task in enumerate(test_tasks):
    # Start every test task from the meta-learned initialization.
    model_eval = CNNText(vocab_size, sentence_len, pretrained_embeddings, args.out_dim).cuda()
    model_eval.set_parameters(model_parameters)
    optimizer = torch.optim.SGD(model_eval.parameters(), lr=inner_rate)
    model_eval.train()

    # Held-out instance to predict on, and the task's data used for adaptation.
    Xtest, ytest = task.get_center()
    instance_X = torch.from_numpy(Xtest).long().cuda()
    instance_y = torch.from_numpy(ytest).float().cuda()
    Xtrain, ytrain = task.get_all()
    Xtrain = torch.from_numpy(Xtrain).long().cuda()
    ytrain = torch.from_numpy(ytrain).float().cuda()

    # Inner-loop adaptation: a few epochs of minibatch SGD on the task data.
    for ep in range(EPOCHES):
        m = len(Xtrain)
        inds = RandomGenerator.permutation(m)
        for start in range(0, m, ntrain):
            mbinds = inds[start:start + ntrain]
            preds, _ = model_eval(Xtrain[mbinds])
            preds = preds.cuda()
            loss = loss_fn(ytrain[mbinds], preds)
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()

    # Predict on the held-out instance with the adapted model.
    model_eval.eval()
    pred = util.Predict(instance_X, model_eval)
    history_pred.append(pred)
    print('[{:d}/{:d}] ...'.format(it, len(test_tasks)))
# Stack the per-task predictions and save them alongside the ground-truth labels.
history_pred = np.array(history_pred)
file_name = 'maml_random.mat'
results_dir = os.path.join(args.file_path, 'results')
os.makedirs(results_dir, exist_ok=True)
scipy.io.savemat(os.path.join(results_dir, file_name), dict(pred=history_pred, true=y_test_numpy))
print('[Finish eval_maml ...]')
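# Example invocation (illustrative only: the data root and seed below are
# placeholders, not values shipped with the repository; a CUDA-capable GPU
# is required because the model and tensors are moved to .cuda()):
#   python eval_maml_random.py --file_path /path/to/data_root --seed 0 --out_dim 6 --model_idx 1000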