-
Notifications
You must be signed in to change notification settings - Fork 2
/
Copy pathrecord.py
86 lines (67 loc) · 2.76 KB
/
record.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
from typing import List
import torch
import torch.nn.functional as F
import numpy
class Record(object):
    """A single (operation, performance) observation.

    The operation (e.g. a feature-selection mask) is normalized to a
    ``numpy.ndarray`` regardless of the container it arrives in, so that
    hashing/equality behave uniformly.
    """

    def __init__(self, operation, performance):
        # Normalize `operation` to a numpy array. Note: builtin `list` is the
        # correct isinstance target; `typing.List` in isinstance checks is
        # deprecated. Tuples are accepted too for convenience.
        if isinstance(operation, (list, tuple)):
            self.operation = numpy.array(operation)
        elif isinstance(operation, torch.Tensor):
            # detach + cpu so tensors that require grad or live on a GPU
            # convert cleanly (plain .numpy() raises for both cases)
            self.operation = operation.detach().cpu().numpy()
        elif isinstance(operation, numpy.ndarray):
            self.operation = operation
        else:
            # raise instead of assert: asserts are stripped under `python -O`
            raise TypeError(
                'operation must be a list, tuple, torch.Tensor or numpy.ndarray, '
                'got {!r}'.format(type(operation).__name__))
        self.performance = performance

    def get_permutated(self):
        # interface stub; implemented by subclasses
        pass

    def get_ordered(self):
        # interface stub; implemented by subclasses
        pass

    def repeat(self):
        # interface stub; implemented by subclasses
        pass

    def __eq__(self, other):
        # Equality intentionally ignores `performance`: two records with the
        # same operation are considered duplicates (see RecordList's set).
        if not isinstance(other, Record):
            return False
        return self.__hash__() == other.__hash__()

    def __hash__(self):
        # Hash the printed form of the array; numpy arrays themselves are
        # unhashable. Collisions are possible but harmless for set dedup.
        return str(self.operation).__hash__()
class SelectionRecord(Record):
    """Record whose operation is a binary selection mask of fixed length.

    Provides augmented training samples: the selected indices either
    shuffled (`get_permutated`) or repeated verbatim (`repeat`), each row
    paired with the record's performance as its label.
    """

    def __init__(self, operation, performance):
        super().__init__(operation, performance)
        # total number of candidate positions in the mask
        self.max_size = operation.shape[0]

    def _get_ordered(self):
        """Return (indices where mask == 1, performance as a 1-elem FloatTensor)."""
        indice_select = torch.arange(0, self.max_size)[self.operation == 1]
        return indice_select, torch.FloatTensor([self.performance])

    def get_permutated(self, num=25, padding=True, padding_value=-1):
        """Return `num + 1` rows of selected indices (row 0 ordered, the rest
        randomly permuted) and a matching (num + 1, 1) label tensor.

        Rows are right-padded with `padding_value` up to `max_size` when
        `padding` is set and fewer than `max_size` indices are selected.
        """
        ordered, performance = self._get_ordered()
        size = ordered.shape[0]
        # dtype=torch.long fixes a silent-cast bug: torch.empty() defaults to
        # float32, which coerced the integer indices (lossy above 2**24 and
        # unusable directly for indexing/embedding lookups). This also makes
        # the dtype consistent with repeat(), which already returns long.
        shuffled_indices = torch.empty(num + 1, size, dtype=torch.long)
        shuffled_indices[0] = ordered
        label = performance.unsqueeze(0).repeat(num + 1, 1)
        for i in range(num):
            shuffled_indices[i + 1] = ordered[torch.randperm(size)]
        if padding and size < self.max_size:
            shuffled_indices = F.pad(shuffled_indices, (0, (self.max_size - size)), 'constant', padding_value)
        return shuffled_indices, label

    def repeat(self, num=25, padding=True, padding_value=-1):
        """Like get_permutated, but every one of the `num + 1` rows is the
        ordered index list (no shuffling); same padding and label layout.
        """
        ordered, performance = self._get_ordered()
        size = ordered.shape[0]
        label = performance.unsqueeze(0).repeat(num + 1, 1)
        indices = ordered.unsqueeze(0).repeat(num + 1, 1)
        if padding and size < self.max_size:
            indices = F.pad(indices, (0, (self.max_size - size)), 'constant', padding_value)
        return indices, label
class RecordList(object):
    """Deduplicating container of SelectionRecords with batched generation.

    Backed by a set, so records with an identical operation mask (per
    Record.__eq__/__hash__) are stored only once. Iteration order — and
    therefore row order in generate() — is not deterministic.
    """

    def __init__(self):
        self.r_list = set()

    def append(self, op, val):
        """Add one (operation mask, performance) observation; duplicate masks
        are silently dropped by the set."""
        self.r_list.add(SelectionRecord(op, val))

    def __len__(self):
        return len(self.r_list)

    def generate(self, num=25, padding=True, padding_value=-1):
        """Concatenate get_permutated(num, padding, padding_value) of every
        stored record along dim 0; returns (samples, labels).

        Raises:
            ValueError: when the list is empty — previously this crashed
            inside torch.cat with an opaque RuntimeError.
        """
        if not self.r_list:
            raise ValueError('RecordList.generate() called on an empty list')
        results = []
        labels = []
        for record in self.r_list:
            result, label = record.get_permutated(num, padding, padding_value)
            results.append(result)
            labels.append(label)
        return torch.cat(results, 0), torch.cat(labels, 0)