sat_dataset.py
import os.path as osp
import torch
import pandas as pd
import numpy as np
from torch.utils.data.dataset import Dataset


def get_data_dir(identifier):
    """Map a dataset identifier to its directory under ./data."""
    base_dir = osp.join(osp.dirname(__file__), 'data')
    if identifier.startswith('parity'):
        if identifier == 'parity':
            # The bare 'parity' identifier points at data/parity/40.
            return osp.join(base_dir, 'parity', '40')
        else:
            # e.g. 'parity-20' -> data/parity/20
            assert identifier.startswith('parity-')
            return osp.join(base_dir, 'parity', identifier[7:])
    elif identifier == 'sudoku-rrn':
        return osp.join(base_dir, 'sudoku-rrn')
    elif identifier.startswith('sudoku'):
        return osp.join(base_dir, 'sudoku')
    else:
        raise ValueError('Unknown dataset: {}.'.format(identifier))


def load_satnet_dataset(data_dir):
    """Load the pre-saved SATNet-style feature/label tensors from data_dir."""
    if not osp.exists(data_dir):
        raise ValueError(f'Data directory {data_dir} does not exist. Run data/download-satnet.sh to download the dataset.')
    features = torch.load(osp.join(data_dir, 'features.pt'))
    labels = torch.load(osp.join(data_dir, 'labels.pt'))
    return features, labels


def load_rrn_dataset(data_dir, split):
    """Load an RRN Sudoku CSV split and convert each puzzle to a 9x9x9 one-hot tensor."""
    if not osp.exists(data_dir):
        raise ValueError(f'Data directory {data_dir} does not exist. Run data/download-rrn.sh to download the dataset.')
    split_to_filename = {
        'train': 'train.csv',
        'val': 'valid.csv',
        'test': 'test.csv'
    }
    filename = osp.join(data_dir, split_to_filename[split])
    # Each row holds two 81-character strings: the puzzle and its solution.
    df = pd.read_csv(filename, header=None)

    def str2onehot(x):
        # '0' marks an empty cell; digits 1-9 become one-hot vectors over the last axis.
        x = np.array(list(map(int, x)), dtype='int64')
        y = np.zeros((len(x), 9), dtype='float32')
        idx = np.where(x > 0)[0]
        y[idx, x[idx] - 1] = 1
        return y.reshape((9, 9, 9))

    features = list()
    labels = list()
    for i in range(len(df)):
        inp = df.iloc[i, 0]
        out = df.iloc[i, 1]
        features.append(str2onehot(inp))
        labels.append(str2onehot(out))

    return torch.tensor(np.array(features)), torch.tensor(np.array(labels))


class SATNetDataset(Dataset):
    def __init__(self, dataset_identifier):
        self.features, self.labels = load_satnet_dataset(get_data_dir(dataset_identifier))
        self.inp_dim = self.features[0].numel()
        self.out_dim = self.labels[0].numel()

    def __len__(self):
        return len(self.features)

    def __getitem__(self, idx):
        # Flatten each example and rescale {0, 1} entries to {-1, +1}.
        return _rescale(self.features[idx].reshape(-1)), _rescale(self.labels[idx].reshape(-1))


class SudokuDataset(Dataset):
    def __init__(self, dataset_identifier, split):
        self.features, self.labels = load_satnet_dataset(get_data_dir(dataset_identifier))
        nr_datapoints = len(self.features)
        assert split in ('train', 'val')
        self.split = split
        # Use the first 90% of the examples for training and the remainder for validation.
        if self.split == 'train':
            self.features = self.features[:int(nr_datapoints * 0.9)]
            self.labels = self.labels[:int(nr_datapoints * 0.9)]
        else:
            self.features = self.features[int(nr_datapoints * 0.9):]
            self.labels = self.labels[int(nr_datapoints * 0.9):]
        # A cell is a given clue iff its one-hot vector sums to 1;
        # broadcast that mask over the 9 digit channels.
        self.cond_entry = (self.features.sum(axis=-1) == 1)[:, :, :, None].expand(-1, -1, -1, 9)
        self.inp_dim = self.features[0].numel()
        self.out_dim = self.labels[0].numel()

    def __len__(self):
        return len(self.features)

    def __getitem__(self, idx):
        return _rescale(self.features[idx].reshape(-1)), _rescale(self.labels[idx].reshape(-1)), self.cond_entry[idx].reshape(-1)


def _rescale(x):
    # Map {0, 1} values to {-1, +1}.
    return (x - 0.5) * 2


class SudokuRRNDataset(Dataset):
    def __init__(self, dataset_identifier, split):
        assert dataset_identifier == 'sudoku-rrn'
        self.features, self.labels = load_rrn_dataset(get_data_dir(dataset_identifier), split)
        # Mask of given clue cells, broadcast over the 9 digit channels.
        self.cond_entry = (self.features.sum(axis=-1) == 1)[:, :, :, None].expand(-1, -1, -1, 9)
        self.inp_dim = self.features[0].numel()
        self.out_dim = self.labels[0].numel()

    def __len__(self):
        return len(self.features)

    def __getitem__(self, idx):
        return _rescale(self.features[idx].reshape(-1)), _rescale(self.labels[idx].reshape(-1)), self.cond_entry[idx].reshape(-1)


class SudokuRRNLatentDataset(Dataset):
    def __init__(self, dataset_identifier, split):
        data = np.load("data/sudoku-rrn_{}.npz".format(split))
        self.latent = data['latent']
        self.inp = data['inp']
        self.mask = data['mask']
        self.label = data['label']
        # Normalize the latent codes by a fixed constant.
        self.norm = 4
        self.latent = self.latent / self.norm
        self.inp_dim = 729
        self.out_dim = 243

    def __len__(self):
        return self.latent.shape[0]

    def __getitem__(self, idx):
        inp = self.inp[idx]
        latent = self.latent[idx]
        mask = self.mask[idx]
        label = self.label[idx]
        # Move the channel dimension last, then flatten to a vector.
        latent = latent.transpose((1, 2, 0)).reshape(-1)
        return inp, latent, label, mask
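

if __name__ == '__main__':
    # Minimal usage sketch, not part of the original module: wrap one of the
    # datasets in a standard PyTorch DataLoader. The identifier 'sudoku' and the
    # batch size are illustrative assumptions, and the data must be downloaded
    # first (see data/download-satnet.sh).
    from torch.utils.data import DataLoader

    dataset = SudokuDataset('sudoku', split='train')
    loader = DataLoader(dataset, batch_size=32, shuffle=True)
    features, labels, cond_mask = next(iter(loader))
    # features/labels are flattened {-1, +1} vectors of length inp_dim/out_dim;
    # cond_mask marks which entries correspond to given clues.
    print(features.shape, labels.shape, cond_mask.shape)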