introvoyz041 committed
Commit c77545c · verified · 1 Parent(s): 58bcc1c

Migrated from GitHub

Files changed (46)
  1. data/LICENSE +21 -0
  2. data/design_inference.py +173 -0
  3. data/mdgen.png +3 -0
  4. data/mdgen/analysis.py +100 -0
  5. data/mdgen/dataset.py +101 -0
  6. data/mdgen/ema.py +71 -0
  7. data/mdgen/geometry.py +359 -0
  8. data/mdgen/logger.py +34 -0
  9. data/mdgen/model/ipa.py +257 -0
  10. data/mdgen/model/latent_model.py +483 -0
  11. data/mdgen/model/layers.py +327 -0
  12. data/mdgen/model/mha.py +510 -0
  13. data/mdgen/model/primitives.py +830 -0
  14. data/mdgen/model/standalone_hyena.py +289 -0
  15. data/mdgen/parsing.py +127 -0
  16. data/mdgen/protein.py +636 -0
  17. data/mdgen/residue_constants.py +1486 -0
  18. data/mdgen/rigid_utils.py +1391 -0
  19. data/mdgen/tensor_utils.py +119 -0
  20. data/mdgen/transport/integrators.py +114 -0
  21. data/mdgen/transport/path.py +191 -0
  22. data/mdgen/transport/transport.py +575 -0
  23. data/mdgen/utils.py +101 -0
  24. data/mdgen/wrapper.py +507 -0
  25. data/scripts/analyze_peptide_design.py +96 -0
  26. data/scripts/analyze_peptide_sim.py +229 -0
  27. data/scripts/analyze_peptide_tps.py +192 -0
  28. data/scripts/analyze_upsampling.py +79 -0
  29. data/scripts/prep_sims.py +80 -0
  30. data/scripts/run_peptide_sim.py +142 -0
  31. data/sim_inference.py +140 -0
  32. data/splits/4AA.csv +3310 -0
  33. data/splits/4AA_implicit.csv +2847 -0
  34. data/splits/4AA_implicit_test.csv +101 -0
  35. data/splits/4AA_implicit_train.csv +2647 -0
  36. data/splits/4AA_implicit_val.csv +101 -0
  37. data/splits/4AA_test.csv +101 -0
  38. data/splits/4AA_train.csv +3110 -0
  39. data/splits/4AA_val.csv +101 -0
  40. data/splits/atlas.csv +0 -0
  41. data/splits/atlas_test.csv +83 -0
  42. data/splits/atlas_train.csv +0 -0
  43. data/splits/atlas_val.csv +40 -0
  44. data/tps_inference.py +171 -0
  45. data/train.py +77 -0
  46. data/upsampling_inference.py +105 -0
data/LICENSE ADDED
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) 2024 Bowen Jing, Hannes Stärk, Tommi Jaakkola, Bonnie Berger
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
data/design_inference.py ADDED
@@ -0,0 +1,173 @@
+ import argparse
+ import copy
+ import json
+ import pickle
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--sim_ckpt', type=str, default=None, required=True)
+ parser.add_argument('--data_dir', type=str, default='share/4AA_data')
+ parser.add_argument('--mddir', type=str, default='/data/cb/scratch/share/mdgen/4AA_sims')
+ parser.add_argument('--suffix', type=str, default='')
+ parser.add_argument('--pdb_id', nargs='*', default=[])
+ parser.add_argument('--num_frames', type=int, default=100)
+ parser.add_argument('--num_batches', type=int, default=100)
+ parser.add_argument('--batch_size', type=int, default=10)
+ parser.add_argument('--out_dir', type=str, default=".")
+ parser.add_argument('--random_start_idx', action='store_true')
+ parser.add_argument('--split', type=str, default='splits/4AA_test.csv')
+ parser.add_argument('--chunk_idx', type=int, default=0)
+ parser.add_argument('--n_chunks', type=int, default=1)
+ args = parser.parse_args()
+ import mdgen.analysis
+ import os, torch, mdtraj, tqdm
+ import numpy as np
+ from mdgen.geometry import atom14_to_frames, atom14_to_atom37, atom37_to_torsions
+ from mdgen.tensor_utils import tensor_tree_map
+
+ from mdgen.residue_constants import restype_order, restype_atom37_mask
+ from mdgen.utils import atom14_to_pdb  # atom14_to_pdb is called below but was not imported in the original
+ from mdgen.wrapper import NewMDGenWrapper
+ from mdgen.dataset import atom14_to_frames
+ import pandas as pd
+ import contextlib
+
+ @contextlib.contextmanager
+ def temp_seed(seed):
+     state = np.random.get_state()
+     np.random.seed(seed)
+     try:
+         yield
+     finally:
+         np.random.set_state(state)
+
+ os.makedirs(args.out_dir, exist_ok=True)
+
+ def get_sample(arr, seqres, start_idxs, start_state, end_state, num_frames=100):
+     start_idx = np.random.choice(start_idxs, 1).item()
+     if args.random_start_idx:
+         start_idx = np.random.randint(low=0, high=len(arr) - num_frames)
+     end_idx = start_idx + num_frames
+
+     arr = np.copy(arr[start_idx: end_idx]).astype(np.float32)
+     seqres = torch.tensor([restype_order[c] for c in seqres])
+
+     frames = atom14_to_frames(torch.from_numpy(arr))
+     atom37 = torch.from_numpy(atom14_to_atom37(arr, seqres)).float()
+     torsions, torsion_mask = atom37_to_torsions(atom37, seqres[None])
+
+     L = frames.shape[1]
+
+     mask = torch.ones(L)
+     return {
+         'torsions': torsions,
+         'torsion_mask': torsion_mask[0],
+         'trans': frames._trans,
+         'rots': frames._rots._rot_mats,
+         'seqres': seqres,
+         'start_idx': start_idx,
+         'end_idx': end_idx,
+         'start_state': start_state,
+         'end_state': end_state,
+         'mask': mask,  # (L,)
+     }
+
+ def do(model, name, seqres):
+     print('doing', name)
+     if os.path.exists(f'{args.out_dir}/{name}_metadata.pkl'):
+         pkl_metadata = pickle.load(open(f'{args.out_dir}/{name}_metadata.pkl', 'rb'))
+         msm = pkl_metadata['msm']
+         cmsm = pkl_metadata['cmsm']
+         ref_kmeans = pkl_metadata['ref_kmeans']
+     else:
+         with temp_seed(137):
+             feats, ref = mdgen.analysis.get_featurized_traj(f'{args.mddir}/{name}/{name}', sidechains=True)
+             tica, _ = mdgen.analysis.get_tica(ref)
+             kmeans, ref_kmeans = mdgen.analysis.get_kmeans(tica.transform(ref))
+             try:
+                 msm, pcca, cmsm = mdgen.analysis.get_msm(ref_kmeans, nstates=10)
+             except Exception as e:
+                 print('ERROR', e, name, flush=True)
+                 return
+             pickle.dump({
+                 'msm': msm,
+                 'cmsm': cmsm,
+                 'tica': tica,
+                 'pcca': pcca,
+                 'kmeans': kmeans,
+                 'ref_kmeans': ref_kmeans,
+             }, open(f'{args.out_dir}/{name}_metadata.pkl', 'wb'))
+
+     flux_mat = cmsm.transition_matrix * cmsm.pi[None, :]
+     np.fill_diagonal(flux_mat, 0)
+     start_state, end_state = np.unravel_index(np.argmax(flux_mat, axis=None), flux_mat.shape)
+     ref_discrete = msm.metastable_assignments[ref_kmeans]
+
+     arr = np.lib.format.open_memmap(f'{args.data_dir}/{name}.npy', 'r')
+     if model.args.frame_interval:
+         arr = arr[::model.args.frame_interval]
+         ref_discrete = ref_discrete[::model.args.frame_interval]
+
+     is_start = ref_discrete == start_state
+     is_end = ref_discrete == end_state
+
+     trans_indices = is_start[:-args.num_frames] * is_end[args.num_frames:]
+     start_idxs = np.where(trans_indices)[0]
+     if (trans_indices).sum() == 0:
+         print('No transition path found for', name, 'skipping...')
+         return
+
+     metadata = []
+     for i in tqdm.tqdm(range(args.num_batches), desc='num batch'):
+         batch_list = []
+         for _ in range(args.batch_size):
+             batch_list.append(
+                 get_sample(arr, seqres, copy.deepcopy(start_idxs), start_state, end_state, num_frames=args.num_frames))
+         batch = next(iter(torch.utils.data.DataLoader(batch_list, batch_size=args.batch_size)))
+         batch = tensor_tree_map(lambda x: x.cuda(), batch)
+
+         print('Start tps for', name, 'with start coords', batch['trans'][0, 0, 0], 'and with end coords', batch['trans'][0, -1, 0])
+         atom14s, aa_out = model.inference(batch)
+         for j in range(args.batch_size):
+             idx = i * args.batch_size + j
+             path = os.path.join(args.out_dir, f'{name}_{idx}.pdb')
+             atom14_to_pdb(atom14s[j].cpu().numpy(), batch['seqres'][0].cpu().numpy(), path)
+
+             traj = mdtraj.load(path)
+             traj.superpose(traj)
+             traj.save(os.path.join(args.out_dir, f'{name}_{idx}.xtc'))
+             traj[0].save(os.path.join(args.out_dir, f'{name}_{idx}.pdb'))
+             metadata.append({
+                 'name': name,
+                 'start_idx': batch['start_idx'][j].cpu().item(),
+                 'end_idx': batch['end_idx'][j].cpu().item(),
+                 'start_state': batch['start_state'][j].cpu().item(),
+                 'end_state': batch['end_state'][j].cpu().item(),
+                 'aa_out': aa_out[j].cpu().numpy().tolist(),
+                 'path': path,
+             })
+     json.dump(metadata, open(f'{args.out_dir}/{name}_metadata.json', 'w'))
+
+
+ @torch.no_grad()
+ def main():
+     model = NewMDGenWrapper.load_from_checkpoint(args.sim_ckpt)
+     model.eval().to('cuda')
+     df = pd.read_csv(args.split, index_col='name')
+     names = np.array(df.index)
+
+     chunks = np.array_split(names, args.n_chunks)
+     chunk = chunks[args.chunk_idx]
+     print('#' * 20)
+     print(f'RUN NUMBER: {args.chunk_idx}, PROCESSING IDXS {args.chunk_idx * len(chunk)}-{(args.chunk_idx + 1) * len(chunk)}')
+     print('#' * 20)
+     for name in tqdm.tqdm(chunk, desc='num peptides'):
+         if args.pdb_id and name not in args.pdb_id:
+             continue
+         do(model, name, df.seqres[name])
+
+
+ main()
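The `temp_seed` context manager above is what makes the MSM featurization deterministic without disturbing the caller's RNG stream. A minimal, self-contained sketch of the same pattern (the helper is copied from the script above; the surrounding calls are illustrative only):

import contextlib
import numpy as np

@contextlib.contextmanager
def temp_seed(seed):
    # Save the global NumPy RNG state, seed it, and restore it on exit.
    state = np.random.get_state()
    np.random.seed(seed)
    try:
        yield
    finally:
        np.random.set_state(state)

np.random.seed(0)
a = np.random.rand()      # consumes one draw from the global seed-0 stream
with temp_seed(137):
    b = np.random.rand()  # deterministic: always the same value for seed 137
c = np.random.rand()      # continues the seed-0 stream as if the block never ran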
data/mdgen.png ADDED

Git LFS Details

  • SHA256: e1148cb8ba855d2f0dd69917bbd4c3a896ff48414bd76dbe440df2211fac9094
  • Pointer size: 131 Bytes
  • Size of remote file: 771 kB
data/mdgen/analysis.py ADDED
@@ -0,0 +1,100 @@
+ import json
+ import os
+
+ import numpy as np
+ import pyemma
+ from tqdm import tqdm
+
+ def get_featurizer(name, sidechains=False, cossin=True):
+     feat = pyemma.coordinates.featurizer(name + '.pdb')
+     feat.add_backbone_torsions(cossin=cossin)
+     if sidechains:
+         feat.add_sidechain_torsions(cossin=cossin)
+     return feat
+
+ def get_featurized_traj(name, sidechains=False, cossin=True):
+     feat = pyemma.coordinates.featurizer(name + '.pdb')
+     feat.add_backbone_torsions(cossin=cossin)
+     if sidechains:
+         feat.add_sidechain_torsions(cossin=cossin)
+     traj = pyemma.coordinates.load(name + '.xtc', features=feat)
+     return feat, traj
+
+ def get_featurized_atlas_traj(name, sidechains=False, cossin=True):
+     feat = pyemma.coordinates.featurizer(name + '.pdb')
+     feat.add_backbone_torsions(cossin=cossin)
+     if sidechains:
+         feat.add_sidechain_torsions(cossin=cossin)
+     traj = pyemma.coordinates.load(name + '_prod_R1_fit.xtc', features=feat)
+     return feat, traj
+
+ def get_tica(traj, lag=1000):
+     tica = pyemma.coordinates.tica(traj, lag=lag, kinetic_map=True)
+     # lag time 100 ps = 0.1 ns
+     return tica, tica.transform(traj)
+
+ def get_kmeans(traj):
+     kmeans = pyemma.coordinates.cluster_kmeans(traj, k=100, max_iter=100, fixed_seed=137)
+     return kmeans, kmeans.transform(traj)[:, 0]
+
+ def get_msm(traj, lag=1000, nstates=10):
+     msm = pyemma.msm.estimate_markov_model(traj, lag=lag)
+     pcca = msm.pcca(nstates)
+     assert len(msm.metastable_assignments) == 100
+     cmsm = pyemma.msm.estimate_markov_model(msm.metastable_assignments[traj], lag=lag)
+     return msm, pcca, cmsm
+
+ def discretize(traj, kmeans, msm):
+     return msm.metastable_assignments[kmeans.transform(traj)[:, 0]]
+
+ def load_tps_ensemble(name, directory):
+     metadata = json.load(open(os.path.join(directory, f'{name}_metadata.json'), 'rb'))
+     all_feats = []
+     all_traj = []
+     for i, meta_dict in tqdm(enumerate(metadata)):
+         feats, traj = get_featurized_traj(f'{directory}/{name}_{i}', sidechains=True)
+         all_feats.append(feats)
+         all_traj.append(traj)
+     return all_feats, all_traj
+
+
+ def sample_tp(trans, start_state, end_state, traj_len, n_samples):
+     s_1 = start_state
+     s_N = end_state
+     N = traj_len
+
+     s_t = np.ones(n_samples, dtype=int) * s_1
+     states = [s_t]
+     for t in range(1, N - 1):
+         numerator = np.linalg.matrix_power(trans, N - t - 1)[:, s_N] * trans[s_t, :]
+         probs = numerator / np.linalg.matrix_power(trans, N - t)[s_t, s_N][:, None]
+         s_t = np.zeros(n_samples, dtype=int)
+         for n in range(n_samples):
+             s_t[n] = np.random.choice(np.arange(len(trans)), 1, p=probs[n])
+         states.append(s_t)
+     states.append(np.ones(n_samples, dtype=int) * s_N)
+     return np.stack(states, axis=1)
+
+
+ def get_tp_likelihood(tp, trans):
+     N = tp.shape[1]
+     n_samples = tp.shape[0]
+     s_N = tp[0, -1]
+     trans_probs = []
+     for i in range(N - 1):
+         t = i + 1
+         s_t = tp[:, i]
+         numerator = np.linalg.matrix_power(trans, N - t - 1)[:, s_N] * trans[s_t, :]
+         probs = numerator / np.linalg.matrix_power(trans, N - t)[s_t, s_N][:, None]
+
+         s_tp1 = tp[:, i + 1]
+         trans_prob = probs[np.arange(n_samples), s_tp1]
+         trans_probs.append(trans_prob)
+     probs = np.stack(trans_probs, axis=1)
+     probs[np.isnan(probs)] = 0
+     return probs
+
+
+ def get_state_probs(tp, num_states=10):
+     stationary = np.bincount(tp.reshape(-1), minlength=num_states)
+     return stationary / stationary.sum()
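`sample_tp` draws discrete transition paths by conditioning a Markov chain on its endpoints: at step t the next state j is sampled with probability proportional to P[s_t, j] * P^(N-t-1)[j, s_N]. A toy sketch on a hand-built two-state chain (the matrix and sizes are illustrative, and it assumes the mdgen package is importable):

import numpy as np
from mdgen.analysis import sample_tp, get_tp_likelihood

# A two-state chain that strongly prefers to stay put, so 0 -> 1
# transitions are rare and the bridge must place the jump somewhere.
trans = np.array([[0.95, 0.05],
                  [0.10, 0.90]])

np.random.seed(0)
paths = sample_tp(trans, start_state=0, end_state=1, traj_len=6, n_samples=4)
print(paths)        # (4, 6) int array; every row starts at 0 and ends at 1

probs = get_tp_likelihood(paths, trans)
print(probs.shape)  # (4, 5): one stepwise transition probability per hop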
data/mdgen/dataset.py ADDED
@@ -0,0 +1,101 @@
+ import torch
+ from .rigid_utils import Rigid
+ from .residue_constants import restype_order, restype_atom37_mask  # restype_atom37_mask is used below but was missing from the original import
+ import numpy as np
+ import pandas as pd
+ from .geometry import atom37_to_torsions, atom14_to_atom37, atom14_to_frames
+
+ class MDGenDataset(torch.utils.data.Dataset):
+     def __init__(self, args, split, repeat=1):
+         super().__init__()
+         self.df = pd.read_csv(split, index_col='name')
+         self.args = args
+         self.repeat = repeat
+
+     def __len__(self):
+         if self.args.overfit_peptide:
+             return 1000
+         return self.repeat * len(self.df)
+
+     def __getitem__(self, idx):
+         idx = idx % len(self.df)
+         if self.args.overfit:
+             idx = 0
+
+         if self.args.overfit_peptide is None:
+             name = self.df.index[idx]
+             seqres = self.df.seqres[name]
+         else:
+             name = self.args.overfit_peptide
+             seqres = name
+
+         if self.args.atlas:
+             i = np.random.randint(1, 4)
+             full_name = f"{name}_R{i}"
+         else:
+             full_name = name
+         arr = np.lib.format.open_memmap(f'{self.args.data_dir}/{full_name}{self.args.suffix}.npy', 'r')
+         if self.args.frame_interval:
+             arr = arr[::self.args.frame_interval]
+
+         frame_start = np.random.choice(np.arange(arr.shape[0] - self.args.num_frames))
+         if self.args.overfit_frame:
+             frame_start = 0
+         end = frame_start + self.args.num_frames
+         # arr = np.copy(arr[frame_start:end]) * 10  # convert to angstroms
+         arr = np.copy(arr[frame_start:end]).astype(np.float32)  # / 10.0  # convert to nm
+         if self.args.copy_frames:
+             arr[1:] = arr[0]
+
+         # arr should be in ANGSTROMS
+         frames = atom14_to_frames(torch.from_numpy(arr))
+         seqres = np.array([restype_order[c] for c in seqres])
+         aatype = torch.from_numpy(seqres)[None].expand(self.args.num_frames, -1)
+         atom37 = torch.from_numpy(atom14_to_atom37(arr, aatype)).float()
+
+         L = frames.shape[1]
+         mask = np.ones(L, dtype=np.float32)
+
+         if self.args.no_frames:
+             return {
+                 'name': full_name,
+                 'frame_start': frame_start,
+                 'atom37': atom37,
+                 'seqres': seqres,
+                 'mask': restype_atom37_mask[seqres],  # (L, 37)
+             }
+         torsions, torsion_mask = atom37_to_torsions(atom37, aatype)
+
+         torsion_mask = torsion_mask[0]
+
+         if self.args.atlas:
+             if L > self.args.crop:
+                 start = np.random.randint(0, L - self.args.crop + 1)
+                 torsions = torsions[:, start:start + self.args.crop]
+                 frames = frames[:, start:start + self.args.crop]
+                 seqres = seqres[start:start + self.args.crop]
+                 mask = mask[start:start + self.args.crop]
+                 torsion_mask = torsion_mask[start:start + self.args.crop]
+
+             elif L < self.args.crop:
+                 pad = self.args.crop - L
+                 frames = Rigid.cat([
+                     frames,
+                     Rigid.identity((self.args.num_frames, pad), requires_grad=False, fmt='rot_mat')
+                 ], 1)
+                 mask = np.concatenate([mask, np.zeros(pad, dtype=np.float32)])
+                 seqres = np.concatenate([seqres, np.zeros(pad, dtype=int)])
+                 torsions = torch.cat([torsions, torch.zeros((torsions.shape[0], pad, 7, 2), dtype=torch.float32)], 1)
+                 torsion_mask = torch.cat([torsion_mask, torch.zeros((pad, 7), dtype=torch.float32)])
+
+         return {
+             'name': full_name,
+             'frame_start': frame_start,
+             'torsions': torsions,
+             'torsion_mask': torsion_mask,
+             'trans': frames._trans,
+             'rots': frames._rots._rot_mats,
+             'seqres': seqres,
+             'mask': mask,  # (L,)
+         }
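The dataset is driven entirely by the `args` namespace, so a minimal sketch of wiring it up looks like the following (the field values are illustrative; it assumes a `splits/4AA_train.csv` with a `name` index and `seqres` column, and `.npy` atom14 trajectories under `data_dir`, as in the scripts above):

import argparse
import torch
from mdgen.dataset import MDGenDataset

# Every flag MDGenDataset.__getitem__ reads; the values here are placeholders.
args = argparse.Namespace(
    data_dir='share/4AA_data', suffix='', num_frames=100,
    frame_interval=1, crop=4, atlas=False, no_frames=False,
    overfit=False, overfit_peptide=None, overfit_frame=False,
    copy_frames=False,
)

dataset = MDGenDataset(args, split='splits/4AA_train.csv')
loader = torch.utils.data.DataLoader(dataset, batch_size=2, shuffle=True)
batch = next(iter(loader))
print(batch['torsions'].shape)  # (B, num_frames, L, 7, 2)
print(batch['trans'].shape)     # (B, num_frames, L, 3)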
data/mdgen/ema.py ADDED
@@ -0,0 +1,71 @@
+ # https://github.com/aqlaboratory/openfold/blob/main/openfold/utils/exponential_moving_average.py
+ from collections import OrderedDict
+ import torch
+ import torch.nn as nn
+
+ from .tensor_utils import tensor_tree_map
+
+
+ class ExponentialMovingAverage:
+     """
+     Maintains moving averages of parameters with exponential decay
+
+     At each step, the stored copy `copy` of each parameter `param` is
+     updated as follows:
+
+         `copy = decay * copy + (1 - decay) * param`
+
+     where `decay` is an attribute of the ExponentialMovingAverage object.
+     """
+
+     def __init__(self, model: nn.Module, decay: float):
+         """
+         Args:
+             model:
+                 A torch.nn.Module whose parameters are to be tracked
+             decay:
+                 A value (usually close to 1.) by which updates are
+                 weighted as part of the above formula
+         """
+         super(ExponentialMovingAverage, self).__init__()
+
+         clone_param = lambda t: t.clone().detach()
+         self.params = tensor_tree_map(clone_param, model.state_dict())
+         self.decay = decay
+         self.device = next(model.parameters()).device
+
+     def to(self, device):
+         self.params = tensor_tree_map(lambda t: t.to(device), self.params)
+         self.device = device
+
+     def _update_state_dict_(self, update, state_dict):
+         with torch.no_grad():
+             for k, v in update.items():
+                 stored = state_dict[k]
+                 if not isinstance(v, torch.Tensor):
+                     self._update_state_dict_(v, stored)
+                 else:
+                     diff = stored - v
+                     diff *= 1 - self.decay
+                     stored -= diff
+
+     def update(self, model: torch.nn.Module) -> None:
+         """
+         Updates the stored parameters using the state dict of the provided
+         module. The module should have the same structure as that used to
+         initialize the ExponentialMovingAverage object.
+         """
+         self._update_state_dict_(model.state_dict(), self.params)
+
+     def load_state_dict(self, state_dict: OrderedDict) -> None:
+         for k in state_dict["params"].keys():
+             self.params[k] = state_dict["params"][k].clone()
+         self.decay = state_dict["decay"]
+
+     def state_dict(self) -> OrderedDict:
+         return OrderedDict(
+             {
+                 "params": self.params,
+                 "decay": self.decay,
+             }
+         )
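A minimal sketch of how this EMA wrapper is meant to sit in a training loop: update after every optimizer step, then load the averaged weights for evaluation (the tiny model and random data are placeholders; it assumes mdgen.ema is importable):

import torch
import torch.nn as nn
from mdgen.ema import ExponentialMovingAverage

model = nn.Linear(8, 1)
ema = ExponentialMovingAverage(model, decay=0.999)
opt = torch.optim.Adam(model.parameters(), lr=1e-3)

for _ in range(10):
    loss = model(torch.randn(4, 8)).pow(2).mean()
    opt.zero_grad()
    loss.backward()
    opt.step()
    ema.update(model)  # copy <- decay * copy + (1 - decay) * param

# Evaluate with the smoothed weights.
eval_model = nn.Linear(8, 1)
eval_model.load_state_dict(ema.params)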
data/mdgen/geometry.py ADDED
@@ -0,0 +1,359 @@
+ import torch
+ import numpy as np
+
+ from .rigid_utils import Rigid, Rotation
+ from . import residue_constants as rc
+ from .tensor_utils import batched_gather
+
+
+ def atom14_to_atom37(atom14: np.ndarray, aatype, atom14_mask=None):
+     atom37 = batched_gather(
+         atom14,
+         rc.RESTYPE_ATOM37_TO_ATOM14[aatype],
+         dim=-2,
+         no_batch_dims=len(atom14.shape[:-2]),
+     )
+     atom37 *= rc.RESTYPE_ATOM37_MASK[aatype, :, None]
+     if atom14_mask is not None:
+         atom37_mask = batched_gather(
+             atom14_mask,
+             rc.RESTYPE_ATOM37_TO_ATOM14[aatype],
+             dim=-1,
+             no_batch_dims=len(atom14.shape[:-2]),
+         )
+         atom37_mask *= rc.RESTYPE_ATOM37_MASK[aatype]
+         return atom37, atom37_mask
+     else:
+         return atom37
+
+
+ def atom37_to_atom14(atom37: np.ndarray, aatype, atom37_mask=None):
+     atom14 = batched_gather(
+         atom37,
+         rc.RESTYPE_ATOM14_TO_ATOM37[aatype],
+         dim=-2,
+         no_batch_dims=len(atom37.shape[:-2]),
+     )
+     atom14 *= rc.RESTYPE_ATOM14_MASK[aatype, :, None]
+     if atom37_mask is not None:
+         atom14_mask = batched_gather(
+             atom37_mask,
+             rc.RESTYPE_ATOM14_TO_ATOM37[aatype],
+             dim=-1,
+             no_batch_dims=len(atom37.shape[:-2]),
+         )
+         atom14_mask *= rc.RESTYPE_ATOM14_MASK[aatype]
+         return atom14, atom14_mask
+     else:
+         return atom14
+
+
+ def frames_torsions_to_atom37(
+     frames: Rigid,
+     torsions: torch.Tensor,
+     aatype: torch.Tensor,
+ ):
+     atom14 = frames_torsions_to_atom14(frames, torsions, aatype)
+     return atom14_to_atom37(atom14, aatype)
+
+
+ def frames_torsions_to_atom14(
+     frames: Rigid, torsions: torch.Tensor, aatype: torch.Tensor
+ ):
+     if type(torsions) is np.ndarray:
+         torsions = torch.from_numpy(torsions)
+     if type(aatype) is np.ndarray:
+         aatype = torch.from_numpy(aatype)
+     default_frames = torch.from_numpy(rc.restype_rigid_group_default_frame).to(
+         aatype.device
+     )
+     lit_positions = torch.from_numpy(rc.restype_atom14_rigid_group_positions).to(
+         aatype.device
+     )
+     group_idx = torch.from_numpy(rc.restype_atom14_to_rigid_group).to(aatype.device)
+     atom_mask = torch.from_numpy(rc.restype_atom14_mask).to(aatype.device)
+     frames_out = torsion_angles_to_frames(frames, torsions, aatype, default_frames)
+     return frames_and_literature_positions_to_atom14_pos(
+         frames_out, aatype, default_frames, group_idx, atom_mask, lit_positions
+     )
+
+
+ def atom37_to_torsions(all_atom_positions, aatype, all_atom_mask=None):
+     if type(all_atom_positions) is np.ndarray:
+         all_atom_positions = torch.from_numpy(all_atom_positions)
+     if type(aatype) is np.ndarray:
+         aatype = torch.from_numpy(aatype)
+     if all_atom_mask is None:
+         all_atom_mask = torch.from_numpy(rc.RESTYPE_ATOM37_MASK[aatype]).to(
+             aatype.device
+         )
+     if type(all_atom_mask) is np.ndarray:
+         all_atom_mask = torch.from_numpy(all_atom_mask)
+
+     pad = all_atom_positions.new_zeros([*all_atom_positions.shape[:-3], 1, 37, 3])
+     prev_all_atom_positions = torch.cat(
+         [pad, all_atom_positions[..., :-1, :, :]], dim=-3
+     )
+
+     pad = all_atom_mask.new_zeros([*all_atom_mask.shape[:-2], 1, 37])
+     prev_all_atom_mask = torch.cat([pad, all_atom_mask[..., :-1, :]], dim=-2)
+
+     pre_omega_atom_pos = torch.cat(
+         [prev_all_atom_positions[..., 1:3, :], all_atom_positions[..., :2, :]],
+         dim=-2,
+     )
+     phi_atom_pos = torch.cat(
+         [prev_all_atom_positions[..., 2:3, :], all_atom_positions[..., :3, :]],
+         dim=-2,
+     )
+     psi_atom_pos = torch.cat(
+         [all_atom_positions[..., :3, :], all_atom_positions[..., 4:5, :]],
+         dim=-2,
+     )
+
+     pre_omega_mask = torch.prod(prev_all_atom_mask[..., 1:3], dim=-1) * torch.prod(
+         all_atom_mask[..., :2], dim=-1
+     )
+     phi_mask = prev_all_atom_mask[..., 2] * torch.prod(
+         all_atom_mask[..., :3], dim=-1, dtype=all_atom_mask.dtype
+     )
+     psi_mask = (
+         torch.prod(all_atom_mask[..., :3], dim=-1, dtype=all_atom_mask.dtype)
+         * all_atom_mask[..., 4]
+     )
+
+     chi_atom_indices = torch.as_tensor(get_chi_atom_indices(), device=aatype.device)
+
+     atom_indices = chi_atom_indices[..., aatype, :, :]
+     chis_atom_pos = batched_gather(
+         all_atom_positions, atom_indices, -2, len(atom_indices.shape[:-2])
+     )
+
+     chi_angles_mask = list(rc.chi_angles_mask)
+     chi_angles_mask.append([0.0, 0.0, 0.0, 0.0])
+     chi_angles_mask = all_atom_mask.new_tensor(chi_angles_mask)
+
+     chis_mask = chi_angles_mask[aatype, :]
+
+     chi_angle_atoms_mask = batched_gather(
+         all_atom_mask,
+         atom_indices,
+         dim=-1,
+         no_batch_dims=len(atom_indices.shape[:-2]),
+     )
+     chi_angle_atoms_mask = torch.prod(
+         chi_angle_atoms_mask, dim=-1, dtype=chi_angle_atoms_mask.dtype
+     )
+     chis_mask = chis_mask * chi_angle_atoms_mask
+
+     torsions_atom_pos = torch.cat(
+         [
+             pre_omega_atom_pos[..., None, :, :],
+             phi_atom_pos[..., None, :, :],
+             psi_atom_pos[..., None, :, :],
+             chis_atom_pos,
+         ],
+         dim=-3,
+     )
+
+     torsion_angles_mask = torch.cat(
+         [
+             pre_omega_mask[..., None],
+             phi_mask[..., None],
+             psi_mask[..., None],
+             chis_mask,
+         ],
+         dim=-1,
+     )
+
+     torsion_frames = Rigid.from_3_points(
+         torsions_atom_pos[..., 1, :],
+         torsions_atom_pos[..., 2, :],
+         torsions_atom_pos[..., 0, :],
+         eps=1e-8,
+     )
+
+     fourth_atom_rel_pos = torsion_frames.invert().apply(torsions_atom_pos[..., 3, :])
+
+     torsion_angles_sin_cos = torch.stack(
+         [fourth_atom_rel_pos[..., 2], fourth_atom_rel_pos[..., 1]], dim=-1
+     )
+
+     denom = torch.sqrt(
+         torch.sum(
+             torch.square(torsion_angles_sin_cos),
+             dim=-1,
+             dtype=torsion_angles_sin_cos.dtype,
+             keepdims=True,
+         )
+         + 1e-8
+     )
+     torsion_angles_sin_cos = torsion_angles_sin_cos / denom
+
+     torsion_angles_sin_cos = (
+         torsion_angles_sin_cos
+         * all_atom_mask.new_tensor(
+             [1.0, 1.0, -1.0, 1.0, 1.0, 1.0, 1.0],
+         )[((None,) * len(torsion_angles_sin_cos.shape[:-2])) + (slice(None), None)]
+     )
+
+     return torsion_angles_sin_cos, torsion_angles_mask
+
+
+ def prot_to_frames(ca_coords, c_coords, n_coords):
+     prot_frames = Rigid.from_3_points(
+         torch.from_numpy(c_coords),
+         torch.from_numpy(ca_coords),
+         torch.from_numpy(n_coords),
+     )
+     rots = torch.eye(3)
+     rots[0, 0] = -1
+     rots[2, 2] = -1
+     rots = Rotation(rot_mats=rots)
+     return prot_frames.compose(Rigid(rots, None))
+
+
+ def atom14_to_frames(atom14):
+     n_coords = atom14[:, :, rc.atom_order['N']]
+     ca_coords = atom14[:, :, rc.atom_order['CA']]
+     c_coords = atom14[:, :, rc.atom_order['C']]
+     prot_frames = Rigid.from_3_points(
+         c_coords,
+         ca_coords,
+         n_coords,
+     )
+     rots = torch.eye(3, device=atom14.device)[None, None].repeat(atom14.shape[0], atom14.shape[1], 1, 1)
+     rots[:, :, 0, 0] = -1
+     rots[:, :, 2, 2] = -1
+     rots = Rotation(rot_mats=rots)
+     return prot_frames.compose(Rigid(rots, None))
+
+
+ def frames_and_literature_positions_to_atom14_pos(
+     r: Rigid,
+     aatype: torch.Tensor,
+     default_frames,
+     group_idx,
+     atom_mask,
+     lit_positions,
+ ):
+     # [*, N, 14, 4, 4]
+     default_4x4 = default_frames[aatype, ...]
+
+     # [*, N, 14]
+     group_mask = group_idx[aatype, ...]
+
+     # [*, N, 14, 8]
+     group_mask = torch.nn.functional.one_hot(
+         group_mask,
+         num_classes=default_frames.shape[-3],
+     )
+
+     # [*, N, 14, 8]
+     t_atoms_to_global = r[..., None, :] * group_mask
+
+     # [*, N, 14]
+     t_atoms_to_global = t_atoms_to_global.map_tensor_fn(lambda x: torch.sum(x, dim=-1))
+
+     # [*, N, 14, 1]
+     atom_mask = atom_mask[aatype, ...].unsqueeze(-1)
+
+     # [*, N, 14, 3]
+     lit_positions = lit_positions[aatype, ...]
+     pred_positions = t_atoms_to_global.apply(lit_positions)
+     pred_positions = pred_positions * atom_mask
+
+     return pred_positions
+
+
+ def torsion_angles_to_frames(
+     r: Rigid,
+     alpha: torch.Tensor,
+     aatype: torch.Tensor,
+     rrgdf: torch.Tensor,
+ ):
+     # [*, N, 8, 4, 4]
+     default_4x4 = rrgdf[aatype, ...]
+
+     # [*, N, 8] transformations, i.e.
+     #   One [*, N, 8, 3, 3] rotation matrix and
+     #   One [*, N, 8, 3] translation matrix
+     default_r = r.from_tensor_4x4(default_4x4)
+
+     bb_rot = alpha.new_zeros((*((1,) * len(alpha.shape[:-1])), 2))
+     bb_rot[..., 1] = 1
+
+     # [*, N, 8, 2]
+     alpha = torch.cat([bb_rot.expand(*alpha.shape[:-2], -1, -1), alpha], dim=-2)
+
+     # [*, N, 8, 3, 3]
+     # Produces rotation matrices of the form:
+     # [
+     #   [1, 0  , 0  ],
+     #   [0, a_2,-a_1],
+     #   [0, a_1, a_2]
+     # ]
+     # This follows the original code rather than the supplement, which uses
+     # different indices.
+     all_rots = alpha.new_zeros(default_r.get_rots().get_rot_mats().shape)
+     all_rots[..., 0, 0] = 1
+     all_rots[..., 1, 1] = alpha[..., 1]
+     all_rots[..., 1, 2] = -alpha[..., 0]
+     all_rots[..., 2, 1:] = alpha
+
+     all_rots = Rigid(Rotation(rot_mats=all_rots), None)
+
+     all_frames = default_r.compose(all_rots)
+
+     chi2_frame_to_frame = all_frames[..., 5]
+     chi3_frame_to_frame = all_frames[..., 6]
+     chi4_frame_to_frame = all_frames[..., 7]
+
+     chi1_frame_to_bb = all_frames[..., 4]
+     chi2_frame_to_bb = chi1_frame_to_bb.compose(chi2_frame_to_frame)
+     chi3_frame_to_bb = chi2_frame_to_bb.compose(chi3_frame_to_frame)
+     chi4_frame_to_bb = chi3_frame_to_bb.compose(chi4_frame_to_frame)
+
+     all_frames_to_bb = Rigid.cat(
+         [
+             all_frames[..., :5],
+             chi2_frame_to_bb.unsqueeze(-1),
+             chi3_frame_to_bb.unsqueeze(-1),
+             chi4_frame_to_bb.unsqueeze(-1),
+         ],
+         dim=-1,
+     )
+
+     all_frames_to_global = r[..., None].compose(all_frames_to_bb)
+
+     return all_frames_to_global
+
+
+ def get_chi_atom_indices():
+     """Returns atom indices needed to compute chi angles for all residue types.
+
+     Returns:
+         A tensor of shape [residue_types=21, chis=4, atoms=4]. The residue types are
+         in the order specified in rc.restypes + unknown residue type
+         at the end. For chi angles which are not defined on the residue, the
+         positions indices are by default set to 0.
+     """
+     chi_atom_indices = []
+     for residue_name in rc.restypes:
+         residue_name = rc.restype_1to3[residue_name]
+         residue_chi_angles = rc.chi_angles_atoms[residue_name]
+         atom_indices = []
+         for chi_angle in residue_chi_angles:
+             atom_indices.append([rc.atom_order[atom] for atom in chi_angle])
+         for _ in range(4 - len(atom_indices)):
+             atom_indices.append([0, 0, 0, 0])  # For chi angles not defined on the AA.
+         chi_atom_indices.append(atom_indices)
+
+     chi_atom_indices.append([[0, 0, 0, 0]] * 4)  # For UNKNOWN residue.
+
+     return chi_atom_indices
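A shape-oriented sketch of the conversion chain used by the dataset above: atom14 coordinates to rigid frames and torsions, then back to atom14 via `frames_torsions_to_atom14` (random coordinates and an all-alanine sequence, purely illustrative; it assumes the mdgen package is importable):

import numpy as np
import torch
from mdgen.geometry import (
    atom14_to_frames, atom14_to_atom37, atom37_to_torsions,
    frames_torsions_to_atom14,
)

T, L = 5, 4                                    # frames, residues
arr = np.random.randn(T, L, 14, 3).astype(np.float32)
aatype = torch.zeros(T, L, dtype=torch.long)   # all-ALA sequence

frames = atom14_to_frames(torch.from_numpy(arr))                  # Rigid of shape (T, L)
atom37 = torch.from_numpy(atom14_to_atom37(arr, aatype)).float()  # (T, L, 37, 3)
torsions, torsion_mask = atom37_to_torsions(atom37, aatype)       # (T, L, 7, 2), (T, L, 7)

atom14 = frames_torsions_to_atom14(frames, torsions, aatype)
print(atom14.shape)                            # torch.Size([5, 4, 14, 3])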
data/mdgen/logger.py ADDED
@@ -0,0 +1,34 @@
+ import logging, socket, os
+
+ model_dir = os.environ.get("MODEL_DIR", "./workdir/default")
+
+ class Rank(logging.Filter):
+     def filter(self, record):
+         record.global_rank = os.environ.get("GLOBAL_RANK", 0)
+         record.local_rank = os.environ.get("LOCAL_RANK", 0)
+         return True
+
+
+ def get_logger(name):
+     logger = logging.Logger(name)
+     # logger.addFilter(Rank())
+     level = {"critical": 50, "error": 40, "warning": 30, "info": 20, "debug": 10}[
+         os.environ.get("LOGGER_LEVEL", "info")
+     ]
+     logger.setLevel(level)
+
+     ch = logging.StreamHandler()
+     ch.setLevel(logging.INFO)
+     os.makedirs(model_dir, exist_ok=True)
+     fh = logging.FileHandler(os.path.join(model_dir, "log.out"))
+     fh.setLevel(logging.DEBUG)
+     # formatter = logging.Formatter(f'%(asctime)s [{socket.gethostname()}:%(process)d:%(global_rank)s:%(local_rank)s]
+     #                               [%(levelname)s] %(message)s')  # (%(name)s)
+     formatter = logging.Formatter(
+         f"%(asctime)s [{socket.gethostname()}:%(process)d] [%(levelname)s] %(message)s"
+     )
+     ch.setFormatter(formatter)
+     fh.setFormatter(formatter)
+     logger.addHandler(ch)
+     logger.addHandler(fh)
+     return logger
@@ -0,0 +1,257 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2021 AlQuraishi Laboratory
2
+ # Copyright 2021 DeepMind Technologies Limited
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+ import math
16
+ import torch
17
+ import torch.nn as nn
18
+ import torch.nn.functional as F
19
+ from .primitives import Linear, ipa_point_weights_init_
20
+
21
+ """
22
+ from openfold.utils.feats import (
23
+ frames_and_literature_positions_to_atom14_pos,
24
+ torsion_angles_to_frames,
25
+ )
26
+ """
27
+ from ..rigid_utils import Rotation, Rigid
28
+ from ..tensor_utils import (
29
+ permute_final_dims,
30
+ flatten_final_dims,
31
+ )
32
+
33
+
34
+ class InvariantPointAttention(nn.Module):
35
+ def __init__(self, c_s, c_z, c_hidden, no_heads, no_qk_points, no_v_points, inf=1e5, eps=1e-8, dropout=0.0):
36
+ """
37
+ Args:
38
+ c_s:
39
+ Single representation channel dimension
40
+ c_z:
41
+ Pair representation channel dimension
42
+ c_hidden:
43
+ Hidden channel dimension
44
+ no_heads:
45
+ Number of attention heads
46
+ no_qk_points:
47
+ Number of query/key points to generate
48
+ no_v_points:
49
+ Number of value points to generate
50
+ """
51
+ super(InvariantPointAttention, self).__init__()
52
+
53
+ self.c_s = c_s
54
+ self.c_z = c_z
55
+ self.c_hidden = c_hidden
56
+ self.no_heads = no_heads
57
+ self.no_qk_points = no_qk_points
58
+ self.no_v_points = no_v_points
59
+ self.inf = inf
60
+ self.eps = eps
61
+ self.dropout = dropout
62
+ # These linear layers differ from their specifications in the
63
+ # supplement. There, they lack bias and use Glorot initialization.
64
+ # Here as in the official source, they have bias and use the default
65
+ # Lecun initialization.
66
+ hc = self.c_hidden * self.no_heads
67
+ self.linear_q = Linear(self.c_s, hc)
68
+ self.linear_kv = Linear(self.c_s, 2 * hc)
69
+
70
+ hpq = self.no_heads * self.no_qk_points * 3
71
+ self.linear_q_points = Linear(self.c_s, hpq)
72
+
73
+ hpkv = self.no_heads * (self.no_qk_points + self.no_v_points) * 3
74
+ self.linear_kv_points = Linear(self.c_s, hpkv)
75
+
76
+ hpv = self.no_heads * self.no_v_points * 3
77
+
78
+ if self.c_z > 0:
79
+ self.linear_b = Linear(self.c_z, self.no_heads)
80
+
81
+ self.head_weights = nn.Parameter(torch.zeros((no_heads)))
82
+ ipa_point_weights_init_(self.head_weights)
83
+
84
+ concat_out_dim = self.no_heads * (
85
+ self.c_z + self.c_hidden + self.no_v_points * 4
86
+ )
87
+ self.linear_out = Linear(concat_out_dim, self.c_s, init="final")
88
+
89
+ self.softmax = nn.Softmax(dim=-1)
90
+ self.softplus = nn.Softplus()
91
+
92
+ def forward(self, s, r, z=None, frame_mask=None, attn_mask=None):
93
+ """
94
+ Args:
95
+ s:
96
+ [*, N_res, C_s] single representation
97
+ z:
98
+ [*, N_res, N_res, C_z] pair representation
99
+ r:
100
+ [*, N_res] transformation object
101
+ mask:
102
+ [*, N_res] mask
103
+ Returns:
104
+ [*, N_res, C_s] single representation update
105
+ """
106
+
107
+ z = [z]
108
+
109
+ #######################################
110
+ # Generate scalar and point activations
111
+ #######################################
112
+ # [*, N_res, H * C_hidden]
113
+ q = self.linear_q(s)
114
+ kv = self.linear_kv(s)
115
+
116
+ # [*, N_res, H, C_hidden]
117
+ q = q.view(q.shape[:-1] + (self.no_heads, -1))
118
+
119
+ # [*, N_res, H, 2 * C_hidden]
120
+ kv = kv.view(kv.shape[:-1] + (self.no_heads, -1))
121
+
122
+ # [*, N_res, H, C_hidden]
123
+ k, v = torch.split(kv, self.c_hidden, dim=-1)
124
+
125
+ # [*, N_res, H * P_q * 3]
126
+ q_pts = self.linear_q_points(s)
127
+
128
+ # This is kind of clunky, but it's how the original does it
129
+ # [*, N_res, H * P_q, 3]
130
+ q_pts = torch.split(q_pts, q_pts.shape[-1] // 3, dim=-1)
131
+ q_pts = torch.stack(q_pts, dim=-1)
132
+ q_pts = r[..., None].apply(q_pts)
133
+
134
+ # [*, N_res, H, P_q, 3]
135
+ q_pts = q_pts.view(q_pts.shape[:-2] + (self.no_heads, self.no_qk_points, 3))
136
+
137
+ # [*, N_res, H * (P_q + P_v) * 3]
138
+ kv_pts = self.linear_kv_points(s)
139
+
140
+ # [*, N_res, H * (P_q + P_v), 3]
141
+ kv_pts = torch.split(kv_pts, kv_pts.shape[-1] // 3, dim=-1)
142
+ kv_pts = torch.stack(kv_pts, dim=-1)
143
+ kv_pts = r[..., None].apply(kv_pts)
144
+
145
+ # [*, N_res, H, (P_q + P_v), 3]
146
+ kv_pts = kv_pts.view(kv_pts.shape[:-2] + (self.no_heads, -1, 3))
147
+
148
+ # [*, N_res, H, P_q/P_v, 3]
149
+ k_pts, v_pts = torch.split(
150
+ kv_pts, [self.no_qk_points, self.no_v_points], dim=-2
151
+ )
152
+
153
+ ##########################
154
+ # Compute attention scores
155
+ ##########################
156
+ # [*, N_res, N_res, H]
157
+ if self.c_z > 0:
158
+ b = self.linear_b(z[0])
159
+
160
+ # [*, H, N_res, N_res]
161
+ a = torch.matmul(
162
+ permute_final_dims(q, (1, 0, 2)), # [*, H, N_res, C_hidden]
163
+ permute_final_dims(k, (1, 2, 0)), # [*, H, C_hidden, N_res]
164
+ )
165
+
166
+ a *= math.sqrt(1.0 / (3 * self.c_hidden))
167
+ if self.c_z:
168
+ a += math.sqrt(1.0 / 3) * permute_final_dims(b, (2, 0, 1))
169
+
170
+ # [*, N_res, N_res, H, P_q, 3]
171
+ pt_att = q_pts.unsqueeze(-4) - k_pts.unsqueeze(-5)
172
+ pt_att = pt_att**2
173
+
174
+ # [*, N_res, N_res, H, P_q]
175
+ pt_att = sum(torch.unbind(pt_att, dim=-1))
176
+ head_weights = self.softplus(self.head_weights).view(
177
+ *((1,) * len(pt_att.shape[:-2]) + (-1, 1))
178
+ )
179
+ head_weights = head_weights * math.sqrt(
180
+ 1.0 / (3 * (self.no_qk_points * 9.0 / 2))
181
+ )
182
+ pt_att = pt_att * head_weights
183
+
184
+ # [*, N_res, N_res, H]
185
+ pt_att = torch.sum(pt_att, dim=-1) * (-0.5)
186
+ # [*, N_res, N_res]
187
+
188
+ if frame_mask is not None:
189
+ square_mask = frame_mask.unsqueeze(-1) * frame_mask.unsqueeze(-2)
190
+ square_mask = self.inf * (square_mask - 1)
191
+
192
+ # [*, H, N_res, N_res]
193
+ pt_att = permute_final_dims(pt_att, (2, 0, 1))
194
+
195
+
196
+ a = a + pt_att
197
+ if frame_mask is not None:
198
+ a = a + square_mask.unsqueeze(-3)
199
+ if attn_mask is not None:
200
+ attn_mask = self.inf * (attn_mask - 1)
201
+ a = a + attn_mask
202
+
203
+ a = self.softmax(a)
204
+ a = F.dropout(a, p=self.dropout, training=self.training)
205
+ ################
206
+ # Compute output
207
+ ################
208
+ # [*, N_res, H, C_hidden]
209
+ o = torch.matmul(a, v.transpose(-2, -3).to(dtype=a.dtype)).transpose(-2, -3)
210
+
211
+ # [*, N_res, H * C_hidden]
212
+ o = flatten_final_dims(o, 2)
213
+
214
+ # [*, H, 3, N_res, P_v]
215
+
216
+ o_pt = torch.sum(
217
+ (
218
+ a[..., None, :, :, None]
219
+ * permute_final_dims(v_pts, (1, 3, 0, 2))[..., None, :, :]
220
+ ),
221
+ dim=-2,
222
+ )
223
+
224
+ # [*, N_res, H, P_v, 3]
225
+ o_pt = permute_final_dims(o_pt, (2, 0, 3, 1))
226
+ o_pt = r[..., None, None].invert_apply(o_pt)
227
+
228
+ # [*, N_res, H * P_v]
229
+ o_pt_norm = flatten_final_dims(
230
+ torch.sqrt(torch.sum(o_pt**2, dim=-1) + self.eps), 2
231
+ )
232
+
233
+ # [*, N_res, H * P_v, 3]
234
+ o_pt = o_pt.reshape(*o_pt.shape[:-3], -1, 3)
235
+
236
+ # [*, N_res, H, C_z]
237
+ if self.c_z > 0:
238
+ o_pair = torch.matmul(a.transpose(-2, -3), z[0].to(dtype=a.dtype))
239
+
240
+ # [*, N_res, H * C_z]
241
+ o_pair = flatten_final_dims(o_pair, 2)
242
+
243
+ # [*, N_res, C_s]
244
+ s = self.linear_out(
245
+ torch.cat(
246
+ (o, *torch.unbind(o_pt, dim=-1), o_pt_norm, o_pair), dim=-1
247
+ ).to(dtype=z[0].dtype)
248
+ )
249
+ else:
250
+ s = self.linear_out(
251
+ torch.cat((o, *torch.unbind(o_pt, dim=-1), o_pt_norm), dim=-1).to(
252
+ dtype=s.dtype
253
+ )
254
+ )
255
+ return s
256
+
257
+
data/mdgen/model/latent_model.py ADDED
@@ -0,0 +1,483 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import torch, tqdm, math
2
+ from torch.utils.checkpoint import checkpoint
3
+
4
+ from .standalone_hyena import HyenaOperator
5
+ from ..transport.transport import t_to_alpha
6
+ from .mha import MultiheadAttention
7
+ import numpy as np
8
+ import torch.nn as nn
9
+ from .layers import GaussianFourierProjection, TimestepEmbedder, FinalLayer
10
+ from .layers import gelu, modulate
11
+ from .ipa import InvariantPointAttention
12
+ from ..utils import DirichletConditionalFlow, simplex_proj, get_offsets
13
+
14
+
15
+ def grad_checkpoint(func, args, checkpointing=False):
16
+ if checkpointing:
17
+ return checkpoint(func, *args, use_reentrant=False)
18
+ else:
19
+ return func(*args)
20
+
21
+
22
+ def get_1d_sincos_pos_embed_from_grid(embed_dim, pos):
23
+ """
24
+ embed_dim: output dimension for each position
25
+ pos: a list of positions to be encoded: size (M,)
26
+ out: (M, D)
27
+ """
28
+ assert embed_dim % 2 == 0
29
+ omega = np.arange(embed_dim // 2, dtype=np.float64)
30
+ omega /= embed_dim / 2.
31
+ omega = 1. / 10000 ** omega # (D/2,)
32
+
33
+ pos = pos.reshape(-1) # (M,)
34
+ out = np.einsum('m,d->md', pos, omega) # (M, D/2), outer product
35
+
36
+ emb_sin = np.sin(out) # (M, D/2)
37
+ emb_cos = np.cos(out) # (M, D/2)
38
+
39
+ emb = np.concatenate([emb_sin, emb_cos], axis=1) # (M, D)
40
+ return emb
41
+
42
+
43
+ class LatentMDGenModel(nn.Module):
44
+ def __init__(self, args, latent_dim):
45
+ super().__init__()
46
+ self.args = args
47
+ if self.args.design:
48
+ assert self.args.prepend_ipa
49
+
50
+ self.latent_to_emb = nn.Linear(latent_dim, args.embed_dim)
51
+ if self.args.tps_condition or self.args.inpainting or self.args.dynamic_mpnn:
52
+ self.latent_to_emb_f = nn.Linear(7, args.embed_dim)
53
+ self.latent_to_emb_r = nn.Linear(7, args.embed_dim)
54
+
55
+ cond_dim = latent_dim
56
+ if self.args.design: cond_dim -= 20
57
+ self.cond_to_emb = nn.Linear(cond_dim, args.embed_dim)
58
+ self.mask_to_emb = nn.Embedding(2, args.embed_dim)
59
+ if self.args.design:
60
+ self.x_d_to_emb = nn.Linear(20, args.embed_dim)
61
+
62
+ ipa_args = {
63
+ 'c_s': args.embed_dim,
64
+ 'c_z': 0,
65
+ 'c_hidden': args.ipa_head_dim,
66
+ 'no_heads': args.ipa_heads,
67
+ 'no_qk_points': args.ipa_qk,
68
+ 'no_v_points': args.ipa_v,
69
+ 'dropout': args.dropout,
70
+ }
71
+
72
+ if args.prepend_ipa:
73
+ if not self.args.no_aa_emb:
74
+ self.aatype_to_emb = nn.Embedding(21, args.embed_dim)
75
+ self.ipa_layers = nn.ModuleList(
76
+ [
77
+ IPALayer(
78
+ embed_dim=args.embed_dim,
79
+ ffn_embed_dim=4 * args.embed_dim,
80
+ mha_heads=args.mha_heads,
81
+ dropout=args.dropout,
82
+ use_rotary_embeddings=not args.no_rope,
83
+ ipa_args=ipa_args
84
+ )
85
+ for _ in range(args.num_layers)
86
+ ]
87
+ )
88
+
89
+ self.layers = nn.ModuleList(
90
+ [
91
+ LatentMDGenLayer(
92
+ embed_dim=args.embed_dim,
93
+ ffn_embed_dim=4 * args.embed_dim,
94
+ mha_heads=args.mha_heads,
95
+ dropout=args.dropout,
96
+ hyena=args.hyena,
97
+ num_frames=args.num_frames,
98
+ use_rotary_embeddings=not args.no_rope,
99
+ use_time_attention=True,
100
+ ipa_args=ipa_args if args.interleave_ipa else None,
101
+ )
102
+ for _ in range(args.num_layers)
103
+ ]
104
+ )
105
+
106
+ if not (self.args.dynamic_mpnn or self.args.mpnn):
107
+ self.emb_to_latent = FinalLayer(args.embed_dim, latent_dim)
108
+ if args.design:
109
+ self.fc1 = nn.Linear(args.embed_dim, args.embed_dim)
110
+ self.fc2 = nn.Linear(args.embed_dim, args.embed_dim)
111
+ self.fc3 = nn.Linear(args.embed_dim, args.embed_dim)
112
+ self.emb_to_logits = nn.Linear(args.embed_dim, 20)
113
+
114
+ self.t_embedder = TimestepEmbedder(args.embed_dim)
115
+ if args.abs_pos_emb:
116
+ self.register_buffer('pos_embed',
117
+ nn.Parameter(torch.zeros(1, args.crop, args.embed_dim), requires_grad=False))
118
+
119
+ if args.abs_time_emb:
120
+ self.register_buffer('time_embed',
121
+ nn.Parameter(torch.zeros(1, args.num_frames, args.embed_dim), requires_grad=False))
122
+
123
+ self.args = args
124
+ if self.args.design:
125
+ self.condflow = DirichletConditionalFlow(K=20, alpha_spacing=0.001,
126
+ alpha_max=args.alpha_max)
127
+
128
+ self.initialize_weights()
129
+
130
+ def initialize_weights(self):
131
+ # Initialize transformer layers:
132
+ def _basic_init(module):
133
+ if isinstance(module, nn.Linear):
134
+ torch.nn.init.xavier_uniform_(module.weight)
135
+ if module.bias is not None:
136
+ nn.init.constant_(module.bias, 0)
137
+
138
+ self.apply(_basic_init)
139
+
140
+ if self.args.prepend_ipa:
141
+ for block in self.ipa_layers:
142
+ nn.init.constant_(block.ipa.linear_out.weight, 0)
143
+ nn.init.constant_(block.ipa.linear_out.bias, 0)
144
+
145
+ if self.args.interleave_ipa:
146
+ for block in self.layers:
147
+ nn.init.constant_(block.ipa.linear_out.weight, 0)
148
+ nn.init.constant_(block.ipa.linear_out.bias, 0)
149
+
150
+ # # Initialize (and freeze) pos_embed by sin-cos embedding:
151
+ if self.args.abs_pos_emb:
152
+ pos_embed = get_1d_sincos_pos_embed_from_grid(self.pos_embed.shape[-1], np.arange(self.args.crop))
153
+ self.pos_embed.data.copy_(torch.from_numpy(pos_embed).float().unsqueeze(0))
154
+
155
+ if self.args.abs_time_emb:
156
+ time_embed = get_1d_sincos_pos_embed_from_grid(self.time_embed.shape[-1], np.arange(self.args.num_frames))
157
+ self.time_embed.data.copy_(torch.from_numpy(time_embed).float().unsqueeze(0))
158
+
159
+ # Initialize timestep embedding MLP:
160
+ nn.init.normal_(self.t_embedder.mlp[0].weight, std=0.02)
161
+ nn.init.normal_(self.t_embedder.mlp[2].weight, std=0.02)
162
+
163
+ # Zero-out adaLN modulation layers in DiT blocks:
164
+ for block in self.layers:
165
+ nn.init.constant_(block.adaLN_modulation[-1].weight, 0)
166
+ nn.init.constant_(block.adaLN_modulation[-1].bias, 0)
167
+
168
+ if not (self.args.dynamic_mpnn or self.args.mpnn):
169
+ # Zero-out output layers:
170
+ nn.init.constant_(self.emb_to_latent.adaLN_modulation[-1].weight, 0)
171
+ nn.init.constant_(self.emb_to_latent.adaLN_modulation[-1].bias, 0)
172
+ nn.init.constant_(self.emb_to_latent.linear.weight, 0)
173
+ nn.init.constant_(self.emb_to_latent.linear.bias, 0)
174
+
175
+ def run_ipa(
176
+ self,
177
+ t,
178
+ mask,
179
+ start_frames,
180
+ end_frames,
181
+ aatype,
182
+ x_d=None
183
+ ):
184
+ if self.args.sim_condition or self.args.mpnn:
185
+ B, L = mask.shape
186
+ x = torch.zeros(B, L, self.args.embed_dim, device=mask.device)
187
+ if aatype is not None and not self.args.no_aa_emb:
188
+ x = x + self.aatype_to_emb(aatype)
189
+ if self.args.design:
190
+ x = x + self.x_d_to_emb(x_d) # pass in only the simplex data
191
+ for layer in self.ipa_layers:
192
+ x = layer(x, t, mask, frames=start_frames)
193
+ elif self.args.tps_condition or self.args.inpainting or self.args.dynamic_mpnn:
194
+ x_f = start_frames.invert().compose(end_frames).to_tensor_7()
195
+ x_r = end_frames.invert().compose(start_frames).to_tensor_7()
196
+ x_f = self.latent_to_emb_f(x_f)
197
+ x_r = self.latent_to_emb_r(x_r)
198
+ if aatype is not None and not self.args.no_aa_emb:
199
+ x_f = x_f + self.aatype_to_emb(aatype)
200
+ x_r = x_r + self.aatype_to_emb(aatype)
201
+ if self.args.design:
202
+ x_f = x_f + self.x_d_to_emb(x_d)
203
+ x_r = x_r + self.x_d_to_emb(x_d)
204
+ for layer in self.ipa_layers:
205
+ x_r = layer(x_r, t, mask, frames=start_frames)
206
+ x_f = layer(x_f, t, mask, frames=end_frames)
207
+ x = (x_r + x_f)
208
+
209
+ # x = x[:, None] + x_latent
210
+ return x
211
+
212
+ def forward(self, x, t, mask,
213
+ start_frames=None, end_frames=None,
214
+ x_cond=None, x_cond_mask=None,
215
+ aatype=None
216
+ ):
217
+ if self.args.dynamic_mpnn:
218
+ x = x[:, [0, -1]]
219
+ x_cond = x_cond[:, [0, -1]]
220
+ x_cond_mask = x_cond_mask[:, [0, -1]]
221
+ mask = mask[:, [0, -1]]
222
+ if self.args.mpnn:
223
+ x = x[:, :1]
224
+ x_cond = x_cond[:, :1]
225
+ x_cond_mask = x_cond_mask[:, :1]
226
+ mask = mask[:, :1]
227
+
228
+ if self.args.design:
229
+ x_d = x[..., -20:].mean(1)
230
+ else:
231
+ x_d = None
232
+
233
+ x = self.latent_to_emb(x) # 384 dim token
234
+ if self.args.abs_pos_emb:
235
+ x = x + self.pos_embed
236
+
237
+ if self.args.abs_time_emb:
238
+ x = x + self.time_embed[:, :, None]
239
+
240
+ if x_cond is not None:
241
+ x = x + self.cond_to_emb(x_cond) + self.mask_to_emb(x_cond_mask) # token has cond g, tau
242
+
243
+ t = self.t_embedder(t * self.args.time_multiplier)[:, None]
244
+
245
+ if self.args.prepend_ipa: # IPA doesn't need checkpointing
246
+ x = x + self.run_ipa(t[:, 0], mask[:, 0], start_frames, end_frames, aatype, x_d=x_d)[:, None]
247
+
248
+ for layer_idx, layer in enumerate(self.layers):
249
+ x = grad_checkpoint(layer, (x, t, mask, start_frames), self.args.grad_checkpointing)
250
+
251
+ if not (self.args.dynamic_mpnn or self.args.mpnn):
252
+ latent = self.emb_to_latent(x, t)
253
+ if self.args.design: ### this is also kind of weird
254
+ x_l = self.fc2(gelu(self.fc1(x)))
255
+ x_l = x_l.mean(1)
256
+ logits = self.emb_to_logits(gelu(self.fc3(x_l)))
257
+ if self.args.dynamic_mpnn or self.args.mpnn:
258
+ return logits[:, None, :]
259
+ latent[:, :, :, -20:] = latent[:, :, :, -20:] + logits[:, None, :, :]
260
+ return latent
261
+
262
+ # x, t, mask, start_frames=None, end_frames=None, x_cond=None, x_cond_mask=None, aatype=None
263
+ def forward_inference(self, x, t, mask,
264
+ start_frames=None, end_frames=None,
265
+ x_cond=None, x_cond_mask=None,
266
+ aatype=None
267
+ ):
268
+ if not self.args.design or self.args.dynamic_mpnn or self.args.mpnn:
269
+ return self.forward(x, t, mask, start_frames, end_frames, x_cond, x_cond_mask, aatype)
270
+ else:
271
+ x_discrete = x[:, :, :, -20:]
272
+ B, T, L, _ = x_discrete.shape
273
+ if not torch.allclose(x_discrete.sum(3), torch.ones((B, T, L), device=x.device), atol=1e-4) or not (
274
+ x_discrete >= 0).all():
275
+ print(
276
+ f'WARNING: xt.min(): {x_discrete.min()}. Some values of xt do not lie on the simplex. There are '
277
+ f'we are '
278
+ f'{(x_discrete < 0).sum()} negative values in xt of shape {x_discrete.shape} that are negative. '
279
+ f'We are projecting '
280
+ f'them onto the simplex.')
281
+
282
+ # x_discrete = simplex_proj(x_discrete)
283
+ latent = self.forward(x, t, mask, start_frames, end_frames, x_cond, x_cond_mask, aatype)
284
+ latent_continuous = latent[:, :, :, :-20]
285
+ logits = latent[:, :, :, -20:]
286
+
287
+ flow_probs = torch.nn.functional.softmax(logits / self.args.dirichlet_flow_temp, -1)
288
+ if not torch.allclose(flow_probs.sum(3), torch.ones((B, T, L), device=x.device), atol=1e-4) or not (
289
+ flow_probs >= 0).all():
290
+ print(
291
+ f'WARNING: flow_probs.min(): {flow_probs.min()}. Some values of flow_probs do not lie on the '
292
+ f'simplex. There are we are {(flow_probs < 0).sum()} negative values in flow_probs of shape '
293
+ f'{flow_probs.shape} that are negative. We are projecting them onto the simplex.')
294
+ flow_probs = simplex_proj(flow_probs)
295
+
296
+ alpha, dalpha_dt = t_to_alpha(t[0], self.args);
297
+ alpha = alpha.item()
298
+
299
+ if alpha > self.args.alpha_max:
300
+ alpha = self.args.alpha_max - self.condflow.alpha_spacing
301
+ c_factor = self.condflow.c_factor(x_discrete.cpu().numpy(), alpha)
302
+ c_factor = torch.from_numpy(c_factor).to(x_discrete)
303
+ if torch.isnan(c_factor).any():
304
+ print(f'NAN cfactor after: xt.min(): {x_discrete.min()}, flow_probs.min(): {flow_probs.min()}')
305
+
306
+ if self.args.allow_nan_cfactor:
307
+ c_factor = torch.nan_to_num(c_factor)
308
+ else:
309
+ raise RuntimeError(
310
+ f'NAN cfactor after: xt.min(): {x_discrete.min()}, flow_probs.min(): {flow_probs.min()}')
311
+
312
+ if not (flow_probs >= 0).all(): print(f'flow_probs.min(): {flow_probs.min()}')
313
+ eye = torch.eye(20).to(x_discrete)
314
+ cond_flows = (eye - x_discrete.unsqueeze(-1)) * c_factor.unsqueeze(-2)
315
+ flow = (flow_probs.unsqueeze(-2) * cond_flows).sum(-1) * dalpha_dt
316
+
317
+ return torch.cat([latent_continuous, flow], -1)
318
+
319
+
320
+ class AttentionWithRoPE(nn.Module):
321
+ def __init__(self, *args, **kwargs):
322
+ super().__init__()
323
+ self.attn = MultiheadAttention(*args, **kwargs)
324
+
325
+ def forward(self, x, mask):
326
+ x = x.transpose(0, 1)
327
+ x, _ = self.attn(query=x, key=x, value=x, key_padding_mask=1 - mask)
328
+ x = x.transpose(0, 1)
329
+ return x
330
+
331
+
332
+ class IPALayer(nn.Module):
333
+ """Transformer layer block."""
334
+
335
+ def __init__(self, embed_dim, ffn_embed_dim, mha_heads, dropout=0.0,
336
+ use_rotary_embeddings=False, ipa_args=None):
337
+ super().__init__()
338
+ self.embed_dim = embed_dim
339
+ self.ffn_embed_dim = ffn_embed_dim
340
+ self.mha_heads = mha_heads
341
+ self.inf = 1e5
342
+ self.use_rotary_embeddings = use_rotary_embeddings
343
+ self._init_submodules(add_bias_kv=True, dropout=dropout, ipa_args=ipa_args)
344
+
345
+ def _init_submodules(self, add_bias_kv=False, dropout=0.0, ipa_args=None):
346
+ self.adaLN_modulation = nn.Sequential(
347
+ nn.SiLU(),
348
+ nn.Linear(self.embed_dim, 6 * self.embed_dim, bias=True)
349
+ )
350
+
351
+ self.ipa_norm = nn.LayerNorm(self.embed_dim)
352
+ self.ipa = InvariantPointAttention(**ipa_args)
353
+
354
+ self.mha_l = AttentionWithRoPE(
355
+ self.embed_dim,
356
+ self.mha_heads,
357
+ add_bias_kv=add_bias_kv,
358
+ dropout=dropout,
359
+ use_rotary_embeddings=self.use_rotary_embeddings,
360
+ )
361
+
362
+ self.mha_layer_norm = nn.LayerNorm(self.embed_dim, elementwise_affine=False, eps=1e-6)
363
+
364
+ self.fc1 = nn.Linear(self.embed_dim, self.ffn_embed_dim)
365
+ self.fc2 = nn.Linear(self.ffn_embed_dim, self.embed_dim)
366
+
367
+ self.final_layer_norm = nn.LayerNorm(self.embed_dim, elementwise_affine=False, eps=1e-6)
368
+
369
+ def forward(self, x, t, mask=None, frames=None):
370
+ shift_msa_l, scale_msa_l, gate_msa_l, \
371
+ shift_mlp, scale_mlp, gate_mlp = self.adaLN_modulation(t).chunk(6, dim=-1)
372
+ x = x + self.ipa(self.ipa_norm(x), frames, frame_mask=mask)
373
+
374
+ residual = x
375
+ x = modulate(self.mha_layer_norm(x), shift_msa_l, scale_msa_l)
376
+ x = self.mha_l(x, mask=mask)
377
+ x = residual + gate_msa_l.unsqueeze(1) * x
378
+
379
+ residual = x
380
+ x = modulate(self.final_layer_norm(x), shift_mlp, scale_mlp)
381
+ x = self.fc2(gelu(self.fc1(x)))
382
+ x = residual + gate_mlp.unsqueeze(1) * x
383
+
384
+ return x
385
+
386
+
387
+ class LatentMDGenLayer(nn.Module):
388
+ """Transformer layer block."""
389
+
390
+ def __init__(self, embed_dim, ffn_embed_dim, mha_heads, dropout=0.0, num_frames=50, hyena=False,
391
+ use_rotary_embeddings=False, use_time_attention=True, ipa_args=None):
392
+ super().__init__()
393
+ self.embed_dim = embed_dim
394
+ self.num_frames = num_frames
395
+ self.hyena = hyena
396
+ self.ffn_embed_dim = ffn_embed_dim
397
+ self.mha_heads = mha_heads
398
+ self.inf = 1e5
399
+ self.use_time_attention = use_time_attention
400
+ self.use_rotary_embeddings = use_rotary_embeddings
401
+ self._init_submodules(add_bias_kv=True, dropout=dropout, ipa_args=ipa_args)
402
+
403
+ def _init_submodules(self, add_bias_kv=False, dropout=0.0, ipa_args=None):
404
+
405
+ self.adaLN_modulation = nn.Sequential(
406
+ nn.SiLU(),
407
+ nn.Linear(self.embed_dim, 9 * self.embed_dim, bias=True)
408
+ )
409
+
410
+ if ipa_args is not None:
411
+ self.ipa_norm = nn.LayerNorm(self.embed_dim)
412
+ self.ipa = InvariantPointAttention(**ipa_args)
413
+
414
+ if self.hyena:
415
+ self.mha_t = HyenaOperator(
416
+ d_model=self.embed_dim,
417
+ l_max=self.num_frames,
418
+ order=2,
419
+ filter_order=64,
420
+ )
421
+
422
+ else:
423
+ self.mha_t = AttentionWithRoPE(
424
+ self.embed_dim,
425
+ self.mha_heads,
426
+ add_bias_kv=add_bias_kv,
427
+ dropout=dropout,
428
+ use_rotary_embeddings=self.use_rotary_embeddings,
429
+ )
430
+
431
+ self.mha_l = AttentionWithRoPE(
432
+ self.embed_dim,
433
+ self.mha_heads,
434
+ add_bias_kv=add_bias_kv,
435
+ dropout=dropout,
436
+ use_rotary_embeddings=self.use_rotary_embeddings,
437
+ )
438
+
439
+ self.mha_layer_norm = nn.LayerNorm(self.embed_dim, elementwise_affine=False, eps=1e-6)
440
+
441
+ self.fc1 = nn.Linear(self.embed_dim, self.ffn_embed_dim)
442
+ self.fc2 = nn.Linear(self.ffn_embed_dim, self.embed_dim)
443
+
444
+ self.final_layer_norm = nn.LayerNorm(self.embed_dim, elementwise_affine=False, eps=1e-6)
445
+
446
+ def forward(self, x, t, mask=None, frames=None):
447
+ B, T, L, C = x.shape
448
+
449
+ shift_msa_l, scale_msa_l, gate_msa_l, \
450
+ shift_msa_t, scale_msa_t, gate_msa_t, \
451
+ shift_mlp, scale_mlp, gate_mlp = self.adaLN_modulation(t).chunk(9, dim=-1)
452
+
453
+ if hasattr(self, 'ipa'):
454
+ x = x + self.ipa(self.ipa_norm(x), frames[:, None], frame_mask=mask)
455
+
456
+ residual = x
457
+ x = modulate(self.mha_layer_norm(x), shift_msa_l, scale_msa_l)
458
+ x = self.mha_l(
459
+ x.reshape(B * T, L, C),
460
+ mask=mask.reshape(B * T, L), # [:,None].expand(-1, T, -1).reshape(B * T, L)
461
+ ).reshape(B, T, L, C)
462
+ x = residual + gate_msa_l.unsqueeze(1) * x
463
+
464
+ residual = x
465
+ x = modulate(self.mha_layer_norm(x), shift_msa_t, scale_msa_t)
466
+ if self.hyena:
467
+ assert (mask - 1).sum() == 0
468
+ x = self.mha_t(
469
+ x.transpose(1, 2).reshape(B * L, T, C)
470
+ ).reshape(B, L, T, C).transpose(1, 2)
471
+ else:
472
+ x = self.mha_t(
473
+ x.transpose(1, 2).reshape(B * L, T, C),
474
+ mask=mask.transpose(1, 2).reshape(B * L, T)
475
+ ).reshape(B, L, T, C).transpose(1, 2)
476
+ x = residual + gate_msa_t.unsqueeze(1) * x
477
+
478
+ residual = x
479
+ x = modulate(self.final_layer_norm(x), shift_mlp, scale_mlp)
480
+ x = self.fc2(gelu(self.fc1(x)))
481
+ x = residual + gate_mlp.unsqueeze(1) * x
482
+
483
+ return x
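The forward pass above factorizes attention over the residue axis and the frame axis by folding the other axis into the batch dimension. A standalone sketch of that reshape pattern (hypothetical sizes; a stock nn.MultiheadAttention stands in for mha_l/mha_t, which in the layer are separate modules):

import torch
import torch.nn as nn

B, T, L, C = 2, 8, 16, 32                  # batch, frames, residues, channels
x = torch.randn(B, T, L, C)
attn = nn.MultiheadAttention(C, num_heads=4, batch_first=True)

# Attention over residues: fold time into the batch -> (B*T, L, C)
xl = x.reshape(B * T, L, C)
xl, _ = attn(xl, xl, xl)
x = xl.reshape(B, T, L, C)

# Attention over frames: fold residues into the batch -> (B*L, T, C)
xt = x.transpose(1, 2).reshape(B * L, T, C)
xt, _ = attn(xt, xt, xt)
x = xt.reshape(B, L, T, C).transpose(1, 2)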
data/mdgen/model/layers.py ADDED
@@ -0,0 +1,327 @@
1
+ # Copyright (c) Meta Platforms, Inc. and affiliates.
2
+ #
3
+ # This source code is licensed under the MIT license found in the
4
+ # LICENSE file in the root directory of this source tree.
5
+ import typing as T
6
+
7
+ import numpy as np
8
+ import torch, math
9
+ import torch.nn.functional as F
10
+ from einops import rearrange, repeat
11
+ from torch.nn import LayerNorm
12
+ from torch import nn
13
+
14
+ def modulate(x, shift, scale):
15
+ return x * (1 + scale.unsqueeze(1)) + shift.unsqueeze(1)
16
+
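As a quick illustration, modulate broadcasts per-sample shift/scale vectors across the sequence axis (shapes here are hypothetical):

import torch

x = torch.randn(4, 10, 64)                      # (batch, length, channels)
shift, scale = torch.randn(4, 64), torch.randn(4, 64)
out = modulate(x, shift, scale)                 # shift/scale unsqueezed to (4, 1, 64)
assert out.shape == (4, 10, 64)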
17
+ class TimestepEmbedder(nn.Module):
18
+ """
19
+ Embeds scalar timesteps into vector representations.
20
+ """
21
+ def __init__(self, hidden_size, frequency_embedding_size=256):
22
+ super().__init__()
23
+ self.mlp = nn.Sequential(
24
+ nn.Linear(frequency_embedding_size, hidden_size, bias=True),
25
+ nn.SiLU(),
26
+ nn.Linear(hidden_size, hidden_size, bias=True),
27
+ )
28
+ self.frequency_embedding_size = frequency_embedding_size
29
+
30
+ @staticmethod
31
+ def timestep_embedding(t, dim, max_period=10000):
32
+ """
33
+ Create sinusoidal timestep embeddings.
34
+ :param t: a 1-D Tensor of N indices, one per batch element.
35
+ These may be fractional.
36
+ :param dim: the dimension of the output.
37
+ :param max_period: controls the minimum frequency of the embeddings.
38
+ :return: an (N, D) Tensor of positional embeddings.
39
+ """
40
+
41
+ # https://github.com/openai/glide-text2im/blob/main/glide_text2im/nn.py
42
+ half = dim // 2
43
+ freqs = torch.exp(
44
+ -math.log(max_period) * torch.arange(start=0, end=half, dtype=torch.float32) / half
45
+ ).to(device=t.device)
46
+ args = t[:, None].float() * freqs[None]
47
+ embedding = torch.cat([torch.cos(args), torch.sin(args)], dim=-1)
48
+ if dim % 2:
49
+ embedding = torch.cat([embedding, torch.zeros_like(embedding[:, :1])], dim=-1)
50
+ return embedding
51
+
52
+ def forward(self, t):
53
+ t_freq = self.timestep_embedding(t, self.frequency_embedding_size)
54
+ t_emb = self.mlp(t_freq)
55
+ return t_emb
56
+
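A usage sketch, assuming a batch of (possibly fractional) diffusion times:

import torch

embedder = TimestepEmbedder(hidden_size=128)
t = torch.rand(4)                # one scalar time per batch element
c = embedder(t)                  # (4, 128) conditioning vectors for adaLN
assert c.shape == (4, 128)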
57
+ class FinalLayer(nn.Module):
58
+ """
59
+ The final layer of DiT.
60
+ """
61
+ def __init__(self, hidden_size, out_channels):
62
+ super().__init__()
63
+ self.norm_final = nn.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6)
64
+ self.linear = nn.Linear(hidden_size, out_channels, bias=True)
65
+ self.adaLN_modulation = nn.Sequential(
66
+ nn.SiLU(),
67
+ nn.Linear(hidden_size, 2 * hidden_size, bias=True)
68
+ )
69
+
70
+ def forward(self, x, c):
71
+ shift, scale = self.adaLN_modulation(c).chunk(2, dim=-1)
72
+ x = modulate(self.norm_final(x), shift, scale)
73
+ x = self.linear(x)
74
+ return x
75
+
76
+
77
+ def gelu(x):
78
+ """Implementation of the gelu activation function.
79
+
80
+ For information: OpenAI GPT's gelu is slightly different
81
+ (and gives slightly different results):
82
+ 0.5 * x * (1 + torch.tanh(math.sqrt(2 / math.pi) * (x + 0.044715 * torch.pow(x, 3))))
83
+ """
84
+ return x * 0.5 * (1.0 + torch.erf(x / math.sqrt(2.0)))
85
+
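The erf form above is the exact GELU; as a sanity check, it matches PyTorch's default (non-tanh) implementation:

import math
import torch
import torch.nn.functional as F

x = torch.randn(1000)
exact = x * 0.5 * (1.0 + torch.erf(x / math.sqrt(2.0)))
assert torch.allclose(exact, F.gelu(x), atol=1e-6)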
86
+ # class GaussianFourierProjection(nn.Module):
87
+ # """
88
+ # Gaussian random features for encoding time steps.
89
+ # """
90
+
91
+ # def __init__(self, embed_dim, scale=30.):
92
+ # super().__init__()
93
+ # # Randomly sample weights during initialization. These weights are fixed
94
+ # # during optimization and are not trainable.
95
+ # self.W = nn.Parameter(torch.randn(embed_dim // 2) * scale, requires_grad=False)
96
+
97
+ # def forward(self, x):
98
+ # x_proj = x[:, None] * self.W[None, :] * 2 * np.pi
99
+ # return torch.cat([torch.sin(x_proj), torch.cos(x_proj)], dim=-1)
100
+
101
+
102
+ class GaussianFourierProjection(nn.Module):
103
+ """Gaussian Fourier embeddings for noise levels.
104
+ from https://github.com/yang-song/score_sde_pytorch/blob/1618ddea340f3e4a2ed7852a0694a809775cf8d0/models/layerspp.py#L32
105
+ """
106
+
107
+ def __init__(self, embedding_size=256, scale=1.0):
108
+ super().__init__()
109
+ self.W = nn.Parameter(
110
+ torch.randn(embedding_size // 2) * scale, requires_grad=False
111
+ )
112
+
113
+ def forward(self, x):
114
+ x_proj = x[:, None] * self.W[None, :] * 2 * np.pi
115
+ emb = torch.cat([torch.sin(x_proj), torch.cos(x_proj)], dim=-1)
116
+ return emb
117
+
118
+ class Attention(nn.Module):
119
+ def __init__(self, embed_dim, num_heads, head_width, gated=False):
120
+ super().__init__()
121
+ assert embed_dim == num_heads * head_width
122
+
123
+ self.embed_dim = embed_dim
124
+ self.num_heads = num_heads
125
+ self.head_width = head_width
126
+
127
+ self.proj = nn.Linear(embed_dim, embed_dim * 3, bias=False)
128
+ self.o_proj = nn.Linear(embed_dim, embed_dim, bias=True)
129
+ self.gated = gated
130
+ if gated:
131
+ self.g_proj = nn.Linear(embed_dim, embed_dim)
132
+ torch.nn.init.zeros_(self.g_proj.weight)
133
+ torch.nn.init.ones_(self.g_proj.bias)
134
+
135
+ self.rescale_factor = self.head_width**-0.5
136
+
137
+ torch.nn.init.zeros_(self.o_proj.bias)
138
+
139
+ def forward(self, x, mask=None, bias=None, indices=None):
140
+ """
141
+ Basic self attention with optional mask and external pairwise bias.
142
+ To handle sequences of different lengths, use mask.
143
+
144
+ Inputs:
145
+ x: batch of input sequences (.. x L x C)
146
+ mask: batch of boolean masks where 1=valid, 0=padding position (.. x L_k). optional.
147
+ bias: batch of scalar pairwise attention biases (.. x Lq x Lk x num_heads). optional.
148
+
149
+ Outputs:
150
+ sequence projection (B x L x embed_dim), attention maps (B x L x L x num_heads)
151
+ """
152
+
153
+ t = rearrange(self.proj(x), "... l (h c) -> ... h l c", h=self.num_heads)
154
+ q, k, v = t.chunk(3, dim=-1)
155
+
156
+ q = self.rescale_factor * q
157
+ a = torch.einsum("...qc,...kc->...qk", q, k)
158
+
159
+ # Add external attention bias.
160
+ if bias is not None:
161
+ a = a + rearrange(bias, "... lq lk h -> ... h lq lk")
162
+
163
+ # Do not attend to padding tokens.
164
+ if mask is not None:
165
+ mask = repeat(
166
+ mask, "... lk -> ... h lq lk", h=self.num_heads, lq=q.shape[-2]
167
+ )
168
+ a = a.masked_fill(mask == False, -np.inf)
169
+
170
+ a = F.softmax(a, dim=-1)
171
+
172
+ y = torch.einsum("...hqk,...hkc->...qhc", a, v)
173
+ y = rearrange(y, "... h c -> ... (h c)", h=self.num_heads)
174
+
175
+ if self.gated:
176
+ y = self.g_proj(x).sigmoid() * y
177
+ y = self.o_proj(y)
178
+
179
+ return y, rearrange(a, "... lq lk h -> ... h lq lk")
180
+
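A shape sketch for the gated attention block above (hypothetical sizes; the bias is laid out as (..., Lq, Lk, num_heads) as documented):

import torch

attn = Attention(embed_dim=64, num_heads=8, head_width=8, gated=True)
x = torch.randn(2, 10, 64)
mask = torch.ones(2, 10, dtype=torch.bool)      # 1 = valid position
bias = torch.zeros(2, 10, 10, 8)
y, _ = attn(x, mask=mask, bias=bias)
assert y.shape == (2, 10, 64)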
181
+
182
+ class Dropout(nn.Module):
183
+ """
184
+ Implementation of dropout with the ability to share the dropout mask
185
+ along a particular dimension.
186
+ """
187
+
188
+ def __init__(self, r: float, batch_dim: T.Union[int, T.List[int]]):
189
+ super(Dropout, self).__init__()
190
+
191
+ self.r = r
192
+ if type(batch_dim) == int:
193
+ batch_dim = [batch_dim]
194
+ self.batch_dim = batch_dim
195
+ self.dropout = nn.Dropout(self.r)
196
+
197
+ def forward(self, x: torch.Tensor) -> torch.Tensor:
198
+ shape = list(x.shape)
199
+ if self.batch_dim is not None:
200
+ for bd in self.batch_dim:
201
+ shape[bd] = 1
202
+ return x * self.dropout(x.new_ones(shape))
203
+
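A small demonstration of the shared mask, assuming batch_dim=1 (every position along dim 1 is kept or dropped together):

import torch

drop = Dropout(r=0.5, batch_dim=1)
drop.train()
x = torch.ones(2, 4, 8)
y = drop(x)
# The mask has shape (2, 1, 8), so each dim-1 column is all 0 or all 1/(1-r).
assert ((y == 0) | (y == 2.0)).all()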
204
+
205
+ class SequenceToPair(nn.Module):
206
+ def __init__(self, sequence_state_dim, inner_dim, pairwise_state_dim):
207
+ super().__init__()
208
+
209
+ self.layernorm = nn.LayerNorm(sequence_state_dim)
210
+ self.proj = nn.Linear(sequence_state_dim, inner_dim * 2, bias=True)
211
+ self.o_proj = nn.Linear(2 * inner_dim, pairwise_state_dim, bias=True)
212
+
213
+ torch.nn.init.zeros_(self.proj.bias)
214
+ torch.nn.init.zeros_(self.o_proj.bias)
215
+
216
+ def forward(self, sequence_state):
217
+ """
218
+ Inputs:
219
+ sequence_state: B x L x sequence_state_dim
220
+
221
+ Output:
222
+ pairwise_state: B x L x L x pairwise_state_dim
223
+
224
+ Intermediate state:
225
+ B x L x L x 2*inner_dim
226
+ """
227
+
228
+ assert len(sequence_state.shape) == 3
229
+
230
+ s = self.layernorm(sequence_state)
231
+ s = self.proj(s)
232
+ q, k = s.chunk(2, dim=-1)
233
+
234
+ prod = q[:, None, :, :] * k[:, :, None, :]
235
+ diff = q[:, None, :, :] - k[:, :, None, :]
236
+
237
+ x = torch.cat([prod, diff], dim=-1)
238
+ x = self.o_proj(x)
239
+
240
+ return x
241
+
242
+
243
+ class StructureModuleTransitionLayer(nn.Module):
244
+ def __init__(self, c):
245
+ super(StructureModuleTransitionLayer, self).__init__()
246
+
247
+ self.c = c
248
+
249
+ self.linear_1 = Linear(self.c, self.c, init="relu")
250
+ self.linear_2 = Linear(self.c, self.c, init="relu")
251
+ self.linear_3 = Linear(self.c, self.c, init="final")
252
+
253
+ self.relu = nn.ReLU()
254
+
255
+ def forward(self, s):
256
+ s_initial = s
257
+ s = self.linear_1(s)
258
+ s = self.relu(s)
259
+ s = self.linear_2(s)
260
+ s = self.relu(s)
261
+ s = self.linear_3(s)
262
+
263
+ s = s + s_initial
264
+
265
+ return s
266
+
267
+
268
+ class StructureModuleTransition(nn.Module):
269
+ def __init__(self, c, num_layers, dropout_rate):
270
+ super().__init__()
271
+
272
+ self.c = c
273
+ self.num_layers = num_layers
274
+ self.dropout_rate = dropout_rate
275
+
276
+ self.layers = nn.ModuleList()
277
+ for _ in range(self.num_layers):
278
+ l = StructureModuleTransitionLayer(self.c)
279
+ self.layers.append(l)
280
+
281
+ self.dropout = nn.Dropout(self.dropout_rate)
282
+ self.layer_norm = LayerNorm(self.c)
283
+
284
+ def forward(self, s):
285
+ for l in self.layers:
286
+ s = l(s)
287
+
288
+ s = self.dropout(s)
289
+ s = self.layer_norm(s)
290
+ return s
291
+
292
+
293
+ class PairToSequence(nn.Module):
294
+ def __init__(self, pairwise_state_dim, num_heads):
295
+ super().__init__()
296
+
297
+ self.layernorm = nn.LayerNorm(pairwise_state_dim)
298
+ self.linear = nn.Linear(pairwise_state_dim, num_heads, bias=False)
299
+
300
+ def forward(self, pairwise_state):
301
+ """
302
+ Inputs:
303
+ pairwise_state: B x L x L x pairwise_state_dim
304
+
305
+ Output:
306
+ pairwise_bias: B x L x L x num_heads
307
+ """
308
+ assert len(pairwise_state.shape) == 4
309
+ z = self.layernorm(pairwise_state)
310
+ pairwise_bias = self.linear(z)
311
+ return pairwise_bias
312
+
313
+
314
+ class ResidueMLP(nn.Module):
315
+ def __init__(self, embed_dim, inner_dim, norm=nn.LayerNorm, dropout=0):
316
+ super().__init__()
317
+
318
+ self.mlp = nn.Sequential(
319
+ norm(embed_dim),
320
+ nn.Linear(embed_dim, inner_dim),
321
+ nn.ReLU(),
322
+ nn.Linear(inner_dim, embed_dim),
323
+ nn.Dropout(dropout),
324
+ )
325
+
326
+ def forward(self, x):
327
+ return x + self.mlp(x)
data/mdgen/model/mha.py ADDED
@@ -0,0 +1,510 @@
1
+ # Copyright (c) Meta Platforms, Inc. and affiliates.
2
+ #
3
+ # This source code is licensed under the MIT license found in the
4
+ # LICENSE file in the root directory of this source tree.
5
+
6
+ import math
7
+ from typing import Dict, Optional, Tuple
8
+
9
+ import torch
10
+ import torch.nn.functional as F
11
+ from torch import Tensor, nn
12
+ from torch.nn import Parameter
13
+ from esm.rotary_embedding import RotaryEmbedding
14
+
15
+
16
+ import uuid
17
+
18
+
19
+ def utils_softmax(x, dim: int, onnx_trace: bool = False):
20
+ if onnx_trace:
21
+ return F.softmax(x.float(), dim=dim)
22
+ else:
23
+ return F.softmax(x, dim=dim, dtype=torch.float32)
24
+
25
+
26
+ class FairseqIncrementalState(object):
27
+ def __init__(self, *args, **kwargs):
28
+ super().__init__(*args, **kwargs)
29
+ self.init_incremental_state()
30
+
31
+ def init_incremental_state(self):
32
+ self._incremental_state_id = str(uuid.uuid4())
33
+
34
+ def _get_full_incremental_state_key(self, key: str) -> str:
35
+ return "{}.{}".format(self._incremental_state_id, key)
36
+
37
+ def get_incremental_state(
38
+ self,
39
+ incremental_state: Optional[Dict[str, Dict[str, Optional[Tensor]]]],
40
+ key: str,
41
+ ) -> Optional[Dict[str, Optional[Tensor]]]:
42
+ """Helper for getting incremental state for an nn.Module."""
43
+ full_key = self._get_full_incremental_state_key(key)
44
+ if incremental_state is None or full_key not in incremental_state:
45
+ return None
46
+ return incremental_state[full_key]
47
+
48
+ def set_incremental_state(
49
+ self,
50
+ incremental_state: Optional[Dict[str, Dict[str, Optional[Tensor]]]],
51
+ key: str,
52
+ value: Dict[str, Optional[Tensor]],
53
+ ) -> Optional[Dict[str, Dict[str, Optional[Tensor]]]]:
54
+ """Helper for setting incremental state for an nn.Module."""
55
+ if incremental_state is not None:
56
+ full_key = self._get_full_incremental_state_key(key)
57
+ incremental_state[full_key] = value
58
+ return incremental_state
59
+
60
+
61
+ def with_incremental_state(cls):
62
+ cls.__bases__ = (FairseqIncrementalState,) + tuple(
63
+ b for b in cls.__bases__ if b != FairseqIncrementalState
64
+ )
65
+ return cls
66
+
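A hypothetical single-step decoding loop showing how a decorated class threads its cache: the same dict is passed back in on every call, and the cached keys/values of the MultiheadAttention defined below grow by one time step per call:

import torch

mha = MultiheadAttention(embed_dim=64, num_heads=8, self_attention=True)
state = {}
for step in range(3):
    x = torch.randn(1, 2, 64)          # (time=1, batch=2, channels)
    out, _ = mha(x, x, x, incremental_state=state)
assert out.shape == (1, 2, 64)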
67
+
68
+ @with_incremental_state
69
+ class MultiheadAttention(nn.Module):
70
+ """Multi-headed attention.
71
+
72
+ See "Attention Is All You Need" for more details.
73
+ """
74
+
75
+ def __init__(
76
+ self,
77
+ embed_dim,
78
+ num_heads,
79
+ kdim=None,
80
+ vdim=None,
81
+ dropout=0.0,
82
+ bias=True,
83
+ add_bias_kv: bool = False,
84
+ add_zero_attn: bool = False,
85
+ self_attention: bool = False,
86
+ encoder_decoder_attention: bool = False,
87
+ use_rotary_embeddings: bool = False,
88
+ ):
89
+ super().__init__()
90
+ self.embed_dim = embed_dim
91
+ self.kdim = kdim if kdim is not None else embed_dim
92
+ self.vdim = vdim if vdim is not None else embed_dim
93
+ self.qkv_same_dim = self.kdim == embed_dim and self.vdim == embed_dim
94
+
95
+ self.batch_first = True
96
+ self.num_heads = num_heads
97
+ self.dropout = dropout
98
+ self.head_dim = embed_dim // num_heads
99
+ assert (
100
+ self.head_dim * num_heads == self.embed_dim
101
+ ), "embed_dim must be divisible by num_heads"
102
+ self.scaling = self.head_dim**-0.5
103
+
104
+ self.self_attention = self_attention
105
+ self.encoder_decoder_attention = encoder_decoder_attention
106
+
107
+ assert not self.self_attention or self.qkv_same_dim, (
108
+ "Self-attention requires query, key and " "value to be of the same size"
109
+ )
110
+
111
+ self.k_proj = nn.Linear(self.kdim, embed_dim, bias=bias)
112
+ self.v_proj = nn.Linear(self.vdim, embed_dim, bias=bias)
113
+ self.q_proj = nn.Linear(embed_dim, embed_dim, bias=bias)
114
+
115
+ self.out_proj = nn.Linear(embed_dim, embed_dim, bias=bias)
116
+
117
+ if add_bias_kv:
118
+ self.bias_k = Parameter(torch.Tensor(1, 1, embed_dim))
119
+ self.bias_v = Parameter(torch.Tensor(1, 1, embed_dim))
120
+ else:
121
+ self.bias_k = self.bias_v = None
122
+
123
+ self.add_zero_attn = add_zero_attn
124
+
125
+ self.reset_parameters()
126
+
127
+ self.onnx_trace = False
128
+ self.rot_emb = None
129
+ if use_rotary_embeddings:
130
+ self.rot_emb = RotaryEmbedding(dim=self.head_dim)
131
+
132
+ self.enable_torch_version = False
133
+ if hasattr(F, "multi_head_attention_forward"):
134
+ self.enable_torch_version = True
135
+ else:
136
+ self.enable_torch_version = False
137
+
138
+ def prepare_for_onnx_export_(self):
139
+ self.onnx_trace = True
140
+
141
+ def reset_parameters(self):
142
+ if self.qkv_same_dim:
143
+ # Empirically observed the convergence to be much better with
144
+ # the scaled initialization
145
+ nn.init.xavier_uniform_(self.k_proj.weight, gain=1 / math.sqrt(2))
146
+ nn.init.xavier_uniform_(self.v_proj.weight, gain=1 / math.sqrt(2))
147
+ nn.init.xavier_uniform_(self.q_proj.weight, gain=1 / math.sqrt(2))
148
+ else:
149
+ nn.init.xavier_uniform_(self.k_proj.weight)
150
+ nn.init.xavier_uniform_(self.v_proj.weight)
151
+ nn.init.xavier_uniform_(self.q_proj.weight)
152
+
153
+ nn.init.xavier_uniform_(self.out_proj.weight)
154
+ if self.out_proj.bias is not None:
155
+ nn.init.constant_(self.out_proj.bias, 0.0)
156
+ if self.bias_k is not None:
157
+ nn.init.xavier_normal_(self.bias_k)
158
+ if self.bias_v is not None:
159
+ nn.init.xavier_normal_(self.bias_v)
160
+
161
+ def forward(
162
+ self,
163
+ query,
164
+ key: Optional[Tensor],
165
+ value: Optional[Tensor],
166
+ key_padding_mask: Optional[Tensor] = None,
167
+ incremental_state: Optional[Dict[str, Dict[str, Optional[Tensor]]]] = None,
168
+ need_weights: bool = True,
169
+ static_kv: bool = False,
170
+ attn_mask: Optional[Tensor] = None,
171
+ before_softmax: bool = False,
172
+ need_head_weights: bool = False,
173
+ ) -> Tuple[Tensor, Optional[Tensor]]:
174
+ """Input shape: Time x Batch x Channel
175
+
176
+ Args:
177
+ key_padding_mask (ByteTensor, optional): mask to exclude
178
+ keys that are pads, of shape `(batch, src_len)`, where
179
+ padding elements are indicated by 1s.
180
+ need_weights (bool, optional): return the attention weights,
181
+ averaged over heads (default: False).
182
+ attn_mask (ByteTensor, optional): typically used to
183
+ implement causal attention, where the mask prevents the
184
+ attention from looking forward in time (default: None).
185
+ before_softmax (bool, optional): return the raw attention
186
+ weights and values before the attention softmax.
187
+ need_head_weights (bool, optional): return the attention
188
+ weights for each head. Implies *need_weights*. Default:
189
+ return the average attention weights over all heads.
190
+ """
191
+ if need_head_weights:
192
+ need_weights = True
193
+
194
+ tgt_len, bsz, embed_dim = query.size()
195
+ assert embed_dim == self.embed_dim
196
+ assert list(query.size()) == [tgt_len, bsz, embed_dim]
197
+
198
+ if (
199
+ not self.rot_emb
200
+ and self.enable_torch_version
201
+ and not self.onnx_trace
202
+ and incremental_state is None
203
+ and not static_kv
204
+ # A workaround for quantization to work. Otherwise JIT compilation
205
+ # treats bias in linear module as method.
206
+ and not torch.jit.is_scripting()
207
+ and not need_head_weights
208
+ ):
209
+ assert key is not None and value is not None
210
+ return F.multi_head_attention_forward(
211
+ query,
212
+ key,
213
+ value,
214
+ self.embed_dim,
215
+ self.num_heads,
216
+ torch.empty([0]),
217
+ torch.cat((self.q_proj.bias, self.k_proj.bias, self.v_proj.bias)),
218
+ self.bias_k,
219
+ self.bias_v,
220
+ self.add_zero_attn,
221
+ self.dropout,
222
+ self.out_proj.weight,
223
+ self.out_proj.bias,
224
+ self.training,
225
+ key_padding_mask,
226
+ need_weights,
227
+ attn_mask,
228
+ use_separate_proj_weight=True,
229
+ q_proj_weight=self.q_proj.weight,
230
+ k_proj_weight=self.k_proj.weight,
231
+ v_proj_weight=self.v_proj.weight,
232
+ )
233
+ if incremental_state is not None:
234
+ saved_state = self._get_input_buffer(incremental_state)
235
+ if saved_state is not None and "prev_key" in saved_state:
236
+ # previous time steps are cached - no need to recompute
237
+ # key and value if they are static
238
+ if static_kv:
239
+ assert self.encoder_decoder_attention and not self.self_attention
240
+ key = value = None
241
+ else:
242
+ saved_state = None
243
+
244
+ if self.self_attention:
245
+ q = self.q_proj(query)
246
+ k = self.k_proj(query)
247
+ v = self.v_proj(query)
248
+ elif self.encoder_decoder_attention:
249
+ # encoder-decoder attention
250
+ q = self.q_proj(query)
251
+ if key is None:
252
+ assert value is None
253
+ k = v = None
254
+ else:
255
+ k = self.k_proj(key)
256
+ v = self.v_proj(key)
257
+
258
+ else:
259
+ assert key is not None and value is not None
260
+ q = self.q_proj(query)
261
+ k = self.k_proj(key)
262
+ v = self.v_proj(value)
263
+ q *= self.scaling
264
+
265
+ if self.bias_k is not None:
266
+ assert self.bias_v is not None
267
+ k = torch.cat([k, self.bias_k.repeat(1, bsz, 1)])
268
+ v = torch.cat([v, self.bias_v.repeat(1, bsz, 1)])
269
+ if attn_mask is not None:
270
+ attn_mask = torch.cat(
271
+ [attn_mask, attn_mask.new_zeros(attn_mask.size(0), 1)], dim=1
272
+ )
273
+ if key_padding_mask is not None:
274
+ key_padding_mask = torch.cat(
275
+ [
276
+ key_padding_mask,
277
+ key_padding_mask.new_zeros(key_padding_mask.size(0), 1),
278
+ ],
279
+ dim=1,
280
+ )
281
+
282
+ q = q.contiguous().view(tgt_len, bsz * self.num_heads, self.head_dim).transpose(0, 1)
283
+ if k is not None:
284
+ k = k.contiguous().view(-1, bsz * self.num_heads, self.head_dim).transpose(0, 1)
285
+ if v is not None:
286
+ v = v.contiguous().view(-1, bsz * self.num_heads, self.head_dim).transpose(0, 1)
287
+
288
+ if saved_state is not None:
289
+ # saved states are stored with shape (bsz, num_heads, seq_len, head_dim)
290
+ if "prev_key" in saved_state:
291
+ _prev_key = saved_state["prev_key"]
292
+ assert _prev_key is not None
293
+ prev_key = _prev_key.view(bsz * self.num_heads, -1, self.head_dim)
294
+ if static_kv:
295
+ k = prev_key
296
+ else:
297
+ assert k is not None
298
+ k = torch.cat([prev_key, k], dim=1)
299
+ if "prev_value" in saved_state:
300
+ _prev_value = saved_state["prev_value"]
301
+ assert _prev_value is not None
302
+ prev_value = _prev_value.view(bsz * self.num_heads, -1, self.head_dim)
303
+ if static_kv:
304
+ v = prev_value
305
+ else:
306
+ assert v is not None
307
+ v = torch.cat([prev_value, v], dim=1)
308
+ prev_key_padding_mask: Optional[Tensor] = None
309
+ if "prev_key_padding_mask" in saved_state:
310
+ prev_key_padding_mask = saved_state["prev_key_padding_mask"]
311
+ assert k is not None and v is not None
312
+ key_padding_mask = MultiheadAttention._append_prev_key_padding_mask(
313
+ key_padding_mask=key_padding_mask,
314
+ prev_key_padding_mask=prev_key_padding_mask,
315
+ batch_size=bsz,
316
+ src_len=k.size(1),
317
+ static_kv=static_kv,
318
+ )
319
+
320
+ saved_state["prev_key"] = k.view(bsz, self.num_heads, -1, self.head_dim)
321
+ saved_state["prev_value"] = v.view(bsz, self.num_heads, -1, self.head_dim)
322
+ saved_state["prev_key_padding_mask"] = key_padding_mask
323
+ # In this branch incremental_state is never None
324
+ assert incremental_state is not None
325
+ incremental_state = self._set_input_buffer(incremental_state, saved_state)
326
+ assert k is not None
327
+ src_len = k.size(1)
328
+
329
+ # This is part of a workaround to get around fork/join parallelism
330
+ # not supporting Optional types.
331
+ if key_padding_mask is not None and key_padding_mask.dim() == 0:
332
+ key_padding_mask = None
333
+
334
+ if key_padding_mask is not None:
335
+ assert key_padding_mask.size(0) == bsz
336
+ assert key_padding_mask.size(1) == src_len
337
+
338
+ if self.add_zero_attn:
339
+ assert v is not None
340
+ src_len += 1
341
+ k = torch.cat([k, k.new_zeros((k.size(0), 1) + k.size()[2:])], dim=1)
342
+ v = torch.cat([v, v.new_zeros((v.size(0), 1) + v.size()[2:])], dim=1)
343
+ if attn_mask is not None:
344
+ attn_mask = torch.cat(
345
+ [attn_mask, attn_mask.new_zeros(attn_mask.size(0), 1)], dim=1
346
+ )
347
+ if key_padding_mask is not None:
348
+ key_padding_mask = torch.cat(
349
+ [
350
+ key_padding_mask,
351
+ torch.zeros(key_padding_mask.size(0), 1).type_as(key_padding_mask),
352
+ ],
353
+ dim=1,
354
+ )
355
+
356
+ if self.rot_emb:
357
+ q, k = self.rot_emb(q, k)
358
+
359
+ attn_weights = torch.bmm(q, k.transpose(1, 2))
360
+ attn_weights = MultiheadAttention.apply_sparse_mask(attn_weights, tgt_len, src_len, bsz)
361
+
362
+ assert list(attn_weights.size()) == [bsz * self.num_heads, tgt_len, src_len]
363
+
364
+ if attn_mask is not None:
365
+ attn_mask = attn_mask.unsqueeze(0)
366
+ if self.onnx_trace:
367
+ attn_mask = attn_mask.repeat(attn_weights.size(0), 1, 1)
368
+ attn_weights += attn_mask
369
+
370
+ if key_padding_mask is not None:
371
+ # don't attend to padding symbols
372
+ attn_weights = attn_weights.view(bsz, self.num_heads, tgt_len, src_len)
373
+ attn_weights = attn_weights.masked_fill(
374
+ key_padding_mask.unsqueeze(1).unsqueeze(2).to(torch.bool), float("-inf")
375
+ )
376
+ attn_weights = attn_weights.view(bsz * self.num_heads, tgt_len, src_len)
377
+
378
+ if before_softmax:
379
+ return attn_weights, v
380
+
381
+ attn_weights_float = utils_softmax(attn_weights, dim=-1, onnx_trace=self.onnx_trace)
382
+ attn_weights = attn_weights_float.type_as(attn_weights)
383
+ attn_probs = F.dropout(
384
+ attn_weights_float.type_as(attn_weights),
385
+ p=self.dropout,
386
+ training=self.training,
387
+ )
388
+ assert v is not None
389
+ attn = torch.bmm(attn_probs, v)
390
+ assert list(attn.size()) == [bsz * self.num_heads, tgt_len, self.head_dim]
391
+ if self.onnx_trace and attn.size(1) == 1:
392
+ # when ONNX tracing a single decoder step (sequence length == 1)
393
+ # the transpose is a no-op copy before view, thus unnecessary
394
+ attn = attn.contiguous().view(tgt_len, bsz, embed_dim)
395
+ else:
396
+ attn = attn.transpose(0, 1).contiguous().view(tgt_len, bsz, embed_dim)
397
+ attn = self.out_proj(attn)
398
+ attn_weights: Optional[Tensor] = None
399
+ if need_weights:
400
+ attn_weights = attn_weights_float.view(
401
+ bsz, self.num_heads, tgt_len, src_len
402
+ ).type_as(attn).transpose(1, 0)
403
+ if not need_head_weights:
404
+ # average attention weights over heads
405
+ attn_weights = attn_weights.mean(dim=0)
406
+
407
+ return attn, attn_weights
408
+
409
+ @staticmethod
410
+ def _append_prev_key_padding_mask(
411
+ key_padding_mask: Optional[Tensor],
412
+ prev_key_padding_mask: Optional[Tensor],
413
+ batch_size: int,
414
+ src_len: int,
415
+ static_kv: bool,
416
+ ) -> Optional[Tensor]:
417
+ # saved key padding masks have shape (bsz, seq_len)
418
+ if prev_key_padding_mask is not None and static_kv:
419
+ new_key_padding_mask = prev_key_padding_mask
420
+ elif prev_key_padding_mask is not None and key_padding_mask is not None:
421
+ new_key_padding_mask = torch.cat(
422
+ [prev_key_padding_mask.float(), key_padding_mask.float()], dim=1
423
+ )
424
+ # During incremental decoding, as the padding token enters and
425
+ # leaves the frame, there will be a time when prev or current
426
+ # is None
427
+ elif prev_key_padding_mask is not None:
428
+ filler = torch.zeros(
429
+ (batch_size, src_len - prev_key_padding_mask.size(1)),
430
+ device=prev_key_padding_mask.device,
431
+ )
432
+ new_key_padding_mask = torch.cat(
433
+ [prev_key_padding_mask.float(), filler.float()], dim=1
434
+ )
435
+ elif key_padding_mask is not None:
436
+ filler = torch.zeros(
437
+ (batch_size, src_len - key_padding_mask.size(1)),
438
+ device=key_padding_mask.device,
439
+ )
440
+ new_key_padding_mask = torch.cat([filler.float(), key_padding_mask.float()], dim=1)
441
+ else:
442
+ new_key_padding_mask = prev_key_padding_mask
443
+ return new_key_padding_mask
444
+
445
+ @torch.jit.export
446
+ def reorder_incremental_state(
447
+ self, incremental_state: Dict[str, Dict[str, Optional[Tensor]]], new_order: Tensor
448
+ ):
449
+ """Reorder buffered internal state (for incremental generation)."""
450
+ input_buffer = self._get_input_buffer(incremental_state)
451
+ if input_buffer is not None:
452
+ for k in input_buffer.keys():
453
+ input_buffer_k = input_buffer[k]
454
+ if input_buffer_k is not None:
455
+ if self.encoder_decoder_attention and input_buffer_k.size(0) == new_order.size(
456
+ 0
457
+ ):
458
+ break
459
+ input_buffer[k] = input_buffer_k.index_select(0, new_order)
460
+ incremental_state = self._set_input_buffer(incremental_state, input_buffer)
461
+ return incremental_state
462
+
463
+ def _get_input_buffer(
464
+ self, incremental_state: Optional[Dict[str, Dict[str, Optional[Tensor]]]]
465
+ ) -> Dict[str, Optional[Tensor]]:
466
+ result = self.get_incremental_state(incremental_state, "attn_state")
467
+ if result is not None:
468
+ return result
469
+ else:
470
+ empty_result: Dict[str, Optional[Tensor]] = {}
471
+ return empty_result
472
+
473
+ def _set_input_buffer(
474
+ self,
475
+ incremental_state: Dict[str, Dict[str, Optional[Tensor]]],
476
+ buffer: Dict[str, Optional[Tensor]],
477
+ ):
478
+ return self.set_incremental_state(incremental_state, "attn_state", buffer)
479
+
480
+ def apply_sparse_mask(attn_weights, tgt_len: int, src_len: int, bsz: int):
481
+ return attn_weights
482
+
483
+ def upgrade_state_dict_named(self, state_dict, name):
484
+ prefix = name + "." if name != "" else ""
485
+ items_to_add = {}
486
+ keys_to_remove = []
487
+ for k in state_dict.keys():
488
+ if k.endswith(prefix + "in_proj_weight"):
489
+ # in_proj_weight used to be q + k + v with same dimensions
490
+ dim = int(state_dict[k].shape[0] / 3)
491
+ items_to_add[prefix + "q_proj.weight"] = state_dict[k][:dim]
492
+ items_to_add[prefix + "k_proj.weight"] = state_dict[k][dim : 2 * dim]
493
+ items_to_add[prefix + "v_proj.weight"] = state_dict[k][2 * dim :]
494
+
495
+ keys_to_remove.append(k)
496
+
497
+ k_bias = prefix + "in_proj_bias"
498
+ if k_bias in state_dict.keys():
499
+ dim = int(state_dict[k].shape[0] / 3)
500
+ items_to_add[prefix + "q_proj.bias"] = state_dict[k_bias][:dim]
501
+ items_to_add[prefix + "k_proj.bias"] = state_dict[k_bias][dim : 2 * dim]
502
+ items_to_add[prefix + "v_proj.bias"] = state_dict[k_bias][2 * dim :]
503
+
504
+ keys_to_remove.append(prefix + "in_proj_bias")
505
+
506
+ for k in keys_to_remove:
507
+ del state_dict[k]
508
+
509
+ for key, value in items_to_add.items():
510
+ state_dict[key] = value
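A sketch of the tensor surgery upgrade_state_dict_named performs: an old fused in_proj_weight of shape (3*D, D) becomes separate q/k/v projection weights:

import torch

D = 8
fused = torch.randn(3 * D, D)
q_w, k_w, v_w = fused[:D], fused[D:2 * D], fused[2 * D:]
assert q_w.shape == k_w.shape == v_w.shape == (D, D)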
data/mdgen/model/primitives.py ADDED
@@ -0,0 +1,830 @@
1
+ # Copyright 2021 AlQuraishi Laboratory
2
+ # Copyright 2021 DeepMind Technologies Limited
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+ import importlib
16
+ import math
17
+ from typing import Optional, Callable, List, Tuple
18
+ import numpy as np
19
+
20
+ deepspeed_is_installed = importlib.util.find_spec("deepspeed") is not None
21
+ ds4s_is_installed = deepspeed_is_installed and importlib.util.find_spec("deepspeed.ops.deepspeed4science") is not None
22
+ if deepspeed_is_installed:
23
+ import deepspeed
24
+
25
+ if ds4s_is_installed:
26
+ from deepspeed.ops.deepspeed4science import DS4Sci_EvoformerAttention
27
+
28
+ fa_is_installed = importlib.util.find_spec("flash_attn") is not None
29
+ if fa_is_installed:
30
+ from flash_attn.bert_padding import unpad_input
31
+ from flash_attn.flash_attn_interface import flash_attn_unpadded_kvpacked_func
32
+
33
+ import torch
34
+ import torch.nn as nn
35
+ from scipy.stats import truncnorm
36
+
37
+ # from openfold.utils.checkpointing import get_checkpoint_fn
38
+ # from openfold.utils.kernel.attention_core import attention_core
39
+ # from openfold.utils.precision_utils import is_fp16_enabled
40
+ from ..tensor_utils import (
41
+ permute_final_dims,
42
+ flatten_final_dims,
43
+ )
44
+
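Note that Attention.forward below still calls is_fp16_enabled (and the memory-efficient path calls attention_core / get_checkpoint_fn) even though the corresponding openfold imports are commented out above, so running those paths as written would raise NameError. A minimal stand-in for the fp16 check, assuming autocast-based mixed precision, could be:

def is_fp16_enabled():
    # Hypothetical fallback for the commented-out openfold import;
    # treats an active fp16 autocast context as "fp16 enabled".
    return torch.is_autocast_enabled() and torch.get_autocast_gpu_dtype() == torch.float16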
45
+
46
+ DEFAULT_LMA_Q_CHUNK_SIZE = 1024
47
+ DEFAULT_LMA_KV_CHUNK_SIZE = 4096
48
+
49
+
50
+ def _prod(nums):
51
+ out = 1
52
+ for n in nums:
53
+ out = out * n
54
+ return out
55
+
56
+
57
+ def _calculate_fan(linear_weight_shape, fan="fan_in"):
58
+ fan_out, fan_in = linear_weight_shape
59
+
60
+ if fan == "fan_in":
61
+ f = fan_in
62
+ elif fan == "fan_out":
63
+ f = fan_out
64
+ elif fan == "fan_avg":
65
+ f = (fan_in + fan_out) / 2
66
+ else:
67
+ raise ValueError("Invalid fan option")
68
+
69
+ return f
70
+
71
+
72
+ def trunc_normal_init_(weights, scale=1.0, fan="fan_in"):
73
+ shape = weights.shape
74
+ f = _calculate_fan(shape, fan)
75
+ scale = scale / max(1, f)
76
+ a = -2
77
+ b = 2
78
+ std = math.sqrt(scale) / truncnorm.std(a=a, b=b, loc=0, scale=1)
79
+ size = _prod(shape)
80
+ samples = truncnorm.rvs(a=a, b=b, loc=0, scale=std, size=size)
81
+ samples = np.reshape(samples, shape)
82
+ with torch.no_grad():
83
+ weights.copy_(torch.tensor(samples, device=weights.device))
84
+
85
+
86
+ def lecun_normal_init_(weights):
87
+ trunc_normal_init_(weights, scale=1.0)
88
+
89
+
90
+ def he_normal_init_(weights):
91
+ trunc_normal_init_(weights, scale=2.0)
92
+
93
+
94
+ def glorot_uniform_init_(weights):
95
+ nn.init.xavier_uniform_(weights, gain=1)
96
+
97
+
98
+ def final_init_(weights):
99
+ with torch.no_grad():
100
+ weights.fill_(0.0)
101
+
102
+
103
+ def gating_init_(weights):
104
+ with torch.no_grad():
105
+ weights.fill_(0.0)
106
+
107
+
108
+ def normal_init_(weights):
109
+ torch.nn.init.kaiming_normal_(weights, nonlinearity="linear")
110
+
111
+
112
+ def ipa_point_weights_init_(weights):
113
+ with torch.no_grad():
114
+ softplus_inverse_1 = 0.541324854612918
115
+ weights.fill_(softplus_inverse_1)
116
+
117
+
118
+ class Linear(nn.Linear):
119
+ """
120
+ A Linear layer with built-in nonstandard initializations. Called just
121
+ like torch.nn.Linear.
122
+
123
+ Implements the initializers in 1.11.4, plus some additional ones found
124
+ in the code.
125
+ """
126
+
127
+ def __init__(
128
+ self,
129
+ in_dim: int,
130
+ out_dim: int,
131
+ bias: bool = True,
132
+ init: str = "default",
133
+ init_fn: Optional[Callable[[torch.Tensor, torch.Tensor], None]] = None,
134
+ precision=None
135
+ ):
136
+ """
137
+ Args:
138
+ in_dim:
139
+ The final dimension of inputs to the layer
140
+ out_dim:
141
+ The final dimension of layer outputs
142
+ bias:
143
+ Whether to learn an additive bias. True by default
144
+ init:
145
+ The initializer to use. Choose from:
146
+
147
+ "default": LeCun fan-in truncated normal initialization
148
+ "relu": He initialization w/ truncated normal distribution
149
+ "glorot": Fan-average Glorot uniform initialization
150
+ "gating": Weights=0, Bias=1
151
+ "normal": Normal initialization with std=1/sqrt(fan_in)
152
+ "final": Weights=0, Bias=0
153
+
154
+ Overridden by init_fn if the latter is not None.
155
+ init_fn:
156
+ A custom initializer taking weight and bias as inputs.
157
+ Overrides init if not None.
158
+ """
159
+ super(Linear, self).__init__(in_dim, out_dim, bias=bias)
160
+
161
+ if bias:
162
+ with torch.no_grad():
163
+ self.bias.fill_(0)
164
+
165
+ with torch.no_grad():
166
+ if init_fn is not None:
167
+ init_fn(self.weight, self.bias)
168
+ else:
169
+ if init == "default":
170
+ lecun_normal_init_(self.weight)
171
+ elif init == "relu":
172
+ he_normal_init_(self.weight)
173
+ elif init == "glorot":
174
+ glorot_uniform_init_(self.weight)
175
+ elif init == "gating":
176
+ gating_init_(self.weight)
177
+ if bias:
178
+ self.bias.fill_(1.0)
179
+ elif init == "normal":
180
+ normal_init_(self.weight)
181
+ elif init == "final":
182
+ final_init_(self.weight)
183
+ else:
184
+ raise ValueError("Invalid init string.")
185
+
186
+ self.precision = precision
187
+
188
+ def forward(self, input: torch.Tensor) -> torch.Tensor:
189
+ d = input.dtype
190
+ deepspeed_is_initialized = (
191
+ deepspeed_is_installed and
192
+ deepspeed.comm.comm.is_initialized()
193
+ )
194
+ if self.precision is not None:
195
+ with torch.cuda.amp.autocast(enabled=False):
196
+ bias = self.bias.to(dtype=self.precision) if self.bias is not None else None
197
+ return nn.functional.linear(input.to(dtype=self.precision),
198
+ self.weight.to(dtype=self.precision),
199
+ bias).to(dtype=d)
200
+
201
+ if d is torch.bfloat16 and not deepspeed_is_initialized:
202
+ with torch.cuda.amp.autocast(enabled=False):
203
+ bias = self.bias.to(dtype=d) if self.bias is not None else None
204
+ return nn.functional.linear(input, self.weight.to(dtype=d), bias)
205
+
206
+ return nn.functional.linear(input, self.weight, self.bias)
207
+
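For illustration, the initializer variants behave as documented; e.g. the "final" init zeroes the layer so residual branches start as the identity:

import torch

proj = Linear(64, 128, init="relu")      # He truncated-normal, fan-in
gate = Linear(64, 128, init="gating")    # weight 0, bias 1 -> sigmoid ~ 0.73
final = Linear(128, 64, init="final")    # weight 0, bias 0
assert torch.all(final(torch.randn(2, 128)) == 0)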
208
+
209
+ class LayerNorm(nn.Module):
210
+ def __init__(self, c_in, eps=1e-5):
211
+ super(LayerNorm, self).__init__()
212
+
213
+ self.c_in = (c_in,)
214
+ self.eps = eps
215
+
216
+ self.weight = nn.Parameter(torch.ones(c_in))
217
+ self.bias = nn.Parameter(torch.zeros(c_in))
218
+
219
+ def forward(self, x):
220
+ d = x.dtype
221
+ deepspeed_is_initialized = (
222
+ deepspeed_is_installed and
223
+ deepspeed.comm.comm.is_initialized()
224
+ )
225
+ if d is torch.bfloat16 and not deepspeed_is_initialized:
226
+ with torch.cuda.amp.autocast(enabled=False):
227
+ out = nn.functional.layer_norm(
228
+ x,
229
+ self.c_in,
230
+ self.weight.to(dtype=d),
231
+ self.bias.to(dtype=d),
232
+ self.eps
233
+ )
234
+ else:
235
+ out = nn.functional.layer_norm(
236
+ x,
237
+ self.c_in,
238
+ self.weight,
239
+ self.bias,
240
+ self.eps,
241
+ )
242
+
243
+ return out
244
+
245
+
246
+ @torch.jit.ignore
247
+ def softmax_no_cast(t: torch.Tensor, dim: int = -1) -> torch.Tensor:
248
+ """
249
+ Softmax, but without automatic casting to fp32 when the input is of
250
+ type bfloat16
251
+ """
252
+ d = t.dtype
253
+ deepspeed_is_initialized = (
254
+ deepspeed_is_installed and
255
+ deepspeed.comm.comm.is_initialized()
256
+ )
257
+ if d is torch.bfloat16 and not deepspeed_is_initialized:
258
+ with torch.cuda.amp.autocast(enabled=False):
259
+ s = torch.nn.functional.softmax(t, dim=dim)
260
+ else:
261
+ s = torch.nn.functional.softmax(t, dim=dim)
262
+
263
+ return s
264
+
265
+
266
+ #@torch.jit.script
267
+ def _attention(query: torch.Tensor, key: torch.Tensor, value: torch.Tensor, biases: List[torch.Tensor]) -> torch.Tensor:
268
+ # [*, H, C_hidden, K]
269
+ key = permute_final_dims(key, (1, 0))
270
+
271
+ # [*, H, Q, K]
272
+ a = torch.matmul(query, key)
273
+
274
+ for b in biases:
275
+ a += b
276
+
277
+ a = softmax_no_cast(a, -1)
278
+
279
+ # [*, H, Q, C_hidden]
280
+ a = torch.matmul(a, value)
281
+
282
+ return a
283
+
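A shape sketch for _attention (hypothetical sizes): each bias only needs to broadcast against the [*, H, Q, K] logits.

import torch

q = torch.randn(2, 4, 10, 16)            # [*, H, Q, C_hidden]
k = torch.randn(2, 4, 12, 16)            # [*, H, K, C_hidden]
v = torch.randn(2, 4, 12, 16)
pad_bias = torch.zeros(2, 1, 1, 12)      # broadcasts over heads and queries
out = _attention(q, k, v, [pad_bias])
assert out.shape == (2, 4, 10, 16)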
284
+
285
+ @torch.jit.ignore
286
+ def _attention_chunked_trainable(
287
+ query, key, value, biases, chunk_size, chunk_dim, checkpoint,
288
+ ):
289
+ if checkpoint and len(biases) > 2:
290
+ raise ValueError(
291
+ "Checkpointed version permits only permits two bias terms"
292
+ )
293
+
294
+ def _checkpointable_attention(q, k, v, b1, b2):
295
+ bs = [b for b in [b1, b2] if b is not None]
296
+ a = _attention(q, k, v, bs)
297
+ return a
298
+
299
+ o_chunks = []
300
+ checkpoint_fn = get_checkpoint_fn()
301
+ count = query.shape[chunk_dim]
302
+ for start in range(0, count, chunk_size):
303
+ end = start + chunk_size
304
+ idx = [slice(None)] * len(query.shape)
305
+ idx[chunk_dim] = slice(start, end)
306
+ idx_tup = tuple(idx)
307
+ q_chunk = query[idx_tup]
308
+ k_chunk = key[idx_tup]
309
+ v_chunk = value[idx_tup]
310
+
311
+ def _slice_bias(b):
312
+ idx[chunk_dim] = (
313
+ slice(start, end) if b.shape[chunk_dim] != 1 else slice(None)
314
+ )
315
+ return b[tuple(idx)]
316
+
317
+ if checkpoint:
318
+ bias_1_chunk, bias_2_chunk = [
319
+ _slice_bias(b) if b is not None else None
320
+ for b in (biases + [None, None])[:2]
321
+ ]
322
+
323
+ o_chunk = checkpoint_fn(_checkpointable_attention,
324
+ q_chunk, k_chunk, v_chunk, bias_1_chunk, bias_2_chunk
325
+ )
326
+ else:
327
+ bias_chunks = [
328
+ _slice_bias(b) for b in biases
329
+ ]
330
+
331
+ o_chunk = _attention(q_chunk, k_chunk, v_chunk, bias_chunks)
332
+
333
+ o_chunk = o_chunk.transpose(-2, -3)
334
+ o_chunks.append(o_chunk)
335
+
336
+ o = torch.cat(o_chunks, dim=chunk_dim)
337
+ return o
338
+
339
+
340
+ class Attention(nn.Module):
341
+ """
342
+ Standard multi-head attention using AlphaFold's default layer
343
+ initialization. Allows multiple bias vectors.
344
+ """
345
+ def __init__(
346
+ self,
347
+ c_q: int,
348
+ c_k: int,
349
+ c_v: int,
350
+ c_hidden: int,
351
+ no_heads: int,
352
+ gating: bool = True,
353
+ ):
354
+ """
355
+ Args:
356
+ c_q:
357
+ Input dimension of query data
358
+ c_k:
359
+ Input dimension of key data
360
+ c_v:
361
+ Input dimension of value data
362
+ c_hidden:
363
+ Per-head hidden dimension
364
+ no_heads:
365
+ Number of attention heads
366
+ gating:
367
+ Whether the output should be gated using query data
368
+ """
369
+ super(Attention, self).__init__()
370
+
371
+ self.c_q = c_q
372
+ self.c_k = c_k
373
+ self.c_v = c_v
374
+ self.c_hidden = c_hidden
375
+ self.no_heads = no_heads
376
+ self.gating = gating
377
+
378
+ # DISCREPANCY: c_hidden is not the per-head channel dimension, as
379
+ # stated in the supplement, but the overall channel dimension.
380
+
381
+ self.linear_q = Linear(
382
+ self.c_q, self.c_hidden * self.no_heads, bias=False, init="glorot"
383
+ )
384
+ self.linear_k = Linear(
385
+ self.c_k, self.c_hidden * self.no_heads, bias=False, init="glorot"
386
+ )
387
+ self.linear_v = Linear(
388
+ self.c_v, self.c_hidden * self.no_heads, bias=False, init="glorot"
389
+ )
390
+ self.linear_o = Linear(
391
+ self.c_hidden * self.no_heads, self.c_q, init="final"
392
+ )
393
+
394
+ self.linear_g = None
395
+ if self.gating:
396
+ self.linear_g = Linear(
397
+ self.c_q, self.c_hidden * self.no_heads, init="gating"
398
+ )
399
+
400
+ self.sigmoid = nn.Sigmoid()
401
+
402
+ def _prep_qkv(self,
403
+ q_x: torch.Tensor,
404
+ kv_x: torch.Tensor,
405
+ apply_scale: bool = True
406
+ ) -> Tuple[
407
+ torch.Tensor, torch.Tensor, torch.Tensor
408
+ ]:
409
+ # [*, Q/K/V, H * C_hidden]
410
+ q = self.linear_q(q_x)
411
+ k = self.linear_k(kv_x)
412
+ v = self.linear_v(kv_x)
413
+
414
+ # [*, Q/K, H, C_hidden]
415
+ q = q.view(q.shape[:-1] + (self.no_heads, -1))
416
+ k = k.view(k.shape[:-1] + (self.no_heads, -1))
417
+ v = v.view(v.shape[:-1] + (self.no_heads, -1))
418
+
419
+ # [*, H, Q/K, C_hidden]
420
+ q = q.transpose(-2, -3)
421
+ k = k.transpose(-2, -3)
422
+ v = v.transpose(-2, -3)
423
+
424
+ if apply_scale:
425
+ q /= math.sqrt(self.c_hidden)
426
+
427
+ return q, k, v
428
+
429
+ def _wrap_up(self,
430
+ o: torch.Tensor,
431
+ q_x: torch.Tensor
432
+ ) -> torch.Tensor:
433
+ if self.linear_g is not None:
434
+ g = self.sigmoid(self.linear_g(q_x))
435
+
436
+ # [*, Q, H, C_hidden]
437
+ g = g.view(g.shape[:-1] + (self.no_heads, -1))
438
+ o = o * g
439
+
440
+ # [*, Q, H * C_hidden]
441
+ o = flatten_final_dims(o, 2)
442
+
443
+ # [*, Q, C_q]
444
+ o = self.linear_o(o)
445
+
446
+ return o
447
+
448
+ def forward(
449
+ self,
450
+ q_x: torch.Tensor,
451
+ kv_x: torch.Tensor,
452
+ biases: Optional[List[torch.Tensor]] = None,
453
+ use_memory_efficient_kernel: bool = False,
454
+ use_deepspeed_evo_attention: bool = False,
455
+ use_lma: bool = False,
456
+ lma_q_chunk_size: int = DEFAULT_LMA_Q_CHUNK_SIZE,
457
+ lma_kv_chunk_size: int = DEFAULT_LMA_KV_CHUNK_SIZE,
458
+ use_flash: bool = False,
459
+ flash_mask: Optional[torch.Tensor] = None
460
+ ) -> torch.Tensor:
461
+ """
462
+ Args:
463
+ q_x:
464
+ [*, Q, C_q] query data
465
+ kv_x:
466
+ [*, K, C_k] key data
467
+ biases:
468
+ List of biases that broadcast to [*, H, Q, K]
469
+ use_memory_efficient_kernel:
470
+ Whether to use a custom memory-efficient attention kernel.
471
+ This should be the default choice for most. If none of the
472
+ "use_<...>" flags are True, a stock PyTorch implementation
473
+ is used instead
474
+ use_deepspeed_evo_attention:
475
+ Whether to use DeepSpeed memory-efficient attention kernel.
476
+ If none of the "use_<...>" flags are True, a stock PyTorch
477
+ implementation is used instead
478
+ use_lma:
479
+ Whether to use low-memory attention (Staats & Rabe 2021). If
480
+ none of the "use_<...>" flags are True, a stock PyTorch
481
+ implementation is used instead
482
+ lma_q_chunk_size:
483
+ Query chunk size (for LMA)
484
+ lma_kv_chunk_size:
485
+ Key/Value chunk size (for LMA)
486
+ Returns
487
+ [*, Q, C_q] attention update
488
+ """
489
+ if use_lma and (lma_q_chunk_size is None or lma_kv_chunk_size is None):
490
+ raise ValueError(
491
+ "If use_lma is specified, lma_q_chunk_size and "
492
+ "lma_kv_chunk_size must be provided"
493
+ )
494
+
495
+ if use_flash and biases is not None:
496
+ raise ValueError(
497
+ "use_flash is incompatible with the bias option. For masking, "
498
+ "use flash_mask instead"
499
+ )
500
+
501
+ attn_options = [use_memory_efficient_kernel, use_deepspeed_evo_attention, use_lma, use_flash]
502
+ if sum(attn_options) > 1:
503
+ raise ValueError(
504
+ "Choose at most one alternative attention algorithm"
505
+ )
506
+
507
+ if biases is None:
508
+ biases = []
509
+
510
+ # DeepSpeed attention kernel applies scaling internally
511
+ q, k, v = self._prep_qkv(q_x, kv_x,
512
+ apply_scale=not use_deepspeed_evo_attention)
513
+
514
+ if is_fp16_enabled():
515
+ use_memory_efficient_kernel = False
516
+
517
+ if use_memory_efficient_kernel:
518
+ if len(biases) > 2:
519
+ raise ValueError(
520
+ "If use_memory_efficient_kernel is True, you may only "
521
+ "provide up to two bias terms"
522
+ )
523
+ o = attention_core(q, k, v, *((biases + [None] * 2)[:2]))
524
+ o = o.transpose(-2, -3)
525
+ elif use_deepspeed_evo_attention:
526
+ if len(biases) > 2:
527
+ raise ValueError(
528
+ "If use_deepspeed_evo_attention is True, you may only "
529
+ "provide up to two bias terms"
530
+ )
531
+ o = _deepspeed_evo_attn(q, k, v, biases)
532
+ elif use_lma:
533
+ biases = [
534
+ b.expand(b.shape[:-2] + (q_x.shape[-2],) + (kv_x.shape[-2],))
535
+ for b in biases
536
+ ]
537
+ o = _lma(q, k, v, biases, lma_q_chunk_size, lma_kv_chunk_size)
538
+ o = o.transpose(-2, -3)
539
+ elif use_flash:
540
+ o = _flash_attn(q, k, v, flash_mask)
541
+ else:
542
+ o = _attention(q, k, v, biases)
543
+ o = o.transpose(-2, -3)
544
+
545
+ o = self._wrap_up(o, q_x)
546
+
547
+ return o
548
+
549
+
550
+ class GlobalAttention(nn.Module):
551
+ def __init__(self, c_in, c_hidden, no_heads, inf, eps):
552
+ super(GlobalAttention, self).__init__()
553
+
554
+ self.c_in = c_in
555
+ self.c_hidden = c_hidden
556
+ self.no_heads = no_heads
557
+ self.inf = inf
558
+ self.eps = eps
559
+
560
+ self.linear_q = Linear(
561
+ c_in, c_hidden * no_heads, bias=False, init="glorot"
562
+ )
563
+
564
+ self.linear_k = Linear(
565
+ c_in, c_hidden, bias=False, init="glorot",
566
+ )
567
+ self.linear_v = Linear(
568
+ c_in, c_hidden, bias=False, init="glorot",
569
+ )
570
+ self.linear_g = Linear(c_in, c_hidden * no_heads, init="gating")
571
+ self.linear_o = Linear(c_hidden * no_heads, c_in, init="final")
572
+
573
+ self.sigmoid = nn.Sigmoid()
574
+
575
+ def forward(self,
576
+ m: torch.Tensor,
577
+ mask: torch.Tensor,
578
+ use_lma: bool = False,
579
+ ) -> torch.Tensor:
580
+ # [*, N_res, C_in]
581
+ q = torch.sum(m * mask.unsqueeze(-1), dim=-2) / (
582
+ torch.sum(mask, dim=-1)[..., None] + self.eps
583
+ )
584
+
585
+ # [*, N_res, H * C_hidden]
586
+ q = self.linear_q(q)
587
+ q *= (self.c_hidden ** (-0.5))
588
+
589
+ # [*, N_res, H, C_hidden]
590
+ q = q.view(q.shape[:-1] + (self.no_heads, -1))
591
+
592
+ # [*, N_res, N_seq, C_hidden]
593
+ k = self.linear_k(m)
594
+ v = self.linear_v(m)
595
+
596
+ bias = (self.inf * (mask - 1))[..., :, None, :]
597
+ if not use_lma:
598
+ # [*, N_res, H, N_seq]
599
+ a = torch.matmul(
600
+ q,
601
+ k.transpose(-1, -2), # [*, N_res, C_hidden, N_seq]
602
+ )
603
+ a += bias
604
+ a = softmax_no_cast(a)
605
+
606
+ # [*, N_res, H, C_hidden]
607
+ o = torch.matmul(
608
+ a,
609
+ v,
610
+ )
611
+ else:
612
+ o = _lma(
613
+ q,
614
+ k,
615
+ v,
616
+ [bias],
617
+ DEFAULT_LMA_Q_CHUNK_SIZE,
618
+ DEFAULT_LMA_KV_CHUNK_SIZE
619
+ )
620
+
621
+ # [*, N_res, N_seq, C_hidden]
622
+ g = self.sigmoid(self.linear_g(m))
623
+
624
+ # [*, N_res, N_seq, H, C_hidden]
625
+ g = g.view(g.shape[:-1] + (self.no_heads, -1))
626
+
627
+ # [*, N_res, N_seq, H, C_hidden]
628
+ o = o.unsqueeze(-3) * g
629
+
630
+ # [*, N_res, N_seq, H * C_hidden]
631
+ o = o.reshape(o.shape[:-2] + (-1,))
632
+
633
+ # [*, N_res, N_seq, C_in]
634
+ m = self.linear_o(o)
635
+
636
+ return m
637
+
638
+
639
+ @torch.jit.ignore
640
+ def _deepspeed_evo_attn(
641
+ q: torch.Tensor,
642
+ k: torch.Tensor,
643
+ v: torch.Tensor,
644
+ biases: List[torch.Tensor],
645
+ ):
646
+ """""
647
+ Compute attention using the DeepSpeed DS4Sci_EvoformerAttention kernel.
648
+
649
+ Args:
650
+ q:
651
+ [*, H, Q, C_hidden] query data
652
+ k:
653
+ [*, H, K, C_hidden] key data
654
+ v:
655
+ [*, H, V, C_hidden] value data
656
+ biases:
657
+ List of biases that broadcast to [*, H, Q, K]
658
+ """
659
+
660
+ if not ds4s_is_installed:
661
+ raise ValueError(
662
+ "_deepspeed_evo_attn requires that DeepSpeed be installed "
663
+ "and that the deepspeed.ops.deepspeed4science package exists"
664
+ )
665
+
666
+ def reshape_dims(x):
667
+ no_batch_dims = len(x.shape[:-3])
668
+ if no_batch_dims < 2:
669
+ return x.reshape(*((1,) * (2 - no_batch_dims) + x.shape))
670
+ if no_batch_dims > 2:
671
+ return x.reshape(*((x.shape[0], -1) + x.shape[-3:]))
672
+ return x
673
+
674
+ # [*, Q/K, H, C_hidden]
675
+ q = q.transpose(-2, -3)
676
+ k = k.transpose(-2, -3)
677
+ v = v.transpose(-2, -3)
678
+
679
+ # Reshape tensors to match expected input shape [B, N, Q/K, H, C_hidden]
680
+ # for DS4Sci_EvoformerAttention() by adding or flattening batch dims as needed.
681
+ orig_shape = q.shape
682
+ if len(orig_shape[:-3]) != 2:
683
+ q = reshape_dims(q)
684
+ k = reshape_dims(k)
685
+ v = reshape_dims(v)
686
+ biases = [reshape_dims(b) for b in biases]
687
+
688
+ # DeepSpeed attn. kernel requires inputs to be type bf16 or fp16
689
+ # Cast to bf16 so kernel can be used during inference
690
+ orig_dtype = q.dtype
691
+ if orig_dtype not in [torch.bfloat16, torch.float16]:
692
+ o = DS4Sci_EvoformerAttention(q.to(dtype=torch.bfloat16),
693
+ k.to(dtype=torch.bfloat16),
694
+ v.to(dtype=torch.bfloat16),
695
+ [b.to(dtype=torch.bfloat16) for b in biases])
696
+
697
+ o = o.to(dtype=orig_dtype)
698
+ else:
699
+ o = DS4Sci_EvoformerAttention(q, k, v, biases)
700
+
701
+ o = o.reshape(orig_shape)
702
+ return o
703
+
704
+
705
+ def _lma(
706
+ q: torch.Tensor,
707
+ k: torch.Tensor,
708
+ v: torch.Tensor,
709
+ biases: List[torch.Tensor],
710
+ q_chunk_size: int,
711
+ kv_chunk_size: int,
712
+ ):
713
+ no_q, no_kv = q.shape[-2], k.shape[-2]
714
+
715
+ # [*, H, Q, C_hidden]
716
+ o = q.new_zeros(q.shape)
717
+ for q_s in range(0, no_q, q_chunk_size):
718
+ q_chunk = q[..., q_s: q_s + q_chunk_size, :]
719
+ large_bias_chunks = [
720
+ b[..., q_s: q_s + q_chunk_size, :] for b in biases
721
+ ]
722
+
723
+ maxes = []
724
+ weights = []
725
+ values = []
726
+ for kv_s in range(0, no_kv, kv_chunk_size):
727
+ k_chunk = k[..., kv_s: kv_s + kv_chunk_size, :]
728
+ v_chunk = v[..., kv_s: kv_s + kv_chunk_size, :]
729
+ small_bias_chunks = [
730
+ b[..., kv_s: kv_s + kv_chunk_size] for b in large_bias_chunks
731
+ ]
732
+
733
+ a = torch.einsum(
734
+ "...hqd,...hkd->...hqk", q_chunk, k_chunk,
735
+ )
736
+
737
+ for b in small_bias_chunks:
738
+ a += b
739
+
740
+ max_a = torch.max(a, dim=-1, keepdim=True)[0]
741
+ exp_a = torch.exp(a - max_a)
742
+ exp_v = torch.einsum("...hvf,...hqv->...hqf", v_chunk, exp_a)
743
+
744
+ maxes.append(max_a.detach().squeeze(-1))
745
+ weights.append(torch.sum(exp_a, dim=-1))
746
+ values.append(exp_v)
747
+
748
+ chunk_max = torch.stack(maxes, dim=-3)
749
+ chunk_weights = torch.stack(weights, dim=-3)
750
+ chunk_values = torch.stack(values, dim=-4)
751
+
752
+ global_max = torch.max(chunk_max, dim=-3, keepdim=True)[0]
753
+ max_diffs = torch.exp(chunk_max - global_max)
754
+ chunk_values = chunk_values * max_diffs.unsqueeze(-1)
755
+ chunk_weights = chunk_weights * max_diffs
756
+
757
+ all_values = torch.sum(chunk_values, dim=-4)
758
+ all_weights = torch.sum(chunk_weights.unsqueeze(-1), dim=-4)
759
+
760
+ q_chunk_out = all_values / all_weights
761
+
762
+ o[..., q_s: q_s + q_chunk_size, :] = q_chunk_out
763
+
764
+ return o
765
+
766
+
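`_lma` above computes exact softmax attention in chunk-sized memory by carrying a running max and normalizer per key/value chunk (the standard log-sum-exp trick). A standalone numerical check of the same chunking idea against naive attention (chunk size and shapes are arbitrary; the scaling and bias handling of the real function is omitted):

```python
import torch

def naive_attn(q, k, v):
    return torch.softmax(q @ k.transpose(-1, -2), dim=-1) @ v

def chunked_attn(q, k, v, kv_chunk=16):
    maxes, weights, values = [], [], []
    for s in range(0, k.shape[-2], kv_chunk):
        a = q @ k[..., s:s + kv_chunk, :].transpose(-1, -2)
        m = a.max(dim=-1, keepdim=True).values       # per-chunk max
        e = torch.exp(a - m)
        maxes.append(m)
        weights.append(e.sum(dim=-1, keepdim=True))  # per-chunk normalizer
        values.append(e @ v[..., s:s + kv_chunk, :])
    m = torch.stack(maxes)
    g = m.max(dim=0, keepdim=True).values            # global max across chunks
    scale = torch.exp(m - g)
    num = (torch.stack(values) * scale).sum(dim=0)
    den = (torch.stack(weights) * scale).sum(dim=0)
    return num / den

q, k, v = (torch.randn(2, 64, 32) for _ in range(3))
print(torch.allclose(naive_attn(q, k, v), chunked_attn(q, k, v), atol=1e-5))  # True
```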
767
+ @torch.jit.ignore
768
+ def _flash_attn(q, k, v, kv_mask):
769
+ if not fa_is_installed:
770
+ raise ValueError(
771
+ "_flash_attn requires that FlashAttention be installed"
772
+ )
773
+
774
+ batch_dims = q.shape[:-3]
775
+ no_heads, n, c = q.shape[-3:]
776
+ dtype = q.dtype
777
+
778
+ q = q.half()
779
+ k = k.half()
780
+ v = v.half()
781
+ kv_mask = kv_mask.half()
782
+
783
+ # [*, B, N, H, C]
784
+ q = q.transpose(-2, -3)
785
+ k = k.transpose(-2, -3)
786
+ v = v.transpose(-2, -3)
787
+
788
+ # [B_flat, N, H, C]
789
+ q = q.reshape(-1, *q.shape[-3:])
790
+ k = k.reshape(-1, *k.shape[-3:])
791
+ v = v.reshape(-1, *v.shape[-3:])
792
+
793
+ # Flattened batch size
794
+ batch_size = q.shape[0]
795
+
796
+ # [B_flat * N, H, C]
797
+ q = q.reshape(-1, *q.shape[-2:])
798
+
799
+ q_max_s = n
800
+ q_cu_seqlens = torch.arange(
801
+ 0, (batch_size + 1) * n, step=n, dtype=torch.int32, device=q.device
802
+ )
803
+
804
+ # [B_flat, N, 2, H, C]
805
+ kv = torch.stack([k, v], dim=-3)
806
+ kv_shape = kv.shape
807
+
808
+ # [B_flat, N, 2 * H * C]
809
+ kv = kv.reshape(*kv.shape[:-3], -1)
810
+
811
+ kv_unpad, _, kv_cu_seqlens, kv_max_s = unpad_input(kv, kv_mask)
812
+ kv_unpad = kv_unpad.reshape(-1, *kv_shape[-3:])
813
+
814
+ out = flash_attn_unpadded_kvpacked_func(
815
+ q,
816
+ kv_unpad,
817
+ q_cu_seqlens,
818
+ kv_cu_seqlens,
819
+ q_max_s,
820
+ kv_max_s,
821
+ dropout_p=0.,
822
+ softmax_scale=1., # q has been scaled already
823
+ )
824
+
825
+ # [*, B, N, H, C]
826
+ out = out.reshape(*batch_dims, n, no_heads, c)
827
+
828
+ out = out.to(dtype=dtype)
829
+
830
+ return out
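`_flash_attn` flattens the batch into one packed token stream and describes sequence boundaries with cumulative-length offsets (`cu_seqlens`), the calling convention of FlashAttention's unpadded kernels. A small sketch of how such offsets index a packed batch, in plain torch with no flash-attn dependency (this is the variable-length case; the code above builds the equal-length special case with `torch.arange`):

```python
import torch

lens = torch.tensor([3, 5, 2])                    # three sequences, packed back to back
tokens = torch.randn(int(lens.sum()), 8)          # [total_tokens, dim]

cu_seqlens = torch.zeros(len(lens) + 1, dtype=torch.int32)
cu_seqlens[1:] = torch.cumsum(lens, dim=0)        # [0, 3, 8, 10]

# Sequence i lives at tokens[cu_seqlens[i]:cu_seqlens[i + 1]].
for i in range(len(lens)):
    seq = tokens[cu_seqlens[i]:cu_seqlens[i + 1]]
    assert seq.shape[0] == int(lens[i])
```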
data/mdgen/model/standalone_hyena.py ADDED
@@ -0,0 +1,289 @@
1
+ # https://github.com/HazyResearch/safari/blob/main/standalone_hyena.py
2
+ """
3
+ Simplified standalone version of Hyena: https://arxiv.org/abs/2302.10866, designed for quick experimentation.
4
+ A complete version is available under `src.models.sequence.hyena`.
5
+ """
6
+
7
+ import math
8
+ import random
9
+
10
+ import torch
11
+ import torch.nn as nn
12
+ from einops import rearrange
13
+
14
+
15
+ def fftconv(u, k, D):
16
+ L = u.shape[-1]
17
+ fft_size = 2 * L
18
+
19
+ k_f = torch.fft.rfft(k, n=fft_size) / fft_size
20
+ u_f = torch.fft.rfft(u.to(dtype=k.dtype), n=fft_size)
21
+
22
+ if len(u.shape) > 3: k_f = k_f.unsqueeze(1)
23
+ y = torch.fft.irfft(u_f * k_f, n=fft_size, norm='forward')[..., L - 1: 2 * L - 1]
24
+
25
+ out = y + u * D.unsqueeze(-1)
26
+ return out.to(dtype=u.dtype)
27
+
28
+
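`fftconv` evaluates a long convolution in O(L log L) by pointwise multiplication in frequency space, zero-padding to 2L so the circular FFT convolution reproduces a linear one (the crop convention is exercised in the `__main__` block at the bottom of this file). The underlying identity, checked directly in a standalone sketch:

```python
import torch

L = 8
u, k = torch.randn(L), torch.randn(L)

# FFT path: pad to 2L, multiply spectra, invert, keep the first L (causal) samples.
n = 2 * L
y_fft = torch.fft.irfft(torch.fft.rfft(u, n=n) * torch.fft.rfft(k, n=n), n=n)[:L]

# Direct path: y[t] = sum_{s <= t} k[s] * u[t - s]
y_direct = torch.stack(
    [sum(k[s] * u[t - s] for s in range(t + 1)) for t in range(L)]
)

print(torch.allclose(y_fft, y_direct, atol=1e-5))  # True
```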
29
+ @torch.jit.script
30
+ def mul_sum(q, y):
31
+ return (q * y).sum(dim=1)
32
+
33
+
34
+ class OptimModule(nn.Module):
35
+ """ Interface for Module that allows registering buffers/parameters with configurable optimizer hyperparameters """
36
+
37
+ def register(self, name, tensor, lr=None, wd=0.0):
38
+ """Register a tensor with a configurable learning rate and 0 weight decay"""
39
+
40
+ if lr == 0.0:
41
+ self.register_buffer(name, tensor)
42
+ else:
43
+ self.register_parameter(name, nn.Parameter(tensor))
44
+
45
+ optim = {}
46
+ if lr is not None: optim["lr"] = lr
47
+ if wd is not None: optim["weight_decay"] = wd
48
+ setattr(getattr(self, name), "_optim", optim)
49
+
50
+
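`OptimModule.register` only stashes the per-tensor hyperparameters on a `_optim` attribute; consuming them is left to the training loop. A hypothetical helper (not part of this repo) showing how those attributes could be turned into optimizer parameter groups:

```python
import torch

def build_param_groups(model: torch.nn.Module, base_lr: float = 1e-3):
    """Group parameters by the `_optim` overrides set via OptimModule.register."""
    default, groups = [], []
    for p in model.parameters():
        overrides = getattr(p, "_optim", None)
        if overrides:
            groups.append({"params": [p], **overrides})
        else:
            default.append(p)
    groups.append({"params": default, "lr": base_lr})
    return groups

# e.g. torch.optim.AdamW(build_param_groups(hyena_filter), lr=1e-3)
```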
51
+ class Sin(nn.Module):
52
+ def __init__(self, dim, w=10, train_freq=True):
53
+ super().__init__()
54
+ self.freq = nn.Parameter(w * torch.ones(1, dim)) if train_freq else w * torch.ones(1, dim)
55
+
56
+ def forward(self, x):
57
+ return torch.sin(self.freq * x)
58
+
59
+
60
+ class PositionalEmbedding(OptimModule):
61
+ def __init__(self, emb_dim: int, seq_len: int, lr_pos_emb: float = 1e-5, **kwargs):
62
+ """Complex exponential positional embeddings for Hyena filters."""
63
+ super().__init__()
64
+
65
+ self.seq_len = seq_len
66
+ # The time embedding fed to the filters is normalized so that t_f = 1
67
+ t = torch.linspace(0, 1, self.seq_len)[None, :, None] # 1, L, 1
68
+
69
+ if emb_dim > 1:
70
+ bands = (emb_dim - 1) // 2
71
+ # To compute the right embeddings we use the "proper" linspace
72
+ t_rescaled = torch.linspace(0, seq_len - 1, seq_len)[None, :, None]
73
+ w = 2 * math.pi * t_rescaled / seq_len # 1, L, 1
74
+
75
+ f = torch.linspace(1e-4, bands - 1, bands)[None, None]
76
+ z = torch.exp(-1j * f * w)
77
+ z = torch.cat([t, z.real, z.imag], dim=-1)
78
+ self.register("z", z, lr=lr_pos_emb)
79
+ self.register("t", t, lr=0.0)
80
+
81
+ def forward(self, L):
82
+ return self.z[:, :L], self.t[:, :L]
83
+
84
+
85
+ class ExponentialModulation(OptimModule):
86
+ def __init__(
87
+ self,
88
+ d_model,
89
+ fast_decay_pct=0.3,
90
+ slow_decay_pct=1.5,
91
+ target=1e-2,
92
+ modulation_lr=0.0,
93
+ modulate: bool = True,
94
+ shift: float = 0.0,
95
+ **kwargs
96
+ ):
97
+ super().__init__()
98
+ self.modulate = modulate
99
+ self.shift = shift
100
+ max_decay = math.log(target) / fast_decay_pct
101
+ min_decay = math.log(target) / slow_decay_pct
102
+ deltas = torch.linspace(min_decay, max_decay, d_model)[None, None]
103
+ self.register("deltas", deltas, lr=modulation_lr)
104
+
105
+ def forward(self, t, x):
106
+ if self.modulate:
107
+ decay = torch.exp(-t * self.deltas.abs())
108
+ x = x * (decay + self.shift)
109
+ return x
110
+
111
+
112
+ class HyenaFilter(OptimModule):
113
+ def __init__(
114
+ self,
115
+ d_model,
116
+ emb_dim=3, # dim of input to MLP, augments with positional encoding
117
+ order=16, # width of the implicit MLP
118
+ fused_fft_conv=False,
119
+ seq_len=1024,
120
+ lr=1e-3,
121
+ lr_pos_emb=1e-5,
122
+ dropout=0.0,
123
+ w=1, # frequency of periodic activations
124
+ wd=0, # weight decay of kernel parameters
125
+ bias=True,
126
+ num_inner_mlps=2,
127
+ normalized=False,
128
+ **kwargs
129
+ ):
130
+ """
131
+ Implicit long filter with modulation.
132
+
133
+ Args:
134
+ d_model: number of channels in the input
135
+ emb_dim: dimension of the positional encoding; (`emb_dim` - 1) // 2 is the number of bands
136
+ order: width of the FFN
137
+ num_inner_mlps: number of inner linear layers inside filter MLP
138
+ """
139
+ super().__init__()
140
+ self.d_model = d_model
141
+ self.use_bias = bias
142
+ self.fused_fft_conv = fused_fft_conv
143
+ self.bias = nn.Parameter(torch.randn(self.d_model))
144
+ self.dropout = nn.Dropout(dropout)
145
+
146
+ act = Sin(dim=order, w=w)
147
+ self.emb_dim = emb_dim
148
+ assert emb_dim % 2 != 0 and emb_dim >= 3, "emb_dim must be odd and greater or equal to 3 (time, " \
149
+ "sine and cosine)"
150
+ self.seq_len = seq_len
151
+
152
+ self.pos_emb = PositionalEmbedding(emb_dim, seq_len, lr_pos_emb)
153
+
154
+ self.implicit_filter = nn.Sequential(
155
+ nn.Linear(emb_dim, order),
156
+ act,
157
+ )
158
+ for i in range(num_inner_mlps):
159
+ self.implicit_filter.append(nn.Linear(order, order))
160
+ self.implicit_filter.append(act)
161
+
162
+ self.implicit_filter.append(nn.Linear(order, d_model, bias=False))
163
+
164
+ self.modulation = ExponentialModulation(d_model, **kwargs)
165
+
166
+ self.normalized = normalized
167
+ for c in self.implicit_filter.children():
168
+ for name, v in c.state_dict().items():
169
+ optim = {"weight_decay": wd, "lr": lr}
170
+ setattr(getattr(c, name), "_optim", optim)
171
+
172
+ def filter(self, L, *args, **kwargs):
173
+ z, t = self.pos_emb(L)
174
+ h = self.implicit_filter(z)
175
+ h = self.modulation(t, h)
176
+ return h
177
+
178
+ def forward(self, x, L, k=None, bias=None, *args, **kwargs):
179
+ if k is None: k = self.filter(2 * L - 1)
180
+
181
+ # Ensure compatibility with filters that return a tuple
182
+ k = k[0] if type(k) is tuple else k
183
+
184
+ y = fftconv(x, k, bias)
185
+ return y
186
+
187
+
188
+ class HyenaOperator(nn.Module):
189
+ def __init__(
190
+ self,
191
+ d_model,
192
+ l_max,
193
+ order=2,
194
+ filter_order=64,
195
+ dropout=0.0,
196
+ filter_dropout=0.0,
197
+ **filter_args,
198
+ ):
199
+ r"""
200
+ Hyena operator described in the paper https://arxiv.org/pdf/2302.10866.pdf
201
+
202
+ Args:
203
+ d_model (int): Dimension of the input and output embeddings (width of the layer)
204
+ l_max (int): Maximum input sequence length
205
+ order (int): Depth of the Hyena recurrence. Defaults to 2
206
+ dropout (float): Dropout probability. Defaults to 0.0
207
+ filter_dropout (float): Dropout probability for the filter. Defaults to 0.0
208
+ """
209
+ super().__init__()
210
+ self.d_model = d_model
211
+ self.l_max = l_max
212
+ self.order = order
213
+ inner_width = d_model * (order + 1)
214
+ self.dropout = nn.Dropout(dropout)
215
+ self.in_proj = nn.Linear(d_model, inner_width)
216
+ self.out_proj = nn.Linear(d_model, d_model)
217
+
218
+ self.short_filter = nn.Conv1d(
219
+ inner_width,
220
+ inner_width,
221
+ 3,
222
+ padding=2,
223
+ groups=inner_width
224
+ )
225
+ self.filter_fn = HyenaFilter(
226
+ d_model * (order - 1),
227
+ order=filter_order,
228
+ seq_len=l_max,
229
+ channels=1,
230
+ dropout=filter_dropout,
231
+ **filter_args
232
+ )
233
+
234
+ def forward(self, u, *args, **kwargs):
235
+ l = u.size(-2)
236
+ l_filter = min(l, self.l_max)
237
+ u = self.in_proj(u)
238
+ u = rearrange(u, 'b l d -> b d l')
239
+
240
+ uc = self.short_filter(u)[..., :l_filter]
241
+ *x, v = uc.split(self.d_model, dim=1)
242
+
243
+ k = self.filter_fn.filter(l_filter)[0]
244
+ k = rearrange(k, 'l (o d) -> o d l', o=self.order - 1)
245
+ bias = rearrange(self.filter_fn.bias, '(o d) -> o d', o=self.order - 1)
246
+
247
+ for o, x_i in enumerate(reversed(x[1:])):
248
+ v = self.dropout(v * x_i)
249
+ v = self.filter_fn(v, l_filter, k=k[o], bias=bias[o])
250
+
251
+ y = rearrange(v * x[0], 'b d l -> b l d')
252
+
253
+ y = self.out_proj(y)
254
+ return y
255
+
256
+ if __name__ == '__main__':
257
+ h = torch.FloatTensor([1, 1, 1, 1, 1, 1, 1])
258
+ x = torch.FloatTensor([1, 1, 1, 1])
259
+ L = len(x)
260
+ res = torch.fft.irfft(torch.fft.rfft(h, n=2 * L) * torch.fft.rfft(x, n=2 * L))
261
+ print('all', res)
262
+ print('cropped', res[L - 1: 2 * L - 1])
263
+ a = torch.Tensor([random.random() for _ in range(8)])
264
+ b = torch.Tensor([random.random() for _ in range(8)])
265
+
266
+ a_rf = torch.fft.rfft(a, norm="forward")
267
+ b_rf = torch.fft.rfft(b, norm="forward")
268
+ c_rf = a_rf * b_rf
269
+ cr = torch.fft.irfft(c_rf, norm="forward")
270
+
271
+ a_f = torch.fft.fft(a, norm="forward")
272
+ b_f = torch.fft.fft(b, norm="forward")
273
+ c_f = a_f * b_f
274
+ c = torch.fft.irfft(c_f, norm="forward")
275
+
276
+ layer = HyenaOperator(
277
+ d_model=512,
278
+ l_max=1024,
279
+ order=2,
280
+ filter_order=64
281
+ )
282
+ x = torch.randn(1, 1024, 512, requires_grad=True)
283
+ y = layer(x)
284
+
285
+ print(x.shape, y.shape)
286
+
287
+ grad = torch.autograd.grad(y[:, 10, :].sum(), x)[0]
288
+ print('Causality check: gradients should not flow "from future to past"')
289
+ print(grad[0, 105, :].sum(), grad[0, 0, :].sum())
data/mdgen/parsing.py ADDED
@@ -0,0 +1,127 @@
1
+ from argparse import ArgumentParser
2
+ import os
3
+
4
+
5
+ def parse_train_args():
6
+ parser = ArgumentParser()
7
+
8
+ ## Trainer settings
9
+ parser.add_argument("--ckpt", type=str, default=None)
10
+ parser.add_argument("--validate", action='store_true', default=False)
11
+ parser.add_argument("--num_workers", type=int, default=4)
12
+
13
+ ## Epoch settings
14
+ group = parser.add_argument_group("Epoch settings")
15
+ group.add_argument("--epochs", type=int, default=100)
16
+ group.add_argument("--overfit", action='store_true')
17
+ group.add_argument("--overfit_peptide", type=str, default=None)
18
+ group.add_argument("--overfit_frame", action='store_true')
19
+ group.add_argument("--train_batches", type=int, default=None)
20
+ group.add_argument("--val_batches", type=int, default=None)
21
+ group.add_argument("--val_repeat", type=int, default=1)
22
+ group.add_argument("--inference_batches", type=int, default=0)
23
+ group.add_argument("--batch_size", type=int, default=8)
24
+ group.add_argument("--val_freq", type=int, default=None)
25
+ group.add_argument("--val_epoch_freq", type=int, default=1)
26
+ group.add_argument("--no_validate", action='store_true')
27
+ group.add_argument("--designability_freq", type=int, default=1)
28
+
29
+ ## Logging args
30
+ group = parser.add_argument_group("Logging settings")
31
+ group.add_argument("--print_freq", type=int, default=100)
32
+ group.add_argument("--ckpt_freq", type=int, default=1)
33
+ group.add_argument("--wandb", action="store_true")
34
+ group.add_argument("--run_name", type=str, default="default")
35
+
36
+
37
+ ## Optimization settings
38
+ group = parser.add_argument_group("Optimization settings")
39
+ group.add_argument("--accumulate_grad", type=int, default=1)
40
+ group.add_argument("--grad_clip", type=float, default=1.)
41
+ group.add_argument("--check_grad", action='store_true')
42
+ group.add_argument('--grad_checkpointing', action='store_true')
43
+ group.add_argument('--adamW', action='store_true')
44
+ group.add_argument('--ema', action='store_true')
45
+ group.add_argument('--ema_decay', type=float, default=0.999)
46
+ group.add_argument("--lr", type=float, default=1e-4)
47
+ group.add_argument('--precision', type=str, default='32-true')
48
+
49
+ ## Training data
50
+ group = parser.add_argument_group("Training data settings")
51
+ group.add_argument('--train_split', type=str, default=None, required=True)
52
+ group.add_argument('--val_split', type=str, default=None, required=True)
53
+ group.add_argument('--data_dir', type=str, default=None, required=True)
54
+ group.add_argument('--num_frames', type=int, default=50)
55
+ group.add_argument('--crop', type=int, default=256)
56
+ group.add_argument('--suffix', type=str, default='')
57
+ group.add_argument('--atlas', action='store_true')
58
+ group.add_argument('--copy_frames', action='store_true')
59
+ group.add_argument('--no_pad', action='store_true')
60
+ group.add_argument('--short_md', action='store_true')
61
+
62
+ ### Masking settings
63
+ group = parser.add_argument_group("Masking settings")
64
+ group.add_argument('--design_key_frames', action='store_true')
65
+ group.add_argument('--no_aa_emb', action='store_true')
66
+ group.add_argument("--no_torsion", action='store_true')
67
+ group.add_argument("--no_design_torsion", action='store_true')
68
+ group.add_argument("--supervise_no_torsions", action='store_true')
69
+ group.add_argument("--supervise_all_torsions", action='store_true')
70
+
71
+ ## Ablations settings
72
+ group = parser.add_argument_group("Ablations settings")
73
+ group.add_argument('--no_offsets', action='store_true')
74
+ group.add_argument('--no_frames', action='store_true')
75
+
76
+
77
+ ## Model settings
78
+ group = parser.add_argument_group("Model settings")
79
+ group.add_argument('--hyena', action='store_true')
80
+ group.add_argument('--no_rope', action='store_true')
81
+ group.add_argument('--dropout', type=float, default=0.0)
82
+ group.add_argument('--scale_factor', type=float, default=1.0)
83
+ group.add_argument('--interleave_ipa', action='store_true')
84
+ group.add_argument('--prepend_ipa', action='store_true')
85
+ group.add_argument('--oracle', action='store_true')
86
+ group.add_argument('--num_layers', type=int, default=5)
87
+ group.add_argument('--embed_dim', type=int, default=384)
88
+ group.add_argument('--mha_heads', type=int, default=16)
89
+ group.add_argument('--ipa_heads', type=int, default=4)
90
+ # group.add_argument('--ipa_layers', type=int, default=None)
91
+ group.add_argument('--ipa_head_dim', type=int, default=32)
92
+ group.add_argument('--ipa_qk', type=int, default=8)
93
+ group.add_argument('--ipa_v', type=int, default=8)
94
+
95
+ group.add_argument('--time_multiplier', type=float, default=100.)
96
+ group.add_argument('--abs_pos_emb', action='store_true')
97
+ group.add_argument('--abs_time_emb', action='store_true')
98
+
99
+ group = parser.add_argument_group("Transport arguments")
100
+ group.add_argument("--path-type", type=str, default="GVP", choices=["Linear", "GVP", "VP"])
101
+ group.add_argument("--prediction", type=str, default="velocity", choices=["velocity", "score", "noise"])
102
+ group.add_argument("--sampling_method", type=str, default="dopri5", choices=["dopri5", "euler"])
103
+ group.add_argument('--alpha_max', type=float, default=8)
104
+ group.add_argument('--discrete_loss_weight', type=float, default=0.5)
105
+ group.add_argument("--dirichlet_flow_temp", type=float, default=1.0)
106
+ group.add_argument('--allow_nan_cfactor', action='store_true')
107
+ # group.add_argument("--loss-weight", type=none_or_str, default=None, choices=[None, "velocity", "likelihood"])
108
+
109
+
110
+ ## Video settings
111
+ group = parser.add_argument_group("Video settings")
112
+ group.add_argument('--tps_condition', action='store_true')
113
+ group.add_argument('--design', action='store_true')
114
+ group.add_argument('--design_from_traj', action='store_true')
115
+ group.add_argument('--sim_condition', action='store_true')
116
+ group.add_argument('--inpainting', action='store_true')
117
+ group.add_argument('--dynamic_mpnn', action='store_true')
118
+ group.add_argument('--mpnn', action='store_true')
119
+ group.add_argument('--frame_interval', type=int, default=None)
120
+ group.add_argument('--cond_interval', type=int, default=None) # for superresolution
121
+
122
+ args = parser.parse_args()
123
+ os.environ["MODEL_DIR"] = os.path.join("workdir", args.run_name)
124
+
125
+ return args
126
+
127
+
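`parse_train_args` requires `--train_split`, `--val_split`, and `--data_dir`, and sets the `MODEL_DIR` environment variable from `--run_name` as a side effect. A minimal programmatic invocation (the data path below is a placeholder):

```python
import sys
from mdgen.parsing import parse_train_args

sys.argv = [
    "train.py",
    "--train_split", "splits/4AA_train.csv",
    "--val_split", "splits/4AA_val.csv",
    "--data_dir", "/path/to/4AA_sims",   # placeholder
    "--run_name", "demo",
]
args = parse_train_args()
print(args.batch_size, args.lr, args.num_frames)  # defaults: 8 0.0001 50
```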
data/mdgen/protein.py ADDED
@@ -0,0 +1,636 @@
1
+ # Copyright 2021 AlQuraishi Laboratory
2
+ # Copyright 2021 DeepMind Technologies Limited
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+
16
+ """Protein data type."""
17
+ import dataclasses
18
+ import io
19
+ from typing import Any, Sequence, Mapping, Optional
20
+ import re
21
+ import string
22
+
23
+ from . import residue_constants
24
+ from Bio.PDB import PDBParser
25
+ import numpy as np
26
+ # import modelcif
27
+ # import modelcif.model
28
+ # import modelcif.dumper
29
+ # import modelcif.reference
30
+ # import modelcif.protocol
31
+ # import modelcif.alignment
32
+ # import modelcif.qa_metric
33
+
34
+
35
+ FeatureDict = Mapping[str, np.ndarray]
36
+ ModelOutput = Mapping[str, Any] # Is a nested dict.
37
+ PICO_TO_ANGSTROM = 0.01
38
+
39
+ PDB_CHAIN_IDS = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"
40
+ PDB_MAX_CHAINS = len(PDB_CHAIN_IDS)
41
+ assert(PDB_MAX_CHAINS == 62)
42
+
43
+
44
+ @dataclasses.dataclass(frozen=True)
45
+ class Protein:
46
+ """Protein structure representation."""
47
+
48
+ # Cartesian coordinates of atoms in angstroms. The atom types correspond to
49
+ # residue_constants.atom_types, i.e. the first three are N, CA, C.
50
+ atom_positions: np.ndarray # [num_res, num_atom_type, 3]
51
+
52
+ # Amino-acid type for each residue represented as an integer between 0 and
53
+ # 20, where 20 is 'X'.
54
+ aatype: np.ndarray # [num_res]
55
+
56
+ # Binary float mask to indicate presence of a particular atom. 1.0 if an atom
57
+ # is present and 0.0 if not. This should be used for loss masking.
58
+ atom_mask: np.ndarray # [num_res, num_atom_type]
59
+
60
+ # Residue index as used in PDB. It is not necessarily continuous or 0-indexed.
61
+ residue_index: np.ndarray # [num_res]
62
+
63
+ # B-factors, or temperature factors, of each residue (in sq. angstroms units),
64
+ # representing the displacement of the residue from its ground truth mean
65
+ # value.
66
+ b_factors: np.ndarray # [num_res, num_atom_type]
67
+
68
+ # Chain indices for multi-chain predictions
69
+ chain_index: Optional[np.ndarray] = None
70
+
71
+ # Optional remark about the protein. Included as a comment in output PDB
72
+ # files
73
+ remark: Optional[str] = None
74
+
75
+ # Templates used to generate this protein (prediction-only)
76
+ parents: Optional[Sequence[str]] = None
77
+
78
+ # Chain corresponding to each parent
79
+ parents_chain_index: Optional[Sequence[int]] = None
80
+
81
+ def __post_init__(self):
82
+ if(len(np.unique(self.chain_index)) > PDB_MAX_CHAINS):
83
+ raise ValueError(
84
+ f"Cannot build an instance with more than {PDB_MAX_CHAINS} "
85
+ "chains because these cannot be written to PDB format"
86
+ )
87
+
88
+
89
+ def from_pdb_string(pdb_str: str, chain_id: Optional[str] = None) -> Protein:
90
+ """Takes a PDB string and constructs a Protein object.
91
+
92
+ WARNING: All non-standard residue types will be converted into UNK. All
93
+ non-standard atoms will be ignored.
94
+
95
+ Args:
96
+ pdb_str: The contents of the pdb file
97
+ chain_id: If None, then the whole pdb file is parsed. If chain_id is specified (e.g. A), then only that chain
98
+ is parsed.
99
+
100
+ Returns:
101
+ A new `Protein` parsed from the pdb contents.
102
+ """
103
+ pdb_fh = io.StringIO(pdb_str)
104
+ parser = PDBParser(QUIET=True)
105
+ structure = parser.get_structure("none", pdb_fh)
106
+ models = list(structure.get_models())
107
+ if len(models) != 1:
108
+ raise ValueError(
109
+ f"Only single model PDBs are supported. Found {len(models)} models."
110
+ )
111
+ model = models[0]
112
+
113
+ atom_positions = []
114
+ aatype = []
115
+ atom_mask = []
116
+ residue_index = []
117
+ chain_ids = []
118
+ b_factors = []
119
+
120
+ for chain in model:
121
+ if(chain_id is not None and chain.id != chain_id):
122
+ continue
123
+
124
+ for res in chain:
125
+ if res.id[2] != " ":
126
+ raise ValueError(
127
+ f"PDB contains an insertion code at chain {chain.id} and residue "
128
+ f"index {res.id[1]}. These are not supported."
129
+ )
130
+ res_shortname = residue_constants.restype_3to1.get(res.resname, "X")
131
+ restype_idx = residue_constants.restype_order.get(
132
+ res_shortname, residue_constants.restype_num
133
+ )
134
+ pos = np.zeros((residue_constants.atom_type_num, 3))
135
+ mask = np.zeros((residue_constants.atom_type_num,))
136
+ res_b_factors = np.zeros((residue_constants.atom_type_num,))
137
+ for atom in res:
138
+ if atom.name not in residue_constants.atom_types:
139
+ continue
140
+ pos[residue_constants.atom_order[atom.name]] = atom.coord
141
+ mask[residue_constants.atom_order[atom.name]] = 1.0
142
+ res_b_factors[
143
+ residue_constants.atom_order[atom.name]
144
+ ] = atom.bfactor
145
+ if np.sum(mask) < 0.5:
146
+ # If no known atom positions are reported for the residue then skip it.
147
+ continue
148
+
149
+ aatype.append(restype_idx)
150
+ atom_positions.append(pos)
151
+ atom_mask.append(mask)
152
+ residue_index.append(res.id[1])
153
+ chain_ids.append(chain.id)
154
+ b_factors.append(res_b_factors)
155
+
156
+ parents = None
157
+ parents_chain_index = None
158
+ if("PARENT" in pdb_str):
159
+ parents = []
160
+ parents_chain_index = []
161
+ chain_id = 0
162
+ for l in pdb_str.split("\n"):
163
+ if("PARENT" in l):
164
+ if(not "N/A" in l):
165
+ parent_names = l.split()[1:]
166
+ parents.extend(parent_names)
167
+ parents_chain_index.extend([
168
+ chain_id for _ in parent_names
169
+ ])
170
+ chain_id += 1
171
+
172
+ unique_chain_ids = np.unique(chain_ids)
173
+ chain_id_mapping = {cid: n for n, cid in enumerate(string.ascii_uppercase)}
174
+ chain_index = np.array([chain_id_mapping[cid] for cid in chain_ids])
175
+
176
+ return Protein(
177
+ atom_positions=np.array(atom_positions),
178
+ atom_mask=np.array(atom_mask),
179
+ aatype=np.array(aatype),
180
+ residue_index=np.array(residue_index),
181
+ chain_index=chain_index,
182
+ b_factors=np.array(b_factors),
183
+ parents=parents,
184
+ parents_chain_index=parents_chain_index,
185
+ )
186
+
187
+
188
+ def from_proteinnet_string(proteinnet_str: str) -> Protein:
189
+ tag_re = r'(\[[A-Z]+\]\n)'
190
+ tags = [
191
+ tag.strip() for tag in re.split(tag_re, proteinnet_str) if len(tag) > 0
192
+ ]
193
+ groups = zip(tags[0::2], [l.split('\n') for l in tags[1::2]])
194
+
195
+ atoms = ['N', 'CA', 'C']
196
+ aatype = None
197
+ atom_positions = None
198
+ atom_mask = None
199
+ for g in groups:
200
+ if("[PRIMARY]" == g[0]):
201
+ seq = list(g[1][0].strip())  # a list, so unknown symbols can be overwritten below (str does not support item assignment)
202
+ for i in range(len(seq)):
203
+ if(seq[i] not in residue_constants.restypes):
204
+ seq[i] = 'X'
205
+ aatype = np.array([
206
+ residue_constants.restype_order.get(
207
+ res_symbol, residue_constants.restype_num
208
+ ) for res_symbol in seq
209
+ ])
210
+ elif("[TERTIARY]" == g[0]):
211
+ tertiary = []
212
+ for axis in range(3):
213
+ tertiary.append(list(map(float, g[1][axis].split())))
214
+ tertiary_np = np.array(tertiary)
215
+ atom_positions = np.zeros(
216
+ (len(tertiary[0])//3, residue_constants.atom_type_num, 3)
217
+ ).astype(np.float32)
218
+ for i, atom in enumerate(atoms):
219
+ atom_positions[:, residue_constants.atom_order[atom], :] = (
220
+ np.transpose(tertiary_np[:, i::3])
221
+ )
222
+ atom_positions *= PICO_TO_ANGSTROM
223
+ elif("[MASK]" == g[0]):
224
+ mask = np.array(list(map({'-': 0, '+': 1}.get, g[1][0].strip())))
225
+ atom_mask = np.zeros(
226
+ (len(mask), residue_constants.atom_type_num,)
227
+ ).astype(np.float32)
228
+ for i, atom in enumerate(atoms):
229
+ atom_mask[:, residue_constants.atom_order[atom]] = 1
230
+ atom_mask *= mask[..., None]
231
+
232
+ return Protein(
233
+ atom_positions=atom_positions,
234
+ atom_mask=atom_mask,
235
+ aatype=aatype,
236
+ residue_index=np.arange(len(aatype)),
237
+ b_factors=None,
238
+ )
239
+
240
+
241
+ def _chain_end(atom_index, end_resname, chain_name, residue_index) -> str:
242
+ chain_end = 'TER'
243
+ return(
244
+ f'{chain_end:<6}{atom_index:>5} {end_resname:>3} '
245
+ f'{chain_name:>1}{residue_index:>4}'
246
+ )
247
+
248
+
249
+ def get_pdb_headers(prot: Protein, chain_id: int = 0) -> Sequence[str]:
250
+ pdb_headers = []
251
+
252
+ remark = prot.remark
253
+ if(remark is not None):
254
+ pdb_headers.append(f"REMARK {remark}")
255
+
256
+ parents = prot.parents
257
+ parents_chain_index = prot.parents_chain_index
258
+ if(parents_chain_index is not None):
259
+ parents = [
260
+ p for i, p in zip(parents_chain_index, parents) if i == chain_id
261
+ ]
262
+
263
+ if(parents is None or len(parents) == 0):
264
+ parents = ["N/A"]
265
+
266
+ pdb_headers.append(f"PARENT {' '.join(parents)}")
267
+
268
+ return pdb_headers
269
+
270
+
271
+ def add_pdb_headers(prot: Protein, pdb_str: str) -> str:
272
+ """ Add pdb headers to an existing PDB string. Useful during multi-chain
273
+ recycling
274
+ """
275
+ out_pdb_lines = []
276
+ lines = pdb_str.split('\n')
277
+
278
+ remark = prot.remark
279
+ if(remark is not None):
280
+ out_pdb_lines.append(f"REMARK {remark}")
281
+
282
+ parents_per_chain = None
283
+ if(prot.parents is not None and len(prot.parents) > 0):
284
+ parents_per_chain = []
285
+ if(prot.parents_chain_index is not None):
286
+ cur_chain = prot.parents_chain_index[0]
287
+ parent_dict = {}
288
+ for p, i in zip(prot.parents, prot.parents_chain_index):
289
+ parent_dict.setdefault(str(i), [])
290
+ parent_dict[str(i)].append(p)
291
+
292
+ max_idx = max([int(chain_idx) for chain_idx in parent_dict])
293
+ for i in range(max_idx + 1):
294
+ chain_parents = parent_dict.get(str(i), ["N/A"])
295
+ parents_per_chain.append(chain_parents)
296
+ else:
297
+ parents_per_chain.append(prot.parents)
298
+ else:
299
+ parents_per_chain = [["N/A"]]
300
+
301
+ make_parent_line = lambda p: f"PARENT {' '.join(p)}"
302
+
303
+ out_pdb_lines.append(make_parent_line(parents_per_chain[0]))
304
+
305
+ chain_counter = 0
306
+ for i, l in enumerate(lines):
307
+ if("PARENT" not in l and "REMARK" not in l):
308
+ out_pdb_lines.append(l)
309
+ if("TER" in l and not "END" in lines[i + 1]):
310
+ chain_counter += 1
311
+ if(not chain_counter >= len(parents_per_chain)):
312
+ chain_parents = parents_per_chain[chain_counter]
313
+ else:
314
+ chain_parents = ["N/A"]
315
+
316
+ out_pdb_lines.append(make_parent_line(chain_parents))
317
+
318
+ return '\n'.join(out_pdb_lines)
319
+
320
+
321
+ def to_pdb(prot: Protein) -> str:
322
+ """Converts a `Protein` instance to a PDB string.
323
+
324
+ Args:
325
+ prot: The protein to convert to PDB.
326
+
327
+ Returns:
328
+ PDB string.
329
+ """
330
+ restypes = residue_constants.restypes + ["X"]
331
+ res_1to3 = lambda r: residue_constants.restype_1to3.get(restypes[r], "UNK")
332
+ atom_types = residue_constants.atom_types
333
+
334
+ pdb_lines = []
335
+
336
+ atom_mask = prot.atom_mask
337
+ aatype = prot.aatype
338
+ atom_positions = prot.atom_positions
339
+ residue_index = prot.residue_index.astype(np.int32)
340
+ b_factors = prot.b_factors
341
+ chain_index = (prot.chain_index if prot.chain_index is not None else np.zeros_like(aatype)).astype(np.int32)
342
+
343
+ if np.any(aatype > residue_constants.restype_num):
344
+ raise ValueError("Invalid aatypes.")
345
+
346
+ # Construct a mapping from chain integer indices to chain ID strings.
347
+ chain_ids = {}
348
+ for i in np.unique(chain_index): # np.unique gives sorted output.
349
+ if i >= PDB_MAX_CHAINS:
350
+ raise ValueError(
351
+ f"The PDB format supports at most {PDB_MAX_CHAINS} chains."
352
+ )
353
+ chain_ids[i] = PDB_CHAIN_IDS[i]
354
+
355
+ headers = get_pdb_headers(prot)
356
+ if (len(headers) > 0):
357
+ pdb_lines.extend(headers)
358
+
359
+ pdb_lines.append("MODEL 1")
360
+ n = aatype.shape[0]
361
+ atom_index = 1
362
+ last_chain_index = chain_index[0]
363
+ prev_chain_index = 0
364
+ chain_tags = string.ascii_uppercase
365
+
366
+ # Add all atom sites.
367
+ for i in range(aatype.shape[0]):
368
+ # Close the previous chain if in a multichain PDB.
369
+ if last_chain_index != chain_index[i]:
370
+ pdb_lines.append(
371
+ _chain_end(
372
+ atom_index,
373
+ res_1to3(aatype[i - 1]),
374
+ chain_ids[chain_index[i - 1]],
375
+ residue_index[i - 1]
376
+ )
377
+ )
378
+ last_chain_index = chain_index[i]
379
+ atom_index += 1 # Atom index increases at the TER symbol.
380
+
381
+ res_name_3 = res_1to3(aatype[i])
382
+ for atom_name, pos, mask, b_factor in zip(
383
+ atom_types, atom_positions[i], atom_mask[i], b_factors[i]
384
+ ):
385
+ if mask < 0.5:
386
+ continue
387
+
388
+ record_type = "ATOM"
389
+ name = atom_name if len(atom_name) == 4 else f" {atom_name}"
390
+ alt_loc = ""
391
+ insertion_code = ""
392
+ occupancy = 1.00
393
+ element = atom_name[
394
+ 0
395
+ ] # Protein supports only C, N, O, S, this works.
396
+ charge = ""
397
+
398
+ chain_tag = "A"
399
+ if(chain_index is not None):
400
+ chain_tag = chain_tags[chain_index[i]]
401
+
402
+ # PDB is a columnar format, every space matters here!
403
+ atom_line = (
404
+ f"{record_type:<6}{atom_index:>5} {name:<4}{alt_loc:>1}"
405
+ #TODO: check this refactor, chose main branch version
406
+ #f"{res_name_3:>3} {chain_ids[chain_index[i]]:>1}"
407
+ f"{res_name_3:>3} {chain_tag:>1}"
408
+ f"{residue_index[i]:>4}{insertion_code:>1} "
409
+ f"{pos[0]:>8.3f}{pos[1]:>8.3f}{pos[2]:>8.3f}"
410
+ f"{occupancy:>6.2f}{b_factor:>6.2f} "
411
+ f"{element:>2}{charge:>2}"
412
+ )
413
+ pdb_lines.append(atom_line)
414
+ atom_index += 1
415
+
416
+ should_terminate = (i == n - 1)
417
+ if(chain_index is not None):
418
+ if(i != n - 1 and chain_index[i + 1] != prev_chain_index):
419
+ should_terminate = True
420
+ prev_chain_index = chain_index[i + 1]
421
+
422
+ if(should_terminate):
423
+ # Close the chain.
424
+ chain_end = "TER"
425
+ chain_termination_line = (
426
+ f"{chain_end:<6}{atom_index:>5} "
427
+ f"{res_1to3(aatype[i]):>3} "
428
+ f"{chain_tag:>1}{residue_index[i]:>4}"
429
+ )
430
+ pdb_lines.append(chain_termination_line)
431
+ atom_index += 1
432
+
433
+ if(i != n - 1):
434
+ # "prev" is a misnomer here. This happens at the beginning of
435
+ # each new chain.
436
+ pdb_lines.extend(get_pdb_headers(prot, prev_chain_index))
437
+
438
+ pdb_lines.append("ENDMDL")
439
+ pdb_lines.append("END")
440
+
441
+ # Pad all lines to 80 characters
442
+ pdb_lines = [line.ljust(80) for line in pdb_lines]
443
+ return '\n'.join(pdb_lines) + '\n' # Add terminating newline.
444
+
445
+
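`from_pdb_string` and `to_pdb` together give a lossy-but-stable round trip: non-standard residues become UNK, atoms outside `residue_constants.atom_types` are dropped, and everything else survives serialization. A usage sketch (the input path is a placeholder):

```python
# Round-trip sketch: parse a PDB file, then serialize it back out.
with open("example.pdb") as f:            # placeholder path
    prot = from_pdb_string(f.read(), chain_id="A")

print(prot.aatype.shape, prot.atom_positions.shape)   # [num_res], [num_res, 37, 3]
pdb_out = to_pdb(prot)
assert from_pdb_string(pdb_out).aatype.shape == prot.aatype.shape
```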
446
+ def to_modelcif(prot: Protein) -> str:
447
+ """
448
+ Converts a `Protein` instance to a ModelCIF string. Chains with identical modelled coordinates
449
+ will be treated as the same polymer entity. But note that if chains differ in modelled regions,
450
+ no attempt is made at identifying them as a single polymer entity.
451
+
452
+ Args:
453
+ prot: The protein to convert to ModelCIF.
454
+
455
+ Returns:
456
+ ModelCIF string.
457
+ """
458
+
459
+ restypes = residue_constants.restypes + ["X"]
460
+ atom_types = residue_constants.atom_types
461
+
462
+ atom_mask = prot.atom_mask
463
+ aatype = prot.aatype
464
+ atom_positions = prot.atom_positions
465
+ residue_index = prot.residue_index.astype(np.int32)
466
+ b_factors = prot.b_factors
467
+ chain_index = prot.chain_index
468
+
469
+ n = aatype.shape[0]
470
+ if chain_index is None:
471
+ chain_index = [0 for i in range(n)]
472
+
473
+ system = modelcif.System(title='OpenFold prediction')
474
+
475
+ # Finding chains and creating entities
476
+ seqs = {}
477
+ seq = []
478
+ last_chain_idx = None
479
+ for i in range(n):
480
+ if last_chain_idx is not None and last_chain_idx != chain_index[i]:
481
+ seqs[last_chain_idx] = seq
482
+ seq = []
483
+ seq.append(restypes[aatype[i]])
484
+ last_chain_idx = chain_index[i]
485
+ # finally add the last chain
486
+ seqs[last_chain_idx] = seq
487
+
488
+ # now reduce sequences to unique ones (note this won't work if different asyms have different unmodelled regions)
489
+ unique_seqs = {}
490
+ for chain_idx, seq_list in seqs.items():
491
+ seq = "".join(seq_list)
492
+ if seq in unique_seqs:
493
+ unique_seqs[seq].append(chain_idx)
494
+ else:
495
+ unique_seqs[seq] = [chain_idx]
496
+
497
+ # adding 1 entity per unique sequence
498
+ entities_map = {}
499
+ for key, value in unique_seqs.items():
500
+ model_e = modelcif.Entity(key, description='Model subunit')
501
+ for chain_idx in value:
502
+ entities_map[chain_idx] = model_e
503
+
504
+ chain_tags = string.ascii_uppercase
505
+ asym_unit_map = {}
506
+ for chain_idx in set(chain_index):
507
+ # Define the model assembly
508
+ chain_id = chain_tags[chain_idx]
509
+ asym = modelcif.AsymUnit(entities_map[chain_idx], details='Model subunit %s' % chain_id, id=chain_id)
510
+ asym_unit_map[chain_idx] = asym
511
+ modeled_assembly = modelcif.Assembly(asym_unit_map.values(), name='Modeled assembly')
512
+
513
+ class _LocalPLDDT(modelcif.qa_metric.Local, modelcif.qa_metric.PLDDT):
514
+ name = "pLDDT"
515
+ software = None
516
+ description = "Predicted lddt"
517
+
518
+ class _GlobalPLDDT(modelcif.qa_metric.Global, modelcif.qa_metric.PLDDT):
519
+ name = "pLDDT"
520
+ software = None
521
+ description = "Global pLDDT, mean of per-residue pLDDTs"
522
+
523
+ class _MyModel(modelcif.model.AbInitioModel):
524
+ def get_atoms(self):
525
+ # Add all atom sites.
526
+ for i in range(n):
527
+ for atom_name, pos, mask, b_factor in zip(
528
+ atom_types, atom_positions[i], atom_mask[i], b_factors[i]
529
+ ):
530
+ if mask < 0.5:
531
+ continue
532
+ element = atom_name[0] # Protein supports only C, N, O, S, this works.
533
+ yield modelcif.model.Atom(
534
+ asym_unit=asym_unit_map[chain_index[i]], type_symbol=element,
535
+ seq_id=residue_index[i], atom_id=atom_name,
536
+ x=pos[0], y=pos[1], z=pos[2],
537
+ het=False, biso=b_factor, occupancy=1.00)
538
+
539
+ def add_scores(self):
540
+ # local scores
541
+ plddt_per_residue = {}
542
+ for i in range(n):
543
+ for mask, b_factor in zip(atom_mask[i], b_factors[i]):
544
+ if mask < 0.5:
545
+ continue
546
+ # add 1 per residue, not 1 per atom
547
+ if chain_index[i] not in plddt_per_residue:
548
+ # first time a chain index is seen: add the key and start the residue dict
549
+ plddt_per_residue[chain_index[i]] = {residue_index[i]: b_factor}
550
+ if residue_index[i] not in plddt_per_residue[chain_index[i]]:
551
+ plddt_per_residue[chain_index[i]][residue_index[i]] = b_factor
552
+ plddts = []
553
+ for chain_idx in plddt_per_residue:
554
+ for residue_idx in plddt_per_residue[chain_idx]:
555
+ plddt = plddt_per_residue[chain_idx][residue_idx]
556
+ plddts.append(plddt)
557
+ self.qa_metrics.append(
558
+ _LocalPLDDT(asym_unit_map[chain_idx].residue(residue_idx), plddt))
559
+ # global score
560
+ self.qa_metrics.append((_GlobalPLDDT(np.mean(plddts))))
561
+
562
+ # Add the model and modeling protocol to the file and write them out:
563
+ model = _MyModel(assembly=modeled_assembly, name='Best scoring model')
564
+ model.add_scores()
565
+
566
+ model_group = modelcif.model.ModelGroup([model], name='All models')
567
+ system.model_groups.append(model_group)
568
+
569
+ fh = io.StringIO()
570
+ modelcif.dumper.write(fh, [system])
571
+ return fh.getvalue()
572
+
573
+
574
+ def ideal_atom_mask(prot: Protein) -> np.ndarray:
575
+ """Computes an ideal atom mask.
576
+
577
+ `Protein.atom_mask` typically is defined according to the atoms that are
578
+ reported in the PDB. This function computes a mask according to heavy atoms
579
+ that should be present in the given sequence of amino acids.
580
+
581
+ Args:
582
+ prot: `Protein` whose fields are `numpy.ndarray` objects.
583
+
584
+ Returns:
585
+ An ideal atom mask.
586
+ """
587
+ return residue_constants.STANDARD_ATOM_MASK[prot.aatype]
588
+
589
+
590
+ def from_prediction(
591
+ features: FeatureDict,
592
+ result: ModelOutput,
593
+ b_factors: Optional[np.ndarray] = None,
594
+ remove_leading_feature_dimension: bool = True,
595
+ remark: Optional[str] = None,
596
+ parents: Optional[Sequence[str]] = None,
597
+ parents_chain_index: Optional[Sequence[int]] = None
598
+ ) -> Protein:
599
+ """Assembles a protein from a prediction.
600
+
601
+ Args:
602
+ features: Dictionary holding model inputs.
603
+ result: Dictionary holding model outputs.
604
+ b_factors: (Optional) B-factors to use for the protein.
605
+ remove_leading_feature_dimension: Whether to remove the leading dimension
606
+ of the `features` values
607
+ remark: (Optional) Remark about the prediction
609
+ parents: (Optional) List of template names
610
+ parents_chain_index: (Optional) Chain index for each entry in `parents`
610
+ Returns:
611
+ A protein instance.
612
+ """
613
+ def _maybe_remove_leading_dim(arr: np.ndarray) -> np.ndarray:
614
+ return arr[0] if remove_leading_feature_dimension else arr
615
+
616
+ if 'asym_id' in features:
617
+ chain_index = _maybe_remove_leading_dim(features["asym_id"]) - 1
618
+ else:
619
+ chain_index = np.zeros_like(
620
+ _maybe_remove_leading_dim(features["aatype"])
621
+ )
622
+
623
+ if b_factors is None:
624
+ b_factors = np.zeros_like(result["final_atom_mask"])
625
+
626
+ return Protein(
627
+ aatype=_maybe_remove_leading_dim(features["aatype"]),
628
+ atom_positions=result["final_atom_positions"],
629
+ atom_mask=result["final_atom_mask"],
630
+ residue_index=_maybe_remove_leading_dim(features["residue_index"]) + 1,
631
+ b_factors=b_factors,
632
+ chain_index=chain_index,
633
+ remark=remark,
634
+ parents=parents,
635
+ parents_chain_index=parents_chain_index,
636
+ )
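`ideal_atom_mask` pairs naturally with a parsed structure for flagging heavy atoms that the sequence implies but the deposited coordinates lack. A short sketch (assumes a `prot` built with `from_pdb_string`, as above):

```python
import numpy as np

missing = ideal_atom_mask(prot) * (1.0 - prot.atom_mask)
print("missing heavy atoms:", int(np.sum(missing)))
```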
data/mdgen/residue_constants.py ADDED
@@ -0,0 +1,1486 @@
1
+ # Copyright 2021 AlQuraishi Laboratory
2
+ # Copyright 2021 DeepMind Technologies Limited
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+
16
+ """Constants used in AlphaFold."""
17
+
18
+ import collections
19
+ import functools
20
+ from typing import Mapping, List, Tuple
21
+ from importlib import resources
22
+
23
+ import numpy as np
24
+ import tree
25
+
26
+
27
+ # Distance from one CA to next CA [trans configuration: omega = 180].
28
+ ca_ca = 3.80209737096
29
+
30
+ # Format: The list for each AA type contains chi1, chi2, chi3, chi4 in
31
+ # this order (or a relevant subset from chi1 onwards). ALA and GLY don't have
32
+ # chi angles so their chi angle lists are empty.
33
+ chi_angles_atoms = {
34
+ "ALA": [],
35
+ # Chi5 in arginine is always 0 +- 5 degrees, so ignore it.
36
+ "ARG": [
37
+ ["N", "CA", "CB", "CG"],
38
+ ["CA", "CB", "CG", "CD"],
39
+ ["CB", "CG", "CD", "NE"],
40
+ ["CG", "CD", "NE", "CZ"],
41
+ ],
42
+ "ASN": [["N", "CA", "CB", "CG"], ["CA", "CB", "CG", "OD1"]],
43
+ "ASP": [["N", "CA", "CB", "CG"], ["CA", "CB", "CG", "OD1"]],
44
+ "CYS": [["N", "CA", "CB", "SG"]],
45
+ "GLN": [
46
+ ["N", "CA", "CB", "CG"],
47
+ ["CA", "CB", "CG", "CD"],
48
+ ["CB", "CG", "CD", "OE1"],
49
+ ],
50
+ "GLU": [
51
+ ["N", "CA", "CB", "CG"],
52
+ ["CA", "CB", "CG", "CD"],
53
+ ["CB", "CG", "CD", "OE1"],
54
+ ],
55
+ "GLY": [],
56
+ "HIS": [["N", "CA", "CB", "CG"], ["CA", "CB", "CG", "ND1"]],
57
+ "ILE": [["N", "CA", "CB", "CG1"], ["CA", "CB", "CG1", "CD1"]],
58
+ "LEU": [["N", "CA", "CB", "CG"], ["CA", "CB", "CG", "CD1"]],
59
+ "LYS": [
60
+ ["N", "CA", "CB", "CG"],
61
+ ["CA", "CB", "CG", "CD"],
62
+ ["CB", "CG", "CD", "CE"],
63
+ ["CG", "CD", "CE", "NZ"],
64
+ ],
65
+ "MET": [
66
+ ["N", "CA", "CB", "CG"],
67
+ ["CA", "CB", "CG", "SD"],
68
+ ["CB", "CG", "SD", "CE"],
69
+ ],
70
+ "PHE": [["N", "CA", "CB", "CG"], ["CA", "CB", "CG", "CD1"]],
71
+ "PRO": [["N", "CA", "CB", "CG"], ["CA", "CB", "CG", "CD"]],
72
+ "SER": [["N", "CA", "CB", "OG"]],
73
+ "THR": [["N", "CA", "CB", "OG1"]],
74
+ "TRP": [["N", "CA", "CB", "CG"], ["CA", "CB", "CG", "CD1"]],
75
+ "TYR": [["N", "CA", "CB", "CG"], ["CA", "CB", "CG", "CD1"]],
76
+ "VAL": [["N", "CA", "CB", "CG1"]],
77
+ }
78
+
79
+ # If chi angles given in fixed-length array, this matrix determines how to mask
80
+ # them for each AA type. The order is as per restype_order (see below).
81
+ chi_angles_mask = [
82
+ [0.0, 0.0, 0.0, 0.0], # ALA
83
+ [1.0, 1.0, 1.0, 1.0], # ARG
84
+ [1.0, 1.0, 0.0, 0.0], # ASN
85
+ [1.0, 1.0, 0.0, 0.0], # ASP
86
+ [1.0, 0.0, 0.0, 0.0], # CYS
87
+ [1.0, 1.0, 1.0, 0.0], # GLN
88
+ [1.0, 1.0, 1.0, 0.0], # GLU
89
+ [0.0, 0.0, 0.0, 0.0], # GLY
90
+ [1.0, 1.0, 0.0, 0.0], # HIS
91
+ [1.0, 1.0, 0.0, 0.0], # ILE
92
+ [1.0, 1.0, 0.0, 0.0], # LEU
93
+ [1.0, 1.0, 1.0, 1.0], # LYS
94
+ [1.0, 1.0, 1.0, 0.0], # MET
95
+ [1.0, 1.0, 0.0, 0.0], # PHE
96
+ [1.0, 1.0, 0.0, 0.0], # PRO
97
+ [1.0, 0.0, 0.0, 0.0], # SER
98
+ [1.0, 0.0, 0.0, 0.0], # THR
99
+ [1.0, 1.0, 0.0, 0.0], # TRP
100
+ [1.0, 1.0, 0.0, 0.0], # TYR
101
+ [1.0, 0.0, 0.0, 0.0], # VAL
102
+ ]
103
+
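`chi_angles_atoms` and `chi_angles_mask` encode the same information in two shapes, so the mask row of each residue should sum to its number of chi definitions. A quick consistency check (the three-letter list below spells out the restype order, which is alphabetical by three-letter code):

```python
restypes_3 = ["ALA", "ARG", "ASN", "ASP", "CYS", "GLN", "GLU", "GLY", "HIS",
              "ILE", "LEU", "LYS", "MET", "PHE", "PRO", "SER", "THR", "TRP",
              "TYR", "VAL"]
for res3, mask in zip(restypes_3, chi_angles_mask):
    assert int(sum(mask)) == len(chi_angles_atoms[res3]), res3
```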
104
+ # The following chi angles are pi periodic: they can be rotated by a multiple
105
+ # of pi without affecting the structure.
106
+ chi_pi_periodic = [
107
+ [0.0, 0.0, 0.0, 0.0], # ALA
108
+ [0.0, 0.0, 0.0, 0.0], # ARG
109
+ [0.0, 0.0, 0.0, 0.0], # ASN
110
+ [0.0, 1.0, 0.0, 0.0], # ASP
111
+ [0.0, 0.0, 0.0, 0.0], # CYS
112
+ [0.0, 0.0, 0.0, 0.0], # GLN
113
+ [0.0, 0.0, 1.0, 0.0], # GLU
114
+ [0.0, 0.0, 0.0, 0.0], # GLY
115
+ [0.0, 0.0, 0.0, 0.0], # HIS
116
+ [0.0, 0.0, 0.0, 0.0], # ILE
117
+ [0.0, 0.0, 0.0, 0.0], # LEU
118
+ [0.0, 0.0, 0.0, 0.0], # LYS
119
+ [0.0, 0.0, 0.0, 0.0], # MET
120
+ [0.0, 1.0, 0.0, 0.0], # PHE
121
+ [0.0, 0.0, 0.0, 0.0], # PRO
122
+ [0.0, 0.0, 0.0, 0.0], # SER
123
+ [0.0, 0.0, 0.0, 0.0], # THR
124
+ [0.0, 0.0, 0.0, 0.0], # TRP
125
+ [0.0, 1.0, 0.0, 0.0], # TYR
126
+ [0.0, 0.0, 0.0, 0.0], # VAL
127
+ [0.0, 0.0, 0.0, 0.0], # UNK
128
+ ]
129
+
130
+ # Atoms positions relative to the 8 rigid groups, defined by the pre-omega, phi,
131
+ # psi and chi angles:
132
+ # 0: 'backbone group',
133
+ # 1: 'pre-omega-group', (empty)
134
+ # 2: 'phi-group', (currently empty, because it defines only hydrogens)
135
+ # 3: 'psi-group',
136
+ # 4,5,6,7: 'chi1,2,3,4-group'
137
+ # The atom positions are relative to the axis-end-atom of the corresponding
138
+ # rotation axis. The x-axis is in direction of the rotation axis, and the y-axis
139
+ # is defined such that the dihedral-angle-defining atom (the last entry in
140
+ # chi_angles_atoms above) is in the xy-plane (with a positive y-coordinate).
141
+ # format: [atomname, group_idx, rel_position]
142
+ rigid_group_atom_positions = {
143
+ "ALA": [
144
+ ["N", 0, (-0.525, 1.363, 0.000)],
145
+ ["CA", 0, (0.000, 0.000, 0.000)],
146
+ ["C", 0, (1.526, -0.000, -0.000)],
147
+ ["CB", 0, (-0.529, -0.774, -1.205)],
148
+ ["O", 3, (0.627, 1.062, 0.000)],
149
+ ],
150
+ "ARG": [
151
+ ["N", 0, (-0.524, 1.362, -0.000)],
152
+ ["CA", 0, (0.000, 0.000, 0.000)],
153
+ ["C", 0, (1.525, -0.000, -0.000)],
154
+ ["CB", 0, (-0.524, -0.778, -1.209)],
155
+ ["O", 3, (0.626, 1.062, 0.000)],
156
+ ["CG", 4, (0.616, 1.390, -0.000)],
157
+ ["CD", 5, (0.564, 1.414, 0.000)],
158
+ ["NE", 6, (0.539, 1.357, -0.000)],
159
+ ["NH1", 7, (0.206, 2.301, 0.000)],
160
+ ["NH2", 7, (2.078, 0.978, -0.000)],
161
+ ["CZ", 7, (0.758, 1.093, -0.000)],
162
+ ],
163
+ "ASN": [
164
+ ["N", 0, (-0.536, 1.357, 0.000)],
165
+ ["CA", 0, (0.000, 0.000, 0.000)],
166
+ ["C", 0, (1.526, -0.000, -0.000)],
167
+ ["CB", 0, (-0.531, -0.787, -1.200)],
168
+ ["O", 3, (0.625, 1.062, 0.000)],
169
+ ["CG", 4, (0.584, 1.399, 0.000)],
170
+ ["ND2", 5, (0.593, -1.188, 0.001)],
171
+ ["OD1", 5, (0.633, 1.059, 0.000)],
172
+ ],
173
+ "ASP": [
174
+ ["N", 0, (-0.525, 1.362, -0.000)],
175
+ ["CA", 0, (0.000, 0.000, 0.000)],
176
+ ["C", 0, (1.527, 0.000, -0.000)],
177
+ ["CB", 0, (-0.526, -0.778, -1.208)],
178
+ ["O", 3, (0.626, 1.062, -0.000)],
179
+ ["CG", 4, (0.593, 1.398, -0.000)],
180
+ ["OD1", 5, (0.610, 1.091, 0.000)],
181
+ ["OD2", 5, (0.592, -1.101, -0.003)],
182
+ ],
183
+ "CYS": [
184
+ ["N", 0, (-0.522, 1.362, -0.000)],
185
+ ["CA", 0, (0.000, 0.000, 0.000)],
186
+ ["C", 0, (1.524, 0.000, 0.000)],
187
+ ["CB", 0, (-0.519, -0.773, -1.212)],
188
+ ["O", 3, (0.625, 1.062, -0.000)],
189
+ ["SG", 4, (0.728, 1.653, 0.000)],
190
+ ],
191
+ "GLN": [
192
+ ["N", 0, (-0.526, 1.361, -0.000)],
193
+ ["CA", 0, (0.000, 0.000, 0.000)],
194
+ ["C", 0, (1.526, 0.000, 0.000)],
195
+ ["CB", 0, (-0.525, -0.779, -1.207)],
196
+ ["O", 3, (0.626, 1.062, -0.000)],
197
+ ["CG", 4, (0.615, 1.393, 0.000)],
198
+ ["CD", 5, (0.587, 1.399, -0.000)],
199
+ ["NE2", 6, (0.593, -1.189, -0.001)],
200
+ ["OE1", 6, (0.634, 1.060, 0.000)],
201
+ ],
202
+ "GLU": [
203
+ ["N", 0, (-0.528, 1.361, 0.000)],
204
+ ["CA", 0, (0.000, 0.000, 0.000)],
205
+ ["C", 0, (1.526, -0.000, -0.000)],
206
+ ["CB", 0, (-0.526, -0.781, -1.207)],
207
+ ["O", 3, (0.626, 1.062, 0.000)],
208
+ ["CG", 4, (0.615, 1.392, 0.000)],
209
+ ["CD", 5, (0.600, 1.397, 0.000)],
210
+ ["OE1", 6, (0.607, 1.095, -0.000)],
211
+ ["OE2", 6, (0.589, -1.104, -0.001)],
212
+ ],
213
+ "GLY": [
214
+ ["N", 0, (-0.572, 1.337, 0.000)],
215
+ ["CA", 0, (0.000, 0.000, 0.000)],
216
+ ["C", 0, (1.517, -0.000, -0.000)],
217
+ ["O", 3, (0.626, 1.062, -0.000)],
218
+ ],
219
+ "HIS": [
220
+ ["N", 0, (-0.527, 1.360, 0.000)],
221
+ ["CA", 0, (0.000, 0.000, 0.000)],
222
+ ["C", 0, (1.525, 0.000, 0.000)],
223
+ ["CB", 0, (-0.525, -0.778, -1.208)],
224
+ ["O", 3, (0.625, 1.063, 0.000)],
225
+ ["CG", 4, (0.600, 1.370, -0.000)],
226
+ ["CD2", 5, (0.889, -1.021, 0.003)],
227
+ ["ND1", 5, (0.744, 1.160, -0.000)],
228
+ ["CE1", 5, (2.030, 0.851, 0.002)],
229
+ ["NE2", 5, (2.145, -0.466, 0.004)],
230
+ ],
231
+ "ILE": [
232
+ ["N", 0, (-0.493, 1.373, -0.000)],
233
+ ["CA", 0, (0.000, 0.000, 0.000)],
234
+ ["C", 0, (1.527, -0.000, -0.000)],
235
+ ["CB", 0, (-0.536, -0.793, -1.213)],
236
+ ["O", 3, (0.627, 1.062, -0.000)],
237
+ ["CG1", 4, (0.534, 1.437, -0.000)],
238
+ ["CG2", 4, (0.540, -0.785, -1.199)],
239
+ ["CD1", 5, (0.619, 1.391, 0.000)],
240
+ ],
241
+ "LEU": [
242
+ ["N", 0, (-0.520, 1.363, 0.000)],
243
+ ["CA", 0, (0.000, 0.000, 0.000)],
244
+ ["C", 0, (1.525, -0.000, -0.000)],
245
+ ["CB", 0, (-0.522, -0.773, -1.214)],
246
+ ["O", 3, (0.625, 1.063, -0.000)],
247
+ ["CG", 4, (0.678, 1.371, 0.000)],
248
+ ["CD1", 5, (0.530, 1.430, -0.000)],
249
+ ["CD2", 5, (0.535, -0.774, 1.200)],
250
+ ],
251
+ "LYS": [
252
+ ["N", 0, (-0.526, 1.362, -0.000)],
253
+ ["CA", 0, (0.000, 0.000, 0.000)],
254
+ ["C", 0, (1.526, 0.000, 0.000)],
255
+ ["CB", 0, (-0.524, -0.778, -1.208)],
256
+ ["O", 3, (0.626, 1.062, -0.000)],
257
+ ["CG", 4, (0.619, 1.390, 0.000)],
258
+ ["CD", 5, (0.559, 1.417, 0.000)],
259
+ ["CE", 6, (0.560, 1.416, 0.000)],
260
+ ["NZ", 7, (0.554, 1.387, 0.000)],
261
+ ],
262
+ "MET": [
263
+ ["N", 0, (-0.521, 1.364, -0.000)],
264
+ ["CA", 0, (0.000, 0.000, 0.000)],
265
+ ["C", 0, (1.525, 0.000, 0.000)],
266
+ ["CB", 0, (-0.523, -0.776, -1.210)],
267
+ ["O", 3, (0.625, 1.062, -0.000)],
268
+ ["CG", 4, (0.613, 1.391, -0.000)],
269
+ ["SD", 5, (0.703, 1.695, 0.000)],
270
+ ["CE", 6, (0.320, 1.786, -0.000)],
271
+ ],
272
+ "PHE": [
273
+ ["N", 0, (-0.518, 1.363, 0.000)],
274
+ ["CA", 0, (0.000, 0.000, 0.000)],
275
+ ["C", 0, (1.524, 0.000, -0.000)],
276
+ ["CB", 0, (-0.525, -0.776, -1.212)],
277
+ ["O", 3, (0.626, 1.062, -0.000)],
278
+ ["CG", 4, (0.607, 1.377, 0.000)],
279
+ ["CD1", 5, (0.709, 1.195, -0.000)],
280
+ ["CD2", 5, (0.706, -1.196, 0.000)],
281
+ ["CE1", 5, (2.102, 1.198, -0.000)],
282
+ ["CE2", 5, (2.098, -1.201, -0.000)],
283
+ ["CZ", 5, (2.794, -0.003, -0.001)],
284
+ ],
285
+ "PRO": [
286
+ ["N", 0, (-0.566, 1.351, -0.000)],
287
+ ["CA", 0, (0.000, 0.000, 0.000)],
288
+ ["C", 0, (1.527, -0.000, 0.000)],
289
+ ["CB", 0, (-0.546, -0.611, -1.293)],
290
+ ["O", 3, (0.621, 1.066, 0.000)],
291
+ ["CG", 4, (0.382, 1.445, 0.0)],
292
+ # ['CD', 5, (0.427, 1.440, 0.0)],
293
+ ["CD", 5, (0.477, 1.424, 0.0)], # manually made angle 2 degrees larger
294
+ ],
295
+ "SER": [
296
+ ["N", 0, (-0.529, 1.360, -0.000)],
297
+ ["CA", 0, (0.000, 0.000, 0.000)],
298
+ ["C", 0, (1.525, -0.000, -0.000)],
299
+ ["CB", 0, (-0.518, -0.777, -1.211)],
300
+ ["O", 3, (0.626, 1.062, -0.000)],
301
+ ["OG", 4, (0.503, 1.325, 0.000)],
302
+ ],
303
+ "THR": [
304
+ ["N", 0, (-0.517, 1.364, 0.000)],
305
+ ["CA", 0, (0.000, 0.000, 0.000)],
306
+ ["C", 0, (1.526, 0.000, -0.000)],
307
+ ["CB", 0, (-0.516, -0.793, -1.215)],
308
+ ["O", 3, (0.626, 1.062, 0.000)],
309
+ ["CG2", 4, (0.550, -0.718, -1.228)],
310
+ ["OG1", 4, (0.472, 1.353, 0.000)],
311
+ ],
312
+ "TRP": [
313
+ ["N", 0, (-0.521, 1.363, 0.000)],
314
+ ["CA", 0, (0.000, 0.000, 0.000)],
315
+ ["C", 0, (1.525, -0.000, 0.000)],
316
+ ["CB", 0, (-0.523, -0.776, -1.212)],
317
+ ["O", 3, (0.627, 1.062, 0.000)],
318
+ ["CG", 4, (0.609, 1.370, -0.000)],
319
+ ["CD1", 5, (0.824, 1.091, 0.000)],
320
+ ["CD2", 5, (0.854, -1.148, -0.005)],
321
+ ["CE2", 5, (2.186, -0.678, -0.007)],
322
+ ["CE3", 5, (0.622, -2.530, -0.007)],
323
+ ["NE1", 5, (2.140, 0.690, -0.004)],
324
+ ["CH2", 5, (3.028, -2.890, -0.013)],
325
+ ["CZ2", 5, (3.283, -1.543, -0.011)],
326
+ ["CZ3", 5, (1.715, -3.389, -0.011)],
327
+ ],
328
+ "TYR": [
329
+ ["N", 0, (-0.522, 1.362, 0.000)],
330
+ ["CA", 0, (0.000, 0.000, 0.000)],
331
+ ["C", 0, (1.524, -0.000, -0.000)],
332
+ ["CB", 0, (-0.522, -0.776, -1.213)],
333
+ ["O", 3, (0.627, 1.062, -0.000)],
334
+ ["CG", 4, (0.607, 1.382, -0.000)],
335
+ ["CD1", 5, (0.716, 1.195, -0.000)],
336
+ ["CD2", 5, (0.713, -1.194, -0.001)],
337
+ ["CE1", 5, (2.107, 1.200, -0.002)],
338
+ ["CE2", 5, (2.104, -1.201, -0.003)],
339
+ ["OH", 5, (4.168, -0.002, -0.005)],
340
+ ["CZ", 5, (2.791, -0.001, -0.003)],
341
+ ],
342
+ "VAL": [
343
+ ["N", 0, (-0.494, 1.373, -0.000)],
344
+ ["CA", 0, (0.000, 0.000, 0.000)],
345
+ ["C", 0, (1.527, -0.000, -0.000)],
346
+ ["CB", 0, (-0.533, -0.795, -1.213)],
347
+ ["O", 3, (0.627, 1.062, -0.000)],
348
+ ["CG1", 4, (0.540, 1.429, -0.000)],
349
+ ["CG2", 4, (0.533, -0.776, 1.203)],
350
+ ],
351
+ }
352
+
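+ # How to read the table above (an illustrative note, not part of the upstream
+ # constants): each entry is (atom name, rigid group index, position in that
+ # group's local frame, in Angstroms). For example, VAL's ["CB", 0, (-0.533,
+ # -0.795, -1.213)] places CB in the backbone group (index 0) relative to CA
+ # at the origin.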
353
+ # A list of atoms (excluding hydrogen) for each AA type. PDB naming convention.
354
+ residue_atoms = {
355
+ "ALA": ["C", "CA", "CB", "N", "O"],
356
+ "ARG": ["C", "CA", "CB", "CG", "CD", "CZ", "N", "NE", "O", "NH1", "NH2"],
357
+ "ASP": ["C", "CA", "CB", "CG", "N", "O", "OD1", "OD2"],
358
+ "ASN": ["C", "CA", "CB", "CG", "N", "ND2", "O", "OD1"],
359
+ "CYS": ["C", "CA", "CB", "N", "O", "SG"],
360
+ "GLU": ["C", "CA", "CB", "CG", "CD", "N", "O", "OE1", "OE2"],
361
+ "GLN": ["C", "CA", "CB", "CG", "CD", "N", "NE2", "O", "OE1"],
362
+ "GLY": ["C", "CA", "N", "O"],
363
+ "HIS": ["C", "CA", "CB", "CG", "CD2", "CE1", "N", "ND1", "NE2", "O"],
364
+ "ILE": ["C", "CA", "CB", "CG1", "CG2", "CD1", "N", "O"],
365
+ "LEU": ["C", "CA", "CB", "CG", "CD1", "CD2", "N", "O"],
366
+ "LYS": ["C", "CA", "CB", "CG", "CD", "CE", "N", "NZ", "O"],
367
+ "MET": ["C", "CA", "CB", "CG", "CE", "N", "O", "SD"],
368
+ "PHE": ["C", "CA", "CB", "CG", "CD1", "CD2", "CE1", "CE2", "CZ", "N", "O"],
369
+ "PRO": ["C", "CA", "CB", "CG", "CD", "N", "O"],
370
+ "SER": ["C", "CA", "CB", "N", "O", "OG"],
371
+ "THR": ["C", "CA", "CB", "CG2", "N", "O", "OG1"],
372
+ "TRP": [
373
+ "C",
374
+ "CA",
375
+ "CB",
376
+ "CG",
377
+ "CD1",
378
+ "CD2",
379
+ "CE2",
380
+ "CE3",
381
+ "CZ2",
382
+ "CZ3",
383
+ "CH2",
384
+ "N",
385
+ "NE1",
386
+ "O",
387
+ ],
388
+ "TYR": [
389
+ "C",
390
+ "CA",
391
+ "CB",
392
+ "CG",
393
+ "CD1",
394
+ "CD2",
395
+ "CE1",
396
+ "CE2",
397
+ "CZ",
398
+ "N",
399
+ "O",
400
+ "OH",
401
+ ],
402
+ "VAL": ["C", "CA", "CB", "CG1", "CG2", "N", "O"],
403
+ }
404
+
405
+ # Naming swaps for ambiguous atom names.
406
+ # Due to symmetries in the amino acids the naming of atoms is ambiguous in
407
+ # 4 of the 20 amino acids.
408
+ # (The LDDT paper lists 7 amino acids as ambiguous, but the naming ambiguities
409
+ # in LEU, VAL and ARG can be resolved by using the 3d constellations of
410
+ # the 'ambiguous' atoms and their neighbours)
411
+ # For LEU, VAL and ARG, no ambiguity exists when the prediction output is a chi angle rather than individual atom locations.
412
+ # For the rest (ASP, GLU, PHE and TYR), rotating the bond by 180 degrees yields the same configuration due to symmetry.
413
+
414
+ residue_atom_renaming_swaps = {
415
+ "ASP": {"OD1": "OD2"},
416
+ "GLU": {"OE1": "OE2"},
417
+ "PHE": {"CD1": "CD2", "CE1": "CE2"},
418
+ "TYR": {"CD1": "CD2", "CE1": "CE2"},
419
+ }
420
+
421
+ # Van der Waals radii [Angstroem] of the atoms (from Wikipedia)
422
+ van_der_waals_radius = {
423
+ "C": 1.7,
424
+ "N": 1.55,
425
+ "O": 1.52,
426
+ "S": 1.8,
427
+ }
428
+
429
+ Bond = collections.namedtuple(
430
+ "Bond", ["atom1_name", "atom2_name", "length", "stddev"]
431
+ )
432
+ BondAngle = collections.namedtuple(
433
+ "BondAngle",
434
+ ["atom1_name", "atom2_name", "atom3name", "angle_rad", "stddev"],
435
+ )
436
+
437
+
438
+ @functools.lru_cache(maxsize=None)
439
+ def load_stereo_chemical_props() -> Tuple[
440
+ Mapping[str, List[Bond]],
441
+ Mapping[str, List[Bond]],
442
+ Mapping[str, List[BondAngle]],
443
+ ]:
444
+ """Load stereo_chemical_props.txt into a nice structure.
445
+
446
+ Load literature values for bond lengths and bond angles and translate
447
+ bond angles into the length of the opposite edge of the triangle
448
+ ("residue_virtual_bonds").
449
+
450
+ Returns:
451
+ residue_bonds: Dict that maps resname -> list of Bond tuples
452
+ residue_virtual_bonds: Dict that maps resname -> list of Bond tuples
453
+ residue_bond_angles: Dict that maps resname -> list of BondAngle tuples
454
+ """
455
+ # TODO: this file should be downloaded in a setup script
456
+ stereo_chemical_props = resources.read_text("openfold.resources", "stereo_chemical_props.txt")
457
+
458
+ lines_iter = iter(stereo_chemical_props.splitlines())
459
+ # Load bond lengths.
460
+ residue_bonds = {}
461
+ next(lines_iter) # Skip header line.
462
+ for line in lines_iter:
463
+ if line.strip() == "-":
464
+ break
465
+ bond, resname, length, stddev = line.split()
466
+ atom1, atom2 = bond.split("-")
467
+ if resname not in residue_bonds:
468
+ residue_bonds[resname] = []
469
+ residue_bonds[resname].append(
470
+ Bond(atom1, atom2, float(length), float(stddev))
471
+ )
472
+ residue_bonds["UNK"] = []
473
+
474
+ # Load bond angles.
475
+ residue_bond_angles = {}
476
+ next(lines_iter) # Skip empty line.
477
+ next(lines_iter) # Skip header line.
478
+ for line in lines_iter:
479
+ if line.strip() == "-":
480
+ break
481
+ bond, resname, angle_degree, stddev_degree = line.split()
482
+ atom1, atom2, atom3 = bond.split("-")
483
+ if resname not in residue_bond_angles:
484
+ residue_bond_angles[resname] = []
485
+ residue_bond_angles[resname].append(
486
+ BondAngle(
487
+ atom1,
488
+ atom2,
489
+ atom3,
490
+ float(angle_degree) / 180.0 * np.pi,
491
+ float(stddev_degree) / 180.0 * np.pi,
492
+ )
493
+ )
494
+ residue_bond_angles["UNK"] = []
495
+
496
+ def make_bond_key(atom1_name, atom2_name):
497
+ """Unique key to lookup bonds."""
498
+ return "-".join(sorted([atom1_name, atom2_name]))
499
+
500
+ # Translate bond angles into distances ("virtual bonds").
501
+ residue_virtual_bonds = {}
502
+ for resname, bond_angles in residue_bond_angles.items():
503
+ # Create a fast lookup dict for bond lengths.
504
+ bond_cache = {}
505
+ for b in residue_bonds[resname]:
506
+ bond_cache[make_bond_key(b.atom1_name, b.atom2_name)] = b
507
+ residue_virtual_bonds[resname] = []
508
+ for ba in bond_angles:
509
+ bond1 = bond_cache[make_bond_key(ba.atom1_name, ba.atom2_name)]
510
+ bond2 = bond_cache[make_bond_key(ba.atom2_name, ba.atom3name)]
511
+
512
+ # Compute distance between atom1 and atom3 using the law of cosines
513
+ # c^2 = a^2 + b^2 - 2ab*cos(gamma).
514
+ gamma = ba.angle_rad
515
+ length = np.sqrt(
516
+ bond1.length ** 2
517
+ + bond2.length ** 2
518
+ - 2 * bond1.length * bond2.length * np.cos(gamma)
519
+ )
520
+
521
+ # Propagation of uncertainty assuming uncorrelated errors.
522
+ dl_outer = 0.5 / length
523
+ dl_dgamma = (
524
+ 2 * bond1.length * bond2.length * np.sin(gamma)
525
+ ) * dl_outer
526
+ dl_db1 = (
527
+ 2 * bond1.length - 2 * bond2.length * np.cos(gamma)
528
+ ) * dl_outer
529
+ dl_db2 = (
530
+ 2 * bond2.length - 2 * bond1.length * np.cos(gamma)
531
+ ) * dl_outer
532
+ stddev = np.sqrt(
533
+ (dl_dgamma * ba.stddev) ** 2
534
+ + (dl_db1 * bond1.stddev) ** 2
535
+ + (dl_db2 * bond2.stddev) ** 2
536
+ )
537
+ residue_virtual_bonds[resname].append(
538
+ Bond(ba.atom1_name, ba.atom3name, length, stddev)
539
+ )
540
+
541
+ return (residue_bonds, residue_virtual_bonds, residue_bond_angles)
542
+
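+ # Worked example of the law-of-cosines step above (illustrative numbers, not
+ # values read from stereo_chemical_props.txt): two bonds of length
+ # a = b = 1.52 A meeting at gamma = 111 deg give a virtual 1-3 distance of
+ #   c = sqrt(a**2 + b**2 - 2*a*b*cos(gamma)) ~= 2.51 A,
+ # which is what gets stored in residue_virtual_bonds.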
543
+
544
+ # Between-residue bond lengths for general bonds (first element) and for Proline
545
+ # (second element).
546
+ between_res_bond_length_c_n = [1.329, 1.341]
547
+ between_res_bond_length_stddev_c_n = [0.014, 0.016]
548
+
549
+ # Between-residue cos_angles.
550
+ between_res_cos_angles_c_n_ca = [-0.5203, 0.0353] # degrees: 121.352 +- 2.315
551
+ between_res_cos_angles_ca_c_n = [-0.4473, 0.0311] # degrees: 116.568 +- 1.995
552
+
553
+ # This mapping is used when we need to store atom data in a format that requires
554
+ # fixed atom data size for every residue (e.g. a numpy array).
555
+ atom_types = [
556
+ "N",
557
+ "CA",
558
+ "C",
559
+ "CB",
560
+ "O",
561
+ "CG",
562
+ "CG1",
563
+ "CG2",
564
+ "OG",
565
+ "OG1",
566
+ "SG",
567
+ "CD",
568
+ "CD1",
569
+ "CD2",
570
+ "ND1",
571
+ "ND2",
572
+ "OD1",
573
+ "OD2",
574
+ "SD",
575
+ "CE",
576
+ "CE1",
577
+ "CE2",
578
+ "CE3",
579
+ "NE",
580
+ "NE1",
581
+ "NE2",
582
+ "OE1",
583
+ "OE2",
584
+ "CH2",
585
+ "NH1",
586
+ "NH2",
587
+ "OH",
588
+ "CZ",
589
+ "CZ2",
590
+ "CZ3",
591
+ "NZ",
592
+ "OXT",
593
+ ]
594
+ atom_order = {atom_type: i for i, atom_type in enumerate(atom_types)}
595
+ atom_type_num = len(atom_types) # := 37.
596
+
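+ # Quick sanity example (a hedged illustration, not upstream code): atom_order
+ # inverts atom_types, so "CA" features live at index 1 of an atom37 array and
+ # "OXT" at the last index.
+ assert atom_order["CA"] == 1 and atom_order["OXT"] == atom_type_num - 1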
597
+ # A compact atom encoding with 14 columns
598
+ # pylint: disable=line-too-long
599
+ # pylint: disable=bad-whitespace
600
+ restype_name_to_atom14_names = {
601
+ "ALA": ["N", "CA", "C", "O", "CB", "", "", "", "", "", "", "", "", ""],
602
+ "ARG": [
603
+ "N",
604
+ "CA",
605
+ "C",
606
+ "O",
607
+ "CB",
608
+ "CG",
609
+ "CD",
610
+ "NE",
611
+ "CZ",
612
+ "NH1",
613
+ "NH2",
614
+ "",
615
+ "",
616
+ "",
617
+ ],
618
+ "ASN": [
619
+ "N",
620
+ "CA",
621
+ "C",
622
+ "O",
623
+ "CB",
624
+ "CG",
625
+ "OD1",
626
+ "ND2",
627
+ "",
628
+ "",
629
+ "",
630
+ "",
631
+ "",
632
+ "",
633
+ ],
634
+ "ASP": [
635
+ "N",
636
+ "CA",
637
+ "C",
638
+ "O",
639
+ "CB",
640
+ "CG",
641
+ "OD1",
642
+ "OD2",
643
+ "",
644
+ "",
645
+ "",
646
+ "",
647
+ "",
648
+ "",
649
+ ],
650
+ "CYS": ["N", "CA", "C", "O", "CB", "SG", "", "", "", "", "", "", "", ""],
651
+ "GLN": [
652
+ "N",
653
+ "CA",
654
+ "C",
655
+ "O",
656
+ "CB",
657
+ "CG",
658
+ "CD",
659
+ "OE1",
660
+ "NE2",
661
+ "",
662
+ "",
663
+ "",
664
+ "",
665
+ "",
666
+ ],
667
+ "GLU": [
668
+ "N",
669
+ "CA",
670
+ "C",
671
+ "O",
672
+ "CB",
673
+ "CG",
674
+ "CD",
675
+ "OE1",
676
+ "OE2",
677
+ "",
678
+ "",
679
+ "",
680
+ "",
681
+ "",
682
+ ],
683
+ "GLY": ["N", "CA", "C", "O", "", "", "", "", "", "", "", "", "", ""],
684
+ "HIS": [
685
+ "N",
686
+ "CA",
687
+ "C",
688
+ "O",
689
+ "CB",
690
+ "CG",
691
+ "ND1",
692
+ "CD2",
693
+ "CE1",
694
+ "NE2",
695
+ "",
696
+ "",
697
+ "",
698
+ "",
699
+ ],
700
+ "ILE": [
701
+ "N",
702
+ "CA",
703
+ "C",
704
+ "O",
705
+ "CB",
706
+ "CG1",
707
+ "CG2",
708
+ "CD1",
709
+ "",
710
+ "",
711
+ "",
712
+ "",
713
+ "",
714
+ "",
715
+ ],
716
+ "LEU": [
717
+ "N",
718
+ "CA",
719
+ "C",
720
+ "O",
721
+ "CB",
722
+ "CG",
723
+ "CD1",
724
+ "CD2",
725
+ "",
726
+ "",
727
+ "",
728
+ "",
729
+ "",
730
+ "",
731
+ ],
732
+ "LYS": [
733
+ "N",
734
+ "CA",
735
+ "C",
736
+ "O",
737
+ "CB",
738
+ "CG",
739
+ "CD",
740
+ "CE",
741
+ "NZ",
742
+ "",
743
+ "",
744
+ "",
745
+ "",
746
+ "",
747
+ ],
748
+ "MET": [
749
+ "N",
750
+ "CA",
751
+ "C",
752
+ "O",
753
+ "CB",
754
+ "CG",
755
+ "SD",
756
+ "CE",
757
+ "",
758
+ "",
759
+ "",
760
+ "",
761
+ "",
762
+ "",
763
+ ],
764
+ "PHE": [
765
+ "N",
766
+ "CA",
767
+ "C",
768
+ "O",
769
+ "CB",
770
+ "CG",
771
+ "CD1",
772
+ "CD2",
773
+ "CE1",
774
+ "CE2",
775
+ "CZ",
776
+ "",
777
+ "",
778
+ "",
779
+ ],
780
+ "PRO": ["N", "CA", "C", "O", "CB", "CG", "CD", "", "", "", "", "", "", ""],
781
+ "SER": ["N", "CA", "C", "O", "CB", "OG", "", "", "", "", "", "", "", ""],
782
+ "THR": [
783
+ "N",
784
+ "CA",
785
+ "C",
786
+ "O",
787
+ "CB",
788
+ "OG1",
789
+ "CG2",
790
+ "",
791
+ "",
792
+ "",
793
+ "",
794
+ "",
795
+ "",
796
+ "",
797
+ ],
798
+ "TRP": [
799
+ "N",
800
+ "CA",
801
+ "C",
802
+ "O",
803
+ "CB",
804
+ "CG",
805
+ "CD1",
806
+ "CD2",
807
+ "NE1",
808
+ "CE2",
809
+ "CE3",
810
+ "CZ2",
811
+ "CZ3",
812
+ "CH2",
813
+ ],
814
+ "TYR": [
815
+ "N",
816
+ "CA",
817
+ "C",
818
+ "O",
819
+ "CB",
820
+ "CG",
821
+ "CD1",
822
+ "CD2",
823
+ "CE1",
824
+ "CE2",
825
+ "CZ",
826
+ "OH",
827
+ "",
828
+ "",
829
+ ],
830
+ "VAL": [
831
+ "N",
832
+ "CA",
833
+ "C",
834
+ "O",
835
+ "CB",
836
+ "CG1",
837
+ "CG2",
838
+ "",
839
+ "",
840
+ "",
841
+ "",
842
+ "",
843
+ "",
844
+ "",
845
+ ],
846
+ "UNK": ["", "", "", "", "", "", "", "", "", "", "", "", "", ""],
847
+ }
848
+ # pylint: enable=line-too-long
849
+ # pylint: enable=bad-whitespace
850
+
851
+
852
+ # This is the standard residue order when coding AA type as a number.
853
+ # Reproduce it by taking 3-letter AA codes and sorting them alphabetically.
854
+ restypes = [
855
+ "A",
856
+ "R",
857
+ "N",
858
+ "D",
859
+ "C",
860
+ "Q",
861
+ "E",
862
+ "G",
863
+ "H",
864
+ "I",
865
+ "L",
866
+ "K",
867
+ "M",
868
+ "F",
869
+ "P",
870
+ "S",
871
+ "T",
872
+ "W",
873
+ "Y",
874
+ "V",
875
+ ]
876
+ restype_order = {restype: i for i, restype in enumerate(restypes)}
877
+ restype_num = len(restypes) # := 20.
878
+ unk_restype_index = restype_num # Catch-all index for unknown restypes.
879
+
880
+ restypes_with_x = restypes + ["X"]
881
+ restype_order_with_x = {restype: i for i, restype in enumerate(restypes_with_x)}
882
+
883
+
884
+ def sequence_to_onehot(
885
+ sequence: str, mapping: Mapping[str, int], map_unknown_to_x: bool = False
886
+ ) -> np.ndarray:
887
+ """Maps the given sequence into a one-hot encoded matrix.
888
+
889
+ Args:
890
+ sequence: An amino acid sequence.
891
+ mapping: A dictionary mapping amino acids to integers.
892
+ map_unknown_to_x: If True, any amino acid that is not in the mapping will be
893
+ mapped to the unknown amino acid 'X'. If the mapping doesn't contain
894
+ amino acid 'X', an error will be thrown. If False, any amino acid not in
895
+ the mapping will throw an error.
896
+
897
+ Returns:
898
+ A numpy array of shape (seq_len, num_unique_aas) with one-hot encoding of
899
+ the sequence.
900
+
901
+ Raises:
902
+ ValueError: If the mapping doesn't contain values from 0 to
903
+ num_unique_aas - 1 without any gaps.
904
+ """
905
+ num_entries = max(mapping.values()) + 1
906
+
907
+ if sorted(set(mapping.values())) != list(range(num_entries)):
908
+ raise ValueError(
909
+ "The mapping must have values from 0 to num_unique_aas-1 "
910
+ "without any gaps. Got: %s" % sorted(mapping.values())
911
+ )
912
+
913
+ one_hot_arr = np.zeros((len(sequence), num_entries), dtype=np.int32)
914
+
915
+ for aa_index, aa_type in enumerate(sequence):
916
+ if map_unknown_to_x:
917
+ if aa_type.isalpha() and aa_type.isupper():
918
+ aa_id = mapping.get(aa_type, mapping["X"])
919
+ else:
920
+ raise ValueError(
921
+ f"Invalid character in the sequence: {aa_type}"
922
+ )
923
+ else:
924
+ aa_id = mapping[aa_type]
925
+ one_hot_arr[aa_index, aa_id] = 1
926
+
927
+ return one_hot_arr
928
+
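+ # Usage sketch (an illustrative check, not upstream code): a 3-residue
+ # sequence one-hot encoded against the 21-entry X-augmented mapping defined
+ # above.
+ assert sequence_to_onehot(
+     "ACD", restype_order_with_x, map_unknown_to_x=True
+ ).shape == (3, 21)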
929
+
930
+ restype_1to3 = {
931
+ "A": "ALA",
932
+ "R": "ARG",
933
+ "N": "ASN",
934
+ "D": "ASP",
935
+ "C": "CYS",
936
+ "Q": "GLN",
937
+ "E": "GLU",
938
+ "G": "GLY",
939
+ "H": "HIS",
940
+ "I": "ILE",
941
+ "L": "LEU",
942
+ "K": "LYS",
943
+ "M": "MET",
944
+ "F": "PHE",
945
+ "P": "PRO",
946
+ "S": "SER",
947
+ "T": "THR",
948
+ "W": "TRP",
949
+ "Y": "TYR",
950
+ "V": "VAL",
951
+ }
952
+
953
+
954
+ # NB: restype_3to1 differs from Bio.PDB.protein_letters_3to1 by being a simple
955
+ # 1-to-1 mapping of 3 letter names to one letter names. The latter contains
956
+ # many more, and less common, three letter names as keys and maps many of these
957
+ # to the same one letter name (including 'X' and 'U' which we don't use here).
958
+ restype_3to1 = {v: k for k, v in restype_1to3.items()}
959
+
960
+ # Define a restype name for all unknown residues.
961
+ unk_restype = "UNK"
962
+
963
+ resnames = [restype_1to3[r] for r in restypes] + [unk_restype]
964
+ resname_to_idx = {resname: i for i, resname in enumerate(resnames)}
965
+
966
+
967
+ # The mapping here uses hhblits convention, so that B is mapped to D, J and O
968
+ # are mapped to X, U is mapped to C, and Z is mapped to E. Other than that the
969
+ # remaining 20 amino acids are kept in alphabetical order.
970
+ # There are 2 non-amino acid codes, X (representing any amino acid) and
971
+ # "-" representing a missing amino acid in an alignment. The id for these
972
+ # codes is put at the end (20 and 21) so that they can easily be ignored if
973
+ # desired.
974
+ HHBLITS_AA_TO_ID = {
975
+ "A": 0,
976
+ "B": 2,
977
+ "C": 1,
978
+ "D": 2,
979
+ "E": 3,
980
+ "F": 4,
981
+ "G": 5,
982
+ "H": 6,
983
+ "I": 7,
984
+ "J": 20,
985
+ "K": 8,
986
+ "L": 9,
987
+ "M": 10,
988
+ "N": 11,
989
+ "O": 20,
990
+ "P": 12,
991
+ "Q": 13,
992
+ "R": 14,
993
+ "S": 15,
994
+ "T": 16,
995
+ "U": 1,
996
+ "V": 17,
997
+ "W": 18,
998
+ "X": 20,
999
+ "Y": 19,
1000
+ "Z": 3,
1001
+ "-": 21,
1002
+ }
1003
+
1004
+ # Partial inversion of HHBLITS_AA_TO_ID.
1005
+ ID_TO_HHBLITS_AA = {
1006
+ 0: "A",
1007
+ 1: "C", # Also U.
1008
+ 2: "D", # Also B.
1009
+ 3: "E", # Also Z.
1010
+ 4: "F",
1011
+ 5: "G",
1012
+ 6: "H",
1013
+ 7: "I",
1014
+ 8: "K",
1015
+ 9: "L",
1016
+ 10: "M",
1017
+ 11: "N",
1018
+ 12: "P",
1019
+ 13: "Q",
1020
+ 14: "R",
1021
+ 15: "S",
1022
+ 16: "T",
1023
+ 17: "V",
1024
+ 18: "W",
1025
+ 19: "Y",
1026
+ 20: "X", # Includes J and O.
1027
+ 21: "-",
1028
+ }
1029
+
1030
+ restypes_with_x_and_gap = restypes + ["X", "-"]
1031
+ MAP_HHBLITS_AATYPE_TO_OUR_AATYPE = tuple(
1032
+ restypes_with_x_and_gap.index(ID_TO_HHBLITS_AA[i])
1033
+ for i in range(len(restypes_with_x_and_gap))
1034
+ )
1035
+
1036
+
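+ # Illustrative remap (not upstream code): an hhblits-convention id is
+ # translated into this module's ordering, e.g. "D" keeps its meaning even
+ # though its integer id changes from 2 to 3.
+ assert MAP_HHBLITS_AATYPE_TO_OUR_AATYPE[HHBLITS_AA_TO_ID["D"]] == restype_order["D"]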
1037
+ def _make_standard_atom_mask() -> np.ndarray:
1038
+ """Returns [num_res_types, num_atom_types] mask array."""
1039
+ # +1 to account for unknown (all 0s).
1040
+ mask = np.zeros([restype_num + 1, atom_type_num], dtype=np.int32)
1041
+ for restype, restype_letter in enumerate(restypes):
1042
+ restype_name = restype_1to3[restype_letter]
1043
+ atom_names = residue_atoms[restype_name]
1044
+ for atom_name in atom_names:
1045
+ atom_type = atom_order[atom_name]
1046
+ mask[restype, atom_type] = 1
1047
+ return mask
1048
+
1049
+
1050
+ STANDARD_ATOM_MASK = _make_standard_atom_mask()
1051
+
1052
+
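+ # Illustrative check (not upstream code): glycine has only the four backbone
+ # atoms, so its row of the atom37 mask sums to 4.
+ assert STANDARD_ATOM_MASK[restype_order["G"]].sum() == 4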
1053
+ # A one hot representation for the first and second atoms defining the axis
1054
+ # of rotation for each chi-angle in each residue.
1055
+ def chi_angle_atom(atom_index: int) -> np.ndarray:
1056
+ """Define chi-angle rigid groups via one-hot representations."""
1057
+ chi_angles_index = {}
1058
+ one_hots = []
1059
+
1060
+ for k, v in chi_angles_atoms.items():
1061
+ indices = [atom_types.index(s[atom_index]) for s in v]
1062
+ indices.extend([-1] * (4 - len(indices)))
1063
+ chi_angles_index[k] = indices
1064
+
1065
+ for r in restypes:
1066
+ res3 = restype_1to3[r]
1067
+ one_hot = np.eye(atom_type_num)[chi_angles_index[res3]]
1068
+ one_hots.append(one_hot)
1069
+
1070
+ one_hots.append(np.zeros([4, atom_type_num])) # Add zeros for residue `X`.
1071
+ one_hot = np.stack(one_hots, axis=0)
1072
+ one_hot = np.transpose(one_hot, [0, 2, 1])
1073
+
1074
+ return one_hot
1075
+
1076
+
1077
+ chi_atom_1_one_hot = chi_angle_atom(1)
1078
+ chi_atom_2_one_hot = chi_angle_atom(2)
1079
+
1080
+ # An array like chi_angles_atoms but using indices rather than names.
1081
+ chi_angles_atom_indices = [chi_angles_atoms[restype_1to3[r]] for r in restypes]
1082
+ chi_angles_atom_indices = tree.map_structure(
1083
+ lambda atom_name: atom_order[atom_name], chi_angles_atom_indices
1084
+ )
1085
+ chi_angles_atom_indices = np.array(
1086
+ [
1087
+ chi_atoms + ([[0, 0, 0, 0]] * (4 - len(chi_atoms)))
1088
+ for chi_atoms in chi_angles_atom_indices
1089
+ ]
1090
+ )
1091
+
1092
+ # Mapping from (res_name, atom_name) pairs to the atom's chi group index
1093
+ # and atom index within that group.
1094
+ chi_groups_for_atom = collections.defaultdict(list)
1095
+ for res_name, chi_angle_atoms_for_res in chi_angles_atoms.items():
1096
+ for chi_group_i, chi_group in enumerate(chi_angle_atoms_for_res):
1097
+ for atom_i, atom in enumerate(chi_group):
1098
+ chi_groups_for_atom[(res_name, atom)].append((chi_group_i, atom_i))
1099
+ chi_groups_for_atom = dict(chi_groups_for_atom)
1100
+
1101
+
1102
+ def _make_rigid_transformation_4x4(ex, ey, translation):
1103
+ """Create a rigid 4x4 transformation matrix from two axes and transl."""
1104
+ # Normalize ex.
1105
+ ex_normalized = ex / np.linalg.norm(ex)
1106
+
1107
+ # make ey perpendicular to ex
1108
+ ey_normalized = ey - np.dot(ey, ex_normalized) * ex_normalized
1109
+ ey_normalized /= np.linalg.norm(ey_normalized)
1110
+
1111
+ # compute ez as cross product
1112
+ eznorm = np.cross(ex_normalized, ey_normalized)
1113
+ m = np.stack(
1114
+ [ex_normalized, ey_normalized, eznorm, translation]
1115
+ ).transpose()
1116
+ m = np.concatenate([m, [[0.0, 0.0, 0.0, 1.0]]], axis=0)
1117
+ return m
1118
+
1119
+
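+ # Property sketch (an illustrative check with made-up axes, not upstream
+ # code): the 3x3 block of a frame built by _make_rigid_transformation_4x4 is
+ # orthonormal, so R @ R.T recovers the identity.
+ _example_frame = _make_rigid_transformation_4x4(
+     ex=np.array([1.0, 1.0, 0.0]),
+     ey=np.array([0.0, 1.0, 0.0]),
+     translation=np.zeros(3),
+ )
+ assert np.allclose(_example_frame[:3, :3] @ _example_frame[:3, :3].T, np.eye(3))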
1120
+ # create an array with (restype, atomtype) --> rigid_group_idx
1121
+ # and an array with (restype, atomtype, coord) for the atom positions
1122
+ # and compute affine transformation matrices (4,4) from one rigid group to the
1123
+ # previous group
1124
+ restype_atom37_to_rigid_group = np.zeros([21, 37], dtype=int)
1125
+ restype_atom37_mask = np.zeros([21, 37], dtype=np.float32)
1126
+ restype_atom37_rigid_group_positions = np.zeros([21, 37, 3], dtype=np.float32)
1127
+ restype_atom14_to_rigid_group = np.zeros([21, 14], dtype=int)
1128
+ restype_atom14_mask = np.zeros([21, 14], dtype=np.float32)
1129
+ restype_atom14_rigid_group_positions = np.zeros([21, 14, 3], dtype=np.float32)
1130
+ restype_rigid_group_default_frame = np.zeros([21, 8, 4, 4], dtype=np.float32)
1131
+
1132
+
1133
+ def _make_rigid_group_constants():
1134
+ """Fill the arrays above."""
1135
+ for restype, restype_letter in enumerate(restypes):
1136
+ resname = restype_1to3[restype_letter]
1137
+ for atomname, group_idx, atom_position in rigid_group_atom_positions[
1138
+ resname
1139
+ ]:
1140
+ atomtype = atom_order[atomname]
1141
+ restype_atom37_to_rigid_group[restype, atomtype] = group_idx
1142
+ restype_atom37_mask[restype, atomtype] = 1
1143
+ restype_atom37_rigid_group_positions[
1144
+ restype, atomtype, :
1145
+ ] = atom_position
1146
+
1147
+ atom14idx = restype_name_to_atom14_names[resname].index(atomname)
1148
+ restype_atom14_to_rigid_group[restype, atom14idx] = group_idx
1149
+ restype_atom14_mask[restype, atom14idx] = 1
1150
+ restype_atom14_rigid_group_positions[
1151
+ restype, atom14idx, :
1152
+ ] = atom_position
1153
+
1154
+ for restype, restype_letter in enumerate(restypes):
1155
+ resname = restype_1to3[restype_letter]
1156
+ atom_positions = {
1157
+ name: np.array(pos)
1158
+ for name, _, pos in rigid_group_atom_positions[resname]
1159
+ }
1160
+
1161
+ # backbone to backbone is the identity transform
1162
+ restype_rigid_group_default_frame[restype, 0, :, :] = np.eye(4)
1163
+
1164
+ # pre-omega-frame to backbone (currently dummy identity matrix)
1165
+ restype_rigid_group_default_frame[restype, 1, :, :] = np.eye(4)
1166
+
1167
+ # phi-frame to backbone
1168
+ mat = _make_rigid_transformation_4x4(
1169
+ ex=atom_positions["N"] - atom_positions["CA"],
1170
+ ey=np.array([1.0, 0.0, 0.0]),
1171
+ translation=atom_positions["N"],
1172
+ )
1173
+ restype_rigid_group_default_frame[restype, 2, :, :] = mat
1174
+
1175
+ # psi-frame to backbone
1176
+ mat = _make_rigid_transformation_4x4(
1177
+ ex=atom_positions["C"] - atom_positions["CA"],
1178
+ ey=atom_positions["CA"] - atom_positions["N"],
1179
+ translation=atom_positions["C"],
1180
+ )
1181
+ restype_rigid_group_default_frame[restype, 3, :, :] = mat
1182
+
1183
+ # chi1-frame to backbone
1184
+ if chi_angles_mask[restype][0]:
1185
+ base_atom_names = chi_angles_atoms[resname][0]
1186
+ base_atom_positions = [
1187
+ atom_positions[name] for name in base_atom_names
1188
+ ]
1189
+ mat = _make_rigid_transformation_4x4(
1190
+ ex=base_atom_positions[2] - base_atom_positions[1],
1191
+ ey=base_atom_positions[0] - base_atom_positions[1],
1192
+ translation=base_atom_positions[2],
1193
+ )
1194
+ restype_rigid_group_default_frame[restype, 4, :, :] = mat
1195
+
1196
+ # chi2-frame to chi1-frame
1197
+ # chi3-frame to chi2-frame
1198
+ # chi4-frame to chi3-frame
1199
+ # luckily all rotation axes for the next frame start at (0,0,0) of the
1200
+ # previous frame
1201
+ for chi_idx in range(1, 4):
1202
+ if chi_angles_mask[restype][chi_idx]:
1203
+ axis_end_atom_name = chi_angles_atoms[resname][chi_idx][2]
1204
+ axis_end_atom_position = atom_positions[axis_end_atom_name]
1205
+ mat = _make_rigid_transformation_4x4(
1206
+ ex=axis_end_atom_position,
1207
+ ey=np.array([-1.0, 0.0, 0.0]),
1208
+ translation=axis_end_atom_position,
1209
+ )
1210
+ restype_rigid_group_default_frame[
1211
+ restype, 4 + chi_idx, :, :
1212
+ ] = mat
1213
+
1214
+
1215
+ _make_rigid_group_constants()
1216
+
1217
+
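+ # Illustrative check (not upstream code): after _make_rigid_group_constants()
+ # runs, every residue's backbone-to-backbone frame (group 0) is the identity.
+ assert np.allclose(restype_rigid_group_default_frame[restype_order["A"], 0], np.eye(4))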
1218
+ def make_atom14_dists_bounds(
1219
+ overlap_tolerance=1.5, bond_length_tolerance_factor=15
1220
+ ):
1221
+ """compute upper and lower bounds for bonds to assess violations."""
1222
+ restype_atom14_bond_lower_bound = np.zeros([21, 14, 14], np.float32)
1223
+ restype_atom14_bond_upper_bound = np.zeros([21, 14, 14], np.float32)
1224
+ restype_atom14_bond_stddev = np.zeros([21, 14, 14], np.float32)
1225
+ residue_bonds, residue_virtual_bonds, _ = load_stereo_chemical_props()
1226
+ for restype, restype_letter in enumerate(restypes):
1227
+ resname = restype_1to3[restype_letter]
1228
+ atom_list = restype_name_to_atom14_names[resname]
1229
+
1230
+ # create lower and upper bounds for clashes
1231
+ for atom1_idx, atom1_name in enumerate(atom_list):
1232
+ if not atom1_name:
1233
+ continue
1234
+ atom1_radius = van_der_waals_radius[atom1_name[0]]
1235
+ for atom2_idx, atom2_name in enumerate(atom_list):
1236
+ if (not atom2_name) or atom1_idx == atom2_idx:
1237
+ continue
1238
+ atom2_radius = van_der_waals_radius[atom2_name[0]]
1239
+ lower = atom1_radius + atom2_radius - overlap_tolerance
1240
+ upper = 1e10
1241
+ restype_atom14_bond_lower_bound[
1242
+ restype, atom1_idx, atom2_idx
1243
+ ] = lower
1244
+ restype_atom14_bond_lower_bound[
1245
+ restype, atom2_idx, atom1_idx
1246
+ ] = lower
1247
+ restype_atom14_bond_upper_bound[
1248
+ restype, atom1_idx, atom2_idx
1249
+ ] = upper
1250
+ restype_atom14_bond_upper_bound[
1251
+ restype, atom2_idx, atom1_idx
1252
+ ] = upper
1253
+
1254
+ # overwrite lower and upper bounds for bonds and angles
1255
+ for b in residue_bonds[resname] + residue_virtual_bonds[resname]:
1256
+ atom1_idx = atom_list.index(b.atom1_name)
1257
+ atom2_idx = atom_list.index(b.atom2_name)
1258
+ lower = b.length - bond_length_tolerance_factor * b.stddev
1259
+ upper = b.length + bond_length_tolerance_factor * b.stddev
1260
+ restype_atom14_bond_lower_bound[
1261
+ restype, atom1_idx, atom2_idx
1262
+ ] = lower
1263
+ restype_atom14_bond_lower_bound[
1264
+ restype, atom2_idx, atom1_idx
1265
+ ] = lower
1266
+ restype_atom14_bond_upper_bound[
1267
+ restype, atom1_idx, atom2_idx
1268
+ ] = upper
1269
+ restype_atom14_bond_upper_bound[
1270
+ restype, atom2_idx, atom1_idx
1271
+ ] = upper
1272
+ restype_atom14_bond_stddev[restype, atom1_idx, atom2_idx] = b.stddev
1273
+ restype_atom14_bond_stddev[restype, atom2_idx, atom1_idx] = b.stddev
1274
+ return {
1275
+ "lower_bound": restype_atom14_bond_lower_bound, # shape (21,14,14)
1276
+ "upper_bound": restype_atom14_bond_upper_bound, # shape (21,14,14)
1277
+ "stddev": restype_atom14_bond_stddev, # shape (21,14,14)
1278
+ }
1279
+
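+ # Usage sketch (hypothetical call shown as a comment only, since
+ # load_stereo_chemical_props() needs the packaged stereo_chemical_props.txt):
+ #   bounds = make_atom14_dists_bounds()
+ #   lower = bounds["lower_bound"][restype_order["F"]]  # (14, 14) clash floor for PHE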
1280
+
1281
+ restype_atom14_ambiguous_atoms = np.zeros((21, 14), dtype=np.float32)
1282
+ restype_atom14_ambiguous_atoms_swap_idx = np.tile(
1283
+ np.arange(14, dtype=int), (21, 1)
1284
+ )
1285
+
1286
+
1287
+ def _make_atom14_ambiguity_feats():
1288
+ for res, pairs in residue_atom_renaming_swaps.items():
1289
+ res_idx = restype_order[restype_3to1[res]]
1290
+ for atom1, atom2 in pairs.items():
1291
+ atom1_idx = restype_name_to_atom14_names[res].index(atom1)
1292
+ atom2_idx = restype_name_to_atom14_names[res].index(atom2)
1293
+ restype_atom14_ambiguous_atoms[res_idx, atom1_idx] = 1
1294
+ restype_atom14_ambiguous_atoms[res_idx, atom2_idx] = 1
1295
+ restype_atom14_ambiguous_atoms_swap_idx[
1296
+ res_idx, atom1_idx
1297
+ ] = atom2_idx
1298
+ restype_atom14_ambiguous_atoms_swap_idx[
1299
+ res_idx, atom2_idx
1300
+ ] = atom1_idx
1301
+
1302
+
1303
+ _make_atom14_ambiguity_feats()
1304
+
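+ # Illustrative check (not upstream code): GLU's carboxylate oxygens are
+ # flagged as ambiguous and point at each other as swap partners.
+ _glu = restype_order["E"]
+ _oe1 = restype_name_to_atom14_names["GLU"].index("OE1")
+ _oe2 = restype_name_to_atom14_names["GLU"].index("OE2")
+ assert restype_atom14_ambiguous_atoms[_glu, _oe1] == 1
+ assert restype_atom14_ambiguous_atoms_swap_idx[_glu, _oe1] == _oe2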
1305
+
1306
+ def aatype_to_str_sequence(aatype):
1307
+ return ''.join([
1308
+ restypes_with_x[aatype[i]]
1309
+ for i in range(len(aatype))
1310
+ ])
1311
+
1312
+
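+ # Round-trip sketch (an illustrative check, not upstream code): encoding a
+ # string to aatype indices and decoding it again is the identity, including X.
+ assert aatype_to_str_sequence([restype_order_with_x[c] for c in "ACDX"]) == "ACDX"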
1313
+ ### ALPHAFOLD MULTIMER STUFF ###
1314
+ def _make_chi_atom_indices():
1315
+ """Returns atom indices needed to compute chi angles for all residue types.
1316
+
1317
+ Returns:
1318
+ A tensor of shape [residue_types=21, chis=4, atoms=4]. The residue types are
1319
+ in the order specified in residue_constants.restypes + unknown residue type
1320
+ at the end. For chi angles which are not defined on the residue, the
1321
+ positions indices are by default set to 0.
1322
+ """
1323
+ chi_atom_indices = []
1324
+ for residue_name in restypes:
1325
+ residue_name = restype_1to3[residue_name]
1326
+ residue_chi_angles = chi_angles_atoms[residue_name]
1327
+ atom_indices = []
1328
+ for chi_angle in residue_chi_angles:
1329
+ atom_indices.append(
1330
+ [atom_order[atom] for atom in chi_angle])
1331
+ for _ in range(4 - len(atom_indices)):
1332
+ atom_indices.append([0, 0, 0, 0]) # For chi angles not defined on the AA.
1333
+ chi_atom_indices.append(atom_indices)
1334
+
1335
+ chi_atom_indices.append([[0, 0, 0, 0]] * 4) # For UNKNOWN residue.
1336
+
1337
+ return np.array(chi_atom_indices)
1338
+
1339
+
1340
+ def _make_renaming_matrices():
1341
+ """Matrices to map atoms to symmetry partners in ambiguous case."""
1342
+ # As the atom naming is ambiguous for 7 of the 20 amino acids, provide
1343
+ # alternative groundtruth coordinates where the naming is swapped
1344
+ restype_3 = [
1345
+ restype_1to3[res] for res in restypes
1346
+ ]
1347
+ restype_3 += ['UNK']
1348
+ # Matrices for renaming ambiguous atoms.
1349
+ all_matrices = {res: np.eye(14, dtype=np.float32) for res in restype_3}
1350
+ for resname, swap in residue_atom_renaming_swaps.items():
1351
+ correspondences = np.arange(14)
1352
+ for source_atom_swap, target_atom_swap in swap.items():
1353
+ source_index = restype_name_to_atom14_names[
1354
+ resname].index(source_atom_swap)
1355
+ target_index = restype_name_to_atom14_names[
1356
+ resname].index(target_atom_swap)
1357
+ correspondences[source_index] = target_index
1358
+ correspondences[target_index] = source_index
1359
+ renaming_matrix = np.zeros((14, 14), dtype=np.float32)
1360
+ for index, correspondence in enumerate(correspondences):
1361
+ renaming_matrix[index, correspondence] = 1.
1362
+ all_matrices[resname] = renaming_matrix.astype(np.float32)
1363
+ renaming_matrices = np.stack([all_matrices[restype] for restype in restype_3])
1364
+ return renaming_matrices
1365
+
1366
+
1367
+ def _make_restype_atom37_mask():
1368
+ """Mask of which atoms are present for which residue type in atom37."""
1369
+ # create the corresponding mask
1370
+ restype_atom37_mask = np.zeros([21, 37], dtype=np.float32)
1371
+ for restype, restype_letter in enumerate(restypes):
1372
+ restype_name = restype_1to3[restype_letter]
1373
+ atom_names = residue_atoms[restype_name]
1374
+ for atom_name in atom_names:
1375
+ atom_type = atom_order[atom_name]
1376
+ restype_atom37_mask[restype, atom_type] = 1
1377
+ return restype_atom37_mask
1378
+
1379
+
1380
+ def _make_restype_atom14_mask():
1381
+ """Mask of which atoms are present for which residue type in atom14."""
1382
+ restype_atom14_mask = []
1383
+
1384
+ for rt in restypes:
1385
+ atom_names = restype_name_to_atom14_names[
1386
+ restype_1to3[rt]]
1387
+ restype_atom14_mask.append([(1. if name else 0.) for name in atom_names])
1388
+
1389
+ restype_atom14_mask.append([0.] * 14)
1390
+ restype_atom14_mask = np.array(restype_atom14_mask, dtype=np.float32)
1391
+ return restype_atom14_mask
1392
+
1393
+
1394
+ def _make_restype_atom37_to_atom14():
1395
+ """Map from atom37 to atom14 per residue type."""
1396
+ restype_atom37_to_atom14 = [] # mapping (restype, atom37) --> atom14
1397
+ for rt in restypes:
1398
+ atom_names = restype_name_to_atom14_names[
1399
+ restype_1to3[rt]]
1400
+ atom_name_to_idx14 = {name: i for i, name in enumerate(atom_names)}
1401
+ restype_atom37_to_atom14.append([
1402
+ (atom_name_to_idx14[name] if name in atom_name_to_idx14 else 0)
1403
+ for name in atom_types
1404
+ ])
1405
+
1406
+ restype_atom37_to_atom14.append([0] * 37)
1407
+ restype_atom37_to_atom14 = np.array(restype_atom37_to_atom14, dtype=np.int32)
1408
+ return restype_atom37_to_atom14
1409
+
1410
+
1411
+ def _make_restype_atom14_to_atom37():
1412
+ """Map from atom14 to atom37 per residue type."""
1413
+ restype_atom14_to_atom37 = [] # mapping (restype, atom14) --> atom37
1414
+ for rt in restypes:
1415
+ atom_names = restype_name_to_atom14_names[
1416
+ restype_1to3[rt]]
1417
+ restype_atom14_to_atom37.append([
1418
+ (atom_order[name] if name else 0)
1419
+ for name in atom_names
1420
+ ])
1421
+ # Add dummy mapping for restype 'UNK'
1422
+ restype_atom14_to_atom37.append([0] * 14)
1423
+ restype_atom14_to_atom37 = np.array(restype_atom14_to_atom37, dtype=np.int32)
1424
+ return restype_atom14_to_atom37
1425
+
1426
+
1427
+ def _make_restype_atom14_is_ambiguous():
1428
+ """Mask which atoms are ambiguous in atom14."""
1429
+ # create an ambiguous atoms mask. shape: (21, 14)
1430
+ restype_atom14_is_ambiguous = np.zeros((21, 14), dtype=np.float32)
1431
+ for resname, swap in residue_atom_renaming_swaps.items():
1432
+ for atom_name1, atom_name2 in swap.items():
1433
+ restype = restype_order[
1434
+ restype_3to1[resname]]
1435
+ atom_idx1 = restype_name_to_atom14_names[resname].index(
1436
+ atom_name1)
1437
+ atom_idx2 = restype_name_to_atom14_names[resname].index(
1438
+ atom_name2)
1439
+ restype_atom14_is_ambiguous[restype, atom_idx1] = 1
1440
+ restype_atom14_is_ambiguous[restype, atom_idx2] = 1
1441
+
1442
+ return restype_atom14_is_ambiguous
1443
+
1444
+
1445
+ def _make_restype_rigidgroup_base_atom37_idx():
1446
+ """Create Map from rigidgroups to atom37 indices."""
1447
+ # Create an array with the atom names.
1448
+ # shape (num_restypes, num_rigidgroups, 3_atoms): (21, 8, 3)
1449
+ base_atom_names = np.full([21, 8, 3], '', dtype=object)
1450
+
1451
+ # 0: backbone frame
1452
+ base_atom_names[:, 0, :] = ['C', 'CA', 'N']
1453
+
1454
+ # 3: 'psi-group'
1455
+ base_atom_names[:, 3, :] = ['CA', 'C', 'O']
1456
+
1457
+ # 4,5,6,7: 'chi1,2,3,4-group'
1458
+ for restype, restype_letter in enumerate(restypes):
1459
+ resname = restype_1to3[restype_letter]
1460
+ for chi_idx in range(4):
1461
+ if chi_angles_mask[restype][chi_idx]:
1462
+ atom_names = chi_angles_atoms[resname][chi_idx]
1463
+ base_atom_names[restype, chi_idx + 4, :] = atom_names[1:]
1464
+
1465
+ # Translate atom names into atom37 indices.
1466
+ lookuptable = atom_order.copy()
1467
+ lookuptable[''] = 0
1468
+ restype_rigidgroup_base_atom37_idx = np.vectorize(lambda x: lookuptable[x])(
1469
+ base_atom_names)
1470
+ return restype_rigidgroup_base_atom37_idx
1471
+
1472
+
1473
+ CHI_ATOM_INDICES = _make_chi_atom_indices()
1474
+ RENAMING_MATRICES = _make_renaming_matrices()
1475
+ RESTYPE_ATOM14_TO_ATOM37 = _make_restype_atom14_to_atom37()
1476
+ RESTYPE_ATOM37_TO_ATOM14 = _make_restype_atom37_to_atom14()
1477
+ RESTYPE_ATOM37_MASK = _make_restype_atom37_mask()
1478
+ RESTYPE_ATOM14_MASK = _make_restype_atom14_mask()
1479
+ RESTYPE_ATOM14_IS_AMBIGUOUS = _make_restype_atom14_is_ambiguous()
1480
+ RESTYPE_RIGIDGROUP_BASE_ATOM37_IDX = _make_restype_rigidgroup_base_atom37_idx()
1481
+
1482
+ # Create mask for existing rigid groups.
1483
+ RESTYPE_RIGIDGROUP_MASK = np.zeros([21, 8], dtype=np.float32)
1484
+ RESTYPE_RIGIDGROUP_MASK[:, 0] = 1
1485
+ RESTYPE_RIGIDGROUP_MASK[:, 3] = 1
1486
+ RESTYPE_RIGIDGROUP_MASK[:20, 4:] = chi_angles_mask
data/mdgen/rigid_utils.py ADDED
@@ -0,0 +1,1391 @@
1
+ # Copyright 2021 AlQuraishi Laboratory
2
+ # Copyright 2021 DeepMind Technologies Limited
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+
16
+ from __future__ import annotations
17
+ from functools import lru_cache
18
+ from typing import Tuple, Any, Sequence, Callable, Optional
19
+
20
+ import numpy as np
21
+ import torch
22
+
23
+
24
+ def rot_matmul(
25
+ a: torch.Tensor,
26
+ b: torch.Tensor
27
+ ) -> torch.Tensor:
28
+ """
29
+ Performs matrix multiplication of two rotation matrix tensors. Written
30
+ out by hand to avoid AMP downcasting.
31
+
32
+ Args:
33
+ a: [*, 3, 3] left multiplicand
34
+ b: [*, 3, 3] right multiplicand
35
+ Returns:
36
+ The product ab
37
+ """
38
+ def row_mul(i):
39
+ return torch.stack(
40
+ [
41
+ a[..., i, 0] * b[..., 0, 0]
42
+ + a[..., i, 1] * b[..., 1, 0]
43
+ + a[..., i, 2] * b[..., 2, 0],
44
+ a[..., i, 0] * b[..., 0, 1]
45
+ + a[..., i, 1] * b[..., 1, 1]
46
+ + a[..., i, 2] * b[..., 2, 1],
47
+ a[..., i, 0] * b[..., 0, 2]
48
+ + a[..., i, 1] * b[..., 1, 2]
49
+ + a[..., i, 2] * b[..., 2, 2],
50
+ ],
51
+ dim=-1,
52
+ )
53
+
54
+ return torch.stack(
55
+ [
56
+ row_mul(0),
57
+ row_mul(1),
58
+ row_mul(2),
59
+ ],
60
+ dim=-2
61
+ )
62
+
63
+
64
+ def rot_vec_mul(
65
+ r: torch.Tensor,
66
+ t: torch.Tensor
67
+ ) -> torch.Tensor:
68
+ """
69
+ Applies a rotation to a vector. Written out by hand to avoid AMP
71
+ downcasting.
71
+
72
+ Args:
73
+ r: [*, 3, 3] rotation matrices
74
+ t: [*, 3] coordinate tensors
75
+ Returns:
76
+ [*, 3] rotated coordinates
77
+ """
78
+ x, y, z = torch.unbind(t, dim=-1)
79
+ return torch.stack(
80
+ [
81
+ r[..., 0, 0] * x + r[..., 0, 1] * y + r[..., 0, 2] * z,
82
+ r[..., 1, 0] * x + r[..., 1, 1] * y + r[..., 1, 2] * z,
83
+ r[..., 2, 0] * x + r[..., 2, 1] * y + r[..., 2, 2] * z,
84
+ ],
85
+ dim=-1,
86
+ )
87
+
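+ # Equivalence sketch (illustrative doctest-style comment, not upstream code):
+ # rot_vec_mul computes the same product as a batched einsum, just written
+ # element-wise so autocast cannot downcast it.
+ #   >>> r, t = torch.eye(3).expand(2, 3, 3), torch.randn(2, 3)
+ #   >>> torch.allclose(rot_vec_mul(r, t), torch.einsum("...ij,...j->...i", r, t))
+ #   True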
88
+ @lru_cache(maxsize=None)
89
+ def identity_rot_mats(
90
+ batch_dims: Tuple[int],
91
+ dtype: Optional[torch.dtype] = None,
92
+ device: Optional[torch.device] = None,
93
+ requires_grad: bool = True,
94
+ ) -> torch.Tensor:
95
+ rots = torch.eye(
96
+ 3, dtype=dtype, device=device, requires_grad=requires_grad
97
+ )
98
+ rots = rots.view(*((1,) * len(batch_dims)), 3, 3)
99
+ rots = rots.expand(*batch_dims, -1, -1)
100
+ rots = rots.contiguous()
101
+
102
+ return rots
103
+
104
+
105
+ @lru_cache(maxsize=None)
106
+ def identity_trans(
107
+ batch_dims: Tuple[int],
108
+ dtype: Optional[torch.dtype] = None,
109
+ device: Optional[torch.device] = None,
110
+ requires_grad: bool = True,
111
+ ) -> torch.Tensor:
112
+ trans = torch.zeros(
113
+ (*batch_dims, 3),
114
+ dtype=dtype,
115
+ device=device,
116
+ requires_grad=requires_grad
117
+ )
118
+ return trans
119
+
120
+
121
+ @lru_cache(maxsize=None)
122
+ def identity_quats(
123
+ batch_dims: Tuple[int],
124
+ dtype: Optional[torch.dtype] = None,
125
+ device: Optional[torch.device] = None,
126
+ requires_grad: bool = True,
127
+ ) -> torch.Tensor:
128
+ quat = torch.zeros(
129
+ (*batch_dims, 4),
130
+ dtype=dtype,
131
+ device=device,
132
+ requires_grad=requires_grad
133
+ )
134
+
135
+ with torch.no_grad():
136
+ quat[..., 0] = 1
137
+
138
+ return quat
139
+
140
+
141
+ _quat_elements = ["a", "b", "c", "d"]
142
+ _qtr_keys = [l1 + l2 for l1 in _quat_elements for l2 in _quat_elements]
143
+ _qtr_ind_dict = {key: ind for ind, key in enumerate(_qtr_keys)}
144
+
145
+
146
+ def _to_mat(pairs):
147
+ mat = np.zeros((4, 4))
148
+ for pair in pairs:
149
+ key, value = pair
150
+ ind = _qtr_ind_dict[key]
151
+ mat[ind // 4][ind % 4] = value
152
+
153
+ return mat
154
+
155
+
156
+ _QTR_MAT = np.zeros((4, 4, 3, 3))
157
+ _QTR_MAT[..., 0, 0] = _to_mat([("aa", 1), ("bb", 1), ("cc", -1), ("dd", -1)])
158
+ _QTR_MAT[..., 0, 1] = _to_mat([("bc", 2), ("ad", -2)])
159
+ _QTR_MAT[..., 0, 2] = _to_mat([("bd", 2), ("ac", 2)])
160
+ _QTR_MAT[..., 1, 0] = _to_mat([("bc", 2), ("ad", 2)])
161
+ _QTR_MAT[..., 1, 1] = _to_mat([("aa", 1), ("bb", -1), ("cc", 1), ("dd", -1)])
162
+ _QTR_MAT[..., 1, 2] = _to_mat([("cd", 2), ("ab", -2)])
163
+ _QTR_MAT[..., 2, 0] = _to_mat([("bd", 2), ("ac", -2)])
164
+ _QTR_MAT[..., 2, 1] = _to_mat([("cd", 2), ("ab", 2)])
165
+ _QTR_MAT[..., 2, 2] = _to_mat([("aa", 1), ("bb", -1), ("cc", -1), ("dd", 1)])
166
+
167
+
168
+ def quat_to_rot(quat: torch.Tensor) -> torch.Tensor:
169
+ """
170
+ Converts a quaternion to a rotation matrix.
171
+
172
+ Args:
173
+ quat: [*, 4] quaternions
174
+ Returns:
175
+ [*, 3, 3] rotation matrices
176
+ """
177
+ # [*, 4, 4]
178
+ quat = quat[..., None] * quat[..., None, :]
179
+
180
+ # [4, 4, 3, 3]
181
+ mat = _get_quat("_QTR_MAT", dtype=quat.dtype, device=quat.device)
182
+
183
+ # [*, 4, 4, 3, 3]
184
+ shaped_qtr_mat = mat.view((1,) * len(quat.shape[:-2]) + mat.shape)
185
+ quat = quat[..., None, None] * shaped_qtr_mat
186
+
187
+ # [*, 3, 3]
188
+ return torch.sum(quat, dim=(-3, -4))
189
+
190
+
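+ # Sanity sketch (illustrative doctest-style comment, not upstream code): the
+ # identity quaternion maps to the identity rotation.
+ #   >>> quat_to_rot(torch.tensor([1.0, 0.0, 0.0, 0.0]))
+ #   tensor([[1., 0., 0.],
+ #           [0., 1., 0.],
+ #           [0., 0., 1.]])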
191
+ def rot_to_quat(
192
+ rot: torch.Tensor,
193
+ ):
194
+ if(rot.shape[-2:] != (3, 3)):
195
+ raise ValueError("Input rotation is incorrectly shaped")
196
+
197
+ rot = [[rot[..., i, j] for j in range(3)] for i in range(3)]
198
+ [[xx, xy, xz], [yx, yy, yz], [zx, zy, zz]] = rot
199
+
200
+ k = [
201
+ [ xx + yy + zz, zy - yz, xz - zx, yx - xy,],
202
+ [ zy - yz, xx - yy - zz, xy + yx, xz + zx,],
203
+ [ xz - zx, xy + yx, yy - xx - zz, yz + zy,],
204
+ [ yx - xy, xz + zx, yz + zy, zz - xx - yy,]
205
+ ]
206
+
207
+ k = (1./3.) * torch.stack([torch.stack(t, dim=-1) for t in k], dim=-2)
208
+
209
+ _, vectors = torch.linalg.eigh(k)
210
+ return vectors[..., -1]
211
+
212
+
213
+ _QUAT_MULTIPLY = np.zeros((4, 4, 4))
214
+ _QUAT_MULTIPLY[:, :, 0] = [[ 1, 0, 0, 0],
215
+ [ 0,-1, 0, 0],
216
+ [ 0, 0,-1, 0],
217
+ [ 0, 0, 0,-1]]
218
+
219
+ _QUAT_MULTIPLY[:, :, 1] = [[ 0, 1, 0, 0],
220
+ [ 1, 0, 0, 0],
221
+ [ 0, 0, 0, 1],
222
+ [ 0, 0,-1, 0]]
223
+
224
+ _QUAT_MULTIPLY[:, :, 2] = [[ 0, 0, 1, 0],
225
+ [ 0, 0, 0,-1],
226
+ [ 1, 0, 0, 0],
227
+ [ 0, 1, 0, 0]]
228
+
229
+ _QUAT_MULTIPLY[:, :, 3] = [[ 0, 0, 0, 1],
230
+ [ 0, 0, 1, 0],
231
+ [ 0,-1, 0, 0],
232
+ [ 1, 0, 0, 0]]
233
+
234
+ _QUAT_MULTIPLY_BY_VEC = _QUAT_MULTIPLY[:, 1:, :]
235
+
236
+ _CACHED_QUATS = {
237
+ "_QTR_MAT": _QTR_MAT,
238
+ "_QUAT_MULTIPLY": _QUAT_MULTIPLY,
239
+ "_QUAT_MULTIPLY_BY_VEC": _QUAT_MULTIPLY_BY_VEC
240
+ }
241
+
242
+ @lru_cache(maxsize=None)
243
+ def _get_quat(quat_key, dtype, device):
244
+ return torch.tensor(_CACHED_QUATS[quat_key], dtype=dtype, device=device)
245
+
246
+
247
+ def quat_multiply(quat1, quat2):
248
+ """Multiply a quaternion by another quaternion."""
249
+ mat = _get_quat("_QUAT_MULTIPLY", dtype=quat1.dtype, device=quat1.device)
250
+ reshaped_mat = mat.view((1,) * len(quat1.shape[:-1]) + mat.shape)
251
+ return torch.sum(
252
+ reshaped_mat *
253
+ quat1[..., :, None, None] *
254
+ quat2[..., None, :, None],
255
+ dim=(-3, -2)
256
+ )
257
+
258
+
259
+ def quat_multiply_by_vec(quat, vec):
260
+ """Multiply a quaternion by a pure-vector quaternion."""
261
+ mat = _get_quat("_QUAT_MULTIPLY_BY_VEC", dtype=quat.dtype, device=quat.device)
262
+ reshaped_mat = mat.view((1,) * len(quat.shape[:-1]) + mat.shape)
263
+ return torch.sum(
264
+ reshaped_mat *
265
+ quat[..., :, None, None] *
266
+ vec[..., None, :, None],
267
+ dim=(-3, -2)
268
+ )
269
+
270
+
271
+ def invert_rot_mat(rot_mat: torch.Tensor):
272
+ return rot_mat.transpose(-1, -2)
273
+
274
+
275
+ def invert_quat(quat: torch.Tensor):
276
+ quat_prime = quat.clone()
277
+ quat_prime[..., 1:] *= -1
278
+ inv = quat_prime / torch.sum(quat ** 2, dim=-1, keepdim=True)
279
+ return inv
280
+
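+ # Property sketch (illustrative doctest-style comment, not upstream code):
+ # composing a unit quaternion with its inverse gives the identity quaternion.
+ #   >>> q = torch.tensor([0.0, 1.0, 0.0, 0.0])
+ #   >>> quat_multiply(q, invert_quat(q))
+ #   tensor([1., 0., 0., 0.])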
281
+
282
+ class Rotation:
283
+ """
284
+ A 3D rotation. Depending on how the object is initialized, the
285
+ rotation is represented by either a rotation matrix or a
286
+ quaternion, though both formats are made available by helper functions.
287
+ To simplify gradient computation, the underlying format of the
288
+ rotation cannot be changed in-place. Like Rigid, the class is designed
289
+ to mimic the behavior of a torch Tensor, almost as if each Rotation
290
+ object were a tensor of rotations, in one format or another.
291
+ """
292
+ def __init__(self,
293
+ rot_mats: Optional[torch.Tensor] = None,
294
+ quats: Optional[torch.Tensor] = None,
295
+ normalize_quats: bool = True,
296
+ ):
297
+ """
298
+ Args:
299
+ rot_mats:
300
+ A [*, 3, 3] rotation matrix tensor. Mutually exclusive with
301
+ quats
302
+ quats:
303
+ A [*, 4] quaternion. Mutually exclusive with rot_mats. If
304
+ normalize_quats is not True, must be a unit quaternion
305
+ normalize_quats:
306
+ If quats is specified, whether to normalize quats
307
+ """
308
+ if((rot_mats is None and quats is None) or
309
+ (rot_mats is not None and quats is not None)):
310
+ raise ValueError("Exactly one input argument must be specified")
311
+
312
+ if((rot_mats is not None and rot_mats.shape[-2:] != (3, 3)) or
313
+ (quats is not None and quats.shape[-1] != 4)):
314
+ raise ValueError(
315
+ "Incorrectly shaped rotation matrix or quaternion"
316
+ )
317
+
318
+ # Force full-precision
319
+ if(quats is not None):
320
+ quats = quats.to(dtype=torch.float32)
321
+ if(rot_mats is not None):
322
+ rot_mats = rot_mats.to(dtype=torch.float32)
323
+
324
+ if(quats is not None and normalize_quats):
325
+ quats = quats / torch.linalg.norm(quats, dim=-1, keepdim=True)
326
+
327
+ self._rot_mats = rot_mats
328
+ self._quats = quats
329
+
330
+ @staticmethod
331
+ def identity(
332
+ shape,
333
+ dtype: Optional[torch.dtype] = None,
334
+ device: Optional[torch.device] = None,
335
+ requires_grad: bool = True,
336
+ fmt: str = "quat",
337
+ ) -> Rotation:
338
+ """
339
+ Returns an identity Rotation.
340
+
341
+ Args:
342
+ shape:
343
+ The "shape" of the resulting Rotation object. See documentation
344
+ for the shape property
345
+ dtype:
346
+ The torch dtype for the rotation
347
+ device:
348
+ The torch device for the new rotation
349
+ requires_grad:
350
+ Whether the underlying tensors in the new rotation object
351
+ should require gradient computation
352
+ fmt:
353
+ One of "quat" or "rot_mat". Determines the underlying format
354
+ of the new object's rotation
355
+ Returns:
356
+ A new identity rotation
357
+ """
358
+ if(fmt == "rot_mat"):
359
+ rot_mats = identity_rot_mats(
360
+ shape, dtype, device, requires_grad,
361
+ )
362
+ return Rotation(rot_mats=rot_mats, quats=None)
363
+ elif(fmt == "quat"):
364
+ quats = identity_quats(shape, dtype, device, requires_grad)
365
+ return Rotation(rot_mats=None, quats=quats, normalize_quats=False)
366
+ else:
367
+ raise ValueError(f"Invalid format: f{fmt}")
368
+
369
+ # Magic methods
370
+
371
+ def __getitem__(self, index: Any) -> Rotation:
372
+ """
373
+ Allows torch-style indexing over the virtual shape of the rotation
374
+ object. See documentation for the shape property.
375
+
376
+ Args:
377
+ index:
378
+ A torch index. E.g. (1, 3, 2), or (slice(None,))
379
+ Returns:
380
+ The indexed rotation
381
+ """
382
+ if type(index) != tuple:
383
+ index = (index,)
384
+
385
+ if(self._rot_mats is not None):
386
+ rot_mats = self._rot_mats[index + (slice(None), slice(None))]
387
+ return Rotation(rot_mats=rot_mats)
388
+ elif(self._quats is not None):
389
+ quats = self._quats[index + (slice(None),)]
390
+ return Rotation(quats=quats, normalize_quats=False)
391
+ else:
392
+ raise ValueError("Both rotations are None")
393
+
394
+ def __mul__(self,
395
+ right: torch.Tensor,
396
+ ) -> Rotation:
397
+ """
398
+ Pointwise left multiplication of the rotation with a tensor. Can be
399
+ used to e.g. mask the Rotation.
400
+
401
+ Args:
402
+ right:
403
+ The tensor multiplicand
404
+ Returns:
405
+ The product
406
+ """
407
+ if not(isinstance(right, torch.Tensor)):
408
+ raise TypeError("The other multiplicand must be a Tensor")
409
+
410
+ if(self._rot_mats is not None):
411
+ rot_mats = self._rot_mats * right[..., None, None]
412
+ return Rotation(rot_mats=rot_mats, quats=None)
413
+ elif(self._quats is not None):
414
+ quats = self._quats * right[..., None]
415
+ return Rotation(rot_mats=None, quats=quats, normalize_quats=False)
416
+ else:
417
+ raise ValueError("Both rotations are None")
418
+
419
+ def __rmul__(self,
420
+ left: torch.Tensor,
421
+ ) -> Rotation:
422
+ """
423
+ Reverse pointwise multiplication of the rotation with a tensor.
424
+
425
+ Args:
426
+ left:
427
+ The left multiplicand
428
+ Returns:
429
+ The product
430
+ """
431
+ return self.__mul__(left)
432
+
433
+ # Properties
434
+
435
+ @property
436
+ def shape(self) -> torch.Size:
437
+ """
438
+ Returns the virtual shape of the rotation object. This shape is
439
+ defined as the batch dimensions of the underlying rotation matrix
440
+ or quaternion. If the Rotation was initialized with a [10, 3, 3]
441
+ rotation matrix tensor, for example, the resulting shape would be
442
+ [10].
443
+
444
+ Returns:
445
+ The virtual shape of the rotation object
446
+ """
447
+ s = None
448
+ if(self._quats is not None):
449
+ s = self._quats.shape[:-1]
450
+ else:
451
+ s = self._rot_mats.shape[:-2]
452
+
453
+ return s
454
+
455
+ @property
456
+ def dtype(self) -> torch.dtype:
457
+ """
458
+ Returns the dtype of the underlying rotation.
459
+
460
+ Returns:
461
+ The dtype of the underlying rotation
462
+ """
463
+ if(self._rot_mats is not None):
464
+ return self._rot_mats.dtype
465
+ elif(self._quats is not None):
466
+ return self._quats.dtype
467
+ else:
468
+ raise ValueError("Both rotations are None")
469
+
470
+ @property
471
+ def device(self) -> torch.device:
472
+ """
473
+ The device of the underlying rotation
474
+
475
+ Returns:
476
+ The device of the underlying rotation
477
+ """
478
+ if(self._rot_mats is not None):
479
+ return self._rot_mats.device
480
+ elif(self._quats is not None):
481
+ return self._quats.device
482
+ else:
483
+ raise ValueError("Both rotations are None")
484
+
485
+ @property
486
+ def requires_grad(self) -> bool:
487
+ """
488
+ Returns the requires_grad property of the underlying rotation
489
+
490
+ Returns:
491
+ The requires_grad property of the underlying tensor
492
+ """
493
+ if(self._rot_mats is not None):
494
+ return self._rot_mats.requires_grad
495
+ elif(self._quats is not None):
496
+ return self._quats.requires_grad
497
+ else:
498
+ raise ValueError("Both rotations are None")
499
+
500
+ def get_rot_mats(self) -> torch.Tensor:
501
+ """
502
+ Returns the underlying rotation as a rotation matrix tensor.
503
+
504
+ Returns:
505
+ The rotation as a rotation matrix tensor
506
+ """
507
+ rot_mats = self._rot_mats
508
+ if(rot_mats is None):
509
+ if(self._quats is None):
510
+ raise ValueError("Both rotations are None")
511
+ else:
512
+ rot_mats = quat_to_rot(self._quats)
513
+
514
+ return rot_mats
515
+
516
+ def get_quats(self) -> torch.Tensor:
517
+ """
518
+ Returns the underlying rotation as a quaternion tensor.
519
+
520
+ Depending on whether the Rotation was initialized with a
521
+ quaternion, this function may call torch.linalg.eigh.
522
+
523
+ Returns:
524
+ The rotation as a quaternion tensor.
525
+ """
526
+ quats = self._quats
527
+ if(quats is None):
528
+ if(self._rot_mats is None):
529
+ raise ValueError("Both rotations are None")
530
+ else:
531
+ quats = rot_to_quat(self._rot_mats)
532
+
533
+ return quats
534
+
535
+ def get_cur_rot(self) -> torch.Tensor:
536
+ """
537
+ Return the underlying rotation in its current form
538
+
539
+ Returns:
540
+ The stored rotation
541
+ """
542
+ if(self._rot_mats is not None):
543
+ return self._rot_mats
544
+ elif(self._quats is not None):
545
+ return self._quats
546
+ else:
547
+ raise ValueError("Both rotations are None")
548
+
549
+ # Rotation functions
550
+
551
+ def compose_q_update_vec(self,
552
+ q_update_vec: torch.Tensor,
553
+ normalize_quats: bool = True
554
+ ) -> Rotation:
555
+ """
556
+ Returns a new quaternion Rotation after updating the current
557
+ object's underlying rotation with a quaternion update, formatted
558
+ as a [*, 3] tensor whose final three columns represent x, y, z such
559
+ that (1, x, y, z) is the desired (not necessarily unit) quaternion
560
+ update.
561
+
562
+ Args:
563
+ q_update_vec:
564
+ A [*, 3] quaternion update tensor
565
+ normalize_quats:
566
+ Whether to normalize the output quaternion
567
+ Returns:
568
+ An updated Rotation
569
+ """
570
+ quats = self.get_quats()
571
+ new_quats = quats + quat_multiply_by_vec(quats, q_update_vec)
572
+ return Rotation(
573
+ rot_mats=None,
574
+ quats=new_quats,
575
+ normalize_quats=normalize_quats,
576
+ )
577
+
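+ # Illustrative usage (added sketch, not from the original file): a zero
+ # update vector corresponds to the identity quaternion (1, 0, 0, 0), so
+ # composing it leaves a unit quaternion unchanged, e.g.:
+ #
+ # r = Rotation(rot_mats=None, quats=torch.randn(8, 4), normalize_quats=True)
+ # r2 = r.compose_q_update_vec(torch.zeros(8, 3))
+ # assert torch.allclose(r.get_quats(), r2.get_quats(), atol=1e-6)
+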
578
+ def compose_r(self, r: Rotation) -> Rotation:
579
+ """
580
+ Compose the rotation matrices of the current Rotation object with
581
+ those of another.
582
+
583
+ Args:
584
+ r:
585
+ An update rotation object
586
+ Returns:
587
+ An updated rotation object
588
+ """
589
+ r1 = self.get_rot_mats()
590
+ r2 = r.get_rot_mats()
591
+ new_rot_mats = rot_matmul(r1, r2)
592
+ return Rotation(rot_mats=new_rot_mats, quats=None)
593
+
594
+ def compose_q(self, r: Rotation, normalize_quats: bool = True) -> Rotation:
595
+ """
596
+ Compose the quaternions of the current Rotation object with those
597
+ of another.
598
+
599
+ Depending on whether either Rotation was initialized with
600
+ quaternions, this function may call torch.linalg.eigh.
601
+
602
+ Args:
603
+ r:
604
+ An update rotation object
605
+ Returns:
606
+ An updated rotation object
607
+ """
608
+ q1 = self.get_quats()
609
+ q2 = r.get_quats()
610
+ new_quats = quat_multiply(q1, q2)
611
+ return Rotation(
612
+ rot_mats=None, quats=new_quats, normalize_quats=normalize_quats
613
+ )
614
+
615
+ def apply(self, pts: torch.Tensor) -> torch.Tensor:
616
+ """
617
+ Apply the current Rotation as a rotation matrix to a set of 3D
618
+ coordinates.
619
+
620
+ Args:
621
+ pts:
622
+ A [*, 3] set of points
623
+ Returns:
624
+ [*, 3] rotated points
625
+ """
626
+ rot_mats = self.get_rot_mats()
627
+ return rot_vec_mul(rot_mats, pts)
628
+
629
+ def invert_apply(self, pts: torch.Tensor) -> torch.Tensor:
630
+ """
631
+ The inverse of the apply() method.
632
+
633
+ Args:
634
+ pts:
635
+ A [*, 3] set of points
636
+ Returns:
637
+ [*, 3] inverse-rotated points
638
+ """
639
+ rot_mats = self.get_rot_mats()
640
+ inv_rot_mats = invert_rot_mat(rot_mats)
641
+ return rot_vec_mul(inv_rot_mats, pts)
642
+
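+ # Illustrative check (added sketch, not from the original file): apply()
+ # followed by invert_apply() is a no-op up to floating-point error, e.g.:
+ #
+ # r = Rotation(rot_mats=None, quats=torch.randn(10, 4))
+ # pts = torch.randn(10, 3)
+ # assert torch.allclose(r.invert_apply(r.apply(pts)), pts, atol=1e-5)
+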
643
+ def invert(self) -> Rotation:
644
+ """
645
+ Returns the inverse of the current Rotation.
646
+
647
+ Returns:
648
+ The inverse of the current Rotation
649
+ """
650
+ if(self._rot_mats is not None):
651
+ return Rotation(
652
+ rot_mats=invert_rot_mat(self._rot_mats),
653
+ quats=None
654
+ )
655
+ elif(self._quats is not None):
656
+ return Rotation(
657
+ rot_mats=None,
658
+ quats=invert_quat(self._quats),
659
+ normalize_quats=False,
660
+ )
661
+ else:
662
+ raise ValueError("Both rotations are None")
663
+
664
+ # "Tensor" stuff
665
+
666
+ def unsqueeze(self,
667
+ dim: int,
668
+ ) -> Rotation:
669
+ """
670
+ Analogous to torch.unsqueeze. The dimension is relative to the
671
+ shape of the Rotation object.
672
+
673
+ Args:
674
+ dim: A positive or negative dimension index.
675
+ Returns:
676
+ The unsqueezed Rotation.
677
+ """
678
+ if dim >= len(self.shape):
679
+ raise ValueError("Invalid dimension")
680
+
681
+ if(self._rot_mats is not None):
682
+ rot_mats = self._rot_mats.unsqueeze(dim if dim >= 0 else dim - 2)
683
+ return Rotation(rot_mats=rot_mats, quats=None)
684
+ elif(self._quats is not None):
685
+ quats = self._quats.unsqueeze(dim if dim >= 0 else dim - 1)
686
+ return Rotation(rot_mats=None, quats=quats, normalize_quats=False)
687
+ else:
688
+ raise ValueError("Both rotations are None")
689
+
690
+ @staticmethod
691
+ def cat(
692
+ rs: Sequence[Rotation],
693
+ dim: int,
694
+ ) -> Rotation:
695
+ """
696
+ Concatenates rotations along one of the batch dimensions. Analogous
697
+ to torch.cat().
698
+
699
+ Note that the output of this operation is always a rotation matrix,
700
+ regardless of the format of input rotations.
701
+
702
+ Args:
703
+ rs:
704
+ A list of rotation objects
705
+ dim:
706
+ The dimension along which the rotations should be
707
+ concatenated
708
+ Returns:
709
+ A concatenated Rotation object in rotation matrix format
710
+ """
711
+ rot_mats = [r.get_rot_mats() for r in rs]
712
+ rot_mats = torch.cat(rot_mats, dim=dim if dim >= 0 else dim - 2)
713
+
714
+ return Rotation(rot_mats=rot_mats, quats=None)
715
+
716
+ def map_tensor_fn(self,
717
+ fn: Callable[[torch.Tensor], torch.Tensor]
718
+ ) -> Rotation:
719
+ """
720
+ Apply a Tensor -> Tensor function to underlying rotation tensors,
721
+ mapping over the rotation dimension(s). Can be used e.g. to sum out
722
+ a one-hot batch dimension.
723
+
724
+ Args:
725
+ fn:
726
+ A Tensor -> Tensor function to be mapped over the Rotation
727
+ Returns:
728
+ The transformed Rotation object
729
+ """
730
+ if(self._rot_mats is not None):
731
+ rot_mats = self._rot_mats.view(self._rot_mats.shape[:-2] + (9,))
732
+ rot_mats = torch.stack(
733
+ list(map(fn, torch.unbind(rot_mats, dim=-1))), dim=-1
734
+ )
735
+ rot_mats = rot_mats.view(rot_mats.shape[:-1] + (3, 3))
736
+ return Rotation(rot_mats=rot_mats, quats=None)
737
+ elif(self._quats is not None):
738
+ quats = torch.stack(
739
+ list(map(fn, torch.unbind(self._quats, dim=-1))), dim=-1
740
+ )
741
+ return Rotation(rot_mats=None, quats=quats, normalize_quats=False)
742
+ else:
743
+ raise ValueError("Both rotations are None")
744
+
745
+ def cuda(self) -> Rotation:
746
+ """
747
+ Analogous to the cuda() method of torch Tensors
748
+
749
+ Returns:
750
+ A copy of the Rotation in CUDA memory
751
+ """
752
+ if(self._rot_mats is not None):
753
+ return Rotation(rot_mats=self._rot_mats.cuda(), quats=None)
754
+ elif(self._quats is not None):
755
+ return Rotation(
756
+ rot_mats=None,
757
+ quats=self._quats.cuda(),
758
+ normalize_quats=False
759
+ )
760
+ else:
761
+ raise ValueError("Both rotations are None")
762
+
763
+ def to(self,
764
+ device: Optional[torch.device],
765
+ dtype: Optional[torch.dtype]
766
+ ) -> Rotation:
767
+ """
768
+ Analogous to the to() method of torch Tensors
769
+
770
+ Args:
771
+ device:
772
+ A torch device
773
+ dtype:
774
+ A torch dtype
775
+ Returns:
776
+ A copy of the Rotation using the new device and dtype
777
+ """
778
+ if(self._rot_mats is not None):
779
+ return Rotation(
780
+ rot_mats=self._rot_mats.to(device=device, dtype=dtype),
781
+ quats=None,
782
+ )
783
+ elif(self._quats is not None):
784
+ return Rotation(
785
+ rot_mats=None,
786
+ quats=self._quats.to(device=device, dtype=dtype),
787
+ normalize_quats=False,
788
+ )
789
+ else:
790
+ raise ValueError("Both rotations are None")
791
+
792
+ def detach(self) -> Rotation:
793
+ """
794
+ Returns a copy of the Rotation whose underlying Tensor has been
795
+ detached from its torch graph.
796
+
797
+ Returns:
798
+ A copy of the Rotation whose underlying Tensor has been detached
799
+ from its torch graph
800
+ """
801
+ if(self._rot_mats is not None):
802
+ return Rotation(rot_mats=self._rot_mats.detach(), quats=None)
803
+ elif(self._quats is not None):
804
+ return Rotation(
805
+ rot_mats=None,
806
+ quats=self._quats.detach(),
807
+ normalize_quats=False,
808
+ )
809
+ else:
810
+ raise ValueError("Both rotations are None")
811
+
812
+
813
+ class Rigid:
814
+ """
815
+ A class representing a rigid transformation. Little more than a wrapper
816
+ around two objects: a Rotation object and a [*, 3] translation.
817
+ Designed to behave approximately like a single torch tensor with the
818
+ shape of the shared batch dimensions of its component parts.
819
+ """
820
+ def __init__(self,
821
+ rots: Optional[Rotation],
822
+ trans: Optional[torch.Tensor],
823
+ ):
824
+ """
825
+ Args:
826
+ rots: A Rotation object with batch shape [*]
827
+ trans: A corresponding [*, 3] translation tensor
828
+ """
829
+ # (we need device, dtype, etc. from at least one input)
830
+
831
+ batch_dims, dtype, device, requires_grad = None, None, None, None
832
+ if(trans is not None):
833
+ batch_dims = trans.shape[:-1]
834
+ dtype = trans.dtype
835
+ device = trans.device
836
+ requires_grad = trans.requires_grad
837
+ elif(rots is not None):
838
+ batch_dims = rots.shape
839
+ dtype = rots.dtype
840
+ device = rots.device
841
+ requires_grad = rots.requires_grad
842
+ else:
843
+ raise ValueError("At least one input argument must be specified")
844
+
845
+ if(rots is None):
846
+ rots = Rotation.identity(
847
+ batch_dims, dtype, device, requires_grad,
848
+ )
849
+ elif(trans is None):
850
+ trans = identity_trans(
851
+ batch_dims, dtype, device, requires_grad,
852
+ )
853
+
854
+ if((rots.shape != trans.shape[:-1]) or
855
+ (rots.device != trans.device)):
856
+ raise ValueError("Rots and trans incompatible")
857
+
858
+ # Force full precision. Happens to the rotations automatically.
859
+ trans = trans.to(dtype=torch.float32)
860
+
861
+ self._rots = rots
862
+ self._trans = trans
863
+
864
+ @staticmethod
865
+ def identity(
866
+ shape: Tuple[int],
867
+ dtype: Optional[torch.dtype] = None,
868
+ device: Optional[torch.device] = None,
869
+ requires_grad: bool = True,
870
+ fmt: str = "quat",
871
+ ) -> Rigid:
872
+ """
873
+ Constructs an identity transformation.
874
+
875
+ Args:
876
+ shape:
877
+ The desired shape
878
+ dtype:
879
+ The dtype of both internal tensors
880
+ device:
881
+ The device of both internal tensors
882
+ requires_grad:
883
+ Whether grad should be enabled for the internal tensors
884
+ Returns:
885
+ The identity transformation
886
+ """
887
+ return Rigid(
888
+ Rotation.identity(shape, dtype, device, requires_grad, fmt=fmt),
889
+ identity_trans(shape, dtype, device, requires_grad),
890
+ )
891
+
892
+ def __getitem__(self,
893
+ index: Any,
894
+ ) -> Rigid:
895
+ """
896
+ Indexes the affine transformation with PyTorch-style indices.
897
+ The index is applied to the shared dimensions of both the rotation
898
+ and the translation.
899
+
900
+ E.g.::
901
+
902
+ r = Rotation(rot_mats=torch.rand(10, 10, 3, 3), quats=None)
903
+ t = Rigid(r, torch.rand(10, 10, 3))
904
+ indexed = t[3, 4:6]
905
+ assert(indexed.shape == (2,))
906
+ assert(indexed.get_rots().shape == (2,))
907
+ assert(indexed.get_trans().shape == (2, 3))
908
+
909
+ Args:
910
+ index: A standard torch tensor index. E.g. 8, (10, None, 3),
911
+ or (3, slice(0, 1, None))
912
+ Returns:
913
+ The indexed tensor
914
+ """
915
+ if type(index) != tuple:
916
+ index = (index,)
917
+
918
+ return Rigid(
919
+ self._rots[index],
920
+ self._trans[index + (slice(None),)],
921
+ )
922
+
923
+ def __mul__(self,
924
+ right: torch.Tensor,
925
+ ) -> Rigid:
926
+ """
927
+ Pointwise left multiplication of the transformation with a tensor.
928
+ Can be used to e.g. mask the Rigid.
929
+
930
+ Args:
931
+ right:
932
+ The tensor multiplicand
933
+ Returns:
934
+ The product
935
+ """
936
+ if not(isinstance(right, torch.Tensor)):
937
+ raise TypeError("The other multiplicand must be a Tensor")
938
+
939
+ new_rots = self._rots * right
940
+ new_trans = self._trans * right[..., None]
941
+
942
+ return Rigid(new_rots, new_trans)
943
+
944
+ def __rmul__(self,
945
+ left: torch.Tensor,
946
+ ) -> Rigid:
947
+ """
948
+ Reverse pointwise multiplication of the transformation with a
949
+ tensor.
950
+
951
+ Args:
952
+ left:
953
+ The left multiplicand
954
+ Returns:
955
+ The product
956
+ """
957
+ return self.__mul__(left)
958
+
959
+ @property
960
+ def shape(self) -> torch.Size:
961
+ """
962
+ Returns the shape of the shared dimensions of the rotation and
963
+ the translation.
964
+
965
+ Returns:
966
+ The shape of the transformation
967
+ """
968
+ s = self._trans.shape[:-1]
969
+ return s
970
+
971
+ @property
972
+ def device(self) -> torch.device:
973
+ """
974
+ Returns the device on which the Rigid's tensors are located.
975
+
976
+ Returns:
977
+ The device on which the Rigid's tensors are located
978
+ """
979
+ return self._trans.device
980
+
981
+ @property
982
+ def dtype(self) -> torch.dtype:
983
+ """
984
+ Returns the dtype of the Rigid tensors.
985
+
986
+ Returns:
987
+ The dtype of the Rigid tensors
988
+ """
989
+ return self._rots.dtype
990
+
991
+ def get_rots(self) -> Rotation:
992
+ """
993
+ Getter for the rotation.
994
+
995
+ Returns:
996
+ The rotation object
997
+ """
998
+ return self._rots
999
+
1000
+ def get_trans(self) -> torch.Tensor:
1001
+ """
1002
+ Getter for the translation.
1003
+
1004
+ Returns:
1005
+ The stored translation
1006
+ """
1007
+ return self._trans
1008
+
1009
+ def compose_q_update_vec(self,
1010
+ q_update_vec: torch.Tensor,
1011
+ ) -> Rigid:
1012
+ """
1013
+ Composes the transformation with a quaternion update vector of
1014
+ shape [*, 6], whose first three columns represent the x, y, and
1015
+ z values of a quaternion update of form (1, x, y, z) and whose
1016
+ final three columns represent a 3D translation.
1017
+
1018
+ Args:
1019
+ q_update_vec: The quaternion and translation update vector.
1020
+ Returns:
1021
+ The composed transformation.
1022
+ """
1023
+ q_vec, t_vec = q_update_vec[..., :3], q_update_vec[..., 3:]
1024
+ new_rots = self._rots.compose_q_update_vec(q_vec)
1025
+
1026
+ trans_update = self._rots.apply(t_vec)
1027
+ new_translation = self._trans + trans_update
1028
+
1029
+ return Rigid(new_rots, new_translation)
1030
+
1031
+ def compose(self,
1032
+ r: Rigid,
1033
+ ) -> Rigid:
1034
+ """
1035
+ Composes the current rigid object with another.
1036
+
1037
+ Args:
1038
+ r:
1039
+ Another Rigid object
1040
+ Returns:
1041
+ The composition of the two transformations
1042
+ """
1043
+ new_rot = self._rots.compose_r(r._rots)
1044
+ new_trans = self._rots.apply(r._trans) + self._trans
1045
+ return Rigid(new_rot, new_trans)
1046
+
1047
+ def apply(self,
1048
+ pts: torch.Tensor,
1049
+ ) -> torch.Tensor:
1050
+ """
1051
+ Applies the transformation to a coordinate tensor.
1052
+
1053
+ Args:
1054
+ pts: A [*, 3] coordinate tensor.
1055
+ Returns:
1056
+ The transformed points.
1057
+ """
1058
+ rotated = self._rots.apply(pts)
1059
+ return rotated + self._trans
1060
+
1061
+ def invert_apply(self,
1062
+ pts: torch.Tensor
1063
+ ) -> torch.Tensor:
1064
+ """
1065
+ Applies the inverse of the transformation to a coordinate tensor.
1066
+
1067
+ Args:
1068
+ pts: A [*, 3] coordinate tensor
1069
+ Returns:
1070
+ The transformed points.
1071
+ """
1072
+ pts = pts - self._trans
1073
+ return self._rots.invert_apply(pts)
1074
+
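+ # Illustrative check (added sketch, not from the original file): a Rigid
+ # applied to points and then inverted recovers the points, e.g.:
+ #
+ # T = Rigid(Rotation(rot_mats=None, quats=torch.randn(5, 4)), torch.randn(5, 3))
+ # pts = torch.randn(5, 3)
+ # assert torch.allclose(T.invert_apply(T.apply(pts)), pts, atol=1e-5)
+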
1075
+ def invert(self) -> Rigid:
1076
+ """
1077
+ Inverts the transformation.
1078
+
1079
+ Returns:
1080
+ The inverse transformation.
1081
+ """
1082
+ rot_inv = self._rots.invert()
1083
+ trn_inv = rot_inv.apply(self._trans)
1084
+
1085
+ return Rigid(rot_inv, -1 * trn_inv)
1086
+
1087
+ def map_tensor_fn(self,
1088
+ fn: Callable[[torch.Tensor], torch.Tensor]
1089
+ ) -> Rigid:
1090
+ """
1091
+ Apply a Tensor -> Tensor function to underlying translation and
1092
+ rotation tensors, mapping over the translation/rotation dimensions
1093
+ respectively.
1094
+
1095
+ Args:
1096
+ fn:
1097
+ A Tensor -> Tensor function to be mapped over the Rigid
1098
+ Returns:
1099
+ The transformed Rigid object
1100
+ """
1101
+ new_rots = self._rots.map_tensor_fn(fn)
1102
+ new_trans = torch.stack(
1103
+ list(map(fn, torch.unbind(self._trans, dim=-1))),
1104
+ dim=-1
1105
+ )
1106
+
1107
+ return Rigid(new_rots, new_trans)
1108
+
1109
+ def to_tensor_4x4(self) -> torch.Tensor:
1110
+ """
1111
+ Converts a transformation to a homogeneous transformation tensor.
1112
+
1113
+ Returns:
1114
+ A [*, 4, 4] homogeneous transformation tensor
1115
+ """
1116
+ tensor = self._trans.new_zeros((*self.shape, 4, 4))
1117
+ tensor[..., :3, :3] = self._rots.get_rot_mats()
1118
+ tensor[..., :3, 3] = self._trans
1119
+ tensor[..., 3, 3] = 1
1120
+ return tensor
1121
+
1122
+ @staticmethod
1123
+ def from_tensor_4x4(
1124
+ t: torch.Tensor
1125
+ ) -> Rigid:
1126
+ """
1127
+ Constructs a transformation from a homogeneous transformation
1128
+ tensor.
1129
+
1130
+ Args:
1131
+ t: [*, 4, 4] homogeneous transformation tensor
1132
+ Returns:
1133
+ Rigid object with shape [*]
1134
+ """
1135
+ if(t.shape[-2:] != (4, 4)):
1136
+ raise ValueError("Incorrectly shaped input tensor")
1137
+
1138
+ rots = Rotation(rot_mats=t[..., :3, :3], quats=None)
1139
+ trans = t[..., :3, 3]
1140
+
1141
+ return Rigid(rots, trans)
1142
+
1143
+ def to_tensor_7(self) -> torch.Tensor:
1144
+ """
1145
+ Converts a transformation to a tensor with 7 final columns, four
1146
+ for the quaternion followed by three for the translation.
1147
+
1148
+ Returns:
1149
+ A [*, 7] tensor representation of the transformation
1150
+ """
1151
+ tensor = self._trans.new_zeros((*self.shape, 7))
1152
+ tensor[..., :4] = self._rots.get_quats()
1153
+ tensor[..., 4:] = self._trans
1154
+
1155
+ return tensor
1156
+
1157
+ @staticmethod
1158
+ def from_tensor_7(
1159
+ t: torch.Tensor,
1160
+ normalize_quats: bool = False,
1161
+ ) -> Rigid:
1162
+ if(t.shape[-1] != 7):
1163
+ raise ValueError("Incorrectly shaped input tensor")
1164
+
1165
+ quats, trans = t[..., :4], t[..., 4:]
1166
+
1167
+ rots = Rotation(
1168
+ rot_mats=None,
1169
+ quats=quats,
1170
+ normalize_quats=normalize_quats
1171
+ )
1172
+
1173
+ return Rigid(rots, trans)
1174
+
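+ # Illustrative round trip (added sketch, not from the original file):
+ #
+ # T = Rigid.identity((4,), fmt="quat")
+ # T2 = Rigid.from_tensor_7(T.to_tensor_7())
+ # assert torch.allclose(T.to_tensor_7(), T2.to_tensor_7())
+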
1175
+ @staticmethod
1176
+ def from_3_points(
1177
+ p_neg_x_axis: torch.Tensor,
1178
+ origin: torch.Tensor,
1179
+ p_xy_plane: torch.Tensor,
1180
+ eps: float = 1e-8
1181
+ ) -> Rigid:
1182
+ """
1183
+ Implements algorithm 21. Constructs transformations from sets of 3
1184
+ points using the Gram-Schmidt algorithm.
1185
+
1186
+ Args:
1187
+ p_neg_x_axis: [*, 3] coordinates
1188
+ origin: [*, 3] coordinates used as frame origins
1189
+ p_xy_plane: [*, 3] coordinates
1190
+ eps: Small epsilon value
1191
+ Returns:
1192
+ A transformation object of shape [*]
1193
+ """
1194
+ p_neg_x_axis = torch.unbind(p_neg_x_axis, dim=-1)
1195
+ origin = torch.unbind(origin, dim=-1)
1196
+ p_xy_plane = torch.unbind(p_xy_plane, dim=-1)
1197
+
1198
+ e0 = [c1 - c2 for c1, c2 in zip(origin, p_neg_x_axis)]
1199
+ e1 = [c1 - c2 for c1, c2 in zip(p_xy_plane, origin)]
1200
+
1201
+ denom = torch.sqrt(sum((c * c for c in e0)) + eps)
1202
+ e0 = [c / denom for c in e0]
1203
+ dot = sum((c1 * c2 for c1, c2 in zip(e0, e1)))
1204
+ e1 = [c2 - c1 * dot for c1, c2 in zip(e0, e1)]
1205
+ denom = torch.sqrt(sum((c * c for c in e1)) + eps)
1206
+ e1 = [c / denom for c in e1]
1207
+ e2 = [
1208
+ e0[1] * e1[2] - e0[2] * e1[1],
1209
+ e0[2] * e1[0] - e0[0] * e1[2],
1210
+ e0[0] * e1[1] - e0[1] * e1[0],
1211
+ ]
1212
+
1213
+ rots = torch.stack([c for tup in zip(e0, e1, e2) for c in tup], dim=-1)
1214
+ rots = rots.reshape(rots.shape[:-1] + (3, 3))
1215
+
1216
+ rot_obj = Rotation(rot_mats=rots, quats=None)
1217
+
1218
+ return Rigid(rot_obj, torch.stack(origin, dim=-1))
1219
+
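+ # Hypothetical usage (added sketch, not from the original file): per-residue
+ # backbone frames can be built from [L, 3] atom coordinate tensors, e.g.
+ # frames = Rigid.from_3_points(c_xyz, ca_xyz, n_xyz). The atom-to-argument
+ # mapping shown here is only illustrative; check this repository's callers
+ # for the exact convention.
+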
1220
+ def unsqueeze(self,
1221
+ dim: int,
1222
+ ) -> Rigid:
1223
+ """
1224
+ Analogous to torch.unsqueeze. The dimension is relative to the
1225
+ shared dimensions of the rotation/translation.
1226
+
1227
+ Args:
1228
+ dim: A positive or negative dimension index.
1229
+ Returns:
1230
+ The unsqueezed transformation.
1231
+ """
1232
+ if dim >= len(self.shape):
1233
+ raise ValueError("Invalid dimension")
1234
+ rots = self._rots.unsqueeze(dim)
1235
+ trans = self._trans.unsqueeze(dim if dim >= 0 else dim - 1)
1236
+
1237
+ return Rigid(rots, trans)
1238
+
1239
+ @staticmethod
1240
+ def cat(
1241
+ ts: Sequence[Rigid],
1242
+ dim: int,
1243
+ ) -> Rigid:
1244
+ """
1245
+ Concatenates transformations along one of the existing batch
+ dimensions. Analogous to torch.cat().
1246
+
1247
+ Args:
1248
+ ts:
1249
+ A list of Rigid objects
1250
+ dim:
1251
+ The dimension along which the transformations should be
1252
+ concatenated
1253
+ Returns:
1254
+ A concatenated transformation object
1255
+ """
1256
+ rots = Rotation.cat([t._rots for t in ts], dim)
1257
+ trans = torch.cat(
1258
+ [t._trans for t in ts], dim=dim if dim >= 0 else dim - 1
1259
+ )
1260
+
1261
+ return Rigid(rots, trans)
1262
+
1263
+ def apply_rot_fn(self, fn: Callable[[Rotation], Rotation]) -> Rigid:
1264
+ """
1265
+ Applies a Rotation -> Rotation function to the stored rotation
1266
+ object.
1267
+
1268
+ Args:
1269
+ fn: A function of type Rotation -> Rotation
1270
+ Returns:
1271
+ A transformation object with a transformed rotation.
1272
+ """
1273
+ return Rigid(fn(self._rots), self._trans)
1274
+
1275
+ def apply_trans_fn(self, fn: Callable[[torch.Tensor], torch.Tensor]) -> Rigid:
1276
+ """
1277
+ Applies a Tensor -> Tensor function to the stored translation.
1278
+
1279
+ Args:
1280
+ fn:
1281
+ A function of type Tensor -> Tensor to be applied to the
1282
+ translation
1283
+ Returns:
1284
+ A transformation object with a transformed translation.
1285
+ """
1286
+ return Rigid(self._rots, fn(self._trans))
1287
+
1288
+ def scale_translation(self, trans_scale_factor: float) -> Rigid:
1289
+ """
1290
+ Scales the translation by a constant factor.
1291
+
1292
+ Args:
1293
+ trans_scale_factor:
1294
+ The constant factor
1295
+ Returns:
1296
+ A transformation object with a scaled translation.
1297
+ """
1298
+ fn = lambda t: t * trans_scale_factor
1299
+ return self.apply_trans_fn(fn)
1300
+
1301
+ def stop_rot_gradient(self) -> Rigid:
1302
+ """
1303
+ Detaches the underlying rotation object
1304
+
1305
+ Returns:
1306
+ A transformation object with detached rotations
1307
+ """
1308
+ fn = lambda r: r.detach()
1309
+ return self.apply_rot_fn(fn)
1310
+
1311
+ @staticmethod
1312
+ def make_transform_from_reference(n_xyz, ca_xyz, c_xyz, eps=1e-20):
1313
+ """
1314
+ Returns a transformation object from reference coordinates.
1315
+
1316
+ Note that this method does not take care of symmetries. If you
1317
+ provide the atom positions in the non-standard way, the N atom will
1318
+ end up not at [-0.527250, 1.359329, 0.0] but instead at
1319
+ [-0.527250, -1.359329, 0.0]. You need to take care of such cases in
1320
+ your code.
1321
+
1322
+ Args:
1323
+ n_xyz: A [*, 3] tensor of nitrogen xyz coordinates.
1324
+ ca_xyz: A [*, 3] tensor of carbon alpha xyz coordinates.
1325
+ c_xyz: A [*, 3] tensor of carbon xyz coordinates.
1326
+ Returns:
1327
+ A transformation object. After applying the translation and
1328
+ rotation to the reference backbone, the coordinates will
1329
+ approximately equal the input coordinates.
1330
+ """
1331
+ translation = -1 * ca_xyz
1332
+ n_xyz = n_xyz + translation
1333
+ c_xyz = c_xyz + translation
1334
+
1335
+ c_x, c_y, c_z = [c_xyz[..., i] for i in range(3)]
1336
+ norm = torch.sqrt(eps + c_x ** 2 + c_y ** 2)
1337
+ sin_c1 = -c_y / norm
1338
+ cos_c1 = c_x / norm
1339
+ zeros = sin_c1.new_zeros(sin_c1.shape)
1340
+ ones = sin_c1.new_ones(sin_c1.shape)
1341
+
1342
+ c1_rots = sin_c1.new_zeros((*sin_c1.shape, 3, 3))
1343
+ c1_rots[..., 0, 0] = cos_c1
1344
+ c1_rots[..., 0, 1] = -1 * sin_c1
1345
+ c1_rots[..., 1, 0] = sin_c1
1346
+ c1_rots[..., 1, 1] = cos_c1
1347
+ c1_rots[..., 2, 2] = 1
1348
+
1349
+ norm = torch.sqrt(eps + c_x ** 2 + c_y ** 2 + c_z ** 2)
1350
+ sin_c2 = c_z / norm
1351
+ cos_c2 = torch.sqrt(c_x ** 2 + c_y ** 2) / norm
1352
+
1353
+ c2_rots = sin_c2.new_zeros((*sin_c2.shape, 3, 3))
1354
+ c2_rots[..., 0, 0] = cos_c2
1355
+ c2_rots[..., 0, 2] = sin_c2
1356
+ c2_rots[..., 1, 1] = 1
1357
+ c2_rots[..., 2, 0] = -1 * sin_c2
1358
+ c2_rots[..., 2, 2] = cos_c2
1359
+
1360
+ c_rots = rot_matmul(c2_rots, c1_rots)
1361
+ n_xyz = rot_vec_mul(c_rots, n_xyz)
1362
+
1363
+ _, n_y, n_z = [n_xyz[..., i] for i in range(3)]
1364
+ norm = torch.sqrt(eps + n_y ** 2 + n_z ** 2)
1365
+ sin_n = -n_z / norm
1366
+ cos_n = n_y / norm
1367
+
1368
+ n_rots = sin_c2.new_zeros((*sin_c2.shape, 3, 3))
1369
+ n_rots[..., 0, 0] = 1
1370
+ n_rots[..., 1, 1] = cos_n
1371
+ n_rots[..., 1, 2] = -1 * sin_n
1372
+ n_rots[..., 2, 1] = sin_n
1373
+ n_rots[..., 2, 2] = cos_n
1374
+
1375
+ rots = rot_matmul(n_rots, c_rots)
1376
+
1377
+ rots = rots.transpose(-1, -2)
1378
+ translation = -1 * translation
1379
+
1380
+ rot_obj = Rotation(rot_mats=rots, quats=None)
1381
+
1382
+ return Rigid(rot_obj, translation)
1383
+
1384
+ def cuda(self) -> Rigid:
1385
+ """
1386
+ Moves the transformation object to GPU memory
1387
+
1388
+ Returns:
1389
+ A version of the transformation on GPU
1390
+ """
1391
+ return Rigid(self._rots.cuda(), self._trans.cuda())
data/mdgen/tensor_utils.py ADDED
@@ -0,0 +1,119 @@
1
+ # Copyright 2021 AlQuraishi Laboratory
2
+ # Copyright 2021 DeepMind Technologies Limited
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+
16
+ from functools import partial
17
+ from typing import List
18
+
19
+ import torch
20
+ import torch.nn as nn
21
+
22
+
23
+ def add(m1, m2, inplace):
24
+ # The first operation in a checkpoint can't be in-place, but it's
25
+ # nice to have in-place addition during inference. Thus...
26
+ if(not inplace):
27
+ m1 = m1 + m2
28
+ else:
29
+ m1 += m2
30
+
31
+ return m1
32
+
33
+
34
+ def permute_final_dims(tensor: torch.Tensor, inds: List[int]):
35
+ zero_index = -1 * len(inds)
36
+ first_inds = list(range(len(tensor.shape[:zero_index])))
37
+ return tensor.permute(first_inds + [zero_index + i for i in inds])
38
+
39
+
40
+ def flatten_final_dims(t: torch.Tensor, no_dims: int):
41
+ return t.reshape(t.shape[:-no_dims] + (-1,))
42
+
43
+
44
+ def masked_mean(mask, value, dim, eps=1e-4):
45
+ mask = mask.expand(*value.shape)
46
+ return torch.sum(mask * value, dim=dim) / (eps + torch.sum(mask, dim=dim))
47
+
48
+
49
+ def pts_to_distogram(pts, min_bin=2.3125, max_bin=21.6875, no_bins=64):
50
+ boundaries = torch.linspace(
51
+ min_bin, max_bin, no_bins - 1, device=pts.device
52
+ )
53
+ dists = torch.sqrt(
54
+ torch.sum((pts.unsqueeze(-2) - pts.unsqueeze(-3)) ** 2, dim=-1)
55
+ )
56
+ return torch.bucketize(dists, boundaries)
57
+
58
+
59
+ def dict_multimap(fn, dicts):
60
+ first = dicts[0]
61
+ new_dict = {}
62
+ for k, v in first.items():
63
+ all_v = [d[k] for d in dicts]
64
+ if type(v) is dict:
65
+ new_dict[k] = dict_multimap(fn, all_v)
66
+ else:
67
+ new_dict[k] = fn(all_v)
68
+
69
+ return new_dict
70
+
71
+
72
+ def one_hot(x, v_bins):
73
+ reshaped_bins = v_bins.view(((1,) * len(x.shape)) + (len(v_bins),))
74
+ diffs = x[..., None] - reshaped_bins
75
+ am = torch.argmin(torch.abs(diffs), dim=-1)
76
+ return nn.functional.one_hot(am, num_classes=len(v_bins)).float()
77
+
78
+
79
+ def batched_gather(data, inds, dim=0, no_batch_dims=0):
80
+ ranges = []
81
+ for i, s in enumerate(data.shape[:no_batch_dims]):
82
+ r = torch.arange(s)
83
+ r = r.view(*(*((1,) * i), -1, *((1,) * (len(inds.shape) - i - 1))))
84
+ ranges.append(r)
85
+
86
+ remaining_dims = [
87
+ slice(None) for _ in range(len(data.shape) - no_batch_dims)
88
+ ]
89
+ remaining_dims[dim - no_batch_dims if dim >= 0 else dim] = inds
90
+ ranges.extend(remaining_dims)
91
+ return data[ranges]
92
+
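+ # Illustrative example (added sketch, not from the original file): gather
+ # per-row indices along dim 1 with one batch dimension, e.g.:
+ #
+ # data = torch.arange(12).view(2, 6)
+ # inds = torch.tensor([[0, 2], [1, 5]])
+ # batched_gather(data, inds, dim=1, no_batch_dims=1)
+ # # -> tensor([[0, 2], [7, 11]])
+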
93
+
94
+ # With tree_map, a poor man's JAX tree_map
95
+ def dict_map(fn, dic, leaf_type):
96
+ new_dict = {}
97
+ for k, v in dic.items():
98
+ if type(v) is dict:
99
+ new_dict[k] = dict_map(fn, v, leaf_type)
100
+ else:
101
+ new_dict[k] = tree_map(fn, v, leaf_type)
102
+
103
+ return new_dict
104
+
105
+
106
+ def tree_map(fn, tree, leaf_type):
107
+ if isinstance(tree, dict):
108
+ return dict_map(fn, tree, leaf_type)
109
+ elif isinstance(tree, list):
110
+ return [tree_map(fn, x, leaf_type) for x in tree]
111
+ elif isinstance(tree, tuple):
112
+ return tuple([tree_map(fn, x, leaf_type) for x in tree])
113
+ elif isinstance(tree, leaf_type):
114
+ return fn(tree)
115
+ else:
116
+ raise ValueError(f"Tree of type {type(tree)} not supported")
117
+
118
+
119
+ tensor_tree_map = partial(tree_map, leaf_type=torch.Tensor)
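+
+ # Illustrative usage (added sketch, not from the original file): apply a
+ # function to every tensor leaf of a nested container in one call, e.g.:
+ #
+ # batch = {"coords": torch.randn(2, 3), "extra": {"mask": torch.ones(2)}}
+ # half_batch = tensor_tree_map(lambda t: t.half(), batch)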
data/mdgen/transport/integrators.py ADDED
@@ -0,0 +1,114 @@
1
+ # https://github.com/willisma/SiT/
2
+ import torch as th
3
+ from torchdiffeq import odeint
4
+
5
+ class sde:
6
+ """SDE solver class"""
7
+ def __init__(
8
+ self,
9
+ drift,
10
+ diffusion,
11
+ *,
12
+ t0,
13
+ t1,
14
+ num_steps,
15
+ sampler_type,
16
+ ):
17
+ assert t0 < t1, "SDE sampler has to be in forward time"
18
+
19
+ self.inference_steps = num_steps
20
+ self.t = th.linspace(t0, t1, num_steps)
21
+ self.dt = self.t[1] - self.t[0]
22
+ self.drift = drift
23
+ self.diffusion = diffusion
24
+ self.sampler_type = sampler_type
25
+
26
+ def __Euler_Maruyama_step(self, x, mean_x, t, model, **model_kwargs):
27
+ w_cur = th.randn(x.size()).to(x)
28
+ t = th.ones(x.size(0)).to(x) * t
29
+ dw = w_cur * th.sqrt(self.dt)
30
+ drift = self.drift(x, t, model, **model_kwargs)
31
+ diffusion = self.diffusion(x, t)
32
+ mean_x = x + drift * self.dt
33
+ x = mean_x + th.sqrt(2 * diffusion) * dw
34
+ return x, mean_x
35
+
36
+ def __Heun_step(self, x, _, t, model, **model_kwargs):
37
+ w_cur = th.randn(x.size()).to(x)
38
+ dw = w_cur * th.sqrt(self.dt)
39
+ t_cur = th.ones(x.size(0)).to(x) * t
40
+ diffusion = self.diffusion(x, t_cur)
41
+ xhat = x + th.sqrt(2 * diffusion) * dw
42
+ K1 = self.drift(xhat, t_cur, model, **model_kwargs)
43
+ xp = xhat + self.dt * K1
44
+ K2 = self.drift(xp, t_cur + self.dt, model, **model_kwargs)
45
+ return xhat + 0.5 * self.dt * (K1 + K2), xhat # at last time point we do not perform the heun step
46
+
47
+ def __forward_fn(self):
48
+ """TODO: generalize here by adding all private functions ending with steps to it"""
49
+ sampler_dict = {
50
+ "Euler": self.__Euler_Maruyama_step,
51
+ "Heun": self.__Heun_step,
52
+ }
53
+
54
+ try:
55
+ sampler = sampler_dict[self.sampler_type]
56
+ except:
57
+ raise NotImplementedError("Smapler type not implemented.")
58
+
59
+ return sampler
60
+
61
+ def sample(self, init, model, **model_kwargs):
62
+ """forward loop of sde"""
63
+ x = init
64
+ mean_x = init
65
+ samples = []
66
+ sampler = self.__forward_fn()
67
+ for ti in self.t[:-1]:
68
+ with th.no_grad():
69
+ x, mean_x = sampler(x, mean_x, ti, model, **model_kwargs)
70
+ samples.append(x)
71
+
72
+ return samples
73
+
74
+ class ode:
75
+ """ODE solver class"""
76
+ def __init__(
77
+ self,
78
+ drift,
79
+ *,
80
+ t0,
81
+ t1,
82
+ sampler_type,
83
+ num_steps,
84
+ atol,
85
+ rtol,
86
+ ):
87
+ assert t0 < t1, "ODE sampler has to be in forward time"
88
+
89
+ self.drift = drift
90
+ self.t = th.linspace(t0, t1, num_steps)
91
+ self.atol = atol
92
+ self.rtol = rtol
93
+ self.sampler_type = sampler_type
94
+
95
+ def sample(self, x, model, **model_kwargs):
96
+
97
+ device = x[0].device if isinstance(x, tuple) else x.device
98
+ def _fn(t, x):
99
+ t = th.ones(x[0].size(0)).to(device) * t if isinstance(x, tuple) else th.ones(x.size(0)).to(device) * t
100
+ model_output = self.drift(x, t, model, **model_kwargs)
101
+ return model_output
102
+
103
+ t = self.t.to(device)
104
+ atol = [self.atol] * len(x) if isinstance(x, tuple) else [self.atol]
105
+ rtol = [self.rtol] * len(x) if isinstance(x, tuple) else [self.rtol]
106
+ samples = odeint(
107
+ _fn,
108
+ x,
109
+ t,
110
+ method=self.sampler_type,
111
+ atol=atol,
112
+ rtol=rtol
113
+ )
114
+ return samples
data/mdgen/transport/path.py ADDED
@@ -0,0 +1,191 @@
1
+ # https://github.com/willisma/SiT/
2
+ import torch as th
3
+ import numpy as np
4
+ def expand_t_like_x(t, x):
5
+ """Function to reshape time t to broadcastable dimension of x
6
+ Args:
7
+ t: [batch_dim,], time vector
8
+ x: [batch_dim,...], data point
9
+ """
10
+ dims = [1] * (len(x.size()) - 1)
11
+ t = t.view(t.size(0), *dims)
12
+ return t
13
+
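+ # Illustrative example (added sketch, not from the original file):
+ #
+ # t = th.rand(8) # [batch]
+ # x = th.randn(8, 16, 3) # [batch, ...]
+ # expand_t_like_x(t, x).shape # torch.Size([8, 1, 1])
+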
14
+
15
+ #################### Coupling Plans ####################
16
+
17
+ class ICPlan:
18
+ """Linear Coupling Plan"""
19
+ def __init__(self, sigma=0.0):
20
+ self.sigma = sigma
21
+
22
+ def compute_alpha_t(self, t):
23
+ """Compute the data coefficient along the path"""
24
+ return t, 1
25
+
26
+ def compute_sigma_t(self, t):
27
+ """Compute the noise coefficient along the path"""
28
+ return 1 - t, -1
29
+
30
+ def compute_d_alpha_alpha_ratio_t(self, t):
31
+ """Compute the ratio between d_alpha and alpha"""
32
+ return 1 / t
33
+
34
+ def compute_drift(self, x, t):
35
+ """We always output sde according to score parametrization; """
36
+ t = expand_t_like_x(t, x)
37
+ alpha_ratio = self.compute_d_alpha_alpha_ratio_t(t)
38
+ sigma_t, d_sigma_t = self.compute_sigma_t(t)
39
+ drift = alpha_ratio * x
40
+ diffusion = alpha_ratio * (sigma_t ** 2) - sigma_t * d_sigma_t
41
+
42
+ return -drift, diffusion
43
+
44
+ def compute_diffusion(self, x, t, form="constant", norm=1.0):
45
+ """Compute the diffusion term of the SDE
46
+ Args:
47
+ x: [batch_dim, ...], data point
48
+ t: [batch_dim,], time vector
49
+ form: str, form of the diffusion term
50
+ norm: float, norm of the diffusion term
51
+ """
52
+ t = expand_t_like_x(t, x)
53
+ choices = {
54
+ "constant": norm,
55
+ "SBDM": norm * self.compute_drift(x, t)[1],
56
+ "sigma": norm * self.compute_sigma_t(t)[0],
57
+ "linear": norm * (1 - t),
58
+ "decreasing": 0.25 * (norm * th.cos(np.pi * t) + 1) ** 2,
59
+ "inccreasing-decreasing": norm * th.sin(np.pi * t) ** 2,
60
+ }
61
+
62
+ try:
63
+ diffusion = choices[form]
64
+ except KeyError:
65
+ raise NotImplementedError(f"Diffusion form {form} not implemented")
66
+
67
+ return diffusion
68
+
69
+ def get_score_from_velocity(self, velocity, x, t):
70
+ """Wrapper function: transfrom velocity prediction model to score
71
+ Args:
72
+ velocity: [batch_dim, ...] shaped tensor; velocity model output
73
+ x: [batch_dim, ...] shaped tensor; x_t data point
74
+ t: [batch_dim,] time tensor
75
+ """
76
+ t = expand_t_like_x(t, x)
77
+ alpha_t, d_alpha_t = self.compute_alpha_t(t)
78
+ sigma_t, d_sigma_t = self.compute_sigma_t(t)
79
+ mean = x
80
+ reverse_alpha_ratio = alpha_t / d_alpha_t
81
+ var = sigma_t**2 - reverse_alpha_ratio * d_sigma_t * sigma_t
82
+ score = (reverse_alpha_ratio * velocity - mean) / var
83
+ return score
84
+
85
+ def get_noise_from_velocity(self, velocity, x, t):
86
+ """Wrapper function: transfrom velocity prediction model to denoiser
87
+ Args:
88
+ velocity: [batch_dim, ...] shaped tensor; velocity model output
89
+ x: [batch_dim, ...] shaped tensor; x_t data point
90
+ t: [batch_dim,] time tensor
91
+ """
92
+ t = expand_t_like_x(t, x)
93
+ alpha_t, d_alpha_t = self.compute_alpha_t(t)
94
+ sigma_t, d_sigma_t = self.compute_sigma_t(t)
95
+ mean = x
96
+ reverse_alpha_ratio = alpha_t / d_alpha_t
97
+ var = reverse_alpha_ratio * d_sigma_t - sigma_t
98
+ noise = (reverse_alpha_ratio * velocity - mean) / var
99
+ return noise
100
+
101
+ def get_velocity_from_score(self, score, x, t):
102
+ """Wrapper function: transfrom score prediction model to velocity
103
+ Args:
104
+ score: [batch_dim, ...] shaped tensor; score model output
105
+ x: [batch_dim, ...] shaped tensor; x_t data point
106
+ t: [batch_dim,] time tensor
107
+ """
108
+ t = expand_t_like_x(t, x)
109
+ drift, var = self.compute_drift(x, t)
110
+ velocity = var * score - drift
111
+ return velocity
112
+
113
+ def compute_mu_t(self, t, x0, x1):
114
+ """Compute the mean of time-dependent density p_t"""
115
+ t = expand_t_like_x(t, x1)
116
+ alpha_t, _ = self.compute_alpha_t(t)
117
+ sigma_t, _ = self.compute_sigma_t(t)
118
+ return alpha_t * x1 + sigma_t * x0
119
+
120
+ def compute_xt(self, t, x0, x1):
121
+ """Sample xt from time-dependent density p_t; rng is required"""
122
+ xt = self.compute_mu_t(t, x0, x1)
123
+ return xt
124
+
125
+ def compute_ut(self, t, x0, x1, xt):
126
+ """Compute the vector field corresponding to p_t"""
127
+ t = expand_t_like_x(t, x1)
128
+ _, d_alpha_t = self.compute_alpha_t(t)
129
+ _, d_sigma_t = self.compute_sigma_t(t)
130
+ return d_alpha_t * x1 + d_sigma_t * x0
131
+
132
+ def plan(self, t, x0, x1):
133
+ xt = self.compute_xt(t, x0, x1)
134
+ ut = self.compute_ut(t, x0, x1, xt)
135
+ return t, xt, ut
136
+
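+ # Illustrative check (added sketch, not from the original file): for this
+ # linear plan, xt = t * x1 + (1 - t) * x0 and ut = x1 - x0, e.g.:
+ #
+ # plan = ICPlan()
+ # x0, x1 = th.zeros(4, 2), th.ones(4, 2)
+ # t = th.full((4,), 0.25)
+ # _, xt, ut = plan.plan(t, x0, x1) # xt is all 0.25, ut is all 1.0
+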
137
+
138
+ class VPCPlan(ICPlan):
139
+ """class for VP path flow matching"""
140
+
141
+ def __init__(self, sigma_min=0.1, sigma_max=20.0):
142
+ self.sigma_min = sigma_min
143
+ self.sigma_max = sigma_max
144
+ self.log_mean_coeff = lambda t: -0.25 * ((1 - t) ** 2) * (self.sigma_max - self.sigma_min) - 0.5 * (1 - t) * self.sigma_min
145
+ self.d_log_mean_coeff = lambda t: 0.5 * (1 - t) * (self.sigma_max - self.sigma_min) + 0.5 * self.sigma_min
146
+
147
+
148
+ def compute_alpha_t(self, t):
149
+ """Compute coefficient of x1"""
150
+ alpha_t = self.log_mean_coeff(t)
151
+ alpha_t = th.exp(alpha_t)
152
+ d_alpha_t = alpha_t * self.d_log_mean_coeff(t)
153
+ return alpha_t, d_alpha_t
154
+
155
+ def compute_sigma_t(self, t):
156
+ """Compute coefficient of x0"""
157
+ p_sigma_t = 2 * self.log_mean_coeff(t)
158
+ sigma_t = th.sqrt(1 - th.exp(p_sigma_t))
159
+ d_sigma_t = th.exp(p_sigma_t) * (2 * self.d_log_mean_coeff(t)) / (-2 * sigma_t)
160
+ return sigma_t, d_sigma_t
161
+
162
+ def compute_d_alpha_alpha_ratio_t(self, t):
163
+ """Special purposed function for computing numerical stabled d_alpha_t / alpha_t"""
164
+ return self.d_log_mean_coeff(t)
165
+
166
+ def compute_drift(self, x, t):
167
+ """Compute the drift term of the SDE"""
168
+ t = expand_t_like_x(t, x)
169
+ beta_t = self.sigma_min + (1 - t) * (self.sigma_max - self.sigma_min)
170
+ return -0.5 * beta_t * x, beta_t / 2
171
+
172
+
173
+ class GVPCPlan(ICPlan):
174
+ def __init__(self, sigma=0.0):
175
+ super().__init__(sigma)
176
+
177
+ def compute_alpha_t(self, t):
178
+ """Compute coefficient of x1"""
179
+ alpha_t = th.sin(t * np.pi / 2)
180
+ d_alpha_t = np.pi / 2 * th.cos(t * np.pi / 2)
181
+ return alpha_t, d_alpha_t
182
+
183
+ def compute_sigma_t(self, t):
184
+ """Compute coefficient of x0"""
185
+ sigma_t = th.cos(t * np.pi / 2)
186
+ d_sigma_t = -np.pi / 2 * th.sin(t * np.pi / 2)
187
+ return sigma_t, d_sigma_t
188
+
189
+ def compute_d_alpha_alpha_ratio_t(self, t):
190
+ """Special purposed function for computing numerical stabled d_alpha_t / alpha_t"""
191
+ return np.pi / (2 * th.tan(t * np.pi / 2))
data/mdgen/transport/transport.py ADDED
@@ -0,0 +1,575 @@
1
+ # https://github.com/willisma/SiT/
2
+ import copy
3
+
4
+ import torch
5
+ import torch as th
6
+ import numpy as np
7
+
8
+ import enum
9
+
10
+ from . import path
11
+
12
+
13
+ def mean_flat(x, mask):
14
+ """
15
+ Take the masked mean over all non-batch dimensions.
16
+ """
17
+ return th.sum(x * mask, dim=list(range(1, len(x.size())))) / th.sum(mask, dim=list(range(1, len(x.size()))))
18
+
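+ # Illustrative example (added sketch, not from the original file): only
+ # unmasked positions contribute to the per-sample mean, e.g.:
+ #
+ # x = th.tensor([[1.0, 2.0], [3.0, 4.0]])
+ # mask = th.tensor([[1.0, 0.0], [1.0, 1.0]])
+ # mean_flat(x, mask) # tensor([1.0000, 3.5000])
+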
19
+
20
+ from .integrators import ode, sde
21
+
22
+
23
+ class ModelType(enum.Enum):
24
+ """
25
+ Which type of output the model predicts.
26
+ """
27
+
28
+ NOISE = enum.auto() # the model predicts epsilon
29
+ SCORE = enum.auto() # the model predicts \nabla \log p(x)
30
+ VELOCITY = enum.auto() # the model predicts v(x)
31
+
32
+
33
+ class PathType(enum.Enum):
34
+ """
35
+ Which type of path to use.
36
+ """
37
+
38
+ LINEAR = enum.auto()
39
+ GVP = enum.auto()
40
+ VP = enum.auto()
41
+
42
+
43
+ class WeightType(enum.Enum):
44
+ """
45
+ Which type of weighting to use.
46
+ """
47
+
48
+ NONE = enum.auto()
49
+ VELOCITY = enum.auto()
50
+ LIKELIHOOD = enum.auto()
51
+
52
+
53
+ def t_to_alpha(t, args):
54
+ """
55
+ Convert t to alpha for Dirichlet distribution.
56
+ """
57
+
58
+ return 1 * (1 - t) + t * args.alpha_max, (args.alpha_max - 1)
59
+
60
+
61
+ class Transport:
62
+
63
+ def __init__(
64
+ self,
65
+ *,
66
+ args,
67
+ model_type,
68
+ path_type,
69
+ loss_type,
70
+ train_eps,
71
+ sample_eps,
72
+ ):
73
+ path_options = {
74
+ PathType.LINEAR: path.ICPlan,
75
+ PathType.GVP: path.GVPCPlan,
76
+ PathType.VP: path.VPCPlan,
77
+ }
78
+ self.args = args
79
+ self.loss_type = loss_type
80
+ self.model_type = model_type
81
+ self.path_sampler = path_options[path_type]()
82
+ self.train_eps = train_eps
83
+ self.sample_eps = sample_eps
84
+
85
+ def prior_logp(self, z):
86
+ '''
87
+ Standard multivariate normal prior
88
+ Assume z is batched
89
+ '''
90
+ shape = th.tensor(z.size())
91
+ N = th.prod(shape[1:])
92
+ _fn = lambda x: -N / 2. * np.log(2 * np.pi) - th.sum(x ** 2) / 2.
93
+ return th.vmap(_fn)(z)
94
+
95
+ def check_interval(
96
+ self,
97
+ train_eps,
98
+ sample_eps,
99
+ *,
100
+ diffusion_form="SBDM",
101
+ sde=False,
102
+ reverse=False,
103
+ eval=False,
104
+ last_step_size=0.0,
105
+ ):
106
+ t0 = 0
107
+ t1 = 1
108
+ eps = train_eps if not eval else sample_eps
109
+ if (type(self.path_sampler) in [path.VPCPlan]):
110
+
111
+ t1 = 1 - eps if (not sde or last_step_size == 0) else 1 - last_step_size
112
+
113
+ elif (type(self.path_sampler) in [path.ICPlan, path.GVPCPlan]) \
114
+ and (
115
+ self.model_type != ModelType.VELOCITY or sde): # avoid numerical issue by taking a first
116
+ # semi-implicit step
117
+
118
+ t0 = eps if (diffusion_form == "SBDM" and sde) or self.model_type != ModelType.VELOCITY else 0
119
+ t1 = 1 - eps if (not sde or last_step_size == 0) else 1 - last_step_size
120
+
121
+ if reverse:
122
+ t0, t1 = 1 - t0, 1 - t1
123
+
124
+ return t0, t1
125
+
126
+ def sample(self, x1):
127
+ """Sampling x0 & t based on shape of x1 (if needed)
128
+ Args:
129
+ x1 - data point; [batch, *dim]
130
+ """
131
+
132
+ x0 = th.randn_like(x1)
133
+ t0, t1 = self.check_interval(self.train_eps, self.sample_eps)
134
+ t = th.rand((x1.shape[0],)) * (t1 - t0) + t0
135
+ t = t.to(x1)
136
+ return t, x0, x1
137
+
138
+ def training_losses(
139
+ self,
140
+ model,
141
+ x1, # target tokens
142
+ aatype1=None, # target aatype
143
+ mask=None,
144
+ model_kwargs=None
145
+ ):
146
+ """Loss for training the score model
147
+ Args:
148
+ - model: backbone model; could be score, noise, or velocity
149
+ - x1: datapoint
150
+ - model_kwargs: additional arguments for the model
151
+ """
152
+
153
+ if model_kwargs == None:
154
+ model_kwargs = {}
155
+
156
+ t, x0, x1 = self.sample(x1)
157
+ t, xt, ut = self.path_sampler.plan(t, x0, x1)
158
+
159
+
160
+
161
+ if self.args.design: # alterations made to the original SiT code to include Dirichlet flow matching for design
162
+ assert self.model_type == ModelType.VELOCITY
163
+ if self.args.dynamic_mpnn or self.args.mpnn:
164
+ t = torch.ones_like(t, device=t.device)
165
+ x_d = torch.zeros(xt.shape[0], xt.shape[2], 20, device=xt.device)
166
+ else:
167
+ seq_one_hot = th.nn.functional.one_hot(aatype1, num_classes=20)
168
+ alphas, _ = t_to_alpha(t, self.args)
169
+ alphas = 1 + seq_one_hot * (alphas[:, None, None] - 1)
170
+ x_d = th.distributions.Dirichlet(alphas).sample()
171
+ x_d = x_d[:, None, :, :].expand(-1, xt.shape[1], -1, -1)
172
+ xt = th.cat([xt, x_d], dim=-1)
173
+
174
+ model_output = model(xt, t, **model_kwargs)
175
+ B, *_, C = xt.shape
176
+ if not (self.args.dynamic_mpnn or self.args.mpnn):
177
+ assert model_output.size() == (B, *xt.size()[1:-1], C)
178
+
179
+ if self.args.design:
180
+ if not (self.args.dynamic_mpnn or self.args.mpnn):
181
+ logits = model_output[:, :, :, -20:]
182
+ model_output = model_output[:, :, :, :-20]
183
+
184
+ terms = {}
185
+ terms['t'] = t
186
+ terms['pred'] = model_output
187
+ if not (self.args.dynamic_mpnn or self.args.mpnn):
188
+ if self.model_type == ModelType.VELOCITY:
189
+ terms['loss'] = mean_flat(((model_output - ut) ** 2), mask)
190
+ else:
191
+ _, drift_var = self.path_sampler.compute_drift(xt, t)
192
+ sigma_t, _ = self.path_sampler.compute_sigma_t(path.expand_t_like_x(t, xt))
193
+ if self.loss_type in [WeightType.VELOCITY]:
194
+ weight = (drift_var / sigma_t) ** 2
195
+ elif self.loss_type in [WeightType.LIKELIHOOD]:
196
+ weight = drift_var / (sigma_t ** 2)
197
+ elif self.loss_type in [WeightType.NONE]:
198
+ weight = 1
199
+ else:
200
+ raise NotImplementedError()
201
+
202
+ if self.model_type == ModelType.NOISE:
203
+ terms['loss'] = mean_flat(weight * ((model_output - x0) ** 2), mask)
204
+ else:
205
+ terms['loss'] = mean_flat(weight * ((model_output * sigma_t + x0) ** 2), mask)
206
+
207
+ # more changes for dirichlet flow matching
208
+
209
+ if self.args.design:
210
+ if self.args.dynamic_mpnn or self.args.mpnn:
211
+ logits = model_output
212
+ terms['loss_continuous'] = torch.tensor(torch.nan, device=xt.device)
213
+ loss_d = th.nn.functional.cross_entropy(logits.reshape(-1,20), aatype1.reshape(-1))
214
+ terms['loss'] = loss_d
215
+ else:
216
+ terms['loss_continuous'] = terms['loss']
217
+ seq_expanded = aatype1[:, None, :].expand(-1, xt.shape[1], -1)
218
+ loss_d = th.nn.functional.cross_entropy(logits.reshape(-1, 20), seq_expanded.reshape(-1))
219
+ terms['loss'] = loss_d * self.args.discrete_loss_weight + (1 - self.args.discrete_loss_weight) * terms['loss']
220
+ terms['loss_discrete'] = loss_d
221
+ terms['logits'] = logits
222
+
223
+ return terms
224
+
225
+ def get_drift(
226
+ self
227
+ ):
228
+ """member function for obtaining the drift of the probability flow ODE"""
229
+
230
+ def score_ode(x, t, model, **model_kwargs):
231
+ drift_mean, drift_var = self.path_sampler.compute_drift(x, t)
232
+ model_output = model(x, t, **model_kwargs)
233
+ return (-drift_mean + drift_var * model_output) # by change of variable
234
+
235
+ def noise_ode(x, t, model, **model_kwargs):
236
+ drift_mean, drift_var = self.path_sampler.compute_drift(x, t)
237
+ sigma_t, _ = self.path_sampler.compute_sigma_t(path.expand_t_like_x(t, x))
238
+ model_output = model(x, t, **model_kwargs)
239
+ score = model_output / -sigma_t
240
+ return (-drift_mean + drift_var * score)
241
+
242
+ def velocity_ode(x, t, model, **model_kwargs):
243
+ model_output = model(x, t, **model_kwargs)
244
+ return model_output
245
+
246
+ if self.model_type == ModelType.NOISE:
247
+ drift_fn = noise_ode
248
+ elif self.model_type == ModelType.SCORE:
249
+ drift_fn = score_ode
250
+ else:
251
+ drift_fn = velocity_ode
252
+
253
+ def body_fn(x, t, model, **model_kwargs):
254
+ model_output = drift_fn(x, t, model, **model_kwargs)
255
+ assert model_output.shape == x.shape, "Output shape from ODE solver must match input shape"
256
+ return model_output
257
+
258
+ return body_fn
259
+
260
+ def get_score(
261
+ self,
262
+ ):
263
+ """member function for obtaining score of
264
+ x_t = alpha_t * x + sigma_t * eps"""
265
+ if self.model_type == ModelType.NOISE:
266
+ score_fn = lambda x, t, model, **kwargs: model(x, t, **kwargs) / - \
267
+ self.path_sampler.compute_sigma_t(path.expand_t_like_x(t, x))[0]
268
+ elif self.model_type == ModelType.SCORE:
269
+ score_fn = lambda x, t, model, **kwargs: model(x, t, **kwargs)
270
+ elif self.model_type == ModelType.VELOCITY:
271
+ score_fn = lambda x, t, model, **kwargs: self.path_sampler.get_score_from_velocity(model(x, t, **kwargs), x,
272
+ t)
273
+ else:
274
+ raise NotImplementedError()
275
+
276
+ return score_fn
277
+
278
+
279
+ class Sampler:
280
+ """Sampler class for the transport model"""
281
+
282
+ def __init__(
283
+ self,
284
+ transport,
285
+ ):
286
+ """Constructor for a general sampler; supporting different sampling methods
287
+ Args:
288
+ - transport: an tranport object specify model prediction & interpolant type
289
+ """
290
+
291
+ self.transport = transport
292
+ self.drift = self.transport.get_drift()
293
+ self.score = self.transport.get_score()
294
+
295
+ def __get_sde_diffusion_and_drift(
296
+ self,
297
+ *,
298
+ diffusion_form="SBDM",
299
+ diffusion_norm=1.0,
300
+ ):
301
+
302
+ def diffusion_fn(x, t):
303
+ diffusion = self.transport.path_sampler.compute_diffusion(x, t, form=diffusion_form, norm=diffusion_norm)
304
+ return diffusion
305
+
306
+ sde_drift = \
307
+ lambda x, t, model, **kwargs: \
308
+ self.drift(x, t, model, **kwargs) + diffusion_fn(x, t) * self.score(x, t, model, **kwargs)
309
+
310
+ sde_diffusion = diffusion_fn
311
+
312
+ return sde_drift, sde_diffusion
313
+
314
+ def __get_last_step(
315
+ self,
316
+ sde_drift,
317
+ *,
318
+ last_step,
319
+ last_step_size,
320
+ ):
321
+ """Get the last step function of the SDE solver"""
322
+
323
+ if last_step is None:
324
+ last_step_fn = \
325
+ lambda x, t, model, **model_kwargs: \
326
+ x
327
+ elif last_step == "Mean":
328
+ last_step_fn = \
329
+ lambda x, t, model, **model_kwargs: \
330
+ x + sde_drift(x, t, model, **model_kwargs) * last_step_size
331
+ elif last_step == "Tweedie":
332
+ alpha = self.transport.path_sampler.compute_alpha_t # simple aliasing; the original name was too long
333
+ sigma = self.transport.path_sampler.compute_sigma_t
334
+ last_step_fn = \
335
+ lambda x, t, model, **model_kwargs: \
336
+ x / alpha(t)[0][0] + (sigma(t)[0][0] ** 2) / alpha(t)[0][0] * self.score(x, t, model,
337
+ **model_kwargs)
338
+ elif last_step == "Euler":
339
+ last_step_fn = \
340
+ lambda x, t, model, **model_kwargs: \
341
+ x + self.drift(x, t, model, **model_kwargs) * last_step_size
342
+ else:
343
+ raise NotImplementedError()
344
+
345
+ return last_step_fn
346
+
347
+ def sample_sde(
348
+ self,
349
+ *,
350
+ sampling_method="Euler",
351
+ diffusion_form="SBDM",
352
+ diffusion_norm=1.0,
353
+ last_step="Mean",
354
+ last_step_size=0.04,
355
+ num_steps=250,
356
+ ):
357
+ """returns a sampling function with given SDE settings
358
+ Args:
359
+ - sampling_method: type of sampler used in solving the SDE; default to be Euler-Maruyama
360
+ - diffusion_form: function form of diffusion coefficient; default to be matching SBDM
361
+ - diffusion_norm: function magnitude of diffusion coefficient; default to 1
362
+ - last_step: type of the last step; default to identity
363
+ - last_step_size: size of the last step; default to match the stride of 250 steps over [0,1]
364
+ - num_steps: total integration step of SDE
365
+ """
366
+
367
+ if last_step is None:
368
+ last_step_size = 0.0
369
+
370
+ sde_drift, sde_diffusion = self.__get_sde_diffusion_and_drift(
371
+ diffusion_form=diffusion_form,
372
+ diffusion_norm=diffusion_norm,
373
+ )
374
+
375
+ t0, t1 = self.transport.check_interval(
376
+ self.transport.train_eps,
377
+ self.transport.sample_eps,
378
+ diffusion_form=diffusion_form,
379
+ sde=True,
380
+ eval=True,
381
+ reverse=False,
382
+ last_step_size=last_step_size,
383
+ )
384
+
385
+ _sde = sde(
386
+ sde_drift,
387
+ sde_diffusion,
388
+ t0=t0,
389
+ t1=t1,
390
+ num_steps=num_steps,
391
+ sampler_type=sampling_method
392
+ )
393
+
394
+ last_step_fn = self.__get_last_step(sde_drift, last_step=last_step, last_step_size=last_step_size)
395
+
396
+ def _sample(init, model, **model_kwargs):
397
+ xs = _sde.sample(init, model, **model_kwargs)
398
+ ts = th.ones(init.size(0), device=init.device) * t1
399
+ x = last_step_fn(xs[-1], ts, model, **model_kwargs)
400
+ xs.append(x)
401
+
402
+ assert len(xs) == num_steps, "Number of samples does not match the number of steps"
403
+
404
+ return xs
405
+
406
+ return _sample
407
+
408
+ def sample_ode(
409
+ self,
410
+ *,
411
+ sampling_method="dopri5",
412
+ num_steps=50,
413
+ atol=1e-6,
414
+ rtol=1e-3,
415
+ reverse=False,
416
+ ):
417
+ """returns a sampling function with given ODE settings
418
+ Args:
419
+ - sampling_method: type of sampler used in solving the ODE; default to be Dopri5
420
+ - num_steps:
421
+ - fixed solver (Euler, Heun): the actual number of integration steps performed
422
+ - adaptive solver (Dopri5): the number of datapoints saved during integration; produced by interpolation
423
+ - atol: absolute error tolerance for the solver
424
+ - rtol: relative error tolerance for the solver
425
+ - reverse: whether solving the ODE in reverse (data to noise); default to False
426
+ """
427
+ if reverse:
428
+ drift = lambda x, t, model, **kwargs: self.drift(x, th.ones_like(t) * (1 - t), model, **kwargs)
429
+ else:
430
+ drift = self.drift
431
+
432
+ t0, t1 = self.transport.check_interval(
433
+ self.transport.train_eps,
434
+ self.transport.sample_eps,
435
+ sde=False,
436
+ eval=True,
437
+ reverse=reverse,
438
+ last_step_size=0.0,
439
+ )
440
+
441
+ _ode = ode(
442
+ drift=drift,
443
+ t0=t0,
444
+ t1=t1,
445
+ sampler_type=sampling_method,
446
+ num_steps=num_steps,
447
+ atol=atol,
448
+ rtol=rtol,
449
+ )
450
+
451
+ return _ode.sample
452
+
453
+ def sample_ode_likelihood(
454
+ self,
455
+ *,
456
+ sampling_method="dopri5",
457
+ num_steps=50,
458
+ atol=1e-6,
459
+ rtol=1e-3,
460
+ ):
461
+
462
+ """returns a sampling function for calculating likelihood with given ODE settings
463
+ Args:
464
+ - sampling_method: type of sampler used in solving the ODE; default to be Dopri5
465
+ - num_steps:
466
+ - fixed solver (Euler, Heun): the actual number of integration steps performed
467
+ - adaptive solver (Dopri5): the number of datapoints saved during integration; produced by interpolation
468
+ - atol: absolute error tolerance for the solver
469
+ - rtol: relative error tolerance for the solver
470
+ """
471
+
472
+ def _likelihood_drift(x, t, model, **model_kwargs):
473
+ x, _ = x
474
+ eps = th.randint(2, x.size(), dtype=th.float, device=x.device) * 2 - 1
475
+ t = th.ones_like(t) * (1 - t)
476
+ with th.enable_grad():
477
+ x.requires_grad = True
478
+ grad = th.autograd.grad(th.sum(self.drift(x, t, model, **model_kwargs) * eps), x)[0]
479
+ logp_grad = th.sum(grad * eps, dim=tuple(range(1, len(x.size()))))
480
+ drift = self.drift(x, t, model, **model_kwargs)
481
+ return (-drift, logp_grad)
482
+
483
+ t0, t1 = self.transport.check_interval(
484
+ self.transport.train_eps,
485
+ self.transport.sample_eps,
486
+ sde=False,
487
+ eval=True,
488
+ reverse=False,
489
+ last_step_size=0.0,
490
+ )
491
+
492
+ _ode = ode(
493
+ drift=_likelihood_drift,
494
+ t0=t0,
495
+ t1=t1,
496
+ sampler_type=sampling_method,
497
+ num_steps=num_steps,
498
+ atol=atol,
499
+ rtol=rtol,
500
+ )
501
+
502
+ def _sample_fn(x, model, **model_kwargs):
503
+ init_logp = th.zeros(x.size(0)).to(x)
504
+ input = (x, init_logp)
505
+ drift, delta_logp = _ode.sample(input, model, **model_kwargs)
506
+ drift, delta_logp = drift[-1], delta_logp[-1]
507
+ prior_logp = self.transport.prior_logp(drift)
508
+ logp = prior_logp - delta_logp
509
+ return logp, drift
510
+
511
+ return _sample_fn
512
+
513
+
514
+ def create_transport(
515
+ args,
516
+ path_type='Linear',
517
+ prediction="velocity",
518
+ loss_weight=None,
519
+ train_eps=None,
520
+ sample_eps=None,
521
+ ):
522
+ """function for creating Transport object
523
+ **Note**: model prediction defaults to velocity
524
+ Args:
525
+ - path_type: type of path to use; default to linear
526
+ - learn_score: set model prediction to score
527
+ - learn_noise: set model prediction to noise
528
+ - velocity_weighted: weight loss by velocity weight
529
+ - likelihood_weighted: weight loss by likelihood weight
530
+ - train_eps: small epsilon for avoiding instability during training
531
+ - sample_eps: small epsilon for avoiding instability during sampling
532
+ """
533
+
534
+ if prediction == "noise":
535
+ model_type = ModelType.NOISE
536
+ elif prediction == "score":
537
+ model_type = ModelType.SCORE
538
+ else:
539
+ model_type = ModelType.VELOCITY
540
+
541
+ if loss_weight == "velocity":
542
+ loss_type = WeightType.VELOCITY
543
+ elif loss_weight == "likelihood":
544
+ loss_type = WeightType.LIKELIHOOD
545
+ else:
546
+ loss_type = WeightType.NONE
547
+
548
+ path_choice = {
549
+ "Linear": PathType.LINEAR,
550
+ "GVP": PathType.GVP,
551
+ "VP": PathType.VP,
552
+ }
553
+
554
+ path_type = path_choice[path_type]
555
+ if (path_type in [PathType.VP]):
556
+ train_eps = 1e-5 if train_eps is None else train_eps
557
+ sample_eps = 1e-3 if sample_eps is None else sample_eps
558
+ elif (path_type in [PathType.GVP, PathType.LINEAR] and model_type != ModelType.VELOCITY):
559
+ train_eps = 1e-3 if train_eps is None else train_eps
560
+ sample_eps = 1e-3 if sample_eps is None else sample_eps
561
+ else: # velocity & [GVP, LINEAR] is stable everywhere
562
+ train_eps = 0
563
+ sample_eps = 0
564
+
565
+ # create flow state
566
+ state = Transport(
567
+ args=args,
568
+ model_type=model_type,
569
+ path_type=path_type,
570
+ loss_type=loss_type,
571
+ train_eps=train_eps,
572
+ sample_eps=sample_eps,
573
+ )
574
+
575
+ return state
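Hedged usage sketch (editorial; argument values are illustrative, and `noise` / `model_fn` are hypothetical placeholders): this mirrors how `NewMDGenWrapper` in `mdgen/wrapper.py` below wires these pieces together.

    # transport = create_transport(args, path_type='Linear', prediction='velocity')
    # sampler = Sampler(transport)
    # sample_fn = sampler.sample_ode(sampling_method='euler', num_steps=50)
    # xs = sample_fn(noise, model_fn)   # xs[-1] is the final sample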
data/mdgen/utils.py ADDED
@@ -0,0 +1,101 @@
+ import numpy as np
+ import scipy
+ import torch
+ from . import protein
+ from .geometry import atom14_to_atom37
+ 
+ 
+ def get_offsets(ref_frame, rigids):
+     B, T, L = rigids.shape
+     if T > 500000:
+         # compose in two chunks to avoid running out of memory on very long trajectories
+         offsets1 = ref_frame.invert().compose(rigids[:, :500000]).to_tensor_7()
+         offsets2 = ref_frame.invert().compose(rigids[:, 500000:]).to_tensor_7()
+         return torch.cat([offsets1, offsets2], 1)
+     else:
+         return ref_frame.invert().compose(rigids).to_tensor_7()
+ 
+ 
+ def simplex_proj(seq):
+     """Euclidean projection onto the probability simplex; algorithm from
+     https://arxiv.org/abs/1309.1541 (Weiran Wang, Miguel Á. Carreira-Perpiñán)."""
+     Y = seq.reshape(-1, seq.shape[-1])
+     N, K = Y.shape
+     X, _ = torch.sort(Y, dim=-1, descending=True)
+     X_cumsum = torch.cumsum(X, dim=-1) - 1
+     div_seq = torch.arange(1, K + 1, dtype=Y.dtype, device=Y.device)
+     Xtmp = X_cumsum / div_seq.unsqueeze(0)
+ 
+     greater_than_Xtmp = (X > Xtmp).sum(dim=1, keepdim=True)
+     row_indices = torch.arange(N, dtype=torch.long, device=Y.device).unsqueeze(1)
+     selected_Xtmp = Xtmp[row_indices, greater_than_Xtmp - 1]
+ 
+     X = torch.max(Y - selected_Xtmp, torch.zeros_like(Y))
+     return X.view(seq.shape)
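Quick sanity check for `simplex_proj` (editorial; assumes the function above is in scope):

    import torch
    x = torch.tensor([[0.4, -0.2, 1.3]])
    p = simplex_proj(x)
    print(p, p.sum(-1))   # entries are non-negative and each row sums to 1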
+ 
+ 
+ class DirichletConditionalFlow:
+     def __init__(self, K=20, alpha_min=1, alpha_max=100, alpha_spacing=0.01):
+         self.alphas = np.arange(alpha_min, alpha_max + alpha_spacing, alpha_spacing)
+         self.beta_cdfs = []
+         self.bs = np.linspace(0, 1, 1000)
+         for alph in self.alphas:
+             self.beta_cdfs.append(scipy.special.betainc(alph, K - 1, self.bs))
+         self.beta_cdfs = np.array(self.beta_cdfs)
+         self.beta_cdfs_derivative = np.diff(self.beta_cdfs, axis=0) / alpha_spacing
+         self.alpha_spacing = alpha_spacing
+         self.K = K
+ 
+     def c_factor(self, bs, alpha):
+         # If bs is close to the edge of the simplex in one of its entries, we want the
+         # c factor to be 0 for high alphas. That is the rationale for returning 0s in
+         # the case of an overflow.
+         beta = scipy.special.beta(alpha, self.K - 1)
+         beta_div = np.where(bs < 1, beta / ((1 - bs) ** (self.K - 1)), 0)
+         beta_div_full = np.where((bs ** (alpha - 1)) > 0, beta_div / (bs ** (alpha - 1)), 0)
+ 
+         I_func = self.beta_cdfs_derivative[np.argmin(np.abs(alpha - self.alphas))]
+         interp = -np.interp(bs, self.bs, I_func)
+ 
+         final = interp * beta_div_full
+         return final
+ 
+ 
+ def atom14_to_pdb(atom14, aatype, path):
+     prots = []
+     for i, pos in enumerate(atom14):
+         pos = atom14_to_atom37(pos, aatype)
+         prots.append(create_full_prot(pos, aatype=aatype))
+     with open(path, 'w') as f:
+         f.write(prots_to_pdb(prots))
+ 
+ 
+ def create_full_prot(
+         atom37: np.ndarray,
+         aatype=None,
+         b_factors=None,
+ ):
+     assert atom37.ndim == 3
+     assert atom37.shape[-1] == 3
+     assert atom37.shape[-2] == 37
+     n = atom37.shape[0]
+     residue_index = np.arange(n)
+     atom37_mask = np.sum(np.abs(atom37), axis=-1) > 1e-7
+     if b_factors is None:
+         b_factors = np.zeros([n, 37])
+     if aatype is None:
+         aatype = np.zeros(n, dtype=int)
+     chain_index = np.zeros(n, dtype=int)
+     return protein.Protein(
+         atom_positions=atom37,
+         atom_mask=atom37_mask,
+         aatype=aatype,
+         residue_index=residue_index,
+         b_factors=b_factors,
+         chain_index=chain_index,
+     )
+ 
+ 
+ def prots_to_pdb(prots):
+     ss = ''
+     for i, prot in enumerate(prots):
+         ss += f'MODEL {i}\n'
+         prot = protein.to_pdb(prot)
+         ss += '\n'.join(prot.split('\n')[2:-3])
+         ss += '\nENDMDL\n'
+     return ss
+ 
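Illustrative end-to-end use of the helpers above (editorial; the shapes and all-zero coordinates are hypothetical, so the resulting atom mask would be empty): `atom14_to_pdb` writes one MODEL/ENDMDL block per frame.

    # import numpy as np
    # atom14 = np.zeros((3, 4, 14, 3))        # (n_frames, n_residues, 14, 3); aatype 0 == alanine
    # aatype = np.zeros(4, dtype=int)
    # atom14_to_pdb(atom14, aatype, 'toy.pdb')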
data/mdgen/wrapper.py ADDED
@@ -0,0 +1,507 @@
+ from .ema import ExponentialMovingAverage
+ from .logger import get_logger
+ from .residue_constants import aatype_to_str_sequence
+ 
+ logger = get_logger(__name__)
+ 
+ import pytorch_lightning as pl
+ import torch, time, os, wandb
+ import numpy as np
+ import pandas as pd
+ from .rigid_utils import Rigid, Rotation
+ from collections import defaultdict
+ from functools import partial
+ 
+ from .model.latent_model import LatentMDGenModel
+ from .transport.transport import create_transport, Sampler
+ from .utils import get_offsets, atom14_to_pdb
+ from .tensor_utils import tensor_tree_map
+ from .geometry import frames_torsions_to_atom14, atom37_to_atom14
+ 
+ 
+ def gather_log(log, world_size):
+     if world_size == 1:
+         return log
+     log_list = [None] * world_size
+     torch.distributed.all_gather_object(log_list, log)
+     log = {key: sum([l[key] for l in log_list], []) for key in log}
+     return log
+ 
+ 
+ def get_log_mean(log):
+     out = {}
+     for key in log:
+         try:
+             out[key] = np.nanmean(log[key])
+         except:
+             pass
+     return out
+ 
+ 
+ # Residue indices for tetrapeptide inpainting/design: the two middle residues are
+ # designed, the two terminal residues condition the model.
+ DESIGN_IDX = [1, 2]
+ COND_IDX = [0, 3]
+ DESIGN_MAP_TO_COND = [0, 0, 3, 3]
+ 
+ 
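A toy illustration of the index convention above (editorial): for a tetrapeptide, the designed middle residues are replaced by the unknown-residue token 20 before being passed to the model, exactly as `prep_batch` does below.

    import torch
    seq = torch.tensor([3, 7, 11, 15])          # aatype codes for residues 0..3
    design_idx = [1, 2]                         # mirrors DESIGN_IDX
    aatype_mask = torch.ones_like(seq)
    aatype_mask[design_idx] = 0
    print(torch.where(aatype_mask.bool(), seq, 20))   # tensor([ 3, 20, 20, 15])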
+ class Wrapper(pl.LightningModule):
+ 
+     def __init__(self, args):
+         super().__init__()
+         self.save_hyperparameters()
+         self.args = args
+         self._log = defaultdict(list)
+         self.last_log_time = time.time()
+         self.iter_step = 0
+ 
+     def log(self, key, data):
+         if isinstance(data, torch.Tensor):
+             data = data.mean().item()
+         log = self._log
+         if self.stage == 'train' or self.args.validate:
+             log["iter_" + key].append(data)
+         log[self.stage + "_" + key].append(data)
+ 
+     def load_ema_weights(self):
+         # model.state_dict() contains references to model weights rather
+         # than copies. Therefore, we need to clone them before calling
+         # load_state_dict().
+         logger.info('Loading EMA weights')
+         clone_param = lambda t: t.detach().clone()
+         self.cached_weights = tensor_tree_map(clone_param, self.model.state_dict())
+         self.model.load_state_dict(self.ema.state_dict()["params"])
+ 
+     def restore_cached_weights(self):
+         logger.info('Restoring cached weights')
+         self.model.load_state_dict(self.cached_weights)
+         self.cached_weights = None
+ 
+     def on_before_zero_grad(self, *args, **kwargs):
+         if self.args.ema:
+             self.ema.update(self.model)
+ 
+     def training_step(self, batch, batch_idx):
+         if self.args.ema:
+             if self.ema.device != self.device:
+                 self.ema.to(self.device)
+         return self.general_step(batch, stage='train')
+ 
+     def validation_step(self, batch, batch_idx):
+         if self.args.ema:
+             if self.ema.device != self.device:
+                 self.ema.to(self.device)
+             if self.cached_weights is None:
+                 self.load_ema_weights()
+ 
+         self.general_step(batch, stage='val')
+         self.validation_step_extra(batch, batch_idx)
+         if self.args.validate and self.iter_step % self.args.print_freq == 0:
+             self.print_log()
+ 
+     def validation_step_extra(self, batch, batch_idx):
+         pass
+ 
+     def on_train_epoch_end(self):
+         self.print_log(prefix='train', save=False)
+ 
+     def on_validation_epoch_end(self):
+         if self.args.ema:
+             self.restore_cached_weights()
+         self.print_log(prefix='val', save=False)
+ 
+     def on_before_optimizer_step(self, optimizer):
+         if (self.trainer.global_step + 1) % self.args.print_freq == 0:
+             self.print_log()
+ 
+         if self.args.check_grad:
+             for name, p in self.model.named_parameters():
+                 if p.grad is None:
+                     logger.warning(f"Param {name} has no grad")
+ 
+     def on_load_checkpoint(self, checkpoint):
+         logger.info('Loading EMA state dict')
+         if self.args.ema:
+             ema = checkpoint["ema"]
+             self.ema.load_state_dict(ema)
+ 
+     def on_save_checkpoint(self, checkpoint):
+         if self.args.ema:
+             if self.cached_weights is not None:
+                 self.restore_cached_weights()
+             checkpoint["ema"] = self.ema.state_dict()
+ 
+     def print_log(self, prefix='iter', save=False, extra_logs=None):
+         log = self._log
+         log = {key: log[key] for key in log if f"{prefix}_" in key}
+         log = gather_log(log, self.trainer.world_size)
+         mean_log = get_log_mean(log)
+ 
+         mean_log.update({
+             'epoch': self.trainer.current_epoch,
+             'trainer_step': self.trainer.global_step + int(prefix == 'iter'),
+             'iter_step': self.iter_step,
+             f'{prefix}_count': len(log[next(iter(log))]),
+         })
+         if extra_logs:
+             mean_log.update(extra_logs)
+         try:
+             for param_group in self.optimizers().optimizer.param_groups:
+                 mean_log['lr'] = param_group['lr']
+         except:
+             pass
+ 
+         if self.trainer.is_global_zero:
+             logger.info(str(mean_log))
+             if self.args.wandb:
+                 wandb.log(mean_log)
+             if save:
+                 path = os.path.join(
+                     os.environ["MODEL_DIR"],
+                     f"{prefix}_{self.trainer.current_epoch}.csv"
+                 )
+                 pd.DataFrame(log).to_csv(path)
+         for key in list(log.keys()):
+             if f"{prefix}_" in key:
+                 del self._log[key]
+ 
+     def configure_optimizers(self):
+         cls = torch.optim.AdamW if self.args.adamW else torch.optim.Adam
+         optimizer = cls(
+             filter(lambda p: p.requires_grad, self.model.parameters()), lr=self.args.lr,
+         )
+         return optimizer
+ 
+ 
+ class NewMDGenWrapper(Wrapper):
+     def __init__(self, args):
+         super().__init__(args)
+         for key in [
+             'inpainting',
+             'no_torsion',
+             'hyena',
+             'no_aa_emb',
+             'supervise_all_torsions',
+             'supervise_no_torsions',
+             'design_key_frames',
+             'no_design_torsion',
+             'cond_interval',
+             'mpnn',
+             'dynamic_mpnn',
+             'no_offsets',
+             'no_frames',
+         ]:
+             if not hasattr(args, key):
+                 setattr(args, key, False)
+         # args.latent_dim = 7 if not self.args.tps_condition else 14
+         latent_dim = 21 if not (self.args.tps_condition or self.args.inpainting or self.args.dynamic_mpnn) else 28
+         if args.design:
+             latent_dim += 20
+         if args.no_frames:
+             latent_dim = 111
+ 
+         self.latent_dim = latent_dim
+         self.model = LatentMDGenModel(args, latent_dim)
+ 
+         self.transport = create_transport(
+             args,
+             args.path_type,
+             args.prediction,
+             None,  # args.loss_weight,
+             # args.train_eps,
+             # args.sample_eps,
+         )  # default: velocity
+         self.transport_sampler = Sampler(self.transport)
+ 
+         if not hasattr(args, 'ema'):
+             args.ema = False
+         if args.ema:
+             self.ema = ExponentialMovingAverage(
+                 model=self.model, decay=args.ema_decay
+             )
+             self.cached_weights = None
+ 
+     def prep_hyena_batch(self, batch):
+         B, T, L, _ = batch['latents'].shape
+         rigids = Rigid(trans=batch['trans'], rots=Rotation(rot_mats=batch['rots']))
+ 
+         ########
+         cond_mask = torch.zeros(B, T, L, dtype=int, device=self.device)
+         if self.args.sim_condition:
+             cond_mask[:, 0] = 1
+         if self.args.tps_condition:
+             cond_mask[:, 0] = cond_mask[:, -1] = 1
+         if self.args.cond_interval:
+             cond_mask[:, ::self.args.cond_interval] = 1
+         if self.args.inpainting or self.args.dynamic_mpnn or self.args.mpnn:
+             cond_mask[:, :, COND_IDX] = 1
+ 
+         aatype_mask = torch.ones_like(batch['seqres'])
+         if self.args.design:
+             aatype_mask[:, DESIGN_IDX] = 0
+         ########
+         return {
+             'latents': batch['latents'].float(),
+             'loss_mask': batch['loss_mask'].unsqueeze(1).expand(-1, T, -1, -1),
+             'model_kwargs': {
+                 'start_frames': rigids,
+                 'mask': batch['mask'].unsqueeze(1).expand(-1, T, -1),
+                 'aatype': torch.where(aatype_mask.bool(), batch['seqres'], 20),
+                 'x_cond': torch.where(cond_mask.unsqueeze(-1).bool(), batch['latents'].float(), 0.0),
+                 'x_cond_mask': cond_mask,
+             }
+         }
+ 
+     def prep_batch_no_frames(self, batch):
+ 
+         B, T, L, _, _ = batch['atom37'].shape
+ 
+         latents = batch['atom37'].reshape(B, T, L, 111)
+         mask = batch['mask'][:, None, :, 1].expand(-1, T, -1)
+ 
+         loss_mask = batch['mask'][:, None, :, :, None].expand(-1, T, -1, -1, 3)
+         loss_mask = loss_mask.reshape(B, T, L, 111)
+ 
+         ########
+         cond_mask = torch.zeros(B, T, L, dtype=int, device=mask.device)
+         if self.args.sim_condition:
+             cond_mask[:, 0] = 1
+ 
+         aatype_mask = torch.ones_like(batch['seqres'])
+ 
+         return {
+             'latents': latents,
+             'loss_mask': loss_mask,
+             'model_kwargs': {
+                 'mask': mask,
+                 'aatype': torch.where(aatype_mask.bool(), batch['seqres'], 20),
+                 'x_cond': torch.where(cond_mask.unsqueeze(-1).bool(), latents, 0.0),
+                 'x_cond_mask': cond_mask,
+             }
+         }
+ 
+     def prep_batch(self, batch):
+ 
+         if self.args.no_frames:
+             return self.prep_batch_no_frames(batch)
+ 
+         # if self.args.hyena:
+         if 'latents' in batch:
+             return self.prep_hyena_batch(batch)
+ 
+         rigids = Rigid(
+             trans=batch['trans'],
+             rots=Rotation(rot_mats=batch['rots'])
+         )  # B, T, L
+         B, T, L = rigids.shape
+         if self.args.design_key_frames:
+             rigids = Rigid.cat([
+                 rigids[:, :1, DESIGN_MAP_TO_COND],  # replace designed rototranslations in the key frames
+                 rigids[:, 1:-1],
+                 rigids[:, -1:, DESIGN_MAP_TO_COND]
+             ], 1)
+ 
+         if self.args.no_offsets:
+             offsets = rigids.to_tensor_7()
+         else:
+             offsets = get_offsets(rigids[:, 0:1], rigids)
+         #### make sure the quaternions have a non-negative real part
+         offsets[..., :4] *= torch.where(offsets[:, :, :, 0:1] < 0, -1, 1)
+ 
+         frame_loss_mask = batch['mask'].unsqueeze(-1).expand(-1, -1, 7)  # B, L, 7
+         torsion_loss_mask = batch['torsion_mask'].unsqueeze(-1).expand(-1, -1, -1, 2).reshape(B, L, 14)
+ 
+         if self.args.tps_condition or self.args.inpainting or self.args.dynamic_mpnn:
+             offsets_r = get_offsets(rigids[:, -1:], rigids)
+             offsets_r[..., :4] *= torch.where(offsets_r[:, :, :, 0:1] < 0, -1, 1)
+             offsets = torch.cat([offsets, offsets_r], -1)
+             frame_loss_mask = torch.cat([frame_loss_mask, frame_loss_mask], -1)
+ 
+         if self.args.no_torsion:
+             latents = torch.cat([offsets, torch.zeros_like(batch['torsions'].view(B, T, L, 14))], -1)
+         elif self.args.no_design_torsion:
+             torsions_ = batch['torsions'].clone()
+             torsions_[:, :, DESIGN_IDX] = 0
+             latents = torch.cat([offsets, torsions_.view(B, T, L, 14)], -1)
+         else:
+             latents = torch.cat([offsets, batch['torsions'].view(B, T, L, 14)], -1)
+ 
+         if self.args.supervise_all_torsions:
+             torsion_loss_mask = torch.ones_like(torsion_loss_mask)
+         elif self.args.supervise_no_torsions:
+             torsion_loss_mask = torch.zeros_like(torsion_loss_mask)
+ 
+         loss_mask = torch.cat([frame_loss_mask, torsion_loss_mask], -1)
+         loss_mask = loss_mask.unsqueeze(1).expand(-1, T, -1, -1)
+ 
+         ########
+         cond_mask = torch.zeros(B, T, L, dtype=int, device=offsets.device)
+         if self.args.sim_condition:
+             cond_mask[:, 0] = 1
+         if self.args.tps_condition:
+             cond_mask[:, 0] = cond_mask[:, -1] = 1
+         if self.args.cond_interval:
+             cond_mask[:, ::self.args.cond_interval] = 1
+         if self.args.inpainting or self.args.dynamic_mpnn or self.args.mpnn:
+             cond_mask[:, :, COND_IDX] = 1
+ 
+         aatype_mask = torch.ones_like(batch['seqres'])
+         if self.args.design:
+             aatype_mask[:, DESIGN_IDX] = 0
+         ########
+ 
+         return {
+             'rigids': rigids,
+             'latents': latents,
+             'loss_mask': loss_mask,
+             'model_kwargs': {
+                 'start_frames': rigids[:, 0],
+                 'end_frames': rigids[:, -1],
+                 'mask': batch['mask'].unsqueeze(1).expand(-1, T, -1),
+                 'aatype': torch.where(aatype_mask.bool(), batch['seqres'], 20),
+                 'x_cond': torch.where(cond_mask.unsqueeze(-1).bool(), latents, 0.0),
+                 'x_cond_mask': cond_mask,
+             }
+         }
+ 
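For orientation (an editorial note, not part of the commit), the per-residue latent assembled by `prep_batch` is laid out as [frame offset(s) | torsions | optional aatype logits]: 7 dims per quaternion+translation offset (14 when both start and end frames condition the model), 14 = 7 x 2 sin/cos torsion dims, and 20 amino-acid dims in design mode. The slicing in `inference` below follows this layout:

    # samples: (B, T, L, latent_dim)
    # offsets  = samples[..., :7]      offsets w.r.t. the first frame
    # torsions = samples[..., 7:21]    (or samples[..., 14:28] when there are two offset blocks)
    # logits   = samples[..., -20:]    amino-acid logits, design mode only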
+     def general_step(self, batch, stage='train'):
+         self.iter_step += 1
+         self.stage = stage
+         start1 = time.time()
+ 
+         prep = self.prep_batch(batch)
+ 
+         start = time.time()
+         out_dict = self.transport.training_losses(
+             model=self.model,
+             x1=prep['latents'],
+             aatype1=batch['seqres'] if self.args.design else None,
+             mask=prep['loss_mask'],
+             model_kwargs=prep['model_kwargs']
+         )
+         self.log('model_dur', time.time() - start)
+         loss = out_dict['loss']
+         self.log('loss', loss)
+ 
+         if self.args.design:
+             aa_out = torch.argmax(out_dict['logits'], dim=-1)
+             aa_recovery = aa_out == batch['seqres'][:, None, :].expand(-1, aa_out.shape[1], -1)
+ 
+             self.log('category_pred_design_aa_recovery', aa_recovery[:, :, 1:-1].float().mean().item())
+             cond_aa_recovery = torch.cat([aa_recovery[:, :, 0:1], aa_recovery[:, :, -1:]], 2)
+             self.log('category_pred_cond_aa_recovery', cond_aa_recovery.float().mean().item())
+ 
+             self.log('loss_continuous', out_dict['loss_continuous'].mean())
+             self.log('loss_discrete', out_dict['loss_discrete'])
+ 
+         self.log('time', out_dict['t'])
+         self.log('dur', time.time() - self.last_log_time)
+         if 'name' in batch:
+             self.log('name', ','.join(batch['name']))
+         self.log('general_step_dur', time.time() - start1)
+         self.last_log_time = time.time()
+         return loss.mean()
+ 
+     def inference(self, batch):
+ 
+         prep = self.prep_batch(batch)
+ 
+         latents = prep['latents']
+         if not self.args.no_frames:
+             rigids = prep['rigids']
+             B, T, L = rigids.shape
+         else:
+             B, T, L, _ = latents.shape
+ 
+         ### oracle
+         # if self.args.oracle:
+         #     assert self.args.sim_condition  # only works with that
+         #     offsets = get_offsets(rigids[:, 0:1], rigids)
+         #     torsions = batch['torsions'].view(B, T, L, 14)
+         # else:
+         if self.args.dynamic_mpnn or self.args.mpnn:
+             x1 = prep['latents']
+             x_d = torch.zeros(x1.shape[0], x1.shape[1], x1.shape[2], 20, device=self.device)
+             xt = torch.cat([x1, x_d], dim=-1)
+             logits = self.model.forward_inference(xt, torch.ones(B, device=self.device),
+                                                   **prep['model_kwargs'])
+             aa_out = torch.argmax(logits, -1)
+             atom14 = frames_torsions_to_atom14(rigids, batch['torsions'],
+                                                batch['seqres'][:, None].expand(B, T, L))
+             return atom14, aa_out
+ 
+         if self.args.design:
+             zs_continuous = torch.randn(B, T, L, self.latent_dim - 20, device=latents.device)
+             zs_discrete = torch.distributions.Dirichlet(torch.ones(B, L, 20, device=latents.device)).sample()
+             zs_discrete = zs_discrete[:, None].expand(-1, T, -1, -1)
+             zs = torch.cat([zs_continuous, zs_discrete], -1)
+         else:
+             zs = torch.randn(B, T, L, self.latent_dim, device=self.device)
+ 
+         sample_fn = self.transport_sampler.sample_ode(sampling_method=self.args.sampling_method)
+         # num_steps=self.args.inference_steps)  # default to ode
+ 
+         samples = sample_fn(
+             zs,
+             partial(self.model.forward_inference, **prep['model_kwargs'])
+         )[-1]
+ 
+         if self.args.no_frames:
+             atom14 = atom37_to_atom14(
+                 samples.cpu().numpy().reshape(B, T, L, 37, 3),
+                 batch['seqres'][0].cpu().numpy()
+             )
+             return torch.from_numpy(atom14).float(), None
+ 
+         offsets = samples[..., :7]
+ 
+         if self.args.tps_condition or self.args.inpainting:
+             torsions = samples[..., 14:28]
+             logits = samples[..., -20:]
+         else:
+             torsions = samples[..., 7:21]
+             logits = samples[..., -20:]
+ 
+         if self.args.no_offsets:
+             frames = Rigid.from_tensor_7(offsets, normalize_quats=True)
+         else:
+             frames = rigids[:, 0:1].compose(Rigid.from_tensor_7(offsets, normalize_quats=True))
+         if self.args.design:
+             trans = frames.get_trans()
+             rots = frames.get_rots().get_rot_mats()
+             frames = Rigid(trans=trans, rots=Rotation(rot_mats=rots))
+         torsions = torsions.reshape(B, T, L, 7, 2)
+         if not self.args.oracle:
+             torsions = torsions / torch.linalg.norm(torsions, dim=-1, keepdims=True)
+         atom14 = frames_torsions_to_atom14(frames, torsions.view(B, T, L, 7, 2),
+                                            batch['seqres'][:, None].expand(B, T, L))
+ 
+         if self.args.design:
+             aa_out = torch.argmax(logits, -1)
+         else:
+             aa_out = batch['seqres'][:, None].expand(B, T, L)
+         return atom14, aa_out
+ 
+     def validation_step_extra(self, batch, batch_idx):
+ 
+         do_designability = batch_idx < self.args.inference_batches and (
+                 (self.current_epoch + 1) % self.args.designability_freq == 0 or
+                 self.args.validate) and self.trainer.is_global_zero
+         if do_designability:
+             atom14, aa_out = self.inference(batch)
+             aa_recovery = aa_out == batch['seqres'][:, None, :].expand(-1, aa_out.shape[1], -1)
+             self.log('design_aa_recovery', aa_recovery[:, :, 1:-1].float().mean().item())
+             cond_aa_recovery = torch.cat([aa_recovery[:, :, 0:1], aa_recovery[:, :, -1:]], 2)
+             self.log('cond_aa_recovery', cond_aa_recovery.float().mean().item())
+             self.log('seq_pred', ','.join([aatype_to_str_sequence(aa) for aa in aa_out[:, 0]]))
+             self.log('seq_true', ','.join([aatype_to_str_sequence(aa) for aa in batch['seqres']]))
+             prot_name = batch['name'][0]
+             path = os.path.join(os.environ["MODEL_DIR"], f'epoch{self.current_epoch}_{prot_name}.pdb')
+ 
+             atom14_to_pdb(atom14[0].cpu().numpy(), batch['seqres'][0].cpu().numpy(), path)
+         else:
+             self.log('design_aa_recovery', np.nan)
+             self.log('cond_aa_recovery', np.nan)
+             self.log('seq_pred', 'nan')
+             self.log('seq_true', 'nan')
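Hedged inference sketch (editorial; the checkpoint path and batch construction are hypothetical): this is roughly how the inference scripts below drive the wrapper.

    # model = NewMDGenWrapper.load_from_checkpoint('path/to/last.ckpt')
    # model.eval().to('cuda')
    # with torch.no_grad():
    #     atom14, aa_out = model.inference(batch)   # batch as produced by the mdgen dataset classes
    # atom14_to_pdb(atom14[0].cpu().numpy(), aa_out[0, 0].cpu().numpy(), 'sample.pdb')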
data/scripts/analyze_peptide_design.py ADDED
@@ -0,0 +1,96 @@
+ import numpy as np
+ import os
+ import argparse
+ from mdgen.residue_constants import aatype_to_str_sequence, restype_order
+ import json
+ from tqdm import tqdm
+ 
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--mddir', type=str, default='share/4AA_sims')
+ parser.add_argument('--data_dir', type=str, default='share/4AA_sims_replica')
+ parser.add_argument('--pdbdir', type=str, required=True)
+ parser.add_argument('--split', type=str, default='splits/4AA_test.csv')
+ args = parser.parse_args()
+ 
+ 
+ names = os.listdir(args.pdbdir)
+ names = list(set([nam[:4] for nam in names if 'metadata.json' in nam]))
+ 
+ metadatas = {name: json.load(open(f'{args.pdbdir}/{name}_metadata.json', 'rb')) for name in names}
+ 
+ res = {}
+ for name in names:
+     metadata = metadatas[name]
+     res[name] = [traj['aa_out'][0] for traj in metadata]
+ 
+ designed_names = {}
+ max_cond_recovery = 0
+ max_design_recovery = 0
+ 
+ all_recovery = 0
+ design_recovery = 0
+ final_design_recovery = 0
+ most_frequent_middle_recovery = 0
+ cond_recovery = 0
+ for name in tqdm(names):
+     max_aa = []
+     name_numeric = np.array([restype_order[l] for l in name])
+     pred = np.array(res[name])
+     pred_str = [aatype_to_str_sequence(nam[1:-1]) for nam in pred]
+ 
+     design_middles, index, counts = np.unique(np.array(pred_str), return_counts=True, return_index=True)
+     most_freq_idx = index[np.argmax(counts)]
+     # design_middle = design_middles[np.argmax(counts)]
+     most_freq_pred = pred[most_freq_idx]
+     design_middle = aatype_to_str_sequence(most_freq_pred[1:-1])
+     most_frequent_middle_recovery += (most_freq_pred == name_numeric)[1:-1].mean()
+ 
+     recovery = (pred == name_numeric[None, :])
+     design_recovery += recovery[:, 1:-1].mean()
+     cond_recovery += np.concatenate([recovery[:, -1], recovery[:, 0]]).mean()
+ 
+     final_design_idx = np.argsort(recovery[:, 0].astype(float) + recovery[:, -1].astype(float))[0]
+     final_design_name = pred[final_design_idx]
+     final_design_recovery += (name_numeric[1:-1] == final_design_name[1:-1]).mean()
+ 
+     for i in range(4):
+         letters, counts = np.unique(np.array(res[name])[:, i], return_counts=True)
+         max_aa.append(letters[np.argmax(counts)])
+     max_aa = np.array(max_aa)
+     max_cond_recovery += ((name_numeric[0] == max_aa[0]).astype(float) + (name_numeric[-1] == max_aa[-1]).astype(float)) / 2
+     max_design_recovery += (name_numeric[1:-1] == max_aa[1:-1]).mean()
+ 
+     designed_name = name[0] + design_middle + name[-1]
+ 
+     metadata = metadatas[name]
+     start_idx = metadata[most_freq_idx]['start_idx']
+     end_idx = metadata[most_freq_idx]['end_idx']
+ 
+     designed_names[name] = {
+         'designed_name': designed_name,
+         'start_idx': start_idx,
+         'end_idx': end_idx,
+         'start_state': metadata[most_freq_idx]['start_state'],
+         'end_state': metadata[most_freq_idx]['end_state']
+     }
+ 
+ 
+ cond_recovery = cond_recovery / len(names)
+ design_recovery = design_recovery / len(names)
+ 
+ max_cond_recovery = max_cond_recovery / len(names)
+ max_design_recovery = max_design_recovery / len(names)
+ 
+ final_design_recovery = final_design_recovery / len(names)
+ most_frequent_middle_recovery = most_frequent_middle_recovery / len(names)
+ 
+ print('cond_recovery', cond_recovery)
+ print('max_cond_recovery', max_cond_recovery)
+ 
+ print('design_recovery', design_recovery)
+ print('max_design_recovery', max_design_recovery)
+ 
+ print('final_design_recovery', final_design_recovery)
+ print('most_frequent_middle_recovery', most_frequent_middle_recovery)
data/scripts/analyze_peptide_sim.py ADDED
@@ -0,0 +1,229 @@
+ import argparse
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--mddir', type=str, default='share/4AA_sims')
+ parser.add_argument('--pdbdir', type=str, required=True)
+ parser.add_argument('--save', action='store_true')
+ parser.add_argument('--plot', action='store_true')
+ parser.add_argument('--save_name', type=str, default='out.pkl')
+ parser.add_argument('--pdb_id', nargs='*', default=[])
+ parser.add_argument('--no_msm', action='store_true')
+ parser.add_argument('--no_decorr', action='store_true')
+ parser.add_argument('--no_traj_msm', action='store_true')
+ parser.add_argument('--truncate', type=int, default=None)
+ parser.add_argument('--msm_lag', type=int, default=10)
+ parser.add_argument('--ito', action='store_true')
+ parser.add_argument('--num_workers', type=int, default=1)
+ 
+ args = parser.parse_args()
+ 
+ import mdgen.analysis
+ import pyemma, tqdm, os, pickle
+ from scipy.spatial.distance import jensenshannon
+ from multiprocessing import Pool
+ import numpy as np
+ import matplotlib.pyplot as plt
+ from statsmodels.tsa.stattools import acovf, acf
+ colors = plt.rcParams['axes.prop_cycle'].by_key()['color']
+ 
+ def main(name):
+     out = {}
+     np.random.seed(137)
+     fig, axs = plt.subplots(4, 4, figsize=(20, 20))
+ 
+     ### BACKBONE torsion marginals, PLOT ONLY
+     if args.plot:
+         feats, traj = mdgen.analysis.get_featurized_traj(f'{args.pdbdir}/{name}', sidechains=False, cossin=False)
+         if args.truncate: traj = traj[:args.truncate]
+         feats, ref = mdgen.analysis.get_featurized_traj(f'{args.mddir}/{name}/{name}', sidechains=False, cossin=False)
+         pyemma.plots.plot_feature_histograms(ref, feature_labels=feats, ax=axs[0,0], color=colors[0])
+         pyemma.plots.plot_feature_histograms(traj, ax=axs[0,0], color=colors[1])
+         axs[0,0].set_title('BB torsions')
+ 
+     ### JENSEN-SHANNON DISTANCES ON ALL TORSIONS
+     feats, traj = mdgen.analysis.get_featurized_traj(f'{args.pdbdir}/{name}', sidechains=True, cossin=False)
+     if args.truncate: traj = traj[:args.truncate]
+     feats, ref = mdgen.analysis.get_featurized_traj(f'{args.mddir}/{name}/{name}', sidechains=True, cossin=False)
+ 
+     out['features'] = feats.describe()
+ 
+     out['JSD'] = {}
+     for i, feat in enumerate(feats.describe()):
+         ref_p = np.histogram(ref[:,i], range=(-np.pi, np.pi), bins=100)[0]
+         traj_p = np.histogram(traj[:,i], range=(-np.pi, np.pi), bins=100)[0]
+         out['JSD'][feat] = jensenshannon(ref_p, traj_p)
+ 
+     for i in [1,3]:
+         ref_p = np.histogram2d(*ref[:,i:i+2].T, range=((-np.pi, np.pi),(-np.pi,np.pi)), bins=50)[0]
+         traj_p = np.histogram2d(*traj[:,i:i+2].T, range=((-np.pi, np.pi),(-np.pi,np.pi)), bins=50)[0]
+         out['JSD']['|'.join(feats.describe()[i:i+2])] = jensenshannon(ref_p.flatten(), traj_p.flatten())
+ 
+     ############ Torsion decorrelations
+     if args.no_decorr:
+         pass
+     else:
+         out['md_decorrelation'] = {}
+         for i, feat in enumerate(feats.describe()):
+             autocorr = acovf(np.sin(ref[:,i]), demean=False, adjusted=True, nlag=100000) + acovf(np.cos(ref[:,i]), demean=False, adjusted=True, nlag=100000)
+             baseline = np.sin(ref[:,i]).mean()**2 + np.cos(ref[:,i]).mean()**2
+             # E[(X(t) - E[X(t)]) (X(t+dt) - E[X(t+dt)])] = E[X(t)X(t+dt)] - E[X]**2, so the
+             # non-demeaned autocovariance minus the squared-mean baseline gives the centered
+             # autocorrelation, normalized below to start at 1.
+             lags = 1 + np.arange(len(autocorr))
+             if 'PHI' in feat or 'PSI' in feat:
+                 axs[0,1].plot(lags, (autocorr - baseline) / (1-baseline), color=colors[i%len(colors)])
+             else:
+                 axs[0,2].plot(lags, (autocorr - baseline) / (1-baseline), color=colors[i%len(colors)])
+ 
+             out['md_decorrelation'][feat] = (autocorr.astype(np.float16) - baseline) / (1-baseline)
+ 
+         axs[0,1].set_title('Backbone decorrelation')
+         axs[0,2].set_title('Sidechain decorrelation')
+         axs[0,1].set_xscale('log')
+         axs[0,2].set_xscale('log')
+ 
+         out['our_decorrelation'] = {}
+         for i, feat in enumerate(feats.describe()):
+             autocorr = acovf(np.sin(traj[:,i]), demean=False, adjusted=True, nlag=1 if args.ito else 1000) + acovf(np.cos(traj[:,i]), demean=False, adjusted=True, nlag=1 if args.ito else 1000)
+             baseline = np.sin(traj[:,i]).mean()**2 + np.cos(traj[:,i]).mean()**2
+             lags = 1 + np.arange(len(autocorr))
+             if 'PHI' in feat or 'PSI' in feat:
+                 axs[1,1].plot(lags, (autocorr - baseline) / (1-baseline), color=colors[i%len(colors)])
+             else:
+                 axs[1,2].plot(lags, (autocorr - baseline) / (1-baseline), color=colors[i%len(colors)])
+ 
+             out['our_decorrelation'][feat] = (autocorr.astype(np.float16) - baseline) / (1-baseline)
+ 
+         axs[1,1].set_title('Backbone decorrelation')
+         axs[1,2].set_title('Sidechain decorrelation')
+         axs[1,1].set_xscale('log')
+         axs[1,2].set_xscale('log')
+ 
+     ####### TICA #############
+     feats, traj = mdgen.analysis.get_featurized_traj(f'{args.pdbdir}/{name}', sidechains=True, cossin=True)
+     if args.truncate: traj = traj[:args.truncate]
+     feats, ref = mdgen.analysis.get_featurized_traj(f'{args.mddir}/{name}/{name}', sidechains=True, cossin=True)
+ 
+     tica, _ = mdgen.analysis.get_tica(ref)
+     ref_tica = tica.transform(ref)
+     traj_tica = tica.transform(traj)
+ 
+     tica_0_min = min(ref_tica[:,0].min(), traj_tica[:,0].min())
+     tica_0_max = max(ref_tica[:,0].max(), traj_tica[:,0].max())
+ 
+     tica_1_min = min(ref_tica[:,1].min(), traj_tica[:,1].min())
+     tica_1_max = max(ref_tica[:,1].max(), traj_tica[:,1].max())
+ 
+     ref_p = np.histogram(ref_tica[:,0], range=(tica_0_min, tica_0_max), bins=100)[0]
+     traj_p = np.histogram(traj_tica[:,0], range=(tica_0_min, tica_0_max), bins=100)[0]
+     out['JSD']['TICA-0'] = jensenshannon(ref_p, traj_p)
+ 
+     ref_p = np.histogram2d(*ref_tica[:,:2].T, range=((tica_0_min, tica_0_max),(tica_1_min, tica_1_max)), bins=50)[0]
+     traj_p = np.histogram2d(*traj_tica[:,:2].T, range=((tica_0_min, tica_0_max),(tica_1_min, tica_1_max)), bins=50)[0]
+     out['JSD']['TICA-0,1'] = jensenshannon(ref_p.flatten(), traj_p.flatten())
+ 
+     #### TICA free-energy surfaces over components 0 and 1
+     if args.plot:
+         pyemma.plots.plot_free_energy(*ref_tica[::100, :2].T, ax=axs[2,0], cbar=False)
+         pyemma.plots.plot_free_energy(*traj_tica[:, :2].T, ax=axs[2,1], cbar=False)
+         axs[2,0].set_title('TICA FES (MD)')
+         axs[2,1].set_title('TICA FES (ours)')
+ 
+     ####### TICA decorrelation ########
+     if args.no_decorr:
+         pass
+     else:
+         # acovf signature: acovf(x, adjusted=False, demean=True, fft=True, missing='none', nlag=None)
+         autocorr = acovf(ref_tica[:,0], nlag=100000, adjusted=True, demean=False)
+         out['md_decorrelation']['tica'] = autocorr.astype(np.float16)
+         if args.plot:
+             axs[0,3].plot(autocorr)
+             axs[0,3].set_title('MD TICA')
+ 
+         autocorr = acovf(traj_tica[:,0], nlag=1 if args.ito else 1000, adjusted=True, demean=False)
+         out['our_decorrelation']['tica'] = autocorr.astype(np.float16)
+         if args.plot:
+             axs[1,3].plot(autocorr)
+             axs[1,3].set_title('Traj TICA')
+ 
+     ###### Markov state model stuff #################
+     if not args.no_msm:
+         kmeans, ref_kmeans = mdgen.analysis.get_kmeans(tica.transform(ref))
+         try:
+             msm, pcca, cmsm = mdgen.analysis.get_msm(ref_kmeans, nstates=10)
+ 
+             out['kmeans'] = kmeans
+             out['msm'] = msm
+             out['pcca'] = pcca
+             out['cmsm'] = cmsm
+ 
+             traj_discrete = mdgen.analysis.discretize(tica.transform(traj), kmeans, msm)
+             ref_discrete = mdgen.analysis.discretize(tica.transform(ref), kmeans, msm)
+             out['traj_metastable_probs'] = (traj_discrete == np.arange(10)[:,None]).mean(1)
+             out['ref_metastable_probs'] = (ref_discrete == np.arange(10)[:,None]).mean(1)
+             #########
+ 
+             msm_transition_matrix = np.eye(10)
+             for a, i in enumerate(cmsm.active_set):
+                 for b, j in enumerate(cmsm.active_set):
+                     msm_transition_matrix[i,j] = cmsm.transition_matrix[a,b]
+ 
+             out['msm_transition_matrix'] = msm_transition_matrix
+             out['pcca_pi'] = pcca._pi_coarse
+ 
+             msm_pi = np.zeros(10)
+             msm_pi[cmsm.active_set] = cmsm.pi
+             out['msm_pi'] = msm_pi
+ 
+             if args.no_traj_msm:
+                 pass
+             else:
+                 traj_msm = pyemma.msm.estimate_markov_model(traj_discrete, lag=args.msm_lag)
+                 out['traj_msm'] = traj_msm
+ 
+                 traj_transition_matrix = np.eye(10)
+                 for a, i in enumerate(traj_msm.active_set):
+                     for b, j in enumerate(traj_msm.active_set):
+                         traj_transition_matrix[i,j] = traj_msm.transition_matrix[a,b]
+                 out['traj_transition_matrix'] = traj_transition_matrix
+ 
+                 traj_pi = np.zeros(10)
+                 traj_pi[traj_msm.active_set] = traj_msm.pi
+                 out['traj_pi'] = traj_pi
+ 
+         except Exception as e:
+             print('ERROR', e, name, flush=True)
+ 
+     if args.plot:
+         fig.savefig(f'{args.pdbdir}/{name}.pdf')
+ 
+     return name, out
+ 
+ if args.pdb_id:
+     pdb_id = args.pdb_id
+ else:
+     pdb_id = [nam.split('.')[0] for nam in os.listdir(args.pdbdir) if '.pdb' in nam and not '_traj' in nam]
+     pdb_id = [nam for nam in pdb_id if os.path.exists(f'{args.pdbdir}/{nam}.xtc')]
+ print('number of trajectories', len(pdb_id))
+ 
+ 
+ if args.num_workers > 1:
+     p = Pool(args.num_workers)
+     p.__enter__()
+     __map__ = p.imap
+ else:
+     __map__ = map
+ out = dict(tqdm.tqdm(__map__(main, pdb_id), total=len(pdb_id)))
+ if args.num_workers > 1:
+     p.__exit__(None, None, None)
+ 
+ if args.save:
+     with open(f"{args.pdbdir}/{args.save_name}", 'wb') as f:
+         f.write(pickle.dumps(out))
+ 
data/scripts/analyze_peptide_tps.py ADDED
@@ -0,0 +1,192 @@
+ import argparse
+ import json
+ import pickle
+ from multiprocessing import Pool
+ 
+ from scipy.spatial.distance import jensenshannon
+ 
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--mddir', type=str, default='share/4AA_sims')
+ parser.add_argument('--repdir', type=str, default='share/4AA_sims_replica')
+ parser.add_argument('--pdbdir', type=str, required=True)
+ parser.add_argument('--outdir', type=str, required=True)
+ parser.add_argument('--save', action='store_true')
+ parser.add_argument('--plot', action='store_true')
+ parser.add_argument('--save_name', type=str, default='out.pkl')
+ parser.add_argument('--pdb_id', nargs='*', default=[])
+ parser.add_argument('--no_overwrite', nargs='*', default=[])
+ parser.add_argument('--num_workers', type=int, default=1)
+ args = parser.parse_args()
+ 
+ import mdgen.analysis
+ import pyemma, tqdm, os
+ import numpy as np
+ import matplotlib.pyplot as plt
+ 
+ 
+ def main(name):
+     print(f'processing {name}')
+     np.random.seed(137)
+     name = name.split('_')[0]
+ 
+     feats, ref = mdgen.analysis.get_featurized_traj(f'{args.mddir}/{name}/{name}', sidechains=True)
+ 
+     tica, _ = mdgen.analysis.get_tica(ref)
+     out = pickle.load(open(os.path.join(args.pdbdir, f'{name}_metadata.pkl'), 'rb'))
+     msm = out['msm']
+     cmsm = out['cmsm']
+     kmeans = out['kmeans']
+     metadata = json.load(open(os.path.join(args.pdbdir, f'{name}_metadata.json'), 'rb'))
+     start_idx = metadata[0]['start_idx']
+     end_idx = metadata[0]['end_idx']
+     start_state = metadata[0]['start_state']
+     end_state = metadata[0]['end_state']
+ 
+     print("Reference analysis")
+     gen_feats_list, gen_traj_list = mdgen.analysis.load_tps_ensemble(name, args.pdbdir)
+     gen_traj_cat = np.concatenate(gen_traj_list, axis=0)
+ 
+     fig, axs = plt.subplots(6, 4, figsize=(20, 20))
+ 
+     pyemma.plots.plot_free_energy(*tica.transform(gen_traj_cat)[:, :2].T, ax=axs[0, 1], cbar=False)
+     axs[0, 1].scatter(tica.transform(ref)[start_idx, 0], tica.transform(ref)[start_idx, 1], s=200, c='black')
+     axs[0, 1].scatter(tica.transform(ref)[end_idx, 0], tica.transform(ref)[end_idx, 1], s=200, c='black')
+     axs[0, 1].set_title('Transition Path Ensemble')
+ 
+     pyemma.plots.plot_free_energy(*tica.transform(ref)[::100, :2].T, ax=axs[0, 0], cbar=False)
+     axs[0, 0].scatter(tica.transform(ref)[start_idx, 0], tica.transform(ref)[start_idx, 1], s=200, c='black')
+     axs[0, 0].scatter(tica.transform(ref)[end_idx, 0], tica.transform(ref)[end_idx, 1], s=200, c='black')
+     axs[0, 0].set_title('Reference MD in TICA space with start and end state')
+     pyemma.plots.plot_markov_model(cmsm, minflux=4e-4, arrow_label_format='%.3f', ax=axs[1, 0])
+     axs[1, 0].set_title(f'Reference MD MSM. Start {start_state}. End {end_state}.')
+ 
+     ref_tp = mdgen.analysis.sample_tp(trans=cmsm.transition_matrix, start_state=start_state, end_state=end_state,
+                                       traj_len=11,
+                                       n_samples=1000)
+     ref_stateprobs = mdgen.analysis.get_state_probs(ref_tp)
+ 
+     print("Generated analysis")
+     highest_prob_state = cmsm.active_set[np.argmax(cmsm.pi)]
+     allidx_to_activeidx = {value: idx for idx, value in enumerate(cmsm.active_set)}
+     ### Generated analysis
+     gen_discrete = mdgen.analysis.discretize(tica.transform(np.concatenate(gen_traj_list)), kmeans, msm)
+     gen_tp_all = gen_discrete.reshape((len(gen_traj_list), -1))
+     gen_tp = gen_tp_all[:, ::10]
+     gen_tp = np.concatenate([gen_tp, gen_tp_all[:, -1:]], axis=1)
+     gen_stateprobs = mdgen.analysis.get_state_probs(gen_tp)
+     gen_probs = mdgen.analysis.get_tp_likelihood(np.vectorize(allidx_to_activeidx.get)(gen_tp, highest_prob_state),
+                                                  cmsm.transition_matrix)
+     gen_prob = gen_probs.prod(-1)
+     out['gen_prob'] = gen_prob.mean()
+     out['gen_valid_prob'] = gen_prob[gen_prob > 0].mean()
+     out['gen_valid_rate'] = (gen_prob > 0).mean()
+     out['gen_JSD'] = jensenshannon(ref_stateprobs, gen_stateprobs)
+ 
+     ### Replica analysis
+     rep_feats, rep = mdgen.analysis.get_featurized_traj(f'{args.repdir}/{name}', sidechains=True)
+     rep_lens = [999999, 500000, 300000, 200000, 100000, 50000, 20000]
+     rep_names = ['100ns', '50ns', '30ns', '20ns', '10ns', '5ns', '2ns']
+     rep_stateprobs_list = []
+     print('Replica analysis')
+     for i in range(len(rep_lens)):
+         rep_small = rep[:rep_lens[i]]
+         rep_discrete = mdgen.analysis.discretize(tica.transform(rep_small), kmeans, msm)
+         rep_msm = pyemma.msm.estimate_markov_model(rep_discrete, lag=1000)  # 100ps time lag for the msm
+ 
+         idx_to_repidx = {value: idx for idx, value in enumerate(rep_msm.active_set)}
+         repidx_to_idx = {idx: value for idx, value in enumerate(rep_msm.active_set)}
+         if (start_state not in idx_to_repidx.keys()) or (end_state not in idx_to_repidx.keys()):
+             out[f'{rep_names[i]}_rep_prob'] = 0
+             out[f'{rep_names[i]}_rep_valid_prob'] = 0
+             out[f'{rep_names[i]}_rep_valid_rate'] = 0
+             out[f'{rep_names[i]}_rep_JSD'] = 1
+             out[f'{rep_names[i]}_repcheat_prob'] = np.nan
+             out[f'{rep_names[i]}_repcheat_valid_prob'] = np.nan
+             out[f'{rep_names[i]}_repcheat_valid_rate'] = np.nan
+             out[f'{rep_names[i]}_repcheat_JSD'] = np.nan
+             rep_stateprobs_list.append(np.zeros(10))
+             continue
+ 
+         repidx_start_state = idx_to_repidx[start_state]
+         repidx_end_state = idx_to_repidx[end_state]
+ 
+         repidx_tp = mdgen.analysis.sample_tp(trans=rep_msm.transition_matrix, start_state=repidx_start_state,
+                                              end_state=repidx_end_state, traj_len=11, n_samples=1000)
+         rep_tp = np.vectorize(repidx_to_idx.get)(repidx_tp)
+         assert rep_tp[0, 0] == start_state
+         assert rep_tp[0, -1] == end_state
+         rep_probs = mdgen.analysis.get_tp_likelihood(np.vectorize(allidx_to_activeidx.get)(rep_tp, highest_prob_state),
+                                                      cmsm.transition_matrix)
+         rep_prob = rep_probs.prod(-1)
+         rep_stateprobs = mdgen.analysis.get_state_probs(rep_tp)
+         rep_stateprobs_list.append(rep_stateprobs)
+         out[f'{rep_names[i]}_rep_prob'] = rep_prob.mean()
+         out[f'{rep_names[i]}_rep_valid_prob'] = rep_prob[rep_prob > 0].mean()
+         out[f'{rep_names[i]}_rep_valid_rate'] = (rep_prob > 0).mean()
+         out[f'{rep_names[i]}_rep_JSD'] = jensenshannon(ref_stateprobs, rep_stateprobs)
+         out[f'{rep_names[i]}_repcheat_prob'] = rep_prob.mean()
+         out[f'{rep_names[i]}_repcheat_valid_prob'] = rep_prob[rep_prob > 0].mean()
+         out[f'{rep_names[i]}_repcheat_valid_rate'] = (rep_prob > 0).mean()
+         out[f'{rep_names[i]}_repcheat_JSD'] = jensenshannon(ref_stateprobs, rep_stateprobs)
+ 
+     full_rep_discrete = mdgen.analysis.discretize(tica.transform(rep), kmeans, msm)
+     full_rep_msm = pyemma.msm.estimate_markov_model(full_rep_discrete, lag=1000)  # 100ps time lag for the msm
+ 
+     axs[0, 2].imshow(cmsm.transition_matrix == 0)
+     axs[0, 2].set_title('Reference 100ns MD transition matrix zeros')
+     axs[1, 2].imshow(full_rep_msm.transition_matrix == 0)
+     axs[1, 2].set_title('Replica 100ns MD transition matrix zeros')
+ 
+     data = np.stack([ref_stateprobs, gen_stateprobs, *rep_stateprobs_list])
+     row_names = ['Reference', 'Generated', *[f'Replica {name}' for name in rep_names]]
+     axs[1, 1].imshow(data, cmap='viridis')
+     axs[1, 1].set_yticks(range(len(row_names)))
+     axs[1, 1].set_yticklabels(row_names)
+ 
+     gen_stack_all = np.stack(gen_traj_list, axis=0)
+ 
+     for i in range(4):
+         for j in range(4):
+             idx = i * 4 + j
+             pyemma.plots.plot_free_energy(*tica.transform(ref)[::100, :2].T, ax=axs[2+i, j], cbar=False)
+             plot_traj = tica.transform(gen_stack_all[idx])[:, :2]
+             axs[2+i, j].plot(plot_traj[:, 0], plot_traj[:, 1], c='black', marker='o')
+             axs[2+i, j].set_title(f'Trajectory {idx}')
+ 
+     mapping = {value: idx for idx, value in enumerate(cmsm.active_set)}
+     ref_tpt = pyemma.msm.tpt(cmsm, [mapping[start_state]], [mapping[end_state]])
+     pyemma.plots.plot_flux(ref_tpt, minflux=4e-8, arrow_label_format='%.3f', state_labels=None, show_committor=True, ax=axs[0, 3])
+     gen_tps_msm = pyemma.msm.estimate_markov_model(list(gen_tp), lag=1)
+     mapping = {value: idx for idx, value in enumerate(gen_tps_msm.active_set)}
+     gen_tpt = pyemma.msm.tpt(gen_tps_msm, [mapping[start_state]], [mapping[end_state]])
+     pyemma.plots.plot_flux(gen_tpt, minflux=4e-8, arrow_label_format='%.3f', state_labels=None, show_committor=True, ax=axs[1, 3])
+ 
+     if args.plot:
+         os.makedirs(args.outdir, exist_ok=True)
+         fig.savefig(f'{args.outdir}/{name}.pdf')
+ 
+     with open(f"{args.outdir}/{name}.pkl", 'wb') as f:
+         f.write(pickle.dumps(out))
+ 
+     return name, out
+ 
+ 
+ if args.pdb_id:
+     pdb_id = args.pdb_id
+ else:
+     pdb_id = list(set([nam.split('_')[0] for nam in os.listdir(args.pdbdir) if '.pdb' in nam]))
+ 
+ if args.num_workers > 1:
+     p = Pool(args.num_workers)
+     p.__enter__()
+     __map__ = p.imap
+ else:
+     __map__ = map
+ out = dict(tqdm.tqdm(__map__(main, pdb_id), total=len(pdb_id)))
+ if args.num_workers > 1:
+     p.__exit__(None, None, None)
+ 
+ if args.save:
+     with open(f"{args.outdir}/{args.save_name}", 'wb') as f:
+         f.write(pickle.dumps(out))
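A minimal numpy sketch of what sampling discrete paths from an MSM amounts to (editorial; `mdgen.analysis.sample_tp` additionally conditions on the end state, which plain forward sampling does not):

    import numpy as np
    rng = np.random.default_rng(0)
    T = np.array([[0.9, 0.1], [0.2, 0.8]])    # row-stochastic transition matrix
    path = [0]
    for _ in range(10):
        path.append(rng.choice(2, p=T[path[-1]]))
    print(path)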
data/scripts/analyze_upsampling.py ADDED
@@ -0,0 +1,79 @@
+ import argparse
+ parser = argparse.ArgumentParser()
+ parser.add_argument("--pdbdir", required=True)
+ parser.add_argument("--mddir", default='share/4AA_sims_implicit')
+ parser.add_argument('--pdb_id', nargs='*', default=[])
+ args = parser.parse_args()
+ 
+ import mdgen.analysis
+ import tqdm, os
+ import matplotlib.pyplot as plt
+ from statsmodels.tsa.stattools import acovf
+ import numpy as np
+ colors = plt.rcParams['axes.prop_cycle'].by_key()['color']
+ 
+ def do(name):
+     feats, ref = mdgen.analysis.get_featurized_traj(f'{args.mddir}/{name}/{name}', sidechains=True, cossin=False)
+     feats, traj = mdgen.analysis.get_featurized_traj(f'{args.pdbdir}/{name}', sidechains=True, cossin=False)
+ 
+     md_autocorr = {}
+     our_autocorr = {}
+     subsample_autocorr = {}
+     for i, feat in enumerate(feats.describe()):
+         md_autocorr[feat] = acovf(np.sin(ref[:,i]), demean=False, adjusted=True) + acovf(np.cos(ref[:,i]), demean=False, adjusted=True)
+         our_autocorr[feat] = acovf(np.sin(traj[:,i]), demean=False, adjusted=True) + acovf(np.cos(traj[:,i]), demean=False, adjusted=True)
+         subsample_autocorr[feat] = acovf(np.sin(ref[::100,i]), demean=False, adjusted=True) + acovf(np.cos(ref[::100,i]), demean=False, adjusted=True)
+ 
+     # lag axes in picoseconds: frames are saved every 0.1 ps, so the 100x subsampled
+     # reference has a 10 ps spacing
+     lags = 0.1 * (1 + np.arange(1000000))
+     subsample_lags = 10 * (1 + np.arange(1000000))
+ 
+     fig, axs = plt.subplots(1, 2, figsize=(10, 5))
+ 
+     for i, key in enumerate([key for key in md_autocorr if 'CHI' in key]):
+         toplot = md_autocorr[key][1:]
+         axs[0].plot(lags[:len(toplot)], toplot, color=colors[i%len(colors)])
+ 
+         toplot = subsample_autocorr[key][1:]
+         axs[0].scatter(subsample_lags[:len(toplot)], toplot, color=colors[i%len(colors)], label=key)
+ 
+         toplot = our_autocorr[key][1:]
+         axs[0].plot(lags[:len(toplot)], toplot, color=colors[i%len(colors)], linestyle='--')
+     axs[0].set_title(f"{name} sidechains")
+     axs[0].set_xscale('log')
+     axs[0].set_xlim(0.1, 100)
+     axs[0].set_ylim(0.5, 1)
+     axs[0].set_xlabel('ps')
+     axs[0].legend(loc='upper center', bbox_to_anchor=(0.5, -0.05), ncol=3, fontsize=6)
+ 
+     for i, key in enumerate([key for key in md_autocorr if 'CHI' not in key]):
+         toplot = md_autocorr[key][1:]
+         axs[1].plot(lags[:len(toplot)], toplot, color=colors[i%len(colors)])
+ 
+         toplot = subsample_autocorr[key][1:]
+         axs[1].scatter(subsample_lags[:len(toplot)], toplot, color=colors[i%len(colors)], label=key)
+ 
+         toplot = our_autocorr[key][1:]
+         axs[1].plot(lags[:len(toplot)], toplot, color=colors[i%len(colors)], linestyle='--')
+     axs[1].set_title(f"{name} backbones")
+ 
+     axs[1].set_xscale('log')
+     axs[1].set_xlim(0.1, 100)
+     axs[1].set_ylim(0.5, 1)
+     axs[1].set_xlabel('ps')
+     axs[1].legend(loc='upper center', bbox_to_anchor=(0.5, -0.05), ncol=3, fontsize=6)
+ 
+     fig.savefig(f"{args.pdbdir}/{name}.pdf", bbox_inches='tight', pad_inches=0)
+ 
+ if args.pdb_id:
+     pdb_id = args.pdb_id
+ else:
+     pdb_id = [nam.split('.')[0] for nam in os.listdir(args.pdbdir) if '.pdb' in nam]
+ 
+ 
+ for name in tqdm.tqdm(pdb_id):
+     if os.path.exists(f"{args.pdbdir}/{name}.pdf"): continue
+     try:
+         do(name)
+     except Exception as e:
+         print(name, e)
data/scripts/prep_sims.py ADDED
@@ -0,0 +1,80 @@
+ import argparse
+ 
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--split', type=str, default='splits/atlas.csv')
+ parser.add_argument('--sim_dir', type=str, default='/data/cb/scratch/datasets/atlas')
+ parser.add_argument('--outdir', type=str, default='./data_atlas')
+ parser.add_argument('--num_workers', type=int, default=1)
+ parser.add_argument('--suffix', type=str, default='')
+ parser.add_argument('--atlas', action='store_true')
+ parser.add_argument('--stride', type=int, default=1)
+ args = parser.parse_args()
+ 
+ import mdtraj, os, tqdm
+ import pandas as pd
+ from multiprocessing import Pool
+ import numpy as np
+ from mdgen import residue_constants as rc
+ 
+ os.makedirs(args.outdir, exist_ok=True)
+ 
+ df = pd.read_csv(args.split, index_col='name')
+ names = df.index
+ 
+ def main():
+     jobs = []
+     for name in names:
+         if os.path.exists(f'{args.outdir}/{name}{args.suffix}.npy'): continue
+         jobs.append(name)
+ 
+     if args.num_workers > 1:
+         p = Pool(args.num_workers)
+         p.__enter__()
+         __map__ = p.imap
+     else:
+         __map__ = map
+     for _ in tqdm.tqdm(__map__(do_job, jobs), total=len(jobs)):
+         pass
+     if args.num_workers > 1:
+         p.__exit__(None, None, None)
+ 
+ # def prot_to_frames(ca_coords, c_coords, n_coords):
+ #     prot_frames = Rigid.from_3_points(
+ #         torch.from_numpy(c_coords),
+ #         torch.from_numpy(ca_coords),
+ #         torch.from_numpy(n_coords),
+ #     )
+ #     rots = torch.eye(3)
+ #     rots[0, 0] = -1
+ #     rots[2, 2] = -1
+ #     rots = Rotation(rot_mats=rots)
+ #     return prot_frames.compose(Rigid(rots, None))
+ 
+ 
+ def traj_to_atom14(traj):
+     arr = np.zeros((traj.n_frames, traj.n_residues, 14, 3), dtype=np.float16)
+     for i, resi in enumerate(traj.top.residues):
+         for at in resi.atoms:
+             if at.name not in rc.restype_name_to_atom14_names[resi.name]:
+                 print(resi.name, at.name, 'not found'); continue
+             j = rc.restype_name_to_atom14_names[resi.name].index(at.name)
+             arr[:,i,j] = traj.xyz[:,at.index] * 10.0  # nm -> Angstrom
+     return arr
+ 
+ if args.atlas:
+     def do_job(name):
+         for i in [1,2,3]:
+             traj = mdtraj.load(f'{args.sim_dir}/{name}/{name}_prod_R{i}_fit.xtc', top=f'{args.sim_dir}/{name}/{name}.pdb')
+             traj.atom_slice([a.index for a in traj.top.atoms if a.element.symbol != 'H'], True)
+             traj.superpose(traj)
+             arr = traj_to_atom14(traj)
+             np.save(f'{args.outdir}/{name}_R{i}{args.suffix}.npy', arr[::args.stride])
+ else:
+     def do_job(name):
+         traj = mdtraj.load(f'{args.sim_dir}/{name}/{name}.xtc', top=f'{args.sim_dir}/{name}/{name}.pdb')
+         traj.superpose(traj)
+         arr = traj_to_atom14(traj)
+         np.save(f'{args.outdir}/{name}{args.suffix}.npy', arr[::args.stride])
+ 
+ if __name__ == "__main__":
+     main()
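Shape note for `traj_to_atom14` above (editorial): the output is (n_frames, n_residues, 14, 3) in Angstroms (mdtraj stores nm, hence the factor of 10), with atoms ordered per `restype_name_to_atom14_names` and zeros where a residue has fewer than 14 heavy atoms.

    # traj = mdtraj.load('traj.xtc', top='top.pdb')   # hypothetical inputs
    # arr = traj_to_atom14(traj)
    # assert arr.shape == (traj.n_frames, traj.n_residues, 14, 3)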
data/scripts/run_peptide_sim.py ADDED
@@ -0,0 +1,142 @@
+ import argparse
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--indir', type=str, default='4AA')
+ parser.add_argument('--outdir', type=str, default='/data/cb/scratch/share/mdgen/4AA_sims')
+ parser.add_argument('--worker_id', type=int, default=0)
+ parser.add_argument('--num_workers', type=int, default=1)
+ parser.add_argument('--pdb_id', nargs='*', default=[])
+ parser.add_argument('--joblist', type=str, default='splits/1k_4AA.csv')
+ parser.add_argument('--implicit', action='store_true')
+ parser.add_argument('--job_id', nargs='*', default=[])
+ parser.add_argument('--sim_ns', type=float, default=100)
+ parser.add_argument('--print_freq', type=int, default=1000)
+ parser.add_argument('--save_ps', type=float, default=0.1)
+ parser.add_argument('--friction_coeff', type=float, default=0.3)
+ parser.add_argument('--md_device', type=str, default="CUDA")
+ args = parser.parse_args()
+
+ import openmm, mdtraj, sys, os
+ from openmm.app import PDBFile, ForceField, Modeller, PME, HBonds, Simulation, StateDataReporter
+ from openmm import unit, LangevinMiddleIntegrator, Platform, MonteCarloBarostat
+ import numpy as np
+ import pandas as pd
+
+
+ dt = 2 * unit.femtosecond
+ total_steps = int((args.sim_ns * unit.nanosecond) / dt)
+ save_interval = int((args.save_ps * unit.picosecond) / dt)
+ print(f"Running for {total_steps} steps")
+ print(f"Saving every {save_interval} steps")
+ print(f"Will save {int(total_steps / save_interval)} frames")
+
+
+ def make(aa):
+     if '_' in aa:
+         aa = aa.split('_')[0]
+     print(f'Making {aa}')
+     from pymol import cmd
+     cmd.reinitialize()
+     cmd.fab(aa, hydro=0)
+     cmd.save(f'{args.indir}/{aa}.pdb')
+     print(f'Fixing {aa}')
+     from pdbfixer import PDBFixer
+     fixer = PDBFixer(filename=f'{args.indir}/{aa}.pdb')
+     fixer.missingResidues = {}
+     fixer.findMissingAtoms()
+     fixer.addMissingAtoms()
+     with open(f'{args.indir}/{aa}.pdb', 'w') as f:
+         PDBFile.writeFile(fixer.topology, fixer.positions, f, True)
+
+     # subprocess.run(['pdbfixer', f'{args.indir}/{aa}.pdb', '--add-atoms=heavy'])
+     # subprocess.run(['mv', 'output.pdb', f'{args.indir}/{aa}.pdb'])
+
+ def do(name):
+     os.makedirs(f"{args.outdir}/{name}", exist_ok=True)
+
+     aa = name.split('_')[0]
+     if not os.path.exists(f"{args.indir}/{aa}.pdb"):
+         make(aa)
+     pdb = PDBFile(f"{args.indir}/{aa}.pdb")
+     if args.implicit:
+         forcefield = ForceField('amber14-all.xml', 'implicit/gbn2.xml')
+     else:
+         forcefield = ForceField('amber14-all.xml', 'amber14/tip3pfb.xml')
+     modeller = Modeller(pdb.topology, pdb.positions)
+     modeller.addHydrogens(forcefield, pH=7)
+
+     if args.implicit:
+         system = forcefield.createSystem(modeller.topology, constraints=HBonds)
+     else:
+         modeller.addSolvent(forcefield, padding=1.0 * unit.nanometer)
+         system = forcefield.createSystem(modeller.topology, nonbondedMethod=PME,
+                                          nonbondedCutoff=1.0 * unit.nanometer,
+                                          constraints=HBonds)
+
+     integrator = LangevinMiddleIntegrator(350 * unit.kelvin, args.friction_coeff / unit.picosecond, dt)
+     simulation = Simulation(modeller.topology, system, integrator,
+                             platform=Platform.getPlatformByName(args.md_device))
+     simulation.context.setPositions(modeller.positions)
+
+     top = mdtraj.Topology.from_openmm(modeller.topology)
+     print(f'System with {top.n_atoms} atoms')
+     mask = top.select("protein and not type H")
+     print(f'Reporting {len(mask)} atoms')
+     reporter = mdtraj.reporters.HDF5Reporter(f'{args.outdir}/{name}/{name}.h5', reportInterval=save_interval,
+                                              atomSubset=mask)
+
+     print('Minimizing energy')
+     simulation.minimizeEnergy()
+
+     print("Running NVT")
+     simulation.reporters.append(StateDataReporter(
+         sys.stdout,
+         reportInterval=args.print_freq,
+         step=True,
+         potentialEnergy=True,
+         temperature=True,
+         volume=True,
+         speed=True,
+         remainingTime=True,
+         totalSteps=total_steps + 10000
+     ))
+     simulation.reporters.append(StateDataReporter(
+         open(f'{args.outdir}/{name}/{name}.out', 'w'),
+         reportInterval=1000,
+         step=True,
+         potentialEnergy=True,
+         temperature=True,
+         volume=True,
+         speed=True,
+         remainingTime=True,
+         totalSteps=total_steps + 10000
+     ))
+     simulation.step(10000)
+
+     if not args.implicit:
+         print("Running NPT")
+         system.addForce(MonteCarloBarostat(1 * unit.bar, 350 * unit.kelvin))
+         simulation.context.reinitialize(preserveState=True)
+
+     simulation.reporters.append(reporter)
+     simulation.step(total_steps)
+     reporter.close()
+
+     print("Converting to XTC")
+     traj = mdtraj.load(f'{args.outdir}/{name}/{name}.h5')
+     traj.superpose(traj)
+     traj.save(f'{args.outdir}/{name}/{name}.xtc')
+     traj[0].save(f'{args.outdir}/{name}/{name}.pdb')
+
+ if args.pdb_id:
+     jobs = args.pdb_id
+ else:
+     jobs = np.array(pd.read_csv(args.joblist, index_col='name').index)
+     if args.job_id:
+         # jobs is an ndarray here, so index it directly; the original "jobs.index[...]" would raise AttributeError
+         jobs = jobs[list(map(int, args.job_id))]
+     else:
+         jobs = jobs[args.worker_id::args.num_workers]
+
+ for name in jobs:
+     do(name)
+
+
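For reference, the step arithmetic above with the default arguments (sim_ns=100, save_ps=0.1, dt = 2 fs) works out to 50,000,000 production steps with a frame saved every 50 steps, i.e. 1,000,000 frames per peptide. A standalone sanity check of the same unit algebra, without openmm (illustrative only):

    dt_fs = 2.0                                # 2 fs timestep
    total_steps = int(100 * 1e6 / dt_fs)       # 100 ns = 1e8 fs -> 50,000,000 steps
    save_interval = int(0.1 * 1e3 / dt_fs)     # 0.1 ps = 100 fs -> 50 steps
    print(total_steps, save_interval, total_steps // save_interval)  # 50000000 50 1000000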
data/sim_inference.py ADDED
@@ -0,0 +1,140 @@
+ import argparse
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--sim_ckpt', type=str, default=None, required=True)
+ parser.add_argument('--data_dir', type=str, default=None, required=True)
+ parser.add_argument('--suffix', type=str, default='')
+ parser.add_argument('--pdb_id', nargs='*', default=[])
+ parser.add_argument('--num_frames', type=int, default=1000)
+ parser.add_argument('--num_rollouts', type=int, default=100)
+ parser.add_argument('--no_frames', action='store_true')
+ parser.add_argument('--tps', action='store_true')
+ parser.add_argument('--xtc', action='store_true')
+ parser.add_argument('--out_dir', type=str, default=".")
+ parser.add_argument('--split', type=str, default='splits/4AA_test.csv')
+ args = parser.parse_args()
+
+ import os, torch, mdtraj, tqdm, time
+ import numpy as np
+ from mdgen.geometry import atom14_to_frames, atom14_to_atom37, atom37_to_torsions
+ from mdgen.residue_constants import restype_order, restype_atom37_mask
+ from mdgen.tensor_utils import tensor_tree_map
+ from mdgen.wrapper import NewMDGenWrapper
+ from mdgen.utils import atom14_to_pdb
+ import pandas as pd
+
+
+ os.makedirs(args.out_dir, exist_ok=True)
+
+
+ def get_batch(name, seqres, num_frames):
+     arr = np.lib.format.open_memmap(f'{args.data_dir}/{name}{args.suffix}.npy', 'r')
+
+     if not args.tps:  # else keep all frames
+         arr = np.copy(arr[0:1]).astype(np.float32)
+
+     frames = atom14_to_frames(torch.from_numpy(arr))
+     seqres = torch.tensor([restype_order[c] for c in seqres])
+     atom37 = torch.from_numpy(atom14_to_atom37(arr, seqres[None])).float()
+     L = len(seqres)
+     mask = torch.ones(L)
+
+     if args.no_frames:
+         return {
+             'atom37': atom37,
+             'seqres': seqres,
+             'mask': restype_atom37_mask[seqres],
+         }
+
+     torsions, torsion_mask = atom37_to_torsions(atom37, seqres[None])
+     return {
+         'torsions': torsions,
+         'torsion_mask': torsion_mask[0],
+         'trans': frames._trans,
+         'rots': frames._rots._rot_mats,
+         'seqres': seqres,
+         'mask': mask,  # (L,)
+     }
+
+ def rollout(model, batch):
+     # print('Start sim', batch['trans'][0,0,0])
+     if args.no_frames:
+         expanded_batch = {
+             'atom37': batch['atom37'].expand(-1, args.num_frames, -1, -1, -1),
+             'seqres': batch['seqres'],
+             'mask': batch['mask'],
+         }
+     else:
+         expanded_batch = {
+             'torsions': batch['torsions'].expand(-1, args.num_frames, -1, -1, -1),
+             'torsion_mask': batch['torsion_mask'],
+             'trans': batch['trans'].expand(-1, args.num_frames, -1, -1),
+             'rots': batch['rots'].expand(-1, args.num_frames, -1, -1, -1),
+             'seqres': batch['seqres'],
+             'mask': batch['mask'],
+         }
+     atom14, _ = model.inference(expanded_batch)
+     new_batch = {**batch}
+
+     if args.no_frames:
+         new_batch['atom37'] = torch.from_numpy(
+             atom14_to_atom37(atom14[:,-1].cpu(), batch['seqres'][0].cpu())
+         ).cuda()[:,None].float()
+     else:
+         frames = atom14_to_frames(atom14[:,-1])
+         new_batch['trans'] = frames._trans[None]
+         new_batch['rots'] = frames._rots._rot_mats[None]
+         atom37 = atom14_to_atom37(atom14[0,-1].cpu(), batch['seqres'][0].cpu())
+         torsions, _ = atom37_to_torsions(atom37, batch['seqres'][0].cpu())
+         new_batch['torsions'] = torsions[None, None].cuda()
+
+     return atom14, new_batch
+
+
+ def do(model, name, seqres):
+     item = get_batch(name, seqres, num_frames=model.args.num_frames)
+     batch = next(iter(torch.utils.data.DataLoader([item])))
+
+     batch = tensor_tree_map(lambda x: x.cuda(), batch)
+
+     all_atom14 = []
+     start = time.time()
+     for _ in tqdm.trange(args.num_rollouts):
+         atom14, batch = rollout(model, batch)
+         # print(atom14[0,0,0,1], atom14[0,-1,0,1])
+         all_atom14.append(atom14)
+
+     print(time.time() - start)
+     all_atom14 = torch.cat(all_atom14, 1)
+
+     path = os.path.join(args.out_dir, f'{name}.pdb')
+     atom14_to_pdb(all_atom14[0].cpu().numpy(), batch['seqres'][0].cpu().numpy(), path)
+
+     if args.xtc:
+         traj = mdtraj.load(path)
+         traj.superpose(traj)
+         traj.save(os.path.join(args.out_dir, f'{name}.xtc'))
+         traj[0].save(os.path.join(args.out_dir, f'{name}.pdb'))
+
+ @torch.no_grad()
+ def main():
+     model = NewMDGenWrapper.load_from_checkpoint(args.sim_ckpt)
+     model.eval().to('cuda')
+
+     df = pd.read_csv(args.split, index_col='name')
+     for name in df.index:
+         if args.pdb_id and name not in args.pdb_id:
+             continue
+         do(model, name, df.seqres[name])
+
+
+ main()
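The rollout loop above amounts to autoregressive windowed generation: each rollout() call produces num_frames new frames conditioned on the previous window's final frame, and do() concatenates the windows along the time axis. A toy sketch of just that bookkeeping (stub model and invented sizes for a 4-residue peptide; nothing here is the real NewMDGenWrapper API):

    import torch

    num_frames, num_rollouts, L = 8, 3, 4   # toy sizes; the script defaults to 1000 x 100

    def fake_rollout(cond_frame):
        # stand-in for model.inference(): emit a window seeded from the conditioning frame
        window = cond_frame.expand(1, num_frames, L, 14, 3) + torch.randn(1, num_frames, L, 14, 3)
        return window, window[:, -1:]       # next conditioning frame = last frame of this window

    state = torch.zeros(1, 1, L, 14, 3)
    windows = []
    for _ in range(num_rollouts):
        w, state = fake_rollout(state)
        windows.append(w)
    traj = torch.cat(windows, dim=1)
    print(traj.shape)                       # (1, num_rollouts * num_frames, L, 14, 3)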
data/splits/4AA.csv ADDED
@@ -0,0 +1,3310 @@
1
+ name,seqres
2
+ EHFR,EHFR
3
+ IMCP,IMCP
4
+ YVTL,YVTL
5
+ GQGV,GQGV
6
+ PCFK,PCFK
7
+ VVIV,VVIV
8
+ QYTC,QYTC
9
+ WGDY,WGDY
10
+ CWLF,CWLF
11
+ QHIN,QHIN
12
+ ASKI,ASKI
13
+ KITV,KITV
14
+ SLYW,SLYW
15
+ REVV,REVV
16
+ NYLE,NYLE
17
+ QGRE,QGRE
18
+ AEHY,AEHY
19
+ SRPT,SRPT
20
+ NGDS,NGDS
21
+ ATVV,ATVV
22
+ DKFA,DKFA
23
+ MSTP,MSTP
24
+ VNWW,VNWW
25
+ LDNH,LDNH
26
+ CHSI,CHSI
27
+ YFCS,YFCS
28
+ WLSC,WLSC
29
+ YYTK,YYTK
30
+ GPNT,GPNT
31
+ LTQE,LTQE
32
+ VMHV,VMHV
33
+ GTLM,GTLM
34
+ QRRW,QRRW
35
+ EGDW,EGDW
36
+ IFRG,IFRG
37
+ LRYM,LRYM
38
+ ITGR,ITGR
39
+ PTNI,PTNI
40
+ KQSH,KQSH
41
+ WTWS,WTWS
42
+ CASF,CASF
43
+ VKFG,VKFG
44
+ GQYP,GQYP
45
+ CMYH,CMYH
46
+ VFGD,VFGD
47
+ KWIC,KWIC
48
+ PQHG,PQHG
49
+ FLIS,FLIS
50
+ QEGR,QEGR
51
+ WQTE,WQTE
52
+ MHTN,MHTN
53
+ ISML,ISML
54
+ HRCS,HRCS
55
+ EQVR,EQVR
56
+ NNKC,NNKC
57
+ MVQY,MVQY
58
+ FART,FART
59
+ ENKE,ENKE
60
+ TIDH,TIDH
61
+ RDTI,RDTI
62
+ KAPF,KAPF
63
+ ICLP,ICLP
64
+ HDQW,HDQW
65
+ HAPV,HAPV
66
+ AMEN,AMEN
67
+ GPHG,GPHG
68
+ QHFV,QHFV
69
+ GHSS,GHSS
70
+ NEEE,NEEE
71
+ PRGM,PRGM
72
+ YNML,YNML
73
+ RHDG,RHDG
74
+ VMVL,VMVL
75
+ ETYF,ETYF
76
+ EGVT,EGVT
77
+ CWFC,CWFC
78
+ DNYP,DNYP
79
+ RREW,RREW
80
+ QDCN,QDCN
81
+ QNCG,QNCG
82
+ VVVR,VVVR
83
+ FRPQ,FRPQ
84
+ FVFN,FVFN
85
+ PEPR,PEPR
86
+ TQVK,TQVK
87
+ FCND,FCND
88
+ FVLS,FVLS
89
+ NMSG,NMSG
90
+ KCWL,KCWL
91
+ LMWL,LMWL
92
+ WDHC,WDHC
93
+ VGWM,VGWM
94
+ NLQW,NLQW
95
+ CETY,CETY
96
+ EFMM,EFMM
97
+ EDTV,EDTV
98
+ PIRK,PIRK
99
+ YTPA,YTPA
100
+ HEKI,HEKI
101
+ GYQH,GYQH
102
+ FLRH,FLRH
103
+ IMRY,IMRY
104
+ RTVD,RTVD
105
+ PIDV,PIDV
106
+ AKIR,AKIR
107
+ IPGD,IPGD
108
+ HTIQ,HTIQ
109
+ WWQW,WWQW
110
+ KDFM,KDFM
111
+ ESSS,ESSS
112
+ ASRE,ASRE
113
+ LCLQ,LCLQ
114
+ VIYH,VIYH
115
+ IVMA,IVMA
116
+ RVQQ,RVQQ
117
+ DQKV,DQKV
118
+ KDDD,KDDD
119
+ CPEE,CPEE
120
+ WSAQ,WSAQ
121
+ VQCL,VQCL
122
+ CSYR,CSYR
123
+ LAWA,LAWA
124
+ VWVR,VWVR
125
+ SPVN,SPVN
126
+ MIAY,MIAY
127
+ GLIL,GLIL
128
+ AGVD,AGVD
129
+ SNSF,SNSF
130
+ FHSE,FHSE
131
+ TLRK,TLRK
132
+ IAMI,IAMI
133
+ CPYV,CPYV
134
+ NWWG,NWWG
135
+ LQMG,LQMG
136
+ KEGH,KEGH
137
+ FGGW,FGGW
138
+ NVFH,NVFH
139
+ DLCG,DLCG
140
+ APWF,APWF
141
+ QVTC,QVTC
142
+ YQGI,YQGI
143
+ LVVF,LVVF
144
+ RVQS,RVQS
145
+ KAEP,KAEP
146
+ WQVP,WQVP
147
+ LWMR,LWMR
148
+ NNDK,NNDK
149
+ MHVR,MHVR
150
+ EDQK,EDQK
151
+ CACS,CACS
152
+ CKHT,CKHT
153
+ IWHF,IWHF
154
+ IPTH,IPTH
155
+ VDRN,VDRN
156
+ YCPF,YCPF
157
+ HELI,HELI
158
+ SFCH,SFCH
159
+ YLID,YLID
160
+ GGHN,GGHN
161
+ FSDP,FSDP
162
+ CKVH,CKVH
163
+ NTVG,NTVG
164
+ IHNV,IHNV
165
+ YMQN,YMQN
166
+ PRVC,PRVC
167
+ SDFS,SDFS
168
+ SPFH,SPFH
169
+ FSRK,FSRK
170
+ QNWV,QNWV
171
+ PYHQ,PYHQ
172
+ FDNT,FDNT
173
+ KAQW,KAQW
174
+ PKDM,PKDM
175
+ LIFE,LIFE
176
+ ALDA,ALDA
177
+ PGKM,PGKM
178
+ QRNF,QRNF
179
+ SSNN,SSNN
180
+ FKKL,FKKL
181
+ MWQK,MWQK
182
+ RLKR,RLKR
183
+ KSIY,KSIY
184
+ GCIC,GCIC
185
+ INVH,INVH
186
+ NFQF,NFQF
187
+ RDWY,RDWY
188
+ SNIN,SNIN
189
+ ESIC,ESIC
190
+ LIRH,LIRH
191
+ CIYL,CIYL
192
+ MAFM,MAFM
193
+ AASF,AASF
194
+ HENV,HENV
195
+ EHEV,EHEV
196
+ LFNV,LFNV
197
+ RQLP,RQLP
198
+ PNHP,PNHP
199
+ CFEE,CFEE
200
+ DHAR,DHAR
201
+ IDRH,IDRH
202
+ EFPT,EFPT
203
+ CEML,CEML
204
+ KSVT,KSVT
205
+ SMSN,SMSN
206
+ AAYQ,AAYQ
207
+ KQTG,KQTG
208
+ WFPD,WFPD
209
+ FYVR,FYVR
210
+ RYSH,RYSH
211
+ TFKF,TFKF
212
+ LLRT,LLRT
213
+ IERA,IERA
214
+ KPHD,KPHD
215
+ GMMY,GMMY
216
+ RGQL,RGQL
217
+ NYER,NYER
218
+ NDWN,NDWN
219
+ WKWY,WKWY
220
+ QNCY,QNCY
221
+ FFSG,FFSG
222
+ WEFG,WEFG
223
+ FCGM,FCGM
224
+ EDTS,EDTS
225
+ PEGV,PEGV
226
+ RTNT,RTNT
227
+ NNPT,NNPT
228
+ DENA,DENA
229
+ YEPH,YEPH
230
+ NSYF,NSYF
231
+ IYGF,IYGF
232
+ DTGE,DTGE
233
+ TWWQ,TWWQ
234
+ FRID,FRID
235
+ MRVV,MRVV
236
+ ITDP,ITDP
237
+ IVFN,IVFN
238
+ PYDT,PYDT
239
+ LPGP,LPGP
240
+ MGNM,MGNM
241
+ TFWF,TFWF
242
+ LFHI,LFHI
243
+ HYGW,HYGW
244
+ LWPA,LWPA
245
+ FLDY,FLDY
246
+ QKAD,QKAD
247
+ QYTM,QYTM
248
+ YMSS,YMSS
249
+ NWWL,NWWL
250
+ FHWL,FHWL
251
+ HFFG,HFFG
252
+ DGNM,DGNM
253
+ HWPA,HWPA
254
+ VAEY,VAEY
255
+ LKSL,LKSL
256
+ HYAN,HYAN
257
+ PEYE,PEYE
258
+ QLFI,QLFI
259
+ CGNH,CGNH
260
+ EVFQ,EVFQ
261
+ FSQR,FSQR
262
+ ETIT,ETIT
263
+ CNNF,CNNF
264
+ ETRW,ETRW
265
+ MKPK,MKPK
266
+ YAQK,YAQK
267
+ GMLH,GMLH
268
+ WANF,WANF
269
+ TQVG,TQVG
270
+ THGW,THGW
271
+ NSGW,NSGW
272
+ HPLH,HPLH
273
+ TWDS,TWDS
274
+ SCQK,SCQK
275
+ WQCY,WQCY
276
+ FIRG,FIRG
277
+ PSMR,PSMR
278
+ THIC,THIC
279
+ KQRT,KQRT
280
+ LICP,LICP
281
+ DLYC,DLYC
282
+ WMDW,WMDW
283
+ QYRQ,QYRQ
284
+ WDWF,WDWF
285
+ NGYG,NGYG
286
+ CRTL,CRTL
287
+ QADM,QADM
288
+ GVVY,GVVY
289
+ EWNK,EWNK
290
+ MWGY,MWGY
291
+ SLHS,SLHS
292
+ PTHA,PTHA
293
+ ANIP,ANIP
294
+ ANMY,ANMY
295
+ RMMC,RMMC
296
+ MWCA,MWCA
297
+ PTRW,PTRW
298
+ ADSC,ADSC
299
+ PYRM,PYRM
300
+ QVLA,QVLA
301
+ KSLG,KSLG
302
+ GLCK,GLCK
303
+ QIEG,QIEG
304
+ PGNE,PGNE
305
+ IPRW,IPRW
306
+ HTSM,HTSM
307
+ IPEM,IPEM
308
+ NQRK,NQRK
309
+ WVTA,WVTA
310
+ SFIT,SFIT
311
+ VDEF,VDEF
312
+ NDPC,NDPC
313
+ VVKC,VVKC
314
+ TSTY,TSTY
315
+ QRCV,QRCV
316
+ SQFR,SQFR
317
+ KTLL,KTLL
318
+ TYHA,TYHA
319
+ CIAC,CIAC
320
+ KIED,KIED
321
+ ANFS,ANFS
322
+ KTNF,KTNF
323
+ KAAC,KAAC
324
+ ADYH,ADYH
325
+ EKIA,EKIA
326
+ HWRN,HWRN
327
+ DKYV,DKYV
328
+ DQPA,DQPA
329
+ DIFK,DIFK
330
+ MFVD,MFVD
331
+ HWLK,HWLK
332
+ TDQK,TDQK
333
+ DFIC,DFIC
334
+ LDDQ,LDDQ
335
+ DAWF,DAWF
336
+ TDPL,TDPL
337
+ QCQW,QCQW
338
+ MKHG,MKHG
339
+ PCPL,PCPL
340
+ MLPR,MLPR
341
+ NDNV,NDNV
342
+ LTPR,LTPR
343
+ YRVC,YRVC
344
+ HVHQ,HVHQ
345
+ YMPM,YMPM
346
+ FGMM,FGMM
347
+ RFLT,RFLT
348
+ SEAH,SEAH
349
+ VHHC,VHHC
350
+ SMAS,SMAS
351
+ GSLP,GSLP
352
+ NQEF,NQEF
353
+ DEPV,DEPV
354
+ THHL,THHL
355
+ QSAP,QSAP
356
+ VVLI,VVLI
357
+ AGVI,AGVI
358
+ STYP,STYP
359
+ FYWL,FYWL
360
+ SWGL,SWGL
361
+ SSTS,SSTS
362
+ LDGR,LDGR
363
+ LKLW,LKLW
364
+ RYPT,RYPT
365
+ TTHG,TTHG
366
+ SFQK,SFQK
367
+ SNID,SNID
368
+ KGCR,KGCR
369
+ MVHP,MVHP
370
+ FDNC,FDNC
371
+ TFRF,TFRF
372
+ LHIM,LHIM
373
+ RCKP,RCKP
374
+ MYYA,MYYA
375
+ ALQG,ALQG
376
+ DTCD,DTCD
377
+ MLPY,MLPY
378
+ RLEH,RLEH
379
+ LQLG,LQLG
380
+ LNWY,LNWY
381
+ GLQK,GLQK
382
+ GITD,GITD
383
+ NPDS,NPDS
384
+ MDKA,MDKA
385
+ DLYV,DLYV
386
+ AAKD,AAKD
387
+ WRPI,WRPI
388
+ MIQK,MIQK
389
+ WDDK,WDDK
390
+ YNKT,YNKT
391
+ VIHL,VIHL
392
+ ERQS,ERQS
393
+ AYIY,AYIY
394
+ NHDQ,NHDQ
395
+ TKAK,TKAK
396
+ VRNG,VRNG
397
+ IPVY,IPVY
398
+ SQVM,SQVM
399
+ KTTF,KTTF
400
+ KGHN,KGHN
401
+ RTSK,RTSK
402
+ IAIM,IAIM
403
+ FGGV,FGGV
404
+ SHPE,SHPE
405
+ QDFY,QDFY
406
+ SEME,SEME
407
+ EKME,EKME
408
+ YSCA,YSCA
409
+ PTMP,PTMP
410
+ KEWD,KEWD
411
+ TGPT,TGPT
412
+ ILIG,ILIG
413
+ AAKQ,AAKQ
414
+ SGLQ,SGLQ
415
+ LQRL,LQRL
416
+ TTLY,TTLY
417
+ PFGV,PFGV
418
+ FPCE,FPCE
419
+ VSFH,VSFH
420
+ PHEV,PHEV
421
+ EHLC,EHLC
422
+ FFSV,FFSV
423
+ DCVV,DCVV
424
+ RPEY,RPEY
425
+ WYNA,WYNA
426
+ FLKS,FLKS
427
+ EPWP,EPWP
428
+ PMPK,PMPK
429
+ DNCQ,DNCQ
430
+ MYCF,MYCF
431
+ FLHM,FLHM
432
+ WYIG,WYIG
433
+ LNPR,LNPR
434
+ HLSN,HLSN
435
+ CQYS,CQYS
436
+ QQSD,QQSD
437
+ RSAH,RSAH
438
+ SNCV,SNCV
439
+ DIGN,DIGN
440
+ WKRV,WKRV
441
+ NYQM,NYQM
442
+ PFTA,PFTA
443
+ QASA,QASA
444
+ WQRR,WQRR
445
+ FFRN,FFRN
446
+ WKSH,WKSH
447
+ HEGV,HEGV
448
+ SIKA,SIKA
449
+ YSGC,YSGC
450
+ HRND,HRND
451
+ VFWY,VFWY
452
+ NFRP,NFRP
453
+ PVVH,PVVH
454
+ YQIW,YQIW
455
+ QNFH,QNFH
456
+ ISFP,ISFP
457
+ YLCA,YLCA
458
+ NQSC,NQSC
459
+ SMYW,SMYW
460
+ HWTP,HWTP
461
+ ARGH,ARGH
462
+ CRMN,CRMN
463
+ FKWW,FKWW
464
+ QNMV,QNMV
465
+ EWLN,EWLN
466
+ MSLN,MSLN
467
+ AFCQ,AFCQ
468
+ WKPH,WKPH
469
+ IKQG,IKQG
470
+ GADR,GADR
471
+ AKCN,AKCN
472
+ PMMV,PMMV
473
+ LDPH,LDPH
474
+ QMKT,QMKT
475
+ EANQ,EANQ
476
+ HILY,HILY
477
+ DGCA,DGCA
478
+ QWFI,QWFI
479
+ RRCY,RRCY
480
+ AQFQ,AQFQ
481
+ PICC,PICC
482
+ GPGP,GPGP
483
+ IAYH,IAYH
484
+ MHGM,MHGM
485
+ WMFD,WMFD
486
+ VHTK,VHTK
487
+ NWPN,NWPN
488
+ LKRV,LKRV
489
+ GFYF,GFYF
490
+ QTSK,QTSK
491
+ HPNK,HPNK
492
+ QTSA,QTSA
493
+ FGAR,FGAR
494
+ ITWF,ITWF
495
+ LRTE,LRTE
496
+ AEAF,AEAF
497
+ FHLQ,FHLQ
498
+ PIAC,PIAC
499
+ QWPL,QWPL
500
+ KDKI,KDKI
501
+ STSR,STSR
502
+ PTIG,PTIG
503
+ QIVA,QIVA
504
+ MKCC,MKCC
505
+ WSYN,WSYN
506
+ NAWY,NAWY
507
+ YVLE,YVLE
508
+ VCRV,VCRV
509
+ MHAQ,MHAQ
510
+ TSWC,TSWC
511
+ QKYR,QKYR
512
+ TFIL,TFIL
513
+ ITKR,ITKR
514
+ DVGL,DVGL
515
+ FVEL,FVEL
516
+ CGPF,CGPF
517
+ GPSL,GPSL
518
+ LQPT,LQPT
519
+ WKLD,WKLD
520
+ MRLH,MRLH
521
+ TYAM,TYAM
522
+ VNAK,VNAK
523
+ CEIA,CEIA
524
+ WQDT,WQDT
525
+ SRQV,SRQV
526
+ CLHA,CLHA
527
+ TPEA,TPEA
528
+ EEKV,EEKV
529
+ NHEL,NHEL
530
+ NKCC,NKCC
531
+ VVSN,VVSN
532
+ EETC,EETC
533
+ GTEV,GTEV
534
+ NMYR,NMYR
535
+ PYYT,PYYT
536
+ FRCR,FRCR
537
+ AMQN,AMQN
538
+ KDYC,KDYC
539
+ QNIE,QNIE
540
+ MMDH,MMDH
541
+ MWLR,MWLR
542
+ AELN,AELN
543
+ KNQT,KNQT
544
+ TNHK,TNHK
545
+ TDNF,TDNF
546
+ YPDQ,YPDQ
547
+ NTDP,NTDP
548
+ SEGL,SEGL
549
+ PMWQ,PMWQ
550
+ CPQA,CPQA
551
+ PDPH,PDPH
552
+ NYWT,NYWT
553
+ WTIN,WTIN
554
+ TQLE,TQLE
555
+ HFAP,HFAP
556
+ NSCW,NSCW
557
+ LVTE,LVTE
558
+ CWWT,CWWT
559
+ PECP,PECP
560
+ WGWE,WGWE
561
+ FYTW,FYTW
562
+ LHGA,LHGA
563
+ PFGR,PFGR
564
+ SNIA,SNIA
565
+ HPIQ,HPIQ
566
+ KWMG,KWMG
567
+ VIWL,VIWL
568
+ LEIC,LEIC
569
+ LSIQ,LSIQ
570
+ VSSK,VSSK
571
+ ICDY,ICDY
572
+ FFCC,FFCC
573
+ IFWQ,IFWQ
574
+ YWWI,YWWI
575
+ HTDL,HTDL
576
+ DPDA,DPDA
577
+ HLYC,HLYC
578
+ SVCF,SVCF
579
+ VGNK,VGNK
580
+ IKGW,IKGW
581
+ DVFC,DVFC
582
+ PKSC,PKSC
583
+ CAML,CAML
584
+ INCE,INCE
585
+ QEKW,QEKW
586
+ EGSR,EGSR
587
+ NDAL,NDAL
588
+ VGCC,VGCC
589
+ AHVE,AHVE
590
+ KSKE,KSKE
591
+ VPKS,VPKS
592
+ AQYS,AQYS
593
+ CHYC,CHYC
594
+ EVNN,EVNN
595
+ VQVG,VQVG
596
+ ECDG,ECDG
597
+ CSIT,CSIT
598
+ EEFG,EEFG
599
+ AFRN,AFRN
600
+ NLSK,NLSK
601
+ DSQY,DSQY
602
+ ELHH,ELHH
603
+ SNFS,SNFS
604
+ SYPG,SYPG
605
+ DFAK,DFAK
606
+ QAQD,QAQD
607
+ IIDS,IIDS
608
+ MFSG,MFSG
609
+ LANN,LANN
610
+ KKNF,KKNF
611
+ VLNM,VLNM
612
+ DEWW,DEWW
613
+ LEHK,LEHK
614
+ SMVW,SMVW
615
+ IING,IING
616
+ GIIY,GIIY
617
+ IHGP,IHGP
618
+ PEFC,PEFC
619
+ RKHR,RKHR
620
+ DRPY,DRPY
621
+ CPVD,CPVD
622
+ CKVF,CKVF
623
+ PEID,PEID
624
+ LPRM,LPRM
625
+ PRRA,PRRA
626
+ YFTF,YFTF
627
+ VWGW,VWGW
628
+ AGTA,AGTA
629
+ NRHA,NRHA
630
+ VNVC,VNVC
631
+ KHRY,KHRY
632
+ RAKD,RAKD
633
+ YGPH,YGPH
634
+ GCHN,GCHN
635
+ TDRL,TDRL
636
+ HRLK,HRLK
637
+ PWRH,PWRH
638
+ KKTN,KKTN
639
+ TELH,TELH
640
+ EKTL,EKTL
641
+ TWVR,TWVR
642
+ SHQN,SHQN
643
+ TCIA,TCIA
644
+ THKW,THKW
645
+ VGES,VGES
646
+ KMHT,KMHT
647
+ VDYD,VDYD
648
+ KVYT,KVYT
649
+ KGFR,KGFR
650
+ NWNG,NWNG
651
+ PQPK,PQPK
652
+ FKVT,FKVT
653
+ VIDA,VIDA
654
+ AQMK,AQMK
655
+ LGCI,LGCI
656
+ AWDV,AWDV
657
+ AHKR,AHKR
658
+ HAPF,HAPF
659
+ LINW,LINW
660
+ SNYD,SNYD
661
+ KVFQ,KVFQ
662
+ SWML,SWML
663
+ NMHN,NMHN
664
+ RVSQ,RVSQ
665
+ MYCI,MYCI
666
+ WQPN,WQPN
667
+ VNRT,VNRT
668
+ QHTR,QHTR
669
+ RCRP,RCRP
670
+ HCWT,HCWT
671
+ GQSH,GQSH
672
+ RTHG,RTHG
673
+ NQQL,NQQL
674
+ SNHN,SNHN
675
+ MKIF,MKIF
676
+ YYHC,YYHC
677
+ YRMP,YRMP
678
+ SHQH,SHQH
679
+ WAWT,WAWT
680
+ TFIS,TFIS
681
+ DDWR,DDWR
682
+ AMDV,AMDV
683
+ ADRK,ADRK
684
+ HMHR,HMHR
685
+ FNAG,FNAG
686
+ EMYK,EMYK
687
+ WVSQ,WVSQ
688
+ AHFV,AHFV
689
+ RQHV,RQHV
690
+ TQYA,TQYA
691
+ AIWE,AIWE
692
+ CMKR,CMKR
693
+ IKWC,IKWC
694
+ RFML,RFML
695
+ VCGS,VCGS
696
+ RTNP,RTNP
697
+ MTYG,MTYG
698
+ MINS,MINS
699
+ DIKD,DIKD
700
+ KNFC,KNFC
701
+ ENSS,ENSS
702
+ MSPW,MSPW
703
+ VAYR,VAYR
704
+ YSME,YSME
705
+ LKTH,LKTH
706
+ PGHR,PGHR
707
+ LPAH,LPAH
708
+ QCWM,QCWM
709
+ GWSQ,GWSQ
710
+ IVSP,IVSP
711
+ YYFW,YYFW
712
+ WMAW,WMAW
713
+ LLDC,LLDC
714
+ WTHH,WTHH
715
+ GCQH,GCQH
716
+ WTGS,WTGS
717
+ LFGA,LFGA
718
+ LREV,LREV
719
+ TYGD,TYGD
720
+ VHTR,VHTR
721
+ CGCQ,CGCQ
722
+ FVIH,FVIH
723
+ GQFY,GQFY
724
+ ELYA,ELYA
725
+ EMRT,EMRT
726
+ DGDL,DGDL
727
+ QEPC,QEPC
728
+ NHDA,NHDA
729
+ IISC,IISC
730
+ NSMN,NSMN
731
+ GLTR,GLTR
732
+ CISI,CISI
733
+ RGVQ,RGVQ
734
+ KDMC,KDMC
735
+ SGYG,SGYG
736
+ ENNH,ENNH
737
+ NDPM,NDPM
738
+ AYAE,AYAE
739
+ DFWH,DFWH
740
+ GDII,GDII
741
+ GFKP,GFKP
742
+ CAYP,CAYP
743
+ KHDS,KHDS
744
+ GAWV,GAWV
745
+ TTNE,TTNE
746
+ FEDH,FEDH
747
+ YTAV,YTAV
748
+ AMVW,AMVW
749
+ LSQT,LSQT
750
+ NECR,NECR
751
+ GLGA,GLGA
752
+ PNYQ,PNYQ
753
+ KSTF,KSTF
754
+ KVCI,KVCI
755
+ RKIS,RKIS
756
+ PILN,PILN
757
+ GCCR,GCCR
758
+ SVCQ,SVCQ
759
+ WVLQ,WVLQ
760
+ LLGC,LLGC
761
+ HAWM,HAWM
762
+ AVQK,AVQK
763
+ NPGA,NPGA
764
+ TPEW,TPEW
765
+ SDNG,SDNG
766
+ PIQV,PIQV
767
+ YIMK,YIMK
768
+ NYMG,NYMG
769
+ LTYP,LTYP
770
+ GTTS,GTTS
771
+ FERD,FERD
772
+ EIGH,EIGH
773
+ CPKM,CPKM
774
+ QSNI,QSNI
775
+ HSKL,HSKL
776
+ CWEM,CWEM
777
+ NDWV,NDWV
778
+ FFIR,FFIR
779
+ CNSC,CNSC
780
+ KTGN,KTGN
781
+ RGSY,RGSY
782
+ CVLQ,CVLQ
783
+ NKNQ,NKNQ
784
+ CWYK,CWYK
785
+ SSRN,SSRN
786
+ KNGD,KNGD
787
+ PECK,PECK
788
+ NRKC,NRKC
789
+ HTAN,HTAN
790
+ NSKE,NSKE
791
+ ALFY,ALFY
792
+ KWPM,KWPM
793
+ CFTK,CFTK
794
+ PDKM,PDKM
795
+ TQIS,TQIS
796
+ DEWM,DEWM
797
+ EHMK,EHMK
798
+ IYNR,IYNR
799
+ HAFR,HAFR
800
+ DECE,DECE
801
+ WYVV,WYVV
802
+ GCPK,GCPK
803
+ DRKK,DRKK
804
+ KEVR,KEVR
805
+ WNWC,WNWC
806
+ PAYK,PAYK
807
+ QKEE,QKEE
808
+ QGEC,QGEC
809
+ LYGF,LYGF
810
+ IWAR,IWAR
811
+ GRAR,GRAR
812
+ AIYC,AIYC
813
+ TTVF,TTVF
814
+ HFLW,HFLW
815
+ NARM,NARM
816
+ RHRA,RHRA
817
+ QQDS,QQDS
818
+ FHSL,FHSL
819
+ AVDA,AVDA
820
+ HERQ,HERQ
821
+ TMMP,TMMP
822
+ PEMT,PEMT
823
+ AAFS,AAFS
824
+ QEEG,QEEG
825
+ ATMV,ATMV
826
+ CAFW,CAFW
827
+ LSGH,LSGH
828
+ YKLV,YKLV
829
+ MPNS,MPNS
830
+ CKMP,CKMP
831
+ KKYS,KKYS
832
+ IITA,IITA
833
+ PAIY,PAIY
834
+ WDCD,WDCD
835
+ RSDW,RSDW
836
+ QPAL,QPAL
837
+ EYPW,EYPW
838
+ FCWC,FCWC
839
+ CFRY,CFRY
840
+ QLCG,QLCG
841
+ QETE,QETE
842
+ TCNV,TCNV
843
+ DQFR,DQFR
844
+ HKTS,HKTS
845
+ GWDT,GWDT
846
+ SFMN,SFMN
847
+ GHMF,GHMF
848
+ HDRQ,HDRQ
849
+ RTNL,RTNL
850
+ YNKP,YNKP
851
+ KDIG,KDIG
852
+ LECA,LECA
853
+ RRGK,RRGK
854
+ LNPY,LNPY
855
+ IMVW,IMVW
856
+ PVDS,PVDS
857
+ RERF,RERF
858
+ KAER,KAER
859
+ FMGP,FMGP
860
+ PSAT,PSAT
861
+ VKEQ,VKEQ
862
+ CKCE,CKCE
863
+ TAAR,TAAR
864
+ QPRA,QPRA
865
+ HEAH,HEAH
866
+ LCNA,LCNA
867
+ DEMY,DEMY
868
+ TEPE,TEPE
869
+ WNLY,WNLY
870
+ SPHG,SPHG
871
+ GQHQ,GQHQ
872
+ ADYM,ADYM
873
+ FQYW,FQYW
874
+ VSDD,VSDD
875
+ NAKM,NAKM
876
+ QMDQ,QMDQ
877
+ SDAK,SDAK
878
+ HLDH,HLDH
879
+ SCCY,SCCY
880
+ HGSC,HGSC
881
+ RAVV,RAVV
882
+ KCND,KCND
883
+ LIDG,LIDG
884
+ GGMD,GGMD
885
+ PFRW,PFRW
886
+ NMLG,NMLG
887
+ NWSN,NWSN
888
+ TRGN,TRGN
889
+ MIYA,MIYA
890
+ IFAS,IFAS
891
+ QICA,QICA
892
+ VTIL,VTIL
893
+ NYLP,NYLP
894
+ QFGT,QFGT
895
+ GLDV,GLDV
896
+ DHNR,DHNR
897
+ NYRP,NYRP
898
+ CDKL,CDKL
899
+ WQSQ,WQSQ
900
+ ADPG,ADPG
901
+ FEHM,FEHM
902
+ DHFW,DHFW
903
+ WSSW,WSSW
904
+ FPMF,FPMF
905
+ SLLD,SLLD
906
+ KVRS,KVRS
907
+ ITNH,ITNH
908
+ GLAR,GLAR
909
+ CACL,CACL
910
+ HFDQ,HFDQ
911
+ LDPE,LDPE
912
+ FIQA,FIQA
913
+ WFSN,WFSN
914
+ NQQC,NQQC
915
+ CGWR,CGWR
916
+ QMDV,QMDV
917
+ QCEQ,QCEQ
918
+ YNKS,YNKS
919
+ ALCP,ALCP
920
+ RRKI,RRKI
921
+ GPCV,GPCV
922
+ HISI,HISI
923
+ QVAD,QVAD
924
+ DFWV,DFWV
925
+ WEAM,WEAM
926
+ CFWG,CFWG
927
+ GCWP,GCWP
928
+ PHPC,PHPC
929
+ AVSM,AVSM
930
+ TSPC,TSPC
931
+ MWLF,MWLF
932
+ RHSI,RHSI
933
+ HKQL,HKQL
934
+ IPTF,IPTF
935
+ SSNP,SSNP
936
+ ATLL,ATLL
937
+ SAHD,SAHD
938
+ YNQP,YNQP
939
+ RPSF,RPSF
940
+ TCHS,TCHS
941
+ AVGR,AVGR
942
+ HYPI,HYPI
943
+ PQYL,PQYL
944
+ GNIA,GNIA
945
+ LWQD,LWQD
946
+ SHSC,SHSC
947
+ FNCR,FNCR
948
+ VAGR,VAGR
949
+ YNLS,YNLS
950
+ NSLF,NSLF
951
+ MIIM,MIIM
952
+ RYPG,RYPG
953
+ DCQI,DCQI
954
+ PMFI,PMFI
955
+ CIFL,CIFL
956
+ TIRG,TIRG
957
+ CRVF,CRVF
958
+ WHEL,WHEL
959
+ ALWP,ALWP
960
+ CMKC,CMKC
961
+ PWID,PWID
962
+ ESTM,ESTM
963
+ EHDY,EHDY
964
+ EMIR,EMIR
965
+ NAWM,NAWM
966
+ LNCW,LNCW
967
+ FYDK,FYDK
968
+ WKEG,WKEG
969
+ HHCG,HHCG
970
+ NKPS,NKPS
971
+ FCDI,FCDI
972
+ HDSC,HDSC
973
+ PAYR,PAYR
974
+ YFMV,YFMV
975
+ NTGH,NTGH
976
+ SREK,SREK
977
+ FYCL,FYCL
978
+ GFHM,GFHM
979
+ EQCV,EQCV
980
+ GREQ,GREQ
981
+ NYCP,NYCP
982
+ EKER,EKER
983
+ SAIS,SAIS
984
+ RHIQ,RHIQ
985
+ AITQ,AITQ
986
+ FEAT,FEAT
987
+ PKPF,PKPF
988
+ HIMN,HIMN
989
+ DSSQ,DSSQ
990
+ KKYC,KKYC
991
+ YSST,YSST
992
+ LETH,LETH
993
+ PSFA,PSFA
994
+ VQLN,VQLN
995
+ LCCH,LCCH
996
+ MNWM,MNWM
997
+ DNTG,DNTG
998
+ YRQF,YRQF
999
+ NPIF,NPIF
1000
+ AISE,AISE
1001
+ ECGH,ECGH
1002
+ WQRL,WQRL
1003
+ YMKK,YMKK
1004
+ IWQA,IWQA
1005
+ TLRF,TLRF
1006
+ HGYL,HGYL
1007
+ SHPF,SHPF
1008
+ WVNW,WVNW
1009
+ QKIH,QKIH
1010
+ REVN,REVN
1011
+ CQAA,CQAA
1012
+ YMCK,YMCK
1013
+ GCAE,GCAE
1014
+ IYCC,IYCC
1015
+ LHPT,LHPT
1016
+ HYGI,HYGI
1017
+ MVPE,MVPE
1018
+ LGKM,LGKM
1019
+ WGRS,WGRS
1020
+ GSIK,GSIK
1021
+ KPPN,KPPN
1022
+ MNGQ,MNGQ
1023
+ EYCW,EYCW
1024
+ SCWT,SCWT
1025
+ IGNE,IGNE
1026
+ DAVK,DAVK
1027
+ EWIH,EWIH
1028
+ NTWM,NTWM
1029
+ ADSL,ADSL
1030
+ QHHD,QHHD
1031
+ YAYE,YAYE
1032
+ CTVL,CTVL
1033
+ YPNQ,YPNQ
1034
+ GSTY,GSTY
1035
+ NYDY,NYDY
1036
+ SITI,SITI
1037
+ TWNQ,TWNQ
1038
+ KWRC,KWRC
1039
+ IVKV,IVKV
1040
+ QHKL,QHKL
1041
+ HRRT,HRRT
1042
+ SLIP,SLIP
1043
+ EPYW,EPYW
1044
+ WYST,WYST
1045
+ KRQD,KRQD
1046
+ KDKA,KDKA
1047
+ LVTY,LVTY
1048
+ PLKQ,PLKQ
1049
+ DFTH,DFTH
1050
+ PRYH,PRYH
1051
+ HKEW,HKEW
1052
+ DMII,DMII
1053
+ MKSQ,MKSQ
1054
+ AGAE,AGAE
1055
+ KSCD,KSCD
1056
+ STKQ,STKQ
1057
+ THTI,THTI
1058
+ KDNF,KDNF
1059
+ KAHG,KAHG
1060
+ RGQA,RGQA
1061
+ QDKS,QDKS
1062
+ QSIS,QSIS
1063
+ NMSV,NMSV
1064
+ WAAK,WAAK
1065
+ GYRA,GYRA
1066
+ METL,METL
1067
+ VYAY,VYAY
1068
+ HAQA,HAQA
1069
+ HETA,HETA
1070
+ KVYK,KVYK
1071
+ VYCP,VYCP
1072
+ LLGG,LLGG
1073
+ YQLI,YQLI
1074
+ AYLP,AYLP
1075
+ FIVM,FIVM
1076
+ WMAS,WMAS
1077
+ CTRV,CTRV
1078
+ NMDG,NMDG
1079
+ FPDC,FPDC
1080
+ CIYD,CIYD
1081
+ GSKT,GSKT
1082
+ CGFR,CGFR
1083
+ TRSH,TRSH
1084
+ MVSL,MVSL
1085
+ RRKV,RRKV
1086
+ VLQI,VLQI
1087
+ VSAC,VSAC
1088
+ DWSM,DWSM
1089
+ NHNN,NHNN
1090
+ AFYN,AFYN
1091
+ EDDP,EDDP
1092
+ DHMM,DHMM
1093
+ QTHP,QTHP
1094
+ TTPK,TTPK
1095
+ KWHA,KWHA
1096
+ WCTW,WCTW
1097
+ ELHW,ELHW
1098
+ PRGH,PRGH
1099
+ KTSE,KTSE
1100
+ HFIG,HFIG
1101
+ PRTR,PRTR
1102
+ GTPP,GTPP
1103
+ APLR,APLR
1104
+ FPAW,FPAW
1105
+ WNGE,WNGE
1106
+ EYQP,EYQP
1107
+ KHDF,KHDF
1108
+ WELM,WELM
1109
+ KPRK,KPRK
1110
+ YSVA,YSVA
1111
+ TQSV,TQSV
1112
+ VMIK,VMIK
1113
+ RNHG,RNHG
1114
+ SLGK,SLGK
1115
+ ETEG,ETEG
1116
+ MWKN,MWKN
1117
+ TVHD,TVHD
1118
+ TPHR,TPHR
1119
+ SIHY,SIHY
1120
+ LLWM,LLWM
1121
+ EVGN,EVGN
1122
+ NKIP,NKIP
1123
+ PQDM,PQDM
1124
+ YDMK,YDMK
1125
+ CTPI,CTPI
1126
+ FYEH,FYEH
1127
+ NENL,NENL
1128
+ ETVQ,ETVQ
1129
+ NLYD,NLYD
1130
+ SIRG,SIRG
1131
+ PGIN,PGIN
1132
+ QSMW,QSMW
1133
+ HMLW,HMLW
1134
+ QSLR,QSLR
1135
+ CCNM,CCNM
1136
+ LKAW,LKAW
1137
+ TIKC,TIKC
1138
+ RPFF,RPFF
1139
+ SIVL,SIVL
1140
+ TVYC,TVYC
1141
+ HWRD,HWRD
1142
+ PVDL,PVDL
1143
+ RVSH,RVSH
1144
+ QGGV,QGGV
1145
+ ERLR,ERLR
1146
+ IYTM,IYTM
1147
+ KSYI,KSYI
1148
+ KPGM,KPGM
1149
+ WTFI,WTFI
1150
+ ICAC,ICAC
1151
+ TSMW,TSMW
1152
+ PIRF,PIRF
1153
+ NKRN,NKRN
1154
+ VFNR,VFNR
1155
+ QQRK,QQRK
1156
+ KFMW,KFMW
1157
+ PHVT,PHVT
1158
+ KQFW,KQFW
1159
+ DHQD,DHQD
1160
+ SHLR,SHLR
1161
+ ADDC,ADDC
1162
+ RKQI,RKQI
1163
+ PPFQ,PPFQ
1164
+ VECA,VECA
1165
+ EPSY,EPSY
1166
+ RAYD,RAYD
1167
+ QRFT,QRFT
1168
+ CYAI,CYAI
1169
+ NSYW,NSYW
1170
+ VEGM,VEGM
1171
+ PVKV,PVKV
1172
+ CGEF,CGEF
1173
+ QDEA,QDEA
1174
+ EFET,EFET
1175
+ QTCH,QTCH
1176
+ LIFR,LIFR
1177
+ CQNM,CQNM
1178
+ HHPD,HHPD
1179
+ GFIK,GFIK
1180
+ TTLH,TTLH
1181
+ SGED,SGED
1182
+ PRPE,PRPE
1183
+ GIDF,GIDF
1184
+ DVSH,DVSH
1185
+ CCVH,CCVH
1186
+ KTCF,KTCF
1187
+ ISYI,ISYI
1188
+ NQII,NQII
1189
+ GPCY,GPCY
1190
+ HCVE,HCVE
1191
+ FSYV,FSYV
1192
+ SIAV,SIAV
1193
+ VVNC,VVNC
1194
+ VCHD,VCHD
1195
+ DTDS,DTDS
1196
+ CFVK,CFVK
1197
+ VNDH,VNDH
1198
+ VHWL,VHWL
1199
+ CSCE,CSCE
1200
+ KQFR,KQFR
1201
+ EFDP,EFDP
1202
+ RDAI,RDAI
1203
+ PTMQ,PTMQ
1204
+ SDGG,SDGG
1205
+ PTVI,PTVI
1206
+ MIMP,MIMP
1207
+ QSVW,QSVW
1208
+ ETQH,ETQH
1209
+ KVCK,KVCK
1210
+ QRSE,QRSE
1211
+ CQAE,CQAE
1212
+ HVFE,HVFE
1213
+ EHHR,EHHR
1214
+ IKYC,IKYC
1215
+ YWPA,YWPA
1216
+ PGVW,PGVW
1217
+ EHGF,EHGF
1218
+ IQDC,IQDC
1219
+ YVHI,YVHI
1220
+ TMFP,TMFP
1221
+ AEDA,AEDA
1222
+ PCTW,PCTW
1223
+ FNGV,FNGV
1224
+ AHVD,AHVD
1225
+ CGIY,CGIY
1226
+ CYFW,CYFW
1227
+ GQCG,GQCG
1228
+ NTSW,NTSW
1229
+ WYSQ,WYSQ
1230
+ WCLH,WCLH
1231
+ GHCP,GHCP
1232
+ YPMN,YPMN
1233
+ WCPF,WCPF
1234
+ MVGK,MVGK
1235
+ DCVI,DCVI
1236
+ WRPF,WRPF
1237
+ VEDP,VEDP
1238
+ WWNS,WWNS
1239
+ PQFN,PQFN
1240
+ ITGY,ITGY
1241
+ QRWD,QRWD
1242
+ FPDR,FPDR
1243
+ EKQQ,EKQQ
1244
+ FWYD,FWYD
1245
+ TYYL,TYYL
1246
+ THCL,THCL
1247
+ RGTI,RGTI
1248
+ ALVA,ALVA
1249
+ LEWG,LEWG
1250
+ RVQH,RVQH
1251
+ LFAS,LFAS
1252
+ SVHG,SVHG
1253
+ SREG,SREG
1254
+ GVDR,GVDR
1255
+ MMVT,MMVT
1256
+ VPGH,VPGH
1257
+ YTQD,YTQD
1258
+ DYPH,DYPH
1259
+ RRMG,RRMG
1260
+ AVDP,AVDP
1261
+ EVKQ,EVKQ
1262
+ LTFC,LTFC
1263
+ KATL,KATL
1264
+ MYKA,MYKA
1265
+ RYCC,RYCC
1266
+ VHKY,VHKY
1267
+ LVMI,LVMI
1268
+ YHFK,YHFK
1269
+ RLNQ,RLNQ
1270
+ EMEV,EMEV
1271
+ DPGV,DPGV
1272
+ AAWQ,AAWQ
1273
+ FDDC,FDDC
1274
+ SKLN,SKLN
1275
+ LCEE,LCEE
1276
+ VVWW,VVWW
1277
+ KLQI,KLQI
1278
+ DHDA,DHDA
1279
+ RTQF,RTQF
1280
+ STTP,STTP
1281
+ FFEI,FFEI
1282
+ IKCN,IKCN
1283
+ ACWP,ACWP
1284
+ AWGE,AWGE
1285
+ HYRY,HYRY
1286
+ FFVR,FFVR
1287
+ CEEV,CEEV
1288
+ LQMY,LQMY
1289
+ SACN,SACN
1290
+ MWWT,MWWT
1291
+ IDQT,IDQT
1292
+ QGCS,QGCS
1293
+ WRLE,WRLE
1294
+ AAPH,AAPH
1295
+ NTKN,NTKN
1296
+ FMSK,FMSK
1297
+ MGLI,MGLI
1298
+ YECV,YECV
1299
+ RFID,RFID
1300
+ CEDK,CEDK
1301
+ ILEE,ILEE
1302
+ RAMW,RAMW
1303
+ FPTQ,FPTQ
1304
+ HRCR,HRCR
1305
+ FIMH,FIMH
1306
+ KIPT,KIPT
1307
+ NNNH,NNNH
1308
+ ANWL,ANWL
1309
+ ARTD,ARTD
1310
+ APDW,APDW
1311
+ AFCR,AFCR
1312
+ FQPT,FQPT
1313
+ ALNI,ALNI
1314
+ FNWC,FNWC
1315
+ SYEV,SYEV
1316
+ IMYD,IMYD
1317
+ TRVA,TRVA
1318
+ RAMI,RAMI
1319
+ QCTI,QCTI
1320
+ NQRE,NQRE
1321
+ YEPV,YEPV
1322
+ CWAK,CWAK
1323
+ LFCK,LFCK
1324
+ NESI,NESI
1325
+ WHQL,WHQL
1326
+ DGVF,DGVF
1327
+ NHPH,NHPH
1328
+ YSQT,YSQT
1329
+ PWDL,PWDL
1330
+ FEDQ,FEDQ
1331
+ YMPP,YMPP
1332
+ RNCR,RNCR
1333
+ NWWQ,NWWQ
1334
+ VMVQ,VMVQ
1335
+ YSSP,YSSP
1336
+ QHKQ,QHKQ
1337
+ FPVM,FPVM
1338
+ ADHT,ADHT
1339
+ ATSY,ATSY
1340
+ DLPV,DLPV
1341
+ ETRD,ETRD
1342
+ MHVD,MHVD
1343
+ VIHG,VIHG
1344
+ FDEA,FDEA
1345
+ TSNQ,TSNQ
1346
+ YSIF,YSIF
1347
+ QIII,QIII
1348
+ RGGM,RGGM
1349
+ VAMH,VAMH
1350
+ CVGQ,CVGQ
1351
+ RPYI,RPYI
1352
+ DEFL,DEFL
1353
+ QHPN,QHPN
1354
+ MLEP,MLEP
1355
+ IEDW,IEDW
1356
+ YLWR,YLWR
1357
+ DHHD,DHHD
1358
+ KSWF,KSWF
1359
+ AVKM,AVKM
1360
+ SRER,SRER
1361
+ AMNH,AMNH
1362
+ MDDM,MDDM
1363
+ CGNP,CGNP
1364
+ GEHF,GEHF
1365
+ PLPD,PLPD
1366
+ FSHK,FSHK
1367
+ AISH,AISH
1368
+ GDAN,GDAN
1369
+ CQYQ,CQYQ
1370
+ AGEQ,AGEQ
1371
+ RQWE,RQWE
1372
+ RRQL,RRQL
1373
+ WHGN,WHGN
1374
+ PKYY,PKYY
1375
+ CCIM,CCIM
1376
+ KWRL,KWRL
1377
+ LTTE,LTTE
1378
+ MYNQ,MYNQ
1379
+ SSAI,SSAI
1380
+ LYCT,LYCT
1381
+ CMPV,CMPV
1382
+ HVCL,HVCL
1383
+ NRKH,NRKH
1384
+ KECR,KECR
1385
+ TWKA,TWKA
1386
+ WASS,WASS
1387
+ KKIY,KKIY
1388
+ VAFT,VAFT
1389
+ LDVC,LDVC
1390
+ YEPK,YEPK
1391
+ CISD,CISD
1392
+ NMWE,NMWE
1393
+ CVNT,CVNT
1394
+ TLDK,TLDK
1395
+ CYGK,CYGK
1396
+ PVTT,PVTT
1397
+ HDDC,HDDC
1398
+ LGLF,LGLF
1399
+ LAQN,LAQN
1400
+ NTFF,NTFF
1401
+ KISF,KISF
1402
+ GVLP,GVLP
1403
+ LNVM,LNVM
1404
+ NAGK,NAGK
1405
+ NHNV,NHNV
1406
+ LRCW,LRCW
1407
+ WMYG,WMYG
1408
+ GCYY,GCYY
1409
+ CSYK,CSYK
1410
+ PDLM,PDLM
1411
+ WSWA,WSWA
1412
+ GQSL,GQSL
1413
+ NWRM,NWRM
1414
+ EEHP,EEHP
1415
+ WCYP,WCYP
1416
+ NNAM,NNAM
1417
+ MHHK,MHHK
1418
+ FGFS,FGFS
1419
+ KPLK,KPLK
1420
+ TYPI,TYPI
1421
+ CDRS,CDRS
1422
+ WMFH,WMFH
1423
+ HEWG,HEWG
1424
+ PQNW,PQNW
1425
+ QVQH,QVQH
1426
+ NTPI,NTPI
1427
+ VMVI,VMVI
1428
+ RHAT,RHAT
1429
+ PKKF,PKKF
1430
+ IIHN,IIHN
1431
+ LVNS,LVNS
1432
+ YTSK,YTSK
1433
+ IAGS,IAGS
1434
+ KNGE,KNGE
1435
+ GESD,GESD
1436
+ IAKF,IAKF
1437
+ QPDG,QPDG
1438
+ CLEN,CLEN
1439
+ MQHR,MQHR
1440
+ YRFL,YRFL
1441
+ RTPP,RTPP
1442
+ IVKA,IVKA
1443
+ KDPY,KDPY
1444
+ SSFF,SSFF
1445
+ YKPE,YKPE
1446
+ NEDG,NEDG
1447
+ YEGS,YEGS
1448
+ FGGQ,FGGQ
1449
+ NDFM,NDFM
1450
+ HNEC,HNEC
1451
+ NQLR,NQLR
1452
+ RLVW,RLVW
1453
+ NVFD,NVFD
1454
+ PMNY,PMNY
1455
+ RLHA,RLHA
1456
+ DIII,DIII
1457
+ WMLQ,WMLQ
1458
+ KACN,KACN
1459
+ IATA,IATA
1460
+ LIAS,LIAS
1461
+ SVSF,SVSF
1462
+ CKIS,CKIS
1463
+ YNFT,YNFT
1464
+ CKGC,CKGC
1465
+ EQIV,EQIV
1466
+ FRKQ,FRKQ
1467
+ VAWY,VAWY
1468
+ NWPG,NWPG
1469
+ RIHV,RIHV
1470
+ TIEC,TIEC
1471
+ NLYT,NLYT
1472
+ CTYK,CTYK
1473
+ FTQN,FTQN
1474
+ WVER,WVER
1475
+ VPII,VPII
1476
+ DKNM,DKNM
1477
+ MYEK,MYEK
1478
+ WYWF,WYWF
1479
+ IDVQ,IDVQ
1480
+ SWTV,SWTV
1481
+ DTVE,DTVE
1482
+ CDKP,CDKP
1483
+ WEEQ,WEEQ
1484
+ AFRC,AFRC
1485
+ NTKQ,NTKQ
1486
+ KQED,KQED
1487
+ PESP,PESP
1488
+ HDTQ,HDTQ
1489
+ VHHE,VHHE
1490
+ SWGA,SWGA
1491
+ YEHR,YEHR
1492
+ EDWE,EDWE
1493
+ QSVY,QSVY
1494
+ AMWP,AMWP
1495
+ VLHP,VLHP
1496
+ KEYR,KEYR
1497
+ ECKA,ECKA
1498
+ LIAN,LIAN
1499
+ SFHW,SFHW
1500
+ VCHQ,VCHQ
1501
+ CHYW,CHYW
1502
+ PVPV,PVPV
1503
+ SPSG,SPSG
1504
+ LRIA,LRIA
1505
+ YQYS,YQYS
1506
+ GGLV,GGLV
1507
+ DTTL,DTTL
1508
+ TKKN,TKKN
1509
+ QIYT,QIYT
1510
+ NMYD,NMYD
1511
+ EFRP,EFRP
1512
+ FTIK,FTIK
1513
+ WLWV,WLWV
1514
+ CKVC,CKVC
1515
+ TPIG,TPIG
1516
+ WEWH,WEWH
1517
+ QCVC,QCVC
1518
+ NTDK,NTDK
1519
+ WDCT,WDCT
1520
+ RMKF,RMKF
1521
+ LRIY,LRIY
1522
+ FETM,FETM
1523
+ ECRP,ECRP
1524
+ RFVF,RFVF
1525
+ VQIP,VQIP
1526
+ DQEE,DQEE
1527
+ MVLS,MVLS
1528
+ CYIT,CYIT
1529
+ FNPE,FNPE
1530
+ IAGE,IAGE
1531
+ LMWK,LMWK
1532
+ GQAS,GQAS
1533
+ LEWQ,LEWQ
1534
+ IGHI,IGHI
1535
+ ICMM,ICMM
1536
+ MEDM,MEDM
1537
+ FGRY,FGRY
1538
+ VGWW,VGWW
1539
+ KDIN,KDIN
1540
+ RGRW,RGRW
1541
+ YGES,YGES
1542
+ HNTA,HNTA
1543
+ EGNH,EGNH
1544
+ VNTW,VNTW
1545
+ PPVQ,PPVQ
1546
+ PWWA,PWWA
1547
+ MTHR,MTHR
1548
+ NKCN,NKCN
1549
+ IGRL,IGRL
1550
+ FIWM,FIWM
1551
+ PGTT,PGTT
1552
+ HGAE,HGAE
1553
+ PMWS,PMWS
1554
+ KWNC,KWNC
1555
+ GFDW,GFDW
1556
+ QKKG,QKKG
1557
+ HVTG,HVTG
1558
+ SGET,SGET
1559
+ AWKW,AWKW
1560
+ TRPE,TRPE
1561
+ RVHE,RVHE
1562
+ EEPS,EEPS
1563
+ CVSY,CVSY
1564
+ SRPE,SRPE
1565
+ LMMY,LMMY
1566
+ CSDW,CSDW
1567
+ RIHP,RIHP
1568
+ YDTM,YDTM
1569
+ YTYT,YTYT
1570
+ GSHI,GSHI
1571
+ NCRV,NCRV
1572
+ MGVK,MGVK
1573
+ LWNY,LWNY
1574
+ MYWC,MYWC
1575
+ ETST,ETST
1576
+ NCVV,NCVV
1577
+ TIEI,TIEI
1578
+ DMMK,DMMK
1579
+ NMIR,NMIR
1580
+ EFVF,EFVF
1581
+ KTSK,KTSK
1582
+ EQFD,EQFD
1583
+ PVMD,PVMD
1584
+ IGYL,IGYL
1585
+ LLLM,LLLM
1586
+ HDRC,HDRC
1587
+ QGCV,QGCV
1588
+ WQPH,WQPH
1589
+ DRWA,DRWA
1590
+ MVVG,MVVG
1591
+ TMMC,TMMC
1592
+ KVYN,KVYN
1593
+ AIIA,AIIA
1594
+ FNYS,FNYS
1595
+ HRCA,HRCA
1596
+ IAWP,IAWP
1597
+ VDYL,VDYL
1598
+ VICL,VICL
1599
+ HYDA,HYDA
1600
+ ADSA,ADSA
1601
+ TYYT,TYYT
1602
+ SQRL,SQRL
1603
+ EIQM,EIQM
1604
+ HRNL,HRNL
1605
+ DQVF,DQVF
1606
+ PTWK,PTWK
1607
+ LCSY,LCSY
1608
+ FHST,FHST
1609
+ DKQV,DKQV
1610
+ PFSM,PFSM
1611
+ QLAQ,QLAQ
1612
+ EDMF,EDMF
1613
+ EKEN,EKEN
1614
+ LMPW,LMPW
1615
+ CALR,CALR
1616
+ RPAY,RPAY
1617
+ HVCH,HVCH
1618
+ GVQA,GVQA
1619
+ TLGN,TLGN
1620
+ QFIR,QFIR
1621
+ LKYW,LKYW
1622
+ MTWR,MTWR
1623
+ VRMD,VRMD
1624
+ LLFQ,LLFQ
1625
+ CEVE,CEVE
1626
+ MDDA,MDDA
1627
+ WPVP,WPVP
1628
+ DTVI,DTVI
1629
+ WGVM,WGVM
1630
+ DYDT,DYDT
1631
+ NWKV,NWKV
1632
+ DLPC,DLPC
1633
+ LEMF,LEMF
1634
+ VHVQ,VHVQ
1635
+ RVWF,RVWF
1636
+ KNDW,KNDW
1637
+ EAMT,EAMT
1638
+ IQLT,IQLT
1639
+ HYYY,HYYY
1640
+ PEGN,PEGN
1641
+ TRQR,TRQR
1642
+ EIML,EIML
1643
+ ETGF,ETGF
1644
+ QQSV,QQSV
1645
+ KFIP,KFIP
1646
+ TVAM,TVAM
1647
+ GTHA,GTHA
1648
+ QCHI,QCHI
1649
+ IGIR,IGIR
1650
+ HQLS,HQLS
1651
+ QYQM,QYQM
1652
+ PNEP,PNEP
1653
+ RKSK,RKSK
1654
+ AAYG,AAYG
1655
+ ATCM,ATCM
1656
+ DDCH,DDCH
1657
+ NTAF,NTAF
1658
+ LHNG,LHNG
1659
+ ECIA,ECIA
1660
+ EWAA,EWAA
1661
+ CKSK,CKSK
1662
+ MDTH,MDTH
1663
+ DNCP,DNCP
1664
+ NLDP,NLDP
1665
+ KFVS,KFVS
1666
+ TSLV,TSLV
1667
+ QVCA,QVCA
1668
+ DPDM,DPDM
1669
+ LSGA,LSGA
1670
+ MLVC,MLVC
1671
+ IAPM,IAPM
1672
+ CGFY,CGFY
1673
+ VIHV,VIHV
1674
+ GLRD,GLRD
1675
+ EYDE,EYDE
1676
+ FNEP,FNEP
1677
+ DIMC,DIMC
1678
+ AFIC,AFIC
1679
+ KFMD,KFMD
1680
+ YFNI,YFNI
1681
+ HTMD,HTMD
1682
+ EKFN,EKFN
1683
+ NFII,NFII
1684
+ MNHN,MNHN
1685
+ PFGK,PFGK
1686
+ EWSD,EWSD
1687
+ SGMK,SGMK
1688
+ SHTM,SHTM
1689
+ RFQL,RFQL
1690
+ LNTY,LNTY
1691
+ EWPT,EWPT
1692
+ KINA,KINA
1693
+ YYED,YYED
1694
+ LGSD,LGSD
1695
+ CRNH,CRNH
1696
+ NNSK,NNSK
1697
+ SPIQ,SPIQ
1698
+ KWQV,KWQV
1699
+ KKQA,KKQA
1700
+ TSFI,TSFI
1701
+ NPFE,NPFE
1702
+ MKCE,MKCE
1703
+ EQKH,EQKH
1704
+ WENH,WENH
1705
+ VDLI,VDLI
1706
+ WVDF,WVDF
1707
+ FFAM,FFAM
1708
+ SPSP,SPSP
1709
+ TWKI,TWKI
1710
+ TWTC,TWTC
1711
+ RTGE,RTGE
1712
+ SFSI,SFSI
1713
+ WTEF,WTEF
1714
+ DIFH,DIFH
1715
+ QFGV,QFGV
1716
+ NRRP,NRRP
1717
+ GHHS,GHHS
1718
+ IYMN,IYMN
1719
+ TTWT,TTWT
1720
+ NVSG,NVSG
1721
+ LAKQ,LAKQ
1722
+ WNAI,WNAI
1723
+ GHWG,GHWG
1724
+ SPPG,SPPG
1725
+ FWKT,FWKT
1726
+ ERKH,ERKH
1727
+ WWLW,WWLW
1728
+ MNKM,MNKM
1729
+ RITQ,RITQ
1730
+ MFMR,MFMR
1731
+ KVHQ,KVHQ
1732
+ TFFD,TFFD
1733
+ FWFT,FWFT
1734
+ LHLT,LHLT
1735
+ LQQF,LQQF
1736
+ CWVA,CWVA
1737
+ IRQW,IRQW
1738
+ EGHH,EGHH
1739
+ VHLC,VHLC
1740
+ TVKK,TVKK
1741
+ QSQH,QSQH
1742
+ DFIT,DFIT
1743
+ IGRW,IGRW
1744
+ FVHC,FVHC
1745
+ TERW,TERW
1746
+ KFSH,KFSH
1747
+ FECP,FECP
1748
+ DWFG,DWFG
1749
+ SWIH,SWIH
1750
+ KGTR,KGTR
1751
+ HEII,HEII
1752
+ PHDF,PHDF
1753
+ QLQA,QLQA
1754
+ RLYF,RLYF
1755
+ TGYA,TGYA
1756
+ KRKH,KRKH
1757
+ YQCY,YQCY
1758
+ MPWS,MPWS
1759
+ SQTV,SQTV
1760
+ RRNV,RRNV
1761
+ HPCP,HPCP
1762
+ SVRC,SVRC
1763
+ GHVA,GHVA
1764
+ LNVW,LNVW
1765
+ PSRY,PSRY
1766
+ ICRN,ICRN
1767
+ YTNI,YTNI
1768
+ YGIM,YGIM
1769
+ GGNN,GGNN
1770
+ TWEG,TWEG
1771
+ NCYT,NCYT
1772
+ WNDT,WNDT
1773
+ EVSF,EVSF
1774
+ KCDN,KCDN
1775
+ YYVE,YYVE
1776
+ RFVY,RFVY
1777
+ AVIG,AVIG
1778
+ AVKT,AVKT
1779
+ KKLH,KKLH
1780
+ MLTD,MLTD
1781
+ TPMH,TPMH
1782
+ LAQY,LAQY
1783
+ GDIH,GDIH
1784
+ NVEM,NVEM
1785
+ THMI,THMI
1786
+ EMLC,EMLC
1787
+ TFCW,TFCW
1788
+ RVQA,RVQA
1789
+ WHED,WHED
1790
+ SLRQ,SLRQ
1791
+ CKHA,CKHA
1792
+ INVT,INVT
1793
+ QHTI,QHTI
1794
+ VQYL,VQYL
1795
+ FSWQ,FSWQ
1796
+ SFSW,SFSW
1797
+ KPFI,KPFI
1798
+ QIND,QIND
1799
+ KWYI,KWYI
1800
+ IVFD,IVFD
1801
+ TYSC,TYSC
1802
+ KYGD,KYGD
1803
+ KDID,KDID
1804
+ HQTQ,HQTQ
1805
+ SACQ,SACQ
1806
+ VYIV,VYIV
1807
+ PNLV,PNLV
1808
+ MHFD,MHFD
1809
+ KWCN,KWCN
1810
+ GMRN,GMRN
1811
+ PIHW,PIHW
1812
+ QRYQ,QRYQ
1813
+ GESI,GESI
1814
+ WCHK,WCHK
1815
+ HQTR,HQTR
1816
+ PNDQ,PNDQ
1817
+ SRIN,SRIN
1818
+ FLWG,FLWG
1819
+ NGSN,NGSN
1820
+ GKSF,GKSF
1821
+ PCMM,PCMM
1822
+ DQPL,DQPL
1823
+ SYQY,SYQY
1824
+ YYTT,YYTT
1825
+ VIRS,VIRS
1826
+ LSSM,LSSM
1827
+ RACV,RACV
1828
+ KSTA,KSTA
1829
+ NFTQ,NFTQ
1830
+ CVVM,CVVM
1831
+ WTNV,WTNV
1832
+ LCVG,LCVG
1833
+ IMTD,IMTD
1834
+ HTHW,HTHW
1835
+ GNEP,GNEP
1836
+ NGQG,NGQG
1837
+ TFKL,TFKL
1838
+ SVYH,SVYH
1839
+ TVCM,TVCM
1840
+ CQYY,CQYY
1841
+ FFSS,FFSS
1842
+ TWVS,TWVS
1843
+ KMKK,KMKK
1844
+ VNVQ,VNVQ
1845
+ PWEA,PWEA
1846
+ LDIG,LDIG
1847
+ DFVM,DFVM
1848
+ SAAE,SAAE
1849
+ NQPC,NQPC
1850
+ ADMI,ADMI
1851
+ ETDI,ETDI
1852
+ FCCD,FCCD
1853
+ EDVT,EDVT
1854
+ YPLG,YPLG
1855
+ CLCS,CLCS
1856
+ CTTS,CTTS
1857
+ KYLL,KYLL
1858
+ QVGN,QVGN
1859
+ KPRW,KPRW
1860
+ DEQN,DEQN
1861
+ KHAQ,KHAQ
1862
+ TFSK,TFSK
1863
+ MMTS,MMTS
1864
+ QQHD,QQHD
1865
+ CSYV,CSYV
1866
+ DYGS,DYGS
1867
+ ITKD,ITKD
1868
+ LWLA,LWLA
1869
+ GKRS,GKRS
1870
+ YWLV,YWLV
1871
+ EFAN,EFAN
1872
+ MPMS,MPMS
1873
+ MAYH,MAYH
1874
+ PCRG,PCRG
1875
+ WEKY,WEKY
1876
+ MGED,MGED
1877
+ AQQM,AQQM
1878
+ CYCW,CYCW
1879
+ IEDM,IEDM
1880
+ NEND,NEND
1881
+ SFEE,SFEE
1882
+ FQNE,FQNE
1883
+ LPLF,LPLF
1884
+ MQCV,MQCV
1885
+ YDYS,YDYS
1886
+ IDCA,IDCA
1887
+ EWDN,EWDN
1888
+ TGWP,TGWP
1889
+ SPWD,SPWD
1890
+ DIDS,DIDS
1891
+ EVTC,EVTC
1892
+ KMIK,KMIK
1893
+ GDSE,GDSE
1894
+ DCYA,DCYA
1895
+ YLET,YLET
1896
+ LLCC,LLCC
1897
+ TGEQ,TGEQ
1898
+ IMWR,IMWR
1899
+ WQQW,WQQW
1900
+ QPPG,QPPG
1901
+ AMEV,AMEV
1902
+ TYEF,TYEF
1903
+ DGHR,DGHR
1904
+ HPHP,HPHP
1905
+ GHHY,GHHY
1906
+ ARNE,ARNE
1907
+ ACTC,ACTC
1908
+ PTNL,PTNL
1909
+ KWWQ,KWWQ
1910
+ EPSF,EPSF
1911
+ PIDQ,PIDQ
1912
+ QTIW,QTIW
1913
+ DAMF,DAMF
1914
+ RKGV,RKGV
1915
+ DDHT,DDHT
1916
+ TFIR,TFIR
1917
+ LQQC,LQQC
1918
+ RDWA,RDWA
1919
+ FINQ,FINQ
1920
+ VRAK,VRAK
1921
+ FDEE,FDEE
1922
+ RTCP,RTCP
1923
+ LHWM,LHWM
1924
+ KDNC,KDNC
1925
+ VLVP,VLVP
1926
+ VFEW,VFEW
1927
+ QIIN,QIIN
1928
+ DSQR,DSQR
1929
+ LNDW,LNDW
1930
+ VIPR,VIPR
1931
+ HQWN,HQWN
1932
+ NLDH,NLDH
1933
+ MSHM,MSHM
1934
+ STGT,STGT
1935
+ YQYH,YQYH
1936
+ KLRI,KLRI
1937
+ MMKE,MMKE
1938
+ VRKW,VRKW
1939
+ VKYE,VKYE
1940
+ DEAH,DEAH
1941
+ PECI,PECI
1942
+ CADI,CADI
1943
+ VSAK,VSAK
1944
+ ENMS,ENMS
1945
+ PSSQ,PSSQ
1946
+ DKCQ,DKCQ
1947
+ IDCS,IDCS
1948
+ SLLK,SLLK
1949
+ DPHG,DPHG
1950
+ HMFA,HMFA
1951
+ WRHF,WRHF
1952
+ KPKS,KPKS
1953
+ NEPY,NEPY
1954
+ CMIN,CMIN
1955
+ KAYQ,KAYQ
1956
+ TTMH,TTMH
1957
+ DDVL,DDVL
1958
+ ERPG,ERPG
1959
+ KKMP,KKMP
1960
+ TENK,TENK
1961
+ LNWM,LNWM
1962
+ GHAL,GHAL
1963
+ WMKD,WMKD
1964
+ FIWK,FIWK
1965
+ SWMY,SWMY
1966
+ LYHF,LYHF
1967
+ MKYP,MKYP
1968
+ REMS,REMS
1969
+ AMFA,AMFA
1970
+ YSYT,YSYT
1971
+ EKKW,EKKW
1972
+ LRTG,LRTG
1973
+ RFMT,RFMT
1974
+ LHHW,LHHW
1975
+ KAPS,KAPS
1976
+ TFTD,TFTD
1977
+ IMSG,IMSG
1978
+ YKMQ,YKMQ
1979
+ CPPP,CPPP
1980
+ FMDA,FMDA
1981
+ YYAM,YYAM
1982
+ GTSS,GTSS
1983
+ ALYP,ALYP
1984
+ RYND,RYND
1985
+ WWFL,WWFL
1986
+ WYAT,WYAT
1987
+ CIMM,CIMM
1988
+ IVHK,IVHK
1989
+ RYSF,RYSF
1990
+ ENEA,ENEA
1991
+ WPFT,WPFT
1992
+ QVLH,QVLH
1993
+ ECFV,ECFV
1994
+ HQPP,HQPP
1995
+ SQQV,SQQV
1996
+ NRTP,NRTP
1997
+ WEEA,WEEA
1998
+ ICNQ,ICNQ
1999
+ KVDG,KVDG
2000
+ GSLK,GSLK
2001
+ CMVY,CMVY
2002
+ GNNK,GNNK
2003
+ IRQM,IRQM
2004
+ ASHG,ASHG
2005
+ PHEM,PHEM
2006
+ NNSM,NNSM
2007
+ ADRM,ADRM
2008
+ TAYW,TAYW
2009
+ FCDK,FCDK
2010
+ ETYQ,ETYQ
2011
+ MMYF,MMYF
2012
+ FLIM,FLIM
2013
+ GKFT,GKFT
2014
+ IQAK,IQAK
2015
+ GINY,GINY
2016
+ VILD,VILD
2017
+ CPLH,CPLH
2018
+ LVHQ,LVHQ
2019
+ LCKP,LCKP
2020
+ RVNH,RVNH
2021
+ HCGK,HCGK
2022
+ HCQH,HCQH
2023
+ GFKI,GFKI
2024
+ AGET,AGET
2025
+ AIHM,AIHM
2026
+ QITF,QITF
2027
+ RWFE,RWFE
2028
+ GYED,GYED
2029
+ PCSM,PCSM
2030
+ CREN,CREN
2031
+ ASAL,ASAL
2032
+ VRTV,VRTV
2033
+ AQVE,AQVE
2034
+ QFND,QFND
2035
+ FGGR,FGGR
2036
+ KEAT,KEAT
2037
+ CRVT,CRVT
2038
+ PKKG,PKKG
2039
+ PHQM,PHQM
2040
+ PAKY,PAKY
2041
+ IYSC,IYSC
2042
+ GVVM,GVVM
2043
+ IKLF,IKLF
2044
+ HHYG,HHYG
2045
+ SACL,SACL
2046
+ GQCS,GQCS
2047
+ AQIM,AQIM
2048
+ FCVN,FCVN
2049
+ HVWG,HVWG
2050
+ NADT,NADT
2051
+ MHKK,MHKK
2052
+ VWLY,VWLY
2053
+ FPCQ,FPCQ
2054
+ PEKW,PEKW
2055
+ AFYP,AFYP
2056
+ TQYY,TQYY
2057
+ VECI,VECI
2058
+ KWFQ,KWFQ
2059
+ TIEG,TIEG
2060
+ STHY,STHY
2061
+ QSAM,QSAM
2062
+ RNDD,RNDD
2063
+ EQHA,EQHA
2064
+ VCFH,VCFH
2065
+ YRWF,YRWF
2066
+ VQKH,VQKH
2067
+ CSRT,CSRT
2068
+ PRNF,PRNF
2069
+ ADIG,ADIG
2070
+ FHNV,FHNV
2071
+ NVAH,NVAH
2072
+ WYLN,WYLN
2073
+ DKWK,DKWK
2074
+ GIWI,GIWI
2075
+ MEEF,MEEF
2076
+ HCMM,HCMM
2077
+ TLIM,TLIM
2078
+ PYKV,PYKV
2079
+ YATS,YATS
2080
+ QMQW,QMQW
2081
+ KMLA,KMLA
2082
+ ALWW,ALWW
2083
+ WKHD,WKHD
2084
+ DVHP,DVHP
2085
+ EYHF,EYHF
2086
+ DLSY,DLSY
2087
+ NIRC,NIRC
2088
+ GGFI,GGFI
2089
+ RGWM,RGWM
2090
+ SWMR,SWMR
2091
+ FNQD,FNQD
2092
+ WMAY,WMAY
2093
+ ITKT,ITKT
2094
+ LIAV,LIAV
2095
+ CHTP,CHTP
2096
+ QIDD,QIDD
2097
+ PHAG,PHAG
2098
+ IDNA,IDNA
2099
+ DHSC,DHSC
2100
+ EWLA,EWLA
2101
+ FSLW,FSLW
2102
+ QEYK,QEYK
2103
+ LHHR,LHHR
2104
+ VQYS,VQYS
2105
+ VDQD,VDQD
2106
+ RGGL,RGGL
2107
+ ERTH,ERTH
2108
+ WYDD,WYDD
2109
+ EMFN,EMFN
2110
+ DKEP,DKEP
2111
+ TLCW,TLCW
2112
+ QSHE,QSHE
2113
+ IDLS,IDLS
2114
+ KKRT,KKRT
2115
+ CHQI,CHQI
2116
+ PLWC,PLWC
2117
+ VIVA,VIVA
2118
+ KDTG,KDTG
2119
+ PTVC,PTVC
2120
+ YICD,YICD
2121
+ CKRT,CKRT
2122
+ DIDP,DIDP
2123
+ DTEI,DTEI
2124
+ GIAK,GIAK
2125
+ KQLM,KQLM
2126
+ PYPN,PYPN
2127
+ LFLK,LFLK
2128
+ MRGF,MRGF
2129
+ FAMD,FAMD
2130
+ TNTS,TNTS
2131
+ MMIA,MMIA
2132
+ HPQM,HPQM
2133
+ FHEQ,FHEQ
2134
+ CHLP,CHLP
2135
+ NMFS,NMFS
2136
+ LSRR,LSRR
2137
+ CSSG,CSSG
2138
+ PYHS,PYHS
2139
+ ILDG,ILDG
2140
+ MHWD,MHWD
2141
+ QCNV,QCNV
2142
+ QILA,QILA
2143
+ QSCQ,QSCQ
2144
+ QYTN,QYTN
2145
+ WKPR,WKPR
2146
+ RTHR,RTHR
2147
+ FYMW,FYMW
2148
+ SGGN,SGGN
2149
+ CAHL,CAHL
2150
+ HNIR,HNIR
2151
+ MFKN,MFKN
2152
+ KLTN,KLTN
2153
+ CKGT,CKGT
2154
+ CNHN,CNHN
2155
+ FIVT,FIVT
2156
+ MCWG,MCWG
2157
+ TPPR,TPPR
2158
+ FLEW,FLEW
2159
+ QSHA,QSHA
2160
+ DKAD,DKAD
2161
+ DIVY,DIVY
2162
+ MKVM,MKVM
2163
+ TACP,TACP
2164
+ TWNE,TWNE
2165
+ HAYR,HAYR
2166
+ VYRY,VYRY
2167
+ PDKC,PDKC
2168
+ WTCY,WTCY
2169
+ AMWN,AMWN
2170
+ FRFY,FRFY
2171
+ IMRC,IMRC
2172
+ VNPA,VNPA
2173
+ RPDP,RPDP
2174
+ TLEF,TLEF
2175
+ DLNF,DLNF
2176
+ TCPQ,TCPQ
2177
+ VLRR,VLRR
2178
+ WKWF,WKWF
2179
+ VEHK,VEHK
2180
+ DNEA,DNEA
2181
+ DIVA,DIVA
2182
+ FNSQ,FNSQ
2183
+ QYTG,QYTG
2184
+ WPSR,WPSR
2185
+ VATT,VATT
2186
+ CMIF,CMIF
2187
+ EREH,EREH
2188
+ LGKL,LGKL
2189
+ AWQH,AWQH
2190
+ KRCC,KRCC
2191
+ MWCW,MWCW
2192
+ VCGY,VCGY
2193
+ SFFG,SFFG
2194
+ TNRR,TNRR
2195
+ LCVI,LCVI
2196
+ DLSG,DLSG
2197
+ LTIN,LTIN
2198
+ NNDW,NNDW
2199
+ QIFI,QIFI
2200
+ DAQL,DAQL
2201
+ DYRG,DYRG
2202
+ HAWD,HAWD
2203
+ TKKH,TKKH
2204
+ HVPF,HVPF
2205
+ CMSK,CMSK
2206
+ HYAK,HYAK
2207
+ CTHE,CTHE
2208
+ ACLN,ACLN
2209
+ ILRS,ILRS
2210
+ RAHP,RAHP
2211
+ TTCW,TTCW
2212
+ LIFD,LIFD
2213
+ YRPE,YRPE
2214
+ GGEA,GGEA
2215
+ LGSN,LGSN
2216
+ HETY,HETY
2217
+ HVYV,HVYV
2218
+ PRLD,PRLD
2219
+ WYCQ,WYCQ
2220
+ TMQM,TMQM
2221
+ QNRH,QNRH
2222
+ PALF,PALF
2223
+ FDHP,FDHP
2224
+ DNET,DNET
2225
+ AKLN,AKLN
2226
+ IQGF,IQGF
2227
+ IMHG,IMHG
2228
+ NGPL,NGPL
2229
+ NVCF,NVCF
2230
+ LIFN,LIFN
2231
+ WLAP,WLAP
2232
+ WCGQ,WCGQ
2233
+ SGWP,SGWP
2234
+ HGFQ,HGFQ
2235
+ GQFR,GQFR
2236
+ NQER,NQER
2237
+ GRAM,GRAM
2238
+ GASV,GASV
2239
+ VWLA,VWLA
2240
+ QMMC,QMMC
2241
+ YLMA,YLMA
2242
+ FAEL,FAEL
2243
+ KEYH,KEYH
2244
+ GDFY,GDFY
2245
+ RPQT,RPQT
2246
+ MWVK,MWVK
2247
+ GWVS,GWVS
2248
+ WNKY,WNKY
2249
+ LCRD,LCRD
2250
+ CTKF,CTKF
2251
+ VCYP,VCYP
2252
+ SQFQ,SQFQ
2253
+ YLSR,YLSR
2254
+ HESM,HESM
2255
+ MVRG,MVRG
2256
+ LFEM,LFEM
2257
+ TCLH,TCLH
2258
+ LGEL,LGEL
2259
+ WYLC,WYLC
2260
+ GQRD,GQRD
2261
+ PHGL,PHGL
2262
+ WCDN,WCDN
2263
+ MVVI,MVVI
2264
+ RHYP,RHYP
2265
+ WSMV,WSMV
2266
+ YITL,YITL
2267
+ DYHL,DYHL
2268
+ MELW,MELW
2269
+ REYD,REYD
2270
+ VREK,VREK
2271
+ NCTQ,NCTQ
2272
+ HTWQ,HTWQ
2273
+ YSHI,YSHI
2274
+ WRIN,WRIN
2275
+ QGAY,QGAY
2276
+ NTQD,NTQD
2277
+ YRLW,YRLW
2278
+ LKYP,LKYP
2279
+ HYKY,HYKY
2280
+ AYQP,AYQP
2281
+ GFCI,GFCI
2282
+ GAMQ,GAMQ
2283
+ YPSG,YPSG
2284
+ YLNI,YLNI
2285
+ LYPL,LYPL
2286
+ SYCY,SYCY
2287
+ VGVW,VGVW
2288
+ NPPW,NPPW
2289
+ NMNG,NMNG
2290
+ RMSM,RMSM
2291
+ YHGR,YHGR
2292
+ LEPK,LEPK
2293
+ FYYH,FYYH
2294
+ TGVC,TGVC
2295
+ GQWY,GQWY
2296
+ IQDK,IQDK
2297
+ HQWA,HQWA
2298
+ TTYP,TTYP
2299
+ RPIM,RPIM
2300
+ HDEW,HDEW
2301
+ EKFK,EKFK
2302
+ TMNI,TMNI
2303
+ EQLS,EQLS
2304
+ VRIY,VRIY
2305
+ RHQH,RHQH
2306
+ HKWC,HKWC
2307
+ RQDW,RQDW
2308
+ NAEN,NAEN
2309
+ QLGK,QLGK
2310
+ PYFA,PYFA
2311
+ QDPF,QDPF
2312
+ LFFR,LFFR
2313
+ FEQT,FEQT
2314
+ CRYR,CRYR
2315
+ NGVY,NGVY
2316
+ KYLC,KYLC
2317
+ GYMS,GYMS
2318
+ NWHA,NWHA
2319
+ MLWS,MLWS
2320
+ NATQ,NATQ
2321
+ WIMA,WIMA
2322
+ AIGY,AIGY
2323
+ PAEF,PAEF
2324
+ MEIK,MEIK
2325
+ AILP,AILP
2326
+ ENWC,ENWC
2327
+ SMID,SMID
2328
+ APYM,APYM
2329
+ PSCG,PSCG
2330
+ ATSP,ATSP
2331
+ FFDQ,FFDQ
2332
+ FEYQ,FEYQ
2333
+ RRLH,RRLH
2334
+ QCLR,QCLR
2335
+ CQFG,CQFG
2336
+ SLWF,SLWF
2337
+ PGFK,PGFK
2338
+ KVDL,KVDL
2339
+ INPN,INPN
2340
+ LGAH,LGAH
2341
+ YCGI,YCGI
2342
+ YKIK,YKIK
2343
+ YCPT,YCPT
2344
+ KRGW,KRGW
2345
+ YTHK,YTHK
2346
+ GIPV,GIPV
2347
+ IVYQ,IVYQ
2348
+ VYNW,VYNW
2349
+ TTDF,TTDF
2350
+ MFNM,MFNM
2351
+ PIRS,PIRS
2352
+ PMWH,PMWH
2353
+ QLLN,QLLN
2354
+ PCIN,PCIN
2355
+ VPWE,VPWE
2356
+ VFNK,VFNK
2357
+ WTTV,WTTV
2358
+ SYPT,SYPT
2359
+ RDVN,RDVN
2360
+ CHPL,CHPL
2361
+ HSYA,HSYA
2362
+ KDTH,KDTH
2363
+ LTNM,LTNM
2364
+ GQPN,GQPN
2365
+ VEWI,VEWI
2366
+ RMLT,RMLT
2367
+ ENRS,ENRS
2368
+ RSMM,RSMM
2369
+ GSCI,GSCI
2370
+ SCEE,SCEE
2371
+ PFKV,PFKV
2372
+ YFDA,YFDA
2373
+ DCLA,DCLA
2374
+ HSHA,HSHA
2375
+ MVGG,MVGG
2376
+ FVDY,FVDY
2377
+ LIYY,LIYY
2378
+ DNQR,DNQR
2379
+ DFVS,DFVS
2380
+ VMLY,VMLY
2381
+ ETIR,ETIR
2382
+ MEME,MEME
2383
+ NSRN,NSRN
2384
+ EDEM,EDEM
2385
+ MMGP,MMGP
2386
+ QIQD,QIQD
2387
+ QNTE,QNTE
2388
+ VWFP,VWFP
2389
+ KLIY,KLIY
2390
+ WHYL,WHYL
2391
+ RGWE,RGWE
2392
+ WNTL,WNTL
2393
+ WDGL,WDGL
2394
+ RCEF,RCEF
2395
+ FEFV,FEFV
2396
+ ACGA,ACGA
2397
+ WLEA,WLEA
2398
+ NHYN,NHYN
2399
+ WMEM,WMEM
2400
+ YEEQ,YEEQ
2401
+ CYGR,CYGR
2402
+ QADC,QADC
2403
+ AGVP,AGVP
2404
+ MITR,MITR
2405
+ QDPC,QDPC
2406
+ FNWP,FNWP
2407
+ EFHR,EFHR
2408
+ NNPP,NNPP
2409
+ WMVF,WMVF
2410
+ DDRT,DDRT
2411
+ ILAP,ILAP
2412
+ DSNK,DSNK
2413
+ DYMN,DYMN
2414
+ HYTA,HYTA
2415
+ YKDI,YKDI
2416
+ TKIF,TKIF
2417
+ LWIK,LWIK
2418
+ RLRP,RLRP
2419
+ EAGH,EAGH
2420
+ WFHL,WFHL
2421
+ TIET,TIET
2422
+ RDIM,RDIM
2423
+ CMIM,CMIM
2424
+ AIFA,AIFA
2425
+ YDGQ,YDGQ
2426
+ DAFK,DAFK
2427
+ CDYL,CDYL
2428
+ DRKQ,DRKQ
2429
+ EESF,EESF
2430
+ IMQT,IMQT
2431
+ CIKV,CIKV
2432
+ WPTL,WPTL
2433
+ RIAK,RIAK
2434
+ PEKT,PEKT
2435
+ HFHH,HFHH
2436
+ HAHK,HAHK
2437
+ PWLE,PWLE
2438
+ QHTV,QHTV
2439
+ AKAP,AKAP
2440
+ GMMM,GMMM
2441
+ MLCG,MLCG
2442
+ KNAF,KNAF
2443
+ TNPQ,TNPQ
2444
+ GTYH,GTYH
2445
+ WYEC,WYEC
2446
+ GCDC,GCDC
2447
+ FVMF,FVMF
2448
+ AQTR,AQTR
2449
+ SGLL,SGLL
2450
+ VCFQ,VCFQ
2451
+ FFHT,FFHT
2452
+ QIDP,QIDP
2453
+ QNLR,QNLR
2454
+ EPYF,EPYF
2455
+ KMMT,KMMT
2456
+ WFTA,WFTA
2457
+ DSQW,DSQW
2458
+ CIVI,CIVI
2459
+ RCFH,RCFH
2460
+ CKSQ,CKSQ
2461
+ KQLY,KQLY
2462
+ FVHW,FVHW
2463
+ GKKL,GKKL
2464
+ VIDG,VIDG
2465
+ FATV,FATV
2466
+ ELCT,ELCT
2467
+ LDNI,LDNI
2468
+ NQIV,NQIV
2469
+ ECMW,ECMW
2470
+ YFHS,YFHS
2471
+ YSAS,YSAS
2472
+ WAKL,WAKL
2473
+ NSWN,NSWN
2474
+ DCMT,DCMT
2475
+ HNLH,HNLH
2476
+ TTAF,TTAF
2477
+ TQLW,TQLW
2478
+ KHMK,KHMK
2479
+ NPKL,NPKL
2480
+ VSIE,VSIE
2481
+ NAFY,NAFY
2482
+ CFCQ,CFCQ
2483
+ PSSP,PSSP
2484
+ RIPA,RIPA
2485
+ EYHC,EYHC
2486
+ LEQE,LEQE
2487
+ VFEY,VFEY
2488
+ TDRE,TDRE
2489
+ PFRL,PFRL
2490
+ LARK,LARK
2491
+ ATQI,ATQI
2492
+ AMPG,AMPG
2493
+ KHTN,KHTN
2494
+ MSKR,MSKR
2495
+ AGWM,AGWM
2496
+ DPND,DPND
2497
+ GLYG,GLYG
2498
+ MKCK,MKCK
2499
+ CQQF,CQQF
2500
+ CTCG,CTCG
2501
+ AIRV,AIRV
2502
+ DADI,DADI
2503
+ MGDL,MGDL
2504
+ FKGP,FKGP
2505
+ NWLS,NWLS
2506
+ CMSN,CMSN
2507
+ EMRG,EMRG
2508
+ WMEH,WMEH
2509
+ GDDP,GDDP
2510
+ SLIC,SLIC
2511
+ INSK,INSK
2512
+ DVHY,DVHY
2513
+ YRMG,YRMG
2514
+ IDHL,IDHL
2515
+ MARL,MARL
2516
+ WVCY,WVCY
2517
+ CKEE,CKEE
2518
+ VIYW,VIYW
2519
+ IRTD,IRTD
2520
+ QQRF,QQRF
2521
+ TVAI,TVAI
2522
+ YEKT,YEKT
2523
+ LSSF,LSSF
2524
+ HAWV,HAWV
2525
+ RERS,RERS
2526
+ SNGL,SNGL
2527
+ ETEA,ETEA
2528
+ DSPN,DSPN
2529
+ SVTP,SVTP
2530
+ VEHQ,VEHQ
2531
+ LDNC,LDNC
2532
+ EQPV,EQPV
2533
+ LCPI,LCPI
2534
+ NPPR,NPPR
2535
+ HDFV,HDFV
2536
+ MKGW,MKGW
2537
+ CSFY,CSFY
2538
+ KFPH,KFPH
2539
+ CNYD,CNYD
2540
+ SDMN,SDMN
2541
+ CSDC,CSDC
2542
+ TTTM,TTTM
2543
+ HQVK,HQVK
2544
+ ALQA,ALQA
2545
+ VIGC,VIGC
2546
+ IIHI,IIHI
2547
+ IWPY,IWPY
2548
+ HLDY,HLDY
2549
+ YCIW,YCIW
2550
+ HVGA,HVGA
2551
+ WHIA,WHIA
2552
+ ARVC,ARVC
2553
+ SEFA,SEFA
2554
+ PYKS,PYKS
2555
+ QDWV,QDWV
2556
+ GLKL,GLKL
2557
+ VLEA,VLEA
2558
+ KYAT,KYAT
2559
+ GAKS,GAKS
2560
+ WVLA,WVLA
2561
+ DNTM,DNTM
2562
+ WMGV,WMGV
2563
+ HLYK,HLYK
2564
+ TYHF,TYHF
2565
+ LSHV,LSHV
2566
+ RNCE,RNCE
2567
+ KVQF,KVQF
2568
+ DAIT,DAIT
2569
+ RRML,RRML
2570
+ MTSL,MTSL
2571
+ LTKG,LTKG
2572
+ AQNA,AQNA
2573
+ CTTP,CTTP
2574
+ IRQR,IRQR
2575
+ QWCT,QWCT
2576
+ HSHL,HSHL
2577
+ QCPG,QCPG
2578
+ MERP,MERP
2579
+ IDGG,IDGG
2580
+ CTDI,CTDI
2581
+ WTVK,WTVK
2582
+ AADY,AADY
2583
+ CEGA,CEGA
2584
+ ETAA,ETAA
2585
+ AHNR,AHNR
2586
+ WESS,WESS
2587
+ TWDE,TWDE
2588
+ QLPT,QLPT
2589
+ FAAF,FAAF
2590
+ TINK,TINK
2591
+ QESG,QESG
2592
+ DRNW,DRNW
2593
+ YTWY,YTWY
2594
+ TTCN,TTCN
2595
+ VTFP,VTFP
2596
+ PHRP,PHRP
2597
+ RFEI,RFEI
2598
+ PKEP,PKEP
2599
+ QSCW,QSCW
2600
+ VGYK,VGYK
2601
+ PPYM,PPYM
2602
+ GCDE,GCDE
2603
+ DGIS,DGIS
2604
+ HCWC,HCWC
2605
+ WPIR,WPIR
2606
+ KKMF,KKMF
2607
+ VVVV,VVVV
2608
+ GGMT,GGMT
2609
+ YKNA,YKNA
2610
+ ELAE,ELAE
2611
+ HYVR,HYVR
2612
+ IIEG,IIEG
2613
+ LETR,LETR
2614
+ TRKK,TRKK
2615
+ KTWK,KTWK
2616
+ CQHA,CQHA
2617
+ PCLP,PCLP
2618
+ WYVP,WYVP
2619
+ FYGW,FYGW
2620
+ IRPY,IRPY
2621
+ SFEP,SFEP
2622
+ DEAC,DEAC
2623
+ MHIA,MHIA
2624
+ FWPH,FWPH
2625
+ YRMQ,YRMQ
2626
+ ESAM,ESAM
2627
+ WSWY,WSWY
2628
+ MCIH,MCIH
2629
+ FNDC,FNDC
2630
+ EHFF,EHFF
2631
+ TYAH,TYAH
2632
+ SHSR,SHSR
2633
+ YNMS,YNMS
2634
+ TNCG,TNCG
2635
+ ECIF,ECIF
2636
+ HVFL,HVFL
2637
+ WYEA,WYEA
2638
+ SKEP,SKEP
2639
+ LRAR,LRAR
2640
+ PPFG,PPFG
2641
+ TGNE,TGNE
2642
+ WPHP,WPHP
2643
+ CETQ,CETQ
2644
+ QRMY,QRMY
2645
+ VPLQ,VPLQ
2646
+ ADVE,ADVE
2647
+ HGEE,HGEE
2648
+ VRGP,VRGP
2649
+ VTPT,VTPT
2650
+ ARRF,ARRF
2651
+ HMLH,HMLH
2652
+ RDWT,RDWT
2653
+ PGVH,PGVH
2654
+ DKEY,DKEY
2655
+ PLPI,PLPI
2656
+ SLHD,SLHD
2657
+ WQVK,WQVK
2658
+ PWSS,PWSS
2659
+ REHK,REHK
2660
+ NSNA,NSNA
2661
+ KNCF,KNCF
2662
+ EWPH,EWPH
2663
+ MPQG,MPQG
2664
+ DTEL,DTEL
2665
+ RASG,RASG
2666
+ CGHG,CGHG
2667
+ DCYT,DCYT
2668
+ YKCT,YKCT
2669
+ DPPQ,DPPQ
2670
+ QPIS,QPIS
2671
+ ADYS,ADYS
2672
+ GNMV,GNMV
2673
+ FAAI,FAAI
2674
+ DMQN,DMQN
2675
+ IAEL,IAEL
2676
+ TEER,TEER
2677
+ DFAR,DFAR
2678
+ DPHQ,DPHQ
2679
+ PWAD,PWAD
2680
+ PLWM,PLWM
2681
+ KCHD,KCHD
2682
+ MLEA,MLEA
2683
+ MSEK,MSEK
2684
+ DWMK,DWMK
2685
+ FNNT,FNNT
2686
+ YVCG,YVCG
2687
+ NQHS,NQHS
2688
+ IIAA,IIAA
2689
+ PVVK,PVVK
2690
+ LCPV,LCPV
2691
+ QSWE,QSWE
2692
+ IQCY,IQCY
2693
+ EAKQ,EAKQ
2694
+ CMFT,CMFT
2695
+ THWG,THWG
2696
+ IKKE,IKKE
2697
+ KLPK,KLPK
2698
+ VGHM,VGHM
2699
+ TPEL,TPEL
2700
+ RIRP,RIRP
2701
+ ITGT,ITGT
2702
+ WDRP,WDRP
2703
+ WHKT,WHKT
2704
+ SMCP,SMCP
2705
+ NYKW,NYKW
2706
+ HAAP,HAAP
2707
+ PRWL,PRWL
2708
+ YPKV,YPKV
2709
+ SHDF,SHDF
2710
+ TFKT,TFKT
2711
+ RCPL,RCPL
2712
+ ECWN,ECWN
2713
+ CPFA,CPFA
2714
+ PRNI,PRNI
2715
+ NYAG,NYAG
2716
+ RVDV,RVDV
2717
+ KQGV,KQGV
2718
+ RDKH,RDKH
2719
+ AMYF,AMYF
2720
+ IMDL,IMDL
2721
+ STAM,STAM
2722
+ GWFT,GWFT
2723
+ SFNT,SFNT
2724
+ IFGV,IFGV
2725
+ EMMI,EMMI
2726
+ GDLP,GDLP
2727
+ IDFE,IDFE
2728
+ PQFR,PQFR
2729
+ NTPP,NTPP
2730
+ CLWD,CLWD
2731
+ NHYA,NHYA
2732
+ ICMS,ICMS
2733
+ HLHC,HLHC
2734
+ KNDI,KNDI
2735
+ AWPE,AWPE
2736
+ PPCN,PPCN
2737
+ WHLQ,WHLQ
2738
+ AFMV,AFMV
2739
+ VSFD,VSFD
2740
+ WLNR,WLNR
2741
+ AMID,AMID
2742
+ HTGE,HTGE
2743
+ MPCW,MPCW
2744
+ TRTF,TRTF
2745
+ WRMV,WRMV
2746
+ ILIE,ILIE
2747
+ GPKK,GPKK
2748
+ SLQG,SLQG
2749
+ FPVY,FPVY
2750
+ WCLI,WCLI
2751
+ MYCE,MYCE
2752
+ GEQN,GEQN
2753
+ MFME,MFME
2754
+ KNKE,KNKE
2755
+ HHLF,HHLF
2756
+ VDQP,VDQP
2757
+ YGVK,YGVK
2758
+ CTLR,CTLR
2759
+ HGYN,HGYN
2760
+ VSEW,VSEW
2761
+ QPYQ,QPYQ
2762
+ QPNY,QPNY
2763
+ HDSR,HDSR
2764
+ HFTC,HFTC
2765
+ MYHV,MYHV
2766
+ LWNQ,LWNQ
2767
+ PAMN,PAMN
2768
+ PLPW,PLPW
2769
+ PMTV,PMTV
2770
+ YTDQ,YTDQ
2771
+ FCER,FCER
2772
+ QNNH,QNNH
2773
+ SFDF,SFDF
2774
+ RHQL,RHQL
2775
+ DLHE,DLHE
2776
+ KWKH,KWKH
2777
+ MQRH,MQRH
2778
+ NWET,NWET
2779
+ QVCV,QVCV
2780
+ EPEP,EPEP
2781
+ VRSE,VRSE
2782
+ ERMM,ERMM
2783
+ TVRI,TVRI
2784
+ GCMG,GCMG
2785
+ MAWA,MAWA
2786
+ DGLA,DGLA
2787
+ LPCQ,LPCQ
2788
+ THDF,THDF
2789
+ TWYH,TWYH
2790
+ IDAT,IDAT
2791
+ SFSE,SFSE
2792
+ WCHR,WCHR
2793
+ NNTH,NNTH
2794
+ QPHC,QPHC
2795
+ EERR,EERR
2796
+ EVIQ,EVIQ
2797
+ LRWA,LRWA
2798
+ LHIE,LHIE
2799
+ DATT,DATT
2800
+ ESDH,ESDH
2801
+ TRHH,TRHH
2802
+ HMGV,HMGV
2803
+ HNVN,HNVN
2804
+ MTLK,MTLK
2805
+ KGSQ,KGSQ
2806
+ GSGW,GSGW
2807
+ VPTP,VPTP
2808
+ RIGG,RIGG
2809
+ GVQR,GVQR
2810
+ ANVM,ANVM
2811
+ TQHM,TQHM
2812
+ CDGP,CDGP
2813
+ EIQR,EIQR
2814
+ SPTN,SPTN
2815
+ QWNF,QWNF
2816
+ TFFT,TFFT
2817
+ VENC,VENC
2818
+ LRKC,LRKC
2819
+ AQML,AQML
2820
+ ASGA,ASGA
2821
+ VHYM,VHYM
2822
+ QGRK,QGRK
2823
+ TGCF,TGCF
2824
+ KVLK,KVLK
2825
+ MCIN,MCIN
2826
+ APTA,APTA
2827
+ LCNK,LCNK
2828
+ CYLF,CYLF
2829
+ FCYN,FCYN
2830
+ ALGE,ALGE
2831
+ QDQP,QDQP
2832
+ VVFN,VVFN
2833
+ LLSQ,LLSQ
2834
+ GEYW,GEYW
2835
+ PFGI,PFGI
2836
+ MLQM,MLQM
2837
+ FRDP,FRDP
2838
+ EKST,EKST
2839
+ GSER,GSER
2840
+ HGPT,HGPT
2841
+ LQGA,LQGA
2842
+ VVDT,VVDT
2843
+ IGFS,IGFS
2844
+ ENWL,ENWL
2845
+ SCCA,SCCA
2846
+ WSNI,WSNI
2847
+ ALCI,ALCI
2848
+ DWFH,DWFH
2849
+ GNDD,GNDD
2850
+ NQWQ,NQWQ
2851
+ KLHW,KLHW
2852
+ WNNG,WNNG
2853
+ EETS,EETS
2854
+ HKKD,HKKD
2855
+ YCIK,YCIK
2856
+ YWKM,YWKM
2857
+ MSLT,MSLT
2858
+ HPMN,HPMN
2859
+ GRGT,GRGT
2860
+ GRNN,GRNN
2861
+ HINF,HINF
2862
+ HDIF,HDIF
2863
+ AKKT,AKKT
2864
+ HVVK,HVVK
2865
+ RTEH,RTEH
2866
+ QDKN,QDKN
2867
+ VLVT,VLVT
2868
+ VLIY,VLIY
2869
+ PVWT,PVWT
2870
+ NHTY,NHTY
2871
+ EYRT,EYRT
2872
+ ANAD,ANAD
2873
+ KAQH,KAQH
2874
+ VWCN,VWCN
2875
+ EDHN,EDHN
2876
+ DNMP,DNMP
2877
+ PIEA,PIEA
2878
+ TNIF,TNIF
2879
+ RHLM,RHLM
2880
+ FFWS,FFWS
2881
+ SFVF,SFVF
2882
+ IDEG,IDEG
2883
+ SRIK,SRIK
2884
+ WVGG,WVGG
2885
+ ICKQ,ICKQ
2886
+ HVKL,HVKL
2887
+ YNER,YNER
2888
+ IFNM,IFNM
2889
+ MCTR,MCTR
2890
+ KEKK,KEKK
2891
+ SVDK,SVDK
2892
+ IAYK,IAYK
2893
+ LPNI,LPNI
2894
+ DRWH,DRWH
2895
+ STAC,STAC
2896
+ CIQV,CIQV
2897
+ ASVA,ASVA
2898
+ EVQC,EVQC
2899
+ TLMM,TLMM
2900
+ YERP,YERP
2901
+ GNII,GNII
2902
+ HCGN,HCGN
2903
+ RECT,RECT
2904
+ HLKG,HLKG
2905
+ TRRA,TRRA
2906
+ RGES,RGES
2907
+ KKDA,KKDA
2908
+ EMFY,EMFY
2909
+ ARCE,ARCE
2910
+ VLTK,VLTK
2911
+ AMHN,AMHN
2912
+ LMQD,LMQD
2913
+ EHGH,EHGH
2914
+ QGSV,QGSV
2915
+ NCWW,NCWW
2916
+ RVSR,RVSR
2917
+ IWRS,IWRS
2918
+ EWMI,EWMI
2919
+ VFSE,VFSE
2920
+ IRLS,IRLS
2921
+ APFF,APFF
2922
+ YSWV,YSWV
2923
+ ANAL,ANAL
2924
+ VLPA,VLPA
2925
+ HTDS,HTDS
2926
+ GYKE,GYKE
2927
+ YVDL,YVDL
2928
+ MVLY,MVLY
2929
+ ITVP,ITVP
2930
+ GIHK,GIHK
2931
+ QSIW,QSIW
2932
+ ALGT,ALGT
2933
+ NVTI,NVTI
2934
+ GWVW,GWVW
2935
+ LGYN,LGYN
2936
+ YTCT,YTCT
2937
+ QWAC,QWAC
2938
+ AAMR,AAMR
2939
+ DRAT,DRAT
2940
+ CTCA,CTCA
2941
+ GYAS,GYAS
2942
+ FELH,FELH
2943
+ ATPA,ATPA
2944
+ DLMR,DLMR
2945
+ FYAH,FYAH
2946
+ WTNK,WTNK
2947
+ QHFT,QHFT
2948
+ NCCW,NCCW
2949
+ DLFE,DLFE
2950
+ LVLF,LVLF
2951
+ FASE,FASE
2952
+ ALQT,ALQT
2953
+ DINN,DINN
2954
+ WALA,WALA
2955
+ CGGS,CGGS
2956
+ GCSA,GCSA
2957
+ FCNY,FCNY
2958
+ DYTD,DYTD
2959
+ SKMQ,SKMQ
2960
+ FNDV,FNDV
2961
+ VTAA,VTAA
2962
+ LHEA,LHEA
2963
+ LFVW,LFVW
2964
+ ENHL,ENHL
2965
+ EGCY,EGCY
2966
+ HKGD,HKGD
2967
+ NACC,NACC
2968
+ HADF,HADF
2969
+ TSNK,TSNK
2970
+ ALHP,ALHP
2971
+ MAHN,MAHN
2972
+ EPEG,EPEG
2973
+ TQRA,TQRA
2974
+ QDVC,QDVC
2975
+ YEPQ,YEPQ
2976
+ WVNR,WVNR
2977
+ CVQA,CVQA
2978
+ AVVV,AVVV
2979
+ RCSP,RCSP
2980
+ FADC,FADC
2981
+ TVQA,TVQA
2982
+ VCSS,VCSS
2983
+ HNMT,HNMT
2984
+ FDNL,FDNL
2985
+ IKKQ,IKKQ
2986
+ NCEQ,NCEQ
2987
+ MVHE,MVHE
2988
+ LPIE,LPIE
2989
+ FNLH,FNLH
2990
+ VPLC,VPLC
2991
+ AGPK,AGPK
2992
+ AQIP,AQIP
2993
+ SLSE,SLSE
2994
+ SEVS,SEVS
2995
+ DLSC,DLSC
2996
+ HETF,HETF
2997
+ GQRM,GQRM
2998
+ YCEW,YCEW
2999
+ GYNC,GYNC
3000
+ CGEV,CGEV
3001
+ TNGY,TNGY
3002
+ PQEV,PQEV
3003
+ PMSP,PMSP
3004
+ EYLP,EYLP
3005
+ LHSK,LHSK
3006
+ ILRP,ILRP
3007
+ RMFL,RMFL
3008
+ CNAL,CNAL
3009
+ IHQM,IHQM
3010
+ QGES,QGES
3011
+ DHCH,DHCH
3012
+ CMPD,CMPD
3013
+ LSVQ,LSVQ
3014
+ WVPV,WVPV
3015
+ AQKR,AQKR
3016
+ EAKS,EAKS
3017
+ DDTA,DDTA
3018
+ HVLL,HVLL
3019
+ RHTK,RHTK
3020
+ AGAH,AGAH
3021
+ PTGS,PTGS
3022
+ VHDH,VHDH
3023
+ WCII,WCII
3024
+ QMGP,QMGP
3025
+ HWWY,HWWY
3026
+ KVGE,KVGE
3027
+ SPPD,SPPD
3028
+ AYLI,AYLI
3029
+ TWFS,TWFS
3030
+ TTKT,TTKT
3031
+ QHHN,QHHN
3032
+ IQYY,IQYY
3033
+ AWFM,AWFM
3034
+ ADKN,ADKN
3035
+ SEIG,SEIG
3036
+ SIRK,SIRK
3037
+ CQCG,CQCG
3038
+ NLWQ,NLWQ
3039
+ MKCR,MKCR
3040
+ NHYC,NHYC
3041
+ DMAF,DMAF
3042
+ EPMC,EPMC
3043
+ LNCI,LNCI
3044
+ NDPK,NDPK
3045
+ SVAH,SVAH
3046
+ YADM,YADM
3047
+ ANDQ,ANDQ
3048
+ QMNV,QMNV
3049
+ PWMR,PWMR
3050
+ VMHY,VMHY
3051
+ PRRP,PRRP
3052
+ DTTC,DTTC
3053
+ KGMG,KGMG
3054
+ NIFD,NIFD
3055
+ CDFS,CDFS
3056
+ CCKT,CCKT
3057
+ QLDC,QLDC
3058
+ HQKW,HQKW
3059
+ NTWH,NTWH
3060
+ PRFI,PRFI
3061
+ YARD,YARD
3062
+ PSED,PSED
3063
+ HGFP,HGFP
3064
+ RTAE,RTAE
3065
+ HGQD,HGQD
3066
+ ILLY,ILLY
3067
+ QQDC,QQDC
3068
+ IVGF,IVGF
3069
+ VTFE,VTFE
3070
+ RCTH,RCTH
3071
+ VAQC,VAQC
3072
+ WHFK,WHFK
3073
+ YWGT,YWGT
3074
+ PIPL,PIPL
3075
+ HDFY,HDFY
3076
+ HHML,HHML
3077
+ GWSG,GWSG
3078
+ SRSV,SRSV
3079
+ IWDV,IWDV
3080
+ HEIK,HEIK
3081
+ YMYA,YMYA
3082
+ LVEW,LVEW
3083
+ VERV,VERV
3084
+ KATH,KATH
3085
+ TVIN,TVIN
3086
+ EWNL,EWNL
3087
+ HNYH,HNYH
3088
+ ILRQ,ILRQ
3089
+ ATHA,ATHA
3090
+ SEVP,SEVP
3091
+ VWNV,VWNV
3092
+ TKCT,TKCT
3093
+ RYYV,RYYV
3094
+ KAKV,KAKV
3095
+ SSAY,SSAY
3096
+ LMCM,LMCM
3097
+ VVHK,VVHK
3098
+ QLVQ,QLVQ
3099
+ VNGR,VNGR
3100
+ SCVI,SCVI
3101
+ VTVG,VTVG
3102
+ HGRK,HGRK
3103
+ MEDV,MEDV
3104
+ ALRI,ALRI
3105
+ PKHD,PKHD
3106
+ QLRC,QLRC
3107
+ DHIG,DHIG
3108
+ CYWH,CYWH
3109
+ WMAL,WMAL
3110
+ YGAM,YGAM
3111
+ VGSD,VGSD
3112
+ EFYI,EFYI
3113
+ CSNS,CSNS
3114
+ EYTI,EYTI
3115
+ NDYM,NDYM
3116
+ PWST,PWST
3117
+ TCPY,TCPY
3118
+ HCDA,HCDA
3119
+ SLDY,SLDY
3120
+ VWLV,VWLV
3121
+ FEKA,FEKA
3122
+ IWPI,IWPI
3123
+ QKRM,QKRM
3124
+ FCDH,FCDH
3125
+ YYHN,YYHN
3126
+ NSMQ,NSMQ
3127
+ LVME,LVME
3128
+ HCDD,HCDD
3129
+ NAGS,NAGS
3130
+ MSVP,MSVP
3131
+ DNQC,DNQC
3132
+ LFCG,LFCG
3133
+ VTTD,VTTD
3134
+ SCKL,SCKL
3135
+ RSSI,RSSI
3136
+ RMCP,RMCP
3137
+ MTQM,MTQM
3138
+ LGRY,LGRY
3139
+ KPWM,KPWM
3140
+ RGPN,RGPN
3141
+ ERWD,ERWD
3142
+ IDRY,IDRY
3143
+ LPML,LPML
3144
+ PDAR,PDAR
3145
+ HISK,HISK
3146
+ TPCR,TPCR
3147
+ MDDL,MDDL
3148
+ GWMD,GWMD
3149
+ CVTS,CVTS
3150
+ LAID,LAID
3151
+ TVKF,TVKF
3152
+ MNVC,MNVC
3153
+ IAQE,IAQE
3154
+ IMHA,IMHA
3155
+ WHSL,WHSL
3156
+ KKGF,KKGF
3157
+ KCPR,KCPR
3158
+ THWR,THWR
3159
+ FQYV,FQYV
3160
+ IRTS,IRTS
3161
+ KVRG,KVRG
3162
+ ESEE,ESEE
3163
+ TWLW,TWLW
3164
+ DYNK,DYNK
3165
+ SKTQ,SKTQ
3166
+ IGAA,IGAA
3167
+ EQMR,EQMR
3168
+ GYVS,GYVS
3169
+ IMCH,IMCH
3170
+ GFNF,GFNF
3171
+ EIVW,EIVW
3172
+ PKFS,PKFS
3173
+ IQIT,IQIT
3174
+ VGYP,VGYP
3175
+ KVPL,KVPL
3176
+ EKTV,EKTV
3177
+ DSLP,DSLP
3178
+ SIHA,SIHA
3179
+ EPPK,EPPK
3180
+ MFHP,MFHP
3181
+ LCKA,LCKA
3182
+ VRAN,VRAN
3183
+ WDPV,WDPV
3184
+ GMAM,GMAM
3185
+ NPGM,NPGM
3186
+ VHNH,VHNH
3187
+ WCNN,WCNN
3188
+ ADNF,ADNF
3189
+ AGSC,AGSC
3190
+ VALE,VALE
3191
+ AIGG,AIGG
3192
+ PWVL,PWVL
3193
+ GGMN,GGMN
3194
+ VDWA,VDWA
3195
+ WYGE,WYGE
3196
+ PRMT,PRMT
3197
+ APAM,APAM
3198
+ DYDA,DYDA
3199
+ GHSG,GHSG
3200
+ YIPM,YIPM
3201
+ ITEM,ITEM
3202
+ YTAK,YTAK
3203
+ RVWA,RVWA
3204
+ AQPS,AQPS
3205
+ YAYN,YAYN
3206
+ WGWC,WGWC
3207
+ MTGS,MTGS
3208
+ ELVR,ELVR
3209
+ QGED,QGED
3210
+ LFPD,LFPD
3211
+ YHGA,YHGA
3212
+ PKIN,PKIN
3213
+ QKRC,QKRC
3214
+ KTRW,KTRW
3215
+ PVQD,PVQD
3216
+ NYSP,NYSP
3217
+ DDWI,DDWI
3218
+ GKAA,GKAA
3219
+ FHAQ,FHAQ
3220
+ HFVT,HFVT
3221
+ KWLY,KWLY
3222
+ VRQC,VRQC
3223
+ TWER,TWER
3224
+ AMEF,AMEF
3225
+ HIWR,HIWR
3226
+ VDEC,VDEC
3227
+ FLDR,FLDR
3228
+ MNTM,MNTM
3229
+ SKNF,SKNF
3230
+ IKER,IKER
3231
+ NLMT,NLMT
3232
+ QFMC,QFMC
3233
+ KGSE,KGSE
3234
+ HPNT,HPNT
3235
+ EYYH,EYYH
3236
+ FAPR,FAPR
3237
+ VSDL,VSDL
3238
+ DYME,DYME
3239
+ EGKV,EGKV
3240
+ SNGF,SNGF
3241
+ QKPM,QKPM
3242
+ FKST,FKST
3243
+ EMPY,EMPY
3244
+ TMCF,TMCF
3245
+ FMDK,FMDK
3246
+ VPYD,VPYD
3247
+ PRRN,PRRN
3248
+ WGPV,WGPV
3249
+ MGPI,MGPI
3250
+ KHDQ,KHDQ
3251
+ YCSM,YCSM
3252
+ MLAE,MLAE
3253
+ CVEK,CVEK
3254
+ MQAV,MQAV
3255
+ WGCR,WGCR
3256
+ RRVY,RRVY
3257
+ GINC,GINC
3258
+ QTSD,QTSD
3259
+ RDLK,RDLK
3260
+ IQTT,IQTT
3261
+ KQDD,KQDD
3262
+ FNYW,FNYW
3263
+ CHHY,CHHY
3264
+ YVCR,YVCR
3265
+ RTKF,RTKF
3266
+ MGEQ,MGEQ
3267
+ CCRV,CCRV
3268
+ SMQE,SMQE
3269
+ DCNY,DCNY
3270
+ DWKQ,DWKQ
3271
+ IVFT,IVFT
3272
+ PCHQ,PCHQ
3273
+ SPSR,SPSR
3274
+ FFTN,FFTN
3275
+ WVDV,WVDV
3276
+ MWRQ,MWRQ
3277
+ CEDC,CEDC
3278
+ NWGD,NWGD
3279
+ GGHG,GGHG
3280
+ SMRW,SMRW
3281
+ WNEF,WNEF
3282
+ GMRD,GMRD
3283
+ WCTM,WCTM
3284
+ VLGF,VLGF
3285
+ VYHK,VYHK
3286
+ LKSQ,LKSQ
3287
+ MEDW,MEDW
3288
+ DNGG,DNGG
3289
+ DGIK,DGIK
3290
+ VKEP,VKEP
3291
+ DPWG,DPWG
3292
+ RRET,RRET
3293
+ LDMR,LDMR
3294
+ WFND,WFND
3295
+ SKDS,SKDS
3296
+ ERIL,ERIL
3297
+ IKIM,IKIM
3298
+ ITRE,ITRE
3299
+ DMNE,DMNE
3300
+ FACL,FACL
3301
+ ACDG,ACDG
3302
+ SNPI,SNPI
3303
+ QHSM,QHSM
3304
+ PSGD,PSGD
3305
+ IPLY,IPLY
3306
+ TSVR,TSVR
3307
+ FSAF,FSAF
3308
+ IQKY,IQKY
3309
+ NFST,NFST
3310
+ WVGW,WVGW
data/splits/4AA_implicit.csv ADDED
@@ -0,0 +1,2847 @@
1
+ name,seqres
2
+ EHFR,EHFR
3
+ YVTL,YVTL
4
+ GQGV,GQGV
5
+ PCFK,PCFK
6
+ VVIV,VVIV
7
+ WGDY,WGDY
8
+ SLYW,SLYW
9
+ QGRE,QGRE
10
+ SRPT,SRPT
11
+ NGDS,NGDS
12
+ VNWW,VNWW
13
+ LDNH,LDNH
14
+ WLSC,WLSC
15
+ YYTK,YYTK
16
+ GPNT,GPNT
17
+ LTQE,LTQE
18
+ VMHV,VMHV
19
+ GTLM,GTLM
20
+ QRRW,QRRW
21
+ LRYM,LRYM
22
+ WTWS,WTWS
23
+ VKFG,VKFG
24
+ GQYP,GQYP
25
+ KWIC,KWIC
26
+ PQHG,PQHG
27
+ FLIS,FLIS
28
+ QEGR,QEGR
29
+ ISML,ISML
30
+ HRCS,HRCS
31
+ NNKC,NNKC
32
+ FART,FART
33
+ TIDH,TIDH
34
+ RDTI,RDTI
35
+ ICLP,ICLP
36
+ AMEN,AMEN
37
+ QHFV,QHFV
38
+ GHSS,GHSS
39
+ YNML,YNML
40
+ RHDG,RHDG
41
+ VMVL,VMVL
42
+ QNCG,QNCG
43
+ FRPQ,FRPQ
44
+ FVFN,FVFN
45
+ FCND,FCND
46
+ CETY,CETY
47
+ GYQH,GYQH
48
+ FLRH,FLRH
49
+ IMRY,IMRY
50
+ PIDV,PIDV
51
+ HTIQ,HTIQ
52
+ KDFM,KDFM
53
+ ESSS,ESSS
54
+ ASRE,ASRE
55
+ LCLQ,LCLQ
56
+ IVMA,IVMA
57
+ RVQQ,RVQQ
58
+ KDDD,KDDD
59
+ VQCL,VQCL
60
+ CSYR,CSYR
61
+ SPVN,SPVN
62
+ MIAY,MIAY
63
+ SNSF,SNSF
64
+ TLRK,TLRK
65
+ IAMI,IAMI
66
+ CPYV,CPYV
67
+ NWWG,NWWG
68
+ LQMG,LQMG
69
+ DLCG,DLCG
70
+ LVVF,LVVF
71
+ LWMR,LWMR
72
+ NNDK,NNDK
73
+ EDQK,EDQK
74
+ CACS,CACS
75
+ IWHF,IWHF
76
+ IPTH,IPTH
77
+ VDRN,VDRN
78
+ HELI,HELI
79
+ GGHN,GGHN
80
+ FSDP,FSDP
81
+ CKVH,CKVH
82
+ NTVG,NTVG
83
+ SPFH,SPFH
84
+ FSRK,FSRK
85
+ PYHQ,PYHQ
86
+ FDNT,FDNT
87
+ PKDM,PKDM
88
+ LIFE,LIFE
89
+ ALDA,ALDA
90
+ PGKM,PGKM
91
+ QRNF,QRNF
92
+ SSNN,SSNN
93
+ FKKL,FKKL
94
+ RLKR,RLKR
95
+ KSIY,KSIY
96
+ NFQF,NFQF
97
+ MAFM,MAFM
98
+ AASF,AASF
99
+ CFEE,CFEE
100
+ DHAR,DHAR
101
+ IDRH,IDRH
102
+ KSVT,KSVT
103
+ SMSN,SMSN
104
+ AAYQ,AAYQ
105
+ FYVR,FYVR
106
+ TFKF,TFKF
107
+ GMMY,GMMY
108
+ RGQL,RGQL
109
+ NYER,NYER
110
+ WKWY,WKWY
111
+ EDTS,EDTS
112
+ PEGV,PEGV
113
+ NNPT,NNPT
114
+ DENA,DENA
115
+ YEPH,YEPH
116
+ IYGF,IYGF
117
+ FRID,FRID
118
+ MRVV,MRVV
119
+ IVFN,IVFN
120
+ LPGP,LPGP
121
+ MGNM,MGNM
122
+ TFWF,TFWF
123
+ LFHI,LFHI
124
+ HYGW,HYGW
125
+ FLDY,FLDY
126
+ QYTM,QYTM
127
+ YMSS,YMSS
128
+ FHWL,FHWL
129
+ HFFG,HFFG
130
+ LKSL,LKSL
131
+ HYAN,HYAN
132
+ PEYE,PEYE
133
+ EVFQ,EVFQ
134
+ FSQR,FSQR
135
+ ETIT,ETIT
136
+ CNNF,CNNF
137
+ MKPK,MKPK
138
+ GMLH,GMLH
139
+ SCQK,SCQK
140
+ WQCY,WQCY
141
+ FIRG,FIRG
142
+ THIC,THIC
143
+ KQRT,KQRT
144
+ QYRQ,QYRQ
145
+ WDWF,WDWF
146
+ QADM,QADM
147
+ GVVY,GVVY
148
+ MWGY,MWGY
149
+ SLHS,SLHS
150
+ PTHA,PTHA
151
+ PTRW,PTRW
152
+ ADSC,ADSC
153
+ QVLA,QVLA
154
+ KSLG,KSLG
155
+ PGNE,PGNE
156
+ IPRW,IPRW
157
+ HTSM,HTSM
158
+ VDEF,VDEF
159
+ VVKC,VVKC
160
+ KTLL,KTLL
161
+ KIED,KIED
162
+ ANFS,ANFS
163
+ DKYV,DKYV
164
+ DQPA,DQPA
165
+ HWLK,HWLK
166
+ TDPL,TDPL
167
+ QCQW,QCQW
168
+ PCPL,PCPL
169
+ MLPR,MLPR
170
+ NDNV,NDNV
171
+ LTPR,LTPR
172
+ YRVC,YRVC
173
+ RFLT,RFLT
174
+ SEAH,SEAH
175
+ VHHC,VHHC
176
+ SMAS,SMAS
177
+ NQEF,NQEF
178
+ DEPV,DEPV
179
+ THHL,THHL
180
+ VVLI,VVLI
181
+ AGVI,AGVI
182
+ STYP,STYP
183
+ LKLW,LKLW
184
+ SFQK,SFQK
185
+ SNID,SNID
186
+ KGCR,KGCR
187
+ FDNC,FDNC
188
+ RCKP,RCKP
189
+ MYYA,MYYA
190
+ DTCD,DTCD
191
+ LQLG,LQLG
192
+ LNWY,LNWY
193
+ MIQK,MIQK
194
+ WDDK,WDDK
195
+ YNKT,YNKT
196
+ TKAK,TKAK
197
+ VRNG,VRNG
198
+ KTTF,KTTF
199
+ IAIM,IAIM
200
+ FGGV,FGGV
201
+ SHPE,SHPE
202
+ EKME,EKME
203
+ YSCA,YSCA
204
+ TGPT,TGPT
205
+ AAKQ,AAKQ
206
+ LQRL,LQRL
207
+ TTLY,TTLY
208
+ VSFH,VSFH
209
+ WYNA,WYNA
210
+ EPWP,EPWP
211
+ DNCQ,DNCQ
212
+ FLHM,FLHM
213
+ WYIG,WYIG
214
+ CQYS,CQYS
215
+ SNCV,SNCV
216
+ DIGN,DIGN
217
+ NYQM,NYQM
218
+ QASA,QASA
219
+ FFRN,FFRN
220
+ WKSH,WKSH
221
+ HEGV,HEGV
222
+ VFWY,VFWY
223
+ QNFH,QNFH
224
+ ISFP,ISFP
225
+ ARGH,ARGH
226
+ CRMN,CRMN
227
+ QNMV,QNMV
228
+ IKQG,IKQG
229
+ PMMV,PMMV
230
+ HILY,HILY
231
+ AQFQ,AQFQ
232
+ MHGM,MHGM
233
+ WMFD,WMFD
234
+ NWPN,NWPN
235
+ HPNK,HPNK
236
+ LRTE,LRTE
237
+ AEAF,AEAF
238
+ FHLQ,FHLQ
239
+ PIAC,PIAC
240
+ KDKI,KDKI
241
+ MKCC,MKCC
242
+ NAWY,NAWY
243
+ YVLE,YVLE
244
+ QKYR,QKYR
245
+ TFIL,TFIL
246
+ DVGL,DVGL
247
+ VNAK,VNAK
248
+ CEIA,CEIA
249
+ CLHA,CLHA
250
+ TPEA,TPEA
251
+ EEKV,EEKV
252
+ GTEV,GTEV
253
+ PYYT,PYYT
254
+ AMQN,AMQN
255
+ KDYC,KDYC
256
+ QNIE,QNIE
257
+ MWLR,MWLR
258
+ AELN,AELN
259
+ TDNF,TDNF
260
+ YPDQ,YPDQ
261
+ PMWQ,PMWQ
262
+ WTIN,WTIN
263
+ TQLE,TQLE
264
+ CWWT,CWWT
265
+ PFGR,PFGR
266
+ KWMG,KWMG
267
+ ICDY,ICDY
268
+ IFWQ,IFWQ
269
+ VGNK,VGNK
270
+ EGSR,EGSR
271
+ KSKE,KSKE
272
+ AQYS,AQYS
273
+ ECDG,ECDG
274
+ AFRN,AFRN
275
+ NLSK,NLSK
276
+ IIDS,IIDS
277
+ MFSG,MFSG
278
+ IING,IING
279
+ IHGP,IHGP
280
+ PEFC,PEFC
281
+ LPRM,LPRM
282
+ VWGW,VWGW
283
+ RAKD,RAKD
284
+ TDRL,TDRL
285
+ PWRH,PWRH
286
+ THKW,THKW
287
+ AQMK,AQMK
288
+ SNYD,SNYD
289
+ KVFQ,KVFQ
290
+ SWML,SWML
291
+ NMHN,NMHN
292
+ VNRT,VNRT
293
+ QHTR,QHTR
294
+ RTHG,RTHG
295
+ MKIF,MKIF
296
+ WAWT,WAWT
297
+ TFIS,TFIS
298
+ AIWE,AIWE
299
+ MINS,MINS
300
+ KNFC,KNFC
301
+ YSME,YSME
302
+ LKTH,LKTH
303
+ PGHR,PGHR
304
+ QCWM,QCWM
305
+ YYFW,YYFW
306
+ GCQH,GCQH
307
+ LFGA,LFGA
308
+ LREV,LREV
309
+ CGCQ,CGCQ
310
+ GQFY,GQFY
311
+ DGDL,DGDL
312
+ IISC,IISC
313
+ GLTR,GLTR
314
+ RGVQ,RGVQ
315
+ DFWH,DFWH
316
+ GFKP,GFKP
317
+ TTNE,TTNE
318
+ FEDH,FEDH
319
+ YTAV,YTAV
320
+ AMVW,AMVW
321
+ NECR,NECR
322
+ PNYQ,PNYQ
323
+ WVLQ,WVLQ
324
+ NPGA,NPGA
325
+ LTYP,LTYP
326
+ GTTS,GTTS
327
+ CPKM,CPKM
328
+ FFIR,FFIR
329
+ KTGN,KTGN
330
+ RGSY,RGSY
331
+ NKNQ,NKNQ
332
+ CWYK,CWYK
333
+ SSRN,SSRN
334
+ KNGD,KNGD
335
+ PECK,PECK
336
+ NRKC,NRKC
337
+ HTAN,HTAN
338
+ KWPM,KWPM
339
+ PDKM,PDKM
340
+ WYVV,WYVV
341
+ GCPK,GCPK
342
+ DRKK,DRKK
343
+ WNWC,WNWC
344
+ PAYK,PAYK
345
+ QKEE,QKEE
346
+ LYGF,LYGF
347
+ GRAR,GRAR
348
+ HFLW,HFLW
349
+ FHSL,FHSL
350
+ HERQ,HERQ
351
+ TMMP,TMMP
352
+ PEMT,PEMT
353
+ LSGH,LSGH
354
+ MPNS,MPNS
355
+ KKYS,KKYS
356
+ WDCD,WDCD
357
+ RSDW,RSDW
358
+ QPAL,QPAL
359
+ EYPW,EYPW
360
+ CFRY,CFRY
361
+ QETE,QETE
362
+ GWDT,GWDT
363
+ HDRQ,HDRQ
364
+ RTNL,RTNL
365
+ IMVW,IMVW
366
+ VKEQ,VKEQ
367
+ HEAH,HEAH
368
+ DEMY,DEMY
369
+ WNLY,WNLY
370
+ SPHG,SPHG
371
+ FQYW,FQYW
372
+ VSDD,VSDD
373
+ SDAK,SDAK
374
+ HLDH,HLDH
375
+ GGMD,GGMD
376
+ PFRW,PFRW
377
+ NMLG,NMLG
378
+ NWSN,NWSN
379
+ TRGN,TRGN
380
+ QICA,QICA
381
+ DHNR,DHNR
382
+ CDKL,CDKL
383
+ ADPG,ADPG
384
+ DHFW,DHFW
385
+ WSSW,WSSW
386
+ ITNH,ITNH
387
+ CACL,CACL
388
+ HFDQ,HFDQ
389
+ FIQA,FIQA
390
+ NQQC,NQQC
391
+ ALCP,ALCP
392
+ GPCV,GPCV
393
+ DFWV,DFWV
394
+ PHPC,PHPC
395
+ AVSM,AVSM
396
+ TSPC,TSPC
397
+ MWLF,MWLF
398
+ HKQL,HKQL
399
+ SAHD,SAHD
400
+ YNQP,YNQP
401
+ PQYL,PQYL
402
+ SHSC,SHSC
403
+ FNCR,FNCR
404
+ DHHE,DHHE
405
+ MIIM,MIIM
406
+ RYPG,RYPG
407
+ PMFI,PMFI
408
+ TIRG,TIRG
409
+ WHEL,WHEL
410
+ ALWP,ALWP
411
+ CMKC,CMKC
412
+ ESTM,ESTM
413
+ EMIR,EMIR
414
+ WKEG,WKEG
415
+ NKPS,NKPS
416
+ FCDI,FCDI
417
+ PAYR,PAYR
418
+ NTGH,NTGH
419
+ GREQ,GREQ
420
+ RHIQ,RHIQ
421
+ KKYC,KKYC
422
+ YSST,YSST
423
+ MNWM,MNWM
424
+ DNTG,DNTG
425
+ NPIF,NPIF
426
+ YLYA,YLYA
427
+ AISE,AISE
428
+ SAAR,SAAR
429
+ WQRL,WQRL
430
+ KIWI,KIWI
431
+ YMKK,YMKK
432
+ IWQA,IWQA
433
+ TLRF,TLRF
434
+ WVNW,WVNW
435
+ QKIH,QKIH
436
+ ENNT,ENNT
437
+ CQAA,CQAA
438
+ RMNL,RMNL
439
+ DCMQ,DCMQ
440
+ GCAE,GCAE
441
+ IYCC,IYCC
442
+ HYGI,HYGI
443
+ LNMQ,LNMQ
444
+ IGNE,IGNE
445
+ CRNA,CRNA
446
+ EWIH,EWIH
447
+ VSRP,VSRP
448
+ RWPF,RWPF
449
+ ADSL,ADSL
450
+ ARRH,ARRH
451
+ GSTY,GSTY
452
+ SITI,SITI
453
+ IVKV,IVKV
454
+ HTPP,HTPP
455
+ QHKL,QHKL
456
+ LDNT,LDNT
457
+ KCWA,KCWA
458
+ KDKA,KDKA
459
+ DMII,DMII
460
+ YDMP,YDMP
461
+ KDNF,KDNF
462
+ RGQA,RGQA
463
+ QSIS,QSIS
464
+ NMSV,NMSV
465
+ METL,METL
466
+ VYAY,VYAY
467
+ VYCP,VYCP
468
+ FIVM,FIVM
469
+ CTRV,CTRV
470
+ FPDC,FPDC
471
+ GSKT,GSKT
472
+ CGFR,CGFR
473
+ TRSH,TRSH
474
+ VSAC,VSAC
475
+ WMEV,WMEV
476
+ SKYM,SKYM
477
+ IEMQ,IEMQ
478
+ KTSE,KTSE
479
+ HFIG,HFIG
480
+ PRTR,PRTR
481
+ WNGE,WNGE
482
+ DFVP,DFVP
483
+ KHDF,KHDF
484
+ WELM,WELM
485
+ KPRK,KPRK
486
+ SLGK,SLGK
487
+ MWKN,MWKN
488
+ NTDY,NTDY
489
+ NKIP,NKIP
490
+ GCQW,GCQW
491
+ CGNR,CGNR
492
+ MDCH,MDCH
493
+ FYEH,FYEH
494
+ NENL,NENL
495
+ TSMR,TSMR
496
+ ETVQ,ETVQ
497
+ NLYD,NLYD
498
+ QSLR,QSLR
499
+ CCNM,CCNM
500
+ TIKC,TIKC
501
+ RPFF,RPFF
502
+ SIVL,SIVL
503
+ ADRN,ADRN
504
+ RVSH,RVSH
505
+ VPTF,VPTF
506
+ IYTM,IYTM
507
+ ENRG,ENRG
508
+ PIRF,PIRF
509
+ SGHT,SGHT
510
+ KFMW,KFMW
511
+ VNVE,VNVE
512
+ DHQD,DHQD
513
+ RKQI,RKQI
514
+ PPFQ,PPFQ
515
+ CYAI,CYAI
516
+ VEGM,VEGM
517
+ PVKV,PVKV
518
+ CGEF,CGEF
519
+ EFET,EFET
520
+ LHGV,LHGV
521
+ CQNM,CQNM
522
+ HHPD,HHPD
523
+ GIDF,GIDF
524
+ DVSH,DVSH
525
+ EQNY,EQNY
526
+ YWVP,YWVP
527
+ SREV,SREV
528
+ CFVK,CFVK
529
+ CSCE,CSCE
530
+ KQFR,KQFR
531
+ GFMM,GFMM
532
+ SDGG,SDGG
533
+ PTVI,PTVI
534
+ KVCK,KVCK
535
+ GDQE,GDQE
536
+ GVNQ,GVNQ
537
+ EHHR,EHHR
538
+ ESYE,ESYE
539
+ MKLK,MKLK
540
+ CKWA,CKWA
541
+ IWGS,IWGS
542
+ RHTY,RHTY
543
+ EHGF,EHGF
544
+ VGFI,VGFI
545
+ YMQM,YMQM
546
+ IQDC,IQDC
547
+ TMFP,TMFP
548
+ YLLA,YLLA
549
+ FNGV,FNGV
550
+ CQWN,CQWN
551
+ IIFC,IIFC
552
+ CYFW,CYFW
553
+ NDCR,NDCR
554
+ WCLH,WCLH
555
+ ICRV,ICRV
556
+ LFVP,LFVP
557
+ MVGK,MVGK
558
+ PDMV,PDMV
559
+ FQWF,FQWF
560
+ LICH,LICH
561
+ WRPF,WRPF
562
+ VEDP,VEDP
563
+ MFAC,MFAC
564
+ WWNS,WWNS
565
+ CGMH,CGMH
566
+ NTVT,NTVT
567
+ YMLC,YMLC
568
+ VTIH,VTIH
569
+ MWML,MWML
570
+ IHIF,IHIF
571
+ RSVW,RSVW
572
+ KFCC,KFCC
573
+ IYSG,IYSG
574
+ IVVS,IVVS
575
+ FWYD,FWYD
576
+ LMVK,LMVK
577
+ LFAS,LFAS
578
+ SVHG,SVHG
579
+ GVDR,GVDR
580
+ VIHW,VIHW
581
+ GNQY,GNQY
582
+ MMVT,MMVT
583
+ DYPH,DYPH
584
+ WHYA,WHYA
585
+ FAIF,FAIF
586
+ MNLG,MNLG
587
+ SSLA,SSLA
588
+ KATL,KATL
589
+ RYCC,RYCC
590
+ HKCL,HKCL
591
+ LDTN,LDTN
592
+ MFIG,MFIG
593
+ HPVY,HPVY
594
+ GPTW,GPTW
595
+ LVMI,LVMI
596
+ KLPT,KLPT
597
+ RLNQ,RLNQ
598
+ IKKN,IKKN
599
+ AQKT,AQKT
600
+ LCEE,LCEE
601
+ FQCR,FQCR
602
+ QSEW,QSEW
603
+ VVWW,VVWW
604
+ TSND,TSND
605
+ SYQF,SYQF
606
+ ATLM,ATLM
607
+ WENR,WENR
608
+ KTVS,KTVS
609
+ GKTK,GKTK
610
+ FFVR,FFVR
611
+ CEEV,CEEV
612
+ MACL,MACL
613
+ MWWT,MWWT
614
+ ICGH,ICGH
615
+ IDQT,IDQT
616
+ QGCS,QGCS
617
+ IVIL,IVIL
618
+ NTKN,NTKN
619
+ DLPK,DLPK
620
+ FRMH,FRMH
621
+ LEWF,LEWF
622
+ RFID,RFID
623
+ PLYY,PLYY
624
+ HIIP,HIIP
625
+ CWMM,CWMM
626
+ HIQQ,HIQQ
627
+ ILEE,ILEE
628
+ YNNA,YNNA
629
+ FRFM,FRFM
630
+ RWTW,RWTW
631
+ NSQL,NSQL
632
+ NMKK,NMKK
633
+ LTSA,LTSA
634
+ ARTD,ARTD
635
+ WCYH,WCYH
636
+ CMWP,CMWP
637
+ PHCS,PHCS
638
+ SPLN,SPLN
639
+ APDW,APDW
640
+ KEGM,KEGM
641
+ SYEV,SYEV
642
+ YCPI,YCPI
643
+ TRVA,TRVA
644
+ RAMI,RAMI
645
+ DLEF,DLEF
646
+ QCTI,QCTI
647
+ YEPV,YEPV
648
+ CWAK,CWAK
649
+ DPDD,DPDD
650
+ TTCK,TTCK
651
+ NESI,NESI
652
+ DGVF,DGVF
653
+ DVYL,DVYL
654
+ NHPH,NHPH
655
+ KRKQ,KRKQ
656
+ FEFA,FEFA
657
+ LDAC,LDAC
658
+ YSQT,YSQT
659
+ TYFF,TYFF
660
+ ALVN,ALVN
661
+ VMVQ,VMVQ
662
+ LVVR,LVVR
663
+ RSWY,RSWY
664
+ QHKQ,QHKQ
665
+ LHEP,LHEP
666
+ ARSK,ARSK
667
+ FPVM,FPVM
668
+ MHVD,MHVD
669
+ RMQL,RMQL
670
+ FDEA,FDEA
671
+ YSIF,YSIF
672
+ MEWG,MEWG
673
+ QIII,QIII
674
+ LIAY,LIAY
675
+ LSPG,LSPG
676
+ LSFH,LSFH
677
+ VWAT,VWAT
678
+ WMKT,WMKT
679
+ SEWR,SEWR
680
+ VAMH,VAMH
681
+ CVGQ,CVGQ
682
+ DAAY,DAAY
683
+ CHYS,CHYS
684
+ IWMF,IWMF
685
+ SRCW,SRCW
686
+ ETMQ,ETMQ
687
+ QHPN,QHPN
688
+ MLEP,MLEP
689
+ YLWR,YLWR
690
+ IHQN,IHQN
691
+ KWPF,KWPF
692
+ SKWK,SKWK
693
+ YIRV,YIRV
694
+ AVKM,AVKM
695
+ SRER,SRER
696
+ KVTF,KVTF
697
+ MART,MART
698
+ HLPC,HLPC
699
+ CGNP,CGNP
700
+ LIHW,LIHW
701
+ WRHE,WRHE
702
+ PLPD,PLPD
703
+ AISH,AISH
704
+ LVGA,LVGA
705
+ GDAN,GDAN
706
+ KFDC,KFDC
707
+ TVYN,TVYN
708
+ EHMH,EHMH
709
+ KEHT,KEHT
710
+ IGSA,IGSA
711
+ HNMM,HNMM
712
+ LWNH,LWNH
713
+ PKYY,PKYY
714
+ CCIM,CCIM
715
+ TIDF,TIDF
716
+ GYTV,GYTV
717
+ LNIN,LNIN
718
+ WRAS,WRAS
719
+ LANP,LANP
720
+ KEYL,KEYL
721
+ PIAT,PIAT
722
+ HNYA,HNYA
723
+ ERTP,ERTP
724
+ TAHS,TAHS
725
+ CMPV,CMPV
726
+ DPHY,DPHY
727
+ HVCL,HVCL
728
+ NRKH,NRKH
729
+ RHCG,RHCG
730
+ AKRV,AKRV
731
+ TSYG,TSYG
732
+ WASS,WASS
733
+ NIDH,NIDH
734
+ WNVC,WNVC
735
+ TLSL,TLSL
736
+ NMWE,NMWE
737
+ LKIY,LKIY
738
+ CVNT,CVNT
739
+ EEAP,EEAP
740
+ MGNK,MGNK
741
+ KDQP,KDQP
742
+ DTRQ,DTRQ
743
+ QVIH,QVIH
744
+ PTKM,PTKM
745
+ FVNA,FVNA
746
+ MTTV,MTTV
747
+ TLDK,TLDK
748
+ NRDV,NRDV
749
+ WYSN,WYSN
750
+ KKEI,KKEI
751
+ VKTG,VKTG
752
+ RTMS,RTMS
753
+ GVTS,GVTS
754
+ QVFG,QVFG
755
+ FSID,FSID
756
+ MCGK,MCGK
757
+ KRYF,KRYF
758
+ FSCA,FSCA
759
+ ENWS,ENWS
760
+ KLPS,KLPS
761
+ WHPD,WHPD
762
+ WERR,WERR
763
+ GVPD,GVPD
764
+ QATI,QATI
765
+ QIRW,QIRW
766
+ VCQD,VCQD
767
+ NHNV,NHNV
768
+ KSPQ,KSPQ
769
+ MQQR,MQQR
770
+ WMYG,WMYG
771
+ YIGK,YIGK
772
+ WAAR,WAAR
773
+ ILEI,ILEI
774
+ EWFS,EWFS
775
+ YAGP,YAGP
776
+ WSWA,WSWA
777
+ WKMM,WKMM
778
+ FWTN,FWTN
779
+ NIRN,NIRN
780
+ EEHP,EEHP
781
+ GKPE,GKPE
782
+ KPLK,KPLK
783
+ EWPS,EWPS
784
+ KLRS,KLRS
785
+ EYWY,EYWY
786
+ CFGR,CFGR
787
+ NEGH,NEGH
788
+ GDRM,GDRM
789
+ VGTI,VGTI
790
+ YCGL,YCGL
791
+ EVMG,EVMG
792
+ PQNW,PQNW
793
+ QVQH,QVQH
794
+ NTPI,NTPI
795
+ VMVI,VMVI
796
+ RHAT,RHAT
797
+ RVCM,RVCM
798
+ LSIN,LSIN
799
+ ATKL,ATKL
800
+ PSNL,PSNL
801
+ WESL,WESL
802
+ QPDG,QPDG
803
+ YRFL,YRFL
804
+ RTPP,RTPP
805
+ IVKA,IVKA
806
+ VVPK,VVPK
807
+ YKPE,YKPE
808
+ FHFW,FHFW
809
+ TRLH,TRLH
810
+ ITSL,ITSL
811
+ GEIE,GEIE
812
+ FGGQ,FGGQ
813
+ SSPD,SSPD
814
+ TWHQ,TWHQ
815
+ GEDR,GEDR
816
+ HNEC,HNEC
817
+ PQNL,PQNL
818
+ ECNQ,ECNQ
819
+ TSEQ,TSEQ
820
+ RLVW,RLVW
821
+ RDNT,RDNT
822
+ QMAH,QMAH
823
+ NVFD,NVFD
824
+ YHYT,YHYT
825
+ PMNY,PMNY
826
+ MGML,MGML
827
+ AFYE,AFYE
828
+ EGLV,EGLV
829
+ PGLP,PGLP
830
+ YNFY,YNFY
831
+ KTAC,KTAC
832
+ YHMC,YHMC
833
+ IATA,IATA
834
+ LPDV,LPDV
835
+ LIAS,LIAS
836
+ DDLY,DDLY
837
+ MQNA,MQNA
838
+ HISP,HISP
839
+ VKIN,VKIN
840
+ LGNP,LGNP
841
+ SCGK,SCGK
842
+ MKED,MKED
843
+ VAWY,VAWY
844
+ HWPF,HWPF
845
+ LMPY,LMPY
846
+ QICW,QICW
847
+ CPCR,CPCR
848
+ RSYT,RSYT
849
+ NLYT,NLYT
850
+ KIHC,KIHC
851
+ LMCP,LMCP
852
+ VTEC,VTEC
853
+ WSWH,WSWH
854
+ SSYR,SSYR
855
+ FTQN,FTQN
856
+ NRML,NRML
857
+ SFFL,SFFL
858
+ QRTP,QRTP
859
+ VPII,VPII
860
+ MYEK,MYEK
861
+ TSTF,TSTF
862
+ PKYM,PKYM
863
+ IITN,IITN
864
+ HPVW,HPVW
865
+ VNLP,VNLP
866
+ SVKP,SVKP
867
+ CDKP,CDKP
868
+ FDWC,FDWC
869
+ AKSF,AKSF
870
+ LYYM,LYYM
871
+ EDWE,EDWE
872
+ QDET,QDET
873
+ YACA,YACA
874
+ QMWK,QMWK
875
+ IIMH,IIMH
876
+ QCTC,QCTC
877
+ PMHG,PMHG
878
+ ECKA,ECKA
879
+ KQYL,KQYL
880
+ SPCL,SPCL
881
+ MQEK,MQEK
882
+ LIAN,LIAN
883
+ SCDL,SCDL
884
+ NDFP,NDFP
885
+ QKGE,QKGE
886
+ HNDV,HNDV
887
+ HHHE,HHHE
888
+ AWNV,AWNV
889
+ SPSG,SPSG
890
+ LRIA,LRIA
891
+ IVID,IVID
892
+ YQYS,YQYS
893
+ DGTP,DGTP
894
+ DTTL,DTTL
895
+ HWDN,HWDN
896
+ DGNC,DGNC
897
+ QGEG,QGEG
898
+ SNPP,SNPP
899
+ KVSH,KVSH
900
+ WLWV,WLWV
901
+ LMMA,LMMA
902
+ MGCM,MGCM
903
+ LNKP,LNKP
904
+ TPIG,TPIG
905
+ WEWH,WEWH
906
+ QCVC,QCVC
907
+ RMKF,RMKF
908
+ MDDE,MDDE
909
+ PGPQ,PGPQ
910
+ DVTF,DVTF
911
+ DKCT,DKCT
912
+ ECRP,ECRP
913
+ CCGP,CCGP
914
+ RFVF,RFVF
915
+ LMNC,LMNC
916
+ TICP,TICP
917
+ AKWL,AKWL
918
+ MTPT,MTPT
919
+ NTVY,NTVY
920
+ KNRS,KNRS
921
+ CYIT,CYIT
922
+ IAGE,IAGE
923
+ ICKI,ICKI
924
+ GQAS,GQAS
925
+ RNFR,RNFR
926
+ SAQL,SAQL
927
+ ICMM,ICMM
928
+ NVED,NVED
929
+ RLSM,RLSM
930
+ INMV,INMV
931
+ FGRY,FGRY
932
+ RGRW,RGRW
933
+ CNNG,CNNG
934
+ YGES,YGES
935
+ YLCN,YLCN
936
+ PASK,PASK
937
+ LYIT,LYIT
938
+ PLPM,PLPM
939
+ GDMM,GDMM
940
+ TQER,TQER
941
+ MTHR,MTHR
942
+ NKCN,NKCN
943
+ NDQA,NDQA
944
+ FIWM,FIWM
945
+ ATER,ATER
946
+ ISVY,ISVY
947
+ HVTG,HVTG
948
+ RPSV,RPSV
949
+ SGET,SGET
950
+ TNGR,TNGR
951
+ SFLC,SFLC
952
+ PPRD,PPRD
953
+ AWKW,AWKW
954
+ DLCW,DLCW
955
+ RFFC,RFFC
956
+ VIQR,VIQR
957
+ KIPG,KIPG
958
+ SRPE,SRPE
959
+ LMMY,LMMY
960
+ YDTM,YDTM
961
+ MTIG,MTIG
962
+ GSHI,GSHI
963
+ NCRV,NCRV
964
+ KPDS,KPDS
965
+ CNNL,CNNL
966
+ MGVK,MGVK
967
+ GWED,GWED
968
+ TYHW,TYHW
969
+ NMVM,NMVM
970
+ EHFA,EHFA
971
+ SVCM,SVCM
972
+ THET,THET
973
+ QGYR,QGYR
974
+ NTDC,NTDC
975
+ ETST,ETST
976
+ MLSM,MLSM
977
+ YRRQ,YRRQ
978
+ VWEA,VWEA
979
+ PFYW,PFYW
980
+ TIEI,TIEI
981
+ ECCQ,ECCQ
982
+ SVDV,SVDV
983
+ RYKY,RYKY
984
+ AMDS,AMDS
985
+ DMMK,DMMK
986
+ AMKE,AMKE
987
+ GNHG,GNHG
988
+ DVPC,DVPC
989
+ IQVY,IQVY
990
+ FNDE,FNDE
991
+ HTLS,HTLS
992
+ DFIG,DFIG
993
+ PVMD,PVMD
994
+ RAMS,RAMS
995
+ IASS,IASS
996
+ DMKH,DMKH
997
+ KYPS,KYPS
998
+ PVQF,PVQF
999
+ SWPS,SWPS
1000
+ EQCI,EQCI
1001
+ CNMV,CNMV
1002
+ MVVG,MVVG
1003
+ SVDI,SVDI
1004
+ RRRN,RRRN
1005
+ AFSS,AFSS
1006
+ PAQD,PAQD
1007
+ QATV,QATV
1008
+ GFCR,GFCR
1009
+ FNYS,FNYS
1010
+ VNWN,VNWN
1011
+ PMIH,PMIH
1012
+ VDYL,VDYL
1013
+ VICL,VICL
1014
+ KYGL,KYGL
1015
+ FHIK,FHIK
1016
+ VVNY,VVNY
1017
+ TYYT,TYYT
1018
+ DLPN,DLPN
1019
+ RNGI,RNGI
1020
+ FFGV,FFGV
1021
+ EIQM,EIQM
1022
+ HRNL,HRNL
1023
+ FHGM,FHGM
1024
+ PQVA,PQVA
1025
+ WIVH,WIVH
1026
+ PTWK,PTWK
1027
+ MFHD,MFHD
1028
+ FHST,FHST
1029
+ NMHA,NMHA
1030
+ KSPI,KSPI
1031
+ DDTI,DDTI
1032
+ DVQS,DVQS
1033
+ KGAT,KGAT
1034
+ RITF,RITF
1035
+ PNPL,PNPL
1036
+ VICS,VICS
1037
+ EDMF,EDMF
1038
+ ESID,ESID
1039
+ VESG,VESG
1040
+ EKEN,EKEN
1041
+ KAYY,KAYY
1042
+ LMPW,LMPW
1043
+ ANLI,ANLI
1044
+ CALR,CALR
1045
+ KMGI,KMGI
1046
+ NEAE,NEAE
1047
+ QAIC,QAIC
1048
+ NKQE,NKQE
1049
+ IRTF,IRTF
1050
+ TCQN,TCQN
1051
+ RPAY,RPAY
1052
+ GTHQ,GTHQ
1053
+ HVCH,HVCH
1054
+ GVQA,GVQA
1055
+ KQTA,KQTA
1056
+ NWSA,NWSA
1057
+ GLSC,GLSC
1058
+ SPRS,SPRS
1059
+ EDHW,EDHW
1060
+ YHMA,YHMA
1061
+ QFIR,QFIR
1062
+ LKYW,LKYW
1063
+ VRMD,VRMD
1064
+ TPNP,TPNP
1065
+ LFCF,LFCF
1066
+ CEVE,CEVE
1067
+ MDDA,MDDA
1068
+ HNYT,HNYT
1069
+ WPVP,WPVP
1070
+ TTAV,TTAV
1071
+ RTRI,RTRI
1072
+ AYAG,AYAG
1073
+ DYDT,DYDT
1074
+ GEQE,GEQE
1075
+ EGSA,EGSA
1076
+ LEMF,LEMF
1077
+ IEFW,IEFW
1078
+ PIQS,PIQS
1079
+ EAMT,EAMT
1080
+ QCNI,QCNI
1081
+ QNGS,QNGS
1082
+ CFNL,CFNL
1083
+ QQSV,QQSV
1084
+ TCPN,TCPN
1085
+ SWSR,SWSR
1086
+ RTYL,RTYL
1087
+ YPAE,YPAE
1088
+ GLMQ,GLMQ
1089
+ QCHI,QCHI
1090
+ IGIR,IGIR
1091
+ ICPI,ICPI
1092
+ CDFV,CDFV
1093
+ PISQ,PISQ
1094
+ QYQM,QYQM
1095
+ DFHH,DFHH
1096
+ FVHV,FVHV
1097
+ RKSK,RKSK
1098
+ STLE,STLE
1099
+ MKLD,MKLD
1100
+ AAYG,AAYG
1101
+ LHNG,LHNG
1102
+ RWCV,RWCV
1103
+ QCWF,QCWF
1104
+ CKSK,CKSK
1105
+ QIGA,QIGA
1106
+ SFKL,SFKL
1107
+ PFLN,PFLN
1108
+ TGPF,TGPF
1109
+ HTEE,HTEE
1110
+ PDFH,PDFH
1111
+ IFIM,IFIM
1112
+ ITCK,ITCK
1113
+ SKKP,SKKP
1114
+ QLMD,QLMD
1115
+ NLYY,NLYY
1116
+ EVHQ,EVHQ
1117
+ TSLV,TSLV
1118
+ GRHR,GRHR
1119
+ HYQF,HYQF
1120
+ LSGA,LSGA
1121
+ IAPM,IAPM
1122
+ GEIN,GEIN
1123
+ GLRD,GLRD
1124
+ TDNC,TDNC
1125
+ NMTH,NMTH
1126
+ DHLQ,DHLQ
1127
+ GLQA,GLQA
1128
+ FNEP,FNEP
1129
+ KHSF,KHSF
1130
+ FKKS,FKKS
1131
+ DIMC,DIMC
1132
+ HWVN,HWVN
1133
+ RNFI,RNFI
1134
+ HDEG,HDEG
1135
+ WVVN,WVVN
1136
+ EQKE,EQKE
1137
+ SCFD,SCFD
1138
+ ADRR,ADRR
1139
+ TVAF,TVAF
1140
+ RVDC,RVDC
1141
+ YFNI,YFNI
1142
+ NGMW,NGMW
1143
+ EKFN,EKFN
1144
+ NFII,NFII
1145
+ SAFS,SAFS
1146
+ NNNY,NNNY
1147
+ PFGK,PFGK
1148
+ SGMK,SGMK
1149
+ SHTM,SHTM
1150
+ HDNH,HDNH
1151
+ EWWP,EWWP
1152
+ PFKH,PFKH
1153
+ WSYL,WSYL
1154
+ IHSC,IHSC
1155
+ YYED,YYED
1156
+ HCMW,HCMW
1157
+ RNKR,RNKR
1158
+ CRNH,CRNH
1159
+ HPVR,HPVR
1160
+ TKLY,TKLY
1161
+ NNSK,NNSK
1162
+ SPIQ,SPIQ
1163
+ KWQV,KWQV
1164
+ INEP,INEP
1165
+ TCQL,TCQL
1166
+ VKRD,VKRD
1167
+ VPHC,VPHC
1168
+ GNAN,GNAN
1169
+ IVNE,IVNE
1170
+ GMYD,GMYD
1171
+ VTLF,VTLF
1172
+ INSN,INSN
1173
+ EQKH,EQKH
1174
+ IRKL,IRKL
1175
+ APIY,APIY
1176
+ SPSP,SPSP
1177
+ TWKI,TWKI
1178
+ TWTC,TWTC
1179
+ SFSI,SFSI
1180
+ CSHN,CSHN
1181
+ RHCA,RHCA
1182
+ WTEF,WTEF
1183
+ SNCQ,SNCQ
1184
+ SLME,SLME
1185
+ GHEW,GHEW
1186
+ EMPE,EMPE
1187
+ ADFS,ADFS
1188
+ GHHS,GHHS
1189
+ HGMC,HGMC
1190
+ LWWR,LWWR
1191
+ NVSG,NVSG
1192
+ IVKI,IVKI
1193
+ YNHE,YNHE
1194
+ CPGT,CPGT
1195
+ LAKQ,LAKQ
1196
+ KNRG,KNRG
1197
+ DFGC,DFGC
1198
+ WNAI,WNAI
1199
+ SKCN,SKCN
1200
+ FYSC,FYSC
1201
+ CTIY,CTIY
1202
+ CSDP,CSDP
1203
+ CHTN,CHTN
1204
+ FWKT,FWKT
1205
+ DWQH,DWQH
1206
+ HWCS,HWCS
1207
+ WWLW,WWLW
1208
+ HADL,HADL
1209
+ RITQ,RITQ
1210
+ SREP,SREP
1211
+ VQIF,VQIF
1212
+ HMPQ,HMPQ
1213
+ TFFD,TFFD
1214
+ KGMF,KGMF
1215
+ DKVN,DKVN
1216
+ YWKS,YWKS
1217
+ LHLT,LHLT
1218
+ WHDD,WHDD
1219
+ WALN,WALN
1220
+ QSQH,QSQH
1221
+ DFIT,DFIT
1222
+ IGRW,IGRW
1223
+ SKVS,SKVS
1224
+ MDSD,MDSD
1225
+ HPER,HPER
1226
+ CRDW,CRDW
1227
+ WECW,WECW
1228
+ DWEI,DWEI
1229
+ DCDT,DCDT
1230
+ SWIH,SWIH
1231
+ VQGR,VQGR
1232
+ DPTP,DPTP
1233
+ PHDF,PHDF
1234
+ HHTM,HHTM
1235
+ DLYK,DLYK
1236
+ CLIQ,CLIQ
1237
+ APYR,APYR
1238
+ CPQV,CPQV
1239
+ RLYF,RLYF
1240
+ SSLI,SSLI
1241
+ FAEQ,FAEQ
1242
+ ELQC,ELQC
1243
+ ARFI,ARFI
1244
+ SQTV,SQTV
1245
+ SWHG,SWHG
1246
+ TNSD,TNSD
1247
+ HPCP,HPCP
1248
+ AFAN,AFAN
1249
+ KAHA,KAHA
1250
+ AMSN,AMSN
1251
+ VPMS,VPMS
1252
+ FRDK,FRDK
1253
+ STCQ,STCQ
1254
+ GIMS,GIMS
1255
+ ICRN,ICRN
1256
+ IYAG,IYAG
1257
+ VWSY,VWSY
1258
+ QAFG,QAFG
1259
+ NNCH,NNCH
1260
+ GGNN,GGNN
1261
+ HVMA,HVMA
1262
+ KGRI,KGRI
1263
+ LAFF,LAFF
1264
+ KCDN,KCDN
1265
+ RFVY,RFVY
1266
+ EYKW,EYKW
1267
+ ASIR,ASIR
1268
+ HPGV,HPGV
1269
+ AKDI,AKDI
1270
+ KDNH,KDNH
1271
+ KKLH,KKLH
1272
+ LCLD,LCLD
1273
+ MLTD,MLTD
1274
+ LAQY,LAQY
1275
+ EPFC,EPFC
1276
+ VRRL,VRRL
1277
+ FSNW,FSNW
1278
+ WDEK,WDEK
1279
+ YDDY,YDDY
1280
+ LNQE,LNQE
1281
+ NLHN,NLHN
1282
+ SLRQ,SLRQ
1283
+ AFAT,AFAT
1284
+ INVT,INVT
1285
+ YYNC,YYNC
1286
+ TMKL,TMKL
1287
+ AIKI,AIKI
1288
+ GALS,GALS
1289
+ IIML,IIML
1290
+ MQRK,MQRK
1291
+ KAWH,KAWH
1292
+ KWYI,KWYI
1293
+ MKIP,MKIP
1294
+ CLLI,CLLI
1295
+ HHCW,HHCW
1296
+ WWFP,WWFP
1297
+ ISTG,ISTG
1298
+ KYGD,KYGD
1299
+ RPCK,RPCK
1300
+ QMGG,QMGG
1301
+ DARI,DARI
1302
+ PMCE,PMCE
1303
+ VYIV,VYIV
1304
+ VHEY,VHEY
1305
+ QMCG,QMCG
1306
+ CLVL,CLVL
1307
+ EKCS,EKCS
1308
+ IKVP,IKVP
1309
+ DNRP,DNRP
1310
+ WLCW,WLCW
1311
+ RSNE,RSNE
1312
+ QKLT,QKLT
1313
+ TEND,TEND
1314
+ FMFY,FMFY
1315
+ QYKD,QYKD
1316
+ HQTR,HQTR
1317
+ ALVT,ALVT
1318
+ NGSN,NGSN
1319
+ EALD,EALD
1320
+ MPWC,MPWC
1321
+ GGMI,GGMI
1322
+ VIRS,VIRS
1323
+ LSSM,LSSM
1324
+ SYAY,SYAY
1325
+ CVVM,CVVM
1326
+ WTNV,WTNV
1327
+ TVFM,TVFM
1328
+ DECF,DECF
1329
+ RETI,RETI
1330
+ IMTD,IMTD
1331
+ MQDV,MQDV
1332
+ GNEP,GNEP
1333
+ RFMC,RFMC
1334
+ GGLH,GGLH
1335
+ PKFI,PKFI
1336
+ NGQG,NGQG
1337
+ TFKL,TFKL
1338
+ SRYN,SRYN
1339
+ FFSS,FFSS
1340
+ CKET,CKET
1341
+ DDCM,DDCM
1342
+ WRVY,WRVY
1343
+ RGTH,RGTH
1344
+ WWNN,WWNN
1345
+ NMGV,NMGV
1346
+ GFAE,GFAE
1347
+ MMPT,MMPT
1348
+ YIQY,YIQY
1349
+ TNSF,TNSF
1350
+ HHDM,HHDM
1351
+ DQMG,DQMG
1352
+ LHPN,LHPN
1353
+ PWEA,PWEA
1354
+ KGVA,KGVA
1355
+ DRRI,DRRI
1356
+ DFVM,DFVM
1357
+ WDVE,WDVE
1358
+ SQYV,SQYV
1359
+ GDIA,GDIA
1360
+ STMC,STMC
1361
+ CTLK,CTLK
1362
+ INYK,INYK
1363
+ DAVY,DAVY
1364
+ SAAE,SAAE
1365
+ NQPC,NQPC
1366
+ VGHL,VGHL
1367
+ TSVY,TSVY
1368
+ IDQL,IDQL
1369
+ ETDI,ETDI
1370
+ FCCD,FCCD
1371
+ SRGC,SRGC
1372
+ IHND,IHND
1373
+ HKYL,HKYL
1374
+ NQPS,NQPS
1375
+ YWAE,YWAE
1376
+ SDNQ,SDNQ
1377
+ ISEG,ISEG
1378
+ NATM,NATM
1379
+ NGCV,NGCV
1380
+ YNTC,YNTC
1381
+ FFKA,FFKA
1382
+ CTTS,CTTS
1383
+ EEAD,EEAD
1384
+ KHAQ,KHAQ
1385
+ STQQ,STQQ
1386
+ MMTS,MMTS
1387
+ HDHV,HDHV
1388
+ KGLQ,KGLQ
1389
+ PKAM,PKAM
1390
+ DQVY,DQVY
1391
+ QQHD,QQHD
1392
+ QQLR,QQLR
1393
+ YCEQ,YCEQ
1394
+ YYGY,YYGY
1395
+ RLRR,RLRR
1396
+ VRYS,VRYS
1397
+ ESHL,ESHL
1398
+ DYGS,DYGS
1399
+ ITKD,ITKD
1400
+ VTDQ,VTDQ
1401
+ GLYK,GLYK
1402
+ FNAL,FNAL
1403
+ HMHG,HMHG
1404
+ TDVT,TDVT
1405
+ VSGS,VSGS
1406
+ FSER,FSER
1407
+ MHAN,MHAN
1408
+ WYEG,WYEG
1409
+ PCRG,PCRG
1410
+ DIEM,DIEM
1411
+ ESNY,ESNY
1412
+ HYHD,HYHD
1413
+ VFCC,VFCC
1414
+ DSYW,DSYW
1415
+ AQQM,AQQM
1416
+ CYCW,CYCW
1417
+ GKWK,GKWK
1418
+ GRSD,GRSD
1419
+ QEKH,QEKH
1420
+ LPLF,LPLF
1421
+ YPYG,YPYG
1422
+ YDYS,YDYS
1423
+ TSHA,TSHA
1424
+ KMIK,KMIK
1425
+ ACWG,ACWG
1426
+ TGEQ,TGEQ
1427
+ PHKQ,PHKQ
1428
+ VGKH,VGKH
1429
+ GHQI,GHQI
1430
+ QPPG,QPPG
1431
+ TYEF,TYEF
1432
+ GHHY,GHHY
1433
+ SLSL,SLSL
1434
+ ARNE,ARNE
1435
+ DATD,DATD
1436
+ LAND,LAND
1437
+ QIQA,QIQA
1438
+ EHYP,EHYP
1439
+ CASL,CASL
1440
+ EPSF,EPSF
1441
+ VELW,VELW
1442
+ YERL,YERL
1443
+ DEKS,DEKS
1444
+ NQMW,NQMW
1445
+ WMHS,WMHS
1446
+ KNMV,KNMV
1447
+ QTIW,QTIW
1448
+ TQDA,TQDA
1449
+ PYGD,PYGD
1450
+ SFMV,SFMV
1451
+ GHCR,GHCR
1452
+ TFIR,TFIR
1453
+ YAEP,YAEP
1454
+ IYTY,IYTY
1455
+ FSEY,FSEY
1456
+ FINQ,FINQ
1457
+ CGKE,CGKE
1458
+ DDCQ,DDCQ
1459
+ FDEE,FDEE
1460
+ EMLR,EMLR
1461
+ CKFY,CKFY
1462
+ KRHR,KRHR
1463
+ RNVV,RNVV
1464
+ CVAP,CVAP
1465
+ GFQD,GFQD
1466
+ YSHN,YSHN
1467
+ FDRF,FDRF
1468
+ AEQN,AEQN
1469
+ VFEW,VFEW
1470
+ ETTF,ETTF
1471
+ MSME,MSME
1472
+ GNSW,GNSW
1473
+ YTGA,YTGA
1474
+ DYRD,DYRD
1475
+ CMQY,CMQY
1476
+ TCCW,TCCW
1477
+ MHEG,MHEG
1478
+ IDIK,IDIK
1479
+ MMKE,MMKE
1480
+ MVDE,MVDE
1481
+ PECI,PECI
1482
+ MLPV,MLPV
1483
+ AKEK,AKEK
1484
+ MTTY,MTTY
1485
+ NFHF,NFHF
1486
+ KVCV,KVCV
1487
+ PSSQ,PSSQ
1488
+ ILSQ,ILSQ
1489
+ RGCN,RGCN
1490
+ SLLK,SLLK
1491
+ TISM,TISM
1492
+ CKNM,CKNM
1493
+ HMFA,HMFA
1494
+ NEPY,NEPY
1495
+ FIWR,FIWR
1496
+ ACGP,ACGP
1497
+ CMIN,CMIN
1498
+ YQSH,YQSH
1499
+ TKRK,TKRK
1500
+ KKMP,KKMP
1501
+ HDGN,HDGN
1502
+ ANDY,ANDY
1503
+ HDWG,HDWG
1504
+ CGEE,CGEE
1505
+ FYLE,FYLE
1506
+ ICVE,ICVE
1507
+ KGWY,KGWY
1508
+ FVQW,FVQW
1509
+ QQVE,QQVE
1510
+ NCRD,NCRD
1511
+ DLFD,DLFD
1512
+ IVRM,IVRM
1513
+ FIWK,FIWK
1514
+ YLCY,YLCY
1515
+ SWMY,SWMY
1516
+ QWDN,QWDN
1517
+ DEGK,DEGK
1518
+ RPIK,RPIK
1519
+ AMFA,AMFA
1520
+ QTCQ,QTCQ
1521
+ NTKK,NTKK
1522
+ SCLE,SCLE
1523
+ LPME,LPME
1524
+ SEYY,SEYY
1525
+ WWRY,WWRY
1526
+ DDNS,DDNS
1527
+ EKNR,EKNR
1528
+ PKIQ,PKIQ
1529
+ LRTG,LRTG
1530
+ NANG,NANG
1531
+ SMNT,SMNT
1532
+ GHVQ,GHVQ
1533
+ KAPS,KAPS
1534
+ QHNC,QHNC
1535
+ RQAQ,RQAQ
1536
+ TAKP,TAKP
1537
+ GSCG,GSCG
1538
+ DYGD,DYGD
1539
+ GTSS,GTSS
1540
+ ALYP,ALYP
1541
+ TDER,TDER
1542
+ HYPN,HYPN
1543
+ ERAS,ERAS
1544
+ RMSP,RMSP
1545
+ CIMM,CIMM
1546
+ EYTT,EYTT
1547
+ YSEC,YSEC
1548
+ LLGP,LLGP
1549
+ PEAA,PEAA
1550
+ NQGY,NQGY
1551
+ HQPP,HQPP
1552
+ NRTP,NRTP
1553
+ LALL,LALL
1554
+ TPGN,TPGN
1555
+ LEIA,LEIA
1556
+ WEEA,WEEA
1557
+ GVAG,GVAG
1558
+ TCRA,TCRA
1559
+ KVDG,KVDG
1560
+ CMVY,CMVY
1561
+ CHMI,CHMI
1562
+ GGYF,GGYF
1563
+ MPTV,MPTV
1564
+ GNNK,GNNK
1565
+ GEIA,GEIA
1566
+ SHPY,SHPY
1567
+ PMRM,PMRM
1568
+ GRTR,GRTR
1569
+ PHEM,PHEM
1570
+ IYEL,IYEL
1571
+ FCDK,FCDK
1572
+ IYSF,IYSF
1573
+ NRDP,NRDP
1574
+ AHGE,AHGE
1575
+ MMYF,MMYF
1576
+ YKQE,YKQE
1577
+ GKFT,GKFT
1578
+ IENN,IENN
1579
+ HAHG,HAHG
1580
+ KVEH,KVEH
1581
+ VILD,VILD
1582
+ FSFE,FSFE
1583
+ LVHQ,LVHQ
1584
+ LICG,LICG
1585
+ WSEM,WSEM
1586
+ FHTK,FHTK
1587
+ WCSE,WCSE
1588
+ VDGM,VDGM
1589
+ VVYE,VVYE
1590
+ HPWQ,HPWQ
1591
+ PDAY,PDAY
1592
+ AGIA,AGIA
1593
+ HCGK,HCGK
1594
+ SAVI,SAVI
1595
+ EKHK,EKHK
1596
+ ICPA,ICPA
1597
+ RWFE,RWFE
1598
+ WHDE,WHDE
1599
+ QIRD,QIRD
1600
+ QKPK,QKPK
1601
+ TQCF,TQCF
1602
+ GVMW,GVMW
1603
+ QGGM,QGGM
1604
+ CREN,CREN
1605
+ ASAL,ASAL
1606
+ VRTV,VRTV
1607
+ DVAV,DVAV
1608
+ EFWC,EFWC
1609
+ EHMY,EHMY
1610
+ VWHM,VWHM
1611
+ FSLN,FSLN
1612
+ KYLA,KYLA
1613
+ QETP,QETP
1614
+ DDAG,DDAG
1615
+ ILAV,ILAV
1616
+ SSNL,SSNL
1617
+ CCNW,CCNW
1618
+ KPAI,KPAI
1619
+ WTVI,WTVI
1620
+ KQHA,KQHA
1621
+ QVRI,QVRI
1622
+ CRVT,CRVT
1623
+ PHQM,PHQM
1624
+ AEMH,AEMH
1625
+ HHYG,HHYG
1626
+ MSCY,MSCY
1627
+ EHLG,EHLG
1628
+ EPPA,EPPA
1629
+ AQIM,AQIM
1630
+ CNII,CNII
1631
+ NADT,NADT
1632
+ MHKK,MHKK
1633
+ EDFY,EDFY
1634
+ CPLP,CPLP
1635
+ LVDD,LVDD
1636
+ YFKR,YFKR
1637
+ DFID,DFID
1638
+ KKQF,KKQF
1639
+ VDNK,VDNK
1640
+ WWWQ,WWWQ
1641
+ VECI,VECI
1642
+ TIEG,TIEG
1643
+ TWRG,TWRG
1644
+ SANG,SANG
1645
+ YWFC,YWFC
1646
+ HCFS,HCFS
1647
+ SWFN,SWFN
1648
+ QQLK,QQLK
1649
+ HPYR,HPYR
1650
+ QDLI,QDLI
1651
+ HYTL,HYTL
1652
+ QSAM,QSAM
1653
+ LASK,LASK
1654
+ VDAW,VDAW
1655
+ LTGH,LTGH
1656
+ VCFH,VCFH
1657
+ QNGD,QNGD
1658
+ WMAR,WMAR
1659
+ CCLC,CCLC
1660
+ HSQV,HSQV
1661
+ KEWG,KEWG
1662
+ PWHA,PWHA
1663
+ HAPI,HAPI
1664
+ TAQW,TAQW
1665
+ PRNF,PRNF
1666
+ ADIG,ADIG
1667
+ DYLC,DYLC
1668
+ WYLN,WYLN
1669
+ IQEP,IQEP
1670
+ RNGR,RNGR
1671
+ QWQY,QWQY
1672
+ GIWI,GIWI
1673
+ MGTS,MGTS
1674
+ NRTE,NRTE
1675
+ MEEF,MEEF
1676
+ DNWK,DNWK
1677
+ HCMM,HCMM
1678
+ YATS,YATS
1679
+ QMQW,QMQW
1680
+ CWKE,CWKE
1681
+ NVSW,NVSW
1682
+ NIRC,NIRC
1683
+ RFVD,RFVD
1684
+ KGFK,KGFK
1685
+ SWTE,SWTE
1686
+ WMAY,WMAY
1687
+ NFLA,NFLA
1688
+ CHTP,CHTP
1689
+ HRMT,HRMT
1690
+ LDVQ,LDVQ
1691
+ HSLA,HSLA
1692
+ FSLW,FSLW
1693
+ RPNY,RPNY
1694
+ DCME,DCME
1695
+ RESY,RESY
1696
+ TKRF,TKRF
1697
+ TVHS,TVHS
1698
+ YPKI,YPKI
1699
+ VQYS,VQYS
1700
+ YHLW,YHLW
1701
+ EYQD,EYQD
1702
+ AQHV,AQHV
1703
+ VDQD,VDQD
1704
+ VEWT,VEWT
1705
+ NSLC,NSLC
1706
+ QFCW,QFCW
1707
+ AHCC,AHCC
1708
+ TSLH,TSLH
1709
+ ICSC,ICSC
1710
+ RGGL,RGGL
1711
+ QNEW,QNEW
1712
+ LAEL,LAEL
1713
+ CIDA,CIDA
1714
+ DKEP,DKEP
1715
+ NPYL,NPYL
1716
+ LPSA,LPSA
1717
+ NPKC,NPKC
1718
+ QVLV,QVLV
1719
+ DIWW,DIWW
1720
+ AQCR,AQCR
1721
+ ALNH,ALNH
1722
+ PQTE,PQTE
1723
+ QSHE,QSHE
1724
+ IDLS,IDLS
1725
+ WSKM,WSKM
1726
+ CHQI,CHQI
1727
+ INRL,INRL
1728
+ VIVA,VIVA
1729
+ VPYG,VPYG
1730
+ LFPL,LFPL
1731
+ SPDA,SPDA
1732
+ PTVC,PTVC
1733
+ HPVD,HPVD
1734
+ PNIY,PNIY
1735
+ FPYF,FPYF
1736
+ LSLS,LSLS
1737
+ PPSS,PPSS
1738
+ GSYS,GSYS
1739
+ ADCD,ADCD
1740
+ DTEI,DTEI
1741
+ VLMD,VLMD
1742
+ FWGY,FWGY
1743
+ HLWE,HLWE
1744
+ NCHC,NCHC
1745
+ WIQM,WIQM
1746
+ FAMD,FAMD
1747
+ CCKQ,CCKQ
1748
+ EMKY,EMKY
1749
+ HRMM,HRMM
1750
+ DRYC,DRYC
1751
+ LPHQ,LPHQ
1752
+ HPQC,HPQC
1753
+ TYMI,TYMI
1754
+ HPQM,HPQM
1755
+ CCWI,CCWI
1756
+ MPNQ,MPNQ
1757
+ IMIT,IMIT
1758
+ TWAR,TWAR
1759
+ HVVL,HVVL
1760
+ CSSG,CSSG
1761
+ PAEM,PAEM
1762
+ MHWD,MHWD
1763
+ QVYE,QVYE
1764
+ QCNV,QCNV
1765
+ RYDQ,RYDQ
1766
+ QILA,QILA
1767
+ ANYR,ANYR
1768
+ VNGE,VNGE
1769
+ WKPR,WKPR
1770
+ LVMT,LVMT
1771
+ HNIR,HNIR
1772
+ FFNS,FFNS
1773
+ GSRR,GSRR
1774
+ VTVI,VTVI
1775
+ PMML,PMML
1776
+ ECTM,ECTM
1777
+ CNHN,CNHN
1778
+ QSHA,QSHA
1779
+ AFNV,AFNV
1780
+ DKAD,DKAD
1781
+ WALC,WALC
1782
+ CNRV,CNRV
1783
+ MFKT,MFKT
1784
+ DIVY,DIVY
1785
+ VPES,VPES
1786
+ PLEW,PLEW
1787
+ HSCT,HSCT
1788
+ SHGA,SHGA
1789
+ TTET,TTET
1790
+ PIQW,PIQW
1791
+ DLTL,DLTL
1792
+ VPLE,VPLE
1793
+ VLTG,VLTG
1794
+ KCNM,KCNM
1795
+ IDCY,IDCY
1796
+ DSPK,DSPK
1797
+ PDKC,PDKC
1798
+ WTCY,WTCY
1799
+ RIMR,RIMR
1800
+ VNPA,VNPA
1801
+ FVDP,FVDP
1802
+ RPDP,RPDP
1803
+ TLEF,TLEF
1804
+ DLNF,DLNF
1805
+ CVES,CVES
1806
+ WKWF,WKWF
1807
+ PNMI,PNMI
1808
+ DIVA,DIVA
1809
+ GKWC,GKWC
1810
+ HDYC,HDYC
1811
+ HAMN,HAMN
1812
+ TEPW,TEPW
1813
+ VYHI,VYHI
1814
+ MTHE,MTHE
1815
+ IGYD,IGYD
1816
+ PARW,PARW
1817
+ FQCG,FQCG
1818
+ MLSF,MLSF
1819
+ NGFG,NGFG
1820
+ YDMF,YDMF
1821
+ ATVW,ATVW
1822
+ PFHY,PFHY
1823
+ QYTG,QYTG
1824
+ KISE,KISE
1825
+ WPSR,WPSR
1826
+ LLIN,LLIN
1827
+ CMIF,CMIF
1828
+ NKKT,NKKT
1829
+ PLYW,PLYW
1830
+ QQFN,QQFN
1831
+ SQMQ,SQMQ
1832
+ AWQH,AWQH
1833
+ RDCC,RDCC
1834
+ YRWA,YRWA
1835
+ PGCG,PGCG
1836
+ STHQ,STHQ
1837
+ EQFS,EQFS
1838
+ KWVH,KWVH
1839
+ PDVA,PDVA
1840
+ LCVI,LCVI
1841
+ LTIN,LTIN
1842
+ VYEM,VYEM
1843
+ DYRG,DYRG
1844
+ GSRP,GSRP
1845
+ IYFQ,IYFQ
1846
+ LYNA,LYNA
1847
+ QFWL,QFWL
1848
+ LTAF,LTAF
1849
+ KEDA,KEDA
1850
+ HVPF,HVPF
1851
+ GQSM,GQSM
1852
+ YRHQ,YRHQ
1853
+ QDCH,QDCH
1854
+ PEMD,PEMD
1855
+ QPTI,QPTI
1856
+ NGNG,NGNG
1857
+ NQNL,NQNL
1858
+ KYEH,KYEH
1859
+ TQFF,TQFF
1860
+ FPFA,FPFA
1861
+ AQLP,AQLP
1862
+ CTHE,CTHE
1863
+ VAVM,VAVM
1864
+ KPLY,KPLY
1865
+ SSLF,SSLF
1866
+ MIRW,MIRW
1867
+ RAHP,RAHP
1868
+ VREH,VREH
1869
+ NAMG,NAMG
1870
+ FKKQ,FKKQ
1871
+ KQRA,KQRA
1872
+ LIFD,LIFD
1873
+ YRPE,YRPE
1874
+ TFAD,TFAD
1875
+ QIAA,QIAA
1876
+ INFW,INFW
1877
+ CRKA,CRKA
1878
+ GDAV,GDAV
1879
+ HETY,HETY
1880
+ GVGT,GVGT
1881
+ NDIP,NDIP
1882
+ CRSM,CRSM
1883
+ HTGT,HTGT
1884
+ STMK,STMK
1885
+ EQGS,EQGS
1886
+ TMQM,TMQM
1887
+ ENKD,ENKD
1888
+ KAFL,KAFL
1889
+ LRLK,LRLK
1890
+ VSPT,VSPT
1891
+ AHLV,AHLV
1892
+ CYHE,CYHE
1893
+ RLWK,RLWK
1894
+ LYWK,LYWK
1895
+ SATM,SATM
1896
+ PMSW,PMSW
1897
+ NTYF,NTYF
1898
+ EDHE,EDHE
1899
+ MMIQ,MMIQ
1900
+ KGPY,KGPY
1901
+ TCRP,TCRP
1902
+ TSKR,TSKR
1903
+ EVDV,EVDV
1904
+ QWRH,QWRH
1905
+ TYTI,TYTI
1906
+ NRFT,NRFT
1907
+ FRPY,FRPY
1908
+ LLPI,LLPI
1909
+ FGPP,FGPP
1910
+ PHYI,PHYI
1911
+ CMCK,CMCK
1912
+ NMPI,NMPI
1913
+ GQFR,GQFR
1914
+ NQER,NQER
1915
+ LMKS,LMKS
1916
+ ENCI,ENCI
1917
+ TNNV,TNNV
1918
+ GRAM,GRAM
1919
+ AAMV,AAMV
1920
+ IFYL,IFYL
1921
+ EKIL,EKIL
1922
+ QMAP,QMAP
1923
+ RYGL,RYGL
1924
+ NQAI,NQAI
1925
+ LYSR,LYSR
1926
+ DRES,DRES
1927
+ PTHS,PTHS
1928
+ TLDQ,TLDQ
1929
+ KEYH,KEYH
1930
+ HDWC,HDWC
1931
+ RPQT,RPQT
1932
+ PMNG,PMNG
1933
+ GWVS,GWVS
1934
+ PQEW,PQEW
1935
+ QRRP,QRRP
1936
+ CEIQ,CEIQ
1937
+ WNKY,WNKY
1938
+ FRVE,FRVE
1939
+ PTYH,PTYH
1940
+ LCRD,LCRD
1941
+ MEWQ,MEWQ
1942
+ QMNY,QMNY
1943
+ NFDK,NFDK
1944
+ QRKH,QRKH
1945
+ YLSR,YLSR
1946
+ MVRG,MVRG
1947
+ IQQH,IQQH
1948
+ RMEE,RMEE
1949
+ HYDN,HYDN
1950
+ KMSH,KMSH
1951
+ LWEL,LWEL
1952
+ WYLC,WYLC
1953
+ NGLM,NGLM
1954
+ QWPS,QWPS
1955
+ WFMM,WFMM
1956
+ KFEY,KFEY
1957
+ CQMT,CQMT
1958
+ GHFL,GHFL
1959
+ TERR,TERR
1960
+ VFGH,VFGH
1961
+ NRRF,NRRF
1962
+ RMRM,RMRM
1963
+ ITAK,ITAK
1964
+ WQWH,WQWH
1965
+ MQLP,MQLP
1966
+ LTSP,LTSP
1967
+ VEYQ,VEYQ
1968
+ YYHP,YYHP
1969
+ KDHH,KDHH
1970
+ PETW,PETW
1971
+ HDEA,HDEA
1972
+ WSMV,WSMV
1973
+ YITL,YITL
1974
+ FLQY,FLQY
1975
+ LLDM,LLDM
1976
+ IEHF,IEHF
1977
+ KVWR,KVWR
1978
+ THAR,THAR
1979
+ VREK,VREK
1980
+ SARM,SARM
1981
+ FSKI,FSKI
1982
+ NCTQ,NCTQ
1983
+ VEGT,VEGT
1984
+ SIKV,SIKV
1985
+ WSTR,WSTR
1986
+ NTQD,NTQD
1987
+ DNDN,DNDN
1988
+ SGFT,SGFT
1989
+ YRLW,YRLW
1990
+ KNWK,KNWK
1991
+ AVKA,AVKA
1992
+ APIK,APIK
1993
+ YDSL,YDSL
1994
+ TNHT,TNHT
1995
+ HYKY,HYKY
1996
+ SDNK,SDNK
1997
+ GAMQ,GAMQ
1998
+ PWIH,PWIH
1999
+ SYQG,SYQG
2000
+ CPDS,CPDS
2001
+ TGFQ,TGFQ
2002
+ YLNI,YLNI
2003
+ NVFM,NVFM
2004
+ TIDN,TIDN
2005
+ ATTP,ATTP
2006
+ GGGT,GGGT
2007
+ CHAG,CHAG
2008
+ NMNG,NMNG
2009
+ QLRT,QLRT
2010
+ LKWC,LKWC
2011
+ CDDQ,CDDQ
2012
+ YHGR,YHGR
2013
+ LEPK,LEPK
2014
+ QANG,QANG
2015
+ IDCV,IDCV
2016
+ FYYH,FYYH
2017
+ MHDH,MHDH
2018
+ YSQC,YSQC
2019
+ EYFL,EYFL
2020
+ HLIS,HLIS
2021
+ IQDK,IQDK
2022
+ HSIE,HSIE
2023
+ EKFK,EKFK
2024
+ EGKE,EGKE
2025
+ TMNI,TMNI
2026
+ SSQA,SSQA
2027
+ SASM,SASM
2028
+ ECIL,ECIL
2029
+ VRAD,VRAD
2030
+ EQLS,EQLS
2031
+ WKPD,WKPD
2032
+ HKWC,HKWC
2033
+ RQDW,RQDW
2034
+ YEFT,YEFT
2035
+ RPMP,RPMP
2036
+ FLCK,FLCK
2037
+ MMGK,MMGK
2038
+ FAYK,FAYK
2039
+ TFHN,TFHN
2040
+ PYFA,PYFA
2041
+ QSDI,QSDI
2042
+ MRQR,MRQR
2043
+ NGVY,NGVY
2044
+ KYLC,KYLC
2045
+ NAFE,NAFE
2046
+ HMYC,HMYC
2047
+ YDQM,YDQM
2048
+ WKMF,WKMF
2049
+ SRFI,SRFI
2050
+ YRQK,YRQK
2051
+ GIMY,GIMY
2052
+ GYMS,GYMS
2053
+ FCME,FCME
2054
+ IRFT,IRFT
2055
+ NWHA,NWHA
2056
+ MLWS,MLWS
2057
+ RHFG,RHFG
2058
+ WIMA,WIMA
2059
+ PAEF,PAEF
2060
+ CHTE,CHTE
2061
+ MEIK,MEIK
2062
+ AILP,AILP
2063
+ PQSC,PQSC
2064
+ SAHG,SAHG
2065
+ LREP,LREP
2066
+ PFQN,PFQN
2067
+ SCYA,SCYA
2068
+ PSCG,PSCG
2069
+ SACP,SACP
2070
+ AVEV,AVEV
2071
+ ETED,ETED
2072
+ YIRN,YIRN
2073
+ AHTQ,AHTQ
2074
+ ATSP,ATSP
2075
+ QHRR,QHRR
2076
+ IVYE,IVYE
2077
+ IRCV,IRCV
2078
+ AWKE,AWKE
2079
+ KSSH,KSSH
2080
+ HWFY,HWFY
2081
+ RSVM,RSVM
2082
+ RCHT,RCHT
2083
+ YKIK,YKIK
2084
+ YERF,YERF
2085
+ YCPT,YCPT
2086
+ IRPV,IRPV
2087
+ CDWV,CDWV
2088
+ CTFA,CTFA
2089
+ GIPV,GIPV
2090
+ RWNY,RWNY
2091
+ YPKQ,YPKQ
2092
+ WAPI,WAPI
2093
+ TTCV,TTCV
2094
+ HNPW,HNPW
2095
+ IVYQ,IVYQ
2096
+ DSDC,DSDC
2097
+ TTDF,TTDF
2098
+ WTTV,WTTV
2099
+ SYPT,SYPT
2100
+ EVER,EVER
2101
+ NGTR,NGTR
2102
+ TTWV,TTWV
2103
+ CKRQ,CKRQ
2104
+ ENEI,ENEI
2105
+ KANP,KANP
2106
+ HDVL,HDVL
2107
+ ENWI,ENWI
2108
+ ENRS,ENRS
2109
+ RAEK,RAEK
2110
+ RSMM,RSMM
2111
+ KNCQ,KNCQ
2112
+ MCFD,MCFD
2113
+ SCEE,SCEE
2114
+ PFKV,PFKV
2115
+ DVTW,DVTW
2116
+ HSHA,HSHA
2117
+ MVGG,MVGG
2118
+ MLFY,MLFY
2119
+ SQLI,SQLI
2120
+ KCTN,KCTN
2121
+ ADDD,ADDD
2122
+ QAFQ,QAFQ
2123
+ VMLY,VMLY
2124
+ LWNE,LWNE
2125
+ CNAE,CNAE
2126
+ MMAN,MMAN
2127
+ AEQT,AEQT
2128
+ TWQQ,TWQQ
2129
+ CRVV,CRVV
2130
+ HIWA,HIWA
2131
+ NSRN,NSRN
2132
+ CETD,CETD
2133
+ HDVQ,HDVQ
2134
+ CERW,CERW
2135
+ EIWE,EIWE
2136
+ PRHF,PRHF
2137
+ RGWE,RGWE
2138
+ WNTL,WNTL
2139
+ DWSH,DWSH
2140
+ EFYV,EFYV
2141
+ WWAR,WWAR
2142
+ WLEA,WLEA
2143
+ ECRM,ECRM
2144
+ VQAI,VQAI
2145
+ MWSY,MWSY
2146
+ RHWS,RHWS
2147
+ YEEQ,YEEQ
2148
+ CLAV,CLAV
2149
+ VHGN,VHGN
2150
+ WSSQ,WSSQ
2151
+ QCRE,QCRE
2152
+ VLKI,VLKI
2153
+ VQIH,VQIH
2154
+ CYGR,CYGR
2155
+ IRPT,IRPT
2156
+ LYQQ,LYQQ
2157
+ RRFV,RRFV
2158
+ CLYP,CLYP
2159
+ SHYK,SHYK
2160
+ YVNE,YVNE
2161
+ HVTC,HVTC
2162
+ MDNG,MDNG
2163
+ MITR,MITR
2164
+ FNWP,FNWP
2165
+ HWDR,HWDR
2166
+ TIWW,TIWW
2167
+ EFHR,EFHR
2168
+ WEIP,WEIP
2169
+ WMVF,WMVF
2170
+ TITV,TITV
2171
+ CGGR,CGGR
2172
+ AHLH,AHLH
2173
+ RAFC,RAFC
2174
+ TEQF,TEQF
2175
+ CDHT,CDHT
2176
+ NSLS,NSLS
2177
+ MFYG,MFYG
2178
+ EIHC,EIHC
2179
+ RFII,RFII
2180
+ EIWP,EIWP
2181
+ YWWL,YWWL
2182
+ GIVG,GIVG
2183
+ WNTE,WNTE
2184
+ ETWT,ETWT
2185
+ KYVE,KYVE
2186
+ DSNK,DSNK
2187
+ CGCS,CGCS
2188
+ WTND,WTND
2189
+ TKML,TKML
2190
+ QHLM,QHLM
2191
+ HAMG,HAMG
2192
+ APSY,APSY
2193
+ LGVQ,LGVQ
2194
+ DSIL,DSIL
2195
+ LLEW,LLEW
2196
+ RDSQ,RDSQ
2197
+ NYFR,NYFR
2198
+ LWIK,LWIK
2199
+ MHME,MHME
2200
+ WRFL,WRFL
2201
+ DLEN,DLEN
2202
+ LSWQ,LSWQ
2203
+ WDLC,WDLC
2204
+ EAGH,EAGH
2205
+ RLRT,RLRT
2206
+ WFHL,WFHL
2207
+ VLYT,VLYT
2208
+ AIFA,AIFA
2209
+ MAEL,MAEL
2210
+ QPCF,QPCF
2211
+ NCCY,NCCY
2212
+ DRKQ,DRKQ
2213
+ GMQY,GMQY
2214
+ PYHE,PYHE
2215
+ NKQM,NKQM
2216
+ IMQT,IMQT
2217
+ LMAV,LMAV
2218
+ CIKV,CIKV
2219
+ SKLC,SKLC
2220
+ SQYW,SQYW
2221
+ WPTL,WPTL
2222
+ RIAK,RIAK
2223
+ PEKT,PEKT
2224
+ FTSS,FTSS
2225
+ HMGP,HMGP
2226
+ HAHK,HAHK
2227
+ NQKS,NQKS
2228
+ FDIG,FDIG
2229
+ WTWV,WTWV
2230
+ RQYK,RQYK
2231
+ PAGF,PAGF
2232
+ MAIL,MAIL
2233
+ AGTK,AGTK
2234
+ WYEC,WYEC
2235
+ AQTR,AQTR
2236
+ HPPF,HPPF
2237
+ QIDP,QIDP
2238
+ PTGW,PTGW
2239
+ LYHI,LYHI
2240
+ LDII,LDII
2241
+ HVMD,HVMD
2242
+ HQWP,HQWP
2243
+ WGYN,WGYN
2244
+ THGQ,THGQ
2245
+ DSQW,DSQW
2246
+ NFLD,NFLD
2247
+ CIVI,CIVI
2248
+ MKQR,MKQR
2249
+ EYYR,EYYR
2250
+ KQLY,KQLY
2251
+ PQVK,PQVK
2252
+ VIQW,VIQW
2253
+ WSRA,WSRA
2254
+ YDWH,YDWH
2255
+ QVIC,QVIC
2256
+ RCQE,RCQE
2257
+ GKKL,GKKL
2258
+ CDFW,CDFW
2259
+ HKCP,HKCP
2260
+ HTWG,HTWG
2261
+ HWSW,HWSW
2262
+ LFWT,LFWT
2263
+ DNSY,DNSY
2264
+ RISF,RISF
2265
+ ELCT,ELCT
2266
+ NQIV,NQIV
2267
+ LYMV,LYMV
2268
+ YSAS,YSAS
2269
+ WAKL,WAKL
2270
+ NSWN,NSWN
2271
+ AYPG,AYPG
2272
+ DCMT,DCMT
2273
+ QDHH,QDHH
2274
+ HNLH,HNLH
2275
+ NFCE,NFCE
2276
+ QQEC,QQEC
2277
+ VWHK,VWHK
2278
+ KHMK,KHMK
2279
+ GTQI,GTQI
2280
+ NPKL,NPKL
2281
+ YWVE,YWVE
2282
+ PLWT,PLWT
2283
+ QIKR,QIKR
2284
+ CFCQ,CFCQ
2285
+ VAKP,VAKP
2286
+ WKWW,WKWW
2287
+ CISE,CISE
2288
+ SMPI,SMPI
2289
+ RGTN,RGTN
2290
+ RIPA,RIPA
2291
+ LEQE,LEQE
2292
+ DCVQ,DCVQ
2293
+ VTMD,VTMD
2294
+ RTLY,RTLY
2295
+ HTNY,HTNY
2296
+ SASD,SASD
2297
+ KHTN,KHTN
2298
+ SLAH,SLAH
2299
+ MSKR,MSKR
2300
+ AQAI,AQAI
2301
+ GTYN,GTYN
2302
+ MKCK,MKCK
2303
+ PYNN,PYNN
2304
+ HCQV,HCQV
2305
+ PILK,PILK
2306
+ DADI,DADI
2307
+ ERPM,ERPM
2308
+ LEPE,LEPE
2309
+ YYWW,YYWW
2310
+ YEHE,YEHE
2311
+ MYTM,MYTM
2312
+ PPAP,PPAP
2313
+ CMSN,CMSN
2314
+ WPDT,WPDT
2315
+ YIPT,YIPT
2316
+ GHWP,GHWP
2317
+ CAGT,CAGT
2318
+ IRCG,IRCG
2319
+ VACC,VACC
2320
+ FFQY,FFQY
2321
+ INSK,INSK
2322
+ NWIP,NWIP
2323
+ GERH,GERH
2324
+ SDDG,SDDG
2325
+ MARL,MARL
2326
+ CTAD,CTAD
2327
+ CATS,CATS
2328
+ WVCY,WVCY
2329
+ TSSK,TSSK
2330
+ PKVD,PKVD
2331
+ FAWE,FAWE
2332
+ LSKT,LSKT
2333
+ EHAM,EHAM
2334
+ LSRK,LSRK
2335
+ CFKP,CFKP
2336
+ MHRY,MHRY
2337
+ QQRF,QQRF
2338
+ LWQS,LWQS
2339
+ FFSN,FFSN
2340
+ EEDD,EEDD
2341
+ KQMR,KQMR
2342
+ RPPK,RPPK
2343
+ PPSV,PPSV
2344
+ ETEA,ETEA
2345
+ GALG,GALG
2346
+ DSPN,DSPN
2347
+ PSEH,PSEH
2348
+ SIMA,SIMA
2349
+ NRQH,NRQH
2350
+ SFSA,SFSA
2351
+ EIWD,EIWD
2352
+ LCPI,LCPI
2353
+ YPVE,YPVE
2354
+ SKFG,SKFG
2355
+ THHE,THHE
2356
+ NLGT,NLGT
2357
+ VTWD,VTWD
2358
+ RVAC,RVAC
2359
+ CSFY,CSFY
2360
+ QEVY,QEVY
2361
+ TRQL,TRQL
2362
+ SGHY,SGHY
2363
+ GTNS,GTNS
2364
+ HQVK,HQVK
2365
+ ISWH,ISWH
2366
+ VIGC,VIGC
2367
+ DWYH,DWYH
2368
+ LPQE,LPQE
2369
+ PFCQ,PFCQ
2370
+ IFNF,IFNF
2371
+ QHYH,QHYH
2372
+ WRDP,WRDP
2373
+ WGVV,WGVV
2374
+ SHPK,SHPK
2375
+ RTRW,RTRW
2376
+ GCTQ,GCTQ
2377
+ RKHM,RKHM
2378
+ WHIA,WHIA
2379
+ ARVC,ARVC
2380
+ TCYV,TCYV
2381
+ NEWQ,NEWQ
2382
+ RHPM,RHPM
2383
+ QDWV,QDWV
2384
+ YQST,YQST
2385
+ LREE,LREE
2386
+ QSRY,QSRY
2387
+ RIRE,RIRE
2388
+ FGMR,FGMR
2389
+ IHIP,IHIP
2390
+ VWCS,VWCS
2391
+ LIYN,LIYN
2392
+ MDDQ,MDDQ
2393
+ CWND,CWND
2394
+ TKQM,TKQM
2395
+ THQI,THQI
2396
+ VQAN,VQAN
2397
+ WVLA,WVLA
2398
+ WMGV,WMGV
2399
+ CWFF,CWFF
2400
+ DFGH,DFGH
2401
+ DWHK,DWHK
2402
+ EYLG,EYLG
2403
+ TSWQ,TSWQ
2404
+ DAVT,DAVT
2405
+ YLSS,YLSS
2406
+ PFLH,PFLH
2407
+ NWTC,NWTC
2408
+ LHFH,LHFH
2409
+ RWDA,RWDA
2410
+ IRAN,IRAN
2411
+ LRSY,LRSY
2412
+ MEHV,MEHV
2413
+ GSTQ,GSTQ
2414
+ HYTF,HYTF
2415
+ TVSC,TVSC
2416
+ IDAI,IDAI
2417
+ EVMK,EVMK
2418
+ TWMD,TWMD
2419
+ MDPA,MDPA
2420
+ RVHY,RVHY
2421
+ CDHM,CDHM
2422
+ MTSL,MTSL
2423
+ DIIH,DIIH
2424
+ ATCF,ATCF
2425
+ WAHF,WAHF
2426
+ MIHQ,MIHQ
2427
+ GYQE,GYQE
2428
+ CQDE,CQDE
2429
+ HSPC,HSPC
2430
+ CSKH,CSKH
2431
+ PWCM,PWCM
2432
+ YIIE,YIIE
2433
+ IDGG,IDGG
2434
+ MKTE,MKTE
2435
+ TDEP,TDEP
2436
+ WTVK,WTVK
2437
+ WASA,WASA
2438
+ CEGA,CEGA
2439
+ CVLH,CVLH
2440
+ ETAA,ETAA
2441
+ DKYP,DKYP
2442
+ THGL,THGL
2443
+ AYAW,AYAW
2444
+ WESS,WESS
2445
+ IIIQ,IIIQ
2446
+ QPQR,QPQR
2447
+ TCHM,TCHM
2448
+ SEAQ,SEAQ
2449
+ TVAT,TVAT
2450
+ CSFT,CSFT
2451
+ KPWY,KPWY
2452
+ TAAE,TAAE
2453
+ HCKV,HCKV
2454
+ HQGG,HQGG
2455
+ AVAV,AVAV
2456
+ SEGI,SEGI
2457
+ PHRP,PHRP
2458
+ FFKV,FFKV
2459
+ RFEI,RFEI
2460
+ SKAR,SKAR
2461
+ WCMR,WCMR
2462
+ LMYG,LMYG
2463
+ VGYK,VGYK
2464
+ HFME,HFME
2465
+ CNNV,CNNV
2466
+ DGIS,DGIS
2467
+ QEAC,QEAC
2468
+ KKMF,KKMF
2469
+ DGQD,DGQD
2470
+ GLDP,GLDP
2471
+ ELAE,ELAE
2472
+ NYYC,NYYC
2473
+ PTSI,PTSI
2474
+ HYVR,HYVR
2475
+ AGKM,AGKM
2476
+ YGEG,YGEG
2477
+ AGRQ,AGRQ
2478
+ PMKC,PMKC
2479
+ KWAL,KWAL
2480
+ IIEG,IIEG
2481
+ RSFF,RSFF
2482
+ DAYV,DAYV
2483
+ CAYW,CAYW
2484
+ FNAS,FNAS
2485
+ DMPC,DMPC
2486
+ SFEP,SFEP
2487
+ YFHF,YFHF
2488
+ DEAC,DEAC
2489
+ MHIA,MHIA
2490
+ FWPH,FWPH
2491
+ GTQA,GTQA
2492
+ YRMQ,YRMQ
2493
+ HGMW,HGMW
2494
+ ESAM,ESAM
2495
+ WSWY,WSWY
2496
+ FNDC,FNDC
2497
+ HKES,HKES
2498
+ EHFF,EHFF
2499
+ PHNT,PHNT
2500
+ EMVC,EMVC
2501
+ PPFM,PPFM
2502
+ CEWC,CEWC
2503
+ ECIF,ECIF
2504
+ EPEI,EPEI
2505
+ PERF,PERF
2506
+ HVFL,HVFL
2507
+ DLND,DLND
2508
+ HTFC,HTFC
2509
+ AEDL,AEDL
2510
+ PPFG,PPFG
2511
+ WPHP,WPHP
2512
+ KSVL,KSVL
2513
+ MEEK,MEEK
2514
+ TAHI,TAHI
2515
+ LILA,LILA
2516
+ VRGP,VRGP
2517
+ DQPM,DQPM
2518
+ HKDS,HKDS
2519
+ GWNP,GWNP
2520
+ TNYH,TNYH
2521
+ PKEQ,PKEQ
2522
+ RYHA,RYHA
2523
+ FHHI,FHHI
2524
+ WSQS,WSQS
2525
+ PPEL,PPEL
2526
+ CCKG,CCKG
2527
+ PLPI,PLPI
2528
+ CKVK,CKVK
2529
+ IVWS,IVWS
2530
+ WQVK,WQVK
2531
+ YQKA,YQKA
2532
+ GWVH,GWVH
2533
+ PGQI,PGQI
2534
+ YICL,YICL
2535
+ NSNA,NSNA
2536
+ SLMP,SLMP
2537
+ LKNR,LKNR
2538
+ NVRQ,NVRQ
2539
+ TLAC,TLAC
2540
+ EWPH,EWPH
2541
+ DCHI,DCHI
2542
+ CCSK,CCSK
2543
+ IDMV,IDMV
2544
+ CYDP,CYDP
2545
+ INLI,INLI
2546
+ RHVE,RHVE
2547
+ MEYY,MEYY
2548
+ DTEL,DTEL
2549
+ MDSE,MDSE
2550
+ EKAI,EKAI
2551
+ ICYL,ICYL
2552
+ YIWR,YIWR
2553
+ PHCI,PHCI
2554
+ QQCF,QQCF
2555
+ GGTV,GGTV
2556
+ KTDV,KTDV
2557
+ KCMY,KCMY
2558
+ GNMV,GNMV
2559
+ FPYS,FPYS
2560
+ MPNI,MPNI
2561
+ KHIM,KHIM
2562
+ DMQN,DMQN
2563
+ GRAV,GRAV
2564
+ MADN,MADN
2565
+ RPCQ,RPCQ
2566
+ NANC,NANC
2567
+ MSEK,MSEK
2568
+ FNNT,FNNT
2569
+ TVFH,TVFH
2570
+ FSSV,FSSV
2571
+ WRYD,WRYD
2572
+ GPTV,GPTV
2573
+ NQHS,NQHS
2574
+ HDSH,HDSH
2575
+ IIAA,IIAA
2576
+ PVDI,PVDI
2577
+ EVTR,EVTR
2578
+ GEMY,GEMY
2579
+ QMEN,QMEN
2580
+ CMMG,CMMG
2581
+ KRDN,KRDN
2582
+ IAMY,IAMY
2583
+ IKKE,IKKE
2584
+ NDSA,NDSA
2585
+ PWRG,PWRG
2586
+ MEGA,MEGA
2587
+ KLPK,KLPK
2588
+ VGHM,VGHM
2589
+ NDGK,NDGK
2590
+ DEWH,DEWH
2591
+ IKPK,IKPK
2592
+ GFYA,GFYA
2593
+ TPEL,TPEL
2594
+ DPNQ,DPNQ
2595
+ CAAG,CAAG
2596
+ GGKF,GGKF
2597
+ VRPR,VRPR
2598
+ KIRL,KIRL
2599
+ HYKN,HYKN
2600
+ WDRP,WDRP
2601
+ YPMP,YPMP
2602
+ FKHG,FKHG
2603
+ KDCW,KDCW
2604
+ WVCW,WVCW
2605
+ GCLE,GCLE
2606
+ WHKT,WHKT
2607
+ RGIW,RGIW
2608
+ VNYM,VNYM
2609
+ IMWD,IMWD
2610
+ MHWW,MHWW
2611
+ ETDG,ETDG
2612
+ TNKF,TNKF
2613
+ NATY,NATY
2614
+ PHSS,PHSS
2615
+ FMDM,FMDM
2616
+ SHDF,SHDF
2617
+ PNVW,PNVW
2618
+ DDRF,DDRF
2619
+ KEDM,KEDM
2620
+ FLYK,FLYK
2621
+ HPHF,HPHF
2622
+ ECWN,ECWN
2623
+ FLNT,FLNT
2624
+ PRNI,PRNI
2625
+ MHTV,MHTV
2626
+ CIGK,CIGK
2627
+ HMPA,HMPA
2628
+ VQDI,VQDI
2629
+ SMYF,SMYF
2630
+ PFCI,PFCI
2631
+ RFWM,RFWM
2632
+ MDVL,MDVL
2633
+ TWYI,TWYI
2634
+ VNGV,VNGV
2635
+ CLWD,CLWD
2636
+ PWYD,PWYD
2637
+ MEYE,MEYE
2638
+ CFWW,CFWW
2639
+ GMLN,GMLN
2640
+ PYWN,PYWN
2641
+ QGIM,QGIM
2642
+ KTTE,KTTE
2643
+ SSPY,SSPY
2644
+ VNTY,VNTY
2645
+ NKYR,NKYR
2646
+ ISWS,ISWS
2647
+ AWPE,AWPE
2648
+ CLAN,CLAN
2649
+ HSIK,HSIK
2650
+ WHLQ,WHLQ
2651
+ AFMV,AFMV
2652
+ EGGM,EGGM
2653
+ DCMI,DCMI
2654
+ EFIW,EFIW
2655
+ QWHL,QWHL
2656
+ TVSY,TVSY
2657
+ MYKV,MYKV
2658
+ HTGE,HTGE
2659
+ EFAL,EFAL
2660
+ QFAP,QFAP
2661
+ QYHC,QYHC
2662
+ INDA,INDA
2663
+ DGNV,DGNV
2664
+ GGRM,GGRM
2665
+ NPYQ,NPYQ
2666
+ WEVQ,WEVQ
2667
+ HMQV,HMQV
2668
+ FNGY,FNGY
2669
+ ILIE,ILIE
2670
+ WCEG,WCEG
2671
+ SLQG,SLQG
2672
+ MSQT,MSQT
2673
+ MYCE,MYCE
2674
+ GEQN,GEQN
2675
+ GKPV,GKPV
2676
+ SEEN,SEEN
2677
+ NYPR,NYPR
2678
+ CPFK,CPFK
2679
+ KWCY,KWCY
2680
+ VHYW,VHYW
2681
+ PQYN,PQYN
2682
+ FWNT,FWNT
2683
+ WNQR,WNQR
2684
+ WVYV,WVYV
2685
+ VLSR,VLSR
2686
+ VSEW,VSEW
2687
+ MYYD,MYYD
2688
+ MKGP,MKGP
2689
+ HDFM,HDFM
2690
+ YPAA,YPAA
2691
+ QPNY,QPNY
2692
+ HDSR,HDSR
2693
+ LEVR,LEVR
2694
+ IVPD,IVPD
2695
+ KNFH,KNFH
2696
+ PGTF,PGTF
2697
+ CAIT,CAIT
2698
+ WEWL,WEWL
2699
+ YKAA,YKAA
2700
+ LRQA,LRQA
2701
+ IGKQ,IGKQ
2702
+ DQHG,DQHG
2703
+ FQQK,FQQK
2704
+ EISN,EISN
2705
+ HWDC,HWDC
2706
+ QNYR,QNYR
2707
+ LRSK,LRSK
2708
+ IPQL,IPQL
2709
+ QTQL,QTQL
2710
+ DRYV,DRYV
2711
+ QVCV,QVCV
2712
+ QVGT,QVGT
2713
+ HYRI,HYRI
2714
+ NGNW,NGNW
2715
+ SSNA,SSNA
2716
+ MYME,MYME
2717
+ CCHV,CCHV
2718
+ TEVD,TEVD
2719
+ ESSN,ESSN
2720
+ CVHV,CVHV
2721
+ WSEF,WSEF
2722
+ VRSE,VRSE
2723
+ TVRI,TVRI
2724
+ LPGK,LPGK
2725
+ IRGH,IRGH
2726
+ HNYD,HNYD
2727
+ MAWA,MAWA
2728
+ IAWD,IAWD
2729
+ LPCQ,LPCQ
2730
+ GCTR,GCTR
2731
+ THDF,THDF
2732
+ TWYH,TWYH
2733
+ PLYV,PLYV
2734
+ IDAT,IDAT
2735
+ QWMR,QWMR
2736
+ IWQS,IWQS
2737
+ SFSE,SFSE
2738
+ WCHR,WCHR
2739
+ NNTH,NNTH
2740
+ QPHC,QPHC
2741
+ NCMH,NCMH
2742
+ TIKL,TIKL
2743
+ EVIQ,EVIQ
2744
+ FRVV,FRVV
2745
+ DATT,DATT
2746
+ CYLN,CYLN
2747
+ PGER,PGER
2748
+ YEYT,YEYT
2749
+ WMTL,WMTL
2750
+ TRHH,TRHH
2751
+ NCCN,NCCN
2752
+ HMGV,HMGV
2753
+ LWIL,LWIL
2754
+ FGQW,FGQW
2755
+ IQQM,IQQM
2756
+ FALN,FALN
2757
+ VNNP,VNNP
2758
+ AMAR,AMAR
2759
+ MTLK,MTLK
2760
+ GVQR,GVQR
2761
+ FVEQ,FVEQ
2762
+ ANVM,ANVM
2763
+ TQHM,TQHM
2764
+ VMDD,VMDD
2765
+ LANV,LANV
2766
+ NLSL,NLSL
2767
+ SPTN,SPTN
2768
+ GMCT,GMCT
2769
+ INKV,INKV
2770
+ LCIQ,LCIQ
2771
+ VNPT,VNPT
2772
+ CKIL,CKIL
2773
+ QGRK,QGRK
2774
+ IKAS,IKAS
2775
+ EWDH,EWDH
2776
+ EDNM,EDNM
2777
+ DINK,DINK
2778
+ LDRY,LDRY
2779
+ VEQN,VEQN
2780
+ APTA,APTA
2781
+ RCAC,RCAC
2782
+ KIRK,KIRK
2783
+ GCYQ,GCYQ
2784
+ LAMP,LAMP
2785
+ VGGR,VGGR
2786
+ IPAR,IPAR
2787
+ PFEY,PFEY
2788
+ HQTL,HQTL
2789
+ YEHW,YEHW
2790
+ AVEG,AVEG
2791
+ MGKK,MGKK
2792
+ RIVL,RIVL
2793
+ VPDM,VPDM
2794
+ LLSQ,LLSQ
2795
+ GEYW,GEYW
2796
+ MQWC,MQWC
2797
+ TYTN,TYTN
2798
+ RAPP,RAPP
2799
+ DSWK,DSWK
2800
+ TQTY,TQTY
2801
+ PMNL,PMNL
2802
+ GDYN,GDYN
2803
+ AFHW,AFHW
2804
+ VYPL,VYPL
2805
+ IIHM,IIHM
2806
+ IGFS,IGFS
2807
+ SCCA,SCCA
2808
+ GSCW,GSCW
2809
+ KLNV,KLNV
2810
+ KLMQ,KLMQ
2811
+ GNDD,GNDD
2812
+ MWQC,MWQC
2813
+ NHTC,NHTC
2814
+ NQWQ,NQWQ
2815
+ PLGA,PLGA
2816
+ LHTG,LHTG
2817
+ IGTG,IGTG
2818
+ DKLF,DKLF
2819
+ KHEG,KHEG
2820
+ HKKD,HKKD
2821
+ TVVE,TVVE
2822
+ MSLT,MSLT
2823
+ RMND,RMND
2824
+ YVAL,YVAL
2825
+ GRGT,GRGT
2826
+ GRNN,GRNN
2827
+ HINF,HINF
2828
+ QMSP,QMSP
2829
+ EWQK,EWQK
2830
+ FAWH,FAWH
2831
+ HHKH,HHKH
2832
+ ALAF,ALAF
2833
+ QDKN,QDKN
2834
+ LSEK,LSEK
2835
+ VLVT,VLVT
2836
+ MELV,MELV
2837
+ KSDN,KSDN
2838
+ SQKC,SQKC
2839
+ HYNT,HYNT
2840
+ PYLT,PYLT
2841
+ GVND,GVND
2842
+ VDQW,VDQW
2843
+ RFSN,RFSN
2844
+ PTPF,PTPF
2845
+ NHTY,NHTY
2846
+ ADDP,ADDP
2847
+ MALD,MALD
data/splits/4AA_implicit_test.csv ADDED
@@ -0,0 +1,101 @@
1
+ name,seqres
2
+ EHFR,EHFR
3
+ YVTL,YVTL
4
+ GQGV,GQGV
5
+ PCFK,PCFK
6
+ VVIV,VVIV
7
+ WGDY,WGDY
8
+ SLYW,SLYW
9
+ QGRE,QGRE
10
+ SRPT,SRPT
11
+ NGDS,NGDS
12
+ VNWW,VNWW
13
+ LDNH,LDNH
14
+ WLSC,WLSC
15
+ YYTK,YYTK
16
+ GPNT,GPNT
17
+ LTQE,LTQE
18
+ VMHV,VMHV
19
+ GTLM,GTLM
20
+ QRRW,QRRW
21
+ LRYM,LRYM
22
+ WTWS,WTWS
23
+ VKFG,VKFG
24
+ GQYP,GQYP
25
+ KWIC,KWIC
26
+ PQHG,PQHG
27
+ FLIS,FLIS
28
+ QEGR,QEGR
29
+ ISML,ISML
30
+ HRCS,HRCS
31
+ NNKC,NNKC
32
+ FART,FART
33
+ TIDH,TIDH
34
+ RDTI,RDTI
35
+ ICLP,ICLP
36
+ AMEN,AMEN
37
+ QHFV,QHFV
38
+ GHSS,GHSS
39
+ YNML,YNML
40
+ RHDG,RHDG
41
+ VMVL,VMVL
42
+ QNCG,QNCG
43
+ FRPQ,FRPQ
44
+ FVFN,FVFN
45
+ FCND,FCND
46
+ CETY,CETY
47
+ GYQH,GYQH
48
+ FLRH,FLRH
49
+ IMRY,IMRY
50
+ PIDV,PIDV
51
+ HTIQ,HTIQ
52
+ KDFM,KDFM
53
+ ESSS,ESSS
54
+ ASRE,ASRE
55
+ LCLQ,LCLQ
56
+ IVMA,IVMA
57
+ RVQQ,RVQQ
58
+ KDDD,KDDD
59
+ VQCL,VQCL
60
+ CSYR,CSYR
61
+ SPVN,SPVN
62
+ MIAY,MIAY
63
+ SNSF,SNSF
64
+ TLRK,TLRK
65
+ IAMI,IAMI
66
+ CPYV,CPYV
67
+ NWWG,NWWG
68
+ LQMG,LQMG
69
+ DLCG,DLCG
70
+ LVVF,LVVF
71
+ LWMR,LWMR
72
+ NNDK,NNDK
73
+ EDQK,EDQK
74
+ CACS,CACS
75
+ IWHF,IWHF
76
+ IPTH,IPTH
77
+ VDRN,VDRN
78
+ HELI,HELI
79
+ GGHN,GGHN
80
+ FSDP,FSDP
81
+ CKVH,CKVH
82
+ NTVG,NTVG
83
+ SPFH,SPFH
84
+ FSRK,FSRK
85
+ PYHQ,PYHQ
86
+ FDNT,FDNT
87
+ PKDM,PKDM
88
+ LIFE,LIFE
89
+ ALDA,ALDA
90
+ PGKM,PGKM
91
+ QRNF,QRNF
92
+ SSNN,SSNN
93
+ FKKL,FKKL
94
+ RLKR,RLKR
95
+ KSIY,KSIY
96
+ NFQF,NFQF
97
+ MAFM,MAFM
98
+ AASF,AASF
99
+ CFEE,CFEE
100
+ DHAR,DHAR
101
+ IDRH,IDRH
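
The split files above share a minimal two-column schema (name,seqres), where both columns hold the same four-residue peptide sequence. Below is a minimal sketch of loading and sanity-checking the splits; the pandas dependency, the relative paths, and the disjointness check are illustrative assumptions layered on the repository layout shown in this commit, not code taken from the repository itself.

    # Minimal sketch: load the 4AA implicit-solvent splits and sanity-check them.
    # Assumes the layout added in this commit (data/splits/4AA_implicit_*.csv);
    # pandas is an assumed dependency, not necessarily what the repo uses.
    import pandas as pd

    splits = {
        name: pd.read_csv(f"data/splits/4AA_implicit_{name}.csv")
        for name in ("train", "val", "test")
    }

    for name, df in splits.items():
        # Every row is a tetrapeptide; name and seqres are identical by construction.
        assert (df["name"] == df["seqres"]).all()
        assert df["seqres"].str.len().eq(4).all()
        print(f"{name}: {len(df)} peptides")

    # The splits should be disjoint: no peptide appears in more than one split.
    train, val, test = (set(splits[k]["name"]) for k in ("train", "val", "test"))
    assert train.isdisjoint(val) and train.isdisjoint(test) and val.isdisjoint(test)
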
data/splits/4AA_implicit_train.csv ADDED
@@ -0,0 +1,2647 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ name,seqres
2
+ EKME,EKME
3
+ YSCA,YSCA
4
+ TGPT,TGPT
5
+ AAKQ,AAKQ
6
+ LQRL,LQRL
7
+ TTLY,TTLY
8
+ VSFH,VSFH
9
+ WYNA,WYNA
10
+ EPWP,EPWP
11
+ DNCQ,DNCQ
12
+ FLHM,FLHM
13
+ WYIG,WYIG
14
+ CQYS,CQYS
15
+ SNCV,SNCV
16
+ DIGN,DIGN
17
+ NYQM,NYQM
18
+ QASA,QASA
19
+ FFRN,FFRN
20
+ WKSH,WKSH
21
+ HEGV,HEGV
22
+ VFWY,VFWY
23
+ QNFH,QNFH
24
+ ISFP,ISFP
25
+ ARGH,ARGH
26
+ CRMN,CRMN
27
+ QNMV,QNMV
28
+ IKQG,IKQG
29
+ PMMV,PMMV
30
+ HILY,HILY
31
+ AQFQ,AQFQ
32
+ MHGM,MHGM
33
+ WMFD,WMFD
34
+ NWPN,NWPN
35
+ HPNK,HPNK
36
+ LRTE,LRTE
37
+ AEAF,AEAF
38
+ FHLQ,FHLQ
39
+ PIAC,PIAC
40
+ KDKI,KDKI
41
+ MKCC,MKCC
42
+ NAWY,NAWY
43
+ YVLE,YVLE
44
+ QKYR,QKYR
45
+ TFIL,TFIL
46
+ DVGL,DVGL
47
+ VNAK,VNAK
48
+ CEIA,CEIA
49
+ CLHA,CLHA
50
+ TPEA,TPEA
51
+ EEKV,EEKV
52
+ GTEV,GTEV
53
+ PYYT,PYYT
54
+ AMQN,AMQN
55
+ KDYC,KDYC
56
+ QNIE,QNIE
57
+ MWLR,MWLR
58
+ AELN,AELN
59
+ TDNF,TDNF
60
+ YPDQ,YPDQ
61
+ PMWQ,PMWQ
62
+ WTIN,WTIN
63
+ TQLE,TQLE
64
+ CWWT,CWWT
65
+ PFGR,PFGR
66
+ KWMG,KWMG
67
+ ICDY,ICDY
68
+ IFWQ,IFWQ
69
+ VGNK,VGNK
70
+ EGSR,EGSR
71
+ KSKE,KSKE
72
+ AQYS,AQYS
73
+ ECDG,ECDG
74
+ AFRN,AFRN
75
+ NLSK,NLSK
76
+ IIDS,IIDS
77
+ MFSG,MFSG
78
+ IING,IING
79
+ IHGP,IHGP
80
+ PEFC,PEFC
81
+ LPRM,LPRM
82
+ VWGW,VWGW
83
+ RAKD,RAKD
84
+ TDRL,TDRL
85
+ PWRH,PWRH
86
+ THKW,THKW
87
+ AQMK,AQMK
88
+ SNYD,SNYD
89
+ KVFQ,KVFQ
90
+ SWML,SWML
91
+ NMHN,NMHN
92
+ VNRT,VNRT
93
+ QHTR,QHTR
94
+ RTHG,RTHG
95
+ MKIF,MKIF
96
+ WAWT,WAWT
97
+ TFIS,TFIS
98
+ AIWE,AIWE
99
+ MINS,MINS
100
+ KNFC,KNFC
101
+ YSME,YSME
102
+ LKTH,LKTH
103
+ PGHR,PGHR
104
+ QCWM,QCWM
105
+ YYFW,YYFW
106
+ GCQH,GCQH
107
+ LFGA,LFGA
108
+ LREV,LREV
109
+ CGCQ,CGCQ
110
+ GQFY,GQFY
111
+ DGDL,DGDL
112
+ IISC,IISC
113
+ GLTR,GLTR
114
+ RGVQ,RGVQ
115
+ DFWH,DFWH
116
+ GFKP,GFKP
117
+ TTNE,TTNE
118
+ FEDH,FEDH
119
+ YTAV,YTAV
120
+ AMVW,AMVW
121
+ NECR,NECR
122
+ PNYQ,PNYQ
123
+ WVLQ,WVLQ
124
+ NPGA,NPGA
125
+ LTYP,LTYP
126
+ GTTS,GTTS
127
+ CPKM,CPKM
128
+ FFIR,FFIR
129
+ KTGN,KTGN
130
+ RGSY,RGSY
131
+ NKNQ,NKNQ
132
+ CWYK,CWYK
133
+ SSRN,SSRN
134
+ KNGD,KNGD
135
+ PECK,PECK
136
+ NRKC,NRKC
137
+ HTAN,HTAN
138
+ KWPM,KWPM
139
+ PDKM,PDKM
140
+ WYVV,WYVV
141
+ GCPK,GCPK
142
+ DRKK,DRKK
143
+ WNWC,WNWC
144
+ PAYK,PAYK
145
+ QKEE,QKEE
146
+ LYGF,LYGF
147
+ GRAR,GRAR
148
+ HFLW,HFLW
149
+ FHSL,FHSL
150
+ HERQ,HERQ
151
+ TMMP,TMMP
152
+ PEMT,PEMT
153
+ LSGH,LSGH
154
+ MPNS,MPNS
155
+ KKYS,KKYS
156
+ WDCD,WDCD
157
+ RSDW,RSDW
158
+ QPAL,QPAL
159
+ EYPW,EYPW
160
+ CFRY,CFRY
161
+ QETE,QETE
162
+ GWDT,GWDT
163
+ HDRQ,HDRQ
164
+ RTNL,RTNL
165
+ IMVW,IMVW
166
+ VKEQ,VKEQ
167
+ HEAH,HEAH
168
+ DEMY,DEMY
169
+ WNLY,WNLY
170
+ SPHG,SPHG
171
+ FQYW,FQYW
172
+ VSDD,VSDD
173
+ SDAK,SDAK
174
+ HLDH,HLDH
175
+ GGMD,GGMD
176
+ PFRW,PFRW
177
+ NMLG,NMLG
178
+ NWSN,NWSN
179
+ TRGN,TRGN
180
+ QICA,QICA
181
+ DHNR,DHNR
182
+ CDKL,CDKL
183
+ ADPG,ADPG
184
+ DHFW,DHFW
185
+ WSSW,WSSW
186
+ ITNH,ITNH
187
+ CACL,CACL
188
+ HFDQ,HFDQ
189
+ FIQA,FIQA
190
+ NQQC,NQQC
191
+ ALCP,ALCP
192
+ GPCV,GPCV
193
+ DFWV,DFWV
194
+ PHPC,PHPC
195
+ AVSM,AVSM
196
+ TSPC,TSPC
197
+ MWLF,MWLF
198
+ HKQL,HKQL
199
+ SAHD,SAHD
200
+ YNQP,YNQP
201
+ PQYL,PQYL
202
+ SHSC,SHSC
203
+ FNCR,FNCR
204
+ DHHE,DHHE
205
+ MIIM,MIIM
206
+ RYPG,RYPG
207
+ PMFI,PMFI
208
+ TIRG,TIRG
209
+ WHEL,WHEL
210
+ ALWP,ALWP
211
+ CMKC,CMKC
212
+ ESTM,ESTM
213
+ EMIR,EMIR
214
+ WKEG,WKEG
215
+ NKPS,NKPS
216
+ FCDI,FCDI
217
+ PAYR,PAYR
218
+ NTGH,NTGH
219
+ GREQ,GREQ
220
+ RHIQ,RHIQ
221
+ KKYC,KKYC
222
+ YSST,YSST
223
+ MNWM,MNWM
224
+ DNTG,DNTG
225
+ NPIF,NPIF
226
+ YLYA,YLYA
227
+ AISE,AISE
228
+ SAAR,SAAR
229
+ WQRL,WQRL
230
+ KIWI,KIWI
231
+ YMKK,YMKK
232
+ IWQA,IWQA
233
+ TLRF,TLRF
234
+ WVNW,WVNW
235
+ QKIH,QKIH
236
+ ENNT,ENNT
237
+ CQAA,CQAA
238
+ RMNL,RMNL
239
+ DCMQ,DCMQ
240
+ GCAE,GCAE
241
+ IYCC,IYCC
242
+ HYGI,HYGI
243
+ LNMQ,LNMQ
244
+ IGNE,IGNE
245
+ CRNA,CRNA
246
+ EWIH,EWIH
247
+ VSRP,VSRP
248
+ RWPF,RWPF
249
+ ADSL,ADSL
250
+ ARRH,ARRH
251
+ GSTY,GSTY
252
+ SITI,SITI
253
+ IVKV,IVKV
254
+ HTPP,HTPP
255
+ QHKL,QHKL
256
+ LDNT,LDNT
257
+ KCWA,KCWA
258
+ KDKA,KDKA
259
+ DMII,DMII
260
+ YDMP,YDMP
261
+ KDNF,KDNF
262
+ RGQA,RGQA
263
+ QSIS,QSIS
264
+ NMSV,NMSV
265
+ METL,METL
266
+ VYAY,VYAY
267
+ VYCP,VYCP
268
+ FIVM,FIVM
269
+ CTRV,CTRV
270
+ FPDC,FPDC
271
+ GSKT,GSKT
272
+ CGFR,CGFR
273
+ TRSH,TRSH
274
+ VSAC,VSAC
275
+ WMEV,WMEV
276
+ SKYM,SKYM
277
+ IEMQ,IEMQ
278
+ KTSE,KTSE
279
+ HFIG,HFIG
280
+ PRTR,PRTR
281
+ WNGE,WNGE
282
+ DFVP,DFVP
283
+ KHDF,KHDF
284
+ WELM,WELM
285
+ KPRK,KPRK
286
+ SLGK,SLGK
287
+ MWKN,MWKN
288
+ NTDY,NTDY
289
+ NKIP,NKIP
290
+ GCQW,GCQW
291
+ CGNR,CGNR
292
+ MDCH,MDCH
293
+ FYEH,FYEH
294
+ NENL,NENL
295
+ TSMR,TSMR
296
+ ETVQ,ETVQ
297
+ NLYD,NLYD
298
+ QSLR,QSLR
299
+ CCNM,CCNM
300
+ TIKC,TIKC
301
+ RPFF,RPFF
302
+ SIVL,SIVL
303
+ ADRN,ADRN
304
+ RVSH,RVSH
305
+ VPTF,VPTF
306
+ IYTM,IYTM
307
+ ENRG,ENRG
308
+ PIRF,PIRF
309
+ SGHT,SGHT
310
+ KFMW,KFMW
311
+ VNVE,VNVE
312
+ DHQD,DHQD
313
+ RKQI,RKQI
314
+ PPFQ,PPFQ
315
+ CYAI,CYAI
316
+ VEGM,VEGM
317
+ PVKV,PVKV
318
+ CGEF,CGEF
319
+ EFET,EFET
320
+ LHGV,LHGV
321
+ CQNM,CQNM
322
+ HHPD,HHPD
323
+ GIDF,GIDF
324
+ DVSH,DVSH
325
+ EQNY,EQNY
326
+ YWVP,YWVP
327
+ SREV,SREV
328
+ CFVK,CFVK
329
+ CSCE,CSCE
330
+ KQFR,KQFR
331
+ GFMM,GFMM
332
+ SDGG,SDGG
333
+ PTVI,PTVI
334
+ KVCK,KVCK
335
+ GDQE,GDQE
336
+ GVNQ,GVNQ
337
+ EHHR,EHHR
338
+ ESYE,ESYE
339
+ MKLK,MKLK
340
+ CKWA,CKWA
341
+ IWGS,IWGS
342
+ RHTY,RHTY
343
+ EHGF,EHGF
344
+ VGFI,VGFI
345
+ YMQM,YMQM
346
+ IQDC,IQDC
347
+ TMFP,TMFP
348
+ YLLA,YLLA
349
+ FNGV,FNGV
350
+ CQWN,CQWN
351
+ IIFC,IIFC
352
+ CYFW,CYFW
353
+ NDCR,NDCR
354
+ WCLH,WCLH
355
+ ICRV,ICRV
356
+ LFVP,LFVP
357
+ MVGK,MVGK
358
+ PDMV,PDMV
359
+ FQWF,FQWF
360
+ LICH,LICH
361
+ WRPF,WRPF
362
+ VEDP,VEDP
363
+ MFAC,MFAC
364
+ WWNS,WWNS
365
+ CGMH,CGMH
366
+ NTVT,NTVT
367
+ YMLC,YMLC
368
+ VTIH,VTIH
369
+ MWML,MWML
370
+ IHIF,IHIF
371
+ RSVW,RSVW
372
+ KFCC,KFCC
373
+ IYSG,IYSG
374
+ IVVS,IVVS
375
+ FWYD,FWYD
376
+ LMVK,LMVK
377
+ LFAS,LFAS
378
+ SVHG,SVHG
379
+ GVDR,GVDR
380
+ VIHW,VIHW
381
+ GNQY,GNQY
382
+ MMVT,MMVT
383
+ DYPH,DYPH
384
+ WHYA,WHYA
385
+ FAIF,FAIF
386
+ MNLG,MNLG
387
+ SSLA,SSLA
388
+ KATL,KATL
389
+ RYCC,RYCC
390
+ HKCL,HKCL
391
+ LDTN,LDTN
392
+ MFIG,MFIG
393
+ HPVY,HPVY
394
+ GPTW,GPTW
395
+ LVMI,LVMI
396
+ KLPT,KLPT
397
+ RLNQ,RLNQ
398
+ IKKN,IKKN
399
+ AQKT,AQKT
400
+ LCEE,LCEE
401
+ FQCR,FQCR
402
+ QSEW,QSEW
403
+ VVWW,VVWW
404
+ TSND,TSND
405
+ SYQF,SYQF
406
+ ATLM,ATLM
407
+ WENR,WENR
408
+ KTVS,KTVS
409
+ GKTK,GKTK
410
+ FFVR,FFVR
411
+ CEEV,CEEV
412
+ MACL,MACL
413
+ MWWT,MWWT
414
+ ICGH,ICGH
415
+ IDQT,IDQT
416
+ QGCS,QGCS
417
+ IVIL,IVIL
418
+ NTKN,NTKN
419
+ DLPK,DLPK
420
+ FRMH,FRMH
421
+ LEWF,LEWF
422
+ RFID,RFID
423
+ PLYY,PLYY
424
+ HIIP,HIIP
425
+ CWMM,CWMM
426
+ HIQQ,HIQQ
427
+ ILEE,ILEE
428
+ YNNA,YNNA
429
+ FRFM,FRFM
430
+ RWTW,RWTW
431
+ NSQL,NSQL
432
+ NMKK,NMKK
433
+ LTSA,LTSA
434
+ ARTD,ARTD
435
+ WCYH,WCYH
436
+ CMWP,CMWP
437
+ PHCS,PHCS
438
+ SPLN,SPLN
439
+ APDW,APDW
440
+ KEGM,KEGM
441
+ SYEV,SYEV
442
+ YCPI,YCPI
443
+ TRVA,TRVA
444
+ RAMI,RAMI
445
+ DLEF,DLEF
446
+ QCTI,QCTI
447
+ YEPV,YEPV
448
+ CWAK,CWAK
449
+ DPDD,DPDD
450
+ TTCK,TTCK
451
+ NESI,NESI
452
+ DGVF,DGVF
453
+ DVYL,DVYL
454
+ NHPH,NHPH
455
+ KRKQ,KRKQ
456
+ FEFA,FEFA
457
+ LDAC,LDAC
458
+ YSQT,YSQT
459
+ TYFF,TYFF
460
+ ALVN,ALVN
461
+ VMVQ,VMVQ
462
+ LVVR,LVVR
463
+ RSWY,RSWY
464
+ QHKQ,QHKQ
465
+ LHEP,LHEP
466
+ ARSK,ARSK
467
+ FPVM,FPVM
468
+ MHVD,MHVD
469
+ RMQL,RMQL
470
+ FDEA,FDEA
471
+ YSIF,YSIF
472
+ MEWG,MEWG
473
+ QIII,QIII
474
+ LIAY,LIAY
475
+ LSPG,LSPG
476
+ LSFH,LSFH
477
+ VWAT,VWAT
478
+ WMKT,WMKT
479
+ SEWR,SEWR
480
+ VAMH,VAMH
481
+ CVGQ,CVGQ
482
+ DAAY,DAAY
483
+ CHYS,CHYS
484
+ IWMF,IWMF
485
+ SRCW,SRCW
486
+ ETMQ,ETMQ
487
+ QHPN,QHPN
488
+ MLEP,MLEP
489
+ YLWR,YLWR
490
+ IHQN,IHQN
491
+ KWPF,KWPF
492
+ SKWK,SKWK
493
+ YIRV,YIRV
494
+ AVKM,AVKM
495
+ SRER,SRER
496
+ KVTF,KVTF
497
+ MART,MART
498
+ HLPC,HLPC
499
+ CGNP,CGNP
500
+ LIHW,LIHW
501
+ WRHE,WRHE
502
+ PLPD,PLPD
503
+ AISH,AISH
504
+ LVGA,LVGA
505
+ GDAN,GDAN
506
+ KFDC,KFDC
507
+ TVYN,TVYN
508
+ EHMH,EHMH
509
+ KEHT,KEHT
510
+ IGSA,IGSA
511
+ HNMM,HNMM
512
+ LWNH,LWNH
513
+ PKYY,PKYY
514
+ CCIM,CCIM
515
+ TIDF,TIDF
516
+ GYTV,GYTV
517
+ LNIN,LNIN
518
+ WRAS,WRAS
519
+ LANP,LANP
520
+ KEYL,KEYL
521
+ PIAT,PIAT
522
+ HNYA,HNYA
523
+ ERTP,ERTP
524
+ TAHS,TAHS
525
+ CMPV,CMPV
526
+ DPHY,DPHY
527
+ HVCL,HVCL
528
+ NRKH,NRKH
529
+ RHCG,RHCG
530
+ AKRV,AKRV
531
+ TSYG,TSYG
532
+ WASS,WASS
533
+ NIDH,NIDH
534
+ WNVC,WNVC
535
+ TLSL,TLSL
536
+ NMWE,NMWE
537
+ LKIY,LKIY
538
+ CVNT,CVNT
539
+ EEAP,EEAP
540
+ MGNK,MGNK
541
+ KDQP,KDQP
542
+ DTRQ,DTRQ
543
+ QVIH,QVIH
544
+ PTKM,PTKM
545
+ FVNA,FVNA
546
+ MTTV,MTTV
547
+ TLDK,TLDK
548
+ NRDV,NRDV
549
+ WYSN,WYSN
550
+ KKEI,KKEI
551
+ VKTG,VKTG
552
+ RTMS,RTMS
553
+ GVTS,GVTS
554
+ QVFG,QVFG
555
+ FSID,FSID
556
+ MCGK,MCGK
557
+ KRYF,KRYF
558
+ FSCA,FSCA
559
+ ENWS,ENWS
560
+ KLPS,KLPS
561
+ WHPD,WHPD
562
+ WERR,WERR
563
+ GVPD,GVPD
564
+ QATI,QATI
565
+ QIRW,QIRW
566
+ VCQD,VCQD
567
+ NHNV,NHNV
568
+ KSPQ,KSPQ
569
+ MQQR,MQQR
570
+ WMYG,WMYG
571
+ YIGK,YIGK
572
+ WAAR,WAAR
573
+ ILEI,ILEI
574
+ EWFS,EWFS
575
+ YAGP,YAGP
576
+ WSWA,WSWA
577
+ WKMM,WKMM
578
+ FWTN,FWTN
579
+ NIRN,NIRN
580
+ EEHP,EEHP
581
+ GKPE,GKPE
582
+ KPLK,KPLK
583
+ EWPS,EWPS
584
+ KLRS,KLRS
585
+ EYWY,EYWY
586
+ CFGR,CFGR
587
+ NEGH,NEGH
588
+ GDRM,GDRM
589
+ VGTI,VGTI
590
+ YCGL,YCGL
591
+ EVMG,EVMG
592
+ PQNW,PQNW
593
+ QVQH,QVQH
594
+ NTPI,NTPI
595
+ VMVI,VMVI
596
+ RHAT,RHAT
597
+ RVCM,RVCM
598
+ LSIN,LSIN
599
+ ATKL,ATKL
600
+ PSNL,PSNL
601
+ WESL,WESL
602
+ QPDG,QPDG
603
+ YRFL,YRFL
604
+ RTPP,RTPP
605
+ IVKA,IVKA
606
+ VVPK,VVPK
607
+ YKPE,YKPE
608
+ FHFW,FHFW
609
+ TRLH,TRLH
610
+ ITSL,ITSL
611
+ GEIE,GEIE
612
+ FGGQ,FGGQ
613
+ SSPD,SSPD
614
+ TWHQ,TWHQ
615
+ GEDR,GEDR
616
+ HNEC,HNEC
617
+ PQNL,PQNL
618
+ ECNQ,ECNQ
619
+ TSEQ,TSEQ
620
+ RLVW,RLVW
621
+ RDNT,RDNT
622
+ QMAH,QMAH
623
+ NVFD,NVFD
624
+ YHYT,YHYT
625
+ PMNY,PMNY
626
+ MGML,MGML
627
+ AFYE,AFYE
628
+ EGLV,EGLV
629
+ PGLP,PGLP
630
+ YNFY,YNFY
631
+ KTAC,KTAC
632
+ YHMC,YHMC
633
+ IATA,IATA
634
+ LPDV,LPDV
635
+ LIAS,LIAS
636
+ DDLY,DDLY
637
+ MQNA,MQNA
638
+ HISP,HISP
639
+ VKIN,VKIN
640
+ LGNP,LGNP
641
+ SCGK,SCGK
642
+ MKED,MKED
643
+ VAWY,VAWY
644
+ HWPF,HWPF
645
+ LMPY,LMPY
646
+ QICW,QICW
647
+ CPCR,CPCR
648
+ RSYT,RSYT
649
+ NLYT,NLYT
650
+ KIHC,KIHC
651
+ LMCP,LMCP
652
+ VTEC,VTEC
653
+ WSWH,WSWH
654
+ SSYR,SSYR
655
+ FTQN,FTQN
656
+ NRML,NRML
657
+ SFFL,SFFL
658
+ QRTP,QRTP
659
+ VPII,VPII
660
+ MYEK,MYEK
661
+ TSTF,TSTF
662
+ PKYM,PKYM
663
+ IITN,IITN
664
+ HPVW,HPVW
665
+ VNLP,VNLP
666
+ SVKP,SVKP
667
+ CDKP,CDKP
668
+ FDWC,FDWC
669
+ AKSF,AKSF
670
+ LYYM,LYYM
671
+ EDWE,EDWE
672
+ QDET,QDET
673
+ YACA,YACA
674
+ QMWK,QMWK
675
+ IIMH,IIMH
676
+ QCTC,QCTC
677
+ PMHG,PMHG
678
+ ECKA,ECKA
679
+ KQYL,KQYL
680
+ SPCL,SPCL
681
+ MQEK,MQEK
682
+ LIAN,LIAN
683
+ SCDL,SCDL
684
+ NDFP,NDFP
685
+ QKGE,QKGE
686
+ HNDV,HNDV
687
+ HHHE,HHHE
688
+ AWNV,AWNV
689
+ SPSG,SPSG
690
+ LRIA,LRIA
691
+ IVID,IVID
692
+ YQYS,YQYS
693
+ DGTP,DGTP
694
+ DTTL,DTTL
695
+ HWDN,HWDN
696
+ DGNC,DGNC
697
+ QGEG,QGEG
698
+ SNPP,SNPP
699
+ KVSH,KVSH
700
+ WLWV,WLWV
701
+ LMMA,LMMA
702
+ MGCM,MGCM
703
+ LNKP,LNKP
704
+ TPIG,TPIG
705
+ WEWH,WEWH
706
+ QCVC,QCVC
707
+ RMKF,RMKF
708
+ MDDE,MDDE
709
+ PGPQ,PGPQ
710
+ DVTF,DVTF
711
+ DKCT,DKCT
712
+ ECRP,ECRP
713
+ CCGP,CCGP
714
+ RFVF,RFVF
715
+ LMNC,LMNC
716
+ TICP,TICP
717
+ AKWL,AKWL
718
+ MTPT,MTPT
719
+ NTVY,NTVY
720
+ KNRS,KNRS
721
+ CYIT,CYIT
722
+ IAGE,IAGE
723
+ ICKI,ICKI
724
+ GQAS,GQAS
725
+ RNFR,RNFR
726
+ SAQL,SAQL
727
+ ICMM,ICMM
728
+ NVED,NVED
729
+ RLSM,RLSM
730
+ INMV,INMV
731
+ FGRY,FGRY
732
+ RGRW,RGRW
733
+ CNNG,CNNG
734
+ YGES,YGES
735
+ YLCN,YLCN
736
+ PASK,PASK
737
+ LYIT,LYIT
738
+ PLPM,PLPM
739
+ GDMM,GDMM
740
+ TQER,TQER
741
+ MTHR,MTHR
742
+ NKCN,NKCN
743
+ NDQA,NDQA
744
+ FIWM,FIWM
745
+ ATER,ATER
746
+ ISVY,ISVY
747
+ HVTG,HVTG
748
+ RPSV,RPSV
749
+ SGET,SGET
750
+ TNGR,TNGR
751
+ SFLC,SFLC
752
+ PPRD,PPRD
753
+ AWKW,AWKW
754
+ DLCW,DLCW
755
+ RFFC,RFFC
756
+ VIQR,VIQR
757
+ KIPG,KIPG
758
+ SRPE,SRPE
759
+ LMMY,LMMY
760
+ YDTM,YDTM
761
+ MTIG,MTIG
762
+ GSHI,GSHI
763
+ NCRV,NCRV
764
+ KPDS,KPDS
765
+ CNNL,CNNL
766
+ MGVK,MGVK
767
+ GWED,GWED
768
+ TYHW,TYHW
769
+ NMVM,NMVM
770
+ EHFA,EHFA
771
+ SVCM,SVCM
772
+ THET,THET
773
+ QGYR,QGYR
774
+ NTDC,NTDC
775
+ ETST,ETST
776
+ MLSM,MLSM
777
+ YRRQ,YRRQ
778
+ VWEA,VWEA
779
+ PFYW,PFYW
780
+ TIEI,TIEI
781
+ ECCQ,ECCQ
782
+ SVDV,SVDV
783
+ RYKY,RYKY
784
+ AMDS,AMDS
785
+ DMMK,DMMK
786
+ AMKE,AMKE
787
+ GNHG,GNHG
788
+ DVPC,DVPC
789
+ IQVY,IQVY
790
+ FNDE,FNDE
791
+ HTLS,HTLS
792
+ DFIG,DFIG
793
+ PVMD,PVMD
794
+ RAMS,RAMS
795
+ IASS,IASS
796
+ DMKH,DMKH
797
+ KYPS,KYPS
798
+ PVQF,PVQF
799
+ SWPS,SWPS
800
+ EQCI,EQCI
801
+ CNMV,CNMV
802
+ MVVG,MVVG
803
+ SVDI,SVDI
804
+ RRRN,RRRN
805
+ AFSS,AFSS
806
+ PAQD,PAQD
807
+ QATV,QATV
808
+ GFCR,GFCR
809
+ FNYS,FNYS
810
+ VNWN,VNWN
811
+ PMIH,PMIH
812
+ VDYL,VDYL
813
+ VICL,VICL
814
+ KYGL,KYGL
815
+ FHIK,FHIK
816
+ VVNY,VVNY
817
+ TYYT,TYYT
818
+ DLPN,DLPN
819
+ RNGI,RNGI
820
+ FFGV,FFGV
821
+ EIQM,EIQM
822
+ HRNL,HRNL
823
+ FHGM,FHGM
824
+ PQVA,PQVA
825
+ WIVH,WIVH
826
+ PTWK,PTWK
827
+ MFHD,MFHD
828
+ FHST,FHST
829
+ NMHA,NMHA
830
+ KSPI,KSPI
831
+ DDTI,DDTI
832
+ DVQS,DVQS
833
+ KGAT,KGAT
834
+ RITF,RITF
835
+ PNPL,PNPL
836
+ VICS,VICS
837
+ EDMF,EDMF
838
+ ESID,ESID
839
+ VESG,VESG
840
+ EKEN,EKEN
841
+ KAYY,KAYY
842
+ LMPW,LMPW
843
+ ANLI,ANLI
844
+ CALR,CALR
845
+ KMGI,KMGI
846
+ NEAE,NEAE
847
+ QAIC,QAIC
848
+ NKQE,NKQE
849
+ IRTF,IRTF
850
+ TCQN,TCQN
851
+ RPAY,RPAY
852
+ GTHQ,GTHQ
853
+ HVCH,HVCH
854
+ GVQA,GVQA
855
+ KQTA,KQTA
856
+ NWSA,NWSA
857
+ GLSC,GLSC
858
+ SPRS,SPRS
859
+ EDHW,EDHW
860
+ YHMA,YHMA
861
+ QFIR,QFIR
862
+ LKYW,LKYW
863
+ VRMD,VRMD
864
+ TPNP,TPNP
865
+ LFCF,LFCF
866
+ CEVE,CEVE
867
+ MDDA,MDDA
868
+ HNYT,HNYT
869
+ WPVP,WPVP
870
+ TTAV,TTAV
871
+ RTRI,RTRI
872
+ AYAG,AYAG
873
+ DYDT,DYDT
874
+ GEQE,GEQE
875
+ EGSA,EGSA
876
+ LEMF,LEMF
877
+ IEFW,IEFW
878
+ PIQS,PIQS
879
+ EAMT,EAMT
880
+ QCNI,QCNI
881
+ QNGS,QNGS
882
+ CFNL,CFNL
883
+ QQSV,QQSV
884
+ TCPN,TCPN
885
+ SWSR,SWSR
886
+ RTYL,RTYL
887
+ YPAE,YPAE
888
+ GLMQ,GLMQ
889
+ QCHI,QCHI
890
+ IGIR,IGIR
891
+ ICPI,ICPI
892
+ CDFV,CDFV
893
+ PISQ,PISQ
894
+ QYQM,QYQM
895
+ DFHH,DFHH
896
+ FVHV,FVHV
897
+ RKSK,RKSK
898
+ STLE,STLE
899
+ MKLD,MKLD
900
+ AAYG,AAYG
901
+ LHNG,LHNG
902
+ RWCV,RWCV
903
+ QCWF,QCWF
904
+ CKSK,CKSK
905
+ QIGA,QIGA
906
+ SFKL,SFKL
907
+ PFLN,PFLN
908
+ TGPF,TGPF
909
+ HTEE,HTEE
910
+ PDFH,PDFH
911
+ IFIM,IFIM
912
+ ITCK,ITCK
913
+ SKKP,SKKP
914
+ QLMD,QLMD
915
+ NLYY,NLYY
916
+ EVHQ,EVHQ
917
+ TSLV,TSLV
918
+ GRHR,GRHR
919
+ HYQF,HYQF
920
+ LSGA,LSGA
921
+ IAPM,IAPM
922
+ GEIN,GEIN
923
+ GLRD,GLRD
924
+ TDNC,TDNC
925
+ NMTH,NMTH
926
+ DHLQ,DHLQ
927
+ GLQA,GLQA
928
+ FNEP,FNEP
929
+ KHSF,KHSF
930
+ FKKS,FKKS
931
+ DIMC,DIMC
932
+ HWVN,HWVN
933
+ RNFI,RNFI
934
+ HDEG,HDEG
935
+ WVVN,WVVN
936
+ EQKE,EQKE
937
+ SCFD,SCFD
938
+ ADRR,ADRR
939
+ TVAF,TVAF
940
+ RVDC,RVDC
941
+ YFNI,YFNI
942
+ NGMW,NGMW
943
+ EKFN,EKFN
944
+ NFII,NFII
945
+ SAFS,SAFS
946
+ NNNY,NNNY
947
+ PFGK,PFGK
948
+ SGMK,SGMK
949
+ SHTM,SHTM
950
+ HDNH,HDNH
951
+ EWWP,EWWP
952
+ PFKH,PFKH
953
+ WSYL,WSYL
954
+ IHSC,IHSC
955
+ YYED,YYED
956
+ HCMW,HCMW
957
+ RNKR,RNKR
958
+ CRNH,CRNH
959
+ HPVR,HPVR
960
+ TKLY,TKLY
961
+ NNSK,NNSK
962
+ SPIQ,SPIQ
963
+ KWQV,KWQV
964
+ INEP,INEP
965
+ TCQL,TCQL
966
+ VKRD,VKRD
967
+ VPHC,VPHC
968
+ GNAN,GNAN
969
+ IVNE,IVNE
970
+ GMYD,GMYD
971
+ VTLF,VTLF
972
+ INSN,INSN
973
+ EQKH,EQKH
974
+ IRKL,IRKL
975
+ APIY,APIY
976
+ SPSP,SPSP
977
+ TWKI,TWKI
978
+ TWTC,TWTC
979
+ SFSI,SFSI
980
+ CSHN,CSHN
981
+ RHCA,RHCA
982
+ WTEF,WTEF
983
+ SNCQ,SNCQ
984
+ SLME,SLME
985
+ GHEW,GHEW
986
+ EMPE,EMPE
987
+ ADFS,ADFS
988
+ GHHS,GHHS
989
+ HGMC,HGMC
990
+ LWWR,LWWR
991
+ NVSG,NVSG
992
+ IVKI,IVKI
993
+ YNHE,YNHE
994
+ CPGT,CPGT
995
+ LAKQ,LAKQ
996
+ KNRG,KNRG
997
+ DFGC,DFGC
998
+ WNAI,WNAI
999
+ SKCN,SKCN
1000
+ FYSC,FYSC
1001
+ CTIY,CTIY
1002
+ CSDP,CSDP
1003
+ CHTN,CHTN
1004
+ FWKT,FWKT
1005
+ DWQH,DWQH
1006
+ HWCS,HWCS
1007
+ WWLW,WWLW
1008
+ HADL,HADL
1009
+ RITQ,RITQ
1010
+ SREP,SREP
1011
+ VQIF,VQIF
1012
+ HMPQ,HMPQ
1013
+ TFFD,TFFD
1014
+ KGMF,KGMF
1015
+ DKVN,DKVN
1016
+ YWKS,YWKS
1017
+ LHLT,LHLT
1018
+ WHDD,WHDD
1019
+ WALN,WALN
1020
+ QSQH,QSQH
1021
+ DFIT,DFIT
1022
+ IGRW,IGRW
1023
+ SKVS,SKVS
1024
+ MDSD,MDSD
1025
+ HPER,HPER
1026
+ CRDW,CRDW
1027
+ WECW,WECW
1028
+ DWEI,DWEI
1029
+ DCDT,DCDT
1030
+ SWIH,SWIH
1031
+ VQGR,VQGR
1032
+ DPTP,DPTP
1033
+ PHDF,PHDF
1034
+ HHTM,HHTM
1035
+ DLYK,DLYK
1036
+ CLIQ,CLIQ
1037
+ APYR,APYR
1038
+ CPQV,CPQV
1039
+ RLYF,RLYF
1040
+ SSLI,SSLI
1041
+ FAEQ,FAEQ
1042
+ ELQC,ELQC
1043
+ ARFI,ARFI
1044
+ SQTV,SQTV
1045
+ SWHG,SWHG
1046
+ TNSD,TNSD
1047
+ HPCP,HPCP
1048
+ AFAN,AFAN
1049
+ KAHA,KAHA
1050
+ AMSN,AMSN
1051
+ VPMS,VPMS
1052
+ FRDK,FRDK
1053
+ STCQ,STCQ
1054
+ GIMS,GIMS
1055
+ ICRN,ICRN
1056
+ IYAG,IYAG
1057
+ VWSY,VWSY
1058
+ QAFG,QAFG
1059
+ NNCH,NNCH
1060
+ GGNN,GGNN
1061
+ HVMA,HVMA
1062
+ KGRI,KGRI
1063
+ LAFF,LAFF
1064
+ KCDN,KCDN
1065
+ RFVY,RFVY
1066
+ EYKW,EYKW
1067
+ ASIR,ASIR
1068
+ HPGV,HPGV
1069
+ AKDI,AKDI
1070
+ KDNH,KDNH
1071
+ KKLH,KKLH
1072
+ LCLD,LCLD
1073
+ MLTD,MLTD
1074
+ LAQY,LAQY
1075
+ EPFC,EPFC
1076
+ VRRL,VRRL
1077
+ FSNW,FSNW
1078
+ WDEK,WDEK
1079
+ YDDY,YDDY
1080
+ LNQE,LNQE
1081
+ NLHN,NLHN
1082
+ SLRQ,SLRQ
1083
+ AFAT,AFAT
1084
+ INVT,INVT
1085
+ YYNC,YYNC
1086
+ TMKL,TMKL
1087
+ AIKI,AIKI
1088
+ GALS,GALS
1089
+ IIML,IIML
1090
+ MQRK,MQRK
1091
+ KAWH,KAWH
1092
+ KWYI,KWYI
1093
+ MKIP,MKIP
1094
+ CLLI,CLLI
1095
+ HHCW,HHCW
1096
+ WWFP,WWFP
1097
+ ISTG,ISTG
1098
+ KYGD,KYGD
1099
+ RPCK,RPCK
1100
+ QMGG,QMGG
1101
+ DARI,DARI
1102
+ PMCE,PMCE
1103
+ VYIV,VYIV
1104
+ VHEY,VHEY
1105
+ QMCG,QMCG
1106
+ CLVL,CLVL
1107
+ EKCS,EKCS
1108
+ IKVP,IKVP
1109
+ DNRP,DNRP
1110
+ WLCW,WLCW
1111
+ RSNE,RSNE
1112
+ QKLT,QKLT
1113
+ TEND,TEND
1114
+ FMFY,FMFY
1115
+ QYKD,QYKD
1116
+ HQTR,HQTR
1117
+ ALVT,ALVT
1118
+ NGSN,NGSN
1119
+ EALD,EALD
1120
+ MPWC,MPWC
1121
+ GGMI,GGMI
1122
+ VIRS,VIRS
1123
+ LSSM,LSSM
1124
+ SYAY,SYAY
1125
+ CVVM,CVVM
1126
+ WTNV,WTNV
1127
+ TVFM,TVFM
1128
+ DECF,DECF
1129
+ RETI,RETI
1130
+ IMTD,IMTD
1131
+ MQDV,MQDV
1132
+ GNEP,GNEP
1133
+ RFMC,RFMC
1134
+ GGLH,GGLH
1135
+ PKFI,PKFI
1136
+ NGQG,NGQG
1137
+ TFKL,TFKL
1138
+ SRYN,SRYN
1139
+ FFSS,FFSS
1140
+ CKET,CKET
1141
+ DDCM,DDCM
1142
+ WRVY,WRVY
1143
+ RGTH,RGTH
1144
+ WWNN,WWNN
1145
+ NMGV,NMGV
1146
+ GFAE,GFAE
1147
+ MMPT,MMPT
1148
+ YIQY,YIQY
1149
+ TNSF,TNSF
1150
+ HHDM,HHDM
1151
+ DQMG,DQMG
1152
+ LHPN,LHPN
1153
+ PWEA,PWEA
1154
+ KGVA,KGVA
1155
+ DRRI,DRRI
1156
+ DFVM,DFVM
1157
+ WDVE,WDVE
1158
+ SQYV,SQYV
1159
+ GDIA,GDIA
1160
+ STMC,STMC
1161
+ CTLK,CTLK
1162
+ INYK,INYK
1163
+ DAVY,DAVY
1164
+ SAAE,SAAE
1165
+ NQPC,NQPC
1166
+ VGHL,VGHL
1167
+ TSVY,TSVY
1168
+ IDQL,IDQL
1169
+ ETDI,ETDI
1170
+ FCCD,FCCD
1171
+ SRGC,SRGC
1172
+ IHND,IHND
1173
+ HKYL,HKYL
1174
+ NQPS,NQPS
1175
+ YWAE,YWAE
1176
+ SDNQ,SDNQ
1177
+ ISEG,ISEG
1178
+ NATM,NATM
1179
+ NGCV,NGCV
1180
+ YNTC,YNTC
1181
+ FFKA,FFKA
1182
+ CTTS,CTTS
1183
+ EEAD,EEAD
1184
+ KHAQ,KHAQ
1185
+ STQQ,STQQ
1186
+ MMTS,MMTS
1187
+ HDHV,HDHV
1188
+ KGLQ,KGLQ
1189
+ PKAM,PKAM
1190
+ DQVY,DQVY
1191
+ QQHD,QQHD
1192
+ QQLR,QQLR
1193
+ YCEQ,YCEQ
1194
+ YYGY,YYGY
1195
+ RLRR,RLRR
1196
+ VRYS,VRYS
1197
+ ESHL,ESHL
1198
+ DYGS,DYGS
1199
+ ITKD,ITKD
1200
+ VTDQ,VTDQ
1201
+ GLYK,GLYK
1202
+ FNAL,FNAL
1203
+ HMHG,HMHG
1204
+ TDVT,TDVT
1205
+ VSGS,VSGS
1206
+ FSER,FSER
1207
+ MHAN,MHAN
1208
+ WYEG,WYEG
1209
+ PCRG,PCRG
1210
+ DIEM,DIEM
1211
+ ESNY,ESNY
1212
+ HYHD,HYHD
1213
+ VFCC,VFCC
1214
+ DSYW,DSYW
1215
+ AQQM,AQQM
1216
+ CYCW,CYCW
1217
+ GKWK,GKWK
1218
+ GRSD,GRSD
1219
+ QEKH,QEKH
1220
+ LPLF,LPLF
1221
+ YPYG,YPYG
1222
+ YDYS,YDYS
1223
+ TSHA,TSHA
1224
+ KMIK,KMIK
1225
+ ACWG,ACWG
1226
+ TGEQ,TGEQ
1227
+ PHKQ,PHKQ
1228
+ VGKH,VGKH
1229
+ GHQI,GHQI
1230
+ QPPG,QPPG
1231
+ TYEF,TYEF
1232
+ GHHY,GHHY
1233
+ SLSL,SLSL
1234
+ ARNE,ARNE
1235
+ DATD,DATD
1236
+ LAND,LAND
1237
+ QIQA,QIQA
1238
+ EHYP,EHYP
1239
+ CASL,CASL
1240
+ EPSF,EPSF
1241
+ VELW,VELW
1242
+ YERL,YERL
1243
+ DEKS,DEKS
1244
+ NQMW,NQMW
1245
+ WMHS,WMHS
1246
+ KNMV,KNMV
1247
+ QTIW,QTIW
1248
+ TQDA,TQDA
1249
+ PYGD,PYGD
1250
+ SFMV,SFMV
1251
+ GHCR,GHCR
1252
+ TFIR,TFIR
1253
+ YAEP,YAEP
1254
+ IYTY,IYTY
1255
+ FSEY,FSEY
1256
+ FINQ,FINQ
1257
+ CGKE,CGKE
1258
+ DDCQ,DDCQ
1259
+ FDEE,FDEE
1260
+ EMLR,EMLR
1261
+ CKFY,CKFY
1262
+ KRHR,KRHR
1263
+ RNVV,RNVV
1264
+ CVAP,CVAP
1265
+ GFQD,GFQD
1266
+ YSHN,YSHN
1267
+ FDRF,FDRF
1268
+ AEQN,AEQN
1269
+ VFEW,VFEW
1270
+ ETTF,ETTF
1271
+ MSME,MSME
1272
+ GNSW,GNSW
1273
+ YTGA,YTGA
1274
+ DYRD,DYRD
1275
+ CMQY,CMQY
1276
+ TCCW,TCCW
1277
+ MHEG,MHEG
1278
+ IDIK,IDIK
1279
+ MMKE,MMKE
1280
+ MVDE,MVDE
1281
+ PECI,PECI
1282
+ MLPV,MLPV
1283
+ AKEK,AKEK
1284
+ MTTY,MTTY
1285
+ NFHF,NFHF
1286
+ KVCV,KVCV
1287
+ PSSQ,PSSQ
1288
+ ILSQ,ILSQ
1289
+ RGCN,RGCN
1290
+ SLLK,SLLK
1291
+ TISM,TISM
1292
+ CKNM,CKNM
1293
+ HMFA,HMFA
1294
+ NEPY,NEPY
1295
+ FIWR,FIWR
1296
+ ACGP,ACGP
1297
+ CMIN,CMIN
1298
+ YQSH,YQSH
1299
+ TKRK,TKRK
1300
+ KKMP,KKMP
1301
+ HDGN,HDGN
1302
+ ANDY,ANDY
1303
+ HDWG,HDWG
1304
+ CGEE,CGEE
1305
+ FYLE,FYLE
1306
+ ICVE,ICVE
1307
+ KGWY,KGWY
1308
+ FVQW,FVQW
1309
+ QQVE,QQVE
1310
+ NCRD,NCRD
1311
+ DLFD,DLFD
1312
+ IVRM,IVRM
1313
+ FIWK,FIWK
1314
+ YLCY,YLCY
1315
+ SWMY,SWMY
1316
+ QWDN,QWDN
1317
+ DEGK,DEGK
1318
+ RPIK,RPIK
1319
+ AMFA,AMFA
1320
+ QTCQ,QTCQ
1321
+ NTKK,NTKK
1322
+ SCLE,SCLE
1323
+ LPME,LPME
1324
+ SEYY,SEYY
1325
+ WWRY,WWRY
1326
+ DDNS,DDNS
1327
+ EKNR,EKNR
1328
+ PKIQ,PKIQ
1329
+ LRTG,LRTG
1330
+ NANG,NANG
1331
+ SMNT,SMNT
1332
+ GHVQ,GHVQ
1333
+ KAPS,KAPS
1334
+ QHNC,QHNC
1335
+ RQAQ,RQAQ
1336
+ TAKP,TAKP
1337
+ GSCG,GSCG
1338
+ DYGD,DYGD
1339
+ GTSS,GTSS
1340
+ ALYP,ALYP
1341
+ TDER,TDER
1342
+ HYPN,HYPN
1343
+ ERAS,ERAS
1344
+ RMSP,RMSP
1345
+ CIMM,CIMM
1346
+ EYTT,EYTT
1347
+ YSEC,YSEC
1348
+ LLGP,LLGP
1349
+ PEAA,PEAA
1350
+ NQGY,NQGY
1351
+ HQPP,HQPP
1352
+ NRTP,NRTP
1353
+ LALL,LALL
1354
+ TPGN,TPGN
1355
+ LEIA,LEIA
1356
+ WEEA,WEEA
1357
+ GVAG,GVAG
1358
+ TCRA,TCRA
1359
+ KVDG,KVDG
1360
+ CMVY,CMVY
1361
+ CHMI,CHMI
1362
+ GGYF,GGYF
1363
+ MPTV,MPTV
1364
+ GNNK,GNNK
1365
+ GEIA,GEIA
1366
+ SHPY,SHPY
1367
+ PMRM,PMRM
1368
+ GRTR,GRTR
1369
+ PHEM,PHEM
1370
+ IYEL,IYEL
1371
+ FCDK,FCDK
1372
+ IYSF,IYSF
1373
+ NRDP,NRDP
1374
+ AHGE,AHGE
1375
+ MMYF,MMYF
1376
+ YKQE,YKQE
1377
+ GKFT,GKFT
1378
+ IENN,IENN
1379
+ HAHG,HAHG
1380
+ KVEH,KVEH
1381
+ VILD,VILD
1382
+ FSFE,FSFE
1383
+ LVHQ,LVHQ
1384
+ LICG,LICG
1385
+ WSEM,WSEM
1386
+ FHTK,FHTK
1387
+ WCSE,WCSE
1388
+ VDGM,VDGM
1389
+ VVYE,VVYE
1390
+ HPWQ,HPWQ
1391
+ PDAY,PDAY
1392
+ AGIA,AGIA
1393
+ HCGK,HCGK
1394
+ SAVI,SAVI
1395
+ EKHK,EKHK
1396
+ ICPA,ICPA
1397
+ RWFE,RWFE
1398
+ WHDE,WHDE
1399
+ QIRD,QIRD
1400
+ QKPK,QKPK
1401
+ TQCF,TQCF
1402
+ GVMW,GVMW
1403
+ QGGM,QGGM
1404
+ CREN,CREN
1405
+ ASAL,ASAL
1406
+ VRTV,VRTV
1407
+ DVAV,DVAV
1408
+ EFWC,EFWC
1409
+ EHMY,EHMY
1410
+ VWHM,VWHM
1411
+ FSLN,FSLN
1412
+ KYLA,KYLA
1413
+ QETP,QETP
1414
+ DDAG,DDAG
1415
+ ILAV,ILAV
1416
+ SSNL,SSNL
1417
+ CCNW,CCNW
1418
+ KPAI,KPAI
1419
+ WTVI,WTVI
1420
+ KQHA,KQHA
1421
+ QVRI,QVRI
1422
+ CRVT,CRVT
1423
+ PHQM,PHQM
1424
+ AEMH,AEMH
1425
+ HHYG,HHYG
1426
+ MSCY,MSCY
1427
+ EHLG,EHLG
1428
+ EPPA,EPPA
1429
+ AQIM,AQIM
1430
+ CNII,CNII
1431
+ NADT,NADT
1432
+ MHKK,MHKK
1433
+ EDFY,EDFY
1434
+ CPLP,CPLP
1435
+ LVDD,LVDD
1436
+ YFKR,YFKR
1437
+ DFID,DFID
1438
+ KKQF,KKQF
1439
+ VDNK,VDNK
1440
+ WWWQ,WWWQ
1441
+ VECI,VECI
1442
+ TIEG,TIEG
1443
+ TWRG,TWRG
1444
+ SANG,SANG
1445
+ YWFC,YWFC
1446
+ HCFS,HCFS
1447
+ SWFN,SWFN
1448
+ QQLK,QQLK
1449
+ HPYR,HPYR
1450
+ QDLI,QDLI
1451
+ HYTL,HYTL
1452
+ QSAM,QSAM
1453
+ LASK,LASK
1454
+ VDAW,VDAW
1455
+ LTGH,LTGH
1456
+ VCFH,VCFH
1457
+ QNGD,QNGD
1458
+ WMAR,WMAR
1459
+ CCLC,CCLC
1460
+ HSQV,HSQV
1461
+ KEWG,KEWG
1462
+ PWHA,PWHA
1463
+ HAPI,HAPI
1464
+ TAQW,TAQW
1465
+ PRNF,PRNF
1466
+ ADIG,ADIG
1467
+ DYLC,DYLC
1468
+ WYLN,WYLN
1469
+ IQEP,IQEP
1470
+ RNGR,RNGR
1471
+ QWQY,QWQY
1472
+ GIWI,GIWI
1473
+ MGTS,MGTS
1474
+ NRTE,NRTE
1475
+ MEEF,MEEF
1476
+ DNWK,DNWK
1477
+ HCMM,HCMM
1478
+ YATS,YATS
1479
+ QMQW,QMQW
1480
+ CWKE,CWKE
1481
+ NVSW,NVSW
1482
+ NIRC,NIRC
1483
+ RFVD,RFVD
1484
+ KGFK,KGFK
1485
+ SWTE,SWTE
1486
+ WMAY,WMAY
1487
+ NFLA,NFLA
1488
+ CHTP,CHTP
1489
+ HRMT,HRMT
1490
+ LDVQ,LDVQ
1491
+ HSLA,HSLA
1492
+ FSLW,FSLW
1493
+ RPNY,RPNY
1494
+ DCME,DCME
1495
+ RESY,RESY
1496
+ TKRF,TKRF
1497
+ TVHS,TVHS
1498
+ YPKI,YPKI
1499
+ VQYS,VQYS
1500
+ YHLW,YHLW
1501
+ EYQD,EYQD
1502
+ AQHV,AQHV
1503
+ VDQD,VDQD
1504
+ VEWT,VEWT
1505
+ NSLC,NSLC
1506
+ QFCW,QFCW
1507
+ AHCC,AHCC
1508
+ TSLH,TSLH
1509
+ ICSC,ICSC
1510
+ RGGL,RGGL
1511
+ QNEW,QNEW
1512
+ LAEL,LAEL
1513
+ CIDA,CIDA
1514
+ DKEP,DKEP
1515
+ NPYL,NPYL
1516
+ LPSA,LPSA
1517
+ NPKC,NPKC
1518
+ QVLV,QVLV
1519
+ DIWW,DIWW
1520
+ AQCR,AQCR
1521
+ ALNH,ALNH
1522
+ PQTE,PQTE
1523
+ QSHE,QSHE
1524
+ IDLS,IDLS
1525
+ WSKM,WSKM
1526
+ CHQI,CHQI
1527
+ INRL,INRL
1528
+ VIVA,VIVA
1529
+ VPYG,VPYG
1530
+ LFPL,LFPL
1531
+ SPDA,SPDA
1532
+ PTVC,PTVC
1533
+ HPVD,HPVD
1534
+ PNIY,PNIY
1535
+ FPYF,FPYF
1536
+ LSLS,LSLS
1537
+ PPSS,PPSS
1538
+ GSYS,GSYS
1539
+ ADCD,ADCD
1540
+ DTEI,DTEI
1541
+ VLMD,VLMD
1542
+ FWGY,FWGY
1543
+ HLWE,HLWE
1544
+ NCHC,NCHC
1545
+ WIQM,WIQM
1546
+ FAMD,FAMD
1547
+ CCKQ,CCKQ
1548
+ EMKY,EMKY
1549
+ HRMM,HRMM
1550
+ DRYC,DRYC
1551
+ LPHQ,LPHQ
1552
+ HPQC,HPQC
1553
+ TYMI,TYMI
1554
+ HPQM,HPQM
1555
+ CCWI,CCWI
1556
+ MPNQ,MPNQ
1557
+ IMIT,IMIT
1558
+ TWAR,TWAR
1559
+ HVVL,HVVL
1560
+ CSSG,CSSG
1561
+ PAEM,PAEM
1562
+ MHWD,MHWD
1563
+ QVYE,QVYE
1564
+ QCNV,QCNV
1565
+ RYDQ,RYDQ
1566
+ QILA,QILA
1567
+ ANYR,ANYR
1568
+ VNGE,VNGE
1569
+ WKPR,WKPR
1570
+ LVMT,LVMT
1571
+ HNIR,HNIR
1572
+ FFNS,FFNS
1573
+ GSRR,GSRR
1574
+ VTVI,VTVI
1575
+ PMML,PMML
1576
+ ECTM,ECTM
1577
+ CNHN,CNHN
1578
+ QSHA,QSHA
1579
+ AFNV,AFNV
1580
+ DKAD,DKAD
1581
+ WALC,WALC
1582
+ CNRV,CNRV
1583
+ MFKT,MFKT
1584
+ DIVY,DIVY
1585
+ VPES,VPES
1586
+ PLEW,PLEW
1587
+ HSCT,HSCT
1588
+ SHGA,SHGA
1589
+ TTET,TTET
1590
+ PIQW,PIQW
1591
+ DLTL,DLTL
1592
+ VPLE,VPLE
1593
+ VLTG,VLTG
1594
+ KCNM,KCNM
1595
+ IDCY,IDCY
1596
+ DSPK,DSPK
1597
+ PDKC,PDKC
1598
+ WTCY,WTCY
1599
+ RIMR,RIMR
1600
+ VNPA,VNPA
1601
+ FVDP,FVDP
1602
+ RPDP,RPDP
1603
+ TLEF,TLEF
1604
+ DLNF,DLNF
1605
+ CVES,CVES
1606
+ WKWF,WKWF
1607
+ PNMI,PNMI
1608
+ DIVA,DIVA
1609
+ GKWC,GKWC
1610
+ HDYC,HDYC
1611
+ HAMN,HAMN
1612
+ TEPW,TEPW
1613
+ VYHI,VYHI
1614
+ MTHE,MTHE
1615
+ IGYD,IGYD
1616
+ PARW,PARW
1617
+ FQCG,FQCG
1618
+ MLSF,MLSF
1619
+ NGFG,NGFG
1620
+ YDMF,YDMF
1621
+ ATVW,ATVW
1622
+ PFHY,PFHY
1623
+ QYTG,QYTG
1624
+ KISE,KISE
1625
+ WPSR,WPSR
1626
+ LLIN,LLIN
1627
+ CMIF,CMIF
1628
+ NKKT,NKKT
1629
+ PLYW,PLYW
1630
+ QQFN,QQFN
1631
+ SQMQ,SQMQ
1632
+ AWQH,AWQH
1633
+ RDCC,RDCC
1634
+ YRWA,YRWA
1635
+ PGCG,PGCG
1636
+ STHQ,STHQ
1637
+ EQFS,EQFS
1638
+ KWVH,KWVH
1639
+ PDVA,PDVA
1640
+ LCVI,LCVI
1641
+ LTIN,LTIN
1642
+ VYEM,VYEM
1643
+ DYRG,DYRG
1644
+ GSRP,GSRP
1645
+ IYFQ,IYFQ
1646
+ LYNA,LYNA
1647
+ QFWL,QFWL
1648
+ LTAF,LTAF
1649
+ KEDA,KEDA
1650
+ HVPF,HVPF
1651
+ GQSM,GQSM
1652
+ YRHQ,YRHQ
1653
+ QDCH,QDCH
1654
+ PEMD,PEMD
1655
+ QPTI,QPTI
1656
+ NGNG,NGNG
1657
+ NQNL,NQNL
1658
+ KYEH,KYEH
1659
+ TQFF,TQFF
1660
+ FPFA,FPFA
1661
+ AQLP,AQLP
1662
+ CTHE,CTHE
1663
+ VAVM,VAVM
1664
+ KPLY,KPLY
1665
+ SSLF,SSLF
1666
+ MIRW,MIRW
1667
+ RAHP,RAHP
1668
+ VREH,VREH
1669
+ NAMG,NAMG
1670
+ FKKQ,FKKQ
1671
+ KQRA,KQRA
1672
+ LIFD,LIFD
1673
+ YRPE,YRPE
1674
+ TFAD,TFAD
1675
+ QIAA,QIAA
1676
+ INFW,INFW
1677
+ CRKA,CRKA
1678
+ GDAV,GDAV
1679
+ HETY,HETY
1680
+ GVGT,GVGT
1681
+ NDIP,NDIP
1682
+ CRSM,CRSM
1683
+ HTGT,HTGT
1684
+ STMK,STMK
1685
+ EQGS,EQGS
1686
+ TMQM,TMQM
1687
+ ENKD,ENKD
1688
+ KAFL,KAFL
1689
+ LRLK,LRLK
1690
+ VSPT,VSPT
1691
+ AHLV,AHLV
1692
+ CYHE,CYHE
1693
+ RLWK,RLWK
1694
+ LYWK,LYWK
1695
+ SATM,SATM
1696
+ PMSW,PMSW
1697
+ NTYF,NTYF
1698
+ EDHE,EDHE
1699
+ MMIQ,MMIQ
1700
+ KGPY,KGPY
1701
+ TCRP,TCRP
1702
+ TSKR,TSKR
1703
+ EVDV,EVDV
1704
+ QWRH,QWRH
1705
+ TYTI,TYTI
1706
+ NRFT,NRFT
1707
+ FRPY,FRPY
1708
+ LLPI,LLPI
1709
+ FGPP,FGPP
1710
+ PHYI,PHYI
1711
+ CMCK,CMCK
1712
+ NMPI,NMPI
1713
+ GQFR,GQFR
1714
+ NQER,NQER
1715
+ LMKS,LMKS
1716
+ ENCI,ENCI
1717
+ TNNV,TNNV
1718
+ GRAM,GRAM
1719
+ AAMV,AAMV
1720
+ IFYL,IFYL
1721
+ EKIL,EKIL
1722
+ QMAP,QMAP
1723
+ RYGL,RYGL
1724
+ NQAI,NQAI
1725
+ LYSR,LYSR
1726
+ DRES,DRES
1727
+ PTHS,PTHS
1728
+ TLDQ,TLDQ
1729
+ KEYH,KEYH
1730
+ HDWC,HDWC
1731
+ RPQT,RPQT
1732
+ PMNG,PMNG
1733
+ GWVS,GWVS
1734
+ PQEW,PQEW
1735
+ QRRP,QRRP
1736
+ CEIQ,CEIQ
1737
+ WNKY,WNKY
1738
+ FRVE,FRVE
1739
+ PTYH,PTYH
1740
+ LCRD,LCRD
1741
+ MEWQ,MEWQ
1742
+ QMNY,QMNY
1743
+ NFDK,NFDK
1744
+ QRKH,QRKH
1745
+ YLSR,YLSR
1746
+ MVRG,MVRG
1747
+ IQQH,IQQH
1748
+ RMEE,RMEE
1749
+ HYDN,HYDN
1750
+ KMSH,KMSH
1751
+ LWEL,LWEL
1752
+ WYLC,WYLC
1753
+ NGLM,NGLM
1754
+ QWPS,QWPS
1755
+ WFMM,WFMM
1756
+ KFEY,KFEY
1757
+ CQMT,CQMT
1758
+ GHFL,GHFL
1759
+ TERR,TERR
1760
+ VFGH,VFGH
1761
+ NRRF,NRRF
1762
+ RMRM,RMRM
1763
+ ITAK,ITAK
1764
+ WQWH,WQWH
1765
+ MQLP,MQLP
1766
+ LTSP,LTSP
1767
+ VEYQ,VEYQ
1768
+ YYHP,YYHP
1769
+ KDHH,KDHH
1770
+ PETW,PETW
1771
+ HDEA,HDEA
1772
+ WSMV,WSMV
1773
+ YITL,YITL
1774
+ FLQY,FLQY
1775
+ LLDM,LLDM
1776
+ IEHF,IEHF
1777
+ KVWR,KVWR
1778
+ THAR,THAR
1779
+ VREK,VREK
1780
+ SARM,SARM
1781
+ FSKI,FSKI
1782
+ NCTQ,NCTQ
1783
+ VEGT,VEGT
1784
+ SIKV,SIKV
1785
+ WSTR,WSTR
1786
+ NTQD,NTQD
1787
+ DNDN,DNDN
1788
+ SGFT,SGFT
1789
+ YRLW,YRLW
1790
+ KNWK,KNWK
1791
+ AVKA,AVKA
1792
+ APIK,APIK
1793
+ YDSL,YDSL
1794
+ TNHT,TNHT
1795
+ HYKY,HYKY
1796
+ SDNK,SDNK
1797
+ GAMQ,GAMQ
1798
+ PWIH,PWIH
1799
+ SYQG,SYQG
1800
+ CPDS,CPDS
1801
+ TGFQ,TGFQ
1802
+ YLNI,YLNI
1803
+ NVFM,NVFM
1804
+ TIDN,TIDN
1805
+ ATTP,ATTP
1806
+ GGGT,GGGT
1807
+ CHAG,CHAG
1808
+ NMNG,NMNG
1809
+ QLRT,QLRT
1810
+ LKWC,LKWC
1811
+ CDDQ,CDDQ
1812
+ YHGR,YHGR
1813
+ LEPK,LEPK
1814
+ QANG,QANG
1815
+ IDCV,IDCV
1816
+ FYYH,FYYH
1817
+ MHDH,MHDH
1818
+ YSQC,YSQC
1819
+ EYFL,EYFL
1820
+ HLIS,HLIS
1821
+ IQDK,IQDK
1822
+ HSIE,HSIE
1823
+ EKFK,EKFK
1824
+ EGKE,EGKE
1825
+ TMNI,TMNI
1826
+ SSQA,SSQA
1827
+ SASM,SASM
1828
+ ECIL,ECIL
1829
+ VRAD,VRAD
1830
+ EQLS,EQLS
1831
+ WKPD,WKPD
1832
+ HKWC,HKWC
1833
+ RQDW,RQDW
1834
+ YEFT,YEFT
1835
+ RPMP,RPMP
1836
+ FLCK,FLCK
1837
+ MMGK,MMGK
1838
+ FAYK,FAYK
1839
+ TFHN,TFHN
1840
+ PYFA,PYFA
1841
+ QSDI,QSDI
1842
+ MRQR,MRQR
1843
+ NGVY,NGVY
1844
+ KYLC,KYLC
1845
+ NAFE,NAFE
1846
+ HMYC,HMYC
1847
+ YDQM,YDQM
1848
+ WKMF,WKMF
1849
+ SRFI,SRFI
1850
+ YRQK,YRQK
1851
+ GIMY,GIMY
1852
+ GYMS,GYMS
1853
+ FCME,FCME
1854
+ IRFT,IRFT
1855
+ NWHA,NWHA
1856
+ MLWS,MLWS
1857
+ RHFG,RHFG
1858
+ WIMA,WIMA
1859
+ PAEF,PAEF
1860
+ CHTE,CHTE
1861
+ MEIK,MEIK
1862
+ AILP,AILP
1863
+ PQSC,PQSC
1864
+ SAHG,SAHG
1865
+ LREP,LREP
1866
+ PFQN,PFQN
1867
+ SCYA,SCYA
1868
+ PSCG,PSCG
1869
+ SACP,SACP
1870
+ AVEV,AVEV
1871
+ ETED,ETED
1872
+ YIRN,YIRN
1873
+ AHTQ,AHTQ
1874
+ ATSP,ATSP
1875
+ QHRR,QHRR
1876
+ IVYE,IVYE
1877
+ IRCV,IRCV
1878
+ AWKE,AWKE
1879
+ KSSH,KSSH
1880
+ HWFY,HWFY
1881
+ RSVM,RSVM
1882
+ RCHT,RCHT
1883
+ YKIK,YKIK
1884
+ YERF,YERF
1885
+ YCPT,YCPT
1886
+ IRPV,IRPV
1887
+ CDWV,CDWV
1888
+ CTFA,CTFA
1889
+ GIPV,GIPV
1890
+ RWNY,RWNY
1891
+ YPKQ,YPKQ
1892
+ WAPI,WAPI
1893
+ TTCV,TTCV
1894
+ HNPW,HNPW
1895
+ IVYQ,IVYQ
1896
+ DSDC,DSDC
1897
+ TTDF,TTDF
1898
+ WTTV,WTTV
1899
+ SYPT,SYPT
1900
+ EVER,EVER
1901
+ NGTR,NGTR
1902
+ TTWV,TTWV
1903
+ CKRQ,CKRQ
1904
+ ENEI,ENEI
1905
+ KANP,KANP
1906
+ HDVL,HDVL
1907
+ ENWI,ENWI
1908
+ ENRS,ENRS
1909
+ RAEK,RAEK
1910
+ RSMM,RSMM
1911
+ KNCQ,KNCQ
1912
+ MCFD,MCFD
1913
+ SCEE,SCEE
1914
+ PFKV,PFKV
1915
+ DVTW,DVTW
1916
+ HSHA,HSHA
1917
+ MVGG,MVGG
1918
+ MLFY,MLFY
1919
+ SQLI,SQLI
1920
+ KCTN,KCTN
1921
+ ADDD,ADDD
1922
+ QAFQ,QAFQ
1923
+ VMLY,VMLY
1924
+ LWNE,LWNE
1925
+ CNAE,CNAE
1926
+ MMAN,MMAN
1927
+ AEQT,AEQT
1928
+ TWQQ,TWQQ
1929
+ CRVV,CRVV
1930
+ HIWA,HIWA
1931
+ NSRN,NSRN
1932
+ CETD,CETD
1933
+ HDVQ,HDVQ
1934
+ CERW,CERW
1935
+ EIWE,EIWE
1936
+ PRHF,PRHF
1937
+ RGWE,RGWE
1938
+ WNTL,WNTL
1939
+ DWSH,DWSH
1940
+ EFYV,EFYV
1941
+ WWAR,WWAR
1942
+ WLEA,WLEA
1943
+ ECRM,ECRM
1944
+ VQAI,VQAI
1945
+ MWSY,MWSY
1946
+ RHWS,RHWS
1947
+ YEEQ,YEEQ
1948
+ CLAV,CLAV
1949
+ VHGN,VHGN
1950
+ WSSQ,WSSQ
1951
+ QCRE,QCRE
1952
+ VLKI,VLKI
1953
+ VQIH,VQIH
1954
+ CYGR,CYGR
1955
+ IRPT,IRPT
1956
+ LYQQ,LYQQ
1957
+ RRFV,RRFV
1958
+ CLYP,CLYP
1959
+ SHYK,SHYK
1960
+ YVNE,YVNE
1961
+ HVTC,HVTC
1962
+ MDNG,MDNG
1963
+ MITR,MITR
1964
+ FNWP,FNWP
1965
+ HWDR,HWDR
1966
+ TIWW,TIWW
1967
+ EFHR,EFHR
1968
+ WEIP,WEIP
1969
+ WMVF,WMVF
1970
+ TITV,TITV
1971
+ CGGR,CGGR
1972
+ AHLH,AHLH
1973
+ RAFC,RAFC
1974
+ TEQF,TEQF
1975
+ CDHT,CDHT
1976
+ NSLS,NSLS
1977
+ MFYG,MFYG
1978
+ EIHC,EIHC
1979
+ RFII,RFII
1980
+ EIWP,EIWP
1981
+ YWWL,YWWL
1982
+ GIVG,GIVG
1983
+ WNTE,WNTE
1984
+ ETWT,ETWT
1985
+ KYVE,KYVE
1986
+ DSNK,DSNK
1987
+ CGCS,CGCS
1988
+ WTND,WTND
1989
+ TKML,TKML
1990
+ QHLM,QHLM
1991
+ HAMG,HAMG
1992
+ APSY,APSY
1993
+ LGVQ,LGVQ
1994
+ DSIL,DSIL
1995
+ LLEW,LLEW
1996
+ RDSQ,RDSQ
1997
+ NYFR,NYFR
1998
+ LWIK,LWIK
1999
+ MHME,MHME
2000
+ WRFL,WRFL
2001
+ DLEN,DLEN
2002
+ LSWQ,LSWQ
2003
+ WDLC,WDLC
2004
+ EAGH,EAGH
2005
+ RLRT,RLRT
2006
+ WFHL,WFHL
2007
+ VLYT,VLYT
2008
+ AIFA,AIFA
2009
+ MAEL,MAEL
2010
+ QPCF,QPCF
2011
+ NCCY,NCCY
2012
+ DRKQ,DRKQ
2013
+ GMQY,GMQY
2014
+ PYHE,PYHE
2015
+ NKQM,NKQM
2016
+ IMQT,IMQT
2017
+ LMAV,LMAV
2018
+ CIKV,CIKV
2019
+ SKLC,SKLC
2020
+ SQYW,SQYW
2021
+ WPTL,WPTL
2022
+ RIAK,RIAK
2023
+ PEKT,PEKT
2024
+ FTSS,FTSS
2025
+ HMGP,HMGP
2026
+ HAHK,HAHK
2027
+ NQKS,NQKS
2028
+ FDIG,FDIG
2029
+ WTWV,WTWV
2030
+ RQYK,RQYK
2031
+ PAGF,PAGF
2032
+ MAIL,MAIL
2033
+ AGTK,AGTK
2034
+ WYEC,WYEC
2035
+ AQTR,AQTR
2036
+ HPPF,HPPF
2037
+ QIDP,QIDP
2038
+ PTGW,PTGW
2039
+ LYHI,LYHI
2040
+ LDII,LDII
2041
+ HVMD,HVMD
2042
+ HQWP,HQWP
2043
+ WGYN,WGYN
2044
+ THGQ,THGQ
2045
+ DSQW,DSQW
2046
+ NFLD,NFLD
2047
+ CIVI,CIVI
2048
+ MKQR,MKQR
2049
+ EYYR,EYYR
2050
+ KQLY,KQLY
2051
+ PQVK,PQVK
2052
+ VIQW,VIQW
2053
+ WSRA,WSRA
2054
+ YDWH,YDWH
2055
+ QVIC,QVIC
2056
+ RCQE,RCQE
2057
+ GKKL,GKKL
2058
+ CDFW,CDFW
2059
+ HKCP,HKCP
2060
+ HTWG,HTWG
2061
+ HWSW,HWSW
2062
+ LFWT,LFWT
2063
+ DNSY,DNSY
2064
+ RISF,RISF
2065
+ ELCT,ELCT
2066
+ NQIV,NQIV
2067
+ LYMV,LYMV
2068
+ YSAS,YSAS
2069
+ WAKL,WAKL
2070
+ NSWN,NSWN
2071
+ AYPG,AYPG
2072
+ DCMT,DCMT
2073
+ QDHH,QDHH
2074
+ HNLH,HNLH
2075
+ NFCE,NFCE
2076
+ QQEC,QQEC
2077
+ VWHK,VWHK
2078
+ KHMK,KHMK
2079
+ GTQI,GTQI
2080
+ NPKL,NPKL
2081
+ YWVE,YWVE
2082
+ PLWT,PLWT
2083
+ QIKR,QIKR
2084
+ CFCQ,CFCQ
2085
+ VAKP,VAKP
2086
+ WKWW,WKWW
2087
+ CISE,CISE
2088
+ SMPI,SMPI
2089
+ RGTN,RGTN
2090
+ RIPA,RIPA
2091
+ LEQE,LEQE
2092
+ DCVQ,DCVQ
2093
+ VTMD,VTMD
2094
+ RTLY,RTLY
2095
+ HTNY,HTNY
2096
+ SASD,SASD
2097
+ KHTN,KHTN
2098
+ SLAH,SLAH
2099
+ MSKR,MSKR
2100
+ AQAI,AQAI
2101
+ GTYN,GTYN
2102
+ MKCK,MKCK
2103
+ PYNN,PYNN
2104
+ HCQV,HCQV
2105
+ PILK,PILK
2106
+ DADI,DADI
2107
+ ERPM,ERPM
2108
+ LEPE,LEPE
2109
+ YYWW,YYWW
2110
+ YEHE,YEHE
2111
+ MYTM,MYTM
2112
+ PPAP,PPAP
2113
+ CMSN,CMSN
2114
+ WPDT,WPDT
2115
+ YIPT,YIPT
2116
+ GHWP,GHWP
2117
+ CAGT,CAGT
2118
+ IRCG,IRCG
2119
+ VACC,VACC
2120
+ FFQY,FFQY
2121
+ INSK,INSK
2122
+ NWIP,NWIP
2123
+ GERH,GERH
2124
+ SDDG,SDDG
2125
+ MARL,MARL
2126
+ CTAD,CTAD
2127
+ CATS,CATS
2128
+ WVCY,WVCY
2129
+ TSSK,TSSK
2130
+ PKVD,PKVD
2131
+ FAWE,FAWE
2132
+ LSKT,LSKT
2133
+ EHAM,EHAM
2134
+ LSRK,LSRK
2135
+ CFKP,CFKP
2136
+ MHRY,MHRY
2137
+ QQRF,QQRF
2138
+ LWQS,LWQS
2139
+ FFSN,FFSN
2140
+ EEDD,EEDD
2141
+ KQMR,KQMR
2142
+ RPPK,RPPK
2143
+ PPSV,PPSV
2144
+ ETEA,ETEA
2145
+ GALG,GALG
2146
+ DSPN,DSPN
2147
+ PSEH,PSEH
2148
+ SIMA,SIMA
2149
+ NRQH,NRQH
2150
+ SFSA,SFSA
2151
+ EIWD,EIWD
2152
+ LCPI,LCPI
2153
+ YPVE,YPVE
2154
+ SKFG,SKFG
2155
+ THHE,THHE
2156
+ NLGT,NLGT
2157
+ VTWD,VTWD
2158
+ RVAC,RVAC
2159
+ CSFY,CSFY
2160
+ QEVY,QEVY
2161
+ TRQL,TRQL
2162
+ SGHY,SGHY
2163
+ GTNS,GTNS
2164
+ HQVK,HQVK
2165
+ ISWH,ISWH
2166
+ VIGC,VIGC
2167
+ DWYH,DWYH
2168
+ LPQE,LPQE
2169
+ PFCQ,PFCQ
2170
+ IFNF,IFNF
2171
+ QHYH,QHYH
2172
+ WRDP,WRDP
2173
+ WGVV,WGVV
2174
+ SHPK,SHPK
2175
+ RTRW,RTRW
2176
+ GCTQ,GCTQ
2177
+ RKHM,RKHM
2178
+ WHIA,WHIA
2179
+ ARVC,ARVC
2180
+ TCYV,TCYV
2181
+ NEWQ,NEWQ
2182
+ RHPM,RHPM
2183
+ QDWV,QDWV
2184
+ YQST,YQST
2185
+ LREE,LREE
2186
+ QSRY,QSRY
2187
+ RIRE,RIRE
2188
+ FGMR,FGMR
2189
+ IHIP,IHIP
2190
+ VWCS,VWCS
2191
+ LIYN,LIYN
2192
+ MDDQ,MDDQ
2193
+ CWND,CWND
2194
+ TKQM,TKQM
2195
+ THQI,THQI
2196
+ VQAN,VQAN
2197
+ WVLA,WVLA
2198
+ WMGV,WMGV
2199
+ CWFF,CWFF
2200
+ DFGH,DFGH
2201
+ DWHK,DWHK
2202
+ EYLG,EYLG
2203
+ TSWQ,TSWQ
2204
+ DAVT,DAVT
2205
+ YLSS,YLSS
2206
+ PFLH,PFLH
2207
+ NWTC,NWTC
2208
+ LHFH,LHFH
2209
+ RWDA,RWDA
2210
+ IRAN,IRAN
2211
+ LRSY,LRSY
2212
+ MEHV,MEHV
2213
+ GSTQ,GSTQ
2214
+ HYTF,HYTF
2215
+ TVSC,TVSC
2216
+ IDAI,IDAI
2217
+ EVMK,EVMK
2218
+ TWMD,TWMD
2219
+ MDPA,MDPA
2220
+ RVHY,RVHY
2221
+ CDHM,CDHM
2222
+ MTSL,MTSL
2223
+ DIIH,DIIH
2224
+ ATCF,ATCF
2225
+ WAHF,WAHF
2226
+ MIHQ,MIHQ
2227
+ GYQE,GYQE
2228
+ CQDE,CQDE
2229
+ HSPC,HSPC
2230
+ CSKH,CSKH
2231
+ PWCM,PWCM
2232
+ YIIE,YIIE
2233
+ IDGG,IDGG
2234
+ MKTE,MKTE
2235
+ TDEP,TDEP
2236
+ WTVK,WTVK
2237
+ WASA,WASA
2238
+ CEGA,CEGA
2239
+ CVLH,CVLH
2240
+ ETAA,ETAA
2241
+ DKYP,DKYP
2242
+ THGL,THGL
2243
+ AYAW,AYAW
2244
+ WESS,WESS
2245
+ IIIQ,IIIQ
2246
+ QPQR,QPQR
2247
+ TCHM,TCHM
2248
+ SEAQ,SEAQ
2249
+ TVAT,TVAT
2250
+ CSFT,CSFT
2251
+ KPWY,KPWY
2252
+ TAAE,TAAE
2253
+ HCKV,HCKV
2254
+ HQGG,HQGG
2255
+ AVAV,AVAV
2256
+ SEGI,SEGI
2257
+ PHRP,PHRP
2258
+ FFKV,FFKV
2259
+ RFEI,RFEI
2260
+ SKAR,SKAR
2261
+ WCMR,WCMR
2262
+ LMYG,LMYG
2263
+ VGYK,VGYK
2264
+ HFME,HFME
2265
+ CNNV,CNNV
2266
+ DGIS,DGIS
2267
+ QEAC,QEAC
2268
+ KKMF,KKMF
2269
+ DGQD,DGQD
2270
+ GLDP,GLDP
2271
+ ELAE,ELAE
2272
+ NYYC,NYYC
2273
+ PTSI,PTSI
2274
+ HYVR,HYVR
2275
+ AGKM,AGKM
2276
+ YGEG,YGEG
2277
+ AGRQ,AGRQ
2278
+ PMKC,PMKC
2279
+ KWAL,KWAL
2280
+ IIEG,IIEG
2281
+ RSFF,RSFF
2282
+ DAYV,DAYV
2283
+ CAYW,CAYW
2284
+ FNAS,FNAS
2285
+ DMPC,DMPC
2286
+ SFEP,SFEP
2287
+ YFHF,YFHF
2288
+ DEAC,DEAC
2289
+ MHIA,MHIA
2290
+ FWPH,FWPH
2291
+ GTQA,GTQA
2292
+ YRMQ,YRMQ
2293
+ HGMW,HGMW
2294
+ ESAM,ESAM
2295
+ WSWY,WSWY
2296
+ FNDC,FNDC
2297
+ HKES,HKES
2298
+ EHFF,EHFF
2299
+ PHNT,PHNT
2300
+ EMVC,EMVC
2301
+ PPFM,PPFM
2302
+ CEWC,CEWC
2303
+ ECIF,ECIF
2304
+ EPEI,EPEI
2305
+ PERF,PERF
2306
+ HVFL,HVFL
2307
+ DLND,DLND
2308
+ HTFC,HTFC
2309
+ AEDL,AEDL
2310
+ PPFG,PPFG
2311
+ WPHP,WPHP
2312
+ KSVL,KSVL
2313
+ MEEK,MEEK
2314
+ TAHI,TAHI
2315
+ LILA,LILA
2316
+ VRGP,VRGP
2317
+ DQPM,DQPM
2318
+ HKDS,HKDS
2319
+ GWNP,GWNP
2320
+ TNYH,TNYH
2321
+ PKEQ,PKEQ
2322
+ RYHA,RYHA
2323
+ FHHI,FHHI
2324
+ WSQS,WSQS
2325
+ PPEL,PPEL
2326
+ CCKG,CCKG
2327
+ PLPI,PLPI
2328
+ CKVK,CKVK
2329
+ IVWS,IVWS
2330
+ WQVK,WQVK
2331
+ YQKA,YQKA
2332
+ GWVH,GWVH
2333
+ PGQI,PGQI
2334
+ YICL,YICL
2335
+ NSNA,NSNA
2336
+ SLMP,SLMP
2337
+ LKNR,LKNR
2338
+ NVRQ,NVRQ
2339
+ TLAC,TLAC
2340
+ EWPH,EWPH
2341
+ DCHI,DCHI
2342
+ CCSK,CCSK
2343
+ IDMV,IDMV
2344
+ CYDP,CYDP
2345
+ INLI,INLI
2346
+ RHVE,RHVE
2347
+ MEYY,MEYY
2348
+ DTEL,DTEL
2349
+ MDSE,MDSE
2350
+ EKAI,EKAI
2351
+ ICYL,ICYL
2352
+ YIWR,YIWR
2353
+ PHCI,PHCI
2354
+ QQCF,QQCF
2355
+ GGTV,GGTV
2356
+ KTDV,KTDV
2357
+ KCMY,KCMY
2358
+ GNMV,GNMV
2359
+ FPYS,FPYS
2360
+ MPNI,MPNI
2361
+ KHIM,KHIM
2362
+ DMQN,DMQN
2363
+ GRAV,GRAV
2364
+ MADN,MADN
2365
+ RPCQ,RPCQ
2366
+ NANC,NANC
2367
+ MSEK,MSEK
2368
+ FNNT,FNNT
2369
+ TVFH,TVFH
2370
+ FSSV,FSSV
2371
+ WRYD,WRYD
2372
+ GPTV,GPTV
2373
+ NQHS,NQHS
2374
+ HDSH,HDSH
2375
+ IIAA,IIAA
2376
+ PVDI,PVDI
2377
+ EVTR,EVTR
2378
+ GEMY,GEMY
2379
+ QMEN,QMEN
2380
+ CMMG,CMMG
2381
+ KRDN,KRDN
2382
+ IAMY,IAMY
2383
+ IKKE,IKKE
2384
+ NDSA,NDSA
2385
+ PWRG,PWRG
2386
+ MEGA,MEGA
2387
+ KLPK,KLPK
2388
+ VGHM,VGHM
2389
+ NDGK,NDGK
2390
+ DEWH,DEWH
2391
+ IKPK,IKPK
2392
+ GFYA,GFYA
2393
+ TPEL,TPEL
2394
+ DPNQ,DPNQ
2395
+ CAAG,CAAG
2396
+ GGKF,GGKF
2397
+ VRPR,VRPR
2398
+ KIRL,KIRL
2399
+ HYKN,HYKN
2400
+ WDRP,WDRP
2401
+ YPMP,YPMP
2402
+ FKHG,FKHG
2403
+ KDCW,KDCW
2404
+ WVCW,WVCW
2405
+ GCLE,GCLE
2406
+ WHKT,WHKT
2407
+ RGIW,RGIW
2408
+ VNYM,VNYM
2409
+ IMWD,IMWD
2410
+ MHWW,MHWW
2411
+ ETDG,ETDG
2412
+ TNKF,TNKF
2413
+ NATY,NATY
2414
+ PHSS,PHSS
2415
+ FMDM,FMDM
2416
+ SHDF,SHDF
2417
+ PNVW,PNVW
2418
+ DDRF,DDRF
2419
+ KEDM,KEDM
2420
+ FLYK,FLYK
2421
+ HPHF,HPHF
2422
+ ECWN,ECWN
2423
+ FLNT,FLNT
2424
+ PRNI,PRNI
2425
+ MHTV,MHTV
2426
+ CIGK,CIGK
2427
+ HMPA,HMPA
2428
+ VQDI,VQDI
2429
+ SMYF,SMYF
2430
+ PFCI,PFCI
2431
+ RFWM,RFWM
2432
+ MDVL,MDVL
2433
+ TWYI,TWYI
2434
+ VNGV,VNGV
2435
+ CLWD,CLWD
2436
+ PWYD,PWYD
2437
+ MEYE,MEYE
2438
+ CFWW,CFWW
2439
+ GMLN,GMLN
2440
+ PYWN,PYWN
2441
+ QGIM,QGIM
2442
+ KTTE,KTTE
2443
+ SSPY,SSPY
2444
+ VNTY,VNTY
2445
+ NKYR,NKYR
2446
+ ISWS,ISWS
2447
+ AWPE,AWPE
2448
+ CLAN,CLAN
2449
+ HSIK,HSIK
2450
+ WHLQ,WHLQ
2451
+ AFMV,AFMV
2452
+ EGGM,EGGM
2453
+ DCMI,DCMI
2454
+ EFIW,EFIW
2455
+ QWHL,QWHL
2456
+ TVSY,TVSY
2457
+ MYKV,MYKV
2458
+ HTGE,HTGE
2459
+ EFAL,EFAL
2460
+ QFAP,QFAP
2461
+ QYHC,QYHC
2462
+ INDA,INDA
2463
+ DGNV,DGNV
2464
+ GGRM,GGRM
2465
+ NPYQ,NPYQ
2466
+ WEVQ,WEVQ
2467
+ HMQV,HMQV
2468
+ FNGY,FNGY
2469
+ ILIE,ILIE
2470
+ WCEG,WCEG
2471
+ SLQG,SLQG
2472
+ MSQT,MSQT
2473
+ MYCE,MYCE
2474
+ GEQN,GEQN
2475
+ GKPV,GKPV
2476
+ SEEN,SEEN
2477
+ NYPR,NYPR
2478
+ CPFK,CPFK
2479
+ KWCY,KWCY
2480
+ VHYW,VHYW
2481
+ PQYN,PQYN
2482
+ FWNT,FWNT
2483
+ WNQR,WNQR
2484
+ WVYV,WVYV
2485
+ VLSR,VLSR
2486
+ VSEW,VSEW
2487
+ MYYD,MYYD
2488
+ MKGP,MKGP
2489
+ HDFM,HDFM
2490
+ YPAA,YPAA
2491
+ QPNY,QPNY
2492
+ HDSR,HDSR
2493
+ LEVR,LEVR
2494
+ IVPD,IVPD
2495
+ KNFH,KNFH
2496
+ PGTF,PGTF
2497
+ CAIT,CAIT
2498
+ WEWL,WEWL
2499
+ YKAA,YKAA
2500
+ LRQA,LRQA
2501
+ IGKQ,IGKQ
2502
+ DQHG,DQHG
2503
+ FQQK,FQQK
2504
+ EISN,EISN
2505
+ HWDC,HWDC
2506
+ QNYR,QNYR
2507
+ LRSK,LRSK
2508
+ IPQL,IPQL
2509
+ QTQL,QTQL
2510
+ DRYV,DRYV
2511
+ QVCV,QVCV
2512
+ QVGT,QVGT
2513
+ HYRI,HYRI
2514
+ NGNW,NGNW
2515
+ SSNA,SSNA
2516
+ MYME,MYME
2517
+ CCHV,CCHV
2518
+ TEVD,TEVD
2519
+ ESSN,ESSN
2520
+ CVHV,CVHV
2521
+ WSEF,WSEF
2522
+ VRSE,VRSE
2523
+ TVRI,TVRI
2524
+ LPGK,LPGK
2525
+ IRGH,IRGH
2526
+ HNYD,HNYD
2527
+ MAWA,MAWA
2528
+ IAWD,IAWD
2529
+ LPCQ,LPCQ
2530
+ GCTR,GCTR
2531
+ THDF,THDF
2532
+ TWYH,TWYH
2533
+ PLYV,PLYV
2534
+ IDAT,IDAT
2535
+ QWMR,QWMR
2536
+ IWQS,IWQS
2537
+ SFSE,SFSE
2538
+ WCHR,WCHR
2539
+ NNTH,NNTH
2540
+ QPHC,QPHC
2541
+ NCMH,NCMH
2542
+ TIKL,TIKL
2543
+ EVIQ,EVIQ
2544
+ FRVV,FRVV
2545
+ DATT,DATT
2546
+ CYLN,CYLN
2547
+ PGER,PGER
2548
+ YEYT,YEYT
2549
+ WMTL,WMTL
2550
+ TRHH,TRHH
2551
+ NCCN,NCCN
2552
+ HMGV,HMGV
2553
+ LWIL,LWIL
2554
+ FGQW,FGQW
2555
+ IQQM,IQQM
2556
+ FALN,FALN
2557
+ VNNP,VNNP
2558
+ AMAR,AMAR
2559
+ MTLK,MTLK
2560
+ GVQR,GVQR
2561
+ FVEQ,FVEQ
2562
+ ANVM,ANVM
2563
+ TQHM,TQHM
2564
+ VMDD,VMDD
2565
+ LANV,LANV
2566
+ NLSL,NLSL
2567
+ SPTN,SPTN
2568
+ GMCT,GMCT
2569
+ INKV,INKV
2570
+ LCIQ,LCIQ
2571
+ VNPT,VNPT
2572
+ CKIL,CKIL
2573
+ QGRK,QGRK
2574
+ IKAS,IKAS
2575
+ EWDH,EWDH
2576
+ EDNM,EDNM
2577
+ DINK,DINK
2578
+ LDRY,LDRY
2579
+ VEQN,VEQN
2580
+ APTA,APTA
2581
+ RCAC,RCAC
2582
+ KIRK,KIRK
2583
+ GCYQ,GCYQ
2584
+ LAMP,LAMP
2585
+ VGGR,VGGR
2586
+ IPAR,IPAR
2587
+ PFEY,PFEY
2588
+ HQTL,HQTL
2589
+ YEHW,YEHW
2590
+ AVEG,AVEG
2591
+ MGKK,MGKK
2592
+ RIVL,RIVL
2593
+ VPDM,VPDM
2594
+ LLSQ,LLSQ
2595
+ GEYW,GEYW
2596
+ MQWC,MQWC
2597
+ TYTN,TYTN
2598
+ RAPP,RAPP
2599
+ DSWK,DSWK
2600
+ TQTY,TQTY
2601
+ PMNL,PMNL
2602
+ GDYN,GDYN
2603
+ AFHW,AFHW
2604
+ VYPL,VYPL
2605
+ IIHM,IIHM
2606
+ IGFS,IGFS
2607
+ SCCA,SCCA
2608
+ GSCW,GSCW
2609
+ KLNV,KLNV
2610
+ KLMQ,KLMQ
2611
+ GNDD,GNDD
2612
+ MWQC,MWQC
2613
+ NHTC,NHTC
2614
+ NQWQ,NQWQ
2615
+ PLGA,PLGA
2616
+ LHTG,LHTG
2617
+ IGTG,IGTG
2618
+ DKLF,DKLF
2619
+ KHEG,KHEG
2620
+ HKKD,HKKD
2621
+ TVVE,TVVE
2622
+ MSLT,MSLT
2623
+ RMND,RMND
2624
+ YVAL,YVAL
2625
+ GRGT,GRGT
2626
+ GRNN,GRNN
2627
+ HINF,HINF
2628
+ QMSP,QMSP
2629
+ EWQK,EWQK
2630
+ FAWH,FAWH
2631
+ HHKH,HHKH
2632
+ ALAF,ALAF
2633
+ QDKN,QDKN
2634
+ LSEK,LSEK
2635
+ VLVT,VLVT
2636
+ MELV,MELV
2637
+ KSDN,KSDN
2638
+ SQKC,SQKC
2639
+ HYNT,HYNT
2640
+ PYLT,PYLT
2641
+ GVND,GVND
2642
+ VDQW,VDQW
2643
+ RFSN,RFSN
2644
+ PTPF,PTPF
2645
+ NHTY,NHTY
2646
+ ADDP,ADDP
2647
+ MALD,MALD
data/splits/4AA_implicit_val.csv ADDED
@@ -0,0 +1,101 @@
+ name,seqres
+ KSVT,KSVT
+ SMSN,SMSN
+ AAYQ,AAYQ
+ FYVR,FYVR
+ TFKF,TFKF
+ GMMY,GMMY
+ RGQL,RGQL
+ NYER,NYER
+ WKWY,WKWY
+ EDTS,EDTS
+ PEGV,PEGV
+ NNPT,NNPT
+ DENA,DENA
+ YEPH,YEPH
+ IYGF,IYGF
+ FRID,FRID
+ MRVV,MRVV
+ IVFN,IVFN
+ LPGP,LPGP
+ MGNM,MGNM
+ TFWF,TFWF
+ LFHI,LFHI
+ HYGW,HYGW
+ FLDY,FLDY
+ QYTM,QYTM
+ YMSS,YMSS
+ FHWL,FHWL
+ HFFG,HFFG
+ LKSL,LKSL
+ HYAN,HYAN
+ PEYE,PEYE
+ EVFQ,EVFQ
+ FSQR,FSQR
+ ETIT,ETIT
+ CNNF,CNNF
+ MKPK,MKPK
+ GMLH,GMLH
+ SCQK,SCQK
+ WQCY,WQCY
+ FIRG,FIRG
+ THIC,THIC
+ KQRT,KQRT
+ QYRQ,QYRQ
+ WDWF,WDWF
+ QADM,QADM
+ GVVY,GVVY
+ MWGY,MWGY
+ SLHS,SLHS
+ PTHA,PTHA
+ PTRW,PTRW
+ ADSC,ADSC
+ QVLA,QVLA
+ KSLG,KSLG
+ PGNE,PGNE
+ IPRW,IPRW
+ HTSM,HTSM
+ VDEF,VDEF
+ VVKC,VVKC
+ KTLL,KTLL
+ KIED,KIED
+ ANFS,ANFS
+ DKYV,DKYV
+ DQPA,DQPA
+ HWLK,HWLK
+ TDPL,TDPL
+ QCQW,QCQW
+ PCPL,PCPL
+ MLPR,MLPR
+ NDNV,NDNV
+ LTPR,LTPR
+ YRVC,YRVC
+ RFLT,RFLT
+ SEAH,SEAH
+ VHHC,VHHC
+ SMAS,SMAS
+ NQEF,NQEF
+ DEPV,DEPV
+ THHL,THHL
+ VVLI,VVLI
+ AGVI,AGVI
+ STYP,STYP
+ LKLW,LKLW
+ SFQK,SFQK
+ SNID,SNID
+ KGCR,KGCR
+ FDNC,FDNC
+ RCKP,RCKP
+ MYYA,MYYA
+ DTCD,DTCD
+ LQLG,LQLG
+ LNWY,LNWY
+ MIQK,MIQK
+ WDDK,WDDK
+ YNKT,YNKT
+ TKAK,TKAK
+ VRNG,VRNG
+ KTTF,KTTF
+ IAIM,IAIM
+ FGGV,FGGV
+ SHPE,SHPE
data/splits/4AA_test.csv ADDED
@@ -0,0 +1,101 @@
+ name,seqres
+ FLRH,FLRH
+ IMRY,IMRY
+ RTVD,RTVD
+ PIDV,PIDV
+ AKIR,AKIR
+ IPGD,IPGD
+ HTIQ,HTIQ
+ WWQW,WWQW
+ KDFM,KDFM
+ ESSS,ESSS
+ ASRE,ASRE
+ LCLQ,LCLQ
+ VIYH,VIYH
+ IVMA,IVMA
+ RVQQ,RVQQ
+ DQKV,DQKV
+ KDDD,KDDD
+ CPEE,CPEE
+ WSAQ,WSAQ
+ VQCL,VQCL
+ CSYR,CSYR
+ LAWA,LAWA
+ VWVR,VWVR
+ SPVN,SPVN
+ MIAY,MIAY
+ GLIL,GLIL
+ AGVD,AGVD
+ SNSF,SNSF
+ FHSE,FHSE
+ TLRK,TLRK
+ IAMI,IAMI
+ CPYV,CPYV
+ NWWG,NWWG
+ LQMG,LQMG
+ KEGH,KEGH
+ FGGW,FGGW
+ NVFH,NVFH
+ DLCG,DLCG
+ APWF,APWF
+ QVTC,QVTC
+ YQGI,YQGI
+ LVVF,LVVF
+ RVQS,RVQS
+ KAEP,KAEP
+ WQVP,WQVP
+ LWMR,LWMR
+ NNDK,NNDK
+ MHVR,MHVR
+ EDQK,EDQK
+ CACS,CACS
+ CKHT,CKHT
+ IWHF,IWHF
+ IPTH,IPTH
+ VDRN,VDRN
+ YCPF,YCPF
+ HELI,HELI
+ SFCH,SFCH
+ YLID,YLID
+ GGHN,GGHN
+ FSDP,FSDP
+ CKVH,CKVH
+ NTVG,NTVG
+ IHNV,IHNV
+ YMQN,YMQN
+ PRVC,PRVC
+ SDFS,SDFS
+ SPFH,SPFH
+ FSRK,FSRK
+ QNWV,QNWV
+ PYHQ,PYHQ
+ FDNT,FDNT
+ KAQW,KAQW
+ PKDM,PKDM
+ LIFE,LIFE
+ ALDA,ALDA
+ PGKM,PGKM
+ QRNF,QRNF
+ SSNN,SSNN
+ FKKL,FKKL
+ MWQK,MWQK
+ RLKR,RLKR
+ KSIY,KSIY
+ GCIC,GCIC
+ INVH,INVH
+ NFQF,NFQF
+ RDWY,RDWY
+ SNIN,SNIN
+ ESIC,ESIC
+ LIRH,LIRH
+ CIYL,CIYL
+ MAFM,MAFM
+ AASF,AASF
+ HENV,HENV
+ EHEV,EHEV
+ LFNV,LFNV
+ RQLP,RQLP
+ PNHP,PNHP
+ CFEE,CFEE
+ DHAR,DHAR
+ IDRH,IDRH
data/splits/4AA_train.csv ADDED
@@ -0,0 +1,3110 @@
1
+ name,seqres
2
+ EFPT,EFPT
3
+ CEML,CEML
4
+ KSVT,KSVT
5
+ SMSN,SMSN
6
+ AAYQ,AAYQ
7
+ KQTG,KQTG
8
+ WFPD,WFPD
9
+ FYVR,FYVR
10
+ RYSH,RYSH
11
+ TFKF,TFKF
12
+ LLRT,LLRT
13
+ IERA,IERA
14
+ KPHD,KPHD
15
+ GMMY,GMMY
16
+ RGQL,RGQL
17
+ NYER,NYER
18
+ NDWN,NDWN
19
+ WKWY,WKWY
20
+ QNCY,QNCY
21
+ FFSG,FFSG
22
+ WEFG,WEFG
23
+ FCGM,FCGM
24
+ EDTS,EDTS
25
+ PEGV,PEGV
26
+ RTNT,RTNT
27
+ NNPT,NNPT
28
+ DENA,DENA
29
+ YEPH,YEPH
30
+ NSYF,NSYF
31
+ IYGF,IYGF
32
+ DTGE,DTGE
33
+ TWWQ,TWWQ
34
+ FRID,FRID
35
+ MRVV,MRVV
36
+ ITDP,ITDP
37
+ IVFN,IVFN
38
+ PYDT,PYDT
39
+ LPGP,LPGP
40
+ MGNM,MGNM
41
+ TFWF,TFWF
42
+ LFHI,LFHI
43
+ HYGW,HYGW
44
+ LWPA,LWPA
45
+ FLDY,FLDY
46
+ QKAD,QKAD
47
+ QYTM,QYTM
48
+ YMSS,YMSS
49
+ NWWL,NWWL
50
+ FHWL,FHWL
51
+ HFFG,HFFG
52
+ DGNM,DGNM
53
+ HWPA,HWPA
54
+ VAEY,VAEY
55
+ LKSL,LKSL
56
+ HYAN,HYAN
57
+ PEYE,PEYE
58
+ QLFI,QLFI
59
+ CGNH,CGNH
60
+ EVFQ,EVFQ
61
+ FSQR,FSQR
62
+ ETIT,ETIT
63
+ CNNF,CNNF
64
+ ETRW,ETRW
65
+ MKPK,MKPK
66
+ YAQK,YAQK
67
+ GMLH,GMLH
68
+ WANF,WANF
69
+ TQVG,TQVG
70
+ THGW,THGW
71
+ NSGW,NSGW
72
+ HPLH,HPLH
73
+ TWDS,TWDS
74
+ SCQK,SCQK
75
+ WQCY,WQCY
76
+ FIRG,FIRG
77
+ PSMR,PSMR
78
+ THIC,THIC
79
+ KQRT,KQRT
80
+ LICP,LICP
81
+ DLYC,DLYC
82
+ WMDW,WMDW
83
+ QYRQ,QYRQ
84
+ WDWF,WDWF
85
+ NGYG,NGYG
86
+ CRTL,CRTL
87
+ QADM,QADM
88
+ GVVY,GVVY
89
+ EWNK,EWNK
90
+ MWGY,MWGY
91
+ SLHS,SLHS
92
+ PTHA,PTHA
93
+ ANIP,ANIP
94
+ ANMY,ANMY
95
+ RMMC,RMMC
96
+ MWCA,MWCA
97
+ PTRW,PTRW
98
+ ADSC,ADSC
99
+ PYRM,PYRM
100
+ QVLA,QVLA
101
+ KSLG,KSLG
102
+ GLCK,GLCK
103
+ QIEG,QIEG
104
+ PGNE,PGNE
105
+ IPRW,IPRW
106
+ HTSM,HTSM
107
+ IPEM,IPEM
108
+ NQRK,NQRK
109
+ WVTA,WVTA
110
+ SFIT,SFIT
111
+ VDEF,VDEF
112
+ NDPC,NDPC
113
+ VVKC,VVKC
114
+ TSTY,TSTY
115
+ QRCV,QRCV
116
+ SQFR,SQFR
117
+ KTLL,KTLL
118
+ TYHA,TYHA
119
+ CIAC,CIAC
120
+ KIED,KIED
121
+ ANFS,ANFS
122
+ KTNF,KTNF
123
+ KAAC,KAAC
124
+ ADYH,ADYH
125
+ EKIA,EKIA
126
+ HWRN,HWRN
127
+ DKYV,DKYV
128
+ DQPA,DQPA
129
+ DIFK,DIFK
130
+ MFVD,MFVD
131
+ HWLK,HWLK
132
+ TDQK,TDQK
133
+ DFIC,DFIC
134
+ LDDQ,LDDQ
135
+ DAWF,DAWF
136
+ TDPL,TDPL
137
+ QCQW,QCQW
138
+ MKHG,MKHG
139
+ PCPL,PCPL
140
+ MLPR,MLPR
141
+ NDNV,NDNV
142
+ LTPR,LTPR
143
+ YRVC,YRVC
144
+ HVHQ,HVHQ
145
+ YMPM,YMPM
146
+ FGMM,FGMM
147
+ RFLT,RFLT
148
+ SEAH,SEAH
149
+ VHHC,VHHC
150
+ SMAS,SMAS
151
+ GSLP,GSLP
152
+ NQEF,NQEF
153
+ DEPV,DEPV
154
+ THHL,THHL
155
+ QSAP,QSAP
156
+ VVLI,VVLI
157
+ AGVI,AGVI
158
+ STYP,STYP
159
+ FYWL,FYWL
160
+ SWGL,SWGL
161
+ SSTS,SSTS
162
+ LDGR,LDGR
163
+ LKLW,LKLW
164
+ RYPT,RYPT
165
+ TTHG,TTHG
166
+ SFQK,SFQK
167
+ SNID,SNID
168
+ KGCR,KGCR
169
+ MVHP,MVHP
170
+ FDNC,FDNC
171
+ TFRF,TFRF
172
+ LHIM,LHIM
173
+ RCKP,RCKP
174
+ MYYA,MYYA
175
+ ALQG,ALQG
176
+ DTCD,DTCD
177
+ MLPY,MLPY
178
+ RLEH,RLEH
179
+ LQLG,LQLG
180
+ LNWY,LNWY
181
+ GLQK,GLQK
182
+ GITD,GITD
183
+ NPDS,NPDS
184
+ MDKA,MDKA
185
+ DLYV,DLYV
186
+ AAKD,AAKD
187
+ WRPI,WRPI
188
+ MIQK,MIQK
189
+ WDDK,WDDK
190
+ YNKT,YNKT
191
+ VIHL,VIHL
192
+ ERQS,ERQS
193
+ AYIY,AYIY
194
+ NHDQ,NHDQ
195
+ TKAK,TKAK
196
+ VRNG,VRNG
197
+ IPVY,IPVY
198
+ SQVM,SQVM
199
+ KTTF,KTTF
200
+ KGHN,KGHN
201
+ RTSK,RTSK
202
+ IAIM,IAIM
203
+ FGGV,FGGV
204
+ SHPE,SHPE
205
+ QDFY,QDFY
206
+ SEME,SEME
207
+ EKME,EKME
208
+ YSCA,YSCA
209
+ PTMP,PTMP
210
+ KEWD,KEWD
211
+ TGPT,TGPT
212
+ ILIG,ILIG
213
+ AAKQ,AAKQ
214
+ SGLQ,SGLQ
215
+ LQRL,LQRL
216
+ TTLY,TTLY
217
+ PFGV,PFGV
218
+ FPCE,FPCE
219
+ VSFH,VSFH
220
+ PHEV,PHEV
221
+ EHLC,EHLC
222
+ FFSV,FFSV
223
+ DCVV,DCVV
224
+ RPEY,RPEY
225
+ WYNA,WYNA
226
+ FLKS,FLKS
227
+ EPWP,EPWP
228
+ PMPK,PMPK
229
+ DNCQ,DNCQ
230
+ MYCF,MYCF
231
+ FLHM,FLHM
232
+ WYIG,WYIG
233
+ LNPR,LNPR
234
+ HLSN,HLSN
235
+ CQYS,CQYS
236
+ QQSD,QQSD
237
+ RSAH,RSAH
238
+ SNCV,SNCV
239
+ DIGN,DIGN
240
+ WKRV,WKRV
241
+ NYQM,NYQM
242
+ PFTA,PFTA
243
+ QASA,QASA
244
+ WQRR,WQRR
245
+ FFRN,FFRN
246
+ WKSH,WKSH
247
+ HEGV,HEGV
248
+ SIKA,SIKA
249
+ YSGC,YSGC
250
+ HRND,HRND
251
+ VFWY,VFWY
252
+ NFRP,NFRP
253
+ PVVH,PVVH
254
+ YQIW,YQIW
255
+ QNFH,QNFH
256
+ ISFP,ISFP
257
+ YLCA,YLCA
258
+ NQSC,NQSC
259
+ SMYW,SMYW
260
+ HWTP,HWTP
261
+ ARGH,ARGH
262
+ CRMN,CRMN
263
+ FKWW,FKWW
264
+ QNMV,QNMV
265
+ EWLN,EWLN
266
+ MSLN,MSLN
267
+ AFCQ,AFCQ
268
+ WKPH,WKPH
269
+ IKQG,IKQG
270
+ GADR,GADR
271
+ AKCN,AKCN
272
+ PMMV,PMMV
273
+ LDPH,LDPH
274
+ QMKT,QMKT
275
+ EANQ,EANQ
276
+ HILY,HILY
277
+ DGCA,DGCA
278
+ QWFI,QWFI
279
+ RRCY,RRCY
280
+ AQFQ,AQFQ
281
+ PICC,PICC
282
+ GPGP,GPGP
283
+ IAYH,IAYH
284
+ MHGM,MHGM
285
+ WMFD,WMFD
286
+ VHTK,VHTK
287
+ NWPN,NWPN
288
+ LKRV,LKRV
289
+ GFYF,GFYF
290
+ QTSK,QTSK
291
+ HPNK,HPNK
292
+ QTSA,QTSA
293
+ FGAR,FGAR
294
+ ITWF,ITWF
295
+ LRTE,LRTE
296
+ AEAF,AEAF
297
+ FHLQ,FHLQ
298
+ PIAC,PIAC
299
+ QWPL,QWPL
300
+ KDKI,KDKI
301
+ STSR,STSR
302
+ PTIG,PTIG
303
+ QIVA,QIVA
304
+ MKCC,MKCC
305
+ WSYN,WSYN
306
+ NAWY,NAWY
307
+ YVLE,YVLE
308
+ VCRV,VCRV
309
+ MHAQ,MHAQ
310
+ TSWC,TSWC
311
+ QKYR,QKYR
312
+ TFIL,TFIL
313
+ ITKR,ITKR
314
+ DVGL,DVGL
315
+ FVEL,FVEL
316
+ CGPF,CGPF
317
+ GPSL,GPSL
318
+ LQPT,LQPT
319
+ WKLD,WKLD
320
+ MRLH,MRLH
321
+ TYAM,TYAM
322
+ VNAK,VNAK
323
+ CEIA,CEIA
324
+ WQDT,WQDT
325
+ SRQV,SRQV
326
+ CLHA,CLHA
327
+ TPEA,TPEA
328
+ EEKV,EEKV
329
+ NHEL,NHEL
330
+ NKCC,NKCC
331
+ VVSN,VVSN
332
+ EETC,EETC
333
+ GTEV,GTEV
334
+ NMYR,NMYR
335
+ PYYT,PYYT
336
+ FRCR,FRCR
337
+ AMQN,AMQN
338
+ KDYC,KDYC
339
+ QNIE,QNIE
340
+ MMDH,MMDH
341
+ MWLR,MWLR
342
+ AELN,AELN
343
+ KNQT,KNQT
344
+ TNHK,TNHK
345
+ TDNF,TDNF
346
+ YPDQ,YPDQ
347
+ NTDP,NTDP
348
+ SEGL,SEGL
349
+ PMWQ,PMWQ
350
+ CPQA,CPQA
351
+ PDPH,PDPH
352
+ NYWT,NYWT
353
+ WTIN,WTIN
354
+ TQLE,TQLE
355
+ HFAP,HFAP
356
+ NSCW,NSCW
357
+ LVTE,LVTE
358
+ CWWT,CWWT
359
+ PECP,PECP
360
+ WGWE,WGWE
361
+ FYTW,FYTW
362
+ LHGA,LHGA
363
+ PFGR,PFGR
364
+ SNIA,SNIA
365
+ HPIQ,HPIQ
366
+ KWMG,KWMG
367
+ VIWL,VIWL
368
+ LEIC,LEIC
369
+ LSIQ,LSIQ
370
+ VSSK,VSSK
371
+ ICDY,ICDY
372
+ FFCC,FFCC
373
+ IFWQ,IFWQ
374
+ YWWI,YWWI
375
+ HTDL,HTDL
376
+ DPDA,DPDA
377
+ HLYC,HLYC
378
+ SVCF,SVCF
379
+ VGNK,VGNK
380
+ IKGW,IKGW
381
+ DVFC,DVFC
382
+ PKSC,PKSC
383
+ CAML,CAML
384
+ INCE,INCE
385
+ QEKW,QEKW
386
+ EGSR,EGSR
387
+ NDAL,NDAL
388
+ VGCC,VGCC
389
+ AHVE,AHVE
390
+ KSKE,KSKE
391
+ VPKS,VPKS
392
+ AQYS,AQYS
393
+ CHYC,CHYC
394
+ EVNN,EVNN
395
+ VQVG,VQVG
396
+ ECDG,ECDG
397
+ CSIT,CSIT
398
+ EEFG,EEFG
399
+ AFRN,AFRN
400
+ NLSK,NLSK
401
+ DSQY,DSQY
402
+ ELHH,ELHH
403
+ SNFS,SNFS
404
+ SYPG,SYPG
405
+ DFAK,DFAK
406
+ QAQD,QAQD
407
+ IIDS,IIDS
408
+ MFSG,MFSG
409
+ LANN,LANN
410
+ KKNF,KKNF
411
+ VLNM,VLNM
412
+ DEWW,DEWW
413
+ LEHK,LEHK
414
+ SMVW,SMVW
415
+ IING,IING
416
+ GIIY,GIIY
417
+ IHGP,IHGP
418
+ PEFC,PEFC
419
+ RKHR,RKHR
420
+ DRPY,DRPY
421
+ CPVD,CPVD
422
+ CKVF,CKVF
423
+ PEID,PEID
424
+ LPRM,LPRM
425
+ PRRA,PRRA
426
+ YFTF,YFTF
427
+ VWGW,VWGW
428
+ AGTA,AGTA
429
+ NRHA,NRHA
430
+ VNVC,VNVC
431
+ KHRY,KHRY
432
+ RAKD,RAKD
433
+ YGPH,YGPH
434
+ GCHN,GCHN
435
+ TDRL,TDRL
436
+ HRLK,HRLK
437
+ PWRH,PWRH
438
+ KKTN,KKTN
439
+ TELH,TELH
440
+ EKTL,EKTL
441
+ TWVR,TWVR
442
+ SHQN,SHQN
443
+ TCIA,TCIA
444
+ THKW,THKW
445
+ VGES,VGES
446
+ KMHT,KMHT
447
+ VDYD,VDYD
448
+ KVYT,KVYT
449
+ KGFR,KGFR
450
+ NWNG,NWNG
451
+ PQPK,PQPK
452
+ FKVT,FKVT
453
+ VIDA,VIDA
454
+ AQMK,AQMK
455
+ LGCI,LGCI
456
+ AWDV,AWDV
457
+ AHKR,AHKR
458
+ HAPF,HAPF
459
+ LINW,LINW
460
+ SNYD,SNYD
461
+ KVFQ,KVFQ
462
+ SWML,SWML
463
+ NMHN,NMHN
464
+ RVSQ,RVSQ
465
+ MYCI,MYCI
466
+ WQPN,WQPN
467
+ VNRT,VNRT
468
+ QHTR,QHTR
469
+ RCRP,RCRP
470
+ HCWT,HCWT
471
+ GQSH,GQSH
472
+ RTHG,RTHG
473
+ NQQL,NQQL
474
+ SNHN,SNHN
475
+ MKIF,MKIF
476
+ YYHC,YYHC
477
+ YRMP,YRMP
478
+ SHQH,SHQH
479
+ WAWT,WAWT
480
+ TFIS,TFIS
481
+ DDWR,DDWR
482
+ AMDV,AMDV
483
+ ADRK,ADRK
484
+ HMHR,HMHR
485
+ FNAG,FNAG
486
+ EMYK,EMYK
487
+ WVSQ,WVSQ
488
+ AHFV,AHFV
489
+ RQHV,RQHV
490
+ TQYA,TQYA
491
+ AIWE,AIWE
492
+ CMKR,CMKR
493
+ IKWC,IKWC
494
+ RFML,RFML
495
+ VCGS,VCGS
496
+ RTNP,RTNP
497
+ MTYG,MTYG
498
+ MINS,MINS
499
+ DIKD,DIKD
500
+ KNFC,KNFC
501
+ ENSS,ENSS
502
+ MSPW,MSPW
503
+ VAYR,VAYR
504
+ YSME,YSME
505
+ LKTH,LKTH
506
+ PGHR,PGHR
507
+ LPAH,LPAH
508
+ QCWM,QCWM
509
+ GWSQ,GWSQ
510
+ IVSP,IVSP
511
+ YYFW,YYFW
512
+ WMAW,WMAW
513
+ LLDC,LLDC
514
+ WTHH,WTHH
515
+ GCQH,GCQH
516
+ WTGS,WTGS
517
+ LFGA,LFGA
518
+ LREV,LREV
519
+ TYGD,TYGD
520
+ VHTR,VHTR
521
+ CGCQ,CGCQ
522
+ FVIH,FVIH
523
+ GQFY,GQFY
524
+ ELYA,ELYA
525
+ EMRT,EMRT
526
+ DGDL,DGDL
527
+ QEPC,QEPC
528
+ NHDA,NHDA
529
+ IISC,IISC
530
+ NSMN,NSMN
531
+ GLTR,GLTR
532
+ CISI,CISI
533
+ RGVQ,RGVQ
534
+ KDMC,KDMC
535
+ SGYG,SGYG
536
+ ENNH,ENNH
537
+ NDPM,NDPM
538
+ AYAE,AYAE
539
+ DFWH,DFWH
540
+ GDII,GDII
541
+ GFKP,GFKP
542
+ CAYP,CAYP
543
+ KHDS,KHDS
544
+ GAWV,GAWV
545
+ TTNE,TTNE
546
+ FEDH,FEDH
547
+ YTAV,YTAV
548
+ AMVW,AMVW
549
+ LSQT,LSQT
550
+ NECR,NECR
551
+ GLGA,GLGA
552
+ PNYQ,PNYQ
553
+ KSTF,KSTF
554
+ KVCI,KVCI
555
+ RKIS,RKIS
556
+ PILN,PILN
557
+ GCCR,GCCR
558
+ SVCQ,SVCQ
559
+ WVLQ,WVLQ
560
+ LLGC,LLGC
561
+ HAWM,HAWM
562
+ AVQK,AVQK
563
+ NPGA,NPGA
564
+ TPEW,TPEW
565
+ SDNG,SDNG
566
+ PIQV,PIQV
567
+ YIMK,YIMK
568
+ NYMG,NYMG
569
+ LTYP,LTYP
570
+ GTTS,GTTS
571
+ FERD,FERD
572
+ EIGH,EIGH
573
+ CPKM,CPKM
574
+ QSNI,QSNI
575
+ HSKL,HSKL
576
+ CWEM,CWEM
577
+ NDWV,NDWV
578
+ FFIR,FFIR
579
+ CNSC,CNSC
580
+ KTGN,KTGN
581
+ RGSY,RGSY
582
+ CVLQ,CVLQ
583
+ NKNQ,NKNQ
584
+ CWYK,CWYK
585
+ SSRN,SSRN
586
+ KNGD,KNGD
587
+ PECK,PECK
588
+ NRKC,NRKC
589
+ HTAN,HTAN
590
+ NSKE,NSKE
591
+ ALFY,ALFY
592
+ KWPM,KWPM
593
+ CFTK,CFTK
594
+ PDKM,PDKM
595
+ TQIS,TQIS
596
+ DEWM,DEWM
597
+ EHMK,EHMK
598
+ IYNR,IYNR
599
+ HAFR,HAFR
600
+ DECE,DECE
601
+ WYVV,WYVV
602
+ GCPK,GCPK
603
+ DRKK,DRKK
604
+ KEVR,KEVR
605
+ WNWC,WNWC
606
+ PAYK,PAYK
607
+ QKEE,QKEE
608
+ QGEC,QGEC
609
+ LYGF,LYGF
610
+ IWAR,IWAR
611
+ GRAR,GRAR
612
+ AIYC,AIYC
613
+ TTVF,TTVF
614
+ HFLW,HFLW
615
+ NARM,NARM
616
+ RHRA,RHRA
617
+ QQDS,QQDS
618
+ FHSL,FHSL
619
+ AVDA,AVDA
620
+ HERQ,HERQ
621
+ TMMP,TMMP
622
+ PEMT,PEMT
623
+ AAFS,AAFS
624
+ QEEG,QEEG
625
+ ATMV,ATMV
626
+ CAFW,CAFW
627
+ LSGH,LSGH
628
+ YKLV,YKLV
629
+ MPNS,MPNS
630
+ CKMP,CKMP
631
+ KKYS,KKYS
632
+ IITA,IITA
633
+ PAIY,PAIY
634
+ WDCD,WDCD
635
+ RSDW,RSDW
636
+ QPAL,QPAL
637
+ EYPW,EYPW
638
+ FCWC,FCWC
639
+ CFRY,CFRY
640
+ QLCG,QLCG
641
+ QETE,QETE
642
+ TCNV,TCNV
643
+ DQFR,DQFR
644
+ HKTS,HKTS
645
+ GWDT,GWDT
646
+ SFMN,SFMN
647
+ GHMF,GHMF
648
+ HDRQ,HDRQ
649
+ RTNL,RTNL
650
+ YNKP,YNKP
651
+ KDIG,KDIG
652
+ LECA,LECA
653
+ RRGK,RRGK
654
+ LNPY,LNPY
655
+ IMVW,IMVW
656
+ PVDS,PVDS
657
+ RERF,RERF
658
+ KAER,KAER
659
+ FMGP,FMGP
660
+ PSAT,PSAT
661
+ VKEQ,VKEQ
662
+ CKCE,CKCE
663
+ TAAR,TAAR
664
+ QPRA,QPRA
665
+ HEAH,HEAH
666
+ LCNA,LCNA
667
+ DEMY,DEMY
668
+ TEPE,TEPE
669
+ WNLY,WNLY
670
+ SPHG,SPHG
671
+ GQHQ,GQHQ
672
+ ADYM,ADYM
673
+ FQYW,FQYW
674
+ VSDD,VSDD
675
+ NAKM,NAKM
676
+ QMDQ,QMDQ
677
+ SDAK,SDAK
678
+ HLDH,HLDH
679
+ SCCY,SCCY
680
+ HGSC,HGSC
681
+ RAVV,RAVV
682
+ KCND,KCND
683
+ LIDG,LIDG
684
+ GGMD,GGMD
685
+ PFRW,PFRW
686
+ NMLG,NMLG
687
+ NWSN,NWSN
688
+ TRGN,TRGN
689
+ MIYA,MIYA
690
+ IFAS,IFAS
691
+ QICA,QICA
692
+ VTIL,VTIL
693
+ NYLP,NYLP
694
+ QFGT,QFGT
695
+ GLDV,GLDV
696
+ DHNR,DHNR
697
+ NYRP,NYRP
698
+ CDKL,CDKL
699
+ WQSQ,WQSQ
700
+ ADPG,ADPG
701
+ FEHM,FEHM
702
+ DHFW,DHFW
703
+ WSSW,WSSW
704
+ FPMF,FPMF
705
+ SLLD,SLLD
706
+ KVRS,KVRS
707
+ ITNH,ITNH
708
+ GLAR,GLAR
709
+ CACL,CACL
710
+ HFDQ,HFDQ
711
+ LDPE,LDPE
712
+ FIQA,FIQA
713
+ WFSN,WFSN
714
+ NQQC,NQQC
715
+ CGWR,CGWR
716
+ QMDV,QMDV
717
+ QCEQ,QCEQ
718
+ YNKS,YNKS
719
+ ALCP,ALCP
720
+ RRKI,RRKI
721
+ GPCV,GPCV
722
+ HISI,HISI
723
+ QVAD,QVAD
724
+ DFWV,DFWV
725
+ WEAM,WEAM
726
+ CFWG,CFWG
727
+ GCWP,GCWP
728
+ PHPC,PHPC
729
+ AVSM,AVSM
730
+ TSPC,TSPC
731
+ MWLF,MWLF
732
+ RHSI,RHSI
733
+ HKQL,HKQL
734
+ IPTF,IPTF
735
+ SSNP,SSNP
736
+ ATLL,ATLL
737
+ SAHD,SAHD
738
+ YNQP,YNQP
739
+ RPSF,RPSF
740
+ TCHS,TCHS
741
+ AVGR,AVGR
742
+ HYPI,HYPI
743
+ PQYL,PQYL
744
+ GNIA,GNIA
745
+ LWQD,LWQD
746
+ SHSC,SHSC
747
+ FNCR,FNCR
748
+ VAGR,VAGR
749
+ YNLS,YNLS
750
+ NSLF,NSLF
751
+ MIIM,MIIM
752
+ RYPG,RYPG
753
+ DCQI,DCQI
754
+ PMFI,PMFI
755
+ CIFL,CIFL
756
+ TIRG,TIRG
757
+ CRVF,CRVF
758
+ WHEL,WHEL
759
+ ALWP,ALWP
760
+ CMKC,CMKC
761
+ PWID,PWID
762
+ ESTM,ESTM
763
+ EHDY,EHDY
764
+ EMIR,EMIR
765
+ NAWM,NAWM
766
+ LNCW,LNCW
767
+ FYDK,FYDK
768
+ WKEG,WKEG
769
+ HHCG,HHCG
770
+ NKPS,NKPS
771
+ FCDI,FCDI
772
+ HDSC,HDSC
773
+ PAYR,PAYR
774
+ YFMV,YFMV
775
+ NTGH,NTGH
776
+ SREK,SREK
777
+ FYCL,FYCL
778
+ GFHM,GFHM
779
+ EQCV,EQCV
780
+ GREQ,GREQ
781
+ NYCP,NYCP
782
+ EKER,EKER
783
+ SAIS,SAIS
784
+ RHIQ,RHIQ
785
+ AITQ,AITQ
786
+ FEAT,FEAT
787
+ PKPF,PKPF
788
+ HIMN,HIMN
789
+ DSSQ,DSSQ
790
+ KKYC,KKYC
791
+ YSST,YSST
792
+ LETH,LETH
793
+ PSFA,PSFA
794
+ VQLN,VQLN
795
+ LCCH,LCCH
796
+ MNWM,MNWM
797
+ DNTG,DNTG
798
+ YRQF,YRQF
799
+ NPIF,NPIF
800
+ AISE,AISE
801
+ ECGH,ECGH
802
+ WQRL,WQRL
803
+ YMKK,YMKK
804
+ IWQA,IWQA
805
+ TLRF,TLRF
806
+ HGYL,HGYL
807
+ SHPF,SHPF
808
+ WVNW,WVNW
809
+ QKIH,QKIH
810
+ REVN,REVN
811
+ CQAA,CQAA
812
+ YMCK,YMCK
813
+ GCAE,GCAE
814
+ IYCC,IYCC
815
+ LHPT,LHPT
816
+ HYGI,HYGI
817
+ MVPE,MVPE
818
+ LGKM,LGKM
819
+ WGRS,WGRS
820
+ GSIK,GSIK
821
+ KPPN,KPPN
822
+ MNGQ,MNGQ
823
+ EYCW,EYCW
824
+ SCWT,SCWT
825
+ IGNE,IGNE
826
+ DAVK,DAVK
827
+ EWIH,EWIH
828
+ NTWM,NTWM
829
+ ADSL,ADSL
830
+ QHHD,QHHD
831
+ YAYE,YAYE
832
+ CTVL,CTVL
833
+ YPNQ,YPNQ
834
+ GSTY,GSTY
835
+ NYDY,NYDY
836
+ SITI,SITI
837
+ TWNQ,TWNQ
838
+ KWRC,KWRC
839
+ IVKV,IVKV
840
+ QHKL,QHKL
841
+ HRRT,HRRT
842
+ SLIP,SLIP
843
+ EPYW,EPYW
844
+ WYST,WYST
845
+ KRQD,KRQD
846
+ KDKA,KDKA
847
+ LVTY,LVTY
848
+ PLKQ,PLKQ
849
+ DFTH,DFTH
850
+ PRYH,PRYH
851
+ HKEW,HKEW
852
+ DMII,DMII
853
+ MKSQ,MKSQ
854
+ AGAE,AGAE
855
+ KSCD,KSCD
856
+ STKQ,STKQ
857
+ THTI,THTI
858
+ KDNF,KDNF
859
+ KAHG,KAHG
860
+ RGQA,RGQA
861
+ QDKS,QDKS
862
+ QSIS,QSIS
863
+ NMSV,NMSV
864
+ WAAK,WAAK
865
+ GYRA,GYRA
866
+ METL,METL
867
+ VYAY,VYAY
868
+ HAQA,HAQA
869
+ HETA,HETA
870
+ KVYK,KVYK
871
+ VYCP,VYCP
872
+ LLGG,LLGG
873
+ YQLI,YQLI
874
+ AYLP,AYLP
875
+ FIVM,FIVM
876
+ WMAS,WMAS
877
+ CTRV,CTRV
878
+ NMDG,NMDG
879
+ FPDC,FPDC
880
+ CIYD,CIYD
881
+ GSKT,GSKT
882
+ CGFR,CGFR
883
+ TRSH,TRSH
884
+ MVSL,MVSL
885
+ RRKV,RRKV
886
+ VLQI,VLQI
887
+ VSAC,VSAC
888
+ DWSM,DWSM
889
+ NHNN,NHNN
890
+ AFYN,AFYN
891
+ EDDP,EDDP
892
+ DHMM,DHMM
893
+ QTHP,QTHP
894
+ TTPK,TTPK
895
+ KWHA,KWHA
896
+ WCTW,WCTW
897
+ ELHW,ELHW
898
+ PRGH,PRGH
899
+ KTSE,KTSE
900
+ HFIG,HFIG
901
+ PRTR,PRTR
902
+ GTPP,GTPP
903
+ APLR,APLR
904
+ FPAW,FPAW
905
+ WNGE,WNGE
906
+ EYQP,EYQP
907
+ KHDF,KHDF
908
+ WELM,WELM
909
+ KPRK,KPRK
910
+ YSVA,YSVA
911
+ TQSV,TQSV
912
+ VMIK,VMIK
913
+ RNHG,RNHG
914
+ SLGK,SLGK
915
+ ETEG,ETEG
916
+ MWKN,MWKN
917
+ TVHD,TVHD
918
+ TPHR,TPHR
919
+ SIHY,SIHY
920
+ LLWM,LLWM
921
+ EVGN,EVGN
922
+ NKIP,NKIP
923
+ PQDM,PQDM
924
+ YDMK,YDMK
925
+ CTPI,CTPI
926
+ FYEH,FYEH
927
+ NENL,NENL
928
+ ETVQ,ETVQ
929
+ NLYD,NLYD
930
+ SIRG,SIRG
931
+ PGIN,PGIN
932
+ QSMW,QSMW
933
+ HMLW,HMLW
934
+ QSLR,QSLR
935
+ CCNM,CCNM
936
+ LKAW,LKAW
937
+ TIKC,TIKC
938
+ RPFF,RPFF
939
+ SIVL,SIVL
940
+ TVYC,TVYC
941
+ HWRD,HWRD
942
+ PVDL,PVDL
943
+ RVSH,RVSH
944
+ QGGV,QGGV
945
+ ERLR,ERLR
946
+ IYTM,IYTM
947
+ KSYI,KSYI
948
+ KPGM,KPGM
949
+ WTFI,WTFI
950
+ ICAC,ICAC
951
+ TSMW,TSMW
952
+ PIRF,PIRF
953
+ NKRN,NKRN
954
+ VFNR,VFNR
955
+ QQRK,QQRK
956
+ KFMW,KFMW
957
+ PHVT,PHVT
958
+ KQFW,KQFW
959
+ DHQD,DHQD
960
+ SHLR,SHLR
961
+ ADDC,ADDC
962
+ RKQI,RKQI
963
+ PPFQ,PPFQ
964
+ VECA,VECA
965
+ EPSY,EPSY
966
+ RAYD,RAYD
967
+ QRFT,QRFT
968
+ CYAI,CYAI
969
+ NSYW,NSYW
970
+ VEGM,VEGM
971
+ PVKV,PVKV
972
+ CGEF,CGEF
973
+ QDEA,QDEA
974
+ EFET,EFET
975
+ QTCH,QTCH
976
+ LIFR,LIFR
977
+ CQNM,CQNM
978
+ HHPD,HHPD
979
+ GFIK,GFIK
980
+ TTLH,TTLH
981
+ SGED,SGED
982
+ PRPE,PRPE
983
+ GIDF,GIDF
984
+ DVSH,DVSH
985
+ CCVH,CCVH
986
+ KTCF,KTCF
987
+ ISYI,ISYI
988
+ NQII,NQII
989
+ GPCY,GPCY
990
+ HCVE,HCVE
991
+ FSYV,FSYV
992
+ SIAV,SIAV
993
+ VVNC,VVNC
994
+ VCHD,VCHD
995
+ DTDS,DTDS
996
+ CFVK,CFVK
997
+ VNDH,VNDH
998
+ VHWL,VHWL
999
+ CSCE,CSCE
1000
+ KQFR,KQFR
1001
+ EFDP,EFDP
1002
+ RDAI,RDAI
1003
+ PTMQ,PTMQ
1004
+ SDGG,SDGG
1005
+ PTVI,PTVI
1006
+ MIMP,MIMP
1007
+ QSVW,QSVW
1008
+ ETQH,ETQH
1009
+ KVCK,KVCK
1010
+ QRSE,QRSE
1011
+ CQAE,CQAE
1012
+ HVFE,HVFE
1013
+ EHHR,EHHR
1014
+ IKYC,IKYC
1015
+ YWPA,YWPA
1016
+ PGVW,PGVW
1017
+ EHGF,EHGF
1018
+ IQDC,IQDC
1019
+ YVHI,YVHI
1020
+ TMFP,TMFP
1021
+ AEDA,AEDA
1022
+ PCTW,PCTW
1023
+ FNGV,FNGV
1024
+ AHVD,AHVD
1025
+ CGIY,CGIY
1026
+ CYFW,CYFW
1027
+ GQCG,GQCG
1028
+ NTSW,NTSW
1029
+ WYSQ,WYSQ
1030
+ WCLH,WCLH
1031
+ GHCP,GHCP
1032
+ YPMN,YPMN
1033
+ WCPF,WCPF
1034
+ MVGK,MVGK
1035
+ DCVI,DCVI
1036
+ WRPF,WRPF
1037
+ VEDP,VEDP
1038
+ WWNS,WWNS
1039
+ PQFN,PQFN
1040
+ ITGY,ITGY
1041
+ QRWD,QRWD
1042
+ FPDR,FPDR
1043
+ EKQQ,EKQQ
1044
+ FWYD,FWYD
1045
+ TYYL,TYYL
1046
+ THCL,THCL
1047
+ RGTI,RGTI
1048
+ ALVA,ALVA
1049
+ LEWG,LEWG
1050
+ RVQH,RVQH
1051
+ LFAS,LFAS
1052
+ SVHG,SVHG
1053
+ SREG,SREG
1054
+ GVDR,GVDR
1055
+ MMVT,MMVT
1056
+ VPGH,VPGH
1057
+ YTQD,YTQD
1058
+ DYPH,DYPH
1059
+ RRMG,RRMG
1060
+ AVDP,AVDP
1061
+ EVKQ,EVKQ
1062
+ LTFC,LTFC
1063
+ KATL,KATL
1064
+ MYKA,MYKA
1065
+ RYCC,RYCC
1066
+ VHKY,VHKY
1067
+ LVMI,LVMI
1068
+ YHFK,YHFK
1069
+ RLNQ,RLNQ
1070
+ EMEV,EMEV
1071
+ DPGV,DPGV
1072
+ AAWQ,AAWQ
1073
+ FDDC,FDDC
1074
+ SKLN,SKLN
1075
+ LCEE,LCEE
1076
+ VVWW,VVWW
1077
+ KLQI,KLQI
1078
+ DHDA,DHDA
1079
+ RTQF,RTQF
1080
+ STTP,STTP
1081
+ FFEI,FFEI
1082
+ IKCN,IKCN
1083
+ ACWP,ACWP
1084
+ AWGE,AWGE
1085
+ HYRY,HYRY
1086
+ FFVR,FFVR
1087
+ CEEV,CEEV
1088
+ LQMY,LQMY
1089
+ SACN,SACN
1090
+ MWWT,MWWT
1091
+ IDQT,IDQT
1092
+ QGCS,QGCS
1093
+ WRLE,WRLE
1094
+ AAPH,AAPH
1095
+ NTKN,NTKN
1096
+ FMSK,FMSK
1097
+ MGLI,MGLI
1098
+ YECV,YECV
1099
+ RFID,RFID
1100
+ CEDK,CEDK
1101
+ ILEE,ILEE
1102
+ RAMW,RAMW
1103
+ FPTQ,FPTQ
1104
+ HRCR,HRCR
1105
+ FIMH,FIMH
1106
+ KIPT,KIPT
1107
+ NNNH,NNNH
1108
+ ANWL,ANWL
1109
+ ARTD,ARTD
1110
+ APDW,APDW
1111
+ AFCR,AFCR
1112
+ FQPT,FQPT
1113
+ ALNI,ALNI
1114
+ FNWC,FNWC
1115
+ SYEV,SYEV
1116
+ IMYD,IMYD
1117
+ TRVA,TRVA
1118
+ RAMI,RAMI
1119
+ QCTI,QCTI
1120
+ NQRE,NQRE
1121
+ YEPV,YEPV
1122
+ CWAK,CWAK
1123
+ LFCK,LFCK
1124
+ NESI,NESI
1125
+ WHQL,WHQL
1126
+ DGVF,DGVF
1127
+ NHPH,NHPH
1128
+ YSQT,YSQT
1129
+ PWDL,PWDL
1130
+ FEDQ,FEDQ
1131
+ YMPP,YMPP
1132
+ RNCR,RNCR
1133
+ NWWQ,NWWQ
1134
+ VMVQ,VMVQ
1135
+ YSSP,YSSP
1136
+ QHKQ,QHKQ
1137
+ FPVM,FPVM
1138
+ ADHT,ADHT
1139
+ ATSY,ATSY
1140
+ DLPV,DLPV
1141
+ ETRD,ETRD
1142
+ MHVD,MHVD
1143
+ VIHG,VIHG
1144
+ FDEA,FDEA
1145
+ TSNQ,TSNQ
1146
+ YSIF,YSIF
1147
+ QIII,QIII
1148
+ RGGM,RGGM
1149
+ VAMH,VAMH
1150
+ CVGQ,CVGQ
1151
+ RPYI,RPYI
1152
+ DEFL,DEFL
1153
+ QHPN,QHPN
1154
+ MLEP,MLEP
1155
+ IEDW,IEDW
1156
+ YLWR,YLWR
1157
+ DHHD,DHHD
1158
+ KSWF,KSWF
1159
+ AVKM,AVKM
1160
+ SRER,SRER
1161
+ AMNH,AMNH
1162
+ MDDM,MDDM
1163
+ CGNP,CGNP
1164
+ GEHF,GEHF
1165
+ PLPD,PLPD
1166
+ FSHK,FSHK
1167
+ AISH,AISH
1168
+ GDAN,GDAN
1169
+ CQYQ,CQYQ
1170
+ AGEQ,AGEQ
1171
+ RQWE,RQWE
1172
+ RRQL,RRQL
1173
+ WHGN,WHGN
1174
+ PKYY,PKYY
1175
+ CCIM,CCIM
1176
+ KWRL,KWRL
1177
+ LTTE,LTTE
1178
+ MYNQ,MYNQ
1179
+ SSAI,SSAI
1180
+ LYCT,LYCT
1181
+ CMPV,CMPV
1182
+ HVCL,HVCL
1183
+ NRKH,NRKH
1184
+ KECR,KECR
1185
+ TWKA,TWKA
1186
+ WASS,WASS
1187
+ KKIY,KKIY
1188
+ VAFT,VAFT
1189
+ LDVC,LDVC
1190
+ YEPK,YEPK
1191
+ CISD,CISD
1192
+ NMWE,NMWE
1193
+ CVNT,CVNT
1194
+ TLDK,TLDK
1195
+ CYGK,CYGK
1196
+ PVTT,PVTT
1197
+ HDDC,HDDC
1198
+ LGLF,LGLF
1199
+ LAQN,LAQN
1200
+ NTFF,NTFF
1201
+ KISF,KISF
1202
+ GVLP,GVLP
1203
+ LNVM,LNVM
1204
+ NAGK,NAGK
1205
+ NHNV,NHNV
1206
+ LRCW,LRCW
1207
+ WMYG,WMYG
1208
+ GCYY,GCYY
1209
+ CSYK,CSYK
1210
+ PDLM,PDLM
1211
+ WSWA,WSWA
1212
+ GQSL,GQSL
1213
+ NWRM,NWRM
1214
+ EEHP,EEHP
1215
+ WCYP,WCYP
1216
+ NNAM,NNAM
1217
+ MHHK,MHHK
1218
+ FGFS,FGFS
1219
+ KPLK,KPLK
1220
+ TYPI,TYPI
1221
+ CDRS,CDRS
1222
+ WMFH,WMFH
1223
+ HEWG,HEWG
1224
+ PQNW,PQNW
1225
+ QVQH,QVQH
1226
+ NTPI,NTPI
1227
+ VMVI,VMVI
1228
+ RHAT,RHAT
1229
+ PKKF,PKKF
1230
+ IIHN,IIHN
1231
+ LVNS,LVNS
1232
+ YTSK,YTSK
1233
+ IAGS,IAGS
1234
+ KNGE,KNGE
1235
+ GESD,GESD
1236
+ IAKF,IAKF
1237
+ QPDG,QPDG
1238
+ CLEN,CLEN
1239
+ MQHR,MQHR
1240
+ YRFL,YRFL
1241
+ RTPP,RTPP
1242
+ IVKA,IVKA
1243
+ KDPY,KDPY
1244
+ SSFF,SSFF
1245
+ YKPE,YKPE
1246
+ NEDG,NEDG
1247
+ YEGS,YEGS
1248
+ FGGQ,FGGQ
1249
+ NDFM,NDFM
1250
+ HNEC,HNEC
1251
+ NQLR,NQLR
1252
+ RLVW,RLVW
1253
+ NVFD,NVFD
1254
+ PMNY,PMNY
1255
+ RLHA,RLHA
1256
+ DIII,DIII
1257
+ WMLQ,WMLQ
1258
+ KACN,KACN
1259
+ IATA,IATA
1260
+ LIAS,LIAS
1261
+ SVSF,SVSF
1262
+ CKIS,CKIS
1263
+ YNFT,YNFT
1264
+ CKGC,CKGC
1265
+ EQIV,EQIV
1266
+ FRKQ,FRKQ
1267
+ VAWY,VAWY
1268
+ NWPG,NWPG
1269
+ RIHV,RIHV
1270
+ TIEC,TIEC
1271
+ NLYT,NLYT
1272
+ CTYK,CTYK
1273
+ FTQN,FTQN
1274
+ WVER,WVER
1275
+ VPII,VPII
1276
+ DKNM,DKNM
1277
+ MYEK,MYEK
1278
+ WYWF,WYWF
1279
+ IDVQ,IDVQ
1280
+ SWTV,SWTV
1281
+ DTVE,DTVE
1282
+ CDKP,CDKP
1283
+ WEEQ,WEEQ
1284
+ AFRC,AFRC
1285
+ NTKQ,NTKQ
1286
+ KQED,KQED
1287
+ PESP,PESP
1288
+ HDTQ,HDTQ
1289
+ VHHE,VHHE
1290
+ SWGA,SWGA
1291
+ YEHR,YEHR
1292
+ EDWE,EDWE
1293
+ QSVY,QSVY
1294
+ AMWP,AMWP
1295
+ VLHP,VLHP
1296
+ KEYR,KEYR
1297
+ ECKA,ECKA
1298
+ LIAN,LIAN
1299
+ SFHW,SFHW
1300
+ VCHQ,VCHQ
1301
+ CHYW,CHYW
1302
+ PVPV,PVPV
1303
+ SPSG,SPSG
1304
+ LRIA,LRIA
1305
+ YQYS,YQYS
1306
+ GGLV,GGLV
1307
+ DTTL,DTTL
1308
+ TKKN,TKKN
1309
+ QIYT,QIYT
1310
+ NMYD,NMYD
1311
+ EFRP,EFRP
1312
+ FTIK,FTIK
1313
+ WLWV,WLWV
1314
+ CKVC,CKVC
1315
+ TPIG,TPIG
1316
+ WEWH,WEWH
1317
+ QCVC,QCVC
1318
+ NTDK,NTDK
1319
+ WDCT,WDCT
1320
+ RMKF,RMKF
1321
+ LRIY,LRIY
1322
+ FETM,FETM
1323
+ ECRP,ECRP
1324
+ RFVF,RFVF
1325
+ VQIP,VQIP
1326
+ DQEE,DQEE
1327
+ MVLS,MVLS
1328
+ CYIT,CYIT
1329
+ FNPE,FNPE
1330
+ IAGE,IAGE
1331
+ LMWK,LMWK
1332
+ GQAS,GQAS
1333
+ LEWQ,LEWQ
1334
+ IGHI,IGHI
1335
+ ICMM,ICMM
1336
+ MEDM,MEDM
1337
+ FGRY,FGRY
1338
+ VGWW,VGWW
1339
+ KDIN,KDIN
1340
+ RGRW,RGRW
1341
+ YGES,YGES
1342
+ HNTA,HNTA
1343
+ EGNH,EGNH
1344
+ VNTW,VNTW
1345
+ PPVQ,PPVQ
1346
+ PWWA,PWWA
1347
+ MTHR,MTHR
1348
+ NKCN,NKCN
1349
+ IGRL,IGRL
1350
+ FIWM,FIWM
1351
+ PGTT,PGTT
1352
+ HGAE,HGAE
1353
+ PMWS,PMWS
1354
+ KWNC,KWNC
1355
+ GFDW,GFDW
1356
+ QKKG,QKKG
1357
+ HVTG,HVTG
1358
+ SGET,SGET
1359
+ AWKW,AWKW
1360
+ TRPE,TRPE
1361
+ RVHE,RVHE
1362
+ EEPS,EEPS
1363
+ CVSY,CVSY
1364
+ SRPE,SRPE
1365
+ LMMY,LMMY
1366
+ CSDW,CSDW
1367
+ RIHP,RIHP
1368
+ YDTM,YDTM
1369
+ YTYT,YTYT
1370
+ GSHI,GSHI
1371
+ NCRV,NCRV
1372
+ MGVK,MGVK
1373
+ LWNY,LWNY
1374
+ MYWC,MYWC
1375
+ ETST,ETST
1376
+ NCVV,NCVV
1377
+ TIEI,TIEI
1378
+ DMMK,DMMK
1379
+ NMIR,NMIR
1380
+ EFVF,EFVF
1381
+ KTSK,KTSK
1382
+ EQFD,EQFD
1383
+ PVMD,PVMD
1384
+ IGYL,IGYL
1385
+ LLLM,LLLM
1386
+ HDRC,HDRC
1387
+ QGCV,QGCV
1388
+ WQPH,WQPH
1389
+ DRWA,DRWA
1390
+ MVVG,MVVG
1391
+ TMMC,TMMC
1392
+ KVYN,KVYN
1393
+ AIIA,AIIA
1394
+ FNYS,FNYS
1395
+ HRCA,HRCA
1396
+ IAWP,IAWP
1397
+ VDYL,VDYL
1398
+ VICL,VICL
1399
+ HYDA,HYDA
1400
+ ADSA,ADSA
1401
+ TYYT,TYYT
1402
+ SQRL,SQRL
1403
+ EIQM,EIQM
1404
+ HRNL,HRNL
1405
+ DQVF,DQVF
1406
+ PTWK,PTWK
1407
+ LCSY,LCSY
1408
+ FHST,FHST
1409
+ DKQV,DKQV
1410
+ PFSM,PFSM
1411
+ QLAQ,QLAQ
1412
+ EDMF,EDMF
1413
+ EKEN,EKEN
1414
+ LMPW,LMPW
1415
+ CALR,CALR
1416
+ RPAY,RPAY
1417
+ HVCH,HVCH
1418
+ GVQA,GVQA
1419
+ TLGN,TLGN
1420
+ QFIR,QFIR
1421
+ LKYW,LKYW
1422
+ MTWR,MTWR
1423
+ VRMD,VRMD
1424
+ LLFQ,LLFQ
1425
+ CEVE,CEVE
1426
+ MDDA,MDDA
1427
+ WPVP,WPVP
1428
+ DTVI,DTVI
1429
+ WGVM,WGVM
1430
+ DYDT,DYDT
1431
+ NWKV,NWKV
1432
+ DLPC,DLPC
1433
+ LEMF,LEMF
1434
+ VHVQ,VHVQ
1435
+ RVWF,RVWF
1436
+ KNDW,KNDW
1437
+ EAMT,EAMT
1438
+ IQLT,IQLT
1439
+ HYYY,HYYY
1440
+ PEGN,PEGN
1441
+ TRQR,TRQR
1442
+ EIML,EIML
1443
+ ETGF,ETGF
1444
+ QQSV,QQSV
1445
+ KFIP,KFIP
1446
+ TVAM,TVAM
1447
+ GTHA,GTHA
1448
+ QCHI,QCHI
1449
+ IGIR,IGIR
1450
+ HQLS,HQLS
1451
+ QYQM,QYQM
1452
+ PNEP,PNEP
1453
+ RKSK,RKSK
1454
+ AAYG,AAYG
1455
+ ATCM,ATCM
1456
+ DDCH,DDCH
1457
+ NTAF,NTAF
1458
+ LHNG,LHNG
1459
+ ECIA,ECIA
1460
+ EWAA,EWAA
1461
+ CKSK,CKSK
1462
+ MDTH,MDTH
1463
+ DNCP,DNCP
1464
+ NLDP,NLDP
1465
+ KFVS,KFVS
1466
+ TSLV,TSLV
1467
+ QVCA,QVCA
1468
+ DPDM,DPDM
1469
+ LSGA,LSGA
1470
+ MLVC,MLVC
1471
+ IAPM,IAPM
1472
+ CGFY,CGFY
1473
+ VIHV,VIHV
1474
+ GLRD,GLRD
1475
+ EYDE,EYDE
1476
+ FNEP,FNEP
1477
+ DIMC,DIMC
1478
+ AFIC,AFIC
1479
+ KFMD,KFMD
1480
+ YFNI,YFNI
1481
+ HTMD,HTMD
1482
+ EKFN,EKFN
1483
+ NFII,NFII
1484
+ MNHN,MNHN
1485
+ PFGK,PFGK
1486
+ EWSD,EWSD
1487
+ SGMK,SGMK
1488
+ SHTM,SHTM
1489
+ RFQL,RFQL
1490
+ LNTY,LNTY
1491
+ EWPT,EWPT
1492
+ KINA,KINA
1493
+ YYED,YYED
1494
+ LGSD,LGSD
1495
+ CRNH,CRNH
1496
+ NNSK,NNSK
1497
+ SPIQ,SPIQ
1498
+ KWQV,KWQV
1499
+ KKQA,KKQA
1500
+ TSFI,TSFI
1501
+ NPFE,NPFE
1502
+ MKCE,MKCE
1503
+ EQKH,EQKH
1504
+ WENH,WENH
1505
+ VDLI,VDLI
1506
+ WVDF,WVDF
1507
+ FFAM,FFAM
1508
+ SPSP,SPSP
1509
+ TWKI,TWKI
1510
+ TWTC,TWTC
1511
+ RTGE,RTGE
1512
+ SFSI,SFSI
1513
+ WTEF,WTEF
1514
+ DIFH,DIFH
1515
+ QFGV,QFGV
1516
+ NRRP,NRRP
1517
+ GHHS,GHHS
1518
+ IYMN,IYMN
1519
+ TTWT,TTWT
1520
+ NVSG,NVSG
1521
+ LAKQ,LAKQ
1522
+ WNAI,WNAI
1523
+ GHWG,GHWG
1524
+ SPPG,SPPG
1525
+ FWKT,FWKT
1526
+ ERKH,ERKH
1527
+ WWLW,WWLW
1528
+ MNKM,MNKM
1529
+ RITQ,RITQ
1530
+ MFMR,MFMR
1531
+ KVHQ,KVHQ
1532
+ TFFD,TFFD
1533
+ FWFT,FWFT
1534
+ LHLT,LHLT
1535
+ LQQF,LQQF
1536
+ CWVA,CWVA
1537
+ IRQW,IRQW
1538
+ EGHH,EGHH
1539
+ VHLC,VHLC
1540
+ TVKK,TVKK
1541
+ QSQH,QSQH
1542
+ DFIT,DFIT
1543
+ IGRW,IGRW
1544
+ FVHC,FVHC
1545
+ TERW,TERW
1546
+ KFSH,KFSH
1547
+ FECP,FECP
1548
+ DWFG,DWFG
1549
+ SWIH,SWIH
1550
+ KGTR,KGTR
1551
+ HEII,HEII
1552
+ PHDF,PHDF
1553
+ QLQA,QLQA
1554
+ RLYF,RLYF
1555
+ TGYA,TGYA
1556
+ KRKH,KRKH
1557
+ YQCY,YQCY
1558
+ MPWS,MPWS
1559
+ SQTV,SQTV
1560
+ RRNV,RRNV
1561
+ HPCP,HPCP
1562
+ SVRC,SVRC
1563
+ GHVA,GHVA
1564
+ LNVW,LNVW
1565
+ PSRY,PSRY
1566
+ ICRN,ICRN
1567
+ YTNI,YTNI
1568
+ YGIM,YGIM
1569
+ GGNN,GGNN
1570
+ TWEG,TWEG
1571
+ NCYT,NCYT
1572
+ WNDT,WNDT
1573
+ EVSF,EVSF
1574
+ KCDN,KCDN
1575
+ YYVE,YYVE
1576
+ RFVY,RFVY
1577
+ AVIG,AVIG
1578
+ AVKT,AVKT
1579
+ KKLH,KKLH
1580
+ MLTD,MLTD
1581
+ TPMH,TPMH
1582
+ LAQY,LAQY
1583
+ GDIH,GDIH
1584
+ NVEM,NVEM
1585
+ THMI,THMI
1586
+ EMLC,EMLC
1587
+ TFCW,TFCW
1588
+ RVQA,RVQA
1589
+ WHED,WHED
1590
+ SLRQ,SLRQ
1591
+ CKHA,CKHA
1592
+ INVT,INVT
1593
+ QHTI,QHTI
1594
+ VQYL,VQYL
1595
+ FSWQ,FSWQ
1596
+ SFSW,SFSW
1597
+ KPFI,KPFI
1598
+ QIND,QIND
1599
+ KWYI,KWYI
1600
+ IVFD,IVFD
1601
+ TYSC,TYSC
1602
+ KYGD,KYGD
1603
+ KDID,KDID
1604
+ HQTQ,HQTQ
1605
+ SACQ,SACQ
1606
+ VYIV,VYIV
1607
+ PNLV,PNLV
1608
+ MHFD,MHFD
1609
+ KWCN,KWCN
1610
+ GMRN,GMRN
1611
+ PIHW,PIHW
1612
+ QRYQ,QRYQ
1613
+ GESI,GESI
1614
+ WCHK,WCHK
1615
+ HQTR,HQTR
1616
+ PNDQ,PNDQ
1617
+ SRIN,SRIN
1618
+ FLWG,FLWG
1619
+ NGSN,NGSN
1620
+ GKSF,GKSF
1621
+ PCMM,PCMM
1622
+ DQPL,DQPL
1623
+ SYQY,SYQY
1624
+ YYTT,YYTT
1625
+ VIRS,VIRS
1626
+ LSSM,LSSM
1627
+ RACV,RACV
1628
+ KSTA,KSTA
1629
+ NFTQ,NFTQ
1630
+ CVVM,CVVM
1631
+ WTNV,WTNV
1632
+ LCVG,LCVG
1633
+ IMTD,IMTD
1634
+ HTHW,HTHW
1635
+ GNEP,GNEP
1636
+ NGQG,NGQG
1637
+ TFKL,TFKL
1638
+ SVYH,SVYH
1639
+ TVCM,TVCM
1640
+ CQYY,CQYY
1641
+ FFSS,FFSS
1642
+ TWVS,TWVS
1643
+ KMKK,KMKK
1644
+ VNVQ,VNVQ
1645
+ PWEA,PWEA
1646
+ LDIG,LDIG
1647
+ DFVM,DFVM
1648
+ SAAE,SAAE
1649
+ NQPC,NQPC
1650
+ ADMI,ADMI
1651
+ ETDI,ETDI
1652
+ FCCD,FCCD
1653
+ EDVT,EDVT
1654
+ YPLG,YPLG
1655
+ CLCS,CLCS
1656
+ CTTS,CTTS
1657
+ KYLL,KYLL
1658
+ QVGN,QVGN
1659
+ KPRW,KPRW
1660
+ DEQN,DEQN
1661
+ KHAQ,KHAQ
1662
+ TFSK,TFSK
1663
+ MMTS,MMTS
1664
+ QQHD,QQHD
1665
+ CSYV,CSYV
1666
+ DYGS,DYGS
1667
+ ITKD,ITKD
1668
+ LWLA,LWLA
1669
+ GKRS,GKRS
1670
+ YWLV,YWLV
1671
+ EFAN,EFAN
1672
+ MPMS,MPMS
1673
+ MAYH,MAYH
1674
+ PCRG,PCRG
1675
+ WEKY,WEKY
1676
+ MGED,MGED
1677
+ AQQM,AQQM
1678
+ CYCW,CYCW
1679
+ IEDM,IEDM
1680
+ NEND,NEND
1681
+ SFEE,SFEE
1682
+ FQNE,FQNE
1683
+ LPLF,LPLF
1684
+ MQCV,MQCV
1685
+ YDYS,YDYS
1686
+ IDCA,IDCA
1687
+ EWDN,EWDN
1688
+ TGWP,TGWP
1689
+ SPWD,SPWD
1690
+ DIDS,DIDS
1691
+ EVTC,EVTC
1692
+ KMIK,KMIK
1693
+ GDSE,GDSE
1694
+ DCYA,DCYA
1695
+ YLET,YLET
1696
+ LLCC,LLCC
1697
+ TGEQ,TGEQ
1698
+ IMWR,IMWR
1699
+ WQQW,WQQW
1700
+ QPPG,QPPG
1701
+ AMEV,AMEV
1702
+ TYEF,TYEF
1703
+ DGHR,DGHR
1704
+ HPHP,HPHP
1705
+ GHHY,GHHY
1706
+ ARNE,ARNE
1707
+ ACTC,ACTC
1708
+ PTNL,PTNL
1709
+ KWWQ,KWWQ
1710
+ EPSF,EPSF
1711
+ PIDQ,PIDQ
1712
+ QTIW,QTIW
1713
+ DAMF,DAMF
1714
+ RKGV,RKGV
1715
+ DDHT,DDHT
1716
+ TFIR,TFIR
1717
+ LQQC,LQQC
1718
+ RDWA,RDWA
1719
+ FINQ,FINQ
1720
+ VRAK,VRAK
1721
+ FDEE,FDEE
1722
+ RTCP,RTCP
1723
+ LHWM,LHWM
1724
+ KDNC,KDNC
1725
+ VLVP,VLVP
1726
+ VFEW,VFEW
1727
+ QIIN,QIIN
1728
+ DSQR,DSQR
1729
+ LNDW,LNDW
1730
+ VIPR,VIPR
1731
+ HQWN,HQWN
1732
+ NLDH,NLDH
1733
+ MSHM,MSHM
1734
+ STGT,STGT
1735
+ YQYH,YQYH
1736
+ KLRI,KLRI
1737
+ MMKE,MMKE
1738
+ VRKW,VRKW
1739
+ VKYE,VKYE
1740
+ DEAH,DEAH
1741
+ PECI,PECI
1742
+ CADI,CADI
1743
+ VSAK,VSAK
1744
+ ENMS,ENMS
1745
+ PSSQ,PSSQ
1746
+ DKCQ,DKCQ
1747
+ IDCS,IDCS
1748
+ SLLK,SLLK
1749
+ DPHG,DPHG
1750
+ HMFA,HMFA
1751
+ WRHF,WRHF
1752
+ KPKS,KPKS
1753
+ NEPY,NEPY
1754
+ CMIN,CMIN
1755
+ KAYQ,KAYQ
1756
+ TTMH,TTMH
1757
+ DDVL,DDVL
1758
+ ERPG,ERPG
1759
+ KKMP,KKMP
1760
+ TENK,TENK
1761
+ LNWM,LNWM
1762
+ GHAL,GHAL
1763
+ WMKD,WMKD
1764
+ FIWK,FIWK
1765
+ SWMY,SWMY
1766
+ LYHF,LYHF
1767
+ MKYP,MKYP
1768
+ REMS,REMS
1769
+ AMFA,AMFA
1770
+ YSYT,YSYT
1771
+ EKKW,EKKW
1772
+ LRTG,LRTG
1773
+ RFMT,RFMT
1774
+ LHHW,LHHW
1775
+ KAPS,KAPS
1776
+ TFTD,TFTD
1777
+ IMSG,IMSG
1778
+ YKMQ,YKMQ
1779
+ CPPP,CPPP
1780
+ FMDA,FMDA
1781
+ YYAM,YYAM
1782
+ GTSS,GTSS
1783
+ ALYP,ALYP
1784
+ RYND,RYND
1785
+ WWFL,WWFL
1786
+ WYAT,WYAT
1787
+ CIMM,CIMM
1788
+ IVHK,IVHK
1789
+ RYSF,RYSF
1790
+ ENEA,ENEA
1791
+ WPFT,WPFT
1792
+ QVLH,QVLH
1793
+ ECFV,ECFV
1794
+ HQPP,HQPP
1795
+ SQQV,SQQV
1796
+ NRTP,NRTP
1797
+ WEEA,WEEA
1798
+ ICNQ,ICNQ
1799
+ KVDG,KVDG
1800
+ GSLK,GSLK
1801
+ CMVY,CMVY
1802
+ GNNK,GNNK
1803
+ IRQM,IRQM
1804
+ ASHG,ASHG
1805
+ PHEM,PHEM
1806
+ NNSM,NNSM
1807
+ ADRM,ADRM
1808
+ TAYW,TAYW
1809
+ FCDK,FCDK
1810
+ ETYQ,ETYQ
1811
+ MMYF,MMYF
1812
+ FLIM,FLIM
1813
+ GKFT,GKFT
1814
+ IQAK,IQAK
1815
+ GINY,GINY
1816
+ VILD,VILD
1817
+ CPLH,CPLH
1818
+ LVHQ,LVHQ
1819
+ LCKP,LCKP
1820
+ RVNH,RVNH
1821
+ HCGK,HCGK
1822
+ HCQH,HCQH
1823
+ GFKI,GFKI
1824
+ AGET,AGET
1825
+ AIHM,AIHM
1826
+ QITF,QITF
1827
+ RWFE,RWFE
1828
+ GYED,GYED
1829
+ PCSM,PCSM
1830
+ CREN,CREN
1831
+ ASAL,ASAL
1832
+ VRTV,VRTV
1833
+ AQVE,AQVE
1834
+ QFND,QFND
1835
+ FGGR,FGGR
1836
+ KEAT,KEAT
1837
+ CRVT,CRVT
1838
+ PKKG,PKKG
1839
+ PHQM,PHQM
1840
+ PAKY,PAKY
1841
+ IYSC,IYSC
1842
+ GVVM,GVVM
1843
+ IKLF,IKLF
1844
+ HHYG,HHYG
1845
+ SACL,SACL
1846
+ GQCS,GQCS
1847
+ AQIM,AQIM
1848
+ FCVN,FCVN
1849
+ HVWG,HVWG
1850
+ NADT,NADT
1851
+ MHKK,MHKK
1852
+ VWLY,VWLY
1853
+ FPCQ,FPCQ
1854
+ PEKW,PEKW
1855
+ AFYP,AFYP
1856
+ TQYY,TQYY
1857
+ VECI,VECI
1858
+ KWFQ,KWFQ
1859
+ TIEG,TIEG
1860
+ STHY,STHY
1861
+ QSAM,QSAM
1862
+ RNDD,RNDD
1863
+ EQHA,EQHA
1864
+ VCFH,VCFH
1865
+ YRWF,YRWF
1866
+ VQKH,VQKH
1867
+ CSRT,CSRT
1868
+ PRNF,PRNF
1869
+ ADIG,ADIG
1870
+ FHNV,FHNV
1871
+ NVAH,NVAH
1872
+ WYLN,WYLN
1873
+ DKWK,DKWK
1874
+ GIWI,GIWI
1875
+ MEEF,MEEF
1876
+ HCMM,HCMM
1877
+ TLIM,TLIM
1878
+ PYKV,PYKV
1879
+ YATS,YATS
1880
+ QMQW,QMQW
1881
+ KMLA,KMLA
1882
+ ALWW,ALWW
1883
+ WKHD,WKHD
1884
+ DVHP,DVHP
1885
+ EYHF,EYHF
1886
+ DLSY,DLSY
1887
+ NIRC,NIRC
1888
+ GGFI,GGFI
1889
+ RGWM,RGWM
1890
+ SWMR,SWMR
1891
+ FNQD,FNQD
1892
+ WMAY,WMAY
1893
+ ITKT,ITKT
1894
+ LIAV,LIAV
1895
+ CHTP,CHTP
1896
+ QIDD,QIDD
1897
+ PHAG,PHAG
1898
+ IDNA,IDNA
1899
+ DHSC,DHSC
1900
+ EWLA,EWLA
1901
+ FSLW,FSLW
1902
+ QEYK,QEYK
1903
+ LHHR,LHHR
1904
+ VQYS,VQYS
1905
+ VDQD,VDQD
1906
+ RGGL,RGGL
1907
+ ERTH,ERTH
1908
+ WYDD,WYDD
1909
+ EMFN,EMFN
1910
+ DKEP,DKEP
1911
+ TLCW,TLCW
1912
+ QSHE,QSHE
1913
+ IDLS,IDLS
1914
+ KKRT,KKRT
1915
+ CHQI,CHQI
1916
+ PLWC,PLWC
1917
+ VIVA,VIVA
1918
+ KDTG,KDTG
1919
+ PTVC,PTVC
1920
+ YICD,YICD
1921
+ CKRT,CKRT
1922
+ DIDP,DIDP
1923
+ DTEI,DTEI
1924
+ GIAK,GIAK
1925
+ KQLM,KQLM
1926
+ PYPN,PYPN
1927
+ LFLK,LFLK
1928
+ MRGF,MRGF
1929
+ FAMD,FAMD
1930
+ TNTS,TNTS
1931
+ MMIA,MMIA
1932
+ HPQM,HPQM
1933
+ FHEQ,FHEQ
1934
+ CHLP,CHLP
1935
+ NMFS,NMFS
1936
+ LSRR,LSRR
1937
+ CSSG,CSSG
1938
+ PYHS,PYHS
1939
+ ILDG,ILDG
1940
+ MHWD,MHWD
1941
+ QCNV,QCNV
1942
+ QILA,QILA
1943
+ QSCQ,QSCQ
1944
+ QYTN,QYTN
1945
+ WKPR,WKPR
1946
+ RTHR,RTHR
1947
+ FYMW,FYMW
1948
+ SGGN,SGGN
1949
+ CAHL,CAHL
1950
+ HNIR,HNIR
1951
+ MFKN,MFKN
1952
+ KLTN,KLTN
1953
+ CKGT,CKGT
1954
+ CNHN,CNHN
1955
+ FIVT,FIVT
1956
+ MCWG,MCWG
1957
+ TPPR,TPPR
1958
+ FLEW,FLEW
1959
+ QSHA,QSHA
1960
+ DKAD,DKAD
1961
+ DIVY,DIVY
1962
+ MKVM,MKVM
1963
+ TACP,TACP
1964
+ TWNE,TWNE
1965
+ HAYR,HAYR
1966
+ VYRY,VYRY
1967
+ PDKC,PDKC
1968
+ WTCY,WTCY
1969
+ AMWN,AMWN
1970
+ FRFY,FRFY
1971
+ IMRC,IMRC
1972
+ VNPA,VNPA
1973
+ RPDP,RPDP
1974
+ TLEF,TLEF
1975
+ DLNF,DLNF
1976
+ TCPQ,TCPQ
1977
+ VLRR,VLRR
1978
+ WKWF,WKWF
1979
+ VEHK,VEHK
1980
+ DNEA,DNEA
1981
+ DIVA,DIVA
1982
+ FNSQ,FNSQ
1983
+ QYTG,QYTG
1984
+ WPSR,WPSR
1985
+ VATT,VATT
1986
+ CMIF,CMIF
1987
+ EREH,EREH
1988
+ LGKL,LGKL
1989
+ AWQH,AWQH
1990
+ KRCC,KRCC
1991
+ MWCW,MWCW
1992
+ VCGY,VCGY
1993
+ SFFG,SFFG
1994
+ TNRR,TNRR
1995
+ LCVI,LCVI
1996
+ DLSG,DLSG
1997
+ LTIN,LTIN
1998
+ NNDW,NNDW
1999
+ QIFI,QIFI
2000
+ DAQL,DAQL
2001
+ DYRG,DYRG
2002
+ HAWD,HAWD
2003
+ TKKH,TKKH
2004
+ HVPF,HVPF
2005
+ CMSK,CMSK
2006
+ HYAK,HYAK
2007
+ CTHE,CTHE
2008
+ ACLN,ACLN
2009
+ ILRS,ILRS
2010
+ RAHP,RAHP
2011
+ TTCW,TTCW
2012
+ LIFD,LIFD
2013
+ YRPE,YRPE
2014
+ GGEA,GGEA
2015
+ LGSN,LGSN
2016
+ HETY,HETY
2017
+ HVYV,HVYV
2018
+ PRLD,PRLD
2019
+ WYCQ,WYCQ
2020
+ TMQM,TMQM
2021
+ QNRH,QNRH
2022
+ PALF,PALF
2023
+ FDHP,FDHP
2024
+ DNET,DNET
2025
+ AKLN,AKLN
2026
+ IQGF,IQGF
2027
+ IMHG,IMHG
2028
+ NGPL,NGPL
2029
+ NVCF,NVCF
2030
+ LIFN,LIFN
2031
+ WLAP,WLAP
2032
+ WCGQ,WCGQ
2033
+ SGWP,SGWP
2034
+ HGFQ,HGFQ
2035
+ GQFR,GQFR
2036
+ NQER,NQER
2037
+ GRAM,GRAM
2038
+ GASV,GASV
2039
+ VWLA,VWLA
2040
+ QMMC,QMMC
2041
+ YLMA,YLMA
2042
+ FAEL,FAEL
2043
+ KEYH,KEYH
2044
+ GDFY,GDFY
2045
+ RPQT,RPQT
2046
+ MWVK,MWVK
2047
+ GWVS,GWVS
2048
+ WNKY,WNKY
2049
+ LCRD,LCRD
2050
+ CTKF,CTKF
2051
+ VCYP,VCYP
2052
+ SQFQ,SQFQ
2053
+ YLSR,YLSR
2054
+ HESM,HESM
2055
+ MVRG,MVRG
2056
+ LFEM,LFEM
2057
+ TCLH,TCLH
2058
+ LGEL,LGEL
2059
+ WYLC,WYLC
2060
+ GQRD,GQRD
2061
+ PHGL,PHGL
2062
+ WCDN,WCDN
2063
+ MVVI,MVVI
2064
+ RHYP,RHYP
2065
+ WSMV,WSMV
2066
+ YITL,YITL
2067
+ DYHL,DYHL
2068
+ MELW,MELW
2069
+ REYD,REYD
2070
+ VREK,VREK
2071
+ NCTQ,NCTQ
2072
+ HTWQ,HTWQ
2073
+ YSHI,YSHI
2074
+ WRIN,WRIN
2075
+ QGAY,QGAY
2076
+ NTQD,NTQD
2077
+ YRLW,YRLW
2078
+ LKYP,LKYP
2079
+ HYKY,HYKY
2080
+ AYQP,AYQP
2081
+ GFCI,GFCI
2082
+ GAMQ,GAMQ
2083
+ YPSG,YPSG
2084
+ YLNI,YLNI
2085
+ LYPL,LYPL
2086
+ SYCY,SYCY
2087
+ VGVW,VGVW
2088
+ NPPW,NPPW
2089
+ NMNG,NMNG
2090
+ RMSM,RMSM
2091
+ YHGR,YHGR
2092
+ LEPK,LEPK
2093
+ FYYH,FYYH
2094
+ TGVC,TGVC
2095
+ GQWY,GQWY
2096
+ IQDK,IQDK
2097
+ HQWA,HQWA
2098
+ TTYP,TTYP
2099
+ RPIM,RPIM
2100
+ HDEW,HDEW
2101
+ EKFK,EKFK
2102
+ TMNI,TMNI
2103
+ EQLS,EQLS
2104
+ VRIY,VRIY
2105
+ RHQH,RHQH
2106
+ HKWC,HKWC
2107
+ RQDW,RQDW
2108
+ NAEN,NAEN
2109
+ QLGK,QLGK
2110
+ PYFA,PYFA
2111
+ QDPF,QDPF
2112
+ LFFR,LFFR
2113
+ FEQT,FEQT
2114
+ CRYR,CRYR
2115
+ NGVY,NGVY
2116
+ KYLC,KYLC
2117
+ GYMS,GYMS
2118
+ NWHA,NWHA
2119
+ MLWS,MLWS
2120
+ NATQ,NATQ
2121
+ WIMA,WIMA
2122
+ AIGY,AIGY
2123
+ PAEF,PAEF
2124
+ MEIK,MEIK
2125
+ AILP,AILP
2126
+ ENWC,ENWC
2127
+ SMID,SMID
2128
+ APYM,APYM
2129
+ PSCG,PSCG
2130
+ ATSP,ATSP
2131
+ FFDQ,FFDQ
2132
+ FEYQ,FEYQ
2133
+ RRLH,RRLH
2134
+ QCLR,QCLR
2135
+ CQFG,CQFG
2136
+ SLWF,SLWF
2137
+ PGFK,PGFK
2138
+ KVDL,KVDL
2139
+ INPN,INPN
2140
+ LGAH,LGAH
2141
+ YCGI,YCGI
2142
+ YKIK,YKIK
2143
+ YCPT,YCPT
2144
+ KRGW,KRGW
2145
+ YTHK,YTHK
2146
+ GIPV,GIPV
2147
+ IVYQ,IVYQ
2148
+ VYNW,VYNW
2149
+ TTDF,TTDF
2150
+ MFNM,MFNM
2151
+ PIRS,PIRS
2152
+ PMWH,PMWH
2153
+ QLLN,QLLN
2154
+ PCIN,PCIN
2155
+ VPWE,VPWE
2156
+ VFNK,VFNK
2157
+ WTTV,WTTV
2158
+ SYPT,SYPT
2159
+ RDVN,RDVN
2160
+ CHPL,CHPL
2161
+ HSYA,HSYA
2162
+ KDTH,KDTH
2163
+ LTNM,LTNM
2164
+ GQPN,GQPN
2165
+ VEWI,VEWI
2166
+ RMLT,RMLT
2167
+ ENRS,ENRS
2168
+ RSMM,RSMM
2169
+ GSCI,GSCI
2170
+ SCEE,SCEE
2171
+ PFKV,PFKV
2172
+ YFDA,YFDA
2173
+ DCLA,DCLA
2174
+ HSHA,HSHA
2175
+ MVGG,MVGG
2176
+ FVDY,FVDY
2177
+ LIYY,LIYY
2178
+ DNQR,DNQR
2179
+ DFVS,DFVS
2180
+ VMLY,VMLY
2181
+ ETIR,ETIR
2182
+ MEME,MEME
2183
+ NSRN,NSRN
2184
+ EDEM,EDEM
2185
+ MMGP,MMGP
2186
+ QIQD,QIQD
2187
+ QNTE,QNTE
2188
+ VWFP,VWFP
2189
+ KLIY,KLIY
2190
+ WHYL,WHYL
2191
+ RGWE,RGWE
2192
+ WNTL,WNTL
2193
+ WDGL,WDGL
2194
+ RCEF,RCEF
2195
+ FEFV,FEFV
2196
+ ACGA,ACGA
2197
+ WLEA,WLEA
2198
+ NHYN,NHYN
2199
+ WMEM,WMEM
2200
+ YEEQ,YEEQ
2201
+ CYGR,CYGR
2202
+ QADC,QADC
2203
+ AGVP,AGVP
2204
+ MITR,MITR
2205
+ QDPC,QDPC
2206
+ FNWP,FNWP
2207
+ EFHR,EFHR
2208
+ NNPP,NNPP
2209
+ WMVF,WMVF
2210
+ DDRT,DDRT
2211
+ ILAP,ILAP
2212
+ DSNK,DSNK
2213
+ DYMN,DYMN
2214
+ HYTA,HYTA
2215
+ YKDI,YKDI
2216
+ TKIF,TKIF
2217
+ LWIK,LWIK
2218
+ RLRP,RLRP
2219
+ EAGH,EAGH
2220
+ WFHL,WFHL
2221
+ TIET,TIET
2222
+ RDIM,RDIM
2223
+ CMIM,CMIM
2224
+ AIFA,AIFA
2225
+ YDGQ,YDGQ
2226
+ DAFK,DAFK
2227
+ CDYL,CDYL
2228
+ DRKQ,DRKQ
2229
+ EESF,EESF
2230
+ IMQT,IMQT
2231
+ CIKV,CIKV
2232
+ WPTL,WPTL
2233
+ RIAK,RIAK
2234
+ PEKT,PEKT
2235
+ HFHH,HFHH
2236
+ HAHK,HAHK
2237
+ PWLE,PWLE
2238
+ QHTV,QHTV
2239
+ AKAP,AKAP
2240
+ GMMM,GMMM
2241
+ MLCG,MLCG
2242
+ KNAF,KNAF
2243
+ TNPQ,TNPQ
2244
+ GTYH,GTYH
2245
+ WYEC,WYEC
2246
+ GCDC,GCDC
2247
+ FVMF,FVMF
2248
+ AQTR,AQTR
2249
+ SGLL,SGLL
2250
+ VCFQ,VCFQ
2251
+ FFHT,FFHT
2252
+ QIDP,QIDP
2253
+ QNLR,QNLR
2254
+ EPYF,EPYF
2255
+ KMMT,KMMT
2256
+ WFTA,WFTA
2257
+ DSQW,DSQW
2258
+ CIVI,CIVI
2259
+ RCFH,RCFH
2260
+ CKSQ,CKSQ
2261
+ KQLY,KQLY
2262
+ FVHW,FVHW
2263
+ GKKL,GKKL
2264
+ VIDG,VIDG
2265
+ FATV,FATV
2266
+ ELCT,ELCT
2267
+ LDNI,LDNI
2268
+ NQIV,NQIV
2269
+ ECMW,ECMW
2270
+ YFHS,YFHS
2271
+ YSAS,YSAS
2272
+ WAKL,WAKL
2273
+ NSWN,NSWN
2274
+ DCMT,DCMT
2275
+ HNLH,HNLH
2276
+ TTAF,TTAF
2277
+ TQLW,TQLW
2278
+ KHMK,KHMK
2279
+ NPKL,NPKL
2280
+ VSIE,VSIE
2281
+ NAFY,NAFY
2282
+ CFCQ,CFCQ
2283
+ PSSP,PSSP
2284
+ RIPA,RIPA
2285
+ EYHC,EYHC
2286
+ LEQE,LEQE
2287
+ VFEY,VFEY
2288
+ TDRE,TDRE
2289
+ PFRL,PFRL
2290
+ LARK,LARK
2291
+ ATQI,ATQI
2292
+ AMPG,AMPG
2293
+ KHTN,KHTN
2294
+ MSKR,MSKR
2295
+ AGWM,AGWM
2296
+ DPND,DPND
2297
+ GLYG,GLYG
2298
+ MKCK,MKCK
2299
+ CQQF,CQQF
2300
+ CTCG,CTCG
2301
+ AIRV,AIRV
2302
+ DADI,DADI
2303
+ MGDL,MGDL
2304
+ FKGP,FKGP
2305
+ NWLS,NWLS
2306
+ CMSN,CMSN
2307
+ EMRG,EMRG
2308
+ WMEH,WMEH
2309
+ GDDP,GDDP
2310
+ SLIC,SLIC
2311
+ INSK,INSK
2312
+ DVHY,DVHY
2313
+ YRMG,YRMG
2314
+ IDHL,IDHL
2315
+ MARL,MARL
2316
+ WVCY,WVCY
2317
+ CKEE,CKEE
2318
+ VIYW,VIYW
2319
+ IRTD,IRTD
2320
+ QQRF,QQRF
2321
+ TVAI,TVAI
2322
+ YEKT,YEKT
2323
+ LSSF,LSSF
2324
+ HAWV,HAWV
2325
+ RERS,RERS
2326
+ SNGL,SNGL
2327
+ ETEA,ETEA
2328
+ DSPN,DSPN
2329
+ SVTP,SVTP
2330
+ VEHQ,VEHQ
2331
+ LDNC,LDNC
2332
+ EQPV,EQPV
2333
+ LCPI,LCPI
2334
+ NPPR,NPPR
2335
+ HDFV,HDFV
2336
+ MKGW,MKGW
2337
+ CSFY,CSFY
2338
+ KFPH,KFPH
2339
+ CNYD,CNYD
2340
+ SDMN,SDMN
2341
+ CSDC,CSDC
2342
+ TTTM,TTTM
2343
+ HQVK,HQVK
2344
+ ALQA,ALQA
2345
+ VIGC,VIGC
2346
+ IIHI,IIHI
2347
+ IWPY,IWPY
2348
+ HLDY,HLDY
2349
+ YCIW,YCIW
2350
+ HVGA,HVGA
2351
+ WHIA,WHIA
2352
+ ARVC,ARVC
2353
+ SEFA,SEFA
2354
+ PYKS,PYKS
2355
+ QDWV,QDWV
2356
+ GLKL,GLKL
2357
+ VLEA,VLEA
2358
+ KYAT,KYAT
2359
+ GAKS,GAKS
2360
+ WVLA,WVLA
2361
+ DNTM,DNTM
2362
+ WMGV,WMGV
2363
+ HLYK,HLYK
2364
+ TYHF,TYHF
2365
+ LSHV,LSHV
2366
+ RNCE,RNCE
2367
+ KVQF,KVQF
2368
+ DAIT,DAIT
2369
+ RRML,RRML
2370
+ MTSL,MTSL
2371
+ LTKG,LTKG
2372
+ AQNA,AQNA
2373
+ CTTP,CTTP
2374
+ IRQR,IRQR
2375
+ QWCT,QWCT
2376
+ HSHL,HSHL
2377
+ QCPG,QCPG
2378
+ MERP,MERP
2379
+ IDGG,IDGG
2380
+ CTDI,CTDI
2381
+ WTVK,WTVK
2382
+ AADY,AADY
2383
+ CEGA,CEGA
2384
+ ETAA,ETAA
2385
+ AHNR,AHNR
2386
+ WESS,WESS
2387
+ TWDE,TWDE
2388
+ QLPT,QLPT
2389
+ FAAF,FAAF
2390
+ TINK,TINK
2391
+ QESG,QESG
2392
+ DRNW,DRNW
2393
+ YTWY,YTWY
2394
+ TTCN,TTCN
2395
+ VTFP,VTFP
2396
+ PHRP,PHRP
2397
+ RFEI,RFEI
2398
+ PKEP,PKEP
2399
+ QSCW,QSCW
2400
+ VGYK,VGYK
2401
+ PPYM,PPYM
2402
+ GCDE,GCDE
2403
+ DGIS,DGIS
2404
+ HCWC,HCWC
2405
+ WPIR,WPIR
2406
+ KKMF,KKMF
2407
+ VVVV,VVVV
2408
+ GGMT,GGMT
2409
+ YKNA,YKNA
2410
+ ELAE,ELAE
2411
+ HYVR,HYVR
2412
+ IIEG,IIEG
2413
+ LETR,LETR
2414
+ TRKK,TRKK
2415
+ KTWK,KTWK
2416
+ CQHA,CQHA
2417
+ PCLP,PCLP
2418
+ WYVP,WYVP
2419
+ FYGW,FYGW
2420
+ IRPY,IRPY
2421
+ SFEP,SFEP
2422
+ DEAC,DEAC
2423
+ MHIA,MHIA
2424
+ FWPH,FWPH
2425
+ YRMQ,YRMQ
2426
+ ESAM,ESAM
+ WSWY,WSWY
+ MCIH,MCIH
+ FNDC,FNDC
+ EHFF,EHFF
+ TYAH,TYAH
+ SHSR,SHSR
+ YNMS,YNMS
+ TNCG,TNCG
+ ECIF,ECIF
+ HVFL,HVFL
+ WYEA,WYEA
+ SKEP,SKEP
+ LRAR,LRAR
+ PPFG,PPFG
+ TGNE,TGNE
+ WPHP,WPHP
+ CETQ,CETQ
+ QRMY,QRMY
+ VPLQ,VPLQ
+ ADVE,ADVE
+ HGEE,HGEE
+ VRGP,VRGP
+ VTPT,VTPT
+ ARRF,ARRF
+ HMLH,HMLH
+ RDWT,RDWT
+ PGVH,PGVH
+ DKEY,DKEY
+ PLPI,PLPI
+ SLHD,SLHD
+ WQVK,WQVK
+ PWSS,PWSS
+ REHK,REHK
+ NSNA,NSNA
+ KNCF,KNCF
+ EWPH,EWPH
+ MPQG,MPQG
+ DTEL,DTEL
+ RASG,RASG
+ CGHG,CGHG
+ DCYT,DCYT
+ YKCT,YKCT
+ DPPQ,DPPQ
+ QPIS,QPIS
+ ADYS,ADYS
+ GNMV,GNMV
+ FAAI,FAAI
+ DMQN,DMQN
+ IAEL,IAEL
+ TEER,TEER
+ DFAR,DFAR
+ DPHQ,DPHQ
+ PWAD,PWAD
+ PLWM,PLWM
+ KCHD,KCHD
+ MLEA,MLEA
+ MSEK,MSEK
+ DWMK,DWMK
+ FNNT,FNNT
+ YVCG,YVCG
+ NQHS,NQHS
+ IIAA,IIAA
+ PVVK,PVVK
+ LCPV,LCPV
+ QSWE,QSWE
+ IQCY,IQCY
+ EAKQ,EAKQ
+ CMFT,CMFT
+ THWG,THWG
+ IKKE,IKKE
+ KLPK,KLPK
+ VGHM,VGHM
+ TPEL,TPEL
+ RIRP,RIRP
+ ITGT,ITGT
+ WDRP,WDRP
+ WHKT,WHKT
+ SMCP,SMCP
+ NYKW,NYKW
+ HAAP,HAAP
+ PRWL,PRWL
+ YPKV,YPKV
+ SHDF,SHDF
+ TFKT,TFKT
+ RCPL,RCPL
+ ECWN,ECWN
+ CPFA,CPFA
+ PRNI,PRNI
+ NYAG,NYAG
+ RVDV,RVDV
+ KQGV,KQGV
+ RDKH,RDKH
+ AMYF,AMYF
+ IMDL,IMDL
+ STAM,STAM
+ GWFT,GWFT
+ SFNT,SFNT
+ IFGV,IFGV
+ EMMI,EMMI
+ GDLP,GDLP
+ IDFE,IDFE
+ PQFR,PQFR
+ NTPP,NTPP
+ CLWD,CLWD
+ NHYA,NHYA
+ ICMS,ICMS
+ HLHC,HLHC
+ KNDI,KNDI
+ AWPE,AWPE
+ PPCN,PPCN
+ WHLQ,WHLQ
+ AFMV,AFMV
+ VSFD,VSFD
+ WLNR,WLNR
+ AMID,AMID
+ HTGE,HTGE
+ MPCW,MPCW
+ TRTF,TRTF
+ WRMV,WRMV
+ ILIE,ILIE
+ GPKK,GPKK
+ SLQG,SLQG
+ FPVY,FPVY
+ WCLI,WCLI
+ MYCE,MYCE
+ GEQN,GEQN
+ MFME,MFME
+ KNKE,KNKE
+ HHLF,HHLF
+ VDQP,VDQP
+ YGVK,YGVK
+ CTLR,CTLR
+ HGYN,HGYN
+ VSEW,VSEW
+ QPYQ,QPYQ
+ QPNY,QPNY
+ HDSR,HDSR
+ HFTC,HFTC
+ MYHV,MYHV
+ LWNQ,LWNQ
+ PAMN,PAMN
+ PLPW,PLPW
+ PMTV,PMTV
+ YTDQ,YTDQ
+ FCER,FCER
+ QNNH,QNNH
+ SFDF,SFDF
+ RHQL,RHQL
+ DLHE,DLHE
+ KWKH,KWKH
+ MQRH,MQRH
+ NWET,NWET
+ QVCV,QVCV
+ EPEP,EPEP
+ VRSE,VRSE
+ ERMM,ERMM
+ TVRI,TVRI
+ GCMG,GCMG
+ MAWA,MAWA
+ DGLA,DGLA
+ LPCQ,LPCQ
+ THDF,THDF
+ TWYH,TWYH
+ IDAT,IDAT
+ SFSE,SFSE
+ WCHR,WCHR
+ NNTH,NNTH
+ QPHC,QPHC
+ EERR,EERR
+ EVIQ,EVIQ
+ LRWA,LRWA
+ LHIE,LHIE
+ DATT,DATT
+ ESDH,ESDH
+ TRHH,TRHH
+ HMGV,HMGV
+ HNVN,HNVN
+ MTLK,MTLK
+ KGSQ,KGSQ
+ GSGW,GSGW
+ VPTP,VPTP
+ RIGG,RIGG
+ GVQR,GVQR
+ ANVM,ANVM
+ TQHM,TQHM
+ CDGP,CDGP
+ EIQR,EIQR
+ SPTN,SPTN
+ QWNF,QWNF
+ TFFT,TFFT
+ VENC,VENC
+ LRKC,LRKC
+ AQML,AQML
+ ASGA,ASGA
+ VHYM,VHYM
+ QGRK,QGRK
+ TGCF,TGCF
+ KVLK,KVLK
+ MCIN,MCIN
+ APTA,APTA
+ LCNK,LCNK
+ CYLF,CYLF
+ FCYN,FCYN
+ ALGE,ALGE
+ QDQP,QDQP
+ VVFN,VVFN
+ LLSQ,LLSQ
+ GEYW,GEYW
+ PFGI,PFGI
+ MLQM,MLQM
+ FRDP,FRDP
+ EKST,EKST
+ GSER,GSER
+ HGPT,HGPT
+ LQGA,LQGA
+ VVDT,VVDT
+ IGFS,IGFS
+ ENWL,ENWL
+ SCCA,SCCA
+ WSNI,WSNI
+ ALCI,ALCI
+ DWFH,DWFH
+ GNDD,GNDD
+ NQWQ,NQWQ
+ KLHW,KLHW
+ WNNG,WNNG
+ EETS,EETS
+ HKKD,HKKD
+ YCIK,YCIK
+ YWKM,YWKM
+ MSLT,MSLT
+ HPMN,HPMN
+ GRGT,GRGT
+ GRNN,GRNN
+ HINF,HINF
+ HDIF,HDIF
+ AKKT,AKKT
+ HVVK,HVVK
+ RTEH,RTEH
+ QDKN,QDKN
+ VLVT,VLVT
+ VLIY,VLIY
+ PVWT,PVWT
+ NHTY,NHTY
+ EYRT,EYRT
+ ANAD,ANAD
+ KAQH,KAQH
+ VWCN,VWCN
+ EDHN,EDHN
+ DNMP,DNMP
+ PIEA,PIEA
+ TNIF,TNIF
+ RHLM,RHLM
+ FFWS,FFWS
+ SFVF,SFVF
+ IDEG,IDEG
+ SRIK,SRIK
+ WVGG,WVGG
+ ICKQ,ICKQ
+ HVKL,HVKL
+ YNER,YNER
+ IFNM,IFNM
+ MCTR,MCTR
+ KEKK,KEKK
+ SVDK,SVDK
+ IAYK,IAYK
+ LPNI,LPNI
+ DRWH,DRWH
+ STAC,STAC
+ CIQV,CIQV
+ ASVA,ASVA
+ EVQC,EVQC
+ TLMM,TLMM
+ YERP,YERP
+ GNII,GNII
+ HCGN,HCGN
+ RECT,RECT
+ HLKG,HLKG
+ TRRA,TRRA
+ RGES,RGES
+ KKDA,KKDA
+ EMFY,EMFY
+ ARCE,ARCE
+ VLTK,VLTK
+ AMHN,AMHN
+ LMQD,LMQD
+ EHGH,EHGH
+ QGSV,QGSV
+ NCWW,NCWW
+ RVSR,RVSR
+ IWRS,IWRS
+ EWMI,EWMI
+ VFSE,VFSE
+ IRLS,IRLS
+ APFF,APFF
+ YSWV,YSWV
+ ANAL,ANAL
+ VLPA,VLPA
+ HTDS,HTDS
+ GYKE,GYKE
+ YVDL,YVDL
+ MVLY,MVLY
+ ITVP,ITVP
+ GIHK,GIHK
+ QSIW,QSIW
+ ALGT,ALGT
+ NVTI,NVTI
+ GWVW,GWVW
+ LGYN,LGYN
+ YTCT,YTCT
+ QWAC,QWAC
+ AAMR,AAMR
+ DRAT,DRAT
+ CTCA,CTCA
+ GYAS,GYAS
+ FELH,FELH
+ ATPA,ATPA
+ DLMR,DLMR
+ FYAH,FYAH
+ WTNK,WTNK
+ QHFT,QHFT
+ NCCW,NCCW
+ DLFE,DLFE
+ LVLF,LVLF
+ FASE,FASE
+ ALQT,ALQT
+ DINN,DINN
+ WALA,WALA
+ CGGS,CGGS
+ GCSA,GCSA
+ FCNY,FCNY
+ DYTD,DYTD
+ SKMQ,SKMQ
+ FNDV,FNDV
+ VTAA,VTAA
+ LHEA,LHEA
+ LFVW,LFVW
+ ENHL,ENHL
+ EGCY,EGCY
+ HKGD,HKGD
+ NACC,NACC
+ HADF,HADF
+ TSNK,TSNK
+ ALHP,ALHP
+ MAHN,MAHN
+ EPEG,EPEG
+ TQRA,TQRA
+ QDVC,QDVC
+ YEPQ,YEPQ
+ WVNR,WVNR
+ CVQA,CVQA
+ AVVV,AVVV
+ RCSP,RCSP
+ FADC,FADC
+ TVQA,TVQA
+ VCSS,VCSS
+ HNMT,HNMT
+ FDNL,FDNL
+ IKKQ,IKKQ
+ NCEQ,NCEQ
+ MVHE,MVHE
+ LPIE,LPIE
+ FNLH,FNLH
+ VPLC,VPLC
+ AGPK,AGPK
+ AQIP,AQIP
+ SLSE,SLSE
+ SEVS,SEVS
+ DLSC,DLSC
+ HETF,HETF
+ GQRM,GQRM
+ YCEW,YCEW
+ GYNC,GYNC
+ CGEV,CGEV
+ TNGY,TNGY
+ PQEV,PQEV
+ PMSP,PMSP
+ EYLP,EYLP
+ LHSK,LHSK
+ ILRP,ILRP
+ RMFL,RMFL
+ CNAL,CNAL
+ IHQM,IHQM
+ QGES,QGES
+ DHCH,DHCH
+ CMPD,CMPD
+ LSVQ,LSVQ
+ WVPV,WVPV
+ AQKR,AQKR
+ EAKS,EAKS
+ DDTA,DDTA
+ HVLL,HVLL
+ RHTK,RHTK
+ AGAH,AGAH
+ PTGS,PTGS
+ VHDH,VHDH
+ WCII,WCII
+ QMGP,QMGP
+ HWWY,HWWY
+ KVGE,KVGE
+ SPPD,SPPD
+ AYLI,AYLI
+ TWFS,TWFS
+ TTKT,TTKT
+ QHHN,QHHN
+ IQYY,IQYY
+ AWFM,AWFM
+ ADKN,ADKN
+ SEIG,SEIG
+ SIRK,SIRK
+ CQCG,CQCG
+ NLWQ,NLWQ
+ MKCR,MKCR
+ NHYC,NHYC
+ DMAF,DMAF
+ EPMC,EPMC
+ LNCI,LNCI
+ NDPK,NDPK
+ SVAH,SVAH
+ YADM,YADM
+ ANDQ,ANDQ
+ QMNV,QMNV
+ PWMR,PWMR
+ VMHY,VMHY
+ PRRP,PRRP
+ DTTC,DTTC
+ KGMG,KGMG
+ NIFD,NIFD
+ CDFS,CDFS
+ CCKT,CCKT
+ QLDC,QLDC
+ HQKW,HQKW
+ NTWH,NTWH
+ PRFI,PRFI
+ YARD,YARD
+ PSED,PSED
+ HGFP,HGFP
+ RTAE,RTAE
+ HGQD,HGQD
+ ILLY,ILLY
+ QQDC,QQDC
+ IVGF,IVGF
+ VTFE,VTFE
+ RCTH,RCTH
+ VAQC,VAQC
+ WHFK,WHFK
+ YWGT,YWGT
+ PIPL,PIPL
+ HDFY,HDFY
+ HHML,HHML
+ GWSG,GWSG
+ SRSV,SRSV
+ IWDV,IWDV
+ HEIK,HEIK
+ YMYA,YMYA
+ LVEW,LVEW
+ VERV,VERV
+ KATH,KATH
+ TVIN,TVIN
+ EWNL,EWNL
+ HNYH,HNYH
+ ILRQ,ILRQ
+ ATHA,ATHA
+ SEVP,SEVP
+ VWNV,VWNV
+ TKCT,TKCT
+ RYYV,RYYV
+ KAKV,KAKV
+ SSAY,SSAY
+ LMCM,LMCM
+ VVHK,VVHK
+ QLVQ,QLVQ
+ VNGR,VNGR
+ SCVI,SCVI
+ VTVG,VTVG
+ HGRK,HGRK
+ MEDV,MEDV
+ ALRI,ALRI
+ PKHD,PKHD
+ QLRC,QLRC
+ DHIG,DHIG
+ CYWH,CYWH
+ WMAL,WMAL
+ YGAM,YGAM
+ VGSD,VGSD
+ EFYI,EFYI
+ CSNS,CSNS
+ EYTI,EYTI
+ NDYM,NDYM
+ PWST,PWST
+ TCPY,TCPY
+ HCDA,HCDA
+ SLDY,SLDY
+ VWLV,VWLV
+ FEKA,FEKA
+ IWPI,IWPI
+ QKRM,QKRM
+ FCDH,FCDH
+ YYHN,YYHN
+ NSMQ,NSMQ
+ LVME,LVME
+ HCDD,HCDD
+ NAGS,NAGS
+ MSVP,MSVP
+ DNQC,DNQC
+ LFCG,LFCG
+ VTTD,VTTD
+ SCKL,SCKL
+ RSSI,RSSI
+ RMCP,RMCP
+ MTQM,MTQM
+ LGRY,LGRY
+ KPWM,KPWM
+ RGPN,RGPN
+ ERWD,ERWD
+ IDRY,IDRY
+ LPML,LPML
+ PDAR,PDAR
+ HISK,HISK
+ TPCR,TPCR
+ MDDL,MDDL
+ GWMD,GWMD
+ CVTS,CVTS
+ LAID,LAID
+ TVKF,TVKF
+ MNVC,MNVC
+ IAQE,IAQE
+ IMHA,IMHA
+ WHSL,WHSL
+ KKGF,KKGF
+ KCPR,KCPR
+ THWR,THWR
+ FQYV,FQYV
+ IRTS,IRTS
+ KVRG,KVRG
+ ESEE,ESEE
+ TWLW,TWLW
+ DYNK,DYNK
+ SKTQ,SKTQ
+ IGAA,IGAA
+ EQMR,EQMR
+ GYVS,GYVS
+ IMCH,IMCH
+ GFNF,GFNF
+ EIVW,EIVW
+ PKFS,PKFS
+ IQIT,IQIT
+ VGYP,VGYP
+ KVPL,KVPL
+ EKTV,EKTV
+ DSLP,DSLP
+ SIHA,SIHA
+ EPPK,EPPK
+ MFHP,MFHP
+ LCKA,LCKA
+ VRAN,VRAN
+ WDPV,WDPV
+ GMAM,GMAM
+ NPGM,NPGM
+ VHNH,VHNH
+ WCNN,WCNN
+ ADNF,ADNF
+ AGSC,AGSC
+ VALE,VALE
+ AIGG,AIGG
+ PWVL,PWVL
+ GGMN,GGMN
+ VDWA,VDWA
+ WYGE,WYGE
+ PRMT,PRMT
+ APAM,APAM
+ DYDA,DYDA
+ GHSG,GHSG
+ YIPM,YIPM
+ ITEM,ITEM
+ YTAK,YTAK
+ RVWA,RVWA
+ AQPS,AQPS
+ YAYN,YAYN
+ WGWC,WGWC
+ MTGS,MTGS
+ ELVR,ELVR
+ QGED,QGED
+ LFPD,LFPD
+ YHGA,YHGA
+ PKIN,PKIN
+ QKRC,QKRC
+ KTRW,KTRW
+ PVQD,PVQD
+ NYSP,NYSP
+ DDWI,DDWI
+ GKAA,GKAA
+ FHAQ,FHAQ
+ HFVT,HFVT
+ KWLY,KWLY
+ VRQC,VRQC
+ TWER,TWER
+ AMEF,AMEF
+ HIWR,HIWR
+ VDEC,VDEC
+ FLDR,FLDR
+ MNTM,MNTM
+ SKNF,SKNF
+ IKER,IKER
+ NLMT,NLMT
+ QFMC,QFMC
+ KGSE,KGSE
+ HPNT,HPNT
+ EYYH,EYYH
+ FAPR,FAPR
+ VSDL,VSDL
+ DYME,DYME
+ EGKV,EGKV
+ SNGF,SNGF
+ QKPM,QKPM
+ FKST,FKST
+ EMPY,EMPY
+ TMCF,TMCF
+ FMDK,FMDK
+ VPYD,VPYD
+ PRRN,PRRN
+ WGPV,WGPV
+ MGPI,MGPI
+ KHDQ,KHDQ
+ YCSM,YCSM
+ MLAE,MLAE
+ CVEK,CVEK
+ MQAV,MQAV
+ WGCR,WGCR
+ RRVY,RRVY
+ GINC,GINC
+ QTSD,QTSD
+ RDLK,RDLK
+ IQTT,IQTT
+ KQDD,KQDD
+ FNYW,FNYW
+ CHHY,CHHY
+ YVCR,YVCR
+ RTKF,RTKF
+ MGEQ,MGEQ
+ CCRV,CCRV
+ SMQE,SMQE
+ DCNY,DCNY
+ DWKQ,DWKQ
+ IVFT,IVFT
+ PCHQ,PCHQ
+ SPSR,SPSR
+ FFTN,FFTN
+ WVDV,WVDV
+ MWRQ,MWRQ
+ CEDC,CEDC
+ NWGD,NWGD
+ GGHG,GGHG
+ SMRW,SMRW
+ WNEF,WNEF
+ GMRD,GMRD
+ WCTM,WCTM
+ VLGF,VLGF
+ VYHK,VYHK
+ LKSQ,LKSQ
+ MEDW,MEDW
+ DNGG,DNGG
+ DGIK,DGIK
+ VKEP,VKEP
+ DPWG,DPWG
+ RRET,RRET
+ LDMR,LDMR
+ WFND,WFND
+ SKDS,SKDS
+ ERIL,ERIL
+ IKIM,IKIM
+ ITRE,ITRE
+ DMNE,DMNE
+ FACL,FACL
+ ACDG,ACDG
+ SNPI,SNPI
+ QHSM,QHSM
+ PSGD,PSGD
+ IPLY,IPLY
+ TSVR,TSVR
+ FSAF,FSAF
+ IQKY,IQKY
+ NFST,NFST
+ WVGW,WVGW
data/splits/4AA_val.csv ADDED
@@ -0,0 +1,101 @@
+ name,seqres
+ EHFR,EHFR
+ IMCP,IMCP
+ YVTL,YVTL
+ GQGV,GQGV
+ PCFK,PCFK
+ VVIV,VVIV
+ QYTC,QYTC
+ WGDY,WGDY
+ CWLF,CWLF
+ QHIN,QHIN
+ ASKI,ASKI
+ KITV,KITV
+ SLYW,SLYW
+ REVV,REVV
+ NYLE,NYLE
+ QGRE,QGRE
+ AEHY,AEHY
+ SRPT,SRPT
+ NGDS,NGDS
+ ATVV,ATVV
+ DKFA,DKFA
+ MSTP,MSTP
+ VNWW,VNWW
+ LDNH,LDNH
+ CHSI,CHSI
+ YFCS,YFCS
+ WLSC,WLSC
+ YYTK,YYTK
+ GPNT,GPNT
+ LTQE,LTQE
+ VMHV,VMHV
+ GTLM,GTLM
+ QRRW,QRRW
+ EGDW,EGDW
+ IFRG,IFRG
+ LRYM,LRYM
+ ITGR,ITGR
+ PTNI,PTNI
+ KQSH,KQSH
+ WTWS,WTWS
+ CASF,CASF
+ VKFG,VKFG
+ GQYP,GQYP
+ CMYH,CMYH
+ VFGD,VFGD
+ KWIC,KWIC
+ PQHG,PQHG
+ FLIS,FLIS
+ QEGR,QEGR
+ WQTE,WQTE
+ MHTN,MHTN
+ ISML,ISML
+ HRCS,HRCS
+ EQVR,EQVR
+ NNKC,NNKC
+ MVQY,MVQY
+ FART,FART
+ ENKE,ENKE
+ TIDH,TIDH
+ RDTI,RDTI
+ KAPF,KAPF
+ ICLP,ICLP
+ HDQW,HDQW
+ HAPV,HAPV
+ AMEN,AMEN
+ GPHG,GPHG
+ QHFV,QHFV
+ GHSS,GHSS
+ NEEE,NEEE
+ PRGM,PRGM
+ YNML,YNML
+ RHDG,RHDG
+ VMVL,VMVL
+ ETYF,ETYF
+ EGVT,EGVT
+ CWFC,CWFC
+ DNYP,DNYP
+ RREW,RREW
+ QDCN,QDCN
+ QNCG,QNCG
+ VVVR,VVVR
+ FRPQ,FRPQ
+ FVFN,FVFN
+ PEPR,PEPR
+ TQVK,TQVK
+ FCND,FCND
+ FVLS,FVLS
+ NMSG,NMSG
+ KCWL,KCWL
+ LMWL,LMWL
+ WDHC,WDHC
+ VGWM,VGWM
+ NLQW,NLQW
+ CETY,CETY
+ EFMM,EFMM
+ EDTV,EDTV
+ PIRK,PIRK
+ YTPA,YTPA
+ HEKI,HEKI
+ GYQH,GYQH
data/splits/atlas.csv ADDED
The diff for this file is too large to render. See raw diff
 
data/splits/atlas_test.csv ADDED
@@ -0,0 +1,83 @@
+ name,seqres,release_date,msa_id,seqlen
+ 6o2v_A,GWNDPDRMLLRDVKALTLHYDRYTTSRRLDPIPQLKCVGGTAGCDSYTPKVIQCQNKGWDGYDVQWECCTDLDIAYKFGKTVVSCEGYESSEDQYVLRGSCGLEYNLDYTELGLQKLKESGKQHGFCSFSDYYYK,2019-05-29,6o2v_A,135
+ 7ead_A,GLPYPEGYRFWTHVKSMELKPGHPLYESFGGLHHIYVNPTGLRTYLEGKKAPFPKGTVIVFDLLEAKVEGNALLEGPRKLIGVMAKDPGRYPDTGGWGYYAFGPDKKPLAIDPKACHACHQGAANTDYVFSAFRP,2022-03-09,7ead_A,135
+ 6uof_A,DILTVEKLYRPSHEYGFLRETDTVKDYLDLVRKNRSSRFPVINQHQVVVGVVTMRDAGDKSPSTTIDKVMSRSLFLVGLSTNIANVSQRMIAEDFEMVPVVRSNQTLLGVVTRRDVMEK,2019-11-06,6uof_A,119
+ 6lus_A,DILTLENLGDILKYLNSADLTTLDEVSMRAALSLTCAGIRKTSRSMINTLTEQHVSAENLSPDQTQIIKQTYTGIHLDKGGNFEAALWKNWDRRSISLFLQAAISVLNTTPCESSKSVISAYNHFLQKPGDIK,2020-12-02,6lus_A,133
+ 6qj0_A,MRVTELIIDGFKSYAVRTVITGWDESFNAVTGLNGSGKSNILDAICFVLGITNMSTVRAQNLQDLIYKRGQAGVTKASVTIVFDNRDKKRSPIGFEEYATISVTRQIVLGGTTKYLINGHRAQQQTVQNLFQSVQLNINNPNFLIMQGRITKVLNMKPAEILAMIEEAAGTRMFEDRKEKALKTMAKKDLKLQEITELLRDEIEPKLEKLRQEKRSGGSGGSMNMIDSVEKKEMSLKHMMKTVLKDKHKIEETIATLDEYKRKALQETWEKVNGDFGQIFNELLPGSFAKLEPPEGKDLTDGLEVKVRLGKVWKQSLTELSGGQRSLIALSLIMALLQFKPAPMYILDEVDAALDLSHTQNIGRLIKTRFKGSQFIVVSLKDGMFQNANRIFRVRFSEGTSVVQALTPADLK,2019-07-03,6qj0_A,412
+ 6j56_A,ARQREIEMNRQQRFFRIPFIRPADQYKDPQSKKKGWWYAHFDGPWIARQMELHPDKPPILLVAGKDDMEMCELNLEETGLTRKRGAEILPRQFEEIWERCGGIQYLQNAIESRQARPTYATAMLQSLLK,2019-08-21,6j56_A,129
+ 7ec1_A,SMNYFVGNSLGVNLTGIEKAIINRLNLFKEMGRPAQCVFLSWNRYLYRNAQNYITSSDYINMYDFFQEATYLERNEPFDWLSYWTDECHYTLKHVENSHDFRIYDQERFLMYAHFQDPKYRILDYVNHFDSQRRKVKRDFYDVRGFLSCSRILVDKQQTLCEFFYNPEGDTKLEKYFSYKDGKPEVQKIIVYYANKQYFFNNETELGAFFIKQLYQHGDLFFSDRNVYTAPIFNLTPESIPVVAVLHSTHIKNIDALDSSPFKNVYKAMFENLSRYRAIIVSTEQQKLDVEKRINHTIPVVNIPVGYSETIDTPVQTLDQRSVKLISVARYSPEKQLHQQIELIKRLVSYVPKIELHMYGFGSESKKLNELIQKYGLENHVYLRGFLSNLDQEYSDAYLSLITSNMEGFSLALLESLAHGVPVISYDIKYGPNELITSDFNGYLITKNDEDALFDKVKYVIDHPEVQQRLSKGSLAKAQQYSKASLIKQWDQFVRLILEHHHHHH,2021-03-24,7ec1_A,505
+ 6xds_A,KTEEGKLVIWINGDKGYNGLAEVGKKFEKDTGIKVTVEHPDKLEEKFPQVAATGDGPDIIFWAHDRFGGYAQSGLLAEITPAAAFQDKLYPFTWDAVRYNGKLIAYPIAVEALSLIYNKDLLPNPPKTWEEIPALDKELKAKGKSALMFNLQEPYFTWPLIAADGGYAFKYAAGKYDIKDVGVDNAGAKAGLTFLVDLIKNKHMNADTDYSIAEHAFNHGETAMTINGPWAWSNIDTSAVNYGVTVLPTFKGQPSKPFVGVLSAGINAASPNKELAKEFLENYLLTDEGLEAVNKDKPLGAVALKSYEEELVKDPRVAATMENAQKGEIMPNIPQMSAFWYAVRTAVINAASGRQTVDAALAAAQTNAHDTTVFQGVAGQSLQVSCPYDSMKHWGRRKAWCRQLGEKGPCQRVVSTHNLWLLSFLRRWNGSTAITDDTLGGTLTITLRNLQPHDAGLYQCQSLHGSEADTLRKVLVEVLADPLDHHHHHHHH,2021-02-17,6xds_A,492
+ 6xrx_A,KIEEGKLVIWINGDKGYNGLAEVGKKFEKDTGIKVTVEHPDKLEEKFPQVAATGDGPDIIFWAHDRFGGYAQSGLLAEITPAAAFQDKLYPFTWDAVRYNGKLIAYPIAVEALSLIYNKDLLPNPPKTWEEIPALDKELKAKGKSALMFNLQEPYFTWPLIAADGGYAFKYAAGKYDIKDVGVDNAGAKAGLTFLVDLIKNKHMNADTDYSIAEAAFNKGETAMTINGPWAWSNIDTSAVNYGVTVLPTFKGQPSKPFVGVLSAGINAASPNKELAKEFLENYLLTDEGLEAVNKDKPLGAVALKSYEEELAKDPRIAATMENAQKGEIMPNIPQMSAFWYAVRTAVINAASGRQTVDAALAAAQTNAAAALKDDFQEFVDLVPVDKLVNVALQYLVSDKEFKEFFGYLQGEEFSAVWDQFFALNEVKDVLNYLEAADLAVYDALNTVADFLGLHHVKPTVHTLRTGGLTGFFDETVALLPLDKFEALFEEKLKTSPEFKAFFEKLRNLDYQKFVDFHNNSKEVQGFLQKLRSYGLDVDGFFNLVAGFFGWGKF,2021-03-24,6xrx_A,554
+ 6q9c_B,RSYPAIPRIYAETTLNMLLKRAKKPRVHSIDEYLKDGGYQALEKALNMSPEEIIDWVDKSTLRGRGGAGFPTGKKWKFAVQNPGPRYFICNADESEPGTFKDRIIIERDPHLLIEGIIISSYAIGANEAYIYIRGEYPAGYYILRDAIEEAKKKGFLGKNILGSGFDLEIYVARGAGAYICGEETALIESLEGKRGHPRLKPPYPVQKGLWGKPTVVNNVETIANVPFIISMGWEEYRYIGPSDYAGPKLFPVSGKVKKPGVYELPMNTTLREVIFKYAGGTLGNKKVKAVFSGALDCFSSEELDIPMDYSPLGFGGTGTVIVLTEEDDIVEAALKIAEFYEHETCGQCTPCRVGCYEQANLLEKIYKGEATEQDWEGFDFVNRNIQPTSICGLGAVAGRLIRQTLEKFPEEWEKYRK,2019-06-26,6q9c_B,418
+ 6rrv_A,GPKPKNTKENLSKSSWRQEWLANLKLISVSLVDEFPSELSDSDRQIINEKMQLLKDIFANNLKSAISNNFRESDIIILKGEIEDYPMSSEIKIYYNELQNKPDAKKARFWSFMKTQRFVSNMGFDIQ,2019-09-18,6rrv_A,127
+ 7lao_A,QGMNTIESITADLHGLGVRPGDLIMVHASLKAVGPVEGGAASVVSALRAAVGSAGTLMGYASWDRSPYEETLNGARMDEELRRRWPPFDLATSGTYPGFGLLNRFLLEAPDARRSAHPDASMVAVGPLAATLTEPHRLGQALGEGSPLERFVGHGGKVLLLGAPLDSVTVLHYAEAIAPIPNKRRVTYEMPMLGPDGRVRWELAEDFDSNGILDCFAVDGKPDAVETIAKAYVELGRHREGIVGRAPSYLFEAQDIVSFGVTYLEQHFGAP,2021-01-20,7lao_A,271
+ 6l4l_A,MNRQVIEFSKYNPSGNMTILVHSKHDASEYASIANQLMAATHVCCEQVGFIESTQNDDGNDFHLVMSGNEFCGNATMSYIHHLQESHLLKDQQFKVKVSGCSDLVQCAIHDCQYYEVQMPQAHRVVPTTINMGNHSWKALEIIYETYVHYVIPVKQVTTEIQHLVEAFVREQQWSHKYKTVGMMLFDEQRQFLQPLIYIPEIQSLIWENSCGSGTASIGVFNNYQRNDACKDFTVHQPGGSILVTSKRCHQLGYQTSIKGQVTTVATGKAYIELEHHHHHH,2020-10-21,6l4l_A,281
+ 7asg_A,GPNVCAVQKVIGTNRKYFTNCKQWYQRKICGKSTVISYECCPGYEKVPGEKGCPAALPLSNLYETLGVVGSTTTQLYTDRTEKLRPEMEGPGSFTIFAPSNEAWASLPAEVLDSLVSNVNIELLNALRYHMVGRRVLTDELKHGMTLTSMYQNSNIQIHHYPNGIVTVNCARLLKADHHATNGVVHLIDKVISTITNNIQQIIEIEDTFETLRAAVAASGLNTMLEGNGQYTLLAPTNEAFEKIPSETLNRILGDPEALRDLLNNHILKSAMCAEAIVAGLSVETLEGTTLEVGCSGDMLTINGKAIISNKDILATNGVIHYIDELLIPDSAKTLFELAAESDVSTAIDLFRQAGLGNHLSGSERLTLLAPLNSVFKDGTPPIDAHTRNLLRNHIIKDQLASKYLYHGQTLETLGGKKLRVFVYRNSLCIENSCIAAHDKRGRYGTLFTMDRVLTPPMGTVMDVLKGDNRFSMLVAAIQSAGLTETLNREGVYTVFAPTNEAFRALPPREWSRLLGDAKELANILKYHIGDEILVSGGIGALVRLKSLQGDKLEVSLKNNVVSVNKEPVAEPDIMATNGVVHVITNVLHHHHHH,2020-12-23,7asg_A,594
+ 6kty_A,GSHMASVDKKFLSGDPNIVDGQVDPGSAIPGDYAIEVVQLAQKPAAMSNGFPDKDQTQIGVGYIKFETPEGTKEVYINGSNSTLDGVMKQINAANVGLKAQVVEDRKDQENPFKLLVSGLSTGNDSQVTFPKIYLLDGDQDMYFEESRKAQNAKVKVDGFEIELPDNKSTDLVPGVTLDFKSAAPGREIRLSVKEN,2019-10-09,6kty_A,196
+ 6vjg_A,HHHHHHMKFAVIDRKNFTLIHFEIEKPIKPEILKEIEIPSVDTRKGVVISGRGPIWLHCFLAHKYHHTPFVAVYDPRLGAVVVQSHSELREGDVIDVVVEEILKGGVRHV,2020-09-02,6vjg_A,110
+ 6sms_A,QKEMDRKGLLGYYFKDKDFSNLTMFSPTRYNTLIYDQQTANKLLDKKQQEYQSIRWIGLIQSNKTGDFTFELSDDECAIIEMDGKVISNKGKEKQVVHLEKGKLVPIKIEYQLDEPLNIDDEKFKGFKLLKVDNQKQLHQVQQDELRNPEFNKKESQEFLAKASKINLFTKKIKRDIDEGTDTDGDSIPDMWEENGYTIQNRIAVKWNDSLASKGYTKFVSNPLDSHTVGDPYTDYEKASRDLDLSNAKETFNPLVAAFPSVNVSMEKVILSPNKNLSNSVESHSSTNWSYTNTEGASVEAGIGPKGFSFGVSANYQHSETVAQEWGASIGDTTQLNTASAGYLNANVRYNNVGTGAIYDVKPTTSFVLEKNTIATITAKSNSTALSISPGESYPKKGQNGIAITSMDDFNSHPITLNKKQLDQVLTNNPIMLETDQTDGIYKIKDTHGNIVTGGTWNGVTQQIKAKTASIIVDDGKQVAEKRVAAKDYAYPEDKTPSLTLKDALKLSFPEEIKETDGLLYYNNKPIYESSVMTYLDGNTAKEVKKQINDKTGEFKDVQHLYAVKLTPKMNFTIKVPVAYDTAKQAVNLGGDNPWGAKGLLGTWVNAMVVDNSGDKAYKRVEPGYLLSPTLEFSEGSLDNLKKNYSFYVSMYVKSDKPFTLRINAGPYSTKRTIEASNDFKRVDIPAFYIEGFPIDTIRLEGSDYPSAIWWKDVSITEVSAVKK,2021-02-10,6sms_A,724
+ 6l3r_E,GAILAARIAVSNLHKETKKVFSDVMEDLYNYINPHNGKHSPMVAKSTLDIVLANKDRLNSAIIYDRDFSYNYFGFKTLERSYLLKINGKVAERPQHMLMRVSVGIHKEDIDAAIETYNLLSERWFTHASPTLFNAGTNRPQLSSCFLLSMKDDSIEGIYDTLKQCALISKSAGGIGVAVSCIRATGSYIAGTNGNSNGLVPMLRVYNNTARYVDQGGNKRPGAFAIYLEPWHLDIFEFLDLKKNTGKEEQRARDLFFALWIPDLFMKRVETNQDWSLMCPNECPGLDEVWGEEFEKLYASYEKQGRVRKVVKAQQLWYAIIESQTETGTPYMLYKDSCNRKSNQQNLGTIKCSNLCTEIVEYTSKDEVAVCNLASLALNMYVTSEHTYDFKKLAEVTKVVVRNLNKIIDINYYPVPEACLSNKRHRPIGIGVQGLADAFILMRYPFESAEAQLLNKQIFETIYYGALEASCDLAKEQGPYETYEGSPVSKGILQYDMWNVTPTDLWDWKVLKEKIAKYGIRNSLLIAPMPTASTAQILGNNESIEPYTSNIYTRRVLSGEFQIVNPHLLKDLTERGLWHEEMKNQIIACNGSIQSIPEIPDDLKQLYKTVWEISQKTVLKMAAERGAFIDQSQSLNIHIAEPNYGKLTSMHFYGWKQGLKTGMYYLRTR,2020-10-28,6l3r_E,669
+ 7qsu_A,SGKAVDGNTLVLTEEFGLVKIKELYEKLDGKGRKTVEGNEEWTELETPVTVYGYRNGRIVGIKATHIYKGISSGMIEIRTRTGRKIKVTPIHKLFTGRVTKDGLALEEVMAMHIKPGDRIAVVKKIDGGEYVKLTTSPDFRKSRKIKVPEVLDEDLAEFLGYLIADGTLKPRTVAIYNNDESLLKRANFLSTKLFGINGKIVQERTVKALLIHSKPLVDFFRKLGIPESKKARNWKVPRELLLSPPSVVKAFINAYIVCDGYYHERKGEIEITTASEEGAYGLSYLLAKLGIYATFRKKQIKGKEYYRIAISGKTNLEKLGIKRETRGYTNIDIVILFDEVVEVKYIPEPQEVYDITTETHNFVGGNMPTLLHN,2022-03-30,7qsu_A,374
+ 7p46_A,KNLRDLEPGIHTDLEGRLTYGGYLRLDQLLSAQQPLSEPAHHDEMLFIIQSQTSELWLKLLAHELRAAIVHLQRDEVWQCRKVLARSKQVLRQLTEQWSVLETLTPSEYMGFRDVLGPSSGFQSLQYRYIEFLLGNKNPQMLQVFAYDPAGQARLREVLEAPSLYEEFLRYLARFGHAIPQQYQARDWTAAHVADDTLRPVFERIYENTDRYWREYSLCEDLVDVETQFQLWRFRHMRTVMRVIGFKRGTGGSSGVGFLQQALALTFFPELFDVRTSVGVDN,2021-10-06,7p46_A,282
+ 7e2s_A,MSDNLTELSQQLHDASEKKQLTAIAALAEMGEGGQGILLDYLAKNVPLEKPVLAVGNVYQTLRNLEQETITTQLQRNYPTGIFPLQSAQGIDYLPLQEALGSQDFETADEITRDKLCELAGPGASQRQWLYFTEVEKFPALDLHTINALWWLHSNGNFGFSVQRRLWLASGKEFTKLWPKIGWKSGNVWTRWPKGFTWDLSAPQGHLPLLNQLRGVRVAESLYRHPVWSQYGW,2021-08-25,7e2s_A,233
+ 6pxz_B,QRWVQFMKEAGQGSRDMWRAYSDMKKANWKNSDKYFHARGNYDAARRGPGGAWAAKVISDAREAVQKFTGHGAEDSRADQFANEWGRSGKDPNHFRPAGLPKRY,2019-08-28,6pxz_B,104
+ 6ovk_R,GAMGQDWRADYHSRIGEQRRLTLADGTQVQLNTDSALNVAFDQQARRLRLVRGEMLITRPALADSRPLWVDTEHGRLESTLAQFNVRLHGQHTQATVYQGSVALQPALHAYPPILLGAGEQASFNQQGLLARQAVAAVAPAWSQGMLVAQGQPLAAFIEDLARYRRGHLACDPALAGLRVSGTFPLENTDKIIAAVAETLQLEVQHFTRYWVTLKPRMA,2020-03-04,6ovk_R,219
+ 6ndw_B,MAKATTSVNYACNLDKRLPELPEGANRAQILESTWSTEFKVYDSFGEAHELQIDFARVPGEVNAWRATVNVDPTNADATATRVGIGTTDGVQNSFIVRFDNNGHLASVTDTAGNVTSPAGQVLVQISYNVVGANPDEAGAPTRHTFDVNLGEIGTSKNTITQFSDKSTTKAYEQDGYT,2019-08-14,6ndw_B,178
+ 6pce_B,PSMKERQVCWGARDEYWKCLDENLEDASQCKKLRSSFESSCPQQWIKYFDKRRDYLKFKEKFEAGQFEPS,2019-10-02,6pce_B,70
+ 7p41_D,MRGSMQQVGTVAQLWIYPVKSCKGVPVSEAECTAMGLRSGNLRDRFWLVINQEGNMVTARQEPRLVLISLTCDGDTLTLSAMNIFEMLRIDEGLRLKIYKDTEGYYTIGIGHLLTKSPSLNAAKSELDKAIGRNCNGVITKDEAEKLFNQDVDAAVRGILRNAKLKPVYDSLDAVRRCALINMVFQMGETGVAGFTNSLRMLQQKRWDEAAVNLAKSRWYNQTPNRAKRVITTFRTGTWDAYTKDLLLPIKTPTTNAVHKCRVHGLEIEGRDCGEATAQWITSFLKSQPYRLVHFEPHMRPRRPHQIADLFRPKDQIAYSDTSPFLILSEASLADLNSRLEKKVKATNFRPNIVISGCDVYAEDSWDELLIGDVELKRVMACSRCILTTVDPDTGVMSRKEPLETLKSYRQCDPSERKLYGKSPLFGQYFVLENPGTIKVGDPVYLLG,2021-08-18,7p41_D,448
+ 6h86_A,MADSDPGERSYDNMLKMLSDLNKDLEKLLEEMEKISVQATWMAYDMVVMRTNPTLAESMRRLEDAFLNCKEEMEKNWQELLTETKRKQ,2019-05-08,6h86_A,88
+ 7jfl_C,SALQDLLRTLKSPSSPQQQQQVLNILKSNPQLMAAFIKQRTAKYVAN,2020-09-09,7jfl_C,47
+ 6iah_A,MWIVFDVDGVLIDVRESYDEATKLTAEYFLGLFGVEREIKPEWVRELRRKGSFGDDFKVSEALILFALSGRAEELVEEFPEGGTIEWVREKFGFQVFGGSIERVFNTFYLGREYPERLFDFPGLWKKERPIVRRGLLERASKHFKLGVVTGRSALEMELAERIIGFKFENAVTREAYLKPDPRALWELVRGEPGVYIGDTINDELFVENYRGKYGDFDFVMVGRDVKDVNEFLENALEGG,2019-08-14,6iah_A,240
+ 6y2x_A,GSEPEPEQVIKNYTEELKVPPDEDCIICMEKLSTASGYSDVTDSKAIGSLAVGHLTKCSHAFHLLCLLAMYCNGNKDGSLQCPSCKTIYGEKTGTQPQGKMEVLRFQMSLPGHEDCGTILIVYSIPHGIQGPEHPNPGKPFTARGFPRQCYLPDNAQGRKVLELLKVAWKRRLIFTVGTSSTTGETDTVVWNEIHHKTEMDRNITGHGYPDPNYLQNVLAELAAQGVTEDCLEQQ,2020-09-02,6y2x_A,235
+ 7nmq_A,GNYALGPEGLKKALAETGSHILVMDLYAKTMIKQPNVNLSNIDLGSEGGELLKNIHLNQELSRINANYWLDTAKPQIQKTARNIVNYDEQFQNYYDTLVETVQKKDKAGLKEGINDLITTINTNSKEVTDVIKMLQDFKGKLYQNSTDFKNNVGGPDGKGGLTAILAGQQATIPQLQAEIEQLRSTQKKHFDDVLAWSIGGGLGAAILVIAAIGGAVVIVVTGGTATPAVVGGLSALGAAGIGLGTAAGVTASKHMDSYNEISNKIGELSMKADRANQAVLSLTNAKETLAYLYQTVDQAILSLTNIQKQWNTMGANYTDLLDNIDSMQDHKFSLIPDDLKAAKESWNDIHKDAEFISKDIAFKQ,2021-04-07,7nmq_A,365
+ 6xb3_H,SMELYNIKYAIDPTNKIVIEQVDNVDAFVHILEPGQEVFDETLSQYHQFPGVVSSIIFPQLVLNTIISVLSEDGSLLTLKLENTCFNFHVCNKRFVFGNLPAAVVNNETKQKLRIGAPIFAGKKLVSVVTAFHRVGENEWLLPVTGIREASQLSGHMKVLNGVRVEKWRPNMSVYGTVQLPYDKIKQHALEQENKTPNALESCVLFYKDSEIRITYNKGDYEIMHLRMPGPLIQPNTIYYS,2020-11-25,6xb3_H,241
+ 6jwh_A,GPLGSIKELAVDEELAAADGLIPRQKSKLCKHGDRGMCEYCSPLPPWDKEYHEKNKIKHISFHSYLKKLNENANKKENGSSYISPLSEPDFRINKRCHNGHEPWPRGICSKCQPSAITLQQQEFRMVDHVEFQKSEIINEFIQAWRYTGMQRFGYMYGSYSKYDNTPLGIKAVVEAIYEPPQHDEQDGLTMDVEQVKNEMLQIDRQAQEMGLSRIGLIFTDLSDAGAGDGSVFCKRHKDSFFLSSLEVIMAARHQTRHPNVSKYSEQGFFSSKFVTCVISGNLEGEIDISSYQVSTEAEALVTADMISGSTFPSMAYINDTTDERYVPEIFYMKSNEYGITVKENAKPAFPVDYLLVTLTHGFPNTDTETNSKFVSSTGFPWSNRQAMGQSQDYQELKKYLFNVASSGDFNLLHEKISNFHLLLYINSLQILSPDEWKLLIESAVKNEWEESLLKLVSSAGWQTLVMILQESG,2019-12-25,6jwh_A,473
+ 6l4p_B,GDLAAAKPALDAALEALNSIKDGDIKNLKALKKPPQIITRIFDCVLVLRMLPVTKAEYTDEKGRMVQVGNYPEAQKMMNQMSFLQDLKDFAKEQINDETVELLEPYFMSEDFTFENAQKASGNVAGLCNWAESMAKYHNVAKVVE,2020-02-19,6l4p_B,145
+ 6jpt_A,MEDTPLVISKQKTEVVCGVPTQVVCTAFSSHILVVVTQFGKMGTLVSLEPSSVASDVSKPVLTTKVLLGQDEPLIHVFAKNLVAFVSQEAGNRAVLLAVAVKDKSMEGLKALREVIRVCQVW,2019-05-29,6jpt_A,122
+ 7a66_B,MKIRAKVELTWEYEDEETAKAIANAVNVDNISIPEKLKKSLNLITFPDGARVVTKVKYEGEIESLVVALDDLIFAIKVAEEVLWSH,2022-03-23,7a66_B,86
+ 6okd_C,GSREGCASRCMKYNDELEKCEARMMSMSNTEEDCEQELEDLLYCLDHCHSQ,2020-04-15,6okd_C,51
+ 6in7_A,MMSREALQETLSAVMDNEADELELRRVLAACGEDAELRSTWSRYQLARSVMHREPTLPKLDIAAAVSAALADEAAPPKAEK,2019-07-24,6in7_A,81
+ 7onn_A,STSRGMITDRSGRPLAVSVPVGGGESRRYYPSGEVTAHLIGFTNVDSQGIEGVEKSFDKWLTGQGGGAAHNLALSIDERLQALVYRELNNAVAFNKAESGSAVLVDVNTGEVLAMANSPSYNPNNLSGTPKEAMRNRTITDVFEPGSTVKPMVVMTALQRGVVRENSVLNTIPYRINGHEIKDVARYSELTLTGVLQKSSNVGVSKLALAMPSSALVDTYSRFGLGKATNLGLVGERSGLYPQKQRWSDIERATFSFGYGLMVTPLQLARVYATIGSYGIYRPLSITKVDPPVPGERVFPESIVRTVVHMMESVALPGGGGVKAAIKGYRIAIKTGTAKKVGPDGRYINKYIAYTAGVAPASQPRFALVVVINDPQAGKYYGGAVSAPVFGAIMGGVLRTMNIEPDALTT,2021-08-04,7onn_A,410
+ 6ono_C,MDWRHKAVCRDEDPELFFPVGNSGPALAQIADAKLVCNRCPVTTECLSWALNTGQDSGVWGGMSEDERRALKRRNA,2019-11-27,6ono_C,76
+ 6d7y_A,IKTVLDTAQAPYKGSTVIGHALSKHAGRHPEIWGKVKGSMSGWNEQAMKHFKEIVRAPGEFRPTMNEKGITFLEKRLIDGRGVRLNLDGTFKGFID,2019-05-01,6d7y_A,96
+ 6odd_B,SRAQVLSLYRAMLRESKRFSAYNYRTYAVRRIRDAFRENKNVKDPVEIQTLVNKAKRDLGVIRRQVHIGQLYST,2019-11-27,6odd_B,74
+ 6p5x_B,MDHLPIFCQLRDRDCLIVGGGDVAERKARLLLEAGARLTVNALTFIPQFTVWANEGMLTLVEGPFDETLLDSCWLAIAATDDDTVNQRVSDAAESRRIFCNVVDAPKAASFIMPSIIDRSPLMVAVSAGGTSPVLARLLREKLESLLPQHLGQVARYAGQLRARVKKQFATMGERRRFWEKFFVNDRLAQSLANADEKAVNATTERLFSEPLDHRGEVVLVGAGPGDAGLLTLKGLQQIQQADIVVYDRLVSDDIMNLVRRDADRVFVGKRAGYHCVPQEEINQILLREAQKGKRVVRLKGGDPFIFGRGGEELETLCHAGIPFSVVPGITAASGCSAYSGIPLTHRDYAQSVRLVTGHLKTGGELDWENLAAEKQTLVFYMGLNQAATIQEKLIAFGMQADMPVALVENGTSVKQRVVHGVLTQLGELAQQVESPALIIVGRVVALRDKLNWFSNH,2020-02-26,6p5x_B,457
+ 6tgk_C,LDFQALEETTEYDGGYTRDSVLIREFWEIVHSFTDEQKRLFLQFTTGTDRAPVGGLGKLKMIIAKNGPDTERLPTSHTCFNVLLLPEYSSKEKLKERLLKAITYA,2020-02-26,6tgk_C,105
+ 7dmn_A,GSHMSNVTVSAFTVDKSISEEHVLPSSFIPGSGNIFPKFTSAIPKTAWELWYFDGISKDDKSSIVIGVTRNAEGLKHGGFKVQVFVIWADERTWHRDLFFPESVVSINESGVTDGIWKDATSNSSISFSCAGDLSKASLVFDVPGVVQGDMHLEALPGDTGLDTDARLGPSVYYVRPIGRASVKAQLSLYSSDATAAEQFSLGTSANGGMDRVWSPLSWPQVMTESYYLRTQVGPYAMQIMRIFPPAGSEDQPSTMARLYREGQLVCVAQHVVTREDALMTHDSLILSKQDNSDSEDVVTGGYRDKNTGYTVEFVEKGNEGQRWKFQVRHERIIWNTPTSRPGPDATGNTGFVEVLCGGTIGESYEGVGTGGQCELS,2021-10-06,7dmn_A,377
+ 7lp1_A,VTQSFLPPGWEMRIAPNGRPFFIDHNTKTTTWEDPRLKF,2021-07-28,7lp1_A,39
+ 6l34_A,KRPMSAYMLWLNASREKIKSDHPGISITDLSKKAGEIWKGMSKEKKEEWDRKAEDARRDYEKAMKEYE,2020-10-14,6l34_A,68
+ 7ned_A,MERITAARGLELRCKGWRQEALLRMLENVLENGENQKELIVYAALAKAARNWPSYHAIVRTLKELEEDETLVIQSGKPIGIFKTHRFAPLIVMANCNLVGRWATSENFYRLQEKGLLIWGGLTAAAWQYIGSQGVIQGTYEIFQSIARLHFNGSLAGKFILTAGLGGMGGAQPLAGTLAGAAILCVEVSEDRVDRRLQTNYLQRKTRSLDEALLWIEEAVDNLHPVSVGLTGNASDIYPELVRRGITPDIVTDQTSAHDLVYGYVPSGYRVEELEEARANDPEQLQRDAGASIAVEVEAMLELKKRGAIVFDNGNNIRSQAKEYGVQNAFDIDIFTEAFLRPLFARAIGPFRWVALSGELSDIHAIDEFILEAFSDNEVIANWIRLAREHVPVEGLPARIGWFGHGDRTKLALAVNQMVREGKLQGPIAFSRDHLDAASMTHPNIMTERMKDGSDAIADWPLLNAMLNCSSMADLVTIHSGGGGYAGYMTSAGVTLVADGSTESDIRLETTLNNDTGLGVLRYADAGYEESADEVRLKDIRWIKTN,2021-02-24,7ned_A,546
+ 7s86_A,TAVPRDVNGGTPPKSCSSGPVYCCNKTEDSKHLDKGTTALLGLLNIKIGDLKDLVGLNCSPLSVIGVGGNSCSAQTVCCTNTYQHGLVNVGCTPINIGL,2021-12-29,7s86_A,99
+ 6l8s_A,SSTAHKQQNINHLLDKIYEDTKYPDLQEIAKNFNPLGDTSMYNDQGAAAEVLMKELNDHRLLEQHHWYSLFNARQREEALMLFAVLNQCKVWHCFRNNAAYFREQMNEGEFVYALYVGVIHSKLGDGIVLPPLYEITPHMFTNSEVIDKAYSAKMTQKAGTFNVSFTGTKKNKEQRVAYFGEDIGMNIHHVTWHMDFPFWWQDSYGNHLDRKGELFFWVHHQLTARFDFERLSNWLDPVDELHWDRIIREGFAPLTSYKYGGEFPVRPDNIHFEDVDGVAHVHDMEITENRIYEAIDHGYITATDGHTIDIRQPKGIELLGDIIESSMYSPNAQYYGSLHNTAHVMLGRQGDPHGKFNLPPGVMEHFETATRDPSFFRLHKYMDNIFKKHTDSFPPYTHDDLEFAGMVVDGVAIDGELTTFFDEFQYSLINAVDSGESIEDVEINARVHRLNHKEFTYKITVSNSIGSDHLATFRIFLCPIEDNNGITLTLDKERWLCIELDKFFQKVPSGTHTIHRSSKDSSVTVPDMPSFHSLKEQADNAVNGGSDLDLSAYERSCGIPERMLLPKSKPEGMEFNLFVAVTDGVKDTEGHNGDHDHGGTHAQCGVHGEAYPDNRPLGYPLERRIPDERVFDGVPNIKHVVVKIVHHPE,2020-05-27,6l8s_A,650
+ 7bwf_B,SGLVPRGSHMIIKNYSYARQNLKALMTKVNDDSDMVTVTSTDDKNVVIMSESDYNSMMETLYLQQNPNNAEHLAQSIADLERGKTITKDIDV,2020-05-20,7bwf_B,92
+ 7aex_A,MHHHHHHSEINPAEFEQVNMVLQGFVETSVLPVLELSADESHIEFREHSRNAHTVVWKIISTSYQDELTVSLHITTGKLQIQGRPLSCYRVFTFNLAALLDLQGLEKVLIRQEDGKANIVQQEVARTYLQTVMADAYPHLHVTAEKLLVSGLCVKLAAPDLPDYCMLLYPELRTIEGVLKSKMSGLGMPVQQPAGFGTYFDKPAAHYILKPQFAATLRPEQINIISTAYTFFNVERHSLFHMETVVDASRMISDMARLMGKATRAWGIIKDLYIV,2021-06-09,7aex_A,275
+ 6d7y_B,MKELFEVIFEGVNTSRLFFLLKEIESKSDRIFDFNFSEDFFSSNVNVFSELLIDSFLGFNGDLYFGVSMEGFSVKDGLKLPVVLLRVLKYEGGVDVGLCFYMNDFNSAGKVMLEFQKYMNGISADFGFENFYGGLEPASDQETRFFTNNRLGPLL,2019-05-01,6d7y_B,155
+ 6e7e_A,ATANLHLYQDLQREVGSLKEINFMLSVLQKEFLHLSKEFATTSKDLSAVSQDFYSCLQGFRDNYKGFESLLDEYKNSTEEMRKLFSQEIIADLKGSVASLREEIRFLTPLAEEVRRLAHNQQSLTAAIEELKTIRDSLRDEIGQLSQLSKTLTSQIALQRKLEHHHHHH,2019-07-10,6e7e_A,169
+ 7k7p_B,EKTHVQLSLPVLQVRDVLVRGFGDSVEEVLSEARQHLKDGTCGLVEVEKGVLPQLEQPYVFIKRSDARTAPHGHVMVELVAELEGIQYGRSGETLGVLVPHVGEIPVAYRKVLLRKNG,2020-09-30,7k7p_B,118
+ 7buy_A,SGFRKMAFPSGKVEGCMVQVTCGTTTLNGLWLDDVVYCPRHVICTSEDMLNPNYEDLLIRKSNHNFLVQAGNVQLRVIGHSMQNCVLKLKVDTANPKTPKYKFVRIQPGQTFSVLACYNGSPSGVYQCAMRPNFTIKGSFLNGSCGSVGFNIDYDCVSFCYMHHMELPTGVHAGTDLEGNFYGPFVDRQTAQAAGTDTTITVNVLAWLYAAVINGDRWFLNRFTTTLNDFNLVAMKYNYEPLTQDHVDILGPLSAQTGIAVLDMCASLKELLQNGMNGRTILGSALLEDEFTPFDVVRQCSGVTFQ,2020-04-29,7buy_A,306
+ 6yhu_B,SEDKRAKVTSAMQTMLFTMLRKLDNDALNNIINNARDGCVPLNIIPLTTAAKLMVVIPDYNTYKNTCDGTTFTYASALWEIQQVVDADSKIVQLSEISMDNSPNLAWPLIVTALRAN,2020-04-29,6yhu_B,117
+ 6h49_A,GMEGAGQMAELPTHYGTIIKTLRKYMKLTQSKLSERTGFSQNTISNHENGNRNIGVNEIEIYGKGLGIPSYILHRISDEFKEKGYSPTLNDFGKFDKMYSYVNKAYYNDGDIYYSSYDLYDETIKLLELLKESKINVNDIDYDYVLKLYKQILSTDT,2019-08-28,6h49_A,157
+ 7aqx_A,AAEKGFKQAFWQPLCQVSEELDDQPKGALFTLQAAASKIQKMRDAALRASIYAEINHGTNRAKAAVIVANHYAMKADSGLEALKQTLSSQEVTATATASYLKGRIDEYLNLLLQTKESGTSGCMMDTSGTNTVTKAGGTIGGVPCKLQLSPIQPKRPAATYLGKAGYVGLTRQADAANNFHDNDAECRLASGHNTNGLGKSGQLSAAVTMAAGYVTVANSQTAVTVQALDALQEASGAAHQPWIDAWKAKKALTGAETAEFRNETAGIAGKTGVTKLVEEALLKKKDSEASEIQTELKKYFSGHENEQWTAIEKLISEQPVAQNLVGDNQPTKLGELEGNAKLTTILAYYRMETAGKFEVLTQK,2021-11-03,7aqx_A,364
+ 7c45_A,TSSATSSSSMILKYPYRVVDTHEKLKEAVTSLQGARSIALDIEAFCTTDQAKQLGRISLVQACSDAKPVVFLFDVLTLTPDVFVKDMQSLLSDREIRKLFFDCRRDVEALSCQLGVKPEGVLDLQVFFTAIQWKLRSVNRRSGMGYVLKSVAGLTRQEGDSAVQTAMTLGNRPVWDIRPLPDHFLEYAAGDVRHILLLSNYLVGNKDVPVDVVAVERLTAQYVEHYAVGKPVITEADATPAEVNRAWLERYIGPGGGCHFCGAKGHTEAECFKKQNGKAKCSFCGEVGHTARNCFKKHPQLL,2021-04-07,7c45_A,302
+ 6gus_A,QIQKAEQNDVKLAPPTDVRSGYIRLVKNVNYYIDSESIWVDNQEPQIVHFDAVVNLDKGLYVYPEPKRYARSVRQYKILNCANYHLTQVRTDFYDEFWGQGLRAAPKKQKKHTLSLTPDTTLYNAAQIICANYGEAFSVDKKKLAAALEHHHHHH,2019-05-29,6gus_A,155
+ 6q9c_A,FEFPEELKTKLQEHINYFPKKRQAILLCLHEIQNYYGYIPPESLKPLADMLELPLNHVEGVVAFYDMFDREDKAKYRIRVCVSIVCHLMGTNKLLKALENILGIKPGEVTPDGKFKIVPVQCLGACSEAPVFMVNDDEYKFESEVQLNEILSRYT,2019-06-26,6q9c_A,155
+ 7n0j_E,APQVITVSRFEVGKDKWAFNREEVMLTCRPGNALYVINPSTLVQYPLNDIAQKEVASGKTNAQPISVIQIDDPNNPGEKMSLAPFIERAEKLCVDHHHHHH,2022-03-09,7n0j_E,101
+ 6o6y_A,MGMKLLVVSWGDFERWKETKYRFGGETSVGPSTLPILQKVIKPDWTVIVLSDTIGKDFSSVETLREDVRNRVMDFLDRIGAGREVDVIIAPGIGEFTHGSFRGSAMDAYYYVLHALSEIIPTKGDLEVHFDSTHGLNYVTLLTYRALKDLLGIAAVMNTVTFYAYNSDPFVPKITKELNINTIETTMVKPTPLSEPLPGFDEYLCPYSMERAEFVRLKGSLNTLKNLRKEKKKLEAWIGSLLFGLPLLFLEEFPDIGRLESYIEELAETWGGAIAVNAEEKAVTRRLAFGSGFGTLVKLLFQARITRGLLVEEPYSIEKLYSVSDRLFRGSTLQRVRVELGKIEDKAIKYARKGAFPRDIPLRDFLGFDAANREVSPRNVLAHAGLEANVVEVSMEAWEPKRPEEEAGRHTHLKYTPVGLKKVEDIVSRALKESHHHHHH,2019-07-31,6o6y_A,440
+ 6zsl_B,SMAVGACVLCNSQTSLRCGACIRRPFLCCKCCYDHVISTSHKLVLSVNPYVCNAPGCDVTDVTQLYLGGMSYYCKSHKPPISFPLCANGQVFGLYKNTCVGSDNVTDFNAIATCDWTNAGDYILANTCTERLKLFAAETLKATEETFKLSYGIATVREVLSDRELHLSWEVGKPRPPLNRNYVFTGYRVTKNSKVQIGEYTFEKGDYGDAVVYRGTTTYKLNVGDYFVLTSHTVMPLSAPTLVPQEHYVRITGLYPTLNISDEFSSNVANYQKVGMQKYSTLQGPPGTGKSHFAIGLALYYPSARIVYTACSHAAVDALCEKALKYLPIDKCSRIIPARARVECFDKFKVNSTLEQYVFCTVNALPETTADIVVFDEISMATNYDLSVVNARLRAKHYVYIGDPAQLPAPRTLLTKGTLEPEYFNSVCRLMKTIGPDMFLGTCRRCPAEIVDTVSALVYDNKLKAHKDKSAQCFKMFYKGVITHDVSSAINRPQIGVVREFLTRNPAWRKAVFISPYNSQNAVASKILGLPTQTVDSSQGSEYDYVIFTQTTETAHSCNVNRFNVAITRAKVGILCIMSDRDLYDKLQFTSLEIPRRNVATLQ,2020-07-29,6zsl_B,603
+ 7rm7_A,MGSSHHHHHHSSGLVPRGSHMISKINGKLFADMIIQGAQNLSNNADLVDSLNVYPVPDGDTGTNMNLTMTSGREEVENNLSKNIGELGKTFSKGLLMGARGNSGVILSQLFRGFCKNIESESEINSKLLAESFQAGVETAYKAVMKPVEGTILTVAKDAAQAAIEKANNTEDCIELMEYIIVKANESLENTPNLLAVLKEVGVVDSGGKGLLCVYEGFLKALKGEKVE,2022-03-02,7rm7_A,228
+ 6ypi_A,GSGSPLAQQIKNILSLIGQADAAGRMDEVRTLQLNLCQLMVEYFQQSDGDGGSPLAQQIQNIHSFGHQAWAAGRLDEVRRIQENLYQLMKEYFQQSD,2021-04-28,6ypi_A,97
+ 6ro6_A,MEGALPKGLSDLIADPTLGPQITPDWVRTLSRIELRGKRPRDKQDWYEIYLHLKRILS,2019-10-09,6ro6_A,58
+ 7mf4_A,QGRAILTDRYINRGTAFTMEERQKLHILGRLPPVVETLEEQVARVYGQVKKYEKPINRYQHLVSVHSTNTTLYYATILAHLEEMLPIIYTPTVGEACMEYSHLFFRERGVYFNRLYKGQFRNIMRDAGYQKVEVVVITDGSRILGLGDLGSNGIGISIGKCSLYVAGAGIDPRLIVPVILDVGTNNERYLQDKDYLGMREKRLGDEEFYELLDEFMEAASAEWPNAVIQFEDFSNNHCFDIMERYQKKYRCFNDDIQGTGAVIAAGFLNAIKLSGVSPLQQRIVVFGAGSAAVGVANNIAALAARMYKFPVQDLVKTFYLVDTKGLVTTTRGDQLAAHKKLLARTDVSAEDSAKLRTLEEIVRFVKPTTLLGLGGVGPAFTEEIVKMVMQNTERPIIFPLSNPTSKAEVTPENAYKWTNGAAIVASGSPFPPTTIGGKTFKPSQGNNLYVFPGVGLGCALAQPTHIPEELLLTASESLNLLTTEGDLREGRLYPPLEDIHNISANVATDVILEAQRMKIDNNKKLPRTRDELLAFVKKAMWKPVYSGEVGEQVL,2021-07-28,7mf4_A,554
+ 7jrq_A,HHHHHHENLYFQSSEKEELFEKLKQTADEAVQLFQRLREIFDKGDDDSFEQVLEELEEALQKHRQLADQGRKKGLLTSEAAKQGDQFVQLFQRFREAWDKGDKDSLEQILEELEQVAQKAVELGLKILKTQ,2021-01-13,7jrq_A,131
+ 7wab_A,TTGEAYFEQLLDHHNPEKGTFSQRYWWSTEYWGGPGSPVVLFNPGEVSADGYEGYLTNDTLTGVYAQEIQGAVILIEHRYWGDSSPYEVLNAETLQYLTLDQSILDMTYFAETVKLQFDNSSRSNAQNAPWVMVGGSYSGALTAWTESIAPGTFWAYHATSAPVEAIYDFWQYFYPIQQGMAQNCSKDVSLVAEYVDKIGKNGTAKEQQELKELFGLGAVEHYDDFAAVLPNGPYLWQDNDFVTGYSSFFQFCDAVEGVEAGAAVTPGPEGVGLEKALANYANWFNSTILPNYCASYGYWTDEWSVACFDSYNASSPIFTDTSVGNPVDRQWEWFLCNEPFFWWQDGAPEGTSTIVPRLVSASYWQRQCPLYFPEVNGYTYGSAKGKNSATVNSWTGGWDMTRNTTRLIWTNGQYDPWRDSGVSSTFRPGGPLVSTANEPVQIIPGGFHCSDLYMEDYYANEGVRKVVDNEVKQIKEWVEEYYA,2022-01-12,7wab_A,484
+ 5znj_A,MKQSKVFIPTMRDVPSEAEAQSHRLLLKSGLIKQSTSGIYSYLPLATRVLNNITAIVRQEMERIDSVEILMPALQQAELWEESGRWGAYGPELMRLQDRHGRQFALGPTHEELVTSIVRNELKSYKQLPMTLFQIQSKFRDEKRPRFGLLRGREFIMKDAYSFHADEASLDQTYQDMYQAYSRIFERVGINARPVVADSGAIGGSHTHEFMALSAIGEDTIVYSKESDYTANIEKAEVVYEPNHKHSTVQPLEKIETPNVKTAQELADFLGRPVDEIAKTMIFKVDGEYIMVLVRGHHEINDIKLKSYFGTDNIELATQDEIVNLVGANPGSLGPVIDKEIKIYADNFVQDLNNLVVGANEDGYHLINVNVGRDFNVDEYGDFRFILEGEKLSDGSGVAHFAEGIEVGQVFKLGTKYSESMNATFLDNQGKAQPLIMGCYGIGISRTLSAIVEQNHDDNGIVWPKSVTPFDLHLISINPKKDDQRELADALYAEFNTKFDVLYDDRQERAGVKFNDADLIGLPLRIVVGKRASEGIVEVKERLTGDSEEVHIDDLMTVITNKYDNLK,2019-05-29,5znj_A,567
+ 6pnv_A,SNAVRIEITQGVDSARPIGVVPFKWAGPGAAPEDIGGIVAADLRNSGKFNPLDRSRLPQQPATAQEVQPTAWSALGIDAVVVGQVTPNPDGSYNVAYQLVDTGGAPGTVLAQNSYKVNKQWLRYAGHTASDEVFEKLTGIKGAFRTRIAYVVQTNGGQFPYELRVSDYDGYNQFVVHRSPQPLMSPAWSPDGSKLAYVTFESGRSALVIQTLANGAVRQVASFPRHNGAPAFSPDGTKLAFALSKTGSLNLYVMDLASGQIRQITDGRSNNTEPTWFPDSQTLAFTSDQAGRPQVYKMNINGGAAQRITWEGSQNQDADVSSDGKFMVMVSSNNGQQHIAKQDLVTGGVQVLSSTFLDETPSLAPNGTMVIYSSSQGMGSVLNLVSTDGRFKARLPATDGQVKSPAWSPYL,2019-07-17,6pnv_A,411
+ 6rwt_A,MILQLFRRKSKANEAIVLRVYEVIVAAARQKRFYAQFQVPDTPLGRYEMLSLHIFLALHRMKGENPALNALAQEIADEFFKDVDHSLRELGIGDQGVPKRMKKLARMFYGRVGAYGAALDANDAQALAAALTRNIRPDLEFWPHACYLGAYVLQCRDCLREISDEALAAGDISYMDVDQVDLAP,2019-09-18,6rwt_A,184
+ 6oz1_A,QGMTETISTPAATTTPDLADQQELERRVAQVVSNDPQLQALLPDDAVSGAVNEPGLTLIELIRRLLEGYGDRPALGQRAVELVTDEHGATTVALKTEFVTTSYRELWNRAEAIAAAWYAQGIRDGDFVAQLGFTSTDFASLDVAGLRLGTVSVPLQTGASVQQRNAILEETQPTVFAASVEYLEAAVDSVLATPSVQLLSVFDYHPEADAHRAALSAVRDRLETAGRTITIDSLGDAIARGRELPAAPQPSEDPDALRLLIYTSGSTGTPKGAMYPQWLVANLWQKKWLTTTVIPSVGVNFMPMSHLAGRLTLMGTLSGGGTAYYIASSDLSTFFEDIALIRPTEVLFVPRVVEMVFQRYQAELDRSLAPGESNAEIAEKIKVRIREEDFGGRVLNAGSGSAPLSPEMNDFMESLLQVAMLDGYGSTEAGAVWRDGVLQRPPVTEYKLVDVPELGYFTTDSPHPRGELRIKSETMFPGYYKRPETTADVFDEDGFYMTGDVVAELGPDHLKYLDRVKNVLKLAQGEFVAVSKLEAAYTGSPLVRQIFVYGNSERSYLLAVVVPTPEALERYADSPDALKPLIQDSLQQVAKGAELQSYEIPRDFIVETVPFTVESGLLSDARKLLRPKLKEHYGERLEALYAD,2020-04-22,6oz1_A,643
+ 6nl2_A,LADAVAHLTPERWEEANRLLVRKALAEFTHERLLTPEREPDDGGGQTYVVRSDDGQTAYRFTATVRALDHWQVDAASVTRHRDGAELPLAALDFFIELKQTLGLSDEILPVYLEEISSTLSGTCYKLTKPQLSSAELARSGDFQAVETGMTEGHPCFVANNGRLGFGIHEYLSYAPETASPVRLVWLAAHRSRAAFTAGVGIEYESFVRDELGAATVDRFHGVLRGRGLDPADYLLIPVHPWQWWNKLTVTFAAEVARGHLVCLGEGDDEYLAQQSIRTFFNASHPGKHYVKTALSVLNMGFMQGLSAAYMEATPAINDWLARLIEGDPVLKETGLSIIRERAAVGYRHLEYEQATDRYSPYRKMLAALWRESPVPSIREGETLATMASLVHQDHEGASFAGALIERSGLTPTEWLRHYLRAYYVPLLHSFYAYDLVYMPHGENVILVLADGVVRRAVYKDIAEEIAVMDPDAVLPPEVSRIAVDVPDDKKLLSIFTDVFDCFFRFLAANLAEEGIVTEDAFWRTVAEVTREYQESVPELADKFERYDMFAPEFALSCLNRLQLRDNRQMVDLADPSGALQLVGTLKNPLAGRG,2020-01-15,6nl2_A,594
+ 6p5h_A,GPLGSRFNAPQKYQKIKREEFNPETAEKNKIYLLEDQLVYLDIFGKVIDLGQTSDTCHRLFNAITTPFYQNYILYDEYIDPEESAEEAAMFEMGEIVKAKMKNID,2020-05-27,6p5h_A,105
+ 6q10_A,MAHHHHHHKLGERVSRTEMTDVTPAQLGETKVRVLNASGRGGQAADVAGALKDLGFTQPTAANDPVYADTRLDCQGQIRFGTAGQATAAAVWLVAPCTELFNDGRADDSVDLVLGTDFTTLAHNDDIDAVLSSLRPGATQPPDPTLIAKIHASSC,2019-08-14,6q10_A,155
+ 6jv8_A,LQLLHQKVEEQAAKYKHRVPKKCCYDGARENKYETCEQRVARVTIGPHCIRAFNECCTIADKIRKESHHKGMLLGR,2020-04-22,6jv8_A,76
+ 6lrd_A,MSRPAHWLLAPPASRDALLATMREWQVSPPVAQVLCGRDLRTELLALPLELTPNPALREAARHIVAAVREGKRIRIHGDYDADGVSATATLVLGLRAIGANVHGFIPHRLNEGYGIHPDRVPEHAAAADLVVTVDCGVSNLDEVKSLLATGTEVVVTDHHAPGENFPECLVVHPHLTPDYDPDRHNLTGAGVAYHLLWAVYEELGRPEPRALLPLATLGTVADVAPLLGENRALVRAGLAEMARTELPGLRALMNEKRVRQPTARDVAFILAPRINAAGRMGEADRALELLTTPSDHEAKSLAAYLEIRNQERRKIQDDMFAQALQLADPNDPALVLTHDDWHAGVMGIVASKLVETFNRPVYIVAQGKGSVRSTPGISAVQGLRESRDLLGRFGGHPGAAGFSLDPQNFGALRERIHGYVRQFPTPVPAVRLDAPLPVAALTPELLSELSILEPFGEGNPRPLWHLRGPLTDTRLVGKQGDVLQFRFGGVKGMKYSERDDAAGERDVAAELALNEWKGRTSLELHAAALRPLAPLALAGTEEGLPTLPRLNPREAMTFLKTGAAAYAEQGVATYLRDNVPGLTLLDTNAPHPGGDLILYGLPPESALRRWLHEAQEQGGRVAFALGPKTLAELDAALTLAKLLPDSHTEAAQEAAADAYRSWQWAHHYRVLNDAGWSASVYAMLGLPVPAALPKAAEALALAAG,2020-08-26,6lrd_A,705
+ 6tly_A,GSSAAPGAIQCMNRHKMERHGKMPAGYKGFDCNVCDQPMLKITEKAYMYRCEKCDYDVCNQCAESRKFKEVHFLCAKCGKKFPSQTKLQYHSRGCRGPS,2019-12-25,6tly_A,99
+ 7la6_A,SNADSLPERIDLFVSLFDYNSATTSYDIRSIQTDFPTRLLTPDSMLPQTSEYPLKDIQLLYKLAQSCTGKLPLSPLITEPLVFTRSLCKGSSLSPRWFARSGLIHPGGGTYAFRYAEKYPAQFANLLPYMHIQERPNAAEGTLLYHLQNMGEDAINALVSGASMFGSGSDLWLRKGDIYYLFNEETWLTNANKAGLSYSLLSADTCFIQRGNICWDVEDHS,2021-01-27,7la6_A,221
data/splits/atlas_train.csv ADDED
The diff for this file is too large to render. See raw diff
 
data/splits/atlas_val.csv ADDED
@@ -0,0 +1,40 @@
+ name,seqres,release_date,msa_id
+ 6irx_A,VYWDLDIQTNAVIRERAPADHLPPHPEIELQRAQLTTKLRQHYHELCSQREGIEPPRESFNRWLLERKVVDKGLDPLLPSECDPVISPSMFREIMNDIPIRLSRIKYKEEARKLLFKYAEAAKKMIDSRNVTPESRKVVKWNVEDTMNWLRRDHSASKEDYMDRLENLRKQCGPHVASVAKDSVEGICSKIYHISAEYVRRIRQAHLTLLKECNISVDGTESAEVQDRLVYCYPVRLSIPAPPQTRVELHFENDIACLRFKGEMVKVSRGHFNKLELLYRYSCIDDPRFEKFLSRVWCLIKRYQVMFGSGVNEGSGLQGSLPVPVFEALNKQFGVTFECFASPLNCYFKQFCSAFPDIDGFFGSRGPFLSFSPASGSFEANPPFCEELMDAMVTHFEDLLGRSSEPLSFIIFVPEWRDPPTPALTRMEASRFRRHQMTVPAFEHEYRSGSQHICKREEIYYKAIHGTAVIFLQNNAGFAKWEPTTERIQELLAAYK,2018-12-05,6irx_A
+ 6cka_B,MLYIDEFKEAIDKGYILGDTVAIVRKNGKIFDYVLPHEKVRDDEVVTVERVEEVMVELDKLEHHHHHH,2018-11-14,6cka_B
+ 6hj6_A,ETGMPLSCSKNNSHHYIFVGNNSGLELTLTNTSLLNHKFCNLSDAHKRAQYDMALMSIVSTFHLSIPNFNQYEAMSCDFNGDNITVQYNLSHASAVDAANHCGTVANGILETFHKFFWSNNIKDAYQLPNQGKLAHCYSTSYQFLIIQNTTWEDHCTFSRPTGTKHHHHHH,2018-10-10,6hj6_A
+ 6dgk_B,ASRYLTDMTLEEMSRDWFMLMPKQKVAGSLCIRMDQAIMDKNIILKANFSVIFDRLETLILLRAFTEEGAIVGEISPLPSLPGHTDEDVKNAVGVLIGGLEANDNTVRVSETLQRFAWRS,2018-08-15,6dgk_B
+ 6fc0_B,GPHMQPSAALQSLRSARFLPGIVQDIYPPGIKSPNPALNEAVQKKGRIFKYDVQFLLQFQNVFTEKPSPDFDQQVKALIGD,2018-06-27,6fc0_B
+ 6dlm_A,GSPRSYLLKELADLSQHLVRLLERLVRESERVVEVLERGEVDEEELKRLEDLHRELEKAVREVRETHREIRERSR,2018-12-19,6dlm_A
+ 6mdw_A,GPGRPQESLSLVDASWELVDPTPDLQALFVQFNDQFFWGQLEAVEVKWSVRMTLCAGICSYEGKGGMCSIRLSEPLLKLRPRKDLVETLLHEMIHAYLFVTNNDKDREGHGPEFCKHMHRINSLTGANITVYHTFHDEVDEYRRHWWRCNGPCQHRPPYYGYVKRATNREPSAHDYWWAEHQKTCGGTYIKIKE,2019-04-10,6mdw_A
+ 6bwq_A,TADAVLMIEANLDDQTGEGLGYVMNQLLTAGAYDVFFTPIQMKKDRPATKLTVLGNVNDKDLLTKLILQETTTIGVRYQTWQRTIMQRHFLTVATPYGDVQVKVATYQDIEKKMPEYADCAQLAQQFHIPFRTVYQAALVAVDQLDEEA,2018-06-20,6bwq_A
+ 5ydn_A,MNYATVNDLCARYTRTRLDILTRPKTADGQPDDAVAEQALADASAFIDGYLAARFVLPLTVVPSLLKRQCCVVAWFYLNESQPTEQITATYRDTVRWLEQVRDGKTDPG,2018-07-11,5ydn_A
+ 5w82_E,HMMPSEDYAIWYARATIAALQAAEYRLAMPSASYTAWFTDAVSDKLDKISESLNTLVECVIDKRLAVSVPEPLPVRVENKVQVEVEDEVRVRVENKVDVEVKN,2018-12-26,5w82_E
+ 6cb7_A,HMDKLRVLYDEFVTISKDNLERETGLSASDVDMDFDLNIFMTLVPVLAAAVCAITPTIEDDKIVTMMKYCSYQSFSFWFLKSGAVVKSVYNKLDYVKKEKFVATFRDMLLNVQTLISLNSMY,2018-12-12,6cb7_A
+ 5yrv_I,MARVSDYPLANKHPEWVKTATNKTLDDFTLENVLSNKVTAQDMRITPETLRLQASIAKDAGRDRLAMNFERAAELTAVPDDRILEIYNALRPYRSTKEELLAIADDLESRYQAKICAAFVREAATLYVERKKLKGDD,2018-09-19,5yrv_I
+ 5z51_A,PTLWPQREALKSALQYPALAGPVFDALTVEGFTHPEYAAVRAAIDTAGGTSAGLSGAQWLDMVRQQTTSTVTSALISELGVEAIQVDDDKLPRYIAGVLARLQEVWLGRQIAEVKSKLQRMSPIEQGDEYHALFGDLVAMEAYRRSLLEQASGDDLHHHHHH,2018-11-28,5z51_A
+ 6a9a_A,MISDSMTVEEIRLHLGLALKEKDFVVDKTGVKTIEIIGASFVADEPFIFGALNDEYIQRELEWYKSKSLFVKDIPGETPKIWQQVASSKGEINSNYGWAIWSEDNYAQYDMCLAELGQNPDSRRGIMIYTRPSMQFDYNKDGMSDFMSTNTVQYLIRDKKINAVVNMRSNDVVFGFRNNYAWQKYVLDKLVSDLNAGDSTRQYKAGSIIWNVGSLHVYSRHFYLVDHWWKTGETHISKKDYVGKYA,2019-01-02,6a9a_A
+ 6atg_C,GHMNQRNINELKIFVEKAKYYSIKLDAIYNECTGAYNDIMTYSEGTFSDQSKVNQAISIFKKDNKIVNKFKELEKIIEEYKPMFLSKLIDDFAIELDQAVDNDVSNARHVADSYKKLRKSVVLAYIESFDVISSKFVDSKFVEASKKFVNKAKEFVEENDLIALECIVKTIGDMVNDREINSRSRYNNFYKKEADFLGAAVELEGAYKAIKQTL,2018-09-12,6atg_C
+ 5w4a_B,MDTNKREIVEFLGIRTYFFPNLALYAVNNDELLVSDPNKANSFAAYVFGASDKKPSVDDIVQILFPSGSDSGTILTSMDTLLALGPDFLTEFKKRNQDLARFNLTHDLSILAQGDEDAAKKKLNLMGRKAKLQKTEAAKILAILIKTINSEENYEKFTELSELCGLDLDFDAYVFTKILGLEDEDTADEVEVIRDNFLNRLDQTKPKLADIIRNGP,2018-06-13,5w4a_B
+ 5zmo_A,GSREAPKTFHRRVGDVRPARRAMGPALHRPVLLLWAIGQAVARAPRLQPWSTTRDAVAPLMEKYGQVEDGVDGVRYPFWALVRDDLWCVEQAEELTLTSRGRRPTLESLNAVDPSAGLREDDYNLLRSQPEAAASAAAGLIARYFHLLPAGLLEDFGLHELLAGRWPDALRP,2018-09-26,5zmo_A
+ 6bk4_A,ARKDNLAKTIAETAKIREVLIIQNVLNCFNDDQVRSDFLNGENGAKKLENTELELLEKFFIETQTRRPETADDVSFIATAQKSAELFYSTINARPKSFGEVSFEKLRSLFQQIQDSGYLDKYYL,2018-11-07,6bk4_A
+ 6eu8_A,MDKKDDVKETIKKSKTIDATKYEMWTYVNLETGQTETHRDFSEWHVMKNGKLLETIPAKGSEADIKIKWHIAIHRFDIRTNEGEAIATKETEFSKVTGLPAGDYKKDVEIKDKMLVGFNMADMMKSKFTVAGMAKVNPVLKTWIVENPMGKAPVLSKSVFVVKFKDGSYAKIKFTDATNDKQEKGHVSFNYEFQPK,2018-10-24,6eu8_A
+ 6mbg_A,SNATGERFHPLLGRRVLDPAPGARCFTGTVSSDRPAYLGEHWVYDAIVVLGVTYLEMALAAASRLLPGNAADTLIVEDVTVWSPLVLRAGAPARLRLRSEDERFEIHSAEPERSDDESAWTRHATGRIARRQLSPDATGQLPRLDGEAVELDAYYERMRIYYGPRLRNIRHLERRGREAIGHVCLQGEEAQESASYELHPALLDACFQCVFALIYAHESHREPFVPLGCARIELRARGVREVRVHLRLHPPRSTDHNQTHTADLRLFDMEGRLVASVDALQLKRA,2018-09-19,6mbg_A
+ 6bm5_A,GSKKPRTPPVTLEAARDNDFAFDWQAYTPPVAHRLGVQEVEASIETLRNYIDWTPFFMTWSLAGKYPRILEDEVVGVEAQRLFKDANDMLDKLSAEKTLNPRGVVGLFPANRVGDDIEIYRDETRTHVINVSHHLRQQTEKTGFANYCLADFVAPKLSGKADYIGAFAVTGGLEEDALADAFEAQHDDYNKIMVKALADRLAEAFAEYLHERVRKVYWGYAPNENLSNEELIRENYQGIRPAPGYPACPEHTEKATIWELLEVEKHTGMKLTESFAMWPGASVSGWYFSHPDSKYYAVAQIQRDQVEDYARRKGMSVTEVERWLAPNLGYDAD,2018-05-23,6bm5_A
+ 6f45_D,MAVQGPWVGSSYVAETGQNWASLAANELRVTERPFWISSFIGRSKEEIWEWTGENHSFNKDWLIGELRNRGGTPVVINIRAHQVSYTPGAPLFEFPGDLPNAYITLNIYADIYGRGGTGGVAYLGGNPGGDCIHNWIGNRLRINNQGWICGGGGGGGGFRVGHTEAGGGGGRPLGAGGVSSLNLNGDNATLGAPGRGYQLGNDYAGNGGDVGNPGSASSAEMGGGAAGRAVVGTSPQWINVGNIAGSWL,2018-08-01,6f45_D
+ 6fub_B,METGNKYIEKRAIDLSRERDPNFFDNPGIPVPECFWFMFKNNVRQDAGTCYSSWKMDMKVGPNWVHIKSDDNCNLSGDFPPGWIVLGKKRPGF,2018-06-13,6fub_B
+ 6gfx_C,MEAGRPRPVLRSVNSREPSQVIFCNRSPRVVLPVWLNFDGEPQPYPTLPPGTGRRIHSYRGHLWLFRDAGTHDGLLVNQTELFVPSLNVDGQPIFANITLPVYTLKERCLQVVRSLVKPENYRRLDIVRSLYEDLEDHPNVQKDLERLTQERIAHQRMGD,2018-07-11,6gfx_C
+ 6a02_A,MAATNMTDNVTLNNDKISGQAWQAMRDIGMSRFELFNGRTQKAEQLAAQAEKLLNDDSTDWNLYVKSDKKAPVEGDHYIRINSSITVAEDYLPAGQKNDAINKANQKMKEGDKKGTIEALKLAGVSVIENQELIPLQQTRKDVTTALSLMNEGKYYQAGLLLKSAQDGIVVDSQSVQLEHHHHHH,2019-02-06,6a02_A
+ 6c0h_A,SGMKSAKEPTIYQDVDIIRRIQELMVLCSLLPPDGKLREALELALALHEEPALARITPLTNLHPFATKAWLETLWLGEGVSSEEKELVAWQNKSENMGPAIRELKNAEQQSGITLVARLTS,2018-09-05,6c0h_A
+ 6dnm_A,SHMVVLGGDRDFWLQVGIDPIQIMTGTATFYTLRCYLDDRPIFLGRNGRISVFGSERALARYLADEHDHDLSDLSTYDDIRTAATDGSLAVAVTDDNVYVLSGLVDDFADGPDAVDREQLDLAVELLRDIGDYSEDSAVDKALETTRPLGQLVAYVLDPHSVGKPTAPYAAAVREWEKLERFVESRLRRE,2019-01-23,6dnm_A
+ 6as3_A,HHHHHHMEKKLSDAQVALVAAWRKYPDLRESLEEAASILSLIVFQAETLSDQANELANYIRRQGLEEAEGACRNIDIMRAKWVEVCGEVNQYGIRVYGDAIDRDVD,2018-08-29,6as3_A
+ 6bn0_A,QPYNPCKPQEVIDTKCMGPKDCLYPNPDSCTTYIQCVPLDEVGNAKPVVKPCPKGLQWNDNVGKKWCDYPNLSTCPVKT,2018-08-22,6bn0_A
+ 5wfy_A,MILKILNEIASIGSTKQKQAILEKNKDNELLKRVYRLTYSRGLQYYIKKWPKPGIATQSFGMLTLTDMLDFIEFTLATRKLTGNAAIEELTGYITDGKKDDVEVLRRVMMRDLECGASVSIANKVWPGLEHHHHHH,2018-09-26,5wfy_A
+ 6hem_A,GPDFSKHLKEETIQIITKASHEHEDKSPETVLQSAIKLEYARLVKLAQEDTPPETDYRLHHVVVYFIQNQAPKKIIEKTLLEQFGDRNLSFDERCHNIMKVAQAKLEMIKPEEVNLEEYEEWHQDYRKFRETTMYLIIGLENFQRESYIDSLLFLICAYQNNKELLSKGLYRGHDEELISHYRRECLLKLNEQAAELFESGEDREVNNGLIIMNEFIVPFLPLLLVDEMEEKDILAVEDMRNRWCSYLGQEMEPHLQEKLTDFLPKLLDCSMEIKSFHEPPKLPSYSTHELCERFARIMLSLS,2019-03-27,6hem_A
+ 5z1n_A,GPLSGLKKLIPEEGRELIGSVKKIIKRVSNEEKANEMEKNILKILIKVFFYIDSKAIQIGDLAKVDRALRDGFNHLDRAFRYYGVKKAADLVVILEKASTALKEAEQETVTLLTPFFRPHNIQLIRNTFAFLGSLDFFTKVWDDLEIEDDLFLLISALNKYTQIELIY,2018-10-17,5z1n_A
+ 5zlq_A,GPMGKIALQLKATLENITNLRPVGEDFRWYLKMKCGNCGEISDKWQYIRLMDSVALKGGRGSASMVQKCKLCARENSIEILSSTIKPYNAEDNENFKTIVEFECRGLEPVDFQPQAGFAAEGVESGTAFSDINLQEKDWTDYDEKAQESVGIYEVTHQFVKC,2018-10-10,5zlq_A
+ 5naz_A,SVAHGFLITRHSQTTDAPQCPQGTLQVYEGFSLLYVQGNKRAHGQDLGTAGSCLRRFSTMPFMFCNINNVCNFASRNDYSYWLSTPEPMPMSMQPLKGQSIQPFISRCAVCEAPAVVIAVHSQTIQIPHCPQGWDSLWIGYSFMMHTSAGAEGSGQALASPGSCLEEFRSAPFIECHGRGTCNYYANSYSFWLATVDVSDMFSKPQSETLKAGDLRTRISRCQVCMKRT,2018-09-12,5naz_A
+ 6e33_A,GKVKKRLPQAKRACAKCQKDNKKCDDARPCQRCIKAKTDCIDLPRKKRPTGVRRGPYKKLS,2018-10-24,6e33_A
+ 5ok6_A,MDREPQHEELPGLDSQWRQIENGESGRERPLRAGESWFLVEKHWYKQWEAYVQGGDQDSSTFPGCINNATLFQDEINWRLKEGLVEGEDYVLLPAAAWHYLVSWYGLEHGQPPIERKVIELPNIQKVEVYPVELLLVRHNDLGKSHTVQFSHTDSIGLVLRTARERFLVEPQEDTRLWAKNSEGSLDRLYDTHITVLDAALETGQLIIMETRKKDGTWPSAQLEHHHHHH,2018-08-08,5ok6_A
+ 6idx_A,GPGSMPPPSDIVKVAIEWPGANAQLLEIDQKRPLASIIKEVCDGWSLPNPEYYTLRYADGPQLYITEQTRSDIKNGTILQLAISPSRAARQLMERTQSSNMETRLDAMKELAKLSADVTFATEFINMDGIIVLTRLVESGTKLLSHYSEMLAFTLTAFLELMDHGIVSWDMVSITFIKQIAGYVSQPMVDVSILQRSLAILESMVLNSQSLYQKIAEEITVGQLISHLQVSNQEIQTYAIALINALFLKAPEDKRQDMANAFAQKHLRSIILNHVIRGNRPIKTEMAHQLYVLQVLTFNLLEERMMTKMDPNDQAQRDIIFELRRIAFDAESDPSNAPGSGTEKRKAMYTKDYKMLGFTNHINPAMDFTQTPPGMLALDNMLYLAKVHQDTYIRIVLENSSREDKHECPFGRSAIELTKMLCEILQVGELPNEGRNDYHPMFFTHDRAFEELFGICIQLLNKTWKEMRATAEDFNKVMQVVREQITRALPSKPNSLDQFKSKLRSLSYSEILRLRQSERMSQDD,2019-01-23,6idx_A
+ 6e5y_A,SNAMTTILPNLPTGQKVGIAFSGGLDTSAALLWMRQKGAVPYAYTANLGQPDEPDYDEIPRRAMQYGAEAARLVDCRAQLVAEGIAALQAGAFHISTAGLTYFNTTPIGRAVTGTMLVAAMKEDGVNIWGDGSTFKGNDIERFYRYGLLTNPDLKIYKPWLDQTFIDELGGRAEMSEYMRQAGFDYKMSAEKAYSTDSNMLGATHEAKDLELLSAGIRIVQPIMGVAFWQDSVQIKAEEVTVRFEEGQPVALNGVEYADPVELLLEANRIGGRHGLGMSDQIENRIIEAKSRGIYEAPGLALLFIAYERLVTGIHNEDTIEQYRENGRKLGRLLYQGRWFDPQAIMLRETAQRWVARAITGEVTLELRRGNDYSLLNTESANLTYAPERLSMEKVENAPFTPADRIGQLTMRNLDIVDTREKLFTYVKTGLLAPSAGSALPQIKDGKK,2018-08-01,6e5y_A
+ 6crk_G,MASNNTASIAQARKLVEQLKMEANIDRIKVSKAAADLMAYCEAHAKEDPLLTPVPASENPFREKKFFSAIL,2018-10-24,6crk_G
data/tps_inference.py ADDED
@@ -0,0 +1,171 @@
+ import argparse
+ import copy
+ import json
+ import pickle
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--sim_ckpt', type=str, default=None, required=True)
+ parser.add_argument('--data_dir', type=str, default='share/4AA_data')
+ parser.add_argument('--mddir', type=str, default='/data/cb/scratch/share/mdgen/4AA_sims')
+ parser.add_argument('--suffix', type=str, default='')
+ parser.add_argument('--pdb_id', nargs='*', default=[])
+ parser.add_argument('--num_frames', type=int, default=1000)
+ parser.add_argument('--num_batches', type=int, default=100)
+ parser.add_argument('--batch_size', type=int, default=10)
+ parser.add_argument('--out_dir', type=str, default=".")
+ parser.add_argument('--split', type=str, default='splits/4AA_test.csv')
+ parser.add_argument('--chunk_idx', type=int, default=0)
+ parser.add_argument('--n_chunks', type=int, default=1)
+ args = parser.parse_args()
+ import mdgen.analysis
+ import os, torch, mdtraj, tqdm
+ from mdgen.geometry import atom14_to_atom37, atom37_to_torsions
+ from mdgen.tensor_utils import tensor_tree_map
+
+ from mdgen.residue_constants import restype_order
+ from mdgen.wrapper import NewMDGenWrapper
+ from mdgen.dataset import atom14_to_frames
+ from mdgen.utils import atom14_to_pdb  # used below to write sampled structures
+ import pandas as pd
+ import contextlib
+ import numpy as np
+
+ @contextlib.contextmanager
+ def temp_seed(seed):
+     state = np.random.get_state()
+     np.random.seed(seed)
+     try:
+         yield
+     finally:
+         np.random.set_state(state)
+
+ os.makedirs(args.out_dir, exist_ok=True)
+
+ def get_sample(arr, seqres, start_idxs, end_idxs, start_state, end_state, num_frames=1000):
+     start_idx = np.random.choice(start_idxs, 1).item()
+     end_idx = np.random.choice(end_idxs, 1).item()
+
+     start_arr = np.copy(arr[start_idx:start_idx + 1]).astype(np.float32)
+     end_arr = np.copy(arr[end_idx:end_idx + 1]).astype(np.float32)
+     seqres = torch.tensor([restype_order[c] for c in seqres])
+
+     start_frames = atom14_to_frames(torch.from_numpy(start_arr))
+     start_atom37 = torch.from_numpy(atom14_to_atom37(start_arr, seqres)).float()
+     start_torsions, start_torsion_mask = atom37_to_torsions(start_atom37, seqres[None])
+
+     end_frames = atom14_to_frames(torch.from_numpy(end_arr))
+     end_atom37 = torch.from_numpy(atom14_to_atom37(end_arr, seqres)).float()
+     end_torsions, end_torsion_mask = atom37_to_torsions(end_atom37, seqres[None])
+     L = start_frames.shape[1]
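+     # Build the endpoint-conditioned trajectory: every frame is initialized from
+     # the start conformation and only the last frame is overwritten with the end
+     # conformation, so the model fills in the transition path between the two.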
+     traj_torsions = start_torsions.expand(num_frames, -1, -1, -1).clone()
+     traj_torsions[-1] = end_torsions
+
+     traj_trans = start_frames._trans.expand(num_frames, -1, -1).clone()
+     traj_trans[-1] = end_frames._trans
+
+     traj_rots = start_frames._rots._rot_mats.expand(num_frames, -1, -1, -1).clone()
+     traj_rots[-1] = end_frames._rots._rot_mats
+
+     mask = torch.ones(L)
+     return {
+         'torsions': traj_torsions,
+         'torsion_mask': start_torsion_mask[0],
+         'trans': traj_trans,
+         'rots': traj_rots,
+         'seqres': seqres,
+         'start_idx': start_idx,
+         'end_idx': end_idx,
+         'start_state': start_state,
+         'end_state': end_state,
+         'mask': mask,  # (L,)
+     }
+
+ def do(model, name, seqres):
+     print('doing', name)
+     if os.path.exists(f'{args.out_dir}/{name}_metadata.json'):
+         return
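+     # Build the reference analysis for this peptide (featurized MD trajectory ->
+     # TICA -> k-means -> MSM), or load it from the cached pickle if it exists.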
+     if os.path.exists(f'{args.out_dir}/{name}_metadata.pkl'):
+         pkl_metadata = pickle.load(open(f'{args.out_dir}/{name}_metadata.pkl', 'rb'))
+         msm = pkl_metadata['msm']
+         cmsm = pkl_metadata['cmsm']
+         ref_kmeans = pkl_metadata['ref_kmeans']
+     else:
+         with temp_seed(137):
+             feats, ref = mdgen.analysis.get_featurized_traj(f'{args.mddir}/{name}/{name}', sidechains=True)
+             tica, _ = mdgen.analysis.get_tica(ref)
+             kmeans, ref_kmeans = mdgen.analysis.get_kmeans(tica.transform(ref))
+             try:
+                 msm, pcca, cmsm = mdgen.analysis.get_msm(ref_kmeans, nstates=10)
+             except Exception as e:
+                 print('ERROR', e, name, flush=True)
+                 return
+         pickle.dump({
+             'msm': msm,
+             'cmsm': cmsm,
+             'tica': tica,
+             'pcca': pcca,
+             'kmeans': kmeans,
+             'ref_kmeans': ref_kmeans,
+         }, open(f'{args.out_dir}/{name}_metadata.pkl', 'wb'))
+
+     flux_mat = cmsm.transition_matrix * cmsm.pi[None, :]
+     flux_mat[flux_mat < 0.0000001] = np.inf  # set 0 flux to inf so we do not choose that as the argmin
+     start_state, end_state = np.unravel_index(np.argmin(flux_mat, axis=None), flux_mat.shape)
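+     # The argmin over the flux matrix selects the pair of metastable states with
+     # the smallest nonzero equilibrium flux, i.e. the rarest transition, which
+     # serves as the start/end condition for transition path sampling.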
+     ref_discrete = msm.metastable_assignments[ref_kmeans]
+     start_idxs = np.where(ref_discrete == start_state)[0]
+     end_idxs = np.where(ref_discrete == end_state)[0]
+     if (ref_discrete == start_state).sum() == 0 or (ref_discrete == end_state).sum() == 0:
+         print('No start or end state found for ', name, 'skipping...')
+         return
+
+     arr = np.lib.format.open_memmap(f'{args.data_dir}/{name}.npy', 'r')
+
+     metadata = []
+     for i in tqdm.tqdm(range(args.num_batches), desc='num batch'):
+         batch_list = []
+         for _ in range(args.batch_size):
+             batch_list.append(
+                 get_sample(arr, seqres, copy.deepcopy(start_idxs), end_idxs, start_state, end_state, num_frames=args.num_frames))
+         batch = next(iter(torch.utils.data.DataLoader(batch_list, batch_size=args.batch_size)))
+         batch = tensor_tree_map(lambda x: x.cuda(), batch)
+         print('Start tps for ', name, 'with start coords', batch['trans'][0, 0, 0])
+         print('Start tps for ', name, 'with end coords', batch['trans'][0, -1, 0])
+         atom14s, _ = model.inference(batch)
+         for j in range(args.batch_size):
+             idx = i * args.batch_size + j
+             path = os.path.join(args.out_dir, f'{name}_{idx}.pdb')
+             atom14_to_pdb(atom14s[j].cpu().numpy(), batch['seqres'][0].cpu().numpy(), path)
+
+             traj = mdtraj.load(path)
+             traj.superpose(traj)
+             traj.save(os.path.join(args.out_dir, f'{name}_{idx}.xtc'))
+             traj[0].save(os.path.join(args.out_dir, f'{name}_{idx}.pdb'))
+             metadata.append({
+                 'name': name,
+                 'start_idx': batch['start_idx'][j].cpu().item(),
+                 'end_idx': batch['end_idx'][j].cpu().item(),
+                 'start_state': batch['start_state'][j].cpu().item(),
+                 'end_state': batch['end_state'][j].cpu().item(),
+                 'path': path,
+             })
+     json.dump(metadata, open(f'{args.out_dir}/{name}_metadata.json', 'w'))
+
+
+ @torch.no_grad()
+ def main():
+     model = NewMDGenWrapper.load_from_checkpoint(args.sim_ckpt)
+     model.eval().to('cuda')
+     df = pd.read_csv(args.split, index_col='name')
+     names = np.array(df.index)
+
+     chunks = np.array_split(names, args.n_chunks)
+     chunk = chunks[args.chunk_idx]
+     print('#' * 20)
+     print(f'RUN NUMBER: {args.chunk_idx}, PROCESSING IDXS {args.chunk_idx * len(chunk)}-{(args.chunk_idx + 1) * len(chunk)}')
+     print('#' * 20)
+     for name in tqdm.tqdm(chunk, desc='num peptides'):
+         if args.pdb_id and name not in args.pdb_id:
+             continue
+         do(model, name, df.seqres[name])
+
+
+ main()
data/train.py ADDED
@@ -0,0 +1,77 @@
+ from mdgen.parsing import parse_train_args
+ args = parse_train_args()
+ from mdgen.logger import get_logger
+ logger = get_logger(__name__)
+
+ import torch, os, wandb
+ from mdgen.dataset import MDGenDataset
+ from mdgen.wrapper import NewMDGenWrapper
+ from pytorch_lightning.callbacks import ModelCheckpoint, ModelSummary
+ import pytorch_lightning as pl
+
+
+ torch.set_float32_matmul_precision('medium')
+
+ if args.wandb:
+     wandb.init(
+         entity=os.environ["WANDB_ENTITY"],
+         settings=wandb.Settings(start_method="fork"),
+         project="mdgen",
+         name=args.run_name,
+         config=args,
+     )
+
+
+ trainset = MDGenDataset(args, split=args.train_split)
+
+ if args.overfit:
+     valset = trainset
+ else:
+     valset = MDGenDataset(args, split=args.val_split, repeat=args.val_repeat)
+
+ train_loader = torch.utils.data.DataLoader(
+     trainset,
+     batch_size=args.batch_size,
+     num_workers=args.num_workers,
+     shuffle=True,
+ )
+
+ val_loader = torch.utils.data.DataLoader(
+     valset,
+     batch_size=args.batch_size,
+     num_workers=args.num_workers,
+ )
+ model = NewMDGenWrapper(args)
+
+ trainer = pl.Trainer(
+     accelerator="gpu" if torch.cuda.is_available() else 'auto',
+     max_epochs=args.epochs,
+     limit_train_batches=args.train_batches or 1.0,
+     limit_val_batches=0.0 if args.no_validate else (args.val_batches or 1.0),
+     num_sanity_val_steps=0,
+     precision=args.precision,
+     enable_progress_bar=not args.wandb or os.getlogin() == 'hstark',
+     gradient_clip_val=args.grad_clip,
+     default_root_dir=os.environ["MODEL_DIR"],
+     callbacks=[
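+         # save_top_k=-1 keeps every checkpoint written at the configured epoch frequency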
+         ModelCheckpoint(
+             dirpath=os.environ["MODEL_DIR"],
+             save_top_k=-1,
+             every_n_epochs=args.ckpt_freq,
+         ),
+         ModelSummary(max_depth=2),
+     ],
+     accumulate_grad_batches=args.accumulate_grad,
+     val_check_interval=args.val_freq,
+     check_val_every_n_epoch=args.val_epoch_freq,
+     logger=False
+ )
+
+ # torch.manual_seed(137)
+ # np.random.seed(137)
+
+
+ if args.validate:
+     trainer.validate(model, val_loader, ckpt_path=args.ckpt)
+ else:
+     trainer.fit(model, train_loader, val_loader, ckpt_path=args.ckpt)
data/upsampling_inference.py ADDED
@@ -0,0 +1,105 @@
+ import argparse
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--ckpt', type=str, default=None, required=True)
+ parser.add_argument('--data_dir', type=str, default=None, required=True)
+ parser.add_argument('--suffix', type=str, default='_i100')
+ parser.add_argument('--pdb_id', nargs='*', default=[])
+ parser.add_argument('--batch_size', type=int, default=1)
+ parser.add_argument('--out_dir', type=str, default=".")
+ parser.add_argument('--split', type=str, default='splits/4AA_implicit_test.csv')
+ args = parser.parse_args()
+
+ import os, torch, mdtraj, tqdm
+ import numpy as np
+ from mdgen.geometry import atom14_to_frames, atom14_to_atom37, atom37_to_torsions
+ from mdgen.residue_constants import restype_order
+ from mdgen.tensor_utils import tensor_tree_map
+ from mdgen.wrapper import NewMDGenWrapper
+ from mdgen.utils import atom14_to_pdb
+ import pandas as pd
+
+
+
+
+ os.makedirs(args.out_dir, exist_ok=True)
+
+
+
+ def get_batch(name, seqres, num_frames):
+     arr = np.lib.format.open_memmap(f'{args.data_dir}/{name}{args.suffix}.npy', 'r')
+     arr = np.copy(arr).astype(np.float32)
+
+     frames = atom14_to_frames(torch.from_numpy(arr))
+     seqres = torch.tensor([restype_order[c] for c in seqres])
+     atom37 = torch.from_numpy(atom14_to_atom37(arr, seqres[None])).float()
+     torsions, torsion_mask = atom37_to_torsions(atom37, seqres[None])
+     L = frames.shape[1]
+     mask = torch.ones(L)
+     return {
+         'torsions': torsions,
+         'torsion_mask': torsion_mask[0],
+         'trans': frames._trans,
+         'rots': frames._rots._rot_mats,
+         'seqres': seqres,
+         'mask': mask,  # (L,)
+     }
+
+ def split_batch(item, num_frames=1000, cond_interval=100):
+     total_frames = item['torsions'].shape[0] * cond_interval
+     batches = []
+     total_items = int(total_frames / num_frames)
+     cond_frames = int(num_frames / cond_interval)
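+     # Each window spans `num_frames` output frames; the keyframes from the
+     # subsampled input are placed every `cond_interval` frames, and the model
+     # upsamples (fills in) the frames between them.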
+     for i in tqdm.trange(total_items):
+         new_batch = {
+             'torsions': torch.zeros(num_frames, 4, 7, 2),
+             'torsion_mask': item['torsion_mask'],
+             'trans': torch.zeros(num_frames, 4, 3),
+             'rots': torch.zeros(num_frames, 4, 3, 3),
+             'seqres': item['seqres'],
+             'mask': item['mask'],
+         }
+         new_batch['rots'][:] = torch.eye(3)
+         new_batch['torsions'][::cond_interval] = item['torsions'][i*cond_frames:(i+1)*cond_frames]
+         new_batch['trans'][::cond_interval] = item['trans'][i*cond_frames:(i+1)*cond_frames]
+         new_batch['rots'][::cond_interval] = item['rots'][i*cond_frames:(i+1)*cond_frames]
+         batches.append(new_batch)
+     return batches
+
+ def do(model, name, seqres):
+
+     item = get_batch(name, seqres, num_frames = model.args.num_frames)
+
+     items = split_batch(item, num_frames=model.args.num_frames, cond_interval=model.args.cond_interval)
+
+     loader = torch.utils.data.DataLoader(items, shuffle=False, batch_size=args.batch_size)
+
+     all_atom14 = []
+     for batch in tqdm.tqdm(loader):
+         batch = tensor_tree_map(lambda x: x.cuda(), batch)
+         atom14, _ = model.inference(batch)
+         all_atom14.extend(atom14)
+
+     all_atom14 = torch.cat(all_atom14)
+
+     path = os.path.join(args.out_dir, f'{name}.pdb')
+     atom14_to_pdb(all_atom14.cpu().numpy(), batch['seqres'][0].cpu().numpy(), path)
+
+     traj = mdtraj.load(path)
+     traj.superpose(traj)
+     traj.save(os.path.join(args.out_dir, f'{name}.xtc'))
+     traj[0].save(os.path.join(args.out_dir, f'{name}.pdb'))
+
+ @torch.no_grad()
+ def main():
+     model = NewMDGenWrapper.load_from_checkpoint(args.ckpt)
+     model.eval().to('cuda')
+
+
+     df = pd.read_csv(args.split, index_col='name')
+     for name in df.index:
+         if args.pdb_id and name not in args.pdb_id:
+             continue
+         do(model, name, df.seqres[name])
+
+
+ main()