benchmark_pytorch_multimer.py
# Patch the stock gating attention with the TPP-optimized kernel. The
# class-level assignment must happen before the model is built so that every
# GatingAttention instance dispatches to the optimized forward.
from alphafold_pytorch_jit.basics import GatingAttention
from tpp_pytorch_extension.alphafold.Alpha_Attention import GatingAttentionOpti_forward
GatingAttention.forward = GatingAttentionOpti_forward
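# This is ordinary Python monkey-patching: rebinding `forward` on the class
# makes every existing and future GatingAttention instance use the optimized
# kernel, because instances look methods up on the class. A minimal sketch of
# the same pattern (hypothetical names):
#
#     class Slow:
#         def forward(self, x): return x
#     def fast_forward(self, x): return x  # optimized drop-in replacement
#     Slow.forward = fast_forward          # all Slow instances now dispatch here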
from alphafold_pytorch_jit.subnets_multimer import AlphaFold
from alphafold.model.config import model_config
import torch
import pdb
from time import time
# Multimer v3 model configuration and benchmark dimensions.
mc = model_config('model_1_multimer_v3')['model']
n_msa = 14987   # number of MSA rows
n_seq = 86      # number of residues (alternative size: 1828)
root_weights = '/mnt/remote6/yangw/af2home/weights/extracted/model_1_multimer_v3'
# Dummy all-ones input features at the shapes the multimer model expects.
batch = dict(
    # MSA features
    msa = torch.ones((n_msa, n_seq), dtype=torch.int64),
    msa_mask = torch.ones((n_msa, n_seq), dtype=torch.float32),
    cluster_bias_mask = torch.ones((n_msa), dtype=torch.float32),
    deletion_matrix = torch.ones((n_msa, n_seq), dtype=torch.float32),
    bert_mask = torch.ones((n_msa, n_seq), dtype=torch.float32),
    # per-residue features
    seq_mask = torch.ones((n_seq), dtype=torch.float32),
    aatype = torch.ones((n_seq), dtype=torch.int64),
    residue_index = torch.ones((n_seq), dtype=torch.int64),
    asym_id = torch.ones((n_seq), dtype=torch.int64),
    entity_id = torch.ones((n_seq), dtype=torch.int64),
    sym_id = torch.ones((n_seq), dtype=torch.int64),
    # template features (4 templates, 37-atom representation)
    template_aatype = torch.ones((4, n_seq), dtype=torch.int64),
    template_all_atom_mask = torch.ones((4, n_seq, 37), dtype=torch.float32),
    template_all_atom_positions = torch.ones((4, n_seq, 37, 3), dtype=torch.float32),
    # recycling features carried over from the previous iteration
    prev_pos = torch.ones((n_seq, 37, 3), dtype=torch.float32),
    prev_msa_first_row = torch.ones((n_seq, 256), dtype=torch.float32),
    prev_pair = torch.ones((n_seq, n_seq, 128), dtype=torch.float32)
)
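# Optional sanity check: rough memory footprint of the dummy batch, computed
# from the tensors themselves (assumes every value in `batch` is a
# torch.Tensor, as is the case above).
batch_bytes = sum(v.numel() * v.element_size() for v in batch.values())
print('dummy batch footprint: {:.1f} MiB'.format(batch_bytes / 2**20))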
# Build the model from the extracted weights and time a single forward pass.
model = AlphaFold(mc, root_weights)
t0 = time()
res = model(batch)
print('inference time: {:.2f} s'.format(time() - t0))
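# A single timed call includes one-off costs (e.g. lazy initialization and
# warm-up); for steadier numbers one could average several runs under
# torch.no_grad() after a warm-up pass, e.g. (sketch, left disabled):
#
#     with torch.no_grad():
#         model(batch)                  # warm-up
#         t0 = time()
#         n_iter = 3
#         for _ in range(n_iter):
#             model(batch)
#         print((time() - t0) / n_iter)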
# Report the shape (or scalar value) of every output head.
for k, v in res.items():
    if isinstance(v, dict):
        for k2, v2 in v.items():
            print('{}.{} = {}'.format(k, k2, v2.shape))
    elif isinstance(v, (int, float)):
        print('{} = {}'.format(k, v))
    else:
        print('{} = {}'.format(k, v.shape))
pdb.set_trace()  # drop into the debugger for interactive inspection of res