convert_opt66_checkpoint.py
import os
import json
import argparse

import torch

if __name__ == '__main__':
    # Output directory for the converted per-module checkpoint files.
    os.makedirs('opt-66b-new', exist_ok=True)

    # The sharded checkpoint index maps each parameter name to the shard file
    # that stores it.
    with open('opt-66b/pytorch_model.bin.index.json') as f:
        index = json.load(f)

    ## emb: token and position embeddings
    item = {}
    item['embed_tokens.weight'] = torch.load(
        'opt-66b/' + index['weight_map']['model.decoder.embed_tokens.weight'],
        map_location=torch.device('cpu'),
    )['model.decoder.embed_tokens.weight']
    item['embed_positions.weight'] = torch.load(
        'opt-66b/' + index['weight_map']['model.decoder.embed_positions.weight'],
        map_location=torch.device('cpu'),
    )['model.decoder.embed_positions.weight']
    torch.save(item, 'opt-66b-new/pytorch_embs.pt')

    ## out: LM head (tied to the token embedding matrix) and final layer norm
    item = {}
    item['lm_head.weight'] = torch.load(
        'opt-66b/' + index['weight_map']['model.decoder.embed_tokens.weight'],
        map_location=torch.device('cpu'),
    )['model.decoder.embed_tokens.weight']
    item['final_layer_norm.weight'] = torch.load(
        'opt-66b/' + index['weight_map']['model.decoder.final_layer_norm.weight'],
        map_location=torch.device('cpu'),
    )['model.decoder.final_layer_norm.weight']
    item['final_layer_norm.bias'] = torch.load(
        'opt-66b/' + index['weight_map']['model.decoder.final_layer_norm.bias'],
        map_location=torch.device('cpu'),
    )['model.decoder.final_layer_norm.bias']
    torch.save(item, 'opt-66b-new/pytorch_lm_head.pt')

    ## layers: one file per decoder layer (OPT-66B has 64 decoder layers)
    for i in range(0, 64):
        layer_prefix = f'model.decoder.layers.{i}.'
        item = {}
        # Collect every parameter belonging to this layer from the index.
        layer_maps = {k: v for k, v in index['weight_map'].items() if k.startswith(layer_prefix)}
        for k, v in layer_maps.items():
            # Strip the layer prefix so keys match a standalone decoder layer module.
            new_k = k.replace(layer_prefix, '')
            item[new_k] = torch.load(
                'opt-66b/' + v,
                map_location=torch.device('cpu'),
            )[k]
        torch.save(item, f'opt-66b-new/pytorch_{i}.pt')
        del item
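
A minimal sketch (not part of the original script) of how the converted files could be inspected or loaded back; the key names shown in the comments follow directly from the dictionaries saved above.

import torch

embs = torch.load('opt-66b-new/pytorch_embs.pt', map_location='cpu')
lm_head = torch.load('opt-66b-new/pytorch_lm_head.pt', map_location='cpu')
layer_0 = torch.load('opt-66b-new/pytorch_0.pt', map_location='cpu')

# ['embed_positions.weight', 'embed_tokens.weight']
print(sorted(embs.keys()))
# ['final_layer_norm.bias', 'final_layer_norm.weight', 'lm_head.weight']
print(sorted(lm_head.keys()))
# Per-layer tensors, keyed without the 'model.decoder.layers.0.' prefix.
print(sorted(layer_0.keys()))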