convert_bloom_checkpoint.py
"""Split a sharded Hugging Face BLOOM checkpoint ('bloom/') into per-module
files ('bloom-new/'): input embeddings, final layernorm + LM head, and one
file per transformer block."""
import json
import os

import torch

if __name__ == '__main__':
    # Output directory for the converted shards.
    os.makedirs('bloom-new', exist_ok=True)

    # The HF sharded-checkpoint index maps each parameter name to the
    # shard file that contains it.
    with open('bloom/pytorch_model.bin.index.json') as f:
        index = json.load(f)

    ## emb: input embeddings and the layernorm applied to them
    item = {}
    item['word_embeddings.weight'] = torch.load(
        'bloom/' + index['weight_map']['word_embeddings.weight'],
        map_location=torch.device('cpu'),
    )['word_embeddings.weight']
    item['word_embeddings_layernorm.bias'] = torch.load(
        'bloom/' + index['weight_map']['word_embeddings_layernorm.bias'],
        map_location=torch.device('cpu'),
    )['word_embeddings_layernorm.bias']
    item['word_embeddings_layernorm.weight'] = torch.load(
        'bloom/' + index['weight_map']['word_embeddings_layernorm.weight'],
        map_location=torch.device('cpu'),
    )['word_embeddings_layernorm.weight']
    torch.save(item, 'bloom-new/pytorch_embs.pt')

    ## out: final layernorm plus the LM head; BLOOM ties the LM head
    ## to the input embedding matrix, hence the reuse below
    item = {}
    item['lm_head.weight'] = torch.load(
        'bloom/' + index['weight_map']['word_embeddings.weight'],
        map_location=torch.device('cpu'),
    )['word_embeddings.weight']
    item['ln_f.weight'] = torch.load(
        'bloom/' + index['weight_map']['ln_f.weight'],
        map_location=torch.device('cpu'),
    )['ln_f.weight']
    item['ln_f.bias'] = torch.load(
        'bloom/' + index['weight_map']['ln_f.bias'],
        map_location=torch.device('cpu'),
    )['ln_f.bias']
    torch.save(item, 'bloom-new/pytorch_lm_head.pt')

    ## layers: one output file per transformer block (BLOOM-176B has 70)
    for i in range(0, 70):
        layer_prefix = f'h.{i}.'
        item = {}
        # Collect every parameter belonging to block i.
        layer_maps = {k: v for k, v in index['weight_map'].items() if k.startswith(layer_prefix)}
        for k, v in layer_maps.items():
            # Strip the block prefix so keys are local to the layer.
            new_k = k.replace(layer_prefix, '')
            item[new_k] = torch.load(
                'bloom/' + v,
                map_location=torch.device('cpu'),
            )[k]
        torch.save(item, f'bloom-new/pytorch_{i}.pt')
        del item  # free this block's tensors before loading the next one
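
    # A minimal sanity check, assuming the conversion above has already run
    # and produced 'bloom-new/pytorch_embs.pt'. This snippet is illustrative
    # and not part of the original conversion script; the expected keys are
    # exactly the ones saved into the embeddings shard above.
    #
    #     embs = torch.load('bloom-new/pytorch_embs.pt', map_location='cpu')
    #     assert set(embs) == {
    #         'word_embeddings.weight',
    #         'word_embeddings_layernorm.weight',
    #         'word_embeddings_layernorm.bias',
    #     }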