Commit 732012c

fix
nikolaydubina committed Jul 31, 2023
1 parent 30f15b6 commit 732012c
Showing 2 changed files with 3 additions and 3 deletions.
llama2/transformer.go: 2 changes (1 addition & 1 deletion)
@@ -100,7 +100,7 @@ func Transformer(token int, pos int, config Config, s RunState, w TransformerWei
 	// iterate over all timesteps, including the current one
 	for t := 0; t <= pos; t++ {
 		// get the key vector for this head and at this timestamp
-		k := s.KeyCache[(loff + t*dim + h*headSize):(loff + (t+1)*dim + h*headSize)]
+		k := s.KeyCache[(loff + t*dim + h*headSize):(loff + t*dim + (h+1)*headSize)]
 		// calculate the attention score as the dot product of q and k
 		var score float32
 		for i := 0; i < headSize; i++ {
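
The old slice ended at loff + (t+1)*dim + h*headSize, a full dim elements past its start, so it covered the remaining heads of timestep t and spilled into timestep t+1 instead of holding just head h's headSize elements. The dot-product loop only reads the first headSize entries, so the scores came out right, but the over-long slice is wrong and, at the final timestep, can index past the layer's region of the cache. A minimal runnable sketch of the corrected indexing, assuming a llama2.c-style flat cache layout ([seqLen][dim] per layer, dim = nHeads*headSize); all names and sizes below are illustrative, not the repository's API:

// Sketch of the per-head key-cache slicing fixed in this commit,
// under the assumed flat layout described above.
package main

import "fmt"

func main() {
	const (
		seqLen   = 4
		nHeads   = 2
		headSize = 3
		dim      = nHeads * headSize // 6
	)
	keyCache := make([]float32, seqLen*dim) // one layer, so loff = 0
	loff, t, h := 0, 2, 1

	start := loff + t*dim + h*headSize

	// Fixed end index: exactly one head's key vector at timestep t.
	k := keyCache[start : loff+t*dim+(h+1)*headSize]
	fmt.Println(len(k)) // 3 == headSize

	// Old end index: dim elements past start, spilling into timestep t+1.
	kOld := keyCache[start : loff+(t+1)*dim+h*headSize]
	fmt.Println(len(kOld)) // 6 == dim
}
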
main.go: 4 changes (2 additions & 2 deletions)
@@ -57,8 +57,8 @@ func main() {
 	w := llama2.NewTransformerWeightsFromCheckpoint(config, checkpointFile, isSharedWeights)
 
 	// right now we cannot run for more than config.SeqLen steps
-	if steps <= 0 || steps >= config.SeqLen {
-		steps = config.SeqLen - 1
+	if steps <= 0 || steps > config.SeqLen {
+		steps = config.SeqLen
 	}
 
 	runState := llama2.NewRunState(config)
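
The old condition treated steps == config.SeqLen as out of range and capped every too-large request at config.SeqLen - 1, wasting one usable step; after the change, the full sequence length is allowed. A small sketch of the new clamping behavior (clampSteps is a hypothetical helper for illustration, not part of the repository):

// Sketch of the step clamping after this commit: zero, negative, or
// too-large step counts fall back to the full sequence length.
package main

import "fmt"

func clampSteps(steps, seqLen int) int {
	if steps <= 0 || steps > seqLen {
		return seqLen
	}
	return steps
}

func main() {
	fmt.Println(clampSteps(0, 256))   // 256: non-positive falls back to seqLen
	fmt.Println(clampSteps(300, 256)) // 256: capped at seqLen
	fmt.Println(clampSteps(256, 256)) // 256: now allowed (was clamped to 255)
	fmt.Println(clampSteps(100, 256)) // 100: in-range values pass through
}
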
