Fix attention UT issue.
changqi1 committed Jun 4, 2024
1 parent 0faa9de commit 3cfa650
Showing 1 changed file with 6 additions and 3 deletions.
tests/ut/layers_attention_test.cpp (9 changes: 6 additions & 3 deletions)
@@ -87,14 +87,17 @@ void test_AttentionLLaMA(void) {
     int nextTokenNum = 1;
 
     compareAttentionLLaMA<T>(step++, batchSize, inputSeqLen, pastSeqLen, currentSeqLen, attHeadDim, attHeadNum,
-            kvHeadNum, maxPositions, maxPosEmbed, hiddenSize, qkvProj, qkvProj + qSize, qkvProj + kvSize, oProj);
+            kvHeadNum, maxPositions, maxPosEmbed, hiddenSize, qkvProj, qkvProj + qSize, qkvProj + qSize + kvSize,
+            oProj);
     pastSeqLen += inputSeqLen;
     currentSeqLen = nextTokenNum;
     compareAttentionLLaMA<T>(step++, batchSize, inputSeqLen, pastSeqLen, currentSeqLen, attHeadDim, attHeadNum,
-            kvHeadNum, maxPositions, maxPosEmbed, hiddenSize, qkvProj, qkvProj + qSize, qkvProj + kvSize, oProj);
+            kvHeadNum, maxPositions, maxPosEmbed, hiddenSize, qkvProj, qkvProj + qSize, qkvProj + qSize + kvSize,
+            oProj);
     pastSeqLen += nextTokenNum;
     compareAttentionLLaMA<T>(step++, batchSize, inputSeqLen, pastSeqLen, currentSeqLen, attHeadDim, attHeadNum,
-            kvHeadNum, maxPositions, maxPosEmbed, hiddenSize, qkvProj, qkvProj + qSize, qkvProj + kvSize, oProj);
+            kvHeadNum, maxPositions, maxPosEmbed, hiddenSize, qkvProj, qkvProj + qSize, qkvProj + qSize + kvSize,
+            oProj);
 
     free(qkvProj);
     free(oProj);
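For context, the fix corrects a pointer offset into the packed QKV projection buffer: Q, K, and V are stored back to back, so the value block begins at qSize + kvSize, while the old argument qkvProj + kvSize pointed into the middle of the Q block. Below is a minimal sketch of that layout. It is not part of the commit; the concrete widths are made up, and the assumption that qSize and kvSize derive from attHeadDim * attHeadNum and attHeadDim * kvHeadNum is inferred from the argument names in the call.

    #include <cstdio>

    int main() {
        // Hypothetical widths for illustration only; in the real test these
        // presumably come from attHeadDim * attHeadNum (query) and
        // attHeadDim * kvHeadNum (key/value).
        const int qSize = 8;
        const int kvSize = 4;

        // One row of the packed projection buffer: [ Q (qSize) | K (kvSize) | V (kvSize) ].
        float qkvProj[qSize + 2 * kvSize] = {};

        float *query = qkvProj;                  // Q begins at offset 0
        float *key = qkvProj + qSize;            // K begins right after Q
        float *value = qkvProj + qSize + kvSize; // V begins after Q and K

        // The pre-fix argument, qkvProj + kvSize, lands inside the Q block
        // rather than at the start of V, so the test compared attention
        // outputs computed from the wrong value data.
        std::printf("Q offset %td, K offset %td, V offset %td\n",
                    query - qkvProj, key - qkvProj, value - qkvProj);
        return 0;
    }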
