Skip to content

Commit 792d1a1

Browse files
committed
llama : minor
1 parent f39e607 commit 792d1a1

File tree

1 file changed

+9
-18
lines changed

1 file changed

+9
-18
lines changed

Diff for: llama.cpp

+9-18
Original file line number | Diff line number | Diff line change
@@ -3503,8 +3503,7 @@ static struct ggml_cgraph * llm_build_llama(
35033503

35043504
// norm
35053505
cur = llm_build_norm(ctx0, inpL,
3506-
model.layers[il].attn_norm,
3507-
NULL,
3506+
model.layers[il].attn_norm, NULL,
35083507
LLM_NORM_RMS, norm_rms_eps, cb, il);
35093508
cb(cur, "attn_norm", il);
35103509

@@ -3540,8 +3539,7 @@ static struct ggml_cgraph * llm_build_llama(
35403539
// feed-forward network
35413540
{
35423541
cur = llm_build_norm(ctx0, inpFF,
3543-
model.layers[il].ffn_norm,
3544-
NULL,
3542+
model.layers[il].ffn_norm, NULL,
35453543
LLM_NORM_RMS, norm_rms_eps, cb, il);
35463544
cb(cur, "ffn_norm", il);
35473545

@@ -3563,8 +3561,7 @@ static struct ggml_cgraph * llm_build_llama(
35633561
cur = inpL;
35643562

35653563
cur = llm_build_norm(ctx0, cur,
3566-
model.output_norm,
3567-
NULL,
3564+
model.output_norm, NULL,
35683565
LLM_NORM_RMS, norm_rms_eps, cb, -1);
35693566
cb(cur, "result_norm", -1);
35703567

@@ -3661,8 +3658,7 @@ static struct ggml_cgraph * llm_build_baichaun(
36613658
struct ggml_tensor * inpSA = inpL;
36623659

36633660
cur = llm_build_norm(ctx0, inpL,
3664-
model.layers[il].attn_norm,
3665-
NULL,
3661+
model.layers[il].attn_norm, NULL,
36663662
LLM_NORM_RMS, norm_rms_eps, cb, il);
36673663
cb(cur, "attn_norm", il);
36683664

@@ -3709,8 +3705,7 @@ static struct ggml_cgraph * llm_build_baichaun(
37093705
// feed-forward network
37103706
{
37113707
cur = llm_build_norm(ctx0, inpFF,
3712-
model.layers[il].ffn_norm,
3713-
NULL,
3708+
model.layers[il].ffn_norm, NULL,
37143709
LLM_NORM_RMS, norm_rms_eps, cb, il);
37153710
cb(cur, "ffn_norm", il);
37163711

@@ -3732,8 +3727,7 @@ static struct ggml_cgraph * llm_build_baichaun(
37323727
cur = inpL;
37333728

37343729
cur = llm_build_norm(ctx0, cur,
3735-
model.output_norm,
3736-
NULL,
3730+
model.output_norm, NULL,
37373731
LLM_NORM_RMS, norm_rms_eps, cb, -1);
37383732
cb(cur, "result_norm", -1);
37393733

@@ -4394,8 +4388,7 @@ static struct ggml_cgraph * llm_build_refact(
43944388
struct ggml_tensor * inpSA = inpL;
43954389

43964390
cur = llm_build_norm(ctx0, inpL,
4397-
model.layers[il].attn_norm,
4398-
NULL,
4391+
model.layers[il].attn_norm, NULL,
43994392
LLM_NORM_RMS, norm_rms_eps, cb, il);
44004393
cb(cur, "attn_norm", il);
44014394

@@ -4430,8 +4423,7 @@ static struct ggml_cgraph * llm_build_refact(
44304423
// feed-forward network
44314424
{
44324425
cur = llm_build_norm(ctx0, inpFF,
4433-
model.layers[il].ffn_norm,
4434-
NULL,
4426+
model.layers[il].ffn_norm, NULL,
44354427
LLM_NORM_RMS, norm_rms_eps, cb, il);
44364428
cb(cur, "ffn_norm", il);
44374429

@@ -4453,8 +4445,7 @@ static struct ggml_cgraph * llm_build_refact(
44534445
cur = inpL;
44544446

44554447
cur = llm_build_norm(ctx0, cur,
4456-
model.output_norm,
4457-
NULL,
4448+
model.output_norm, NULL,
44584449
LLM_NORM_RMS, norm_rms_eps, cb, -1);
44594450
cb(cur, "result_norm", -1);
44604451

0 commit comments

Comments (0)