From e68c8bc1e3e46b4dd0b5d22ff35e1277cc9cdc89 Mon Sep 17 00:00:00 2001
From: caitianchi
Date: Tue, 25 Jun 2024 20:05:52 +0800
Subject: [PATCH] change n_layer

---
 examples/llava/clip.cpp | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/examples/llava/clip.cpp b/examples/llava/clip.cpp
index 5b6f7aef31917..cb6da1f4a003d 100644
--- a/examples/llava/clip.cpp
+++ b/examples/llava/clip.cpp
@@ -576,7 +576,7 @@ static ggml_cgraph * clip_image_build_graph(clip_ctx * ctx, const clip_image_f32
     const int hidden_size = hparams.hidden_size;
     const int n_head = hparams.n_head;
     const int d_head = hidden_size / n_head;
-    const int n_layer = hparams.n_layer;
+    int n_layer = hparams.n_layer;
     const float eps = hparams.eps;
 
     const int batch_size = imgs->size;
@@ -647,7 +647,10 @@ static ggml_cgraph * clip_image_build_graph(clip_ctx * ctx, const clip_image_f32
     }
 
     // loop over layers
-    for (int il = 0; il < n_layer; il++) {
+    if (ctx->has_minicpmv_projector){
+        n_layer += 1;
+    }
+    for (int il = 0; il < n_layer-1; il++) {
         struct ggml_tensor * cur = embeddings; // embeddings = residual, cur = hidden_states
 
         //const size_t nb_q_w = model.layers[il].q_w->nb[0];
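
Note (not part of the patch): the net effect of the hunks above can be sketched as
follows. This is a minimal illustration using only the two symbols the diff touches,
ctx->has_minicpmv_projector and hparams.n_layer; the surrounding function body is
elided.

    // Sketch of the loop-bound logic after the patch:
    // - with a MiniCPM-V projector, n_layer is bumped by 1 before the
    //   loop, so all hparams.n_layer transformer blocks still run;
    // - for any other model the loop now stops one block early,
    //   skipping the final layer of the vision encoder.
    int n_layer = hparams.n_layer;     // no longer const: adjusted below
    if (ctx->has_minicpmv_projector) {
        n_layer += 1;                  // cancels the -1 in the loop bound
    }
    for (int il = 0; il < n_layer - 1; il++) {
        // ... build one transformer block for layer il ...
    }

Presumably the intent is that non-MiniCPM-V models feed their projector from the
penultimate layer's hidden states (a common choice for CLIP-style encoders), while
MiniCPM-V keeps the full layer stack; the patch itself only establishes the loop
bounds shown above.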