Skip to content

Commit

Permalink
modify codes according to review comments
Browse files Browse the repository at this point in the history
  • Loading branch information
zhou.weiguo committed Apr 26, 2024
1 parent 67beeb6 commit f20e281
Show file tree
Hide file tree
Showing 3 changed files with 8 additions and 6 deletions.
6 changes: 6 additions & 0 deletions ggml-qnn.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -4666,6 +4666,12 @@ static ggml_guid_t ggml_backend_qnn_guid() {


// Backend-registry init hook for the QNN backend.
// params:    filesystem path holding the prebuilt QNN runtime libraries; may be
//            NULL, in which case a default Android path is substituted below.
// user_data: the QNN device index, smuggled through the registry's void* slot
//            as an integer (hence the (int)(intptr_t) round-trip).
// Returns the initialized backend handle from ggml_backend_qnn_init, or
// whatever that function yields on failure (presumably NULL — confirm).
static ggml_backend_t ggml_backend_qnn_reg_init(const char * params, void * user_data) {
    if (nullptr == params) {
        // Default data path of the prebuilt QNN libs provided by Qualcomm.
        // Can be obtained through JNI from the Java layer, such as "/data/data/com.ggml.llamacpp/",
        // or hardcoded to "/data/local/tmp/", which is an Android-OS-defined path.
        params = "/data/local/tmp/";
    }
    ggml_backend_t qnn_backend = ggml_backend_qnn_init((int) (intptr_t) user_data, params);

    return qnn_backend;
Expand Down
4 changes: 2 additions & 2 deletions llama.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -15403,8 +15403,8 @@ struct llama_context * llama_new_context_with_model(
#elif defined(GGML_USE_QNN)
if (model->n_gpu_layers > 0) {
//the second param is data path of prebuilt QNN libs provided by Qualcomm
//in Android APP mode, can be obtained through JNI from Java layer
//in Android terminal mode, can be hardcoded to "/data/local/tmp"
//can be obtained through JNI from Java layer such as "/data/data/com.ggml.llamacpp/"
//or hardcoded to "/data/local/tmp/"
ggml_backend_t backend = ggml_backend_qnn_init(model->main_gpu, "/data/local/tmp/");
if (nullptr == backend) {
LLAMA_LOG_ERROR("%s: failed to initialize QNN backend\n", __func__);
Expand Down
4 changes: 0 additions & 4 deletions tests/test-backend-ops.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -2225,11 +2225,7 @@ int main(int argc, char ** argv) {
continue;
}

#ifdef GGML_USE_QNN
ggml_backend_t backend = ggml_backend_reg_init_backend(i, "/data/local/tmp/");
#else
ggml_backend_t backend = ggml_backend_reg_init_backend(i, NULL);
#endif
GGML_ASSERT(backend != NULL);

if (backend_filter == NULL && ggml_backend_is_cpu(backend)) {
Expand Down

0 comments on commit f20e281

Please sign in to comment.