From bb10875487fe346092d274b914f2cac9ba6c9b26 Mon Sep 17 00:00:00 2001 From: AsbjornOlling Date: Tue, 25 Nov 2025 10:50:20 +0100 Subject: [PATCH] use std::ffi::c_char in cstr_to_string --- llama-cpp-2/src/lib.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/llama-cpp-2/src/lib.rs b/llama-cpp-2/src/lib.rs index 16f05e31..8139725f 100644 --- a/llama-cpp-2/src/lib.rs +++ b/llama-cpp-2/src/lib.rs @@ -13,7 +13,7 @@ //! //! - `cuda` enables CUDA gpu support. //! - `sampler` adds the [`context::sample::sampler`] struct for a more rusty way of sampling. -use std::ffi::NulError; +use std::ffi::{c_char, NulError}; use std::fmt::Debug; use std::num::NonZeroI32; @@ -398,7 +398,7 @@ pub struct LlamaBackendDevice { pub fn list_llama_ggml_backend_devices() -> Vec<LlamaBackendDevice> { let mut devices = Vec::new(); for i in 0..unsafe { llama_cpp_sys_2::ggml_backend_dev_count() } { - fn cstr_to_string(ptr: *const i8) -> String { + fn cstr_to_string(ptr: *const c_char) -> String { if ptr.is_null() { String::new() } else {