{"@context":"https://w3id.org/codemeta/3.0","@type":"SoftwareSourceCode","identifier":"pkg:npm/%40realtimex/node-llama-cpp","name":"@realtimex/node-llama-cpp","description":"Run AI models locally on your machine with node.js bindings for llama.cpp. Enforce a JSON schema on the model output at the generation level","version":"0.3.1","softwareVersion":"0.3.1","license":"https://spdx.org/licenses/MIT","codeRepository":"https://github.com/therealtimex/node-llama-cpp","issueTracker":"https://github.com/therealtimex/node-llama-cpp/issues","url":"https://node-llama-cpp.withcat.ai","keywords":["llama","llama-cpp","llama.cpp","bindings","ai","cmake","cmake-js","prebuilt-binaries","llm","gguf","metal","cuda","vulkan","grammar","embedding","rerank","reranking","json-grammar","json-schema-grammar","functions","function-calling","token-prediction","speculative-decoding","temperature","minP","topK","topP","seed","xtc","json-schema","raspberry-pi","self-hosted","local","catai","mistral","deepseek","qwen","qwq","gpt","gpt-oss","typescript","lora","batching","gpu"],"programmingLanguage":{"@type":"ComputerLanguage","name":"TypeScript"},"maintainer":[{"@type":"Person","name":"realtimex"}],"author":[{"@type":"Person","name":"realtimex"}],"copyrightHolder":[{"@type":"Person","name":"realtimex"}],"dateCreated":"2026-04-03","dateModified":"2026-04-06","datePublished":"2026-04-06","copyrightYear":2026,"downloadUrl":"https://registry.npmjs.org/@realtimex/node-llama-cpp/-/node-llama-cpp-0.3.1.tgz","applicationCategory":"npm","runtimePlatform":"npm","developmentStatus":"active","sameAs":["https://www.npmjs.com/package/@realtimex/node-llama-cpp"],"funder":[{"@type":"Organization","url":"https://github.com/sponsors/giladgd"}]}