diff --git a/gallery/index.yaml b/gallery/index.yaml
index 5c0d67017..c7299230f 100644
--- a/gallery/index.yaml
+++ b/gallery/index.yaml
@@ -1,4 +1,31 @@
 ---
+- name: "qwen3.5-27b-claude-4.6-opus-reasoning-distilled-heretic-i1"
+  url: "github:mudler/LocalAI/gallery/virtual.yaml@master"
+  urls:
+    - https://huggingface.co/mradermacher/Qwen3.5-27B-Claude-4.6-Opus-Reasoning-Distilled-heretic-i1-GGUF
+  description: |
+    Imatrix GGUF quantization (by mradermacher) of Qwen3.5-27B-Claude-4.6-Opus-Reasoning-Distilled-heretic.
+  tags:
+    - default
+  overrides:
+    parameters:
+      model: llama-cpp/models/Qwen3.5-27B-Claude-4.6-Opus-Reasoning-Distilled-heretic.i1-Q4_K_M.gguf
+    name: Qwen3.5-27B-Claude-4.6-Opus-Reasoning-Distilled-heretic-i1-GGUF
+    backend: llama-cpp
+    template:
+      use_tokenizer_template: true
+    known_usecases:
+      - chat
+    function:
+      grammar:
+        disable: true
+    description: Imported from https://huggingface.co/mradermacher/Qwen3.5-27B-Claude-4.6-Opus-Reasoning-Distilled-heretic-i1-GGUF
+    options:
+      - use_jinja:true
+  files:
+    - filename: llama-cpp/models/Qwen3.5-27B-Claude-4.6-Opus-Reasoning-Distilled-heretic.i1-Q4_K_M.gguf
+      sha256: af6c2ceae20d019624cd6ec48cfffb646b0309b0a7a82d9719754297394168e1
+      uri: https://huggingface.co/mradermacher/Qwen3.5-27B-Claude-4.6-Opus-Reasoning-Distilled-heretic-i1-GGUF/resolve/main/Qwen3.5-27B-Claude-4.6-Opus-Reasoning-Distilled-heretic.i1-Q4_K_M.gguf
 - name: "qwen_qwen3.5-0.8b"
   url: "github:mudler/LocalAI/gallery/virtual.yaml@master"
   urls: