# ollama-utils/modelfile-repo/ministral-3-8b-instruct-2512-q5_k_m.Modelfile
# ollama-utils-metadata
# hf_upstream: https://huggingface.co/mistralai/Ministral-3-8B-Instruct-2512-GGUF
# quantization: Q5_K_M
# capabilities: vision, tools
#
# mmproj_url: https://huggingface.co/unsloth/Ministral-3-8B-Instruct-2512-GGUF
# mmproj_quant: BF16
FROM ./Ministral-3-8B-Instruct-2512-Q5_K_M.gguf
# Specialized built-in parser for the Ministral 3 output format
PARSER ministral
# Context window and recommended low-temperature settings for stable output
PARAMETER num_ctx 75776
PARAMETER temperature 0.15
PARAMETER top_p 1.0
PARAMETER stop "</s>"
PARAMETER stop "[INST]"
PARAMETER stop "[/INST]"
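# Note: these defaults can be overridden per request via the "options" field of
# the Ollama API, or interactively in `ollama run` with /set parameter.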
# Template: Basic structure that lets the PARSER handle the heavy lifting
TEMPLATE """{{ if .System }}[SYSTEM_PROMPT]{{ .System }}[/SYSTEM_PROMPT]{{ end }}[INST]{{ .Prompt }}[/INST]"""
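# Example render of the template above (assuming system "You are a helpful assistant."
# and user prompt "Hello"):
#   [SYSTEM_PROMPT]You are a helpful assistant.[/SYSTEM_PROMPT][INST]Hello[/INST]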
SYSTEM """You are a helpful and concise AI assistant."""
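
# To build and run locally (a sketch; the model tag below is only a suggested name):
#   ollama create ministral-3-8b-instruct-2512:q5_k_m -f ministral-3-8b-instruct-2512-q5_k_m.Modelfile
#   ollama run ministral-3-8b-instruct-2512:q5_k_m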