fix qwen3 prompt templates

2026-01-19 14:12:20 +01:00
parent 70d2ac8d36
commit ee8ce9e831
7 changed files with 387 additions and 45 deletions

@@ -355,9 +355,9 @@ def create_ollama_model(modelfile_path, gguf_path, model_name, capabilities=None
     )
     if result.returncode == 0:
         print(f"✓ Model '{model_name}' created successfully")
         # Success - output will be shown by the caller
         if result.stdout:
-            print(f" {result.stdout.strip()}")
+            print(result.stdout.strip())
     else:
         print(f"✗ Failed to create model")
         if result.stderr:
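
For orientation, below is a minimal, hypothetical sketch of the surrounding function, assuming `create_ollama_model` shells out to the `ollama create` CLI via `subprocess.run` and that the Modelfile references the GGUF weights. The exact command, flags, and control flow outside the hunk are assumptions for illustration, not part of this commit.

# Hypothetical reconstruction for context only; the real script may differ.
import subprocess

def create_ollama_model(modelfile_path, gguf_path, model_name, capabilities=None):
    # Assumed invocation: the Modelfile at modelfile_path is expected to point
    # at gguf_path; any handling of `capabilities` is omitted from this sketch.
    result = subprocess.run(
        ["ollama", "create", model_name, "-f", modelfile_path],
        capture_output=True,
        text=True,
    )
    if result.returncode == 0:
        print(f"✓ Model '{model_name}' created successfully")
        # Relay any CLI output directly, as in the change above
        if result.stdout:
            print(result.stdout.strip())
        return True
    else:
        print("✗ Failed to create model")
        if result.stderr:
            print(result.stderr.strip())
        return False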