adjust status panel
@@ -181,15 +181,24 @@ function updateRunningModels(models) {
         return;
     }
 
-    container.innerHTML = models.map(model => `
+    container.innerHTML = models.map(model => {
+        const gpuLayers = model.gpu_layers || 0;
+        const totalLayers = model.total_layers || 0;
+        const cpuLayers = totalLayers > 0 ? totalLayers - gpuLayers : 0;
+
+        return `
         <div class="running-model">
             <div class="running-model-name">${escapeHtml(model.name)}</div>
             <div class="running-model-stats">
                 VRAM: ${model.vram_gb.toFixed(2)} GB
                 ${model.offload_pct > 0 ? ` | CPU: ${model.offload_pct.toFixed(1)}%` : ''}
             </div>
+            <div class="running-model-layers">
+                GPU: ${gpuLayers} layers | CPU: ${cpuLayers} layers
+            </div>
         </div>
-    `).join('');
+        `;
+    }).join('');
 }
 
 // ===== MODEL MANAGEMENT =====
@@ -110,8 +110,16 @@ body {
 }
 
 .running-model-stats {
-    font-size: 11px;
+    font-size: 0.85rem;
     color: var(--text-secondary);
     margin-top: 0.25rem;
 }
 
+.running-model-layers {
+    font-size: 0.85rem;
+    color: var(--accent);
+    margin-top: 0.25rem;
+    font-weight: 500;
+}
+
 .no-models {
@@ -398,12 +398,19 @@ def api_status():
                 size_total = model.get('size', 0) / (1024**3)  # GB
                 offload_pct = ((size_total - size_vram) / size_total * 100) if size_total > 0 else 0
 
+                # Extract layer information from model details
+                details = model.get('details', {})
+                total_layers = details.get('parameter_size', 0)
+                gpu_layers = details.get('quantization_level', 0)
+
                 running_models.append({
                     'name': model.get('name', 'Unknown'),
                     'size_gb': size_total,
                     'vram_gb': size_vram,
                     'offload_pct': offload_pct,
-                    'expires_at': model.get('expires_at', '')
+                    'expires_at': model.get('expires_at', ''),
+                    'gpu_layers': gpu_layers,
+                    'total_layers': total_layers
                 })
     except Exception as e:
         print(f"Error getting running models: {e}")