mirror of
https://github.com/mudler/LocalAI
synced 2026-04-21 13:27:21 +00:00
feat: improve CLI error messages with actionable guidance (#8880)
- transcript.go: Model not found error now suggests available models commands
- util.go: GGUF error explains format and how to get models
- worker_p2p.go: Token error explains purpose and how to obtain one
- run.go: Startup failure includes troubleshooting steps and docs link
- model_config_loader.go: Config validation errors include file path and guidance

Refs: H2 - UX Review Issue

Signed-off-by: localai-bot <localai-bot@noreply.github.com>
Co-authored-by: localai-bot <localai-bot@noreply.github.com>
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
parent
6b245299d7
commit
315b634a91
5 changed files with 6 additions and 7 deletions
|
|
@ -507,7 +507,7 @@ func (r *RunCMD) Run(ctx *cliContext.Context) error {
|
|||
|
||||
app, err := application.New(opts...)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed basic startup tasks with error %s", err.Error())
|
||||
return fmt.Errorf("LocalAI failed to start: %w.\nTroubleshooting steps:\n 1. Check that your models directory exists and is accessible: %s\n 2. Verify model config files are valid YAML: 'local-ai util usecase-heuristic <config>'\n 3. Check available disk space and file permissions\n 4. Run with --log-level=debug for more details\nSee https://localai.io/basics/troubleshooting/ for more help", err, r.ModelsPath)
|
||||
}
|
||||
|
||||
appHTTP, err := http.API(app)
|
||||
|
|
|
|||
|
|
@ -3,7 +3,6 @@ package cli
|
|||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
|
|
@ -60,7 +59,7 @@ func (t *TranscriptCMD) Run(ctx *cliContext.Context) error {
|
|||
|
||||
c, exists := cl.GetModelConfig(t.Model)
|
||||
if !exists {
|
||||
return errors.New("model not found")
|
||||
return fmt.Errorf("model %q not found. Run 'local-ai models list' to see available models, or install one with 'local-ai models install <model>'. See https://localai.io/models/ for more information", t.Model)
|
||||
}
|
||||
|
||||
c.Threads = &t.Threads
|
||||
|
|
|
|||
|
|
@ -74,7 +74,7 @@ func (u *CreateOCIImageCMD) Run(ctx *cliContext.Context) error {
|
|||
|
||||
func (u *GGUFInfoCMD) Run(ctx *cliContext.Context) error {
|
||||
if len(u.Args) == 0 {
|
||||
return fmt.Errorf("no GGUF file provided")
|
||||
return fmt.Errorf("no GGUF file provided. Usage: local-ai util gguf-info <path-to-file.gguf>\nGGUF is a binary format for storing quantized language models. You can download GGUF models from https://huggingface.co or install one with 'local-ai models install <model>'")
|
||||
}
|
||||
// We try to guess only if we don't have a template defined already
|
||||
f, err := gguf.ParseGGUFFile(u.Args[0])
|
||||
|
|
|
|||
|
|
@ -38,7 +38,7 @@ func (r *P2P) Run(ctx *cliContext.Context) error {
|
|||
// Check if the token is set
|
||||
// as we always need it.
|
||||
if r.Token == "" {
|
||||
return fmt.Errorf("Token is required")
|
||||
return fmt.Errorf("a P2P token is required to join the network. Set it via the LOCALAI_TOKEN environment variable or the --token flag. You can generate a token by running 'local-ai run --p2p' on the main node. See https://localai.io/features/distribute/ for more information")
|
||||
}
|
||||
|
||||
port, err := freeport.GetFreePort()
|
||||
|
|
|
|||
|
|
@ -193,9 +193,9 @@ func (bcl *ModelConfigLoader) ReadModelConfig(file string, opts ...ConfigLoaderO
|
|||
bcl.configs[c.Name] = *c
|
||||
} else {
|
||||
if err != nil {
|
||||
return fmt.Errorf("config is not valid: %w", err)
|
||||
return fmt.Errorf("model config %q is not valid: %w. Ensure the YAML file has a valid 'name' field and correct syntax. See https://localai.io/docs/getting-started/customize-model/ for config reference", file, err)
|
||||
}
|
||||
return fmt.Errorf("config is not valid")
|
||||
return fmt.Errorf("model config %q is not valid. Ensure the YAML file has a valid 'name' field and correct syntax. See https://localai.io/docs/getting-started/customize-model/ for config reference", file)
|
||||
}
|
||||
|
||||
return nil
|
||||
|
|
|
|||
Loading…
Reference in a new issue