feat: Add shell completion support for bash, zsh, and fish (#8851)

feat: add shell completion support for bash, zsh, and fish

- Add core/cli/completion.go with dynamic completion script generation
- Add core/cli/completion_test.go with unit tests
- Modify cmd/local-ai/main.go to support completion command
- Modify core/cli/cli.go to add Completion subcommand
- Add docs/content/features/shell-completion.md with installation instructions

The completion scripts are generated dynamically from the Kong CLI model,
so they automatically include all commands, subcommands, and flags.

Co-authored-by: localai-bot <localai-bot@noreply.github.com>
This commit is contained in:
LocalAI [bot] 2026-03-08 09:32:39 +01:00 committed by GitHub
parent 6a928e70bc
commit efd402207c
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
5 changed files with 588 additions and 1 deletions

View file

@ -39,7 +39,7 @@ func main() {
}
// Actually parse the CLI options
ctx := kong.Parse(&cli.CLI,
k := kong.Must(&cli.CLI,
kong.Description(
` LocalAI is a drop-in replacement OpenAI API for running LLM, GPT and genAI models locally on CPU, GPUs with consumer grade hardware.
@ -58,6 +58,13 @@ Version: ${version}
"version": internal.PrintableVersion(),
},
)
ctx, err := k.Parse(os.Args[1:])
if err != nil {
k.FatalIfErrorf(err)
}
// Pass Kong model to the completion command for dynamic script generation
cli.CLI.Completion.SetApplication(k.Model)
// Configure the logging level before we run the application
// This is here to preserve the existing --debug flag functionality

View file

@ -18,4 +18,5 @@ var CLI struct {
Worker worker.Worker `cmd:"" help:"Run workers to distribute workload (llama.cpp-only)"`
Util UtilCMD `cmd:"" help:"Utility commands"`
Explorer ExplorerCMD `cmd:"" help:"Run p2p explorer"`
Completion CompletionCMD `cmd:"" help:"Generate shell completion scripts for bash, zsh, or fish"`
}

396
core/cli/completion.go Normal file
View file

@ -0,0 +1,396 @@
package cli
import (
"fmt"
"strings"
"github.com/alecthomas/kong"
cliContext "github.com/mudler/LocalAI/core/cli/context"
)
// CompletionCMD implements the "completion" subcommand: it prints a shell
// completion script for the requested shell to stdout.
type CompletionCMD struct {
	// Shell is the positional argument selecting the target shell.
	// Kong restricts the accepted values via the enum tag.
	Shell string `arg:"" enum:"bash,zsh,fish" help:"Shell to generate completions for (bash, zsh, fish)"`
	// app is the parsed Kong application model, injected via SetApplication
	// after kong.Must; the `kong:"-"` tag keeps it out of CLI parsing.
	app *kong.Application `kong:"-"`
}
// SetApplication injects the parsed Kong application model so that Run can
// generate completion scripts dynamically from the full CLI definition.
// It must be called before Run; Run returns an error if app was never set.
func (c *CompletionCMD) SetApplication(app *kong.Application) {
	c.app = app
}
// Run generates the completion script for the shell selected on the command
// line and writes it to stdout. It returns an error when the Kong application
// model was never injected via SetApplication, or when the shell name is not
// one of the supported generators (the latter is normally unreachable because
// Kong's enum tag already validates the argument).
func (c *CompletionCMD) Run(_ *cliContext.Context) error {
	if c.app == nil {
		return fmt.Errorf("application model not available")
	}
	// Dispatch table mapping each supported shell to its generator.
	generators := map[string]func(*kong.Application) string{
		"bash": generateBashCompletion,
		"zsh":  generateZshCompletion,
		"fish": generateFishCompletion,
	}
	generate, ok := generators[c.Shell]
	if !ok {
		return fmt.Errorf("unsupported shell: %s", c.Shell)
	}
	fmt.Print(generate(c.app))
	return nil
}
// collectCommands walks the Kong node tree depth-first and returns a
// flattened list of every visible (non-hidden) command. prefix is the
// space-joined path of ancestor command names; pass "" for the root.
// Each entry records both the bare name and the full space-joined path.
func collectCommands(node *kong.Node, prefix string) []commandInfo {
	var out []commandInfo
	for _, child := range node.Children {
		// Hidden commands are excluded from completion entirely.
		if child.Hidden {
			continue
		}
		full := child.Name
		if prefix != "" {
			full = prefix + " " + child.Name
		}
		out = append(out, commandInfo{
			name:     child.Name,
			fullName: full,
			help:     child.Help,
			node:     child,
		})
		// Recurse so nested subcommands are flattened into the same list.
		out = append(out, collectCommands(child, full)...)
	}
	return out
}
// commandInfo is a flattened view of one CLI command discovered by
// collectCommands.
type commandInfo struct {
	name     string     // bare command name, without ancestors (e.g. "list")
	fullName string     // space-joined path from the root (e.g. "models list")
	help     string     // help text taken from the Kong model
	node     *kong.Node // underlying Kong node, used to collect per-command flags
}
// collectFlags returns the visible flags attached to node and to each of its
// ancestors, deduplicated by long flag name. Because the walk starts at node
// and moves up through Parent links, the flag nearest to node wins when the
// same name appears at multiple levels.
func collectFlags(node *kong.Node) []flagInfo {
	seen := map[string]struct{}{}
	var out []flagInfo
	for cur := node; cur != nil; cur = cur.Parent {
		for _, f := range cur.Flags {
			// Skip hidden flags and names already collected closer to node.
			if f.Hidden {
				continue
			}
			if _, dup := seen[f.Name]; dup {
				continue
			}
			seen[f.Name] = struct{}{}
			out = append(out, flagInfo{
				name:  f.Name,
				short: f.Short,
				help:  f.Help,
			})
		}
	}
	return out
}
// flagInfo is a flattened view of one CLI flag discovered by collectFlags.
type flagInfo struct {
	name  string // long flag name, without the leading "--"
	short rune   // short flag letter, or 0 when the flag has no short form
	help  string // help text taken from the Kong model
}
// generateBashCompletion renders a bash completion script for the "local-ai"
// binary from the parsed Kong application model. The generated function
// completes top-level command names, one level of subcommands, and flag names
// (global or per-command) when the current word starts with '-'. Flag VALUES
// are not completed, and commands nested deeper than two levels are not
// emitted into the script.
func generateBashCompletion(app *kong.Application) string {
	var sb strings.Builder
	cmds := collectCommands(app.Node, "")
	// Top-level commands are those whose fullName contains no space
	// (collectCommands joins nested command names with spaces).
	topLevelCmds := []string{}
	for _, cmd := range cmds {
		if !strings.Contains(cmd.fullName, " ") {
			topLevelCmds = append(topLevelCmds, cmd.name)
		}
	}
	// Build the word list of global flags ("--name" plus "-s" short forms).
	globalFlags := collectFlags(app.Node)
	globalFlagNames := []string{}
	for _, f := range globalFlags {
		globalFlagNames = append(globalFlagNames, "--"+f.name)
		if f.short != 0 {
			globalFlagNames = append(globalFlagNames, "-"+string(f.short))
		}
	}
	// Static script preamble: locate the first non-flag word to identify the
	// active subcommand, then branch on whether a flag is being completed.
	// NOTE: the raw-string content below is emitted verbatim into the script.
	sb.WriteString(`# bash completion for local-ai
# Generated by local-ai completion bash
_local_ai_completions()
{
local cur prev words cword
_init_completion || return
local commands="` + strings.Join(topLevelCmds, " ") + `"
local global_flags="` + strings.Join(globalFlagNames, " ") + `"
# Find the subcommand
local subcmd=""
local subcmd_idx=0
for ((i=1; i < cword; i++)); do
case "${words[i]}" in
-*)
# Skip flags and their values
;;
*)
if [[ -z "$subcmd" ]]; then
subcmd="${words[i]}"
subcmd_idx=$i
fi
;;
esac
done
# If completing a flag value, don't suggest anything special
if [[ "$cur" == -* ]]; then
case "$subcmd" in
`)
	// Generate flag completions per top-level command. collectFlags walks
	// parents too, so each command's word list also includes global flags.
	for _, cmd := range cmds {
		if strings.Contains(cmd.fullName, " ") {
			continue
		}
		flags := collectFlags(cmd.node)
		flagNames := []string{}
		for _, f := range flags {
			flagNames = append(flagNames, "--"+f.name)
			if f.short != 0 {
				flagNames = append(flagNames, "-"+string(f.short))
			}
		}
		sb.WriteString(fmt.Sprintf(" %s)\n", cmd.name))
		sb.WriteString(fmt.Sprintf(" COMPREPLY=($(compgen -W \"%s\" -- \"$cur\"))\n", strings.Join(flagNames, " ")))
		sb.WriteString(" return\n")
		sb.WriteString(" ;;\n")
	}
	// Fallback case: no recognized subcommand yet, offer global flags only.
	sb.WriteString(` *)
COMPREPLY=($(compgen -W "$global_flags" -- "$cur"))
return
;;
esac
fi
# Complete subcommands for top-level commands
case "$subcmd" in
`)
	// Generate subcommand completions: for each top-level command, collect
	// its direct children (fullName "parent child" with no deeper nesting).
	for _, cmd := range cmds {
		if strings.Contains(cmd.fullName, " ") {
			continue
		}
		subcmds := []string{}
		for _, sub := range cmds {
			parts := strings.SplitN(sub.fullName, " ", 2)
			if len(parts) == 2 && parts[0] == cmd.name && !strings.Contains(parts[1], " ") {
				subcmds = append(subcmds, parts[1])
			}
		}
		if len(subcmds) > 0 {
			sb.WriteString(fmt.Sprintf(" %s)\n", cmd.name))
			sb.WriteString(fmt.Sprintf(" COMPREPLY=($(compgen -W \"%s\" -- \"$cur\"))\n", strings.Join(subcmds, " ")))
			sb.WriteString(" return\n")
			sb.WriteString(" ;;\n")
		}
	}
	// Empty subcmd means the user is still typing the first command word:
	// offer the top-level command list. Commands without subcommands fall
	// out of the case with no suggestions.
	sb.WriteString(` "")
COMPREPLY=($(compgen -W "$commands" -- "$cur"))
return
;;
esac
}
complete -F _local_ai_completions local-ai
`)
	return sb.String()
}
// generateZshCompletion renders a zsh completion script for the "local-ai"
// binary from the parsed Kong application model. It emits global flag specs
// and top-level command descriptions, drives state with `_arguments -C`, and
// per command offers one level of subcommands plus that command's flags.
// Help text is escaped for zsh: single quotes become '\'' and square
// brackets (significant inside _arguments specs) are backslash-escaped.
func generateZshCompletion(app *kong.Application) string {
	var sb strings.Builder
	cmds := collectCommands(app.Node, "")
	globalFlags := collectFlags(app.Node)
	// Script preamble: declare the command and global-flag arrays.
	// NOTE: the raw-string content below is emitted verbatim into the script.
	sb.WriteString(`#compdef local-ai
# Generated by local-ai completion zsh
_local_ai() {
local -a commands
local -a global_flags
global_flags=(
`)
	// Emit one '--name[help]' spec per global flag (plus '-s[help]' for
	// short forms), with the help text escaped for zsh.
	for _, f := range globalFlags {
		help := strings.ReplaceAll(f.help, "'", "'\\''")
		help = strings.ReplaceAll(help, "[", "\\[")
		help = strings.ReplaceAll(help, "]", "\\]")
		sb.WriteString(fmt.Sprintf(" '--%s[%s]'\n", f.name, help))
		if f.short != 0 {
			sb.WriteString(fmt.Sprintf(" '-%s[%s]'\n", string(f.short), help))
		}
	}
	sb.WriteString(` )
commands=(
`)
	// Emit 'name:help' entries for every top-level command (no space in
	// fullName), escaped the same way for use with _describe.
	for _, cmd := range cmds {
		if strings.Contains(cmd.fullName, " ") {
			continue
		}
		help := strings.ReplaceAll(cmd.help, "'", "'\\''")
		help = strings.ReplaceAll(help, "[", "\\[")
		help = strings.ReplaceAll(help, "]", "\\]")
		sb.WriteString(fmt.Sprintf(" '%s:%s'\n", cmd.name, help))
	}
	// Wire up _arguments: position 1 selects a command, the rest are
	// dispatched per command through the $state machine below.
	sb.WriteString(` )
_arguments -C \
$global_flags \
'1:command:->command' \
'*::arg:->args'
case $state in
command)
_describe -t commands 'local-ai commands' commands
;;
args)
case $words[1] in
`)
	// Per-command completions: a case branch for each top-level command.
	for _, cmd := range cmds {
		if strings.Contains(cmd.fullName, " ") {
			continue
		}
		sb.WriteString(fmt.Sprintf(" %s)\n", cmd.name))
		// Check for subcommands: direct children have fullName
		// "parent child" with no further nesting.
		subcmds := []commandInfo{}
		for _, sub := range cmds {
			parts := strings.SplitN(sub.fullName, " ", 2)
			if len(parts) == 2 && parts[0] == cmd.name && !strings.Contains(parts[1], " ") {
				subcmds = append(subcmds, sub)
			}
		}
		if len(subcmds) > 0 {
			sb.WriteString(" local -a subcmds\n")
			sb.WriteString(" subcmds=(\n")
			for _, sub := range subcmds {
				parts := strings.SplitN(sub.fullName, " ", 2)
				help := strings.ReplaceAll(sub.help, "'", "'\\''")
				help = strings.ReplaceAll(help, "[", "\\[")
				help = strings.ReplaceAll(help, "]", "\\]")
				sb.WriteString(fmt.Sprintf(" '%s:%s'\n", parts[1], help))
			}
			sb.WriteString(" )\n")
			sb.WriteString(" _describe -t commands 'subcommands' subcmds\n")
		}
		// This command's flags (collectFlags also pulls in ancestors'
		// flags). The suffix keeps line continuations on all but the
		// last _arguments spec.
		flags := collectFlags(cmd.node)
		if len(flags) > 0 {
			sb.WriteString(" _arguments \\\n")
			for i, f := range flags {
				help := strings.ReplaceAll(f.help, "'", "'\\''")
				help = strings.ReplaceAll(help, "[", "\\[")
				help = strings.ReplaceAll(help, "]", "\\]")
				suffix := " \\"
				if i == len(flags)-1 {
					suffix = ""
				}
				sb.WriteString(fmt.Sprintf(" '--%s[%s]'%s\n", f.name, help, suffix))
			}
		}
		sb.WriteString(" ;;\n")
	}
	// Close the case statements, the function, and register it.
	sb.WriteString(` esac
;;
esac
}
_local_ai "$@"
`)
	return sb.String()
}
// generateFishCompletion renders a fish completion script for the "local-ai"
// binary from the parsed Kong application model. Fish completions are
// declarative: one `complete` line per flag/command, gated by conditions
// (`__fish_use_subcommand`, `__fish_seen_subcommand_from`). Single quotes in
// help text are backslash-escaped for the quoted -d argument.
func generateFishCompletion(app *kong.Application) string {
	var sb strings.Builder
	cmds := collectCommands(app.Node, "")
	globalFlags := collectFlags(app.Node)
	sb.WriteString("# fish completion for local-ai\n")
	sb.WriteString("# Generated by local-ai completion fish\n\n")
	// Disable file completions by default
	sb.WriteString("complete -c local-ai -f\n\n")
	// Global flags: unconditional `complete` entries with long (-l) and,
	// when present, short (-s) names plus the escaped description (-d).
	for _, f := range globalFlags {
		help := strings.ReplaceAll(f.help, "'", "\\'")
		args := fmt.Sprintf("complete -c local-ai -l %s", f.name)
		if f.short != 0 {
			args += fmt.Sprintf(" -s %s", string(f.short))
		}
		args += fmt.Sprintf(" -d '%s'", help)
		sb.WriteString(args + "\n")
	}
	sb.WriteString("\n")
	// Top-level commands (no condition means they show when no subcommand is given)
	topLevelCmds := []string{}
	for _, cmd := range cmds {
		if strings.Contains(cmd.fullName, " ") {
			continue
		}
		topLevelCmds = append(topLevelCmds, cmd.name)
		help := strings.ReplaceAll(cmd.help, "'", "\\'")
		sb.WriteString(fmt.Sprintf("complete -c local-ai -n '__fish_use_subcommand' -a %s -d '%s'\n", cmd.name, help))
	}
	sb.WriteString("\n")
	// Subcommands and per-command flags
	for _, cmd := range cmds {
		if strings.Contains(cmd.fullName, " ") {
			continue
		}
		// Subcommands: direct children have fullName "parent child" with
		// no deeper nesting; shown once the parent has been typed.
		for _, sub := range cmds {
			parts := strings.SplitN(sub.fullName, " ", 2)
			if len(parts) == 2 && parts[0] == cmd.name && !strings.Contains(parts[1], " ") {
				help := strings.ReplaceAll(sub.help, "'", "\\'")
				sb.WriteString(fmt.Sprintf("complete -c local-ai -n '__fish_seen_subcommand_from %s' -a %s -d '%s'\n", cmd.name, parts[1], help))
			}
		}
		// Per-command flags (collectFlags also pulls in ancestors' flags),
		// gated on the command having been typed.
		flags := collectFlags(cmd.node)
		for _, f := range flags {
			help := strings.ReplaceAll(f.help, "'", "\\'")
			args := fmt.Sprintf("complete -c local-ai -n '__fish_seen_subcommand_from %s' -l %s", cmd.name, f.name)
			if f.short != 0 {
				args += fmt.Sprintf(" -s %s", string(f.short))
			}
			args += fmt.Sprintf(" -d '%s'", help)
			sb.WriteString(args + "\n")
		}
	}
	return sb.String()
}

View file

@ -0,0 +1,96 @@
package cli
import (
"strings"
"testing"
"github.com/alecthomas/kong"
)
// getTestApp builds a small Kong application model used as a fixture by the
// completion-generator tests: one plain command, one command with two
// subcommands, and the CompletionCMD itself.
func getTestApp() *kong.Application {
	var spec struct {
		Run    struct{} `cmd:"" help:"Run the server"`
		Models struct {
			List    struct{} `cmd:"" help:"List models"`
			Install struct{} `cmd:"" help:"Install a model"`
		} `cmd:"" help:"Manage models"`
		Completion CompletionCMD `cmd:"" help:"Generate shell completions"`
	}
	return kong.Must(&spec).Model
}
func TestGenerateBashCompletion(t *testing.T) {
app := getTestApp()
script := generateBashCompletion(app)
if !strings.Contains(script, "complete -F _local_ai_completions local-ai") {
t.Error("bash completion missing complete command registration")
}
if !strings.Contains(script, "run") {
t.Error("bash completion missing 'run' command")
}
if !strings.Contains(script, "models") {
t.Error("bash completion missing 'models' command")
}
if !strings.Contains(script, "completion") {
t.Error("bash completion missing 'completion' command")
}
}
func TestGenerateZshCompletion(t *testing.T) {
app := getTestApp()
script := generateZshCompletion(app)
if !strings.Contains(script, "#compdef local-ai") {
t.Error("zsh completion missing compdef header")
}
if !strings.Contains(script, "run") {
t.Error("zsh completion missing 'run' command")
}
if !strings.Contains(script, "models") {
t.Error("zsh completion missing 'models' command")
}
}
func TestGenerateFishCompletion(t *testing.T) {
app := getTestApp()
script := generateFishCompletion(app)
if !strings.Contains(script, "complete -c local-ai") {
t.Error("fish completion missing complete command")
}
if !strings.Contains(script, "__fish_use_subcommand") {
t.Error("fish completion missing subcommand detection")
}
if !strings.Contains(script, "run") {
t.Error("fish completion missing 'run' command")
}
if !strings.Contains(script, "models") {
t.Error("fish completion missing 'models' command")
}
}
// TestCollectCommands checks that collectCommands flattens the fixture tree
// into both top-level names and space-joined subcommand paths.
func TestCollectCommands(t *testing.T) {
	app := getTestApp()
	found := map[string]bool{}
	for _, cmd := range collectCommands(app.Node, "") {
		found[cmd.fullName] = true
	}
	checks := []struct{ full, msg string }{
		{"run", "missing 'run' command"},
		{"models", "missing 'models' command"},
		{"models list", "missing 'models list' subcommand"},
		{"models install", "missing 'models install' subcommand"},
	}
	for _, c := range checks {
		if !found[c.full] {
			t.Error(c.msg)
		}
	}
}

View file

@ -0,0 +1,87 @@
+++
disableToc = false
title = "Shell Completion"
weight = 18
url = "/features/shell-completion/"
+++
LocalAI provides shell completion support for **bash**, **zsh**, and **fish** shells. Once installed, tab completion works for all CLI commands, subcommands, and flags.
## Generating Completion Scripts
Use the `completion` subcommand to generate a completion script for your shell:
```bash
local-ai completion bash
local-ai completion zsh
local-ai completion fish
```
## Installation
### Bash
Add the following to your `~/.bashrc`:
```bash
source <(local-ai completion bash)
```
Or install it system-wide:
```bash
local-ai completion bash > /etc/bash_completion.d/local-ai
```
### Zsh
Add the following to your `~/.zshrc`:
```zsh
source <(local-ai completion zsh)
```
Or install it to a completions directory:
```zsh
local-ai completion zsh > "${fpath[1]}/_local-ai"
```
If shell completions are not already enabled in your zsh environment, add the following to the beginning of your `~/.zshrc`:
```zsh
autoload -Uz compinit
compinit
```
### Fish
```fish
local-ai completion fish | source
```
Or install it permanently:
```fish
local-ai completion fish > ~/.config/fish/completions/local-ai.fish
```
## Usage
After installation, restart your shell or source your shell configuration file. Then type `local-ai` followed by a tab to see available commands:
```
$ local-ai <TAB>
run backends completion explorer models
federated sound-generation transcript tts util
```
Tab completion also works for subcommands and flags:
```
$ local-ai models <TAB>
install list
$ local-ai run --<TAB>
--address --backends-path --context-size --debug ...
```