trying to get flags to work. some movement, but not quite there yet.

This commit is contained in:
sealad886 2025-01-27 15:25:01 +00:00
parent d94d2bf8d1
commit 1b1e6c9546
2 changed files with 137 additions and 64 deletions

View File

@@ -31,6 +31,13 @@ This plugin enhances your Zsh shell environment by integrating powerful features
omz plugin enable ollama
```
To get the most benefit from completions, including helpful usage hints and descriptions, add the following `zstyle` to your `~/.zshrc`:
```sh
# ~/.zshrc
# add the following zstyle entry wherever you want
zstyle ':completion:*:*:*:*:descriptions' format '%F{green}%d%f'
```
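Grouping matches by their completion tag is a common optional companion setting that makes the description headers easier to scan; it is not required by the plugin:
```sh
# ~/.zshrc (optional) -- group matches by tag so each group gets its own header
zstyle ':completion:*' group-name ''
```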
2. **Restart Your Shell**
   Apply the changes by reloading Oh-My-Zsh:

View File

@@ -1,38 +1,50 @@
# ------------------------------------------------------------------------------
# ollama.plugin.zsh
#
# Plugin providing Zsh completions for the `ollama` command.
# ------------------------------------------------------------------------------

# ------------------------------------------------------------------------------
# Function: _ollama_get_models
# Purpose:  Retrieves the list of available models for completion.
#           Uses `ollama list` with a short timeout and provides candidates.
# ------------------------------------------------------------------------------
_ollama_get_models() {
  local models_output
  local timeout=5  # Timeout duration in seconds

  # Attempt to fetch models via `ollama list`; if it fails, show a short message.
  models_output="$(timeout $timeout ollama list 2>/dev/null)" || {
    _message "Failed to fetch models"
    return 1
  }

  # Accumulate parsed model names here
  local -a models
  local line

  while IFS= read -r line; do
    # Skip blank lines and header lines (starting with NAME)
    [[ -z "$line" || "$line" =~ ^NAME ]] && continue

    # Extract the first column and escape any colons for safety
    local suggestion="${line%% *}"
    suggestion="${suggestion/:/\\:}"
    models+=("$suggestion")
  done <<< "$models_output"

  # Provide model suggestions using `_describe`
  _describe -t models 'models' models
}
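
# Note: the parsing above assumes (illustratively) that `ollama list` prints a
# header row starting with NAME followed by one model per line, with NAME:TAG
# as the first whitespace-delimited column, roughly:
#   NAME             ID        SIZE      MODIFIED
#   llama3:latest    <id>      4.7 GB    <date>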

# ------------------------------------------------------------------------------
# Function: _ollama
# Purpose:  The main completion function for the `ollama` CLI. Determines which
#           subcommand is being completed, then sets up the corresponding flags
#           and suggestions.
# ------------------------------------------------------------------------------
_ollama() {
  # List of top-level commands and their descriptions
  local -a commands=(
    'serve:Start the Ollama server'
    'create:Create a model from a Modelfile'
    'show:Display information about a specific model'
@@ -47,59 +59,113 @@ _ollama() {
    'help:Provide help information for a command'
  )

  # Standard local variables used by _arguments
  local curcontext="$curcontext" state line
  local -A opt_args

  # The main `_arguments` call for handling top-level options (e.g. -h, -v)
  # and capturing the first positional argument -> subcommand, then the rest.
  _arguments -C \
    '(-h --help)'{-h,--help}'[Display help information]' \
    '(-v --version)'{-v,--version}'[Show version information]' \
    '1: :->command' \
    '*:: :->args'

  # If the user is completing the first argument (the subcommand),
  # present the `commands` array above.
  case $state in
    command)
      _describe -t commands 'ollama commands' commands
      return
      ;;
  esac

  # If the first argument is known, proceed with subcommand-specific completions
  case $words[1] in
    serve)
      _arguments \
        '(-p --port)'{-p,--port}'[Specify the port number]:port number:'
      ;;

    create)
      # If the user typed only `ollama create ` (with no second arg),
      # display a short message reminding them to name the new model
      if [[ $CURRENT -eq 2 ]]; then
        _message 'Specify the new model name'
      else
        # Otherwise, offer flags for `create`
        _arguments \
          '(-f --filename)'{-f,--filename}'[Path to the Modelfile]:Modelfile:_files' \
          '(-q --quantize)'{-q,--quantize}'[Quantization method (e.g. q4_0)]' \
          '--prefix[Set a prefix for the created model]' \
          '(-h --help)--help[Show help for create]'
      fi
      ;;

    show)
      _message 'Usage: ollama show MODEL [flags]'
      if [[ $CURRENT -eq 2 ]]; then
        _ollama_get_models
      else
        _arguments \
          '--license[Show the model license]' \
          '--modelfile[Show the model Modelfile]' \
          '--parameters[Show model parameters]' \
          '--system[Show the system message of the model]' \
          '--template[Show the model template]' \
          '(-h --help)--help[Show help for show]'
      fi
      ;;
    run)
      # Display a usage message only if there's no argument yet
      if [[ $CURRENT -eq 2 ]]; then
        _message "Usage: ollama run MODEL [PROMPT] [flags]"
        _ollama_get_models
      else
        # Define flags for the `run` command
        local -a _run_flags=(
          '--format=[Response format (e.g. json)]'
          '--insecure[Use an insecure registry]'
          '--keepalive=[Time to keep the model loaded (e.g. 5m)]'
          '--nowordwrap[Disable word wrapping]'
          '--verbose[Show response timings]'
          '(-h --help)--help[Show help for run]'
        )

        # Use a mix of `_arguments` and manual handling for freeform input
        if [[ $CURRENT -eq 3 ]]; then
          # Suggest a freeform prompt (arbitrary input)
          _message "Enter a prompt as a string"
        else
          # Provide flag completions
          _arguments -S "${_run_flags[@]}"
        fi
      fi
      ;;
    cp)
      # The `cp` command expects `ollama cp SOURCE DEST`
      if [[ $CURRENT -eq 2 ]]; then
        _ollama_get_models
      elif [[ $CURRENT -eq 3 ]]; then
        _message 'Specify the destination model name'
      fi
      ;;

    rm|stop|pull|push)
      # All of these commands accept one or more model names
      if [[ $CURRENT -eq 2 ]]; then
        _ollama_get_models
      fi
      ;;
    # If the subcommand doesn't match anything above, fall back to default
    *)
      _default
      ;;
  esac
}

# Finally, register the completion function for the `ollama` command
compdef _ollama ollama
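
To iterate on these completions without restarting the shell, one option is to re-source the plugin file directly; a minimal sketch, assuming a standard Oh-My-Zsh custom plugin location (the path is illustrative):

```sh
# Re-source the plugin in the current shell; adjust the path to wherever
# ollama.plugin.zsh actually lives in your setup.
source ~/.oh-my-zsh/custom/plugins/ollama/ollama.plugin.zsh

# Then exercise the completions interactively, for example:
#   ollama <TAB>        # top-level subcommands with descriptions
#   ollama run <TAB>    # model names pulled from `ollama list`
```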