feat(ollama): add plugin for managing and running language models with autocompletion and man page support

This commit is contained in:
sealad886 2025-01-27 09:42:07 +00:00
parent ce9a4a0196
commit 78515593d5
2 changed files with 240 additions and 0 deletions

94
plugins/ollama/README.md Normal file
View File

@ -0,0 +1,94 @@
# Ollama Plugin for Oh-My-Zsh
This plugin enhances your Zsh shell environment by integrating powerful features for managing, running, and creating large language models locally using the [Ollama CLI](https://ollama.ai/). The plugin provides streamlined workflows, autocompletion, and man page support, making it easier than ever to interact with your local AI models.
## Features
- **Command Autocompletion**: Full support for Ollama CLI commands, options, and arguments.
- **Dynamic Model Suggestions**: Automatically suggests available models based on the output of `ollama list`.
## Installation
### Prerequisites
- A working installation of [Oh-My-Zsh](https://ohmyz.sh/).
- The Ollama CLI installed on your system. Refer to the [official Ollama documentation](https://github.com/ollama/ollama) for setup instructions.
### Steps
1. **Enable the Plugin**
Add `ollama` to the `plugins` array in your `.zshrc` file:
```sh
# in your ~/.zshrc file
plugins=(... ollama)
```
or
```sh
# from shell
omz plugin enable ollama
```
2. **Restart Your Shell**
Apply the changes by reloading Oh-My-Zsh:
```sh
omz reload
```
## Usage
### Commands
The plugin provides autocompletion and enhanced functionality for the following Ollama commands:
| Command | Description |
|-------------|------------------------------------------|
| `serve`, `start`| Start the Ollama server locally. |
| `create` | Create a model from a Modelfile. |
| `show` | Display information about a specific model. |
| `run` | Execute a model with a given prompt. |
| `stop` | Terminate a running model. |
| `pull` | Download a model from a registry. |
| `push` | Upload a model to a registry. |
| `list`, `ls` | List all available models. |
| `ps` | Show currently running models. |
| `cp` | Duplicate an existing model locally. |
| `rm` | Remove a model from the local system. |
| `help [command]` | Provide help information for a command. |
### Alias
The plugin includes an alias for convenience:
- `o`: This is an alias for the `ollama` command, allowing you to use `o` as a shorthand for executing Ollama commands. Useful when jumping around the command line frequently.
```sh
>>> o ls
NAME ID SIZE MODIFIED
deepseek-r1:14b-qwen-distill-q8_0 022efe288297 15 GB 3 hours ago
deepseek-r1:32b 38056bbcbb2d 19 GB 3 days ago
deepseek-r1:8b 28f8fd6cdc67 4.9 GB 3 days ago
deepseek-r1:70b 0c1615a8ca32 42 GB 3 days ago
```
## Notes
- **Model Naming**: Models follow a `model:tag` format. If no tag is provided, Ollama defaults to `latest`. The model can be invoked with or without `latest` (e.g. `ollama run llama3.2` is equivalent to `ollama run llama3.2:latest`).
- **Multiline Input**: Use triple quotes (`"""`) for multiline prompts:
```zsh
> """What is the impact of AI on society?
... Include specific examples."""
```
## License
This project is licensed under the MIT License.
For more details, visit the [Ollama CLI GitHub repository](https://github.com/ollama/ollama).
Currently maintained by [sealad886](https://github.com/sealad886).

View File

@ -0,0 +1,146 @@
install_ollama_manpage() {
  # Install the ollama.1 manpage bundled with this plugin into the system
  # man1 directory. Silently no-ops if the manpage is already installed.
  #
  # NOTE: inside a zsh function $0 is the *function name* (FUNCTION_ARGZERO is
  # on by default), so the original `${0:A:h}` resolved against $PWD, not the
  # plugin directory. `${(%):-%x}` expands to the file containing the code
  # currently being executed, which is this plugin file — the standard idiom.
  local manpage_source="${${(%):-%x}:A:h}/ollama.1"
  local manpage_target_dir
  local manpage_target

  # Pick the man directory for this OS; other platforms are skipped.
  case "$(uname)" in
    Linux|Darwin)
      manpage_target_dir="/usr/local/share/man/man1"
      ;;
    *)
      echo "Unsupported OS: $(uname). Manpage installation skipped."
      return
      ;;
  esac

  manpage_target="${manpage_target_dir}/ollama.1"

  # Already installed: stay quiet — this runs at every shell startup.
  if [[ -f "$manpage_target" ]]; then
    return
  fi

  if [[ -f "$manpage_source" ]]; then
    # Writing under /usr/local needs root; sudo may prompt on first run.
    if [[ ! -d "$manpage_target_dir" ]]; then
      sudo mkdir -p "$manpage_target_dir"
    fi
    sudo cp "$manpage_source" "$manpage_target"
  else
    echo "Manpage source file not found: $manpage_source"
  fi
}

# Run once at plugin load; returns immediately on subsequent shells because
# the target file will already exist.
install_ollama_manpage
# Function to retrieve available models for completion
_ollama_get_models() {
# Execute 'ollama list' and capture its output, suppressing any error messages
local models_output
models_output="$(ollama list 2>/dev/null)"
# Initialize an array to hold the model suggestions
local -a models
local line
# Read the output line by line
while IFS=" " read -r line; do
# Skip blank lines
[[ -z "$line" ]] && continue
# Skip the header line that starts with 'NAME'
if [[ "$line" =~ ^NAME ]]; then
continue
fi
# Split the line into words and extract the first word (model name:tag)
set -- $line
local suggestion="${$(echo $1 | cut -d ' ' -f 1)/:/\\:}" # Escape ':' by replacing it with '\:'
models+=( "$suggestion" ) # Add the escaped model name to the array
done <<< "$models_output"
# Use the '_describe' function to provide the model suggestions for completion
_describe -t models 'models' models
}
# Main completion function for the 'ollama' command
_ollama() {
# Define an array of available commands with their descriptions
local -a commands
commands=(
'serve:Start the Ollama server'
'create:Create a model from a Modelfile'
'show:Display information about a specific model'
'run:Execute a model with a given prompt'
'stop:Terminate a running model'
'pull:Download a model from the registry'
'push:Upload a model to the registry'
'list:Display all available models'
'ps:Show currently running models'
'cp:Duplicate an existing model'
'rm:Delete a model from the local system'
'help:Provide help information for a command'
)
# Initialize context variables for the completion
local context curcontext="$curcontext" state line
local -A opt_args
# Define the arguments and options for the 'ollama' command
_arguments -C \
'(-h --help)'{-h,--help}'[Display help information]' \
'(-v --version)'{-v,--version}'[Show version information]' \
'1: :->command' \
'*:: :->args'
# Determine the state of the completion (command or arguments)
case $state in
command)
# Provide command suggestions
_describe -t commands 'ollama commands' commands
;;
args)
# Handle argument completion based on the specified command
case $words[1] in
run|rm|stop|show|pull|push)
# For these commands, provide model name suggestions
_ollama_get_models
;;
cp)
if [[ $CURRENT -eq 2 ]]; then
# For the 'cp' command, suggest source model names
_ollama_get_models
elif [[ $CURRENT -eq 3 ]]; then
# For the 'cp' command, prompt for the destination model name
_message 'destination model name'
fi
;;
create)
# For the 'create' command, suggest Modelfile paths
_arguments \
'(-f --filename)'{-f,--filename}'[Specify the path to the Modelfile]:Modelfile:_files'
;;
serve)
# For the 'serve' command, suggest specifying the port number
_arguments \
'(-p --port)'{-p,--port}'[Specify the port number]:port number:'
;;
*)
# For any other commands, use the default completion
_default
;;
esac
;;
esac
}
# Register the '_ollama' function as the completion handler for the 'ollama' command.
compdef _ollama ollama
# Shorthand alias documented in the README: `o` runs `ollama`.
alias o=ollama