mirror of https://github.com/ohmyzsh/ohmyzsh.git
Compare commits
6 Commits
126de8c0e9
...
24c3056b3c
Author | SHA1 | Date |
---|---|---|
|
24c3056b3c | |
|
30e516a3aa | |
|
1b1e6c9546 | |
|
d94d2bf8d1 | |
|
7e4b330e98 | |
|
78515593d5 |
|
@ -0,0 +1,89 @@
|
||||||
|
# jj - Jujutsu CLI
|
||||||
|
|
||||||
|
This plugin provides autocompletion for [jj](https://martinvonz.github.io/jj).
|
||||||
|
|
||||||
|
To use it, add `jj` to the plugins array of your zshrc file:
|
||||||
|
|
||||||
|
```zsh
|
||||||
|
plugins=(... jj)
|
||||||
|
```
|
||||||
|
|
||||||
|
## Aliases
|
||||||
|
|
||||||
|
| Alias | Command |
|
||||||
|
| ------ | ----------------------------- |
|
||||||
|
| jjc | `jj commit` |
|
||||||
|
| jjcmsg | `jj commit --message` |
|
||||||
|
| jjd | `jj diff` |
|
||||||
|
| jjdmsg | `jj desc --message` |
|
||||||
|
| jjds | `jj desc` |
|
||||||
|
| jje | `jj edit` |
|
||||||
|
| jjgcl | `jj git clone` |
|
||||||
|
| jjgf | `jj git fetch` |
|
||||||
|
| jjgp | `jj git push` |
|
||||||
|
| jjl | `jj log` |
|
||||||
|
| jjla | `jj log -r "all()"` |
|
||||||
|
| jjn | `jj new` |
|
||||||
|
| jjrb | `jj rebase` |
|
||||||
|
| jjrs | `jj restore` |
|
||||||
|
| jjrt | `cd "$(jj root \|\| echo .)"` |
|
||||||
|
| jjsp | `jj split` |
|
||||||
|
| jjsq | `jj squash` |
|
||||||
|
|
||||||
|
## Prompt usage
|
||||||
|
|
||||||
|
Because `jj` has a very powerful [template syntax](https://martinvonz.github.io/jj/latest/templates/), this
|
||||||
|
plugin only exposes a convenience function `jj_prompt_template` to read information from the current change.
|
||||||
|
It is basically the same as `jj log --no-graph -r @ -T $1`:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
_my_theme_jj_info() {
|
||||||
|
jj_prompt_template 'self.change_id().shortest(3)'
|
||||||
|
}
|
||||||
|
|
||||||
|
PROMPT='$(_my_theme_jj_info) $'
|
||||||
|
```
|
||||||
|
|
||||||
|
`jj_prompt_template` escapes `%` signs in the output. Use `jj_prompt_template_raw` if you don't want that
|
||||||
|
(e.g. to colorize the output).
|
||||||
|
|
||||||
|
However, because `jj` can be used inside a Git repository, some themes might clash with it. Generally, you can
|
||||||
|
fix it with a wrapper function that tries `jj` first and then falls back to `git` if it didn't work:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
_my_theme_vcs_info() {
|
||||||
|
jj_prompt_template 'self.change_id().shortest(3)' \
|
||||||
|
|| git_prompt_info
|
||||||
|
}
|
||||||
|
|
||||||
|
PROMPT='$(_my_theme_vcs_info) $'
|
||||||
|
```
|
||||||
|
|
||||||
|
You can find an example
|
||||||
|
[here](https://github.com/nasso/omzsh/blob/e439e494f22f4fd4ef1b6cb64626255f4b341c1b/themes/sunakayu.zsh-theme).
|
||||||
|
|
||||||
|
### Performance
|
||||||
|
|
||||||
|
Sometimes `jj` can be slower than `git`.
|
||||||
|
|
||||||
|
If you feel slowdowns, consider using the following:
|
||||||
|
|
||||||
|
```zsh
|
||||||
|
zstyle :omz:plugins:jj ignore-working-copy yes
|
||||||
|
```
|
||||||
|
|
||||||
|
This will add `--ignore-working-copy` to all `jj` commands executed by your prompt. The downside here is that
|
||||||
|
your prompt might be out-of-sync until the next time `jj` gets a chance to _not_ ignore the working copy (i.e.
|
||||||
|
you manually run a `jj` command).
|
||||||
|
|
||||||
|
If you prefer to keep your prompt always up-to-date but still don't want to _feel_ the slowdown, you can make
|
||||||
|
your prompt asynchronous. This plugin doesn't do this automatically so you'd have to hack your theme a bit for
|
||||||
|
that.
|
||||||
|
|
||||||
|
## See Also
|
||||||
|
|
||||||
|
- [martinvonz/jj](https://github.com/martinvonz/jj)
|
||||||
|
|
||||||
|
## Contributors
|
||||||
|
|
||||||
|
- [nasso](https://github.com/nasso) - Plugin Author
|
|
@ -0,0 +1,53 @@
|
||||||
|
# Bail out immediately when the jj binary is not on PATH; nothing below
# makes sense without it.
(( $+commands[jj] )) || return

# When the cached completion file does not exist yet, compinit cannot have
# registered it, so bind `jj` to the (to-be-generated) _jj function ourselves.
# When the file already exists, compinit has done this for us.
if [[ ! -f "$ZSH_CACHE_DIR/completions/_jj" ]]; then
  typeset -g -A _comps
  autoload -Uz _jj
  _comps[jj]=_jj
fi

# (Re)generate the completion file in a disowned background job so shell
# startup is not blocked. >| clobbers any stale file even under noclobber.
jj util completion zsh >| "$ZSH_CACHE_DIR/completions/_jj" &|
|
||||||
|
|
||||||
|
# Internal wrapper used by the prompt helpers to invoke jj.
# Always passes --no-pager; additionally passes --ignore-working-copy when
# the user enabled `zstyle :omz:plugins:jj ignore-working-copy yes`.
function __jj_prompt_jj() {
  local -a args
  args=("--no-pager")
  zstyle -t ':omz:plugins:jj' ignore-working-copy && args+=("--ignore-working-copy")
  command jj $args "$@"
}
|
||||||
|
|
||||||
|
# convenience functions for themes
|
||||||
|
# Render a jj template against the working-copy change (@) and print the raw
# result. Equivalent to `jj log --no-graph -r @ -T $1`; output is NOT
# %-escaped, so prompt-expansion sequences in it will be interpreted.
function jj_prompt_template_raw() {
  __jj_prompt_jj log --no-graph -r '@' -T "$@" 2>/dev/null
}
|
||||||
|
|
||||||
|
# Like jj_prompt_template_raw, but with every `%` doubled so the output is
# safe to embed in PROMPT (zsh treats % as a prompt-escape introducer).
# Returns 1 when the underlying jj invocation fails.
function jj_prompt_template() {
  local rendered
  rendered=$(jj_prompt_template_raw "$@") || return 1
  echo "${rendered:gs/%/%%}"
}
|
||||||
|
|
||||||
|
# Aliases (sorted alphabetically)
# Commit / describe
alias jjc='jj commit'
alias jjcmsg='jj commit --message'
alias jjd='jj diff'
alias jjdmsg='jj desc --message'
alias jjds='jj desc'
alias jje='jj edit'
# Git interop
alias jjgcl='jj git clone'
alias jjgf='jj git fetch'
alias jjgp='jj git push'
# Log / history
alias jjl='jj log'
alias jjla='jj log -r "all()"'
alias jjn='jj new'
alias jjrb='jj rebase'
alias jjrs='jj restore'
# cd to the repository root; falls back to `.` (stay put) outside a repo
alias jjrt='cd "$(jj root || echo .)"'
alias jjsp='jj split'
alias jjsq='jj squash'
|
|
@ -0,0 +1,95 @@
|
||||||
|
# Ollama Plugin for Oh-My-Zsh
|
||||||
|
|
||||||
|
This plugin enhances your Zsh shell environment by integrating powerful features for managing, running, and creating large language models locally using the [Ollama CLI](https://ollama.ai/). The plugin provides streamlined workflows, autocompletion, and man page support, making it easier than ever to interact with your local AI models.
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
- **Command Autocompletion**: Full support for Ollama CLI commands, options, and arguments.
|
||||||
|
- **Dynamic Model Suggestions**: Automatically suggests available models based on the output of `ollama list`.
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
### Prerequisites
|
||||||
|
|
||||||
|
- A working installation of [Oh-My-Zsh](https://ohmyz.sh/).
|
||||||
|
- The Ollama CLI installed on your system. Refer to the [official Ollama documentation](https://github.com/ollama/ollama) for setup instructions.
|
||||||
|
|
||||||
|
### Steps
|
||||||
|
|
||||||
|
1. **Enable the Plugin**
|
||||||
|
Add `ollama` to the `plugins` array in your `.zshrc` file:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
# in your ~/.zshrc file
|
||||||
|
plugins=(... ollama)
|
||||||
|
```
|
||||||
|
|
||||||
|
or
|
||||||
|
|
||||||
|
```sh
|
||||||
|
# from shell
|
||||||
|
omz plugin enable ollama
|
||||||
|
```
|
||||||
|
|
||||||
|
To get the most out of completions (helpful usage hints, group headings, etc.), add the following style:
|
||||||
|
```sh
|
||||||
|
# ~/.zshrc
|
||||||
|
# add the following zstyle entry wherever you want
|
||||||
|
zstyle ':completion:*:*:*:*:descriptions' format '%F{green}%d%f'
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
2. **Restart Your Shell**
|
||||||
|
Apply the changes by reloading Oh-My-Zsh:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
omz reload
|
||||||
|
```
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
### Commands
|
||||||
|
|
||||||
|
The plugin provides autocompletion and enhanced functionality for the following Ollama commands:
|
||||||
|
|
||||||
|
| Command | Description |
|
||||||
|
|-------------|------------------------------------------|
|
||||||
|
| `serve`, `start`| Start the Ollama server locally. |
|
||||||
|
| `create` | Create a model from a Modelfile. |
|
||||||
|
| `show` | Display information about a specific model. |
|
||||||
|
| `run` | Execute a model with a given prompt. |
|
||||||
|
| `stop` | Terminate a running model. |
|
||||||
|
| `pull` | Download a model from a registry. |
|
||||||
|
| `push` | Upload a model to a registry. |
|
||||||
|
| `list`, `ls` | List all available models. |
|
||||||
|
| `ps` | Show currently running models. |
|
||||||
|
| `cp` | Duplicate an existing model locally. |
|
||||||
|
| `rm` | Remove a model from the local system. |
|
||||||
|
| `help [command]` | Provide help information for a command. |
|
||||||
|
|
||||||
|
```sh
|
||||||
|
>>> o ls
|
||||||
|
NAME ID SIZE MODIFIED
|
||||||
|
deepseek-r1:14b-qwen-distill-q8_0 022efe288297 15 GB 3 hours ago
|
||||||
|
deepseek-r1:32b 38056bbcbb2d 19 GB 3 days ago
|
||||||
|
deepseek-r1:8b 28f8fd6cdc67 4.9 GB 3 days ago
|
||||||
|
deepseek-r1:70b 0c1615a8ca32 42 GB 3 days ago
|
||||||
|
```
|
||||||
|
|
||||||
|
## Notes
|
||||||
|
|
||||||
|
- **Model Naming**: Models follow a `model:tag` format. If no tag is provided, Ollama defaults to `latest`. The model can be invoked with or without `latest` (e.g. `ollama run llama3.2` is equivalent to `ollama run llama3.2:latest`)
|
||||||
|
- **Multiline Input**: Use triple quotes (`"""`) for multiline prompts:
|
||||||
|
|
||||||
|
```zsh
|
||||||
|
> """What is the impact of AI on society?
|
||||||
|
... Include specific examples."""
|
||||||
|
```
|
||||||
|
|
||||||
|
## License
|
||||||
|
|
||||||
|
This project is licensed under the MIT License.
|
||||||
|
|
||||||
|
For more details, visit the [Ollama CLI GitHub repository](https://github.com/ollama/ollama).
|
||||||
|
|
||||||
|
Currently maintained by [sealad886](https://github.com/sealad886)
|
|
@ -0,0 +1,171 @@
|
||||||
|
# ------------------------------------------------------------------------------
|
||||||
|
# ollama.plugin.zsh
|
||||||
|
#
|
||||||
|
# Plugin providing Zsh completions for the `ollama` command.
|
||||||
|
# ------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
# ------------------------------------------------------------------------------
|
||||||
|
# Function: _ollama_get_models
|
||||||
|
# Purpose: Retrieves the list of available models for completion.
|
||||||
|
# Uses `ollama list` with a short timeout and provides candidates.
|
||||||
|
# ------------------------------------------------------------------------------
|
||||||
|
_ollama_get_models() {
  local list_output
  local fetch_timeout=5 # seconds to wait for `ollama list` before giving up

  # Ask the ollama CLI for installed models; on failure (daemon down,
  # timeout, ...) show a short completion message instead of model names.
  list_output="$(timeout $fetch_timeout ollama list 2>/dev/null)" || {
    _message "Failed to fetch models"
    return 1
  }

  # Parse model names (first whitespace-separated column of each row).
  local -a model_names
  local row
  while IFS= read -r row; do
    # Skip blank rows and the header row (which starts with NAME).
    if [[ -n "$row" && ! "$row" =~ ^NAME ]]; then
      local candidate="${row%% *}"
      # Escape the colon in model:tag so compsys doesn't treat it as a
      # description separator.
      model_names+=("${candidate/:/\\:}")
    fi
  done <<< "$list_output"

  # Offer the parsed names as completion candidates.
  _describe -t models 'models' model_names
}
|
||||||
|
|
||||||
|
# ------------------------------------------------------------------------------
|
||||||
|
# Function: _ollama
|
||||||
|
# Purpose: The main completion function for the `ollama` CLI. Determines which
|
||||||
|
# subcommand is being completed, then sets up the corresponding flags
|
||||||
|
# and suggestions.
|
||||||
|
# ------------------------------------------------------------------------------
|
||||||
|
# Main completion entry point for the `ollama` CLI. Completes the subcommand
# in position 1, then dispatches to per-subcommand flag/argument completion.
_ollama() {
  # Top-level subcommands with their descriptions, in `name:description`
  # form as expected by _describe.
  local -a commands=(
    'serve:Start the Ollama server'
    'create:Create a model from a Modelfile'
    'show:Display information about a specific model'
    'run:Execute a model with a given prompt'
    'stop:Terminate a running model'
    'pull:Download a model from the registry'
    'push:Upload a model to the registry'
    'list:Display all available models'
    'ps:Show currently running models'
    'cp:Duplicate an existing model'
    'rm:Delete a model from the local system'
    'help:Provide help information for a command'
  )

  # Standard local state used by _arguments -C.
  local curcontext="$curcontext" state line
  local -A opt_args

  # Handle global options (-h/-v), capture position 1 as the subcommand,
  # and hand everything after it to the ->args state. With `*:: :->args`,
  # _arguments rebases $words/$CURRENT so $words[1] becomes the subcommand.
  _arguments -C \
    '(-h --help)'{-h,--help}'[Display help information]' \
    '(-v --version)'{-v,--version}'[Show version information]' \
    '1: :->command' \
    '*:: :->args'

  # Completing the first argument: offer the subcommand list and stop.
  case $state in
    command)
      _describe -t commands 'ollama commands' commands
      return
      ;;
  esac

  # Subcommand-specific completions ($words[1] is the subcommand, see above).
  case $words[1] in
    serve)
      # NOTE(review): a -p/--port flag is offered here; confirm against the
      # installed ollama version's `serve` options.
      _arguments \
        '(-p --port)'{-p,--port}'[Specify the port number]:port number:'
      ;;

    create)
      # Position 2 is the new model's name — freeform, so just hint.
      if [[ $CURRENT -eq 2 ]]; then
        _message 'Specify the new model name'
      else
        # After the name, offer create's flags.
        _arguments \
          '(-f --filename)'{-f,--filename}'[Path to the Modelfile]:Modelfile:_files' \
          '(-q --quantize)'{-q,--quantize}'[Quantization method (e.g. q4_0)]' \
          '--prefix[Set a prefix for the created model]' \
          '(-h --help)--help[Show help for create]'
        fi
      ;;

    show)
      # Always display the usage hint, then complete either the model name
      # (position 2) or show's flags.
      _message 'Usage: ollama show MODEL [flags]'
      if [[ $CURRENT -eq 2 ]]; then
        _ollama_get_models
      else
        _arguments \
          '--license[Show the model’s license]' \
          '--modelfile[Show the model’s Modelfile]' \
          '--parameters[Show model parameters]' \
          '--system[Show the system message of the model]' \
          '--template[Show the model’s template]' \
          '(-h --help)--help[Show help for show]'
      fi
      ;;

    run)
      # Position 2: the model to run.
      if [[ $CURRENT -eq 2 ]]; then
        _message "Usage: ollama run MODEL [PROMPT] [flags]"
        _ollama_get_models
      else
        # Flags accepted by `run`.
        # NOTE(review): flag names here (e.g. --format-string, --keepalive)
        # should be verified against the installed ollama version.
        local -a _run_flags=(
          '--format-string=[Format string for the output (e.g. json)]'
          '--insecure[Use an insecure registry]'
          '--keepalive=[Time to keep the model loaded (e.g. 5m)]'
          '--nowordwrap[Disable word wrapping]'
          '--verbose[Show response timings]'
          '(-h --help)--help[Show help for run]'
        )

        # Position 3 is the (freeform) prompt; anything later gets flags.
        if [[ $CURRENT -eq 3 ]]; then
          _message "Enter a prompt as a string"
        else
          _arguments -S "${_run_flags[@]}"
        fi
      fi
      ;;

    cp)
      # `ollama cp SOURCE DEST`: complete SOURCE from installed models,
      # DEST is a freeform new name.
      if [[ $CURRENT -eq 2 ]]; then
        _ollama_get_models
      elif [[ $CURRENT -eq 3 ]]; then
        _message 'Specify the destination model name'
      fi
      ;;

    rm|stop|pull|push)
      # These take a model name as their first argument.
      if [[ $CURRENT -eq 2 ]]; then
        _ollama_get_models
      fi
      ;;

    # Unknown subcommand: fall back to zsh's default completion.
    *)
      _default
      ;;
  esac
}

# Register the completion function for the `ollama` command.
compdef _ollama ollama
|
Loading…
Reference in New Issue