'(-T --types)'{-T,--types}'=[configure the types of lint rmlint will look for]: :_rmlint_types' \
'*'{-o,--output}'=[configure the way rmlint outputs its results]:spec:_rmlint_output' \
'*'{-O,--add-output}'=[configure the way rmlint outputs its results (preserve defaults)]:spec:_rmlint_output' \
'*'{-c,--config}'=[configure a format]:spec:_rmlint_config' \
'(-z --perms)'{-z,--perms}'=[only look into file if it is readable, writable or executable by the current user]: :_values -s "" perms r w x' \
'(-a --algorithm)'{-a,--algorithm}'=[choose the algorithm to use for finding duplicate files]:algo:_rmlint_algorithm' \
'*'{-p,--paranoid}'[increase the paranoia of rmlint'\''s duplicate algorithm]' \
'*'{-P,--less-paranoid}'[decrease the paranoia of rmlint'\''s duplicate algorithm]' \
'*'{-v,--loud}'[increase the verbosity]' \
'*'{-V,--quiet}'[decrease the verbosity]' \
'(-g --progress)'{-g,--progress}'[show a progressbar with sane defaults]' \
'(-G --no-progress)'{-G,--no-progress}'[do not show a progressbar with sane defaults (default)]' \
'(-D --merge-directories)'{-D,--merge-directories}'[makes rmlint use a special mode where all found duplicates are collected and checked if whole directory trees are duplicates]' \
'(-j --honour-dir-layout)'{-j,--honour-dir-layout}'[only recognize directories as duplicates that have the same path layout]' \
'(-y --sort-by)'{-y,--sort-by}'=[during output, sort the found duplicate groups by criteria described by order]:order:_rmlint_sort' \
'(-w --with-color)'{-w,--with-color}'[use color escapes for pretty output (default)]' \
'(-W --no-with-color)'{-W,--no-with-color}'[disable color escapes for pretty output]' \
'(- *)'{-h,--help}'[show a shorter reference help text]' \
'(- *)--help-all[show all help options]' \
'(- *)'{-H,--show-man}'[show the full man page]' \
'(- *)--version[print the version of rmlint]' \
'(-s --size)'{-s,--size}'=[only consider files as duplicates in a certain size range]:range:_rmlint_size_range' \
'(-d --max-depth)'{-d,--max-depth}'=[only recurse up to this depth]: :_guard "[0-9]#" "depth"' \
'(-l --hardlinked)'{-l,--hardlinked}'[hardlinked files are treated as duplicates (default)]' \
'--keep-hardlinked[rmlint will not delete any files that are hardlinked to an original in their respective group]' \
'(-L --no-hardlinked)'{-L,--no-hardlinked}'[only one file (of a set of hardlinked files) is considered, all the others are ignored]' \
'(-b --match-basename)'{-b,--match-basename}'[only consider those files as dupes that have the same basename]' \
'(-B --unmatched-basename)'{-B,--unmatched-basename}'[only consider those files as dupes that do not share the same basename]' \
'(-e --match-with-extension)'{-e,--match-with-extension}'[only consider those files as dupes that have the same file extension]' \
'(-E --no-match-with-extension)'{-E,--no-match-with-extension}'[don'\''t consider those files as dupes that have the same file extension (default)]' \
'(-i --match-without-extension)'{-i,--match-without-extension}'[only consider those files as dupes that have the same basename minus the file extension]' \
'(-I --no-match-without-extension)'{-I,--no-match-without-extension}'[don'\''t consider those files as dupes that have the same basename minus the file extension (default)]' \
'(-n --newer-than-stamp)'{-n,--newer-than-stamp}'=[only consider files (and their size siblings for duplicates) newer than a certain modification time (mtime)]:timestamp_filename:_files' \
'(-N --newer-than)'{-N,--newer-than}'=[don'\''t consider files (and their size siblings for duplicates) newer than a certain modification time (mtime)]: :_rmlint_iso8601_or_unix_timestamp' \
'(-k --keep-all-tagged)'{-k,--keep-all-tagged}'[don'\''t delete any duplicates that are in tagged paths]' \
'(-K --keep-all-untagged)'{-K,--keep-all-untagged}'[don'\''t delete any duplicates that are in non-tagged paths]' \
'(-m --must-match-tagged)'{-m,--must-match-tagged}'[only look for duplicates of which at least one is in one of the tagged paths]' \
'(-M --must-match-untagged)'{-M,--must-match-untagged}'[only look for duplicates of which at least one is in one of the non-tagged paths]' \
'(-S --rank-by)'{-S,--rank-by}'=[sort the files in a group of duplicates into originals and duplicates by one or more criteria]: :_rmlint_rank' \
'--replay[read an existing json file and re-output it]' \
'(-C --xattr)'{-C,--xattr}'[shortcut for --xattr-read, --xattr-write, --write-unfinished]' \
'--xattr-read[read cached checksums from the extended file attributes]' \
'--xattr-write[write cached checksums to the extended file attributes]' \
'--xattr-clear[clear cached checksums from the extended file attributes]' \
'(-U --write-unfinished)'{-U,--write-unfinished}'[include files in output that have not been hashed fully, i.e. files that do not appear to have a duplicate]' \
'(-t --threads)'{-t,--threads}'=[the number of threads to use during file tree traversal and hashing (default: 16)]: :_guard "[0-9]#" "threads [16]"' \
'(-u --limit-mem)'{-u,--limit-mem}'=[apply a maximum amount of memory to use for hashing and --paranoid]:size: _rmlint_size' \
'(-q --clamp-low)'{-q,--clamp-low}'=[only look at the content of files in the range of from low to (including) high (default: 0)]: : _rmlint_clamp 0' \
'(-Q --clamp-top)'{-Q,--clamp-top}'=[only look at the content of files in the range of from low to (including) high (default: 1.0)]: : _rmlint_clamp 1.0' \
'(-Z --mtime-window)'{-Z,--mtime-window}'=[only consider those files as duplicates that have the same content and the same modification time (mtime) within a certain window of T seconds (default: -1)]: :_guard "(-|)[0-9]#" "mtime window (seconds) [-1]"' \
'--with-fiemap[enable reading the file extents on rotational disk in order to optimize disk access patterns (default)]' \
'--without-fiemap[disable reading the file extents on rotational disk in order to optimize disk access patterns]' \
'--gui[start the optional graphical frontend to rmlint called Shredder]:*: :->gui' \
'--hash[make rmlint work as a multi-threaded file hash utility]:*: :->hash' \
'--equal[check if the paths given on the commandline all have equal content]: :_rmlint_files_or_separator' \
'(-0 --stdin0)'{-0,--stdin0}'[read null-separated file list from stdin]' \
'--backup[do create backups of previous result files]' \
'--no-backup[do not create backups of previous result files]' \
'--dedupe[dedupe matching extents from source to dest (if filesystem supports)]:*:: := ->dedupe' \
'--dedupe-xattr[check extended attributes to see if the file is already deduplicated]' \
'--dedupe-readonly[(--dedupe option) even dedupe read-only snapshots (needs root)]' \
'--is-reflink[test if two files are reflinks (share same data extents)]:*:: := ->reflink' \
'*: :_rmlint_files_or_separator' && return
case $state in
(gui)
# Reached when the main spec above matched `--gui` (its action is `->gui`):
# complete the flags that rmlint forwards to the Shredder graphical frontend.
_arguments -s -w : \
'(- *)'{-h,--help}'[show help options]' \
{-a,--add-location}'[add locations to locations view]' \
{-s,--scan}'[add location to scan (as untagged path)]' \
{-S,--scan-tagged}'[add location to scan (as tagged path)]' \
{-l,--load-script}'[show `script` in editor view]' \
'*'{-v,--verbose}'[be more verbose]' \
'*'{-V,--less-verbose}'[be less verbose]' \
{-c,--show-settings}'[show the settings view]' \
'(- *)--version[show the version of Shredder]' && ret=0
;;
(hash)
_arguments -s -w : \
'(- *)'{-h,--help}'[show help options]' \
{-a,--algorithm}'[digest type \[bLAKE2B\]]:type:_rmlint_algorithm' \
{-t,--num-threads}'[number of hashing threads \[8\]]: :_guard "[0-9]#" "threads [8]"' \