#compdef rmlint rmlint.sh -P rmlint.*.sh
# Copyright (c) 2021 Github zsh-users - https://github.com/zsh-users
#
# Permission is hereby granted, without written agreement and without
# licence or royalty fees, to use, copy, modify, and distribute this
# software and to distribute modified versions of this software for any
# purpose, provided that the above copyright notice and the following
# two paragraphs appear in all copies of this software.
#
# In no event shall the Zsh Development Group be liable to any party for
# direct, indirect, special, incidental, or consequential damages arising out
# of the use of this software and its documentation, even if the Zsh
# Development Group have been advised of the possibility of such damage.
#
# The Zsh Development Group specifically disclaim any warranties, including,
# but not limited to, the implied warranties of merchantability and fitness
# for a particular purpose. The software provided hereunder is on an "as is"
# basis, and the Zsh Development Group have no obligation to provide
# maintenance, support, updates, enhancements, or modifications.
#
# Description
# -----------
#
# Zsh completion for rmlint 2.10.1 (https://github.com/sahib/rmlint)
#
# Authors
# -------
#
# * oxiedi (https://github.com/oxiedi)
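#
# Installation
# ------------
#
# A minimal sketch of manual installation; ~/.zsh/completions is only an
# illustrative choice, any directory on $fpath works:
#
#   mkdir -p ~/.zsh/completions
#   cp _rmlint ~/.zsh/completions/_rmlint
#
# then in ~/.zshrc, before compinit runs:
#
#   fpath=(~/.zsh/completions $fpath)
#   autoload -Uz compinit && compinit

# Complete the comma-separated list of lint types for -T/--types.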
(( $+functions[_rmlint_types] )) ||
_rmlint_types() {
compset -P '*[+-]'
# FIXME: all values before `-` are swallowed by `*`, which breaks deduplication of the swallowed values
# TODO: respect `prefix-needed`
_values -s ',' 'list [defaults]' \
'all[enables all lint types]' \
'defaults[enables all lint types except nonstripped]' \
'minimal[defaults minus emptyfiles and emptydirs]' \
'minimaldirs[defaults minus emptyfiles, emptydirs and duplicates, but with duplicatedirs]' \
'none[disables all lint types]' \
'(badids bi)'{badids,bi}'[find files with bad UID, GID or both]' \
'(badlinks bl)'{badlinks,bl}'[find bad symlinks pointing nowhere valid]' \
'(emptydirs ed)'{emptydirs,ed}'[find empty directories]' \
'(emptyfiles ef)'{emptyfiles,ef}'[find empty files]' \
'(nonstripped ns)'{nonstripped,ns}'[find nonstripped binaries]' \
'(duplicates df)'{duplicates,df}'[find duplicate files]' \
'(duplicatedirs dd)'{duplicatedirs,dd}'[find duplicate directories (same as -D)]'
}
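# Populate the global associative array mapping each formatter name to its
# description; shared by the -o/-O and -c completion helpers below.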
(( $+functions[__rmlint_setup_formatter_descriptions] )) ||
__rmlint_setup_formatter_descriptions() {
typeset -gA formatter_descriptions=(
csv 'output all found lint as a comma-separated value list'
sh 'output all found lint as a shell script'
json 'print a JSON-formatted dump of all found reports'
py 'output a Python script and a JSON document, just like the json formatter'
uniques 'output all unique paths found during the run, one path per line'
stamp 'output a timestamp of the time rmlint was run'
progressbar 'show a progressbar'
pretty 'show all found items in realtime, nicely colored'
summary 'show counts of files and their respective size after the run; also list all written output files'
fdupes 'print output similar to the popular duplicate finder fdupes(1)'
)
}
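# Complete -o/--output and -O/--add-output specs of the form <formatter>:<path>
# (e.g. `rmlint -o json:stdout`); after the colon, offer files or stdout/stderr.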
(( $+functions[_rmlint_output] )) ||
_rmlint_output() {
local -A formatter_descriptions
__rmlint_setup_formatter_descriptions
if compset -P "(${(kj:|:)formatter_descriptions}):"; then
_alternative \
'files:file:_files' \
'outputs:output:(stdout stderr)'
else
local -a outputs
local f d
for f d in ${(kv)formatter_descriptions}; do
outputs+=( "$f:$d" )
done
_describe -t outputs 'output' outputs -S ':' -q
fi
}
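# Complete -c/--config specs of the form <formatter>:<option>[=<value>]
# (e.g. `rmlint -c sh:link`); py, pretty and summary take no options.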
(( $+functions[_rmlint_config] )) ||
_rmlint_config() {
local -A formatter_descriptions
__rmlint_setup_formatter_descriptions
unset 'formatter_descriptions['{py,pretty,summary}']'
local -a match mbegin mend
if compset -P "(#b)(${(kj:|:)formatter_descriptions}):"; then
case $match[1] in
(csv)
_values 'option' \
'no_header[do not write a first line describing the column headers]' \
'unique[include unique files in the output]'
;;
(sh)
local context state state_descr line
_values 'option' \
'cmd[specify a user defined command to run on duplicates]: :_cmdstring' \
'handler[define a comma-separated list of handlers to try on duplicate files, in the given order, until one handler succeeds]:handler:->handler' \
'link[shortcut for -c sh:handler=clone,reflink,hardlink,symlink]' \
'hardlink[shortcut for -c sh:handler=hardlink,symlink]' \
'symlink[shortcut for -c sh:handler=symlink]'
case $state in
(handler)
_values -s ',' $state_descr \
'clone[try to clone both files with the FIDEDUPERANGE ioctl(3p) (or BTRFS_IOC_FILE_EXTENT_SAME on older kernels)]' \
'reflink[try to reflink the duplicate file to the original]' \
'hardlink[replace the duplicate file with a hardlink to the original file]' \
'symlink[try to replace the duplicate file with a symbolic link to the original]' \
'remove[remove the file using rm -rf]' \
'usercmd[use the provided user defined command (-c sh:cmd=something)]'
;;
esac
;;
(json)
_values 'option' \
'unique[include unique files in the output]' \
'no_header[do not print the header with metadata]:boolean [true]:(false true)' \
'no_footer[do not print the footer with statistics]:boolean [true]:(false true)' \
'oneline[print one json document per line]:boolean [false]:(true false)'
;;
(uniques)
_values 'option' \
'print0[separate paths with zero bytes instead of newlines]'
;;
(stamp)
_values 'option' \
'iso8601[write an ISO8601-formatted timestamp instead of seconds since epoch]:boolean:(true false)'
;;
(progressbar)
_values 'option' \
'update_interval[number of milliseconds to wait between updates (default: 50)]: :_guard "[0-9]#" "update interval (milliseconds) [50]"' \
'ascii[do not attempt to use unicode characters, which might not be supported by some terminals]' \
'fancy[use a fancier style for the progressbar]'
;;
(fdupes)
_values 'option' \
'omitfirst[omit the first line of each set of duplicates]' \
'sameline[do not print newlines between files, only a space]'
;;
esac
else
local -a formatters
local f d
for f d in ${(kv)formatter_descriptions}; do
formatters+=( "$f:$d" )
done
_describe -t formatters 'formatter' formatters -S ':'
fi
}
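# Complete hash algorithm names for -a/--algorithm, grouped by digest width
# and by cryptographic strength.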
(( $+functions[_rmlint_algorithm] )) ||
_rmlint_algorithm() {
local -a tmp=( sha{1,256,512} sha3-{256,384,512} blake{2s,2b,2sp,2bp} highway{64,128,256} )
_alternative \
'512bit-algorithms:512-bit:(blake2b blake2bp sha3-512 sha512)' \
'384bit-algorithms:384-bit:(sha3-384)' \
'256bit-algorithms:256-bit:(blake2s blake2sp sha3-256 sha256 highway256 metro256 metrocrc256)' \
'160bit-algorithms:160-bit:(sha1)' \
'128bit-algorithms:128-bit:(md5 murmur metro metrocrc highway128)' \
'64bit-algorithms:64-bit:(highway64 xxhash)' \
"cryptographic-algorithms:cryptographic:($tmp)" \
'non-cryptographic-algorithms:non-cryptographic:(metro metro256 xxhash murmur)' \
'not-useful-algorithms:not-useful:(cumulative paranoid ext)'
}
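# Complete the criteria letters for -y/--sort-by; each letter is one criterion,
# and an uppercase letter reverses its order (e.g. `rmlint -y sA`).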
(( $+functions[_rmlint_sort] )) ||
_rmlint_sort() {
local -A letter_descriptions=(
s 'sort by size of group'
a 'sort alphabetically by the basename of the original'
m 'sort by mtime of the original'
p 'sort by path-index of the original'
o 'sort by natural found order (might be different on each run)'
n 'sort by number of files in the group'
)
local -a letters
local l d
for l d in ${(kv)letter_descriptions}; do
letters+=( "${l}[$d]" "${(U)l}[$d (in reverse order)]" )
done
_values -s '' 'order' $letters
}
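# List the size-unit suffixes rmlint accepts, with their byte values
# (decimal K/M/G/... and binary KB/MB/GB/...).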
(( $+functions[__rmlint_describe_multipliers] )) ||
__rmlint_describe_multipliers() {
local -a multipliers=(
'C:1^1'
'W:2^1'
'B:512^1'
'K:1000^1'
'KB:1024^1'
'M:1000^2'
'MB:1024^2'
'G:1000^3'
'GB:1024^3'
'T:1000^4'
'TB:1024^4'
'P:1000^5'
'PB:1024^5'
'E:1000^6'
'EB:1024^6'
)
_describe -t multiplier 'multiplier' multipliers "$@"
}
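# Offer a unit suffix, but only once at least one digit has been typed.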
(( $+functions[__rmlint_multipliers] )) ||
__rmlint_multipliers() {
compset -P '[0-9]##' || return
__rmlint_describe_multipliers "$@"
}
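# Complete a single size value: digits followed by an optional unit suffix.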
(( $+functions[_rmlint_size] )) ||
_rmlint_size() {
! __rmlint_multipliers && [[ -z $PREFIX ]] && _message -e "${1:-size}"
}
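# Complete the <min>-<max> size range taken by -s/--size,
# e.g. `rmlint -s 100K-10M`.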
(( $+functions[_rmlint_size_range] )) ||
_rmlint_size_range() {
if compset -P '[0-9]##'; then
if compset -P '(C|W|B|K|KB|M|MB|G|GB|T|TB|P|PB|E|EB)-'; then
_rmlint_size 'max'
else
__rmlint_describe_multipliers -S '-' -q
fi
elif [[ -z $PREFIX ]]; then
_message -e 'min'
fi
}
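# Complete -N/--newer-than: either an ISO 8601 date or seconds since epoch.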
(( $+functions[_rmlint_iso8601_or_unix_timestamp] )) ||
_rmlint_iso8601_or_unix_timestamp() {
_alternative \
'dates:iso8601_timestamp: _dates -f "%FT%T"' \
'seconds:unix_timestamp:_guard "[0-9]#" "seconds since epoch"'
}
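# Complete the criteria letters for -S/--rank-by; lowercase/uppercase pairs
# select opposite ends of each ordering.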
(( $+functions[_rmlint_rank] )) ||
_rmlint_rank() {
# TODO: {r,R,x,X}<regex>
_values -s '' 'criteria [pOma]' \
'm[keep lowest mtime (oldest)]' 'M[keep highest mtime (newest)]' \
'a[keep first alphabetically]' 'A[keep last alphabetically]' \
'p[keep first named path]' 'P[keep last named path]' \
'd[keep path with lowest depth]' 'D[keep path with highest depth]' \
'l[keep path with shortest basename]' 'L[keep path with longest basename]' \
'r[keep paths matching regex]' 'R[keep path not matching regex]' \
'x[keep basenames matching regex]' 'X[keep basenames not matching regex]' \
'h[keep file with lowest hardlink count]' 'H[keep file with highest hardlink count]' \
'o[keep file with lowest number of hardlinks outside of the paths traversed by rmlint]' \
'O[keep file with highest number of hardlinks outside of the paths traversed by rmlint]'
}
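# Complete an integer percentage from 1 to 100 followed by `%`.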
(( $+functions[_rmlint_percent] )) ||
_rmlint_percent() {
if compset -P '(100|[1-9][0-9]|[1-9])'; then
compadd "$@" -- '%'
elif [[ -z $PREFIX ]]; then
_message -e 'percent%%'
fi
}
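# Complete a clamp value for -q/--clamp-low and -Q/--clamp-top: a factor
# between 0 and 1, a percentage, or an absolute byte offset.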
(( $+functions[_rmlint_clamp] )) ||
_rmlint_clamp() {
_alternative \
"factor: :_guard '((|0)(|.[0-9]#)|1(|.0#))' 'fac.tor [$1]'" \
'percent:percent%%:_rmlint_percent' \
'multiplier:offset: _rmlint_size "offset"'
}
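# Complete positional arguments: paths to scan, the `//` separator (paths
# after it are tagged, i.e. preferred as originals), or `-` to read paths
# from stdin.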
(( $+functions[_rmlint_files_or_separator] )) ||
_rmlint_files_or_separator() {
if (( $words[(i)-] < CURRENT )); then
[[ -z $words[CURRENT] ]] && compadd "$@" -S '' -- -
return
fi
local -a alts=( 'files:file:_files' )
(( $line[(I)//] || $+opt_args[--equal] )) || alts+=( 'separator:separator:(//)' )
_alternative $alts
}
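# Main entry point: complete the generated rmlint.sh removal script when the
# service name ends in .sh, otherwise complete rmlint itself.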
_rmlint() {
if [[ $service = *.sh ]]; then
_arguments -s : \
'(-)-h[show help message]' \
'-d[do not ask before running]' \
'-x[keep rmlint.sh; do not autodelete it]' \
'-p[recheck that files are still identical before removing duplicates]' \
'-r[allow deduplication of files on read-only btrfs snapshots (requires sudo)]' \
'(-d -x)-n[do not perform any modifications, just print what would be done (implies -d and -x)]' \
'-c[clean up empty directories while deleting duplicates]' \
'-q[do not show progress]' \
'-k[keep the timestamp of directories when removing duplicates]' \
'-i[ask before deleting each file]'
return
fi
local curcontext="$curcontext" state state_descr
local -a line
local -i ret=1
typeset -A opt_args
_arguments -s -w -C : \
'(-T --types)'{-T,--types}'=[configure the types of lint rmlint will look for]: :_rmlint_types' \
'*'{-o,--output}'=[configure the way rmlint outputs its results]:spec:_rmlint_output' \
'*'{-O,--add-output}'=[configure the way rmlint outputs its results (preserve defaults)]:spec:_rmlint_output' \
'*'{-c,--config}'=[configure a format]:spec:_rmlint_config' \
'(-z --perms)'{-z,--perms}'=[only look into a file if it is readable, writable or executable by the current user]: :_values -s "" perms r w x' \
'(-a --algorithm)'{-a,--algorithm}'=[choose the algorithm to use for finding duplicate files]:algo:_rmlint_algorithm' \
'*'{-p,--paranoid}'[increase the paranoia of rmlint'\''s duplicate algorithm]' \
'*'{-P,--less-paranoid}'[decrease the paranoia of rmlint'\''s duplicate algorithm]' \
'*'{-v,--loud}'[increase the verbosity]' \
'*'{-V,--quiet}'[decrease the verbosity]' \
'(-g --progress)'{-g,--progress}'[show a progressbar with sane defaults]' \
'(-G --no-progress)'{-G,--no-progress}'[do not show a progressbar with sane defaults (default)]' \
'(-D --merge-directories)'{-D,--merge-directories}'[make rmlint use a special mode where all found duplicates are collected and checked whether whole directory trees are duplicates]' \
'(-j --honour-dir-layout)'{-j,--honour-dir-layout}'[only recognize directories as duplicates that have the same path layout]' \
'(-y --sort-by)'{-y,--sort-by}'=[during output, sort the found duplicate groups by criteria described by order]:order:_rmlint_sort' \
'(-w --with-color)'{-w,--with-color}'[use color escapes for pretty output (default)]' \
'(-W --no-with-color)'{-W,--no-with-color}'[disable color escapes for pretty output]' \
'(- *)'{-h,--help}'[show a shorter reference help text]' \
'(- *)--help-all[show all help options]' \
'(- *)'{-H,--show-man}'[show the full man page]' \
'(- *)--version[print the version of rmlint]' \
'(-s --size)'{-s,--size}'=[only consider files as duplicates in a certain size range]:range:_rmlint_size_range' \
'(-d --max-depth)'{-d,--max-depth}'=[only recurse up to this depth]: :_guard "[0-9]#" "depth"' \
'(-l --hardlinked)'{-l,--hardlinked}'[hardlinked files are treated as duplicates (default)]' \
'--keep-hardlinked[rmlint will not delete any files that are hardlinked to an original in their respective group]' \
'(-L --no-hardlinked)'{-L,--no-hardlinked}'[only one file (of a set of hardlinked files) is considered, all the others are ignored]' \
'(-f --followlinks)'{-f,--followlinks}'[follow symbolic links]' \
'(-F --no-followlinks)'{-F,--no-followlinks}'[ignore symbolic links completely]' \
'(-@ --see-symlinks)'{-@,--see-symlinks}'[see symlinks and treat them like small files with the path to their target in them (default)]' \
'(-x --no-crossdev)'{-x,--no-crossdev}'[stay always on the same device]' \
'(-X --crossdev)'{-X,--crossdev}'[allow crossing mountpoints (default)]' \
'(-r --hidden)'{-r,--hidden}'[traverse hidden directories]' \
'(-R --no-hidden)'{-R,--no-hidden}'[don'\''t traverse hidden directories (default)]' \
'--partial-hidden[only find hidden files and directories when they are inside duplicate directories]' \
'(-b --match-basename)'{-b,--match-basename}'[only consider those files as dupes that have the same basename]' \
'(-B --unmatched-basename)'{-B,--unmatched-basename}'[only consider those files as dupes that do not share the same basename]' \
'(-e --match-with-extension)'{-e,--match-with-extension}'[only consider those files as dupes that have the same file extension]' \
'(-E --no-match-with-extension)'{-E,--no-match-with-extension}'[don'\'t' consider those files as dupes that have the same file extension (default)]' \
'(-i --match-without-extension)'{-i,--match-without-extension}'[only consider those files as dupes that have the same basename minus the file extension]' \
'(-I --no-match-without-extension)'{-I,--no-match-without-extension}'[don'\'t' consider those files as dupes that have the same basename minus the file extension (default)]' \
'(-n --newer-than-stamp)'{-n,--newer-than-stamp}'=[only consider files (and their size siblings for duplicates) newer than a certain modification time (mtime)]:timestamp_filename:_files' \
'(-N --newer-than)'{-N,--newer-than}'=[only consider files (and their size siblings for duplicates) newer than a certain modification time (mtime)]: :_rmlint_iso8601_or_unix_timestamp' \
'(-k --keep-all-tagged)'{-k,--keep-all-tagged}'[don'\''t delete any duplicates that are in tagged paths]' \
'(-K --keep-all-untagged)'{-K,--keep-all-untagged}'[don'\''t delete any duplicates that are in non-tagged paths]' \
'(-m --must-match-tagged)'{-m,--must-match-tagged}'[only look for duplicates of which at least one is in one of the tagged paths]' \
'(-M --must-match-untagged)'{-M,--must-match-untagged}'[only look for duplicates of which at least one is in one of the non-tagged paths]' \
'(-S --rank-by)'{-S,--rank-by}'=[sort the files in a group of duplicates into originals and duplicates by one or more criteria]: :_rmlint_rank' \
'--replay[read an existing json file and re-output it]' \
'(-C --xattr)'{-C,--xattr}'[shortcut for --xattr-read, --xattr-write, --write-unfinished]' \
'--xattr-read[read cached checksums from the extended file attributes]' \
'--xattr-write[write cached checksums to the extended file attributes]' \
'--xattr-clear[clear cached checksums from the extended file attributes]' \
'(-U --write-unfinished)'{-U,--write-unfinished}'[include files in output that have not been hashed fully, i.e. files that do not appear to have a duplicate]' \
'(-t --threads)'{-t,--threads}'=[the number of threads to use during file tree traversal and hashing (default: 16)]: :_guard "[0-9]#" "threads [16]"' \
'(-u --limit-mem)'{-u,--limit-mem}'=[apply a maximum amount of memory to use for hashing and --paranoid]:size: _rmlint_size' \
'(-q --clamp-low)'{-q,--clamp-low}'=[only look at the content of files in the range from low to (and including) high (default: 0)]: : _rmlint_clamp 0' \
'(-Q --clamp-top)'{-Q,--clamp-top}'=[only look at the content of files in the range from low to (and including) high (default: 1.0)]: : _rmlint_clamp 1.0' \
'(-Z --mtime-window)'{-Z,--mtime-window}'=[only consider those files as duplicates that have the same content and the same modification time (mtime) within a certain window of T seconds (default: -1)]: :_guard "[0-9]#" "mtime window (seconds) [-1]"' \
'--with-fiemap[enable reading the file extents on rotational disk in order to optimize disk access patterns (default)]' \
'--without-fiemap[disable reading the file extents on rotational disk in order to optimize disk access patterns]' \
'--gui[start the optional graphical frontend to rmlint called Shredder]:*: :->gui' \
'--hash[make rmlint work as a multi-threaded file hash utility]:*: :->hash' \
'--equal[check if the paths given on the commandline all have equal content]: :_rmlint_files_or_separator' \
'(-0 --stdin0)'{-0,--stdin0}'[read null-separated file list from stdin]' \
'--backup[do create backups of previous result files]' \
'--no-backup[do not create backups of previous result files]' \
'--dedupe[dedupe matching extents from source to dest (if the filesystem supports it)]:*:: := ->dedupe' \
'--dedupe-xattr[check extended attributes to see if the file is already deduplicated]' \
'--dedupe-readonly[(--dedupe option) even dedupe read-only snapshots (needs root)]' \
'--is-reflink[test if two files are reflinks (share same data extents)]:*:: := ->reflink' \
'*: :_rmlint_files_or_separator' && return
case $state in
(gui)
_arguments -s -w : \
'(- *)'{-h,--help}'[show help options]' \
{-a,--add-location}'[add locations to locations view]' \
{-s,--scan}'[add location to scan (as untagged path)]' \
{-S,--scan-tagged}'[add location to scan (as tagged path)]' \
{-l,--load-script}'[show `script` in editor view]' \
'*'{-v,--verbose}'[be more verbose]' \
'*'{-V,--less-verbose}'[be less verbose]' \
{-c,--show-settings}'[show the settings view]' \
'(- *)--version[show the version of Shredder]' && ret=0
;;
(hash)
_arguments -s -w : \
'(- *)'{-h,--help}'[show help options]' \
{-a,--algorithm}'[digest type \[BLAKE2B\]]:type:_rmlint_algorithm' \
{-t,--num-threads}'[number of hashing threads \[8\]]: :_guard "[0-9]#" "threads [8]"' \
{-b,--buffer-mbytes}'[megabytes read buffer \[256 MB\]]: :_guard "[0-9]#" "buffer (MB) [256]"' \
{-i,--ignore-order}'[print hashes in order completed, not in order entered (reduces memory usage)]' \
'*:file:_files' && ret=0
;;
(dedupe)
_arguments -s -w : \
'-r[enable deduplication of read-only \[btrfs\] snapshots (requires root)]' \
'(-V)*-v[increase the verbosity]' \
'(-v)*-V[decrease the verbosity]' \
':src:_files' \
':dst:_files' && ret=0
;;
(reflink)
_arguments -s -w : \
'(-V)*-v[increase the verbosity]' \
'(-v)*-V[decrease the verbosity]' \
':file1:_files' \
':file2:_files' && ret=0
;;
esac
return ret
}
_rmlint "$@"
# Local Variables:
# mode: Shell-Script
# sh-indentation: 2
# indent-tabs-mode: nil
# sh-basic-offset: 2
# End:
# vim: ft=zsh sw=2 ts=2 et