-rw-r--r--  src/_age               39
-rw-r--r--  src/_bundle           637
-rw-r--r--  src/_concourse       1931
-rw-r--r--  src/_console           72
-rw-r--r--  src/_emacs            149
-rw-r--r--  src/_emacsclient       66
-rw-r--r--  src/_fleetctl         123
-rw-r--r--  src/_flutter            1
-rw-r--r--  src/_git-pulls          2
-rw-r--r--  src/_google            94
-rw-r--r--  src/_jmeter             6
-rw-r--r--  src/_knife            324
-rw-r--r--  src/_l3build           91
-rw-r--r--  src/_mussh             42
-rw-r--r--  src/_networkQuality     4
-rw-r--r--  src/_node             360
-rw-r--r--  src/_pgsql_utils      590
-rw-r--r--  src/_phing             45
-rw-r--r--  src/_play             190
-rw-r--r--  src/_rkt              369
-rw-r--r--  src/_rslsync           41
-rw-r--r--  src/_rsvm              88
-rw-r--r--  src/_screencapture      2
-rw-r--r--  src/_sfdx             935
-rw-r--r--  src/_sslscan           98
-rw-r--r--  src/_stack            134
-rw-r--r--  src/_supervisorctl    237
-rw-r--r--  src/_supervisord      245
-rw-r--r--  src/_svm              204
-rw-r--r--  src/_textutil           8
-rw-r--r--  src/_tsx               58
-rw-r--r--  src/_zcash-cli        273
32 files changed, 2060 insertions(+), 5398 deletions(-)
diff --git a/src/_age b/src/_age
index c30291f..c30377d 100644
--- a/src/_age
+++ b/src/_age
@@ -1,9 +1,9 @@
-#compdef age
+#compdef age age-keygen
# ------------------------------------------------------------------------------
# Description
# -----------
#
-# Completion script for age 1.1.1 (https://github.com/FiloSottile/age).
+# Completion script for age 1.2.1 (https://github.com/FiloSottile/age).
#
# ------------------------------------------------------------------------------
# Authors
@@ -13,19 +13,28 @@
#
# ------------------------------------------------------------------------------
-_arguments \
- -A '-*' \
- '(- *)'{-h,--help}'[show help message and exit]' \
- '(-e --encrypt -d --decrypt)'{-e,--encrypt}'[Encrypt INPUT to OUTPUT]' \
- '(-e --encrypt -d --decrypt -a --armor -p --passphrase -r --recipient -R --recipients-file)'{-d,--decrypt}'[Decrypt INPUT to OUTPUT]' \
- \*{-i,--identity=}'[Encrypt/Decrypt using the identities at PATH]:IDENTITY:_files' \
- '(-o --output)'{-o,--output=}'[Write encrypted/decrypted file to OUTPUT]:OUTPUT:_files' \
- '(-j --plugin)'{-j,--plugin=}'[Encrypt/Decrypt using the data-less PLUGIN]:PLUGIN:' \
- '(-d --decrypt)'\*{-r,--recipient=}'[Encrypt to the explicitly specified RECIPIENT]:RECIPIENT:' \
- '(-d --decrypt)'\*{-R,--recipients-file=}'[Encrypt to the RECIPIENTS listed in the file at PATH]:RECIPIENTS_FILE:_files' \
- '(-a --armor -d --decrypt)'{-a,--armor}'[Encrypt to an ASCII-only "armored" encoding]' \
- '(-p --passphrase -d --decrypt)'{-p,--passphrase}'[Encrypt with a passphrase]' \
- :INPUT:_files
+case $service in
+ (age)
+ _arguments \
+ -A '-*' \
+ '(- *)'{-h,--help}'[show help message and exit]' \
+ '(-e --encrypt -d --decrypt)'{-e,--encrypt}'[Encrypt the input to the output. Default if omitted]' \
+ '(-e --encrypt -d --decrypt -a --armor -p --passphrase -r --recipient -R --recipients-file)'{-d,--decrypt}'[Decrypt the input to the output]' \
+ '(-o --output)'{-o,--output=}'[Write the result to the given file]:OUTPUT:_files' \
+ '(-a --armor -d --decrypt)'{-a,--armor}'[Encrypt to a PEM encoded format]' \
+ '(-p --passphrase -d --decrypt)'{-p,--passphrase}'[Encrypt with a passphrase]' \
+ '(-d --decrypt)'\*{-r,--recipient=}'[Encrypt to the explicitly specified RECIPIENT]:RECIPIENT:' \
+ '(-d --decrypt)'\*{-R,--recipients-file=}'[Encrypt to the RECIPIENTS listed in the file at PATH]:RECIPIENTS_FILE:_files' \
+ \*{-i,--identity=}'[Use the given identity file]:IDENTITY:_files' \
+ :INPUT:_files
+ ;;
+ (age-keygen)
+ _arguments \
+ '(-o --output)'{-o,--output}'[write the result to the given file]:file:_files' \
+ '-y[convert an identity file to a recipients file]' \
+ '*:input'
+ ;;
+esac
# Local Variables:
# mode: Shell-Script
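
The change above turns one completion file into a dispatcher for two commands: zsh sets $service to the name the completion was invoked under, so a file registered as '#compdef age age-keygen' can branch per command. A minimal sketch of the same pattern, using a hypothetical mytool/mytool-keygen pair:

#compdef mytool mytool-keygen
# $service holds the command actually being completed, so a single
# completion file can serve every name on its #compdef line.
case $service in
  (mytool)
    _arguments \
      '(-o --output)'{-o,--output=}'[write the result to the given file]:OUTPUT:_files' \
      ':INPUT:_files'
    ;;
  (mytool-keygen)
    _arguments \
      '-y[run in conversion mode]' \
      '*:input:_files'
    ;;
esac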
diff --git a/src/_bundle b/src/_bundle
index 0122fd2..da25c8e 100644
--- a/src/_bundle
+++ b/src/_bundle
@@ -28,7 +28,7 @@
# Description
# -----------
#
-# Completion script for Bundler 2.7.0 (https://bundler.io/).
+# Completion script for Bundler 4.0.0 (https://bundler.io/).
#
# ------------------------------------------------------------------------------
# Authors
@@ -39,7 +39,273 @@
#
# ------------------------------------------------------------------------------
-local curcontext="$curcontext" state line ret=1
+_bundle() {
+ typeset -A opt_args
+ local context state line
+ local curcontext="$curcontext"
+
+ local ret=1
+
+ _arguments -C -A "-v" -A "--version" \
+ '(- 1 *)'{-v,--version}'[display version information]' \
+ '(-r --retry)'{-r,--retry}'[specify the number of times you wish to attempt network commands]:number:' \
+ '(-V --verbose)'{-V,--verbose}'[print out additional logging information]' \
+ '--no-color[print all output without color]' \
+ '1: :_bundle_commands' \
+ '*:: :->args' && ret=0
+
+ case $state in
+ (args)
+ case $words[1] in
+ (help)
+ _arguments \
+ '1: :_bundle_commands' \
+ && ret=0
+ ;;
+ (install)
+ local -a policies=('HighSecurity' 'MediumSecurity' 'LowSecurity' 'AlmostNoSecurity' 'NoSecurity')
+ _arguments \
+ '(- *)'{-h,--help}'[show help message]' \
+ '(--force --redownload)'{--force,--redownload}'[force reinstalling every gem]' \
+ '--full-index[download and cache the index file of all gems]' \
+ '--gemfile=-[use the specified gemfile instead of Gemfile]:gemfile:_files' \
+ '(-j --jobs)'{-j,--jobs}'[the maximum number of parallel download and install jobs]:number' \
+ '--local[do not attempt to connect to rubygems.org]' \
+ '--lockfile=[location of the lockfile which Bundler should use]:path:_files' \
+ '--prefer-local[force using locally installed gems]' \
+ '--no-cache[do not update the cache in vendor/cache with newly installed gems]' \
+ '--no-lock[do not create a lockfile]' \
+ '--quiet[only output warnings and errors]' \
+ '--retry=[retry number when network or git requests failed]:number' \
+ '--standalone=-[create standalone bundles]:groups:_bundle_groups' \
+ "--trust-policy=-[apply the Rubygems security policy]:arg:($policies)" \
+ && ret=0
+ ;;
+ (update)
+ _arguments \
+ '(- *)'{-h,--help}'[show help message]' \
+ '--all[update all gems specified in Gemfile]' \
+ \*{--group,-g}=-'[only update the gems in the specified group]' \
+ '--source=-[the name of a source used in the Gemfile]:url' \
+ '--local[do not attempt to fetch gems remotely and use the gem cached instead]' \
+ '--ruby[update the locked version of Ruby to the current version of Ruby]' \
+ '--bundler[update the locked version of bundler to the invoked bundler version]' \
+ '(--force --redownload)'{--force,--redownload}'[force reinstalling every gem]' \
+ '--full-index[fall back to using the single-file index of all gems]' \
+ '(-j --jobs)'{-j,--jobs}'[specify the number of jobs to run in parallel]:number' \
+ '--retry=-[retry failed network or git requests for number times]:number' \
+ '--quiet[only output warnings and errors]' \
+ '--patch[prefer updating only to next patch version]' \
+ '--minor[prefer updating only to next minor version]' \
+ '--major[prefer updating only to next major version (default)]' \
+ '--pre[always choose the highest allowed version]' \
+ '--strict[do not allow any gem to be updated past latest --patch | --minor | --major]' \
+ '--conservative[use bundle install conservative update behavior]' \
+ '*:: :_bundle_gems' \
+ && ret=0
+ ;;
+ (cache)
+ _arguments \
+ '(- *)'{-h,--help}'[show help message]' \
+ '--all-platforms[include gems for all platforms present in the lockfile, not only the current one]' \
+ '--cache-path=-[specify a different cache path than the default(vendor/cache)]: :_files -/' \
+ '--gemfile=-[use the specified gemfile instead of Gemfile]:gemfile:_files' \
+ "--no-install[don't install the gems, only update the cache]" \
+ '--quiet[only output warnings and errors]' \
+ && ret=0
+ ;;
+ (exec)
+ _arguments \
+ '(- *)'{-h,--help}'[show help message]' \
+ '--gemfile=[use the specified gemfile instead of Gemfile]' \
+ '*:: :_normal' \
+ && ret=0
+ ;;
+ (config)
+ _arguments \
+ '(- *)'{-h,--help}'[show help message]' \
+ '1: :_bundle_config_subcommands' \
+ '--local[use local configuration]' \
+ '--global[use global configuration]' \
+ && ret=0
+ ;;
+ (add)
+ _arguments \
+ '(- *)'{-h,--help}'[show help message]' \
+ '(-v --version)'{-v,--version}=-'[specify version requirements for the added gem]:version' \
+ '(-g --group)'{-g,--group}=-'[specify the group for the added gem]:group:_bundle_groups' \
+ '(-s --source)'{-s,--source}=-'[specify the source for the added gem]: :_files' \
+ '(-r --require)'{-r,--require}=-'[adds require path to gem]: :_files' \
+ '--path=[specify the file path for the added gem]: :_files -/' \
+ '--git=[specify the git source for the added gem]:git' \
+ '--github=[specify the github source for the added gem]:github' \
+ '--branch=[specify the git branch for the added gem]:branch' \
+ '--ref=[specify the git ref for the added gem]' \
+ "--glob=[specify the location of a dependency's .gemspec, expanded within Ruby]:glob" \
+ '--quiet[do not print progress information to the standard output]' \
+ '--skip-install[adds the gem to the Gemfile but does not install it]' \
+ '--optimistic[adds optimistic declaration of version]' \
+ '--strict[adds strict declaration of version]' \
+ '1::gem' \
+ && ret=0
+ ;;
+ (binstubs)
+ _arguments \
+ '(- *)'{-h,--help}'[show help message]' \
+ '--force[overwrite existing binstubs if they exist]' \
+ '--standalone[makes binstubs that can work without depending on Rubygems or Bundler at runtime]' \
+ '--shebang=-[specify a different shebang executable name than the default (ruby)]: :_files' \
+ '--all[create binstubs for all gems in the bundle]' \
+ '--all-platforms[install binstubs for all platforms]' \
+ '1::gem:' \
+ && ret=0
+ ;;
+ (check)
+ _arguments \
+ '(- *)'{-h,--help}'[show help message]' \
+ '--dry-run[locks the Gemfile before running the command]' \
+ '--gemfile=-[use the specified gemfile instead of the Gemfile]: :_files' \
+ && ret=0
+ ;;
+ (show)
+ _arguments \
+ '(- *)'{-h,--help}'[show help message]' \
+ '--paths[list the paths of all gems that are required by your Gemfile]' \
+ '1:: :_bundle_gems' \
+ && ret=0
+ ;;
+ (outdated)
+ _arguments \
+ '(- *)'{-h,--help}'[show help message]' \
+ '--local[do not attempt to fetch gems remotely and use the gem cache instead]' \
+ '--pre[check for newer pre-release gems]' \
+ '--source=[check against a specific source]:source:_files' \
+ '(--filter-strict --strict)'{--filter-strict,--strict}'[only list newer versions allowed by your Gemfile requirements]' \
+ '(--parseable --porcelain)'{--parseable,--porcelain}'[use minimal formatting for more parsable output]' \
+ '--group=[list gems from a specific group]:group:_bundle_groups' \
+ '--groups[list gems organized by groups]' \
+ '--major[prefer updating to next major version(default)]' \
+ '--minor[prefer updating only to next minor version]' \
+ '--patch[prefer updating only to next patch version]' \
+ '--filter-major[only list major new versions]' \
+ '--filter-minor[only list minor new versions]' \
+ '--filter-patch[only list patch new versions]' \
+ '--only-explicit[only list gems specified in your Gemfile, not their dependencies]' \
+ '*:: :_bundle_gems' \
+ && ret=0
+ ;;
+ (console)
+ _arguments \
+ '(- *)'{-h,--help}'[show help message]' \
+ '--no-color[disable colorization in output]' \
+ '(-r --retry)'{-r,--retry}='[specify the number of times you wish to attempt network commands]:num' \
+ '1:: :_bundle_groups' \
+ && ret=0
+ ;;
+ (open)
+ _arguments \
+ '(- *)'{-h,--help}'[show help message]' \
+ '--path=[specify GEM source relative path to open]:path:_files' \
+ '1:: :_bundle_gems' \
+ && ret=0
+ ;;
+ (list)
+ _arguments \
+ '(- *)'{-h,--help}'[show help message]' \
+ '--name-only[print only the name of each gem]' \
+ '--paths[print the path to each gem in the bundle]' \
+ '--without-group=-[a space-separated list of groups of gems to skip during printing]: :_bundle_groups' \
+ '--only-group=-[a space-separated list of groups of gems to print]: :_bundle_groups' \
+ '--format[output format]:format:(json)' \
+ && ret=0
+ ;;
+ (lock)
+ _arguments \
+ '(- *)'{-h,--help}'[show help message]' \
+ '--update=-[ignores the existing lockfile]' \
+ '--bundler=-[update the locked version of bundler to the given version or the latest version]:version' \
+ '--local[do not attempt to connect to rubygems.org]' \
+ '--print[prints the lockfile to STDOUT instead of writing to the file system]' \
+ '--lockfile=[the path where the lock file should be written to]: :_files' \
+ '--full-index[fall back to using the single file index of all gems]' \
+ '--gemfile=[use the specified gemfile instead of Gemfile]:file:_files' \
+ '--add-checksums[add checksums to the lockfile]' \
+ '--add-platform=[add a new platform to the lockfile]:platforms' \
+ '--remove-platform=[remove a platform from the lockfile]:platforms' \
+ '--normalize-platforms[normalize lockfile platforms]' \
+ '--patch[if updating, prefer updating only to next patch version]' \
+ '--minor[if updating, prefer updating only to next minor version]' \
+ '--major[if updating, prefer updating to next major version(default)]' \
+ '--pre[if updating, always choose the highest allowed version]' \
+ '--strict[if updating, do not allow any gem to be updated past latest --patch | --minor | --major]' \
+ '--conservative[if updating, use bundle install conservative update behavior]' \
+ && ret=0
+ ;;
+ (init)
+ _arguments \
+ '(- *)'{-h,--help}'[show help message]' \
+ '--gemspec=-[use the specified .gemspec to create the Gemfile]: :_files' \
+ '--gemfile=[use the specified name for the gemfile instead of Gemfile]:name' \
+ && ret=0
+ ;;
+ (gem)
+ _arguments \
+ '(- *)'{-h,--help}'[show help message]' \
+ '(--exe -b --bin --no-exe)'{--exe,-b,--bin}'[specify that bundler should create a binary executable in the generated rubygem project]' \
+ '(--exe -b --bin --no-exe)--no-exe[do not create a binary]' \
+ '(--no-coc)--coc[add a CODE_OF_CONDUCT.md to the root of the generated project]' \
+ '(--coc)--no-coc[do not create a CODE_OF_CONDUCT.md]' \
+ '(--changelog --no-changelog)--changelog[add a CHANGELOG.md file to the root of the project]' \
+ '(--changelog --no-changelog)--no-changelog[do not add a CHANGELOG.md file to the root of the project]' \
+ '(--no-ext --ext)--ext=[add boilerplate for C, Go or Rust extension code to the generated project]: :(c go rust)' \
+ '(--ext --no-ext)--no-ext[do not add extension code]' \
+ '--git[initialize a git repo inside your library]' \
+ '--github-username=[GitHub username on README]:username' \
+ '(--no-mit --mit)--mit[add an MIT license to a LICENSE.txt file in the root of the generated project]' \
+ '(--mit --no-mit)--no-mit[do not create a LICENSE.txt]' \
+ '(-t --test --no-test)'{-t,--test}='[specify the test framework]: :(minitest rspec test-unit)' \
+ '(-t --test --no-test)--no-test[do not use a test framework]' \
+ '(--ci --no-ci)--ci=-[specify the continuous integration service]: :(circle github gitlab)' \
+ '(--ci --no-ci)--no-ci[do not use a continuous integration service]' \
+ '(--linter --no-linter)--linter=-[specify the linter and code formatter]: :(rubocop standard)' \
+ '(--linter --no-linter)--no-linter[do not add a linter]' \
+ '(-e --edit)'{-e,--edit}='[open the resulting GEM_NAME.gemspec in EDITOR]:editor' \
+ '(--bundle --no-bundle)--bundle[run bundle install after creating the gem]' \
+ '(--bundle --no-bundle)--no-bundle[do not run bundle install after creating the gem]' \
+ '1::gem_name:' \
+ && ret=0
+ ;;
+ (platform)
+ _arguments \
+ '(- *)'{-h,--help}'[show help message]' \
+ '--ruby[display the ruby directive information]' \
+ && ret=0
+ ;;
+ (clean)
+ _arguments \
+ '(- *)'{-h,--help}'[show help message]' \
+ '--dry-run[print the changes, but do not clean the unused gems]' \
+ '--force[forces cleaning up unused gems even if Bundler is configured to use globally installed gems]' \
+ && ret=0
+ ;;
+ (doctor)
+ _bundle_doctor && ret=0
+ ;;
+ (remove)
+ _arguments \
+ '(- *)'{-h,--help}'[show help message]' \
+ '*:: :_bundle_gems' \
+ && ret=0
+ ;;
+ (plugin)
+ _bundle_plugin && ret=0
+ ;;
+ esac
+ ;;
+ esac
+
+ return ret
+}
_bundle_commands() {
local -a commands=(
@@ -58,13 +324,14 @@ _bundle_commands() {
"open:Open an installed gem in the editor"
"list:Show all of the gems in the current bundle"
"lock:Generate a lockfile for your dependencies"
- "viz:Generate a visual representation of your dependencies"
"init:Generate a simple Gemfile, placed in the current directory"
"gem:Create a simple gem, suitable for development with bundler"
"platform:Displays platform compatibility information"
"clean:Clean up unused gems in your Bundler directory"
"doctor:Display warnings about common problems"
"remove:Removes gems from the Gemfile"
+ "plugin:Manage Bundler plugins"
+ "version:Prints Bundler version information"
)
_describe -t commands 'command' commands "$@"
@@ -85,8 +352,7 @@ _bundle_groups() {
}
_bundle_config_subcommands() {
- local subcommands;
- subcommands=(
+ local -a subcommands=(
"list:print a list of all bundler configuration"
"get:print the value of that configuration setting"
"set:set <name> <value> defaults to setting configuration"
@@ -95,261 +361,118 @@ _bundle_config_subcommands() {
_describe -t subcommands 'subcommand' subcommands "$@"
}
-_arguments -C -A "-v" -A "--version" \
- '(- 1 *)'{-v,--version}'[display version information]' \
- '(-r --retry)'{-r,--retry}'[specify the number of times you with to attempt network commands]:number:' \
- '(-v --verbose)'{-V,--verbose}'[print out additional logging information]' \
- '--no-color[print all output without color]' \
- '1: :_bundle_commands' \
- '*:: :->args' && ret=0
+_bundle_plugin() {
+ local ret=1
+
+ _arguments -C \
+ '(- *)'{-h,--help}'[show help message]' \
+ '1:subcommand:_bundle_plugin_subcommands' \
+ '*:: :->arg' \
+ && ret=0
+
+ case $state in
+ (arg)
+ case $words[1] in
+ (install)
+ _arguments \
+ '(- *)'{-h,--help}'[show help message]' \
+ '--source=[install the plugin gem from a specific source]:url:_urls' \
+ '--version=[specify a version of the plugin gem]:version' \
+ '--git=[install the plugin gem from a Git repository]:url:_urls' \
+ '--branch=[branch name when using --git]:branch' \
+ '--ref=[tag or commit hash when using --git]:ref' \
+ '--path=[local file path to install the plugin gem]:path:_files -/' \
+ '*::plugins' \
+ && ret=0
+ ;;
+ (uninstall)
+ _arguments \
+ '(- *)'{-h,--help}'[show help message]' \
+ '--all[uninstall all the installed plugins]' \
+ '*::plugins' \
+ && ret=0
+ ;;
+ (list)
+ _arguments \
+ '(- *)'{-h,--help}'[show help message]' \
+ && ret=0
+ ;;
+ (help)
+ _arguments \
+ '(- *)'{-h,--help}'[show help message]' \
+ '1:subcommand:(install uninstall list help)' \
+ && ret=0
+ ;;
+ esac
+ ;;
+ esac
+
+ return ret
+}
+
+_bundle_plugin_subcommands() {
+ local -a subcommands=(
+ 'install:install the given plugins'
+ 'uninstall:uninstall the plugins'
+ 'list:list the installed plugins and available commands'
+ 'help:describe subcommands or one specific subcommand'
+ )
+
+ _describe -t subcommands 'subcommand' subcommands "$@"
+}
+
+_bundle_doctor() {
+ local ret=1
+
+ _arguments -C \
+ '(- *)'{-h,--help}'[show help message]' \
+ '--quiet[only output warnings and errors]' \
+ '--gemfile=[the location of the Gemfile which Bundler should use]: :_files' \
+ '--ssl[diagnose common SSL problems when connecting to https://rubygems.org]' \
+ '1:subcommand:_bundle_doctor_subcommands' \
+ '*:: :->arg' \
+ && ret=0
-case $state in
- args)
- case $words[1] in
- help)
- local -a commands=('install' 'update' 'cache' 'exec' 'config' 'help' 'add' 'binstubs'
- 'check' 'show' 'outdated' 'console' 'open' 'list' 'lock' 'lock' 'viz' 'init'
- 'gem' 'platform' 'clean' 'doctor' 'remove')
- _arguments -C \
- '1: :($commands)' \
- && ret=0
- ;;
- install)
- local -a policies=('HighSecurity' 'MediumSecurity' 'LowSecurity' 'AlmostNoSecurity' 'NoSecurity')
- _arguments \
- '--binstubs=-[generate bin stubs for bundled gems to ./bin]:directory:_files -/' \
- '--clean[remove any gems not present in the current Gemfile]' \
- '--deployment[install using defaults tuned for deployment environments]' \
- '--redownload[force download every gem, even if the required versions are already available locally]' \
- '--frozen[do not allow the Gemfile.lock to be updated after this install]' \
- '--full-index[download and cache the index file of all gems]' \
- '--gemfile=-[use the specified gemfile instead of Gemfile]:gemfile:_files' \
- '(-j --jobs)'{-j,--jobs}'[the maximum number of parallel download and install jobs]:number' \
- '--local[do not attempt to connect to rubygems.org]' \
- '--no-cache[do not update the cache in vendor/cache with newly installed gems]' \
- '--no-prune[do not remove stale gem from cache after installation]' \
- '--path=-[specify a different path than the system default]:path:_files' \
- '--quiet[only output warnings and errors]' \
- '--shebang=-[specify ruby executable to execute scripts]:ruby:_files' \
- '--standalone=-[create standalone bundles]:groups:_bundle_groups' \
- '--system[install to the system location]' \
- "--trust-policy=-[apply the Rubygems security policy]:arg:($policies)" \
- '--with=-[include gems that are part of the specified named group]:groups:_bundle_groups' \
- '--without=-[exclude gems that are part of the specified named group]:groups:_bundle_groups' \
- && ret=0
- ;;
- update)
- _arguments \
- '--all[update all gems specified in Gemfile]' \
- \*{--group,-g}=-'[only update the gems in the specified group]' \
- '--source=-[the name of a source used in the Gemfile]:url' \
- '--local[do not attempt to fetch gems remotely and use the gem cached instead]' \
- '--ruby[update the locked version of Ruby to the current version of Ruby]' \
- '--bundler[update the locked version of bundler to invoked bundler version]' \
- '--full-index[fall back to using the single-file index of all gems]' \
- '(-j --jobs)'{-j,--jobs}'[specify the number of jobs to run in parallel]:number' \
- '--retry=-[retry failed network or git requests for number times]:number' \
- '--quiet[only output warnings and errors]' \
- '--redownload[force download every gem, even if the required versions are already available locally]' \
- '--patch[prefer updating only to next patch version]' \
- '--minor[prefer updating only to next minor version]' \
- '--major[prefer updating only to next major version (default)]' \
- '--strict[do not allow any gem to be updated past latest --patch | --minor | --major]' \
- '--conservative[use bundle install conservative update behavior]' \
- '*:: :_bundle_gems' \
- && ret=0
- ;;
- cache)
- _arguments \
- '--all[include all sources]' \
- '--all-platforms[include gems for all platforms present in the lockfile, not only the current one]' \
- '--cache-path=-[specify a different cache path than the default(vendor/cache)]: :_files -/' \
- '--gemfile=-[use the specified gemfile instead of Gemfile]:gemfile:_files' \
- "--no-install[don't install the gems, only update the cache]" \
- "--no-prune[don't remove stale gems from the cache]" \
- '--path=-[specify a different path than the system default($BUNDLE_PATH or $GEM_HOME)]: :_files' \
- '--quite[only output warnings and errors]' \
- '--frozen[do not allow the Gemfile.lock to be updated after this bundle cache operation]' \
- '--no-color[disable colorization in output]' \
- '(-r --retry)'{-r,--retry}=-'[specify the number of times you with to attempt network commands]:nums' \
- '(-V --verbose)'{-v,--verbose}'[enable verbose output mode]' \
- && ret=0
- ;;
- exec)
- _arguments \
- '--keep-file-descriptors[exec will revert to the 1.9 behavior of passing all file descriptors to the new process]' \
- '*:: :_normal' \
- && ret=0
- ;;
- config)
- _arguments -C \
- '1: :_bundle_config_subcommands' \
- '--local[use local configuration]' \
- '--global[use global configuration]' \
- && ret=0
- ;;
- add)
- _arguments \
- '(-v --version)'{-v,--version}=-'[specify version requirements for the added gem]:version' \
- '(-g --group)'{-g,--group}=-'[specify the group for the added gem]:group:_bundle_groups' \
- '(-s --source)'{-s,--source}=-'[specify the source for the added gem]: :_files' \
- '(-r --require)'{-r,--require}=-'[adds require path to gem]: :_files' \
- '--path=-[specify the file path for the added gem]: :_files -/' \
- '--git=-[specify the git source for the added gem]:git' \
- '--github=-[specify the github source for the added gem]:github' \
- '--branch=-[specify the git branch for the added gem]:branch' \
- '--ref=-[specify the git ref for the added gem]' \
- "--glob=[specify the location of a dependency's .gemspec, expanded within Ruby]:glob" \
- '--quiet[do not print progress information to the starndard output]' \
- '--skip-install[adds the gem to the Gemfile but does not install it]' \
- '--optimistic[adds optimistic declaration of version]' \
- '--strict[adds strict declaration of version]' \
- '1::gem:' \
- && ret=0
- ;;
- binstubs)
- _arguments \
- '--force[overwrite existing binstubs if they exist]' \
- '--path=-[the location to install the specified binstubs to]: :_files -/' \
- '--standalone[makes binstubs that can work without depending on Rubygems or Bundler at runtime]' \
- '--shebang=-[specify a different shebang executable name than the default(default: ruby)]: :_files' \
- '--all[create binstubs for all gems in the bundle]' \
- '1::gem:' \
- && ret=0
- ;;
- check)
- _arguments \
- '--dry-run[locks the Gemfile before running the command]' \
- '--gemfile=-[use the specified gemfile instead of the Gemfile]: :_files' \
- '--path=-[specify a different path than the system default($BUNDLE_PATH or $GEM_HOME)]: :_files -/' \
- && ret=0
- ;;
- show)
- _arguments \
- '--paths[list the paths of all gems that are required by your Gemfile]' \
- '1:: :_bundle_gems' \
- && ret=0
- ;;
- outdated)
- _arguments \
- '--local[do not attempt to fetch gems remotely and use the gem cache instead]' \
- '--pre[check for newer pre-release gems]' \
- '--source[check against a specific source]' \
- '--strict[only list newer versions allowed by your Gemfile requirements]' \
- {--parseable,--porcelain}'[use minimal formatting for more parsable output]' \
- '--group=-[list gems from a specific group]:group:_bundle_groups' \
- '--groups[list gems organized by groups]' \
- '--major[prefer updating to next major version(default)]' \
- '--minor[prefer updating only to next minor version]' \
- '--patch[prefer updating only to next patch version]' \
- '--filter-major[only list major new versions]' \
- '--filter-minor[only list minor new versions]' \
- '--filter-patch[only list patch new versions]' \
- '--only-explicit[only list gems specified in your Gemfile, not their dependencies]' \
- '*:: :_bundle_gems' \
- && ret=0
- ;;
- console)
- _arguments \
- '--no-color[disable colorization in output]' \
- '(-r --retry)'{-r,--retry}=-'[specify the number of times you with to attempt network commands]:num' \
- '(-v --verbose)'{-v,--verbose}=-'[enable verbose output mode]' \
- '1:: :_bundle_groups' \
- && ret=0
- ;;
- open)
- _arguments \
- '1:: :_bundle_gems' \
- && ret=0
- ;;
- list)
- _arguments \
- '--name-only[print only the name of each gem]' \
- '--paths[print the path to each gem in the bundle]' \
- '--without-group=-[a space-separated list of groups of gems to skip during printing]: :_bundle_groups' \
- '--only-group=-[a space-separated list of groups of gems to print]: :_bundle_groups' \
- && ret=0
- ;;
- lock)
- _arguments \
- '--update=-[ignores the existing lockfile]' \
- '--local[do not attempt to connect to rubygems.org]' \
- '--print[prints the lockfile to STDOUT instead of writing to the file system]' \
- '--lockfile=-[the path where the lick file should be written to]: :_files' \
- '--full-index[fall back to using the single file index of all gems]' \
- '--add-platform=-[add a new platform to the lockfile, re-resolving for the addition of that platform]' \
- '--remove-platform=-[remove a platform from the lockfile]' \
- '--normalize-platforms[normalize lockfile platforms]' \
- '--patch[if updating, prefer updating only to next patch version]' \
- '--minor[if updating, prefer updating only to next minor version]' \
- '--major[if updating, prefer updating to next major version(default)]' \
- '--strict[if updating, do not allow any gem to be updated past latest --patch | --minor | --major]' \
- '--conservative[if updating, use bundle install conservative update behavior]' \
- && ret=0
- ;;
- viz)
- _arguments \
- '(-f --file)'{-f,--file}=-'[the name to use for the generated file]: :_files' \
- '(-F --format)'{-F,--format}=-'[output format option]: :(png jpg svg dot)' \
- '(-R --requirements)'{-r,--requirements}'[set to show the version of each required dependency]' \
- '(-v --version)'{-v,--version}'[set to show each version]' \
- '(-W --without)'{-W,--without}'[exclude gems that are part of the specified named group]' \
- && ret=0
- ;;
- init)
- _arguments \
- '--gemspec=-[use the specified .gemspec to create the Gemfile]: :_files' \
- '--gemfile=[use the specified name for the gamfile instead of Gemfile]:name' \
- && ret=0
- ;;
- gem)
- _arguments \
- '(--exe -b --bin --no-exe)'{--exe,-b,--bin}'[specify that bundler should create a binary executable in the generated rubygem project]' \
- '(--exe -b --bin --no-exe)--no-exe[do not create a binary]' \
- '(--no-coc)--coc[add a CODE_OF_CONDUCT.md to the root of the generated project]' \
- '(--coc)--no-coc[do not create a CODE_OF_CONDUCT.md]' \
- '(--no-ext --ext)--ext=[add boilerplate for C or Rust extension code to the generated project]: :(c rust)' \
- '(--ext --no-ext)--no-ext[do not add extension code]' \
- '(--no-mit --mit)--mit[add an MIT license to a LICENSE.txt file in the root of the generated project]' \
- '(--mit --no-mit)--no-mit[do not create a LICENSE.txt]' \
- '(-t --test --no-test)'{-t,--test}=-'[specify the test framework]: :(minitest rspec test-unit)' \
- '(-t --test --no-test)--no-test[do not use a test framework]' \
- '(--ci --no-ci)--ci=-[specify the continuous integration service]: :(github travis gitlab circle)' \
- '(--ci --no-ci)--no-ci[do not use a continuous integration service]' \
- '(--linter --no-linter)--linter=-[specify the linter and code formatter]: :(rubocop standard)' \
- '(--linter --no-linter)--no-linter[do not add a linter]' \
- '(-e --edit)'{-e,--edit}=-'[open the resulting GEM_NAME.gemspec in EDITOR]' \
- '1::gem_name:' \
- && ret=0
- ;;
- platform)
- _arguments \
- '--ruby[it will display the ruby directive information]' \
- && ret=0
- ;;
- clean)
- _arguments \
- '--dry-run[print the changes, but do not clean the unused gems]' \
- '--force[forces cleaning up unused gems even if Bundler is configured to use globally installed gems]' \
- && ret=0
- ;;
- doctor)
- _arguments \
- '--quiet[only output warnings and errors]' \
- '--gemfile=-[the location of the Gemfile which Bundler should use]: :_files' \
- && ret=0
- ;;
- remove)
- _arguments \
- '--install[runs bundle install after the given gem have been removed from the Gemfile]' \
- '*:: :_bundle_gems' \
- && ret=0
- ;;
- esac
+ case $state in
+ (arg)
+ case $words[1] in
+ (diagnose)
+ _arguments \
+ '--quiet[only output warnings and errors]' \
+ '--gemfile=[the location of the Gemfile which Bundler should use]: :_files' \
+ '--ssl[diagnose common SSL problems when connecting to https://rubygems.org]' \
+ && ret=0
+ ;;
+ (ssl)
+ _arguments \
+ '--host=[perform the diagnostic on HOST(default: rubygems.org)]:host' \
+ '--tls-version=[TLS version to connect to HOST]:version:(1.1 1.2)' \
+ '--verify-mode=[specify the TLS verify mode]:mode:(CLIENT_ONCE FAIL_IF_NO_PEER_CERT NONE PEER)' \
+ && ret=0
+ ;;
+ (help)
+ _arguments \
+ '1:subcommand:_bundle_doctor_subcommands' \
+ && ret=0
+ ;;
+ esac
;;
-esac
+ esac
+
+ return ret
+}
+
+_bundle_doctor_subcommands() {
+ local -a subcommands=(
+ 'diagnose:check your Gemfile and gem environment for common problems'
+ 'ssl:check issues related to SSL certificates and/or TLS versions'
+ 'help:describe subcommands or one specific subcommand'
+ )
+
+ _describe -t subcommands 'subcommand' subcommands "$@"
+}
-return ret
+_bundle "$@"
# Local Variables:
# mode: Shell-Script
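
The _bundle rewrite above uses the standard zsh subcommand idiom: _arguments -C completes the subcommand in position 1, hands the remaining words to a ->args state, and a _describe helper supplies the subcommand list. A self-contained sketch of that skeleton for a hypothetical demo command:

#compdef demo
# Complete the subcommand first, then re-enter completion for the
# chosen subcommand's own options via the ->args state.
_demo() {
  local curcontext="$curcontext" state line ret=1
  typeset -A opt_args

  _arguments -C \
    '(- *)'{-h,--help}'[show help message]' \
    '1: :_demo_commands' \
    '*:: :->args' && ret=0

  case $state in
    (args)
      case $words[1] in
        (build)
          _arguments \
            '--quiet[only output warnings and errors]' \
            '*:file:_files' \
            && ret=0
          ;;
      esac
      ;;
  esac

  return ret
}

_demo_commands() {
  local -a commands=(
    'build:compile the project'
    'clean:remove generated artifacts'
  )
  _describe -t commands 'command' commands "$@"
}

_demo "$@"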
diff --git a/src/_concourse b/src/_concourse
index 4d0cbc6..31affcd 100644
--- a/src/_concourse
+++ b/src/_concourse
@@ -1,4 +1,4 @@
-#compdef concourse fly
+#compdef concourse
# ------------------------------------------------------------------------------
# Description
# -----------
@@ -13,1361 +13,699 @@
#
# ------------------------------------------------------------------------------
-local _concourse_fly_target \
- _concourse_fly_pipeline \
- _concourse_fly_pipeline_config \
- _concourse_fly_job \
- _concourse_fly_resource \
- _concourse_fly_resource_type
-
-(( $+functions[_concourse_fly] )) ||
-_concourse_fly() {
-
+(( $+functions[_concourse_server] )) ||
+_concourse_server() {
local context state state_descr line ret=1
typeset -A opt_args
+ local -a concourse_postgres_configurations=(
+ '--postgres-host=[the host to connect to]: :_hosts'
+ '--postgres-port=[the port to connect to]: :_concourse_ports'
+ '--postgres-socket=[path to a UNIX domain socket to connect to]: :_files'
+ '--postgres-user=[the user to sign in as]: :_users'
+ "--postgres-password=[the user's password]:password"
+ '--postgres-sslmode=[whether or not to use SSL(default: disable)]:SSL mode:(disable require verify-ca verify-full)'
+ '--postgres-sslnegotiation=[how SSL encryption is negotiated with the server(default: postgres)]: :(postgres direct)'
+ '--postgres-ca-cert=[CA cert file location, to verify when connecting with SSL]: :_files'
+ '--postgres-client-cert=[client cert file location]: :_files'
+ '--postgres-client-key=[client key file location]: :_files'
+ '--postgres-connect-timeout=[dialing timeout]: :_concourse_durations'
+ '--postgres-database=[the name of the database to use(default: atc)]:database name'
+ )
+
_arguments -C \
'(- : *)'{-h,--help}'[display help information]' \
- '(- : *)'{-v,--version}'[print the version of Fly and exit]' \
- {-t,--target=}'[concourse target name]: :_concourse_fly_targets' \
- --verbose'[print API requests and responses]' \
- --print-table-headers'[print table headers even for redirected output]' \
+ '(- : *)'{-v,--version}'[print the version of Concourse and exit]' \
'(-): :->command' \
'(-)*:: :->arguments' \
&& ret=0
case $state in
(command)
- _concourse_fly_commands
- ;;
+ _concourse_commands && ret=0
+ ;;
(arguments)
- curcontext=${curcontext%:*:*}:concourse-fly-$words[1]:
- if (( $+functions[_concourse_fly_${words[1]}_args] )); then
- _concourse_fly_target=${(v)opt_args[(i)-t|--target]}
- _concourse_fly_${words[1]}_args && ret=0
+ curcontext=${curcontext%:*:*}:concourse-$words[1]:
+
+ if [[ $words[1] == "quickstart" || $words[1] == "web" ]]; then
+ _concourse_quickstart_or_web "${words[1]}" && ret=0
else
- _message "unknown command ${words[1]}" && ret=1
+ if (( $+functions[_concourse_${words[1]}] )); then
+ _concourse_${words[1]} && ret=0
+ else
+ _message "unknown command ${words[1]}" && ret=1
+ fi
fi
- ;;
+ ;;
esac
return ret
}
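
Three idioms from _concourse_server above recur throughout the rest of this file: helper definitions wrapped in a (( $+functions[name] )) || guard so an already-loaded function is not redefined, option specs collected in a local array and spliced into _arguments (as with concourse_postgres_configurations), and dynamic dispatch to a per-subcommand helper after narrowing $curcontext. A small sketch of all three, with hypothetical _example_* names:

# Define the helper only if no function of this name exists yet;
# $+functions[name] is 1 once the function is defined.
(( $+functions[_example_worker] )) ||
_example_worker() {
  # option specs kept in an array can be shared by several helpers
  local -a db_options=(
    '--db-host=[the host to connect to]: :_hosts'
    '--db-user=[the user to sign in as]: :_users'
  )
  _arguments "$db_options[@]"
}

(( $+functions[_example_dispatch] )) ||
_example_dispatch() {
  local curcontext="$curcontext" ret=1
  # narrow the context to the subcommand, then call its helper if defined
  curcontext=${curcontext%:*:*}:example-$words[1]:
  if (( $+functions[_example_${words[1]}] )); then
    _example_${words[1]} && ret=0
  else
    _message "unknown command ${words[1]}" && ret=1
  fi
  return ret
}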
-(( $+functions[_concourse_fly_commands] )) ||
-_concourse_fly_commands() {
+(( $+functions[_concourse_commands] )) ||
+_concourse_commands() {
local commands=(
- {ab,abort-build}":abort a build"
- {bs,builds}":list builds data"
- {cr,check-resource}":check a resource"
- {crt,check-resource-type}":check a resource-type"
- {cl,checklist}":print a Checkfile of the given pipeline"
- {ctc,clear-task-cache}":clears cache from a task container"
- {cs,containers}":print the active containers"
- {c,curl}":curl the api"
- {dtg,delete-target}":delete target"
- {dp,destroy-pipeline}":destroy a pipeline"
- {dt,destroy-team}":destroy a team and delete all of its data"
- {etg,edit-target}":edit a target"
- {e,execute}":execute a one-off build using local bits"
- {ep,expose-pipeline}":make a pipeline publicly viewable"
- {fp,format-pipeline}":format a pipeline config"
- {gp,get-pipeline}":get a pipeline's current configuration"
- {gt,get-team}":show team configuration"
- "help:print help message"
- {hp,hide-pipeline}":hide a pipeline from the public"
- {i,intercept,hijack}":execute a command in a container"
- {js,jobs}":list the jobs in the pipelines"
- {lw,land-worker}":land a worker"
- {l,login}":authenticate with the target"
- {o,logout}":release authentication with the target"
- {op,order-pipelines}":orders pipelines"
- {pj,pause-job}":pause a job"
- {pp,pause-pipeline}":pause a pipeline"
- {ps,pipelines}":list the configured pipelines"
- {pw,prune-worker}":prune a stalled, landing, landed, or retiring worker"
- {rp,rename-pipeline}":rename a pipeline"
- {rt,rename-team}":rename a team"
- {rvs,resource-versions}":list the versions of a resource"
- {rs,resources}":list the resources in the pipeline"
- {sp,set-pipeline}":create or update a pipeline's configuration"
- {st,set-team}":create or modify a team to have the given credentials"
- "status:login status"
- {s,sync}":download and replace the current fly from the target"
- {ts,targets}":list saved targets"
- {t,teams}":list the configured teams"
- {tj,trigger-job}":start a job in a pipeline"
- {uj,unpause-job}":unpause a job"
- {up,unpause-pipeline}":un-pause a pipeline"
- "userinfo:user information"
- {vp,validate-pipeline}":validate a pipeline config"
- {vs,volumes}":list the active volumes"
- {w,watch}":stream a build's output"
- {ws,workers}":list the registered workers"
+ "generate-key:generate RSA key for use with Concourse components"
+ "land-worker:safely drain a worker's assignments for temporary downtime"
+ "migrate:run database migrations"
+ "quickstart:run both 'web' and 'worker' together, auto-wired"
+ "retire-worker:safely remove a worker from the cluster permanently"
+ "web:run the web UI and build scheduler"
+ "worker:run and register a worker"
)
_describe -t commands commands commands
}
-(( $+functions[_concourse_fly_ab_args] )) ||
-_concourse_fly_ab_args() {
- _concourse_fly_abort-build_args
-}
-
-(( $+functions[_concourse_fly_bs_args] )) ||
-_concourse_fly_bs_args() {
- _concourse_fly_builds_args
-}
-
-(( $+functions[_concourse_fly_cl_args] )) ||
-_concourse_fly_cl_args() {
- _concourse_fly_checklist_args
-}
-
-(( $+functions[_concourse_fly_cr_args] )) ||
-_concourse_fly_cr_args() {
- _concourse_fly_check-resource_args
-}
-
-(( $+functions[_concourse_fly_crt_args] )) ||
-_concourse_fly_crt_args() {
- _concourse_fly_check-resource-type_args
-}
-
-(( $+functions[_concourse_fly_ctc_args] )) ||
-_concourse_fly_ctc_args() {
- _concourse_fly_clear-task-cache_args
-}
-
-(( $+functions[_concourse_fly_cs_args] )) ||
-_concourse_fly_cs_args() {
- _concourse_fly_containers_args
-}
-
-(( $+functions[_concourse_fly_c_args] )) ||
-_concourse_fly_c_args() {
- _concourse_fly_curl_args
-}
-
-(( $+functions[_concourse_fly_dtg_args] )) ||
-_concourse_fly_dtg_args() {
- _concourse_fly_delete-target_args
-}
-
-(( $+functions[_concourse_fly_dp_args] )) ||
-_concourse_fly_dp_args() {
- _concourse_fly_destroy-pipeline_args
-}
-
-(( $+functions[_concourse_fly_dt_args] )) ||
-_concourse_fly_dt_args() {
- _concourse_fly_destroy-team_args
-}
-
-(( $+functions[_concourse_fly_etg_args] )) ||
-_concourse_fly_etg_args() {
- _concourse_fly_edit-target_args
-}
-
-(( $+functions[_concourse_fly_e_args] )) ||
-_concourse_fly_e_args() {
- _concourse_fly_execute_args
-}
-
-(( $+functions[_concourse_fly_ep_args] )) ||
-_concourse_fly_ep_args() {
- _concourse_fly_expose-pipeline_args
-}
-
-(( $+functions[_concourse_fly_fp_args] )) ||
-_concourse_fly_fp_args() {
- _concourse_fly_format-pipeline_args
-}
-
-(( $+functions[_concourse_fly_gp_args] )) ||
-_concourse_fly_gp_args() {
- _concourse_fly_get-pipeline_args
-}
-
-(( $+functions[_concourse_fly_gt_args] )) ||
-_concourse_fly_gt_args() {
- _concourse_fly_get-team_args
-}
-
-(( $+functions[_concourse_fly_hp_args] )) ||
-_concourse_fly_hp_args() {
- _concourse_fly_hide-pipeline_args
-}
-
-(( $+functions[_concourse_fly_hijack_args] )) ||
-_concourse_fly_hijack_args() {
- _concourse_fly_intercept_args
-}
-
-(( $+functions[_concourse_fly_i_args] )) ||
-_concourse_fly_i_args() {
- _concourse_fly_intercept_args
-}
-
-(( $+functions[_concourse_fly_js_args] )) ||
-_concourse_fly_js_args() {
- _concourse_fly_jobs_args
-}
-
-(( $+functions[_concourse_fly_lw_args] )) ||
-_concourse_fly_lw_args() {
- _concourse_fly_land-worker_args
-}
-
-(( $+functions[_concourse_fly_l_args] )) ||
-_concourse_fly_l_args() {
- _concourse_fly_login_args
-}
-
-(( $+functions[_concourse_fly_o_args] )) ||
-_concourse_fly_o_args() {
- _concourse_fly_logout_args
-}
-
-(( $+functions[_concourse_fly_op_args] )) ||
-_concourse_fly_op_args() {
- _concourse_fly_order-pipelines_args
-}
-
-(( $+functions[_concourse_fly_pj_args] )) ||
-_concourse_fly_pj_args() {
- _concourse_fly_pause-job_args
-}
-
-(( $+functions[_concourse_fly_pp_args] )) ||
-_concourse_fly_pp_args() {
- _concourse_fly_pause-pipeline_args
-}
-
-(( $+functions[_concourse_fly_ps_args] )) ||
-_concourse_fly_ps_args() {
- _concourse_fly_pipelines_args
-}
-
-(( $+functions[_concourse_fly_pw_args] )) ||
-_concourse_fly_pw_args() {
- _concourse_fly_prune-worker_args
-}
-
-(( $+functions[_concourse_fly_rp_args] )) ||
-_concourse_fly_rp_args() {
- _concourse_fly_rename-pipeline_args
-}
-
-(( $+functions[_concourse_fly_rt_args] )) ||
-_concourse_fly_rt_args() {
- _concourse_fly_rename-team_args
-}
-
-(( $+functions[_concourse_fly_rs_args] )) ||
-_concourse_fly_rs_args() {
- _concourse_fly_resources_args
-}
-
-(( $+functions[_concourse_fly_rvs_args] )) ||
-_concourse_fly_rvs_args() {
- _concourse_fly_resource-versions_args
-}
-
-(( $+functions[_concourse_fly_sp_args] )) ||
-_concourse_fly_sp_args() {
- _concourse_fly_set-pipeline_args
-}
-
-(( $+functions[_concourse_fly_st_args] )) ||
-_concourse_fly_st_args() {
- _concourse_fly_set-team_args
-}
-
-(( $+functions[_concourse_fly_s_args] )) ||
-_concourse_fly_s_args() {
- _concourse_fly_sync_args
-}
-
-(( $+functions[_concourse_fly_ts_args] )) ||
-_concourse_fly_ts_args() {
- _concourse_fly_targets_args
-}
-
-(( $+functions[_concourse_fly_t_args] )) ||
-_concourse_fly_t_args() {
- _concourse_fly_teams_args
-}
-
-(( $+functions[_concourse_fly_tj_args] )) ||
-_concourse_fly_tj_args() {
- _concourse_fly_trigger-job_args
-}
-
-(( $+functions[_concourse_fly_uj_args] )) ||
-_concourse_fly_uj_args() {
- _concourse_fly_unpause-job_args
-}
-
-(( $+functions[_concourse_fly_up_args] )) ||
-_concourse_fly_up_args() {
- _concourse_fly_unpause-pipeline_args
-}
-
-(( $+functions[_concourse_fly_vp_args] )) ||
-_concourse_fly_vp_args() {
- _concourse_fly_validate-pipeline_args
-}
-
-(( $+functions[_concourse_fly_vs_args] )) ||
-_concourse_fly_vs_args() {
- _concourse_fly_volumes_args
-}
-
-(( $+functions[_concourse_fly_w_args] )) ||
-_concourse_fly_w_args() {
- _concourse_fly_watch_args
-}
-
-(( $+functions[_concourse_fly_ws_args] )) ||
-_concourse_fly_ws_args() {
- _concourse_fly_workers_args
-}
-
-(( $+functions[_concourse_fly_help_args] )) ||
-_concourse_fly_help_args() {
- _arguments -C \
- '(- : *)'{-h,--help}'[display help information]'
-}
-
-(( $+functions[_concourse_fly_status_args] )) ||
-_concourse_fly_status_args() {
- _arguments -C \
- '(- : *)'{-h,--help}'[display help information]'
-}
-
-(( $+functions[_concourse_fly_userinfo_args] )) ||
-_concourse_fly_userinfo_args() {
- _arguments -C \
- '(- : *)'{-h,--help}'[display help information]' \
- '--json[print command result as JSON]'
-}
-
-(( $+functions[_concourse_fly_abort-build_args] )) ||
-_concourse_fly_abort-build_args() {
- _arguments -C \
- '(- : *)'{-h,--help}'[display help information]' \
- '(-j --job)'{-j,--job=}'[name of a job to cancel]: :_concourse_fly_pipeline_slash_jobs' \
- '(-b --build)'{-b,--build=}'[job build number to cancel, or build id]: :_concourse_fly_builds'
-}
-
-(( $+functions[_concourse_fly_builds_args] )) ||
-_concourse_fly_builds_args() {
- _arguments -C \
- '(- : *)'{-h,--help}'[display help information]' \
- '(-a --all-teams)'{-a,--all-teams}'[show builds for the all teams that user has access to]' \
- '(-c --count)'{-c,--count=}'[number of builds you want to limit the return to]: :number' \
- '--current-team[show builds for the currently targeted team]' \
- '(-j --job -p --pipeline)'{-j,--job=}'[name of a job to get builds for]: :_concourse_fly_pipeline_slash_jobs' \
- '--json[print command result as JSON]' \
- '(-j --job -p --pipeline)'{-p,--pipeline=}'[name of a pipeline to get builds for]: :_concourse_fly_pipelines' \
- '--since=[start of the range to filter builds]: :_concourse_fly_dates' \
- '--until=[end of the range to filter builds]: :_concourse_fly_dates'
-}
-
-(( $+functions[_concourse_fly_checklist_args] )) ||
-_concourse_fly_checklist_args() {
- _arguments -C \
- '(- : *)'{-h,--help}'[display help information]' \
- '(-p --pipeline)'{-p,--pipeline=}'[the pipeline from which to generate the Checkfile]: :_concourse_fly_pipelines'
-}
-
-(( $+functions[_concourse_fly_check-resource_args] )) ||
-_concourse_fly_check-resource_args() {
-
- local context state state_descr line ret=1
- typeset -A opt_args
-
- _arguments -C \
- '(- : *)'{-h,--help}'[display help information]' \
- '(-r --resource)'{-r,--resource=}'[name of a resource to check]: :_concourse_fly_pipeline_slash_resources' \
- '(-f --from)'{-f,--from=}'[version of the resource to check from]: :->version' \
- && ret=0
-
- case $state in
- (version)
- _concourse_fly_resource=${(v)opt_args[(i)-r|--resource]}
- _concourse_fly_pipeline_resource_versions && ret=0
- ;;
- esac
-
- return ret
-}
-
-(( $+functions[_concourse_fly_check-resource-type_args] )) ||
-_concourse_fly_check-resource-type_args() {
-
- local context state state_descr line ret=1
- typeset -A opt_args
-
- _arguments -C \
- '(- : *)'{-h,--help}'[display help information]' \
- '(-r --resource-type)'{-r,--resource-type=}'[name of a resource type to check]: :_concourse_fly_pipeline_slash_resource_types' \
- '(-f --from)'{-f,--from=}'[version of the resource type to check from]: :->version' \
- && ret=0
-
- case $state in
- (version)
- _concourse_fly_resource_type=${(v)opt_args[(i)-r|--resource-type]}
- _concourse_fly_pipeline_resource_type_versions && ret=0
- ;;
- esac
-
- return ret
-}
-
-(( $+functions[_concourse_fly_clear-task-cache_args] )) ||
-_concourse_fly_clear-task-cache_args() {
- _arguments -C \
- '(- : *)'{-h,--help}'[display help information]' \
- '(-j --job)'{-j,--job=}'[name of a job to cancel]: :_concourse_fly_pipeline_slash_jobs' \
- '(-s --step)'{-s,--step=}'[step name to clear cache from]:task step' \
- '(-c --cache-path)'{-c,--cache-path=}'[cache directory to clear out]: :_files -/' \
- '(-n --non-interactive)'{-n,--non-interactive=}'[destroy the task cache(s) without confirmation]'
-}
-
-(( $+functions[_concourse_fly_containers_args] )) ||
-_concourse_fly_containers_args() {
- _arguments -C \
- '(- : *)'{-h,--help}'[display help information]' \
- '--json[print command result as JSON]'
-}
-
-(( $+functions[_concourse_fly_curl_args] )) ||
-_concourse_fly_curl_args() {
- _arguments -C \
- '(- : *)'{-h,--help}'[display help information]' \
- '--print-and-exit[print curl command and exit]'
-}
-
-(( $+functions[_concourse_fly_delete-target_args] )) ||
-_concourse_fly_delete-target_args() {
- _arguments -C \
- '(- : *)'{-h,--help}'[display help information]' \
- '(-a --all)'{-a,--all}'[delete all targets]'
-}
-
-(( $+functions[_concourse_fly_destroy-pipeline_args] )) ||
-_concourse_fly_destroy-pipeline_args() {
- _arguments -C \
- '(- : *)'{-h,--help}'[display help information]' \
- '(-p --pipeline)'{-p,--pipeline=}'[the pipeline to destroy]: :_concourse_fly_pipelines' \
- '(-n --non-interactive)'{-n,--non-interactive}'[destroy the pipeline without confirmation]'
-}
-
-(( $+functions[_concourse_fly_destroy-team_args] )) ||
-_concourse_fly_destroy-team_args() {
- _arguments -C \
- '(- : *)'{-h,--help}'[display help information]' \
- '(-n --team-name)'{-n,--team-name=}'[the team to delete]: :_concourse_fly_teams' \
- '(-n --non-interactive)'{-n,--non-interactive}'[force apply configuration]'
-}
-
-(( $+functions[_concourse_fly_edit-target_args] )) ||
-_concourse_fly_edit-target_args() {
- _arguments -C \
- '(- : *)'{-h,--help}'[display help information]' \
- '--target-name=[update target name]: :_concourse_fly_targets' \
- '(-u --concourse-url)'{-u,--concourse-url=}'[update concourse URL]: :_urls' \
- '(-n --team-name)'{-n,--team-name=}'[update concourse URL]: :_concourse_fly_teams'
-}
-
-(( $+functions[_concourse_fly_execute_args] )) ||
-_concourse_fly_execute_args() {
-
- local context state state_descr line ret=1
- typeset -A opt_args
-
- _arguments -C \
- '(- : *)'{-h,--help}'[display help information]' \
- '(-c --config)'{-c,--config=}'[the task config to execute]: :_concourse_config_files' \
- '(-p --privileged)'{-p,--privileged}'[run the task with full privileges]' \
- '--include-ignored[disregard .gitignore entries and uploads everything]' \
- '*'{-i,--input=}'[an input to provide to the task]: :->input' \
- '*'{-m,--input-mapping=}'[map a resource to a different name as task input]: :->input-mapping' \
- '(-j --inputs-from)'{-j,--inputs-from=}'[a job to base the inputs on]: :_concourse_fly_pipeline_slash_jobs' \
- '*'{-o,--output=}'[an output to fetch from the task]: :->output' \
- '--image=[image resource for the one-off build]: :_concourse_fly_images' \
- '*--tag=[a tag for a specific environment]: :_concourse_fly_tags' \
- '*'{-v,--var=}'[specify a string value to set for a variable in the pipeline]: :->var' \
- '*'{-y,--yaml-var=}'[specify a YAML value to set for a variable in the pipeline]: :->var' \
- '(-l --load-vars-from)'{-l,--load-vars-from=}'[variable flag that can be used for filling in template values in configuration from a YAML file]: :_files' \
- && ret=0
-
- _concourse_fly_pipeline_config=${(v)opt_args[(i)-c|--config]}
-
- case $state in
- (input-mapping)
- # TODO complete --input-mapping
- _message 'input mapping'
- ;;
- (input)
- _concourse_fly_input_equal_paths && ret=0
- ;;
- (output)
- _concourse_fly_output_equal_paths && ret=0
- ;;
- (var)
- _concourse_fly_var_equal_values && ret=0
- ;;
- esac
-
- return ret
-}
-
-(( $+functions[_concourse_fly_expose-pipeline_args] )) ||
-_concourse_fly_expose-pipeline_args() {
- _arguments -C \
- '(- : *)'{-h,--help}'[display help information]' \
- '(-p --pipeline)'{-p,--pipeline=}'[pipeline to expose]: :_concourse_fly_pipelines'
-}
-
-(( $+functions[_concourse_fly_format-pipeline_args] )) ||
-_concourse_fly_format-pipeline_args() {
- _arguments -C \
- '(- : *)'{-h,--help}'[display help information]' \
- '(-c --config)'{-c,--config=}'[pipeline configuration file]: :_concourse_config_files' \
- '(-w --write)'{-w,--write}'[do not print to stdout, overwrite the file in place]'
-}
-
-(( $+functions[_concourse_fly_get-pipeline_args] )) ||
-_concourse_fly_get-pipeline_args() {
- _arguments -C \
- '(- : *)'{-h,--help}'[display help information]' \
- '(-p --pipeline)'{-p,--pipeline=}'[get configuration of this pipeline]: :_concourse_fly_pipelines' \
- '(-j --json)'{-j,--json}'[print config as json instead of yaml]'
-}
-
-(( $+functions[_concourse_fly_get-team_args] )) ||
-_concourse_fly_get-team_args() {
- _arguments -C \
- '(- : *)'{-h,--help}'[display help information]' \
- '(-n --team)'{-n,--team=}'[get configuration of this team]: :_concourse_fly_teams' \
- '(-j --json)'{-j,--json}'[print config as json instead of yaml]'
-}
-
-(( $+functions[_concourse_fly_hide-pipeline_args] )) ||
-_concourse_fly_hide-pipeline_args() {
- _arguments -C \
- '(- : *)'{-h,--help}'[display help information]' \
- '(-p --pipeline)'{-p,--pipeline=}'[pipeline to hide]: :_concourse_fly_pipelines'
-}
-
-(( $+functions[_concourse_fly_intercept_args] )) ||
-_concourse_fly_intercept_args() {
- # TODO complete --handle
- # TODO complete --check
- # TODO complete --step
- # TODO complete --step-type
- _arguments -C \
- '(- : *)'{-h,--help}'[display help information]' \
- '(-j --job --handle -c --check -u --url)'{-j,--job=}'[name of a job to hijack]: :_concourse_fly_pipeline_slash_jobs' \
- '(-j --job --handle -c --check -u --url)--handle=[handle id of a job to hijack]:job handle' \
- '(-j --job --handle -c --check -u --url)'{-c,--check=}'[name of a resource'\''s checking container to hijack]:name' \
- '(-j --job --handle -c --check -u --url)'{-u,--url=}'[URL for the build, job, or check container to hijack]: :_urls' \
- '(-b --build)'{-b,--build=}'[build number within the job, or global build ID]: :_concourse_fly_builds' \
- '(-s --step)'{-s,--step=}'[name of step to hijack]:step' \
- '--step-type=[type of step to hijack]:step type' \
- '(-a --attempt)'{-a,--attempt=}'[attempt number of step to hijack]: :_values -s, "number" 1 2 3 4 5 6 7 8 9' \
- '(-):command name: _command_names -e' \
- '*::arguments:_normal'
-}
-
-(( $+functions[_concourse_fly_jobs_args] )) ||
-_concourse_fly_jobs_args() {
- _arguments -C \
- '(- : *)'{-h,--help}'[display help information]' \
- '(-p --pipeline)'{-p,--pipeline=}'[get jobs in this pipeline]: :_concourse_fly_pipelines' \
- '--json[print command result as JSON]'
-}
-
-(( $+functions[_concourse_fly_land-worker_args] )) ||
-_concourse_fly_land-worker_args() {
- _arguments -C \
- '(- : *)'{-h,--help}'[display help information]' \
- '(-w --worker)'{-w,--worker=}'[worker to land]: :_concourse_fly_workers'
-}
-
-(( $+functions[_concourse_fly_login_args] )) ||
-_concourse_fly_login_args() {
- _arguments -C \
- '(- : *)'{-h,--help}'[display help information]' \
- '(-c --concourse-url)'{-c,--concourse-url=}'[concourse URL to authenticate with]: :_urls' \
- '(-k --insecure)'{-k,--insecure}'[skip verification of the endpoint'\''s SSL certificate]' \
- '(-u --username)'{-u,--username=}'[username for basic auth]: :_users' \
- '(-p --password)'{-p,--password=}'[password for basic auth]:password' \
- '(-n --team-name)'{-n,--team-name=}'[team to authenticate with]: :_concourse_fly_teams' \
- '--ca-cert=[path to Concourse PEM-encoded CA certificate file]: :_files -g "*.pem"' \
- '(-b --open-browser)'{-b,--open-browser}'[open browser to the auth endpoint]'
-}
-
-(( $+functions[_concourse_fly_logout_args] )) ||
-_concourse_fly_logout_args() {
- _arguments -C \
- '(- : *)'{-h,--help}'[display help information]' \
- '(-a --all)'{-a,--all}'[logout of all targets]'
-}
-
-(( $+functions[_concourse_fly_order-pipelines_args] )) ||
-_concourse_fly_order-pipelines_args() {
- _arguments -C \
- '(- : *)'{-h,--help}'[display help information]' \
- '(-p --pipeline)'{-p,--pipeline=}'[name of pipeline to order]: :_concourse_fly_pipelines'
-}
-
-(( $+functions[_concourse_fly_pause-job_args] )) ||
-_concourse_fly_pause-job_args() {
- _arguments -C \
- '(- : *)'{-h,--help}'[display help information]' \
- '(-j --job)'{-j,--job=}'[name of a job to pause]: :_concourse_fly_pipeline_slash_jobs'
-}
-
-(( $+functions[_concourse_fly_pause-pipeline_args] )) ||
-_concourse_fly_pause-pipeline_args() {
- _arguments -C \
- '(- : *)'{-h,--help}'[display help information]' \
- '(-p --pipeline)'{-p,--pipeline=}'[pipeline to pause]: :_concourse_fly_pipelines'
-}
-
-(( $+functions[_concourse_fly_pipelines_args] )) ||
-_concourse_fly_pipelines_args() {
- _arguments -C \
- '(- : *)'{-h,--help}'[display help information]' \
- '(-a --all)'{-a,--all}'[show all pipelines]' \
- '--json[print command result as JSON]'
-}
-
-(( $+functions[_concourse_fly_prune-worker_args] )) ||
-_concourse_fly_prune-worker_args() {
- _arguments -C \
- '(- : *)'{-h,--help}'[display help information]' \
- '(-w --worker)'{-w,--worker=}'[worker to prune]: :_concourse_fly_workers' \
- '(-a --all-stalled)'{-a,--all-stalled}'[prune all stalled workers]'
-}
-
-(( $+functions[_concourse_fly_rename-pipeline_args] )) ||
-_concourse_fly_rename-pipeline_args() {
- _arguments -C \
- '(- : *)'{-h,--help}'[display help information]' \
- '(-o --old-name)'{-o,--old-name=}'[pipeline to rename]: :_concourse_fly_pipelines' \
- '(-n --new-name)'{-n,--new-name=}'[name to set as pipeline name]: :_concourse_fly_pipelines'
-}
-
-(( $+functions[_concourse_fly_rename-team_args] )) ||
-_concourse_fly_rename-team_args() {
- _arguments -C \
- '(- : *)'{-h,--help}'[display help information]' \
- '(-o --old-name)'{-o,--old-name=}'[current team name]: :_concourse_fly_teams' \
- '(-n --new-name)'{-n,--new-name=}'[new team name]: :_concourse_fly_teams'
-}
-
-(( $+functions[_concourse_fly_resources_args] )) ||
-_concourse_fly_resources_args() {
- _arguments -C \
- '(- : *)'{-h,--help}'[display help information]' \
- '(-p --pipeline)'{-p,--pipeline=}'[get resources in this pipeline]: :_concourse_fly_pipelines' \
- '--json[print command result as JSON]'
-}
-
-(( $+functions[_concourse_fly_resource-versions_args] )) ||
-_concourse_fly_resource-versions_args() {
- _arguments -C \
- '(- : *)'{-h,--help}'[display help information]' \
- '(-c --count)'{-c,--count=}'[number of builds you want to limit the return to]:number' \
- '(-r --resource)'{-r,--resource=}'[name of a resource to get versions for]: :_concourse_fly_pipeline_slash_resources' \
- '--json[print command result as JSON]'
-}
-
-(( $+functions[_concourse_fly_set-pipeline_args] )) ||
-_concourse_fly_set-pipeline_args() {
-
- local context state state_descr line ret=1
- typeset -A opt_args
-
- _arguments -C \
- '(- : *)'{-h,--help}'[display help information]' \
- '(-n --non-interactive)'{-n,--non-interactive}'[skips interactions, uses default values]' \
- '--no-color[disable color output]' \
- '--check-creds[validate credential variables against credential manager]' \
- '(-p --pipeline)'{-p,--pipeline=}'[pipeline to configure]: :_concourse_fly_pipelines' \
- '(-c --config)'{-c,--config=}'[pipeline configuration file]: :_concourse_config_files' \
- '*'{-v,--var=}'[specify a string value to set for a variable in the pipeline]: :->var' \
- '*'{-y,--yaml-var=}'[specify a YAML value to set for a variable in the pipeline]: :->var' \
- '(-l --load-vars-from)'{-l,--load-vars-from=}'[variable flag that can be used for filling in template values in configuration from a YAML file]: :_files' \
- && ret=0
+(( $+functions[_concourse_quickstart_or_web] )) ||
+_concourse_quickstart_or_web() {
+ local command="$1"
+ local ret=1
- _concourse_fly_pipeline_config=${(v)opt_args[(i)-c|--config]}
+ # option-spec arrays shared between the quickstart and web sub-commands
- case $state in
- (var)
- _concourse_fly_var_equal_values && ret=0
- ;;
- esac
+ local -a concourse_web_configurations=(
+ '--peer-address=[network address of this web node, reachable by other web nodes]: :_concourse_host_colon_ports'
+ '--log-level=[minimum level of logs to see]:level:_concourse_log_levels'
+ '--bind-ip=[IP address on which to listen for web traffic]: :_concourse_ip_addresses'
+ '--bind-port=[port on which to listen for HTTP traffic(default: 8000)]: :_concourse_ports'
+ '--tls-bind-port=[port on which to listen for HTTPS traffic]: :_concourse_ports'
+ '--tls-cert=[file containing an SSL certificate]: :_files'
+ '--tls-key=[file containing an RSA private key, used to encrypt HTTPS traffic]: :_files'
+ '--tls-ca-cert=[file containing the client CA certificate, enables mTLS]: :_files'
+ '--external-url=[URL used to reach any ATC from the outside world]: :_urls'
+ '--concurrent-request-limit=[limit the number of concurrent requests to an API endpoint]:limit'
+ '--api-max-conns=[maximum number of open connections for the api connection pool(default: 10)]:limit'
+ '--backend-max-conns=[maximum number of open connections for the backend connection pool(default: 50)]:limit'
+ '--encryption-key=[a 16 or 32 length key used to encrypt sensitive information before storing it in the database]:encryption key'
+ '--old-encryption-key=[encryption key previously used for encrypting sensitive information]:encryption key'
+ '--debug-bind-ip=[IP address on which to listen for the pprof debugger endpoints]: :_concourse_ip_addresses'
+ '--debug-bind-port=[port on which to listen for the pprof debugger endpoints]: :_concourse_ports'
+ '--intercept-idle-timeout=[length of time for an intercepted session to be idle before terminating]: :_concourse_durations'
+ '--component-runner-interval=[interval on which runners are kicked off for builds, locks, scans and checks]:interval:_concourse_durations'
+ '--lidar-scanner-interval=[interval on which the resource scanner will run to see if new checks need to be scheduled]:interval:_concourse_durations'
+ '--global-resource-check-timeout=[time limit on checking for new versions of resources]: :_concourse_durations'
+ '--resource-checking-interval=[interval on which to check for new versions of resources]: :_concourse_durations'
+ '--resource-type-checking-interval=[interval on which to check for new versions of resource types]: :_concourse_durations'
+ '--resource-with-webhook-checking-interval=[interval on which to check for new versions of resources that have a webhook defined]:interval:_concourse_durations'
+ '--max-checks-per-second=[maximum number of checks that can be started per second]:number'
+ '--pause-pipelines-after=[number of days after which a pipeline will be automatically paused when there are no jobs]:days'
+ '--baggageclaim-response-header-timeout=[how long to wait for Baggageclaim to send the response header]: :_concourse_durations'
+ '--streaming-artifacts-compression=[compression algorithm for internal streaming(default: gzip)]:alg:(gzip zstd raw)'
+ '--streaming-size-limitation=[internal volume streaming size limitation in MB]:size'
+ '--garden-request-timeout=[how long to wait for requests to Garden to complete(default: 5m)]: :_concourse_durations'
+ '--cli-artifacts-dir=[directory containing downloadable CLI binaries]: :_files -/'
+ '--web-public-dir=[Web public/ directory to serve live for local development]:dir:_files -/'
+ '--log-db-queries[log database queries]'
+ '--log-cluster-name[log cluster name]'
+ '--build-tracker-interval=[interval on which to run build tracking]: :_concourse_durations'
+ '--default-build-logs-to-retain=[default build logs to retain, 0 means all]:number'
+ '--max-build-logs-to-retain=[maximum build logs to retain, 0 means not specified]:number'
+ '--default-days-to-retain-build-logs=[default days to retain build logs. 0 means unlimited]:number'
+ '--max-days-to-retain-build-logs=[maximum days to retain build logs, 0 means not specified]:number'
+ '--job-scheduling-max-in-flight=[maximum number of jobs to be scheduled at the same time(default: 32)]:number'
+ '--default-task-cpu-limit=[default max number of cpu shares per task, 0 means unlimited]:number'
+ '--default-task-memory-limit=[default maximum memory per task, 0 means unlimited]:number'
+ '--enable-build-auditing[enable auditing for all api requests connected to builds]'
+ '--enable-container-auditing[enable auditing for all api requests connected to containers]'
+ '--enable-job-auditing[enable auditing for all api requests connected to jobs]'
+ '--enable-pipeline-auditing[enable auditing for all api requests connected to pipelines]'
+ '--enable-resource-auditing[enable auditing for all api requests connected to resources]'
+ '--enable-system-auditing[enable auditing for all api requests connected to system transactions]'
+ '--enable-team-auditing[enable auditing for all api requests connected to teams]'
+ '--enable-worker-auditing[enable auditing for all api requests connected to workers]'
+ '--enable-volume-auditing[enable auditing for all api requests connected to volumes]'
+ '--config-rbac=[customize RBAC role-action mapping]:mapping'
+ '--system-claim-key=[token claim key to use when matching system-claim-values(default: aud)]:key'
+ '--system-claim-value=[configure which token requests should be considered "system" requests(default: concourse-worker)]:value'
+ '--base-resource-type-defaults=[base resource type defaults]:type'
+ '--p2p-volume-streaming-timeout=[timeout of p2p volume streaming(default: 15m)]: :_concourse_durations'
+ '--display-user-id-per-connector=[define how to display user ID for each authentication connector]:how'
+ '--default-get-timeout=[default timeout of get steps]: :_concourse_durations'
+ '--default-put-timeout=[default timeout of put steps]: :_concourse_durations'
+ '--default-task-timeout=[default timeout of task steps]: :_concourse_durations'
+ '--num-goroutine-threshold=[when the number of goroutines reaches this threshold, slow down the current ATC]:num'
+ '--db-notification-bus-queue-size=[DB notification bus queue size(default: 10000)]:size'
+ )
- return ret
-}
+ local -a concourse_credential_manager_configurations=(
+ # Credential Management
+ '--secret-retry-attempts=[the number of attempts secret will be retried to be fetched, in case a retriable error happens]:number'
+ '--secret-retry-interval=[the interval between secret retry retrieval attempts]: :_concourse_durations'
+ '--secret-cache-enabled[enable in-memory cache for secrets]'
+ '--secret-cache-duration=[secret values will be cached for not longer than this duration]: :_concourse_durations'
+ '--secret-cache-duration-notfound=[secret not found responses will be cached for this duration]: :_concourse_durations'
+ '--secret-cache-purge-interval=[if the cache is enabled, expired items will be removed at this interval]: :_concourse_durations'
+ # Vault Credential Management
+ '--vault-url=[vault server address used to access secrets]: :_urls'
+ '--vault-path-prefix=[path under which to namespace credential lookup]:prefix'
+ '--vault-lookup-templates=[path templates for credential lookup]: :_files'
+ '--vault-shared-path=[path under which to lookup shared credentials]:path'
+ '--vault-namespace=[Vault namespace to use for authentication and secret lookup]:namespace'
+ '--vault-login-timeout=[timeout value for Vault login(default: 60s)]: :_concourse_durations'
+ '--vault-query-timeout=[timeout value for Vault queries(default: 60s)]: :_concourse_durations'
+ '--vault-disable-srv-lookup[disable DNS SRV lookup when resolving the Vault host]'
+ '--vault-ca-cert=[path to a PEM-encoded CA cert file to use to verify the vault server SSL cert]: :_files'
+ '--vault-ca-path=[path to a directory of PEM-encoded CA cert files to verify the vault server SSL cert]: :_files -/'
+ '--vault-client-cert=[path to the client certificate for Vault authorization]: :_files'
+ '--vault-client-key=[path to the client private key for Vault authorization]: :_files'
+ '--vault-server-name=[if set, is used to set the SNI host when connecting via TLS]:server name'
+ '--vault-insecure-skip-verify[enable insecure SSL verification]'
+ '--vault-client-token=[client token for accessing secrets within the Vault server]:client token'
+ '--vault-client-token-path=[absolute path to a file containing the Vault client token]: :_files'
+ '--vault-auth-backend=[auth backend to use for logging in to Vault]:auth backend'
+ '--vault-auth-backend-max-ttl=[time after which to force a re-login]: :_concourse_durations'
+ '--vault-retry-max=[the maximum time between retries when logging in or re-authing a secret]: :_concourse_durations'
+ '--vault-retry-initial=[the initial time between retries when logging in or re-authing a secret]: :_concourse_durations'
+ '*--vault-auth-param=[parameter to pass when logging in via the backend]: :_concourse_name_colon_values'
+ # Conjur Credential Management
+ '--conjur-appliance-url=[URL of the conjur instance]: :_urls'
+ '--conjur-account=[Conjur Account]:account'
+ '--conjur-cert-file=[cert file used if conjur instance is using a self signed cert]: :_files'
+ '--conjur-authn-login=[host username for conjur authn login]:host'
+ '--conjur-authn-api-key=[API key related to the host for conjur authn]:api_key'
+ '--conjur-authn-token-file=[token file used if conjur instance is running in k8s or iam]: :_files'
+ '--conjur-pipeline-secret-template=[Conjur secret identifier template used for pipeline specific parameter]:template'
+ '--conjur-team-secret-template=[Conjur secret identifier template used for team specific parameter]:template'
+ '--conjur-secret-template=[Conjur secret identifier template used for full path conjur secrets]:template'
+ # CredHub Credential Management
+ '--credhub-url=[CredHub server address used to access secrets]: :_urls'
+ '--credhub-path-prefix=[path under which to namespace credential lookup]:path'
+ '--credhub-ca-cert=[path to PEM-encoded CA cert files to use to verify the CredHub server SSL cert]: :_files'
+ '--credhub-client-cert=[path to the client certificate for mutual TLS authorization]: :_files'
+ '--credhub-client-key=[path to the client private key for mutual TLS authorization]: :_files'
+ '--credhub-insecure-skip-verify[enable insecure SSL verification]'
+ '--credhub-client-id=[client ID for CredHub authorization]:client ID'
+ '--credhub-client-secret=[client secret for CredHub authorization]:client secret'
+ # Dummy Credential Management
+ '--dummy-creds-var=[a YAML value to expose via credential management]:key_val'
+ # Kubernetes Credential Management
+ '--kubernetes-in-cluster[enable the Kubernetes in-cluster client]'
+ '--kubernetes-config-path=[path to Kubernetes config]: :_files'
+ '--kubernetes-namespace-prefix=[prefix to use for Kubernetes namespace]:prefix'
+ # AWS SecretsManager Credential Management
+ '--aws-secretsmanager-access-key=[AWS Access key ID]:access key'
+ '--aws-secretsmanager-secret-key=[AWS Secret Access Key]:secret key'
+ '--aws-secretsmanager-session-token=[AWS Session Token]:session token'
+ '--aws-secretsmanager-region=[AWS region to send requests to]:region'
+ '--aws-secretsmanager-pipeline-secret-template=[AWS Secrets Manager secret identifier template used for pipeline specific parameter]:template'
+ '--aws-secretsmanager-team-secret-template=[AWS Secrets Manager secret identifier template used for team specific parameter]:template'
+ '--aws-secretsmanager-shared-secret-template=[AWS Secrets Manager secret identifier template used for shared parameters]:template'
+ # AWS SSM Credential Management
+ '--aws-ssm-access-key=[AWS Access key ID]:access key'
+ '--aws-ssm-secret-key=[AWS Secret Access Key]:secret key'
+ '--aws-ssm-session-token=[AWS Session Token]:session token'
+ '--aws-ssm-region=[AWS region to send requests to]:region'
+ '--aws-ssm-pipeline-secret-template=[AWS SSM parameter name template used for pipeline specific parameter]:template'
+ '--aws-ssm-team-secret-template=[AWS SSM parameter name template used for team specific parameter]:template'
+ '--aws-ssm-shared-path=[AWS SSM parameter path used for shared parameters]: :_files'
+ )
-(( $+functions[_concourse_fly_set-team_args] )) ||
-_concourse_fly_set-team_args() {
- _arguments -C \
- '(- : *)'{-h,--help}'[display help information]' \
- '(-n --team-name)'{-n,--team-name=}'[the team to create or modify]: :_concourse_fly_teams' \
- '--non-interactive[force apply configuration]' \
- '*--local-user=[list of whitelisted local concourse users]: :_users' \
- '(-c --config)'{-c,--config=}'[configuration file for specifying team params]: :_concourse_config_files' \
- '*--bitbucket-cloud-user=[list of whitelisted Bitbucket Cloud users]:user name' \
- '*--bitbucket-cloud-team=[list of whitelisted Bitbucket Cloud teams]:team name' \
- '*--cf-user=[list of whitelisted CloudFoundry users]:user name' \
- '*--cf-org=[list of whitelisted CloudFoundry orgs]:org name' \
- '*--cf-space=[list of whitelisted CloudFoundry spaces]:space name' \
- '*--github-user=[list of whitelisted GitHub users]:user name' \
- '*--github-org=[list of whitelisted GitHub orgs]:org name' \
- '*--github-team=[list of whitelisted GitHub teams]:team name' \
- '*--gitlab-user=[list of whitelisted GitLab users]:user name' \
- '*--gitlab-group=[list of whitelisted GitLab groups]:group name' \
- '*--ldap-user=[list of whitelisted LDAP users]:user name' \
- '*--ldap-group=[list of whitelisted LDAP groups]:group name' \
- '*--oauth-user=[list of whitelisted OAuth2 users]:user name' \
- '*--oauth-group=[list of whitelisted OAuth2 groups]:group name' \
- '*--oidc-user=[list of whitelisted OIDC users]:user name' \
- '*--oidc-group=[list of whitelisted OIDC groups]:group name'
-}
+ local -a concourse_placement_strategies=(
+ volume-locality random fewest-build-containers limit-active-tasks limit-active-containers
+ limit-active-volumes
+ )
+ local -a concourse_second_placement_strategies=(
+ random fewest-build-containers limit-active-tasks limit-active-containers limit-active-volumes
+ )
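+
+ # volume-locality is omitted from the fallback strategy list above: it only
+ # applies to steps whose inputs provide volumes that can be co-located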
-(( $+functions[_concourse_fly_sync_args] )) ||
-_concourse_fly_sync_args() {
- _arguments -C \
- '(- : *)'{-h,--help}'[display help information]' \
- '(-f --force)'{-f,--force}'[sync even if versions already match]'
-}
+ local -a concourse_container_placement_strategy_configurations=(
+ '--container-placement-strategy=[method by which a worker is selected during container placement]: :(($concourse_placement_strategies))'
+ '--no-input-container-placement-strategy=[a second container placement strategy]: :(($concourse_second_placement_strategies))'
+ '--check-container-placement-strategy=[a third container placement strategy]: :(($concourse_second_placement_strategies))'
+ '--max-active-tasks-per-worker=[maximum allowed number of active build tasks per worker]:tasks'
+ '--max-active-containers-per-worker=[maximum allowed number of active containers per worker]:containers'
+ '--max-active-volumes-per-worker=[maximum allowed number of active volumes per worker]:volumes'
+ )
-(( $+functions[_concourse_fly_targets_args] )) ||
-_concourse_fly_targets_args() {
- _arguments -C \
- '(- : *)'{-h,--help}'[display help information]'
-}
+ local -a concourse_metric_configurations=(
+ # Metrics & Diagnostics
+ '--metrics-host-name=[host string to attach to emitted metrics]: :_hosts'
+ '*--metrics-attribute=[a key-value attribute to attach to emitted metrics]: :_concourse_name_colon_values'
+ '--metrics-buffer-size=[size of the buffer used in emitting event metrics(default: 1000)]:size'
+ '--capture-error-metrics[enable capturing of error log metrics]'
+ # Metric Emitter (Datadog)
+ '--datadog-agent-host=[datadog agent host to expose dogstatsd metrics]: :_hosts'
+ '--datadog-agent-port=[datadog agent port to expose dogstatsd metrics]: :_concourse_ports'
+ '--datadog-agent-uds-filepath=[Datadog agent unix domain socket filepath]: :_files'
+ '--datadog-prefix=[prefix for all metrics to easily find them in Datadog]:prefix'
+ # Metric Emitter (InfluxDB)
+ '--influxdb-url=[influxDB server address to emit points to]: :_urls'
+ '--influxdb-database=[influxDB database to write points to]:database name'
+ '--influxdb-username=[influxDB server username]: :_users'
+ '--influxdb-password=[influxDB server password]:password'
+ '--influxdb-insecure-skip-verify[skip SSL verification when emitting to InfluxDB]'
+ '--influxdb-batch-size=[number of points to batch together when emitting to InfluxDB(default: 5000)]:size'
+ '--influxdb-batch-duration=[duration to wait before emitting a batch of points to InfluxDB(default: 300s)]: :_concourse_durations'
+ # Metric Emitter (Lager)
+ '--emit-to-logs[emit metrics to logs]'
+ # Metric Emitter (NewRelic)
+ '--newrelic-account-id=[New Relic Account ID]:account ID'
+ '--newrelic-api-key=[New Relic Insights API Key]:API key'
+ '--newrelic-insights-api-url=[base URL for the Insights Insert API]: :_urls'
+ '--newrelic-service-prefix=[an optional prefix for emitted New Relic events]:prefix'
+ '--newrelic-batch-size=[number of events to batch together before emitting(default: 2000)]:size'
+ '--newrelic-batch-duration=[length of time to wait between emitting(default: 60s)]: :_concourse_durations'
+ '--newrelic-batch-disable-compression[disable compression of the batch before sending it]'
+ # Metric Emitter (Prometheus)
+ '--prometheus-bind-ip=[IP to listen on to expose Prometheus metrics]: :_concourse_ip_addresses'
+ '--prometheus-bind-port=[port to listen on to expose Prometheus metrics]: :_concourse_ports'
+ )
-(( $+functions[_concourse_fly_teams_args] )) ||
-_concourse_fly_teams_args() {
- _arguments -C \
- '(- : *)'{-h,--help}'[display help information]' \
- '(-d --details)'{-d,--details}'[print authentication configuration]' \
- '--json[print command result as JSON]'
-}
+ local -a concourse_tracing_configurations=(
+ '--tracing-service-name=[service name to attach to traces as metadata(default: concourse-web)]:name'
+ '--tracing-attribute=[attributes to attach to traces as metadata]:attributes'
+ '--tracing-honeycomb-api-key=[honeycomb.io api key]:key'
+ '--tracing-honeycomb-dataset=[honeycomb.io dataset name]:name'
+ '--tracing-jaeger-endpoint=[jaeger http-based thrift collector]:endpoint'
+ '--tracing-jaeger-tags=[tags to add to the components]:tags'
+ '--tracing-jaeger-service=[jaeger process service name(default: web)]:name'
+ "--tracing-stackdriver-projectid=[GCP's project ID]:project_id"
+ '--tracing-otlp-address=[OTLP address to send traces to]:address'
+ '--tracing-otlp-header=[headers to attach to each tracing message]:header'
+ '--tracing-otlp-use-tls[whether to use TLS or not]'
+ )
-(( $+functions[_concourse_fly_trigger-job_args] )) ||
-_concourse_fly_trigger-job_args() {
- _arguments -C \
- '(- : *)'{-h,--help}'[display help information]' \
- '(-j --job)'{-j,--job=}'[name of a job to trigger]: :_concourse_fly_pipeline_slash_jobs' \
- '(-w --watch)'{-w,--watch}'[start watching the build output]'
-}
+ local -a concourse_policy_check_agent_configurations=(
+ # Policy Checking
+ '--policy-check-filter-http-method=[API http method to go through policy check]:method'
+ '--policy-check-filter-action=[actions in the list will go through policy check]:actions'
+ '--policy-check-filter-action-skip=[actions in the list will not go through policy check]:actions'
+ # Policy Check Agent (Open Policy Agent):
+ '--opa-url=[OPA policy check endpoint]:url:_urls'
+ '--opa-timeout=[OPA request timeout(default: 5s)]: :_concourse_durations'
+ '--opa-result-allowed-key=[key in the OPA result indicating whether the policy check passed]:key'
+ '--opa-result-should-block-key=[key in the OPA result indicating whether the current action should be blocked]:key'
+ '--opa-result-messages-key=[key of the messages in the OPA result]:key'
+ )
-(( $+functions[_concourse_fly_unpause-job_args] )) ||
-_concourse_fly_unpause-job_args() {
- _arguments -C \
- '(- : *)'{-h,--help}'[display help information]' \
- '(-j --job)'{-j,--job=}'[name of a job to unpause]: :_concourse_fly_pipeline_slash_jobs'
-}
+ local -a concourse_web_server_configurations=(
+ '--x-frame-options=[the value to set for X-Frame-Options]:options'
+ '--content-security-policy=[value to set for Content-Security-Policy header]:value'
+ '--strict-transport-security=[value to set for the Strict-Transport-Security header]:value'
+ '--cluster-name=[a name for this Concourse cluster, to be displayed on the dashboard page]:name'
+ '--client-id=[client ID to use for login flow(default: concourse-web)]:id'
+ '--client-secret=[client secret to use for login flow]:secret'
+ )
-(( $+functions[_concourse_fly_unpause-pipeline_args] )) ||
-_concourse_fly_unpause-pipeline_args() {
- _arguments -C \
- '(- : *)'{-h,--help}'[display help information]' \
- '(-p --pipeline)'{-p,--pipeline=}'[pipeline to unpause]: :_concourse_fly_pipelines'
-}
+ local -a concourse_gc_configurations=(
+ '--gc-interval=[interval on which to perform garbage collection]: :_concourse_durations'
+ '--gc-one-off-grace-period=[period after which one-off build containers will be garbage-collected]: :_concourse_durations'
+ '--gc-missing-grace-period=[period after which to reap containers and volumes that were created but went missing from the worker]: :_concourse_durations'
+ '--gc-hijack-grace-period=[period after which hijacked containers will be garbage collected]: :_concourse_durations'
+ '--gc-failed-grace-period=[period after which failed containers will be garbage collected]: :_concourse_durations'
+ '--gc-check-recycle-period=[period after which to reap checks that are completed]: :_concourse_durations'
+ '--gc-var-source-recycle-period=[period after which to reap var_sources that are not used]: :_concourse_durations'
+ )
-(( $+functions[_concourse_fly_validate-pipeline_args] )) ||
-_concourse_fly_validate-pipeline_args() {
+ local -a concourse_syslog_configurations=(
+ '--syslog-hostname=[client hostname with which the build logs will be sent to the syslog server]: :_hosts'
+ '--syslog-address=[remote syslog server address with port]: :_concourse_host_colon_ports'
+ '--syslog-transport=[transport protocol for syslog messages]:protocol:(tcp udp tls)'
+ '--syslog-drain-interval=[interval over which checking is done for new build logs to send to syslog server]: :_concourse_durations'
+ '--syslog-ca-cert=[paths to PEM-encoded CA cert files to use to verify the Syslog server SSL cert]: :_files'
+ )
- local context state state_descr line ret=1
- typeset -A opt_args
+ local -a concourse_authentication_configurations=(
+ '--cookie-secure[force sending secure flag on http cookies]'
+ '--auth-duration=[length of time for which tokens are valid]: :_concourse_durations'
+ '--session-signing-key=[file containing an RSA private key, used to sign auth tokens]: :_files'
+ '--password-connector=[connector to use when authenticating via "fly login -u ... -p ..."]: :(local ldap)'
+ '*--add-local-user=[list of username:password combinations for all your local users]: :_concourse_username_colon_passwords'
+ '*--add-client=[list of client_id:client_secret combinations]:pair'
+ # Authentication Main Team
+ '*--main-team-local-user=[list of whitelisted local concourse users]: :_users'
+ {-c,--main-team-config=}'[configuration file for specifying team params]: :_concourse_config_files'
+ # Authentication Main Team Bitbucket Cloud
+ '*--main-team-bitbucket-cloud-user=[list of whitelisted Bitbucket Cloud users]: :_users'
+ '*--main-team-bitbucket-cloud-team=[list of whitelisted Bitbucket Cloud teams]:team'
+ # Authentication Main Team CloudFoundry
+ '*--main-team-cf-user=[list of whitelisted CloudFoundry users]: :_users'
+ '*--main-team-cf-org=[list of whitelisted CloudFoundry orgs]:org name'
+ '*--main-team-cf-space=[list of whitelisted CloudFoundry spaces]:space name'
+ '*--main-team-cf-space-with-any-role=[a whitelisted CloudFoundry space for users with any role]:space'
+ '*--main-team-cf-space-with-developer-role=[a whitelisted CloudFoundry space for users with the developer role]:space'
+ '*--main-team-cf-space-with-auditor-role=[a whitelisted CloudFoundry space for users with the auditor role]:space'
+ '*--main-team-cf-space-with-manager-role=[a whitelisted CloudFoundry space for users with the manager role]:space'
+ '*--main-team-cf-space-guid=[a whitelisted CloudFoundry space guid]:space_guid'
+ # Authentication Main Team GitHub
+ '*--main-team-github-user=[list of whitelisted GitHub users]: :_users'
+ '*--main-team-github-org=[list of whitelisted GitHub orgs]:org name'
+ '*--main-team-github-team=[list of whitelisted GitHub teams]:team name'
+ # Authentication Main Team GitLab
+ '*--main-team-gitlab-user=[list of whitelisted GitLab users]: :_users'
+ '*--main-team-gitlab-group=[list of whitelisted GitLab groups]:group name'
+ # Authentication Main Team LDAP
+ '*--main-team-ldap-user=[list of whitelisted LDAP users]: :_users'
+ '*--main-team-ldap-group=[list of whitelisted LDAP groups]:group name'
+ # Authentication Main Team Microsoft
+ '*--main-team-microsoft-user=[a whitelisted Microsoft user]: :_users'
+ '*--main-team-microsoft-group=[a whitelisted Microsoft group]:group name'
+ # Authentication Main Team OAuth2
+ '*--main-team-oauth-user=[list of whitelisted OAuth2 users]: :_users'
+ '*--main-team-oauth-group=[list of whitelisted OAuth2 groups]:group name'
+ # Authentication Main Team OIDC
+ '*--main-team-oidc-user=[list of whitelisted OIDC users]: :_users'
+ '*--main-team-oidc-group=[list of whitelisted OIDC groups]:group name'
+ # Authentication Main Team SAML
+ '*--main-team-saml-user=[a whitelisted SAML user]: :_users'
+ '*--main-team-saml-group=[a whitelisted SAML group]:group name'
+ # Authentication Bitbucket Cloud
+ '--bitbucket-cloud-client-id=[client id]:client ID'
+ '--bitbucket-cloud-client-secret=[client secret]:client secret'
+ # Authentication CloudFoundry
+ '--cf-client-id=[client id]:client ID'
+ '--cf-client-secret=[client secret]:client secret'
+ '--cf-api-url=[the base API URL of your CF deployment]: :_urls'
+ '--cf-ca-cert=[CA Certificate]: :_files'
+ '--cf-skip-ssl-validation[skip SSL validation]'
+ # Authentication GitHub
+ '--github-client-id=[client id]:client ID'
+ '--github-client-secret=[client secret]:client secret'
+ '--github-host=[hostname of GitHub Enterprise deployment]: :_hosts'
+ '--github-ca-cert=[CA certificate of GitHub Enterprise deployment]: :_files'
+ # Authentication GitLab
+ '--gitlab-client-id=[client id]:client ID'
+ '--gitlab-client-secret=[client secret]:client secret'
+ '--gitlab-host=[hostname of Gitlab Enterprise deployment]: :_hosts'
+ # Authentication LDAP
+ '--ldap-display-name=[the auth provider name displayed to users on the login page]:display name'
+ '--ldap-host=[the host and optional port of the LDAP server]: :_hosts'
+ '--ldap-bind-dn=[bind DN for searching LDAP users and groups]:bind DN'
+ '--ldap-bind-pw=[bind Password for the user specified by bind-dn]:bind password'
+ '--ldap-insecure-no-ssl[required if LDAP host does not use TLS]'
+ '--ldap-insecure-skip-verify[skip certificate verification]'
+ '--ldap-start-tls[start on insecure port, then negotiate TLS]'
+ '--ldap-ca-cert=[CA certificate]: :_files'
+ '--ldap-username-prompt=[prompt when logging in through the UI]:prompt'
+ '--ldap-user-search-base-dn=[baseDN to start the search from]:baseDN'
+ '--ldap-user-search-filter=[optional filter to apply when searching the directory]:filter'
+ '--ldap-user-search-username=[attribute to match against the inputted username]:attribute'
+ "--ldap-user-search-scope=[can either be: 'sub' - search the whole sub tree or 'one' - only search one level]:scope:((sub one))"
+ '--ldap-user-search-id-attr=[a mapping of attributes on the user entry to claims]:attribute mapping'
+ '--ldap-user-search-email-attr=[a mapping of attributes on the user entry to claims]:attribute mapping'
+ '--ldap-user-search-name-attr=[a mapping of attributes on the user entry to claims]:attribute mapping'
+ '--ldap-group-search-base-dn=[baseDN to start the search from]:baseDN'
+ '--ldap-group-search-filter=[optional filter to apply when searching the directory]:filter'
+ "--ldap-group-search-scope=[can either be: 'sub' - search the whole sub tree or 'one' - only search one level]:scope:(sub one)"
+ "--ldap-group-search-user-attr=[adds an additional requirement to the filter that an attribute in the group match the user's attribute value]:attribute"
+ "--ldap-group-search-group-attr=[adds an additional requirement to the filter that an attribute in the group match the user's attribute value]:attribute"
+ '--ldap-group-search-name-attr=[the attribute of the group that represents its name]:attribute'
+ # Authentication Microsoft
+ '--microsoft-client-id=[Microsoft client ID]:id'
+ '--microsoft-client-secret=[Microsoft client secret]:secret'
+ '--microsoft-tenant=[Microsoft Tenant limitation]:tenant'
+ '--microsoft-groups=[allowed Active Directory Groups]:groups'
+ '--microsoft-only-security-groups[only fetch security groups]'
+ # Authentication OAuth2
+ '--oauth-display-name=[the auth provider name displayed to users on the login page]:display name'
+ '--oauth-client-id=[client id]:client ID'
+ '--oauth-client-secret=[client secret]:client secret'
+ '--oauth-auth-url=[Authorization URL]: :_urls'
+ '--oauth-token-url=[Token URL]: :_urls'
+ '--oauth-userinfo-url=[UserInfo URL]: :_urls'
+ '*--oauth-scope=[any additional scopes that need to be requested during authorization]:scope'
+ '--oauth-groups-key=[the groups key indicates which claim to use to map external groups to Concourse teams]:group key'
+ '--oauth-user-id-key=[the user id key indicates which claim to use to map an external user id to a Concourse user id]:id key'
+ '--oauth-user-name-key=[the user name key indicates which claim to use to map an external user name to a Concourse user name]:name key'
+ '--oauth-ca-cert=[CA Certificate]: :_files'
+ '--oauth-skip-ssl-validation[skip SSL validation]'
+ # Authorization OIDC
+ '--oidc-display-name=[the auth provider name displayed to users on the login page]:display name'
+ '--oidc-issuer=[An OIDC issuer URL that will be used to discover provider configuration]: :_urls'
+ '--oidc-client-id=[client id]:client ID'
+ '--oidc-client-secret=[client secret]:client secret'
+ '*--oidc-scope=[any additional scopes that need to be requested during authorization]:scope'
+ '--oidc-groups-key=[the groups key indicates which claim to use to map external groups to Concourse teams]:group key'
+ '--oidc-user-name-key=[the user name key indicates which claim to use to map an external user name to a Concourse user name]:user name key'
+ '--oidc-ca-cert=[CA Certificate]: :_files'
+ '--oidc-skip-ssl-validation[skip SSL validation]'
+ '--oidc-disable-groups[disable OIDC groups claims]'
+ '--oidc-skip-email-verified-validation[ignore the email_verified claim from the upstream provider]'
+ '--oidc-disable-get-user-info[do not query the additional claims provided by the UserInfo endpoint]'
+ # Authorization SAML
+ '--saml-display-name=[SAML auth provider name displayed to users on the login page]:name'
+ '--saml-sso-url=[SAML SSO URL used for POST value]:url:_urls'
+ '--saml-ca-cert=[SAML CA Certificate]: :_files'
+ "--saml-entity-issuer=[SAML manually specify dex's issuer value]"
+ '--saml-sso-issuer=[issuer value expected in the SAML response]:issuer'
+ '--saml-username-attr=[SAML concourse user name]:name'
+ '--saml-email-attr=[SAML concourse email]:name'
+ '--saml-groups-attr=[SAML concourse teams]:name'
+ '--saml-groups-delim=[groups are returned as string with this delimiter]:delimiter'
+ '--saml-name-id-policy-format=[requested format of the NameID]:format'
+ '--saml-skip-ssl-validation[SAML skip SSL validation]'
+ )
- _arguments -C \
- '(- : *)'{-h,--help}'[display help information]' \
- '(-c --config)'{-c,--config=}'[pipeline configuration file]: :_concourse_config_files' \
- '(-s --strict)'{-s,--strict}'[fail on warnings]' \
- '(-o --output)'{-o,--output}'[output templated pipeline to stdout]' \
- '*'{-v,--var=}'[specify a string value to set for a variable in the pipeline]: :->var' \
- '*'{-y,--yaml-var=}'[specify a YAML value to set for a variable in the pipeline]: :->var' \
- '(-l --load-vars-from)'{-l,--load-vars-from=}'[variable flag that can be used for filling in template values in configuration from a YAML file]: :_files' \
- && ret=0
+ local -a concourse_feature_flags=(
+ '--enable-global-resources[enable equivalent resources across pipelines and teams to share a single version history]'
+ '--enable-redact-secrets[enable redacting secrets in build logs]'
+ '--enable-rerun-when-worker-disappears[enable automatically build rerun when worker disappears]'
+ '--enable-across-step[enable the experimental across step to be used in jobs]'
+ '--enable-pipeline-instances[enable pipeline instances]'
+ '--enable-p2p-volume-streaming[enable P2P volume streaming]'
+ '--enable-cache-streamed-volumes[streamed resource volumes will be cached on the destination]'
+ '--enable-resource-causality[enable the resource causality page]'
+ )
- _concourse_fly_pipeline_config=${(v)opt_args[(i)-c|--config]}
+ local -a concourse_tsa_configurations=(
+ '--tsa-log-level=[minimum level of logs to see]: :_concourse_log_levels'
+ '--tsa-bind-ip=[IP address on which to listen for SSH]: :_concourse_ip_addresses'
+ '--tsa-peer-address=[network address of this web node, reachable by other web nodes]: :_urls'
+ '--tsa-bind-port=[port on which to listen for SSH]: :_concourse_ports'
+ '--tsa-debug-bind-ip=[IP address on which to listen for the pprof debugger endpoints]: :_concourse_ip_addresses'
+ '--tsa-debug-bind-port=[port on which to listen for the pprof debugger endpoints]: :_concourse_ports'
+ '--tsa-host-key=[path to private key to use for the SSH server]: :_files'
+ '--tsa-authorized-keys=[path to file containing keys to authorize, in SSH authorized_keys format]: :_files'
+ '--tsa-team-authorized-keys=[path to file containing keys to authorize, in SSH authorized_keys format]: :_concourse_name_colon_paths'
+ '--tsa-atc-url=[ATC API endpoints to which workers will be registered]: :_urls'
+ '--tsa-client-id=[client used to fetch a token from the auth server]:id'
+ '--tsa-client-secret=[client secret used to fetch a token from the auth server]:secret'
+ '--tsa-token-url=[token endpoint of the auth server]: :_urls'
+ '*--tsa-scope=[scopes to request from the auth server]:scope'
+ '--tsa-heartbeat-interval=[interval on which to heartbeat workers to the ATC]: :_concourse_durations'
+ '--tsa-garden-request-timeout=[how long to wait for requests to Garden to complete]: :_concourse_durations'
+ '--tsa-cluster-name=[a name for this Concourse cluster]:name'
+ '--tsa-log-cluster-name[log cluster name]'
+ )
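+
+ # zsh locals are dynamically scoped, so the option-spec arrays defined above
+ # remain visible inside _concourse_quickstart and _concourse_web when they
+ # are dispatched below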
- case $state in
- (var)
- _concourse_fly_var_equal_values && ret=0
- ;;
+ case $command in
+ (quickstart)
+ _concourse_quickstart && ret=0
+ ;;
+ (web)
+ _concourse_web && ret=0
+ ;;
esac
return ret
}
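+
+# the dispatcher above expects the sub-command name as its only argument,
+# e.g. `_concourse_quickstart_or_web quickstart` (a sketch; the actual call
+# sites live elsewhere in this script)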
-(( $+functions[_concourse_fly_volumes_args] )) ||
-_concourse_fly_volumes_args() {
- _arguments -C \
+(( $+functions[_concourse_generate-key] )) ||
+_concourse_generate-key() {
+ _arguments \
'(- : *)'{-h,--help}'[display help information]' \
- '(-d --details)'{-d,--details}'[print additional information for each volume]' \
- '--json[print command result as JSON]'
+ '(-t --type)'{-t,--type=}'[the type of key to generate]:key type:(rsa ssh)' \
+ '(-f --filename)'{-f,--filename=}'[file path where the key shall be created. When generating ssh keys, the public key will be stored in a file with the same name but with .pub appended]: :_files' \
+ '(-b --bits)'{-b,--bits=}'[the number of bits in the key to create]:integer'
}
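+
+# example invocation covered by the specs above (the file path is
+# illustrative):
+#   concourse generate-key --type ssh --filename ./tsa_host_key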
-(( $+functions[_concourse_fly_watch_args] )) ||
-_concourse_fly_watch_args() {
- _arguments -C \
+(( $+functions[_concourse_land-worker] )) ||
+_concourse_land-worker() {
+ _arguments \
'(- : *)'{-h,--help}'[display help information]' \
- '(-j --job)'{-j,--job=}'[watches builds of the given job]: :_concourse_fly_pipeline_slash_jobs' \
- '(-b --build)'{-b,--build=}'[watches a specific build]: :_concourse_fly_builds' \
- '(-t --timestamps)'{-t,--timestamps}'[print with local timestamp]'
+ '--name=[the name of the worker you wish to land]:worker name' \
+ '*--tsa-host=[TSA host to forward the worker through]: :_concourse_host_colon_ports' \
+ '--tsa-public-key=[file containing a public key to expect from the TSA]: :_files' \
+ '--tsa-worker-private-key=[file containing the private key to use when authenticating to the TSA]: :_files'
}
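+
+# example invocation (worker name and TSA address are illustrative):
+#   concourse land-worker --name builder-01 --tsa-host 127.0.0.1:2222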
-(( $+functions[_concourse_fly_workers_args] )) ||
-_concourse_fly_workers_args() {
- _arguments -C \
+(( $+functions[_concourse_migrate] )) ||
+_concourse_migrate() {
+ _arguments \
'(- : *)'{-h,--help}'[display help information]' \
- '(-d --details)'{-d,--details}'[print additional information for each worker]' \
- '--json[print command result as JSON]'
-}
-
-(( $+functions[_concourse_fly_targets] )) ||
-_concourse_fly_targets() {
- local targets=($(_call_program targets $service targets | awk '{print $1}'))
- _describe -t targets 'target' targets $@ || _message 'no target found'
-}
-
-(( $+functions[_concourse_fly_teams] )) ||
-_concourse_fly_teams() {
- if [[ -n ${_concourse_fly_target} ]]; then
- local teams=($(_call_program teams $service -t ${_concourse_fly_target} teams | awk '{print $1}'))
- _describe -t teams 'team' teams $@ || _message 'no team found'
- else
- _message 'team'
- fi
-}
-
-(( $+functions[_concourse_fly_pipelines] )) ||
-_concourse_fly_pipelines() {
- if [[ -n ${_concourse_fly_target} ]]; then
- local pipelines=($(_call_program pipelines $service -t ${_concourse_fly_target} pipelines | awk '{print $1}'))
- _describe -t pipelines 'pipeline' pipelines $@ || _message 'no pipeline found'
- else
- _message 'pipeline'
- fi
-}
-
-(( $+functions[_concourse_fly_pipeline_jobs] )) ||
-_concourse_fly_pipeline_jobs() {
- if [[ -n ${_concourse_fly_target} ]] && [[ -n ${_concourse_fly_pipeline} ]]; then
- local jobs=($(_call_program jobs $service -t ${_concourse_fly_target} jobs -p ${_concourse_fly_pipeline} 2>&1 | awk '{print $1}'))
- _describe -t jobs "${_concourse_fly_pipeline} job" jobs $@ || _message 'no job found'
- else
- _message 'job'
- fi
-}
-
-(( $+functions[_concourse_fly_pipeline_resources] )) ||
-_concourse_fly_pipeline_resources() {
- if [[ -n ${_concourse_fly_target} ]] && [[ -n ${_concourse_fly_pipeline} ]]; then
- local resources=($(_call_program resources $service -t ${_concourse_fly_target} resources -p ${_concourse_fly_pipeline} | awk '{print $1}'))
- _describe -t resources 'resource' resources $@ || _message 'no resource found'
- else
- _message 'resource'
- fi
-}
-
-(( $+functions[_concourse_fly_pipeline_resource_types] )) ||
-_concourse_fly_pipeline_resource_types() {
- if [[ -n ${_concourse_fly_target} ]] && [[ -n ${_concourse_fly_pipeline} ]]; then
- local resource_types=($(_call_program resource-types $service -t ${_concourse_fly_target} resources -p ${_concourse_fly_pipeline} | awk '{print $2}'))
- _describe -t resource-types 'resource type' resource_types $@ || _message 'no resource type found'
- else
- _message 'resource type'
- fi
-}
-
-(( $+functions[_concourse_fly_workers] )) ||
-_concourse_fly_workers() {
- if [[ -n ${_concourse_fly_target} ]]; then
- local workers=($(_call_program workers $service -t ${_concourse_fly_target} workers | awk '{print $1}'))
- _describe -t workers 'worker' workers $@ || _message 'no worker found'
- else
- _message 'worker'
- fi
-}
-
-(( $+functions[_concourse_fly_builds] )) ||
-_concourse_fly_builds() {
- if [[ -n ${_concourse_fly_target} ]]; then
- local builds=($(_call_program builds $service -t ${_concourse_fly_target} builds | awk '{print $1}'))
- _describe -t builds 'build' builds $@ || _message 'no build found'
- else
- _message 'build'
- fi
-}
-
-(( $+functions[_concourse_fly_pipeline_resource_versions] )) ||
-_concourse_fly_pipeline_resource_versions() {
- if [[ -n ${_concourse_fly_target} ]] && [[ -n ${_concourse_fly_resource} ]]; then
- local resource_versions=($(_call_program resource-versions $service -t ${_concourse_fly_target} resource-versions -r ${_concourse_fly_resource} | awk '{print $2}'))
- _describe -t resource-versions 'resource version' resource_versions $@ || _message 'no version found'
- else
- _message 'resource version'
- fi
-}
-
-(( $+functions[_concourse_fly_pipeline_config_vars] )) ||
-_concourse_fly_pipeline_config_vars() {
- if [[ -n ${_concourse_fly_pipeline_config} ]]; then
- local variables=($(grep -Po '(?<=\(\()[^\)]+' ${_concourse_fly_pipeline_config}))
- _describe -t variables 'variables' variables $@ || _message 'no variable found'
- else
- _message 'variable'
- fi
-}
-
-(( $+functions[_concourse_fly_pipeline_config_inputs] )) ||
-_concourse_fly_pipeline_config_inputs() {
- if [[ -n ${_concourse_fly_pipeline_config} ]]; then
- if (( $+commands[yq] )); then
- local inputs=($(yq -r '.. | .inputs? | arrays | .[].name' ${_concourse_fly_pipeline_config} 2>&1))
- _describe -t inputs 'input' inputs $@ || _message 'no input found'
- else
- _message 'install yq (https://github.com/kislyuk/yq) to get completion of inputs'
- fi
- else
- _message 'input'
- fi
-}
-
-(( $+functions[_concourse_fly_pipeline_config_outputs] )) ||
-_concourse_fly_pipeline_config_outputs() {
- if [[ -n ${_concourse_fly_pipeline_config} ]]; then
- if (( $+commands[yq] )); then
- local outputs=($(yq -r '.. | .outputs? | arrays | .[].name' ${_concourse_fly_pipeline_config}))
- _describe -t outputs 'output' outputs $@ || _message 'no output found'
- else
- _message 'install yq (https://github.com/kislyuk/yq) to get completion of outputs'
- fi
- else
- _message 'output'
- fi
-}
-
-(( $+functions[_concourse_fly_pipeline_resource_type_versions] )) ||
-_concourse_fly_pipeline_resource_type_versions() {
- # seems like there is no command for listing resource type versions...
- _message 'resource type version'
-}
-
-(( $+functions[_concourse_fly_tags] )) ||
-_concourse_fly_tags() {
- # seems like there is no command for listing tags...
- _message 'tag'
+ '--encryption-key=[a 16 or 32 length key used to encrypt sensitive information before storing it in the database]:encryption key' \
+ '(- : *)--current-db-version[print the current database version and exit]' \
+ '(- : *)--supported-db-version[print the max supported database version and exit]' \
+ '--migrate-db-to-version=[migrate to the specified database version and exit]:database version' \
+ '--migrate-to-latest-version[migrate to the latest migration version and exit]' \
+ $concourse_postgres_configurations[@]
}
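+
+# $concourse_postgres_configurations is assumed to be a shared option-spec
+# array (the --postgres-* connection flags) defined elsewhere in this script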
-(( $+functions[_concourse_fly_dates] )) ||
-_concourse_fly_dates() {
- # _dates completer does not seem to work on zsh 5.7.1
- _dates -f '%Y-%m-%d %H:%M:%S'
-}
+(( $+functions[_concourse_quickstart] )) ||
+_concourse_quickstart() {
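+ # quickstart runs the web node and a worker in a single process; the worker
+ # flags below mirror `concourse worker` options with a worker- prefix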
+ local -a concourse_worker_configurations=(
+ '--worker-name=[name to set for the worker during registration]:name'
+ '--worker-tag=[tag to set during registration]:tag'
+ '--worker-team=[name of the team that this worker will be assigned to]:team'
+ '--worker-http-proxy=[HTTP proxy endpoint to use for containers]:proxy:_urls'
+ '--worker-https-proxy=[HTTPS proxy endpoint to use for containers]:proxy:_urls'
+ '*--worker-no-proxy=[list of addresses for which the proxy should be bypassed]:url:_urls'
+ '--worker-ephemeral[worker will be immediately removed upon stalling]'
+ '--worker-certs-dir=[directory to use when creating the resource certificates volume]:dir:_files -/'
+ '--worker-work-dir=[directory in which to place container data]:dir:_files -/'
+ '--worker-bind-ip=[IP address on which to listen for the Garden server]:ip'
+ '--worker-bind-port=[port on which to listen for the Garden server]:port'
+ '--worker-debug-bind-ip=[IP address on which to listen for the pprof debugger endpoints]:ip'
+ '--worker-debug-bind-port=[port on which to listen for the pprof debugger endpoints]:port'
+ '--worker-healthcheck-bind-ip=[IP address on which to listen for health checking requests]:ip'
+ '--worker-healthcheck-bind-port=[port on which to listen for health checking requests]:port'
+ '--worker-healthcheck-timeout=[HTTP timeout for the full duration of health checking]: :_concourse_durations'
+ '--worker-sweep-interval=[interval on which containers and volumes will be garbage collected from the worker]: :_concourse_durations'
+ '--worker-volume-sweeper-max-in-flight=[maximum number of volumes which can be swept in parallel]:number'
+ '--worker-container-sweeper-max-in-flight=[maximum number of containers which can be swept in parallel]:number'
+ '--worker-rebalance-interval=[duration after which the registration should be swapped to another random SSH gateway]: :_concourse_durations'
+ '--worker-connection-drain-timeout=[duration after which a worker should give up draining forwarded connections on shutdown]: :_concourse_durations'
+ '--worker-external-garden-url=[API endpoint of an externally managed Garden server to use instead of running the embedded Garden server]:url:_urls'
+ '--worker-resource-types=[path to directory containing resource types the worker should advertise]:dir:_files -/'
+ '--worker-log-level=[minimum level of logs to see]:level:_concourse_log_levels'
-(( $+functions[_concourse_fly_pipeline_slash_jobs] )) ||
-_concourse_fly_pipeline_slash_jobs() {
- local ret=1
- if compset -P '*/'; then
- _concourse_fly_pipeline="${${IPREFIX%/}##*=}"
- _concourse_fly_pipeline_jobs && ret=0
- else
- _concourse_fly_pipelines -qS/ && ret=0
- fi
- return ret
-}
+ # TSA Configuration:
+ '--worker-tsa-host=[TSA host to forward the worker through]:host'
+ '--worker-tsa-public-key=[file containing a public key to expect from the TSA]:file:_files'
+ '--worker-tsa-worker-private-key=[file containing the private key to use when authenticating to the TSA]:file:_files'
-(( $+functions[_concourse_fly_pipeline_slash_resources] )) ||
-_concourse_fly_pipeline_slash_resources() {
- local ret=1
- if compset -P '*/'; then
- _concourse_fly_pipeline="${${IPREFIX%/}##*=}"
- _concourse_fly_pipeline_resources && ret=0
- else
- _concourse_fly_pipelines -qS/ && ret=0
- fi
- return ret
-}
+ # Tracing:
+ '--worker-tracing-service-name=[service name to attach to traces as metadata]:name'
+ '--worker-tracing-attribute=[attributes to attach to traces as metadata]:attr'
+ '--worker-tracing-honeycomb-api-key=[honeycomb.io api key]:key'
+ '--worker-tracing-honeycomb-dataset=[honeycomb.io dataset name]:dataset'
+ '--worker-tracing-jaeger-endpoint=[jaeger http-based thrift collector]:endpoint'
+ '--worker-tracing-jaeger-tags=[tags to add to the components]:tags'
+ '--worker-tracing-jaeger-service=[jaeger process service name]:service'
+ "--worker-tracing-stackdriver-projectid=[GCP's Project ID]:id"
+ '--worker-tracing-otlp-address=[otlp address to send traces to]:address'
+ '--worker-tracing-otlp-header=[headers to attach to each tracing message]:header'
+ '--worker-tracing-otlp-use-tls[whether to use tls or not]'
-(( $+functions[_concourse_fly_pipeline_slash_resource_types] )) ||
-_concourse_fly_pipeline_slash_resource_types() {
- local ret=1
- if compset -P '*/'; then
- _concourse_fly_pipeline="${${IPREFIX%/}##*=}"
- _concourse_fly_pipeline_resource_types && ret=0
- else
- _concourse_fly_pipelines -qS/ && ret=0
- fi
- return ret
-}
+ # Runtime Configuration
+ '--worker-runtime=[runtime to use with the worker]:runtime:(guardian containerd houdini)'
-(( $+functions[_concourse_fly_var_equal_values] )) ||
-_concourse_fly_var_equal_values() {
- local ret=1
- if compset -P '*='; then
- _message 'value' && ret=0
- else
- _concourse_fly_pipeline_config_vars -qS= && ret=0
- fi
- return ret
-}
+ # Guardian Configuration
+ '--worker-garden-bin=[path to a garden server executable]:bin'
+ '--worker-garden-request-timeout=[how long to wait for requests to the Garden server to complete]:time:_concourse_durations'
+ '--worker-garden-config=[path to a config file to use for the Garden backend]:path:_files'
-(( $+functions[_concourse_fly_input_equal_paths] )) ||
-_concourse_fly_input_equal_paths() {
- local ret=1
- if compset -P '*='; then
- _files && ret=0
- else
- _concourse_fly_pipeline_config_inputs -qS= && ret=0
- fi
- return ret
-}
+ # DNS Proxy Configuration
+ '--worker-garden-dns-proxy-enable[enable proxy DNS server]'
-(( $+functions[_concourse_fly_output_equal_paths] )) ||
-_concourse_fly_output_equal_paths() {
- local ret=1
- if compset -P '*='; then
- _files && ret=0
- else
- _concourse_fly_pipeline_config_outputs -qS= && ret=0
- fi
- return ret
-}
+ # Container Networking
+ '--worker-garden-network-pool=[network range to use for dynamically allocated container subnets]:range'
-(( $+functions[_concourse_server] )) ||
-_concourse_server() {
+ # Limits:
+ '--worker-garden-max-containers=[maximum container capacity]:capacity'
- local context state state_descr line ret=1
- typeset -A opt_args
+ # Containerd Configuration:
+ '--worker-containerd-config=[path to a config file to use for the Containerd daemon]:file:_files'
+ '--worker-containerd-bin=[path to a containerd executable]:file:_files'
+ '--worker-containerd-init-bin=[path to an init executable]:file:_files'
+ '--worker-containerd-seccomp-profile=[path to a seccomp filter override]:file:_files'
+ '--worker-containerd-oci-hooks-dir=[path to the oci hooks dir]:dir:_files -/'
+ '--worker-containerd-cni-plugins-dir=[path to CNI network plugins]:dir:_files -/'
+ '--worker-containerd-request-timeout=[how long to wait for requests to Containerd to complete]: :_concourse_durations'
+ '--worker-containerd-max-containers=[max container capacity]:capacity'
+ '--worker-containerd-privileged-mode=[how many privileges privileged containers get]:type:(full fuse-only ignore)'
- _arguments -C \
- '(- : *)'{-h,--help}'[display help information]' \
- '(- : *)'{-v,--version}'[print the version of Concourse and exit]' \
- '(-): :->command' \
- '(-)*:: :->arguments' \
- && ret=0
+ # Containerd Networking
+ "--worker-containerd-external-ip=[IP address to use to reach container's mapped ports]:ip"
+ '--worker-containerd-dns-server=[DNS server IP address to use instead of automatically determined servers]:ip'
+ '--worker-containerd-additional-hosts=[additional entries to add to /etc/hosts in containers]:hosts'
+ '--worker-containerd-restricted-network=[network ranges to which traffic from containers will be restricted]:range'
+ '--worker-containerd-network-pool=[network range to use for dynamically allocated container subnets]:range'
+ '--worker-containerd-mtu=[MTU size for container network interfaces]:size'
+ "--worker-containerd-allow-host-access[allow containers to reach the host's network]"
- case $state in
- (command)
- _concourse_commands && ret=0
- ;;
- (arguments)
- curcontext=${curcontext%:*:*}:concourse-$words[1]:
- if (( $+functions[_concourse_${words[1]}_args] )); then
- _concourse_${words[1]}_args && ret=0
- else
- _message "unknown command ${words[1]}" && ret=1
- fi
- ;;
- esac
+ # DNS Proxy Configuration:
+ '--worker-containerd-dns-proxy-enable[enable proxy DNS server]'
- return ret
-}
+ # IPv6 Configuration:
+ '--worker-containerd-v6-enable[enable IPv6 networking]'
+ '--worker-containerd-v6-pool=[IPv6 network range to use for dynamically allocated container addresses]:range'
+ '--worker-containerd-v6-disable-masquerade[do not masquerade container traffic with the worker address]'
-(( $+functions[_concourse_commands] )) ||
-_concourse_commands() {
- local commands=(
- "generate-key:generate RSA key for use with Concourse components"
- "land-worker:safely drain a worker's assignments for temporary downtime"
- "migrate:run database migrations"
- "quickstart:run both 'web' and 'worker' together, auto-wired"
- "retire-worker:safely remove a worker from the cluster permanently"
- "web:run the web UI and build scheduler"
- "worker:run and register a worker"
+ # Baggageclaim Configuration:
+ '--worker-baggageclaim-log-level=[minimum level of logs to see]: :_concourse_log_levels'
+ '--worker-baggageclaim-bind-ip=[IP address on which to listen for API traffic]:ip'
+ '--worker-baggageclaim-bind-port=[port on which to listen for API traffic]:port'
+ '--worker-baggageclaim-debug-bind-ip=[IP address on which to listen for the pprof debugger endpoints]:ip'
+ '--worker-baggageclaim-debug-bind-port=[port on which to listen for the pprof debugger endpoints]:port'
+ '--worker-baggageclaim-p2p-interface-name-pattern=[regular expression to match a network interface for p2p streaming]:pattern'
+ '--worker-baggageclaim-p2p-interface-family=[4 for IPv4 and 6 for IPv6]:type:(4 6)'
+ '--worker-baggageclaim-volumes=[directory in which to place volume data]:dir:_files -/'
+ '--worker-baggageclaim-driver=[driver to use for managing volumes]:type:(detect naive btrfs overlay)'
+ '--worker-baggageclaim-btrfs-bin=[path to btrfs binary]:path:_files'
+ '--worker-baggageclaim-mkfs-bin=[path to mkfs binary]:path:_files'
+ '--worker-baggageclaim-overlays-dir=[path to directory in which to store overlay data]:dir:_files -/'
+ '--worker-baggageclaim-disable-user-namespaces[disable remapping of user/group IDs in unprivileged volumes]'
)
- _describe -t commands commands commands
-}
-(( $+functions[_concourse_generate-key_args] )) ||
-_concourse_generate-key_args() {
- _arguments -C \
- '(- : *)'{-h,--help}'[display help information]' \
- '(-t --type)'{-t,--type=}'[the type of key to generate]:key type:(rsa ssh)' \
- '(-f --filename)'{-f,--filename=}'[file path where the key shall be created. When generating ssh keys, the public key will be stored in a file with the same name but with .pub appended]: :_files' \
- '(-b --bits)'{-b,--bits=}'[the number of bits in the key to create]:integer'
-}
-
-(( $+functions[_concourse_land-worker_args] )) ||
-_concourse_land-worker_args() {
- _arguments -C \
- '(- : *)'{-h,--help}'[display help information]' \
- '--name=[the name of the worker you wish to land]:worker name' \
- '*--tsa-host=[TSA host to forward the worker through]: :_concourse_host_colon_ports' \
- '--tsa-public-key=[file containing a public key to expect from the TSA]: :_files' \
- '--tsa-worker-private-key=[file containing a public key to expect from the TSA]: :_files'
-}
-
-(( $+functions[_concourse_migrate_args] )) ||
-_concourse_migrate_args() {
- _arguments -C \
- '(- : *)'{-h,--help}'[display help information]' \
- '(- : *)--current-db-version[print the current database version and exit]' \
- '(- : *)--supported-db-version[print the max supported database version and exit]' \
- '(- : *)--migrate-db-to-version=[migrate to the specified database version and exit]:database version' \
- '--encryption-key=[a 16 or 32 length key used to encrypt sensitive information before storing it in the database]:encryption key' \
- '--postgres-host=[the host to connect to]: :_hosts' \
- '--postgres-port=[the port to connect to]: :_concourse_ports' \
- '--postgres-socket=[path to a UNIX domain socket to connect to]: :_files' \
- '--postgres-user=[the user to sign in as]: :_users' \
- '--postgres-password=[the user'\''s password]:password' \
- '--postgres-sslmode=[whether or not to use SSL]:SSL mode:((disable require verify-ca verify-full))' \
- '--postgres-ca-cert=[CA cert file location, to verify when connecting with SSL]: :_files' \
- '--postgres-client-cert=[client cert file location]: :_files' \
- '--postgres-client-key=[client key file location]: :_files' \
- '--postgres-connect-timeout=[dialing timeout]:duration' \
- '--postgres-database=[the name of the database to use]:database name'
+ _arguments \
+ $concourse_web_configurations[@] \
+ "--enable-lets-encrypt[automatically configure TLS certificate via Let's Encrypt/ACME]" \
+ "--lets-encrypt-acme-url=[URL of ACME CA directory endpoint]:url:_urls" \
+ $concourse_postgres_configurations[@] \
+ $concourse_credential_manager_configurations[@] \
+ '--signing-key-check-interval=[how often to check for outdated or expired signing keys (default: 10m)]: :_concourse_durations' \
+ '--signing-key-rotation-period=[time after which a new signing key for the idtoken secrets provider should be generated]: :_concourse_durations' \
+ '--signing-key-grace-period=[how long a key should still be published after a new key has been generated]: :_concourse_durations' \
+ $concourse_container_placement_strategy_configurations[@] \
+ $concourse_metric_configurations[@] \
+ $concourse_tracing_configurations[@] \
+ $concourse_policy_check_agent_configurations[@] \
+ $concourse_web_server_configurations[@] \
+ $concourse_gc_configurations[@] \
+ $concourse_syslog_configurations[@] \
+ $concourse_authentication_configurations[@] \
+ $concourse_feature_flags[@] \
+ $concourse_tsa_configurations[@] \
+ $concourse_worker_configurations[@]
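+ # The $concourse_*_configurations names above are _arguments spec arrays,
+ # presumably defined earlier in this file (outside this hunk); the $array[@]
+ # expansion splices every spec in as a separate word, so large option groups
+ # can be shared between subcommands instead of being repeated inline.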
}
-(( $+functions[_concourse_retire-worker_args] )) ||
-_concourse_retire-worker_args() {
- _arguments -C \
+(( $+functions[_concourse_retire-worker] )) ||
+_concourse_retire-worker() {
+ _arguments \
'(- : *)'{-h,--help}'[display help information]' \
'--name=[the name of the worker you wish to retire]:worker name' \
+ '--team=[the team name of the worker you wish to retire]:team name' \
'*--tsa-host=[TSA host to forward the worker through]: :_concourse_host_colon_ports' \
'--tsa-public-key=[file containing a public key to expect from the TSA]: :_files' \
'--tsa-worker-private-key=[file containing the private key to use when authenticating to the TSA]: :_files'
}
-(( $+functions[_concourse_web_args] )) ||
-_concourse_web_args() {
- _arguments -C \
+(( $+functions[_concourse_web] )) ||
+_concourse_web() {
+ local -a placement_strategies=(
+ volume-locality random fewest-build-containers limit-active-tasks limit-active-containers
+ limit-active-volumes
+ )
+ local -a second_placement_strategies=(
+ random fewest-build-containers limit-active-tasks limit-active-containers limit-active-volumes
+ )
+
+ _arguments \
'(- : *)'{-h,--help}'[display help information]' \
- '--peer-address=[network address of this web node, reachable by other web nodes]: :_concourse_host_colon_ports' \
- '--log-level=[minimum level of logs to see]: :_concourse_log_levels' \
- '--bind-ip=[IP address on which to listen for web traffic]: :_concourse_ip_addresses' \
- '--bind-port=[port on which to listen for HTTP traffic]: :_concourse_ports' \
- '--tls-bind-port=[port on which to listen for HTTPS traffic]: :_concourse_ports' \
- '--tls-cert=[file containing an SSL certificate]: :_files' \
- '--tls-key=[file containing an RSA private key, used to encrypt HTTPS traffic]: :_files' \
- '--external-url=[URL used to reach any ATC from the outside world]: :_urls' \
- '--encryption-key=[a 16 or 32 length key used to encrypt sensitive information before storing it in the database]:encryption key' \
- '--old-encryption-key=[encryption key previously used for encrypting sensitive information]:encryption key' \
- '--debug-bind-ip=[IP address on which to listen for the pprof debugger endpoints]: :_concourse_ip_addresses' \
- '--debug-bind-port=[port on which to listen for the pprof debugger endpoints]: :_concourse_ports' \
- '--intercept-idle-timeout=[length of time for a intercepted session to be idle before terminating]: :_concourse_durations' \
- '--enable-global-resources[enable equivalent resources across pipelines and teams to share a single version history]' \
- '--global-resource-check-timeout=[time limit on checking for new versions of resources]: :_concourse_durations' \
- '--resource-checking-interval=[interval on which to check for new versions of resources]: :_concourse_durations' \
- '--resource-type-checking-interval=[interval on which to check for new versions of resource types]: :_concourse_durations' \
- '--container-placement-strategy=[method by which a worker is selected during container placement]:strategy:((volume-locality random fewest-build-containers))' \
- '--baggageclaim-response-header-timeout=[how long to wait for Baggageclaim to send the response header]: :_concourse_durations' \
- '--cli-artifacts-dir=[directory containing downloadable CLI binaries]: :_files -/' \
- '--log-db-queries[log database queries]' \
- '--build-tracker-interval=[interval on which to run build tracking]: :_concourse_durations' \
- '--default-build-logs-to-retain=[default build logs to retain, 0 means all]:number' \
- '--max-build-logs-to-retain=[maximum build logs to retain, 0 means not specified]:number' \
- '--default-days-to-retain-build-logs=[default days to retain build logs. 0 means unlimited]:number' \
- '--max-days-to-retain-build-logs=[maximum days to retain build logs, 0 means not specified]:number' \
- '--default-task-cpu-limit=[default max number of cpu shares per task, 0 means unlimited]:number' \
- '--default-task-memory-limit=[default maximum memory per task, 0 means unlimited]:number' \
- '--enable-build-auditing[enable auditing for all api requests connected to builds]' \
- '--enable-container-auditing[enable auditing for all api requests connected to containers]' \
- '--enable-job-auditing[enable auditing for all api requests connected to jobs]' \
- '--enable-pipeline-auditing[enable auditing for all api requests connected to pipelines]' \
- '--enable-resource-auditing[enable auditing for all api requests connected to resources]' \
- '--enable-system-auditing[enable auditing for all api requests connected to system transactions]' \
- '--enable-team-auditing[enable auditing for all api requests connected to teams]' \
- '--enable-worker-auditing[enable auditing for all api requests connected to workers]' \
- '--enable-volume-auditing[enable auditing for all api requests connected to volumes]' \
- '--postgres-host=[the host to connect to]: :_hosts' \
- '--postgres-port=[the port to connect to]: :_concourse_ports' \
- '--postgres-socket=[path to a UNIX domain socket to connect to]: :_files' \
- '--postgres-user=[the user to sign in as]: :_users' \
- '--postgres-password=[the user'\''s password]:password' \
- '--postgres-sslmode=[whether or not to use SSL]:SSL mode:((disable require verify-ca verify-full))' \
- '--postgres-ca-cert=[CA cert file location, to verify when connecting with SSL]: :_files' \
- '--postgres-client-cert=[client cert file location]: :_files' \
- '--postgres-client-key=[client key file location]: :_files' \
- '--postgres-connect-timeout=[dialing timeout]: :_concourse_durations' \
- '--postgres-database=[the name of the database to use]:database name' \
- '--secret-retry-attempts=[the number of attempts secret will be retried to be fetched, in case a retriable error happens]:number' \
- '--secret-retry-interval=[the interval between secret retry retrieval attempts]: :_concourse_durations' \
- '--secret-cache-enabled[enable in-memory cache for secrets]' \
- '--secret-cache-duration=[if the cache is enabled, secret values will be cached for not longer than this duration]: :_concourse_durations' \
- '--secret-cache-purge-interval=[if the cache is enabled, expired items will be removed on this internal]: :_concourse_durations' \
- '--credhub-url=[CredHub server address used to access secrets]: :_urls' \
- '--credhub-path-prefix=[path under which to namespace credential lookup]:path' \
- '--credhub-ca-cert=[path to PEM-encoded CA cert files to use to verify the CredHub server SSL cert]: :_files' \
- '--credhub-client-cert=[path to the client certificate for mutual TLS authorization]: :_files' \
- '--credhub-client-key=[path to the client private key for mutual TLS authorization]: :_files' \
- '--credhub-insecure-skip-verify[enable insecure SSL verification]' \
- '--credhub-client-id=[client ID for CredHub authorization]:client ID' \
- '--credhub-client-secret=[client secret for CredHub authorization]:client secret' \
- '--kubernetes-in-cluster[enables the in-cluster client]' \
- '--kubernetes-config-path=[path to Kubernetes config when running ATC outside Kubernetes]: :_files' \
- '--kubernetes-namespace-prefix=[prefix to use for Kubernetes namespaces under which secrets will be looked up]:prefix' \
- '--aws-secretsmanager-access-key=[AWS Access key ID]:access key' \
- '--aws-secretsmanager-secret-key=[AWS Secret Access Key]:secret key' \
- '--aws-secretsmanager-session-token=[AWS Session Token]:session token' \
- '--aws-secretsmanager-region=[AWS region to send requests to]:region' \
- '--aws-secretsmanager-pipeline-secret-template=[AWS Secrets Manager secret identifier template used for pipeline specific parameter]:template' \
- '--aws-secretsmanager-team-secret-template=[AWS Secrets Manager secret identifier template used for team specific parameter]:template' \
- '--aws-ssm-access-key=[AWS Access key ID]:access key' \
- '--aws-ssm-secret-key=[AWS Secret Access Key]:secret key' \
- '--aws-ssm-session-token=[AWS Session Token]:session token' \
- '--aws-ssm-region=[AWS region to send requests to]:region' \
- '--aws-ssm-pipeline-secret-template=[AWS SSM parameter name template used for pipeline specific parameter]:template' \
- '--aws-ssm-team-secret-template=[AWS SSM parameter name template used for team specific parameter]:template' \
- '--vault-url=[vault server address used to access secrets]: :_urls' \
- '--vault-path-prefix=[path under which to namespace credential lookup]:prefix' \
- '--vault-shared-path=[path under which to lookup shared credentials]:path' \
- '--vault-ca-cert=[path to a PEM-encoded CA cert file to use to verify the vault server SSL cert]: :_files' \
- '--vault-ca-path=[path to a directory of PEM-encoded CA cert files to verify the vault server SSL cert]: :_files -/' \
- '--vault-client-cert=[path to the client certificate for Vault authorization]: :_files' \
- '--vault-client-key=[path to the client private key for Vault authorization]: :_files' \
- '--vault-server-name=[if set, is used to set the SNI host when connecting via TLS]:server name' \
- '--vault-insecure-skip-verify[enable insecure SSL verification]' \
- '--vault-client-token=[client token for accessing secrets within the Vault server]:client token' \
- '--vault-auth-backend=[auth backend to use for logging in to Vault]:auth backend' \
- '--vault-auth-backend-max-ttl=[time after which to force a re-login]: :_concourse_durations' \
- '--vault-retry-max=[the maximum time between retries when logging in or re-authing a secret]: :_concourse_durations' \
- '--vault-retry-initial=[the initial time between retries when logging in or re-authing a secret]: :_concourse_durations' \
- '*--vault-auth-param=[parameter to pass when logging in via the backend]: :_concourse_name_colon_values' \
- {-n,--noop}'[don'\''t actually do any automatic scheduling or checking]' \
- '--worker-garden-url=[a Garden API endpoint to register as a worker]: :_urls' \
- '--worker-baggageclaim-url=[a Baggageclaim API endpoint to register with the worker]: :_urls' \
- '*--worker-resource=[a resource type to advertise for the worker]: :_concourse_type_colon_images' \
- '--metrics-host-name=[host string to attach to emitted metrics]: :_hosts' \
- '*--metrics-attribute=[a key-value attribute to attach to emitted metrics]: :_concourse_name_colon_values' \
- '--capture-error-metrics[enable capturing of error log metrics]' \
- '--datadog-agent-host=[datadog agent host to expose dogstatsd metrics]: :_hosts' \
- '--datadog-agent-port=[datadog agent port to expose dogstatsd metrics]: :_concourse_ports' \
- '--datadog-prefix=[prefix for all metrics to easily find them in Datadog]:prefix' \
- '--influxdb-url=[influxDB server address to emit points to]: :_urls' \
- '--influxdb-database=[influxDB database to write points to]:database name' \
- '--influxdb-username=[influxDB server username]: :_users' \
- '--influxdb-password=[influxDB server password]:password' \
- '--influxdb-insecure-skip-verify[skip SSL verification when emitting to InfluxDB]' \
- '--emit-to-logs[emit metrics to logs]' \
- '--newrelic-account-id=[new Relic Account ID]:account ID' \
- '--newrelic-api-key=[new Relic Insights API Key]:API key' \
- '--newrelic-service-prefix=[an optional prefix for emitted New Relic events]:prefix' \
- '--prometheus-bind-ip=[IP to listen on to expose Prometheus metrics]: :_concourse_ip_addresses' \
- '--prometheus-bind-port=[port to listen on to expose Prometheus metrics]: :_concourse_ports' \
- '--riemann-host=[riemann server address to emit metrics to]: :_hosts' \
- '--riemann-port=[port of the Riemann server to emit metrics to]: :_concourse_ports' \
- '--riemann-service-prefix=[an optional prefix for emitted Riemann services]:prefix' \
- '*--riemann-tag=[tag to attach to emitted metrics]:tag' \
- '--x-frame-options=[the value to set for X-Frame-Options]:options' \
- '--cluster-name=[a name for this Concourse cluster, to be displayed on the dashboard page]:name' \
- '--gc-interval=[interval on which to perform garbage collection]: :_concourse_durations' \
- '--gc-one-off-grace-period=[period after which one-off build containers will be garbage-collected]: :_concourse_durations' \
- '--gc-missing-grace-period=[period after which to reap containers and volumes that were created but went missing from the worker]: :_concourse_durations' \
- '--syslog-hostname=[client hostname with which the build logs will be sent to the syslog server]: :_hosts' \
- '--syslog-address=[remote syslog server address with port]: :_concourse_host_colon_ports' \
- '--syslog-transport=[transport protocol for syslog messages]:protocol:((tcp udp tls))' \
- '--syslog-drain-interval=[interval over which checking is done for new build logs to send to syslog server]: :_concourse_durations' \
- '--syslog-ca-cert=[paths to PEM-encoded CA cert files to use to verify the Syslog server SSL cert]: :_files' \
- '--cookie-secure[force sending secure flag on http cookies]' \
- '--auth-duration=[length of time for which tokens are valid]: :_concourse_durations' \
- '--session-signing-key=[file containing an RSA private key, used to sign auth tokens]: :_files' \
- '*--add-local-user=[list of username:password combinations for all your local users]: :_concourse_username_colon_passwords' \
- '*--main-team-local-user=[list of whitelisted local concourse users]: :_users' \
- {-c,--main-team-config=}'[configuration file for specifying team params]: :_concourse_config_files' \
- '*--main-team-bitbucket-cloud-user=[list of whitelisted Bitbucket Cloud users]: :_users' \
- '*--main-team-bitbucket-cloud-team=[list of whitelisted Bitbucket Cloud teams]:team' \
- '*--main-team-cf-user=[list of whitelisted CloudFoundry users]: :_users' \
- '*--main-team-cf-org=[list of whitelisted CloudFoundry orgs]:org name' \
- '*--main-team-cf-space=[list of whitelisted CloudFoundry spaces]:space name' \
- '*--main-team-github-user=[list of whitelisted GitHub users]: :_users' \
- '*--main-team-github-org=[list of whitelisted GitHub orgs]:org name' \
- '*--main-team-github-team=[list of whitelisted GitHub teams]:team name' \
- '*--main-team-gitlab-user=[list of whitelisted GitLab users]: :_users' \
- '*--main-team-gitlab-group=[list of whitelisted GitLab groups]:group name' \
- '*--main-team-ldap-user=[list of whitelisted LDAP users]: :_users' \
- '*--main-team-ldap-group=[list of whitelisted LDAP groups]:group name' \
- '*--main-team-oauth-user=[list of whitelisted OAuth2 users]: :_users' \
- '*--main-team-oauth-group=[list of whitelisted OAuth2 groups]:group name' \
- '*--main-team-oidc-user=[list of whitelisted OIDC users]: :_users' \
- '*--main-team-oidc-group=[list of whitelisted OIDC groups]:group name' \
- '--bitbucket-cloud-client-id=[client id]:client ID' \
- '--bitbucket-cloud-client-secret=[client secret]:client secret' \
- '--cf-client-id=[client id]:client ID' \
- '--cf-client-secret=[client secret]:client secret' \
- '--cf-api-url=[the base API URL of your CF deployment]: :_urls' \
- '--cf-ca-cert=[CA Certificate]: :_files' \
- '--cf-skip-ssl-validation[skip SSL validation]' \
- '--github-client-id=[client id]:client ID' \
- '--github-client-secret=[client secret]:client secret' \
- '--github-host=[hostname of GitHub Enterprise deployment]: :_hosts' \
- '--github-ca-cert=[CA certificate of GitHub Enterprise deployment]: :_files' \
- '--gitlab-client-id=[client id]:client ID' \
- '--gitlab-client-secret=[client secret]:client secret' \
- '--gitlab-host=[hostname of Gitlab Enterprise deployment]: :_hosts' \
- '--ldap-display-name=[the auth provider name displayed to users on the login page]:display name' \
- '--ldap-host=[the host and optional port of the LDAP server]: :_hosts' \
- '--ldap-bind-dn=[bind DN for searching LDAP users and groups]:bind DN' \
- '--ldap-bind-pw=[bind Password for the user specified by bind-dn]:bind password' \
- '--ldap-insecure-no-ssl[required if LDAP host does not use TLS]' \
- '--ldap-insecure-skip-verify[skip certificate verification]' \
- '--ldap-start-tls[start on insecure port, then negotiate TLS]' \
- '--ldap-ca-cert=[CA certificate]: :_files' \
- '--ldap-user-search-base-dn= [baseDN to start the search from]:baseDN' \
- '--ldap-user-search-filter=[optional filter to apply when searching the directory]:filter' \
- '--ldap-user-search-username=[attribute to match against the inputted username]:attribute' \
- '--ldap-user-search-scope=[can either be: '\''sub'\'' - search the whole sub tree or '\''one'\'' - only search one level]:scope:((sub one))' \
- '--ldap-user-search-id-attr=[a mapping of attributes on the user entry to claims]:attribute mapping' \
- '--ldap-user-search-email-attr=[a mapping of attributes on the user entry to claims]:attribute mapping' \
- '--ldap-user-search-name-attr=[a mapping of attributes on the user entry to claims]:attribute mapping' \
- '--ldap-group-search-base-dn=[baseDN to start the search from]:baseDN' \
- '--ldap-group-search-filter=[optional filter to apply when searching the directory]:filter' \
- '--ldap-group-search-scope=[can either be: '\''sub'\'' - search the whole sub tree or '\''one'\'' - only search one level]:scope:((sub one))' \
- '--ldap-group-search-user-attr=[adds an additional requirement to the filter that an attribute in the group match the user'\''s attribute value]:attribute' \
- '--ldap-group-search-group-attr=[adds an additional requirement to the filter that an attribute in the group match the user'\''s attribute value]:attribute' \
- '--ldap-group-search-name-attr=[the attribute of the group that represents its name]:attribute' \
- '--oauth-display-name=[the auth provider name displayed to users on the login page]:display name' \
- '--oauth-client-id=[client id]:client ID' \
- '--oauth-client-secret=[client secret]:client secret' \
- '--oauth-auth-url=[Authorization URL]: :_urls' \
- '--oauth-token-url=[Token URL]: :_urls' \
- '--oauth-userinfo-url=[UserInfo URL]: :_urls' \
- '*--oauth-scope=[any additional scopes that need to be requested during authorization]:scope' \
- '--oauth-groups-key=[the groups key indicates which claim to use to map external groups to Concourse teams]:group key' \
- '--oauth-user-id-key=[the user id key indicates which claim to use to map an external user id to a Concourse user id]:id key' \
- '--oauth-user-name-key=[the user name key indicates which claim to use to map an external user name to a Concourse user name]:name key' \
- '--oauth-ca-cert=[CA Certificate]: :_files' \
- '--oauth-skip-ssl-validation[skip SSL validation]' \
- '--oidc-display-name=[the auth provider name displayed to users on the login page]:display name' \
- '--oidc-issuer=[An OIDC issuer URL that will be used to discover provider configuration]: :_urls' \
- '--oidc-client-id=[client id]:client ID' \
- '--oidc-client-secret=[client secret]:client secret' \
- '*--oidc-scope=[any additional scopes that need to be requested during authorization]:scope' \
- '--oidc-groups-key=[the groups key indicates which claim to use to map external groups to Concourse teams]:group key' \
- '--oidc-user-name-key=[the user name key indicates which claim to use to map an external user name to a Concourse user name]:user name key' \
- '*--oidc-hosted-domains=[list of whitelisted domains when using Google, only users from a listed domain will be allowed to log in]:domain' \
- '--oidc-ca-cert=[CA Certificate]: :_files' \
- '--oidc-skip-ssl-validation[skip SSL validation]' \
- '--tsa-log-level=[minimum level of logs to see]: :_concourse_log_levels' \
- '--tsa-bind-ip=[IP address on which to listen for SSH]: :_concourse_ip_addresses' \
- '--tsa-peer-address=[network address of this web node, reachable by other web nodes]: :_urls' \
- '--tsa-bind-port=[port on which to listen for SSH]: :_concourse_ports' \
- '--tsa-debug-bind-ip=[IP address on which to listen for the pprof debugger endpoints]: :_concourse_ip_addresses' \
- '--tsa-debug-bind-port=[port on which to listen for the pprof debugger endpoints]: :_concourse_ports' \
- '--tsa-host-key=[path to private key to use for the SSH server]: :_files' \
- '--tsa-authorized-keys=[path to file containing keys to authorize, in SSH authorized_keys format]: :_files' \
- '--tsa-team-authorized-keys=[path to file containing keys to authorize, in SSH authorized_keys format]: :_concourse_name_colon_paths' \
- '--tsa-atc-url=[ATC API endpoints to which workers will be registered]: :_urls' \
- '--tsa-session-signing-key=[path to private key to use when signing tokens in requests to the ATC during registration]: :_files' \
- '--tsa-heartbeat-interval=[interval on which to heartbeat workers to the ATC]: :_concourse_durations' \
+ $concourse_web_configurations[@] \
+ "--enable-lets-encrypt[automatically configure TLS certificate via Let's Encrypt/ACME]" \
+ "--lets-encrypt-acme-url=[URL of ACME CA directory endpoint]:url:_urls" \
+ $concourse_postgres_configurations[@] \
+ $concourse_credential_manager_configurations[@] \
+ '--signing-key-check-interval=[how often to check for outdated or expired signing keys (default: 10m)]: :_concourse_durations' \
+ '--signing-key-rotation-period=[time after which a new signing key for the idtoken secrets provider should be generated]: :_concourse_durations' \
+ '--signing-key-grace-period=[how long a key should still be published after a new key has been generated]: :_concourse_durations' \
+ $concourse_container_placement_strategy_configurations[@] \
+ $concourse_metric_configurations[@] \
+ $concourse_tracing_configurations[@] \
+ $concourse_policy_check_agent_configurations[@] \
+ $concourse_web_server_configurations[@] \
+ $concourse_gc_configurations[@] \
+ $concourse_syslog_configurations[@] \
+ $concourse_authentication_configurations[@] \
+ $concourse_feature_flags[@] \
+ $concourse_tsa_configurations[@]
}
-(( $+functions[_concourse_worker_args] )) ||
-_concourse_worker_args() {
- _arguments -C \
+(( $+functions[_concourse_worker] )) ||
+_concourse_worker() {
+ _arguments \
'(- : *)'{-h,--help}'[display help information]' \
'--name=[the name to set for the worker during registration]:name' \
'*--tag=[a tag to set during registration]:tag' \
@@ -1396,18 +734,43 @@ _concourse_worker_args() {
'*--tsa-host=[TSA host to forward the worker through]: :_hosts' \
'--tsa-public-key=[file containing a public key to expect from the TSA]: :_files' \
'--tsa-worker-private-key=[file containing the private key to use when authenticating to the TSA]: :_files' \
- '--garden-use-houdini[use the insecure Houdini Garden backend]' \
- '--garden-bin=[path to gdn executable (or leave as gdn to find it in $PATH)]: :_files' \
+ '--runtime=[runtime to use with the worker (default: guardian)]:runtime:(guardian containerd houdini)' \
+ '--garden-bin=[path to gdn executable (or leave as gdn to find it in $PATH)]: :_path_commands' \
+ '--garden-request-timeout=[how long to wait for requests to the Garden server to complete]:time' \
'--garden-config=[path to a config file to use for Garden]: :_files' \
'--garden-dns-proxy-enable[enable proxy DNS server]' \
+ '--garden-network-pool=[network range to use for dynamically allocated container subnets]:range' \
+ '--garden-max-containers=[maximum container capacity for Garden]:limit' \
+ '--containerd-config=[path to a config file to use for Containerd]:path:_files' \
+ '--containerd-bin=[path to a containerd executable]:command:_path_commands' \
+ '--containerd-init-bin=[path to an init executable]:init:_files' \
+ '--containerd-seccomp-profile=[path to a seccomp filter override]:filter:_files' \
+ '--containerd-oci-hooks-dir=[path to the OCI hooks directory]:dir:_files -/' \
+ '--containerd-cni-plugins-dir=[path to CNI network plugins]:dir:_files -/' \
+ '--containerd-request-timeout=[how long to wait for requests to Containerd to complete]:timeout' \
+ '--containerd-max-containers=[max container capacity for containerd]:limit' \
+ '--containerd-privileged-mode=[how many privileges privileged containers get]:type:(full fuse-only ignore)' \
+ "--containerd-external-ip=[IP address to use to reach container's mapped ports]:ip" \
+ '--containerd-dns-server=[DNS server IP address to use instead of automatically determined servers]:ip' \
+ '--containerd-additional-hosts=[additional entries to add to /etc/hosts in containers]:hosts' \
+ '--containerd-restricted-network=[network ranges to which traffic from containers will be restricted]:range' \
+ '--containerd-network-pool=[network range to use for dynamically allocated container subnets]:range' \
+ '--containerd-mtu=[MTU size for container network interfaces]:size' \
+ "--containerd-allow-host-access[allow containers to reach the host's network]" \
+ '--containerd-dns-proxy-enable[enable proxy DNS server]' \
+ '--containerd-v6-enable[enable IPv6 networking]' \
+ '--containerd-v6-pool=[IPv6 network range to use for dynamically allocated container addresses]:range' \
+ '--containerd-v6-disable-masquerade[do not masquerade container traffic with the worker address]' \
'--baggageclaim-log-level=[minimum level of logs to see]: :_concourse_log_levels' \
'--baggageclaim-bind-ip=[IP address on which to listen for API traffic]: :_concourse_ip_addresses' \
'--baggageclaim-bind-port=[port on which to listen for API traffic]: :_concourse_ports' \
'--baggageclaim-debug-bind-ip=[IP address on which to listen for the pprof debugger endpoints]: :_concourse_ip_addresses' \
'--baggageclaim-debug-bind-port=[port on which to listen for the pprof debugger endpoints]: :_concourse_ports' \
+ '--baggageclaim-p2p-interface-name-pattern=[regular expression to match a network interface for p2p streaming]:pattern' \
+ '--baggageclaim-p2p-interface-family=[IPv4 or IPv6 (default: IPv4)]:type:(4 6)' \
'--baggageclaim-volumes=[directory in which to place volume data]: :_files -/' \
- '--baggageclaim-driver=[driver to use for managing volumes]:driver:((detect naive btrfs overlay))' \
- '--baggageclaim-btrfs-bin=[path to btrfs binary]: :_files' \
+ '--baggageclaim-driver=[driver to use for managing volumes]:driver:(detect naive btrfs overlay)' \
+ '--baggageclaim-btrfs-bin=[path to btrfs binary]: :_path_commands' \
'--baggageclaim-mkfs-bin=[path to mkfs.btrfs binary]: :_files' \
'--baggageclaim-overlays-dir=[path to directory in which to store overlay data]: :_files -/' \
'--baggageclaim-disable-user-namespaces[disable remapping of user/group IDs in unprivileged volumes]'
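+ # Spec syntax used throughout: '--opt=[description]:message:action', where an
+ # action of '_files -/' completes directories only and a literal list such as
+ # '(detect naive btrfs overlay)' offers exactly those words as candidates.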
@@ -1502,11 +865,7 @@ _concourse_log_levels() {
_describe -t log-levels 'log level' levels
}
-case $service in
- concourse) _concourse_server "$@" ;;
- fly) _concourse_fly "$@" ;;
- *) _message "unknown command ${service}" && ret=1 ;;
-esac
+_concourse_server "$@"
# Local Variables:
# mode: Shell-Script
diff --git a/src/_console b/src/_console
deleted file mode 100644
index 3ed8dd7..0000000
--- a/src/_console
+++ /dev/null
@@ -1,72 +0,0 @@
-#compdef console
-# ------------------------------------------------------------------------------
-# Copyright (c) 2011 Github zsh-users - https://github.com/zsh-users
-# All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are met:
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above copyright
-# notice, this list of conditions and the following disclaimer in the
-# documentation and/or other materials provided with the distribution.
-# * Neither the name of the zsh-users nor the
-# names of its contributors may be used to endorse or promote products
-# derived from this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
-# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-# DISCLAIMED. IN NO EVENT SHALL ZSH-USERS BE LIABLE FOR ANY
-# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
-# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
-# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-# ------------------------------------------------------------------------------
-# Description
-# -----------
-#
-# Completion script for symfony console (https://github.com/symfony/Console).
-#
-# ------------------------------------------------------------------------------
-# Authors
-# -------
-#
-# * loranger (https://github.com/loranger)
-# * Yohan Tambè (https://github.com/Cronos87)
-#
-# ------------------------------------------------------------------------------
-
-_console_find_console() {
- echo "php $(find . -maxdepth 2 -mindepth 1 -name 'console' -type f | head -n 1)"
-}
-
-_console_get_command_list() {
- IFS=" "
- `_console_find_console` --no-ansi | \
- sed "1,/Available commands/d" | \
- awk '/ [a-z]+/ { print $0 }' | \
- sed -E 's/^[ ]+//g' | \
- sed -E 's/[:]+/\\:/g' | \
- sed -E 's/[ ]{2,}/\:/g'
-}
-
-_console() {
- local -a commands
- IFS=$'\n'
- commands=(`_console_get_command_list`)
- _describe 'commands' commands
-}
-
-compdef _console php console
-compdef _console console
-
-# Local Variables:
-# mode: Shell-Script
-# sh-indentation: 2
-# indent-tabs-mode: nil
-# sh-basic-offset: 2
-# End:
-# vim: ft=zsh sw=2 ts=2 et
diff --git a/src/_emacs b/src/_emacs
index cba58eb..8ecd182 100644
--- a/src/_emacs
+++ b/src/_emacs
@@ -1,4 +1,4 @@
-#compdef emacs
+#compdef emacs emacsclient
# ------------------------------------------------------------------------------
# Copyright (c) 2022 Github zsh-users - https://github.com/zsh-users
# All rights reserved.
@@ -35,67 +35,92 @@
#
# ------------------------------------------------------------------------------
-_arguments \
- '--batch[do not do interactive display, implies -q]' \
- '--chdir[change to directory]: :_files -/' \
- '(--daemon --bg-daemon --fg-daemon)--daemon[start a server in the background]' \
- '(--daemon --bg-daemon --fg-daemon)--bg-daemon=-[start a named server in the background]::name' \
- '(--daemon --bg-daemon --fg-daemon)--fg-daemon=-[start a named server in the foreground]' \
- '--debug-init[enable Emacs Lisp debugger for init file]' \
- '(-d --display)'{-d,--display}'[use X server DISPLAY]:display' \
- '--module-assertions[assert behavior of dynamic modules]' \
- '--dump-file[read dumped state from FILE]: :_files' \
- '(- *)--fingerprint[output fingerprint and exit]' \
- '--seccomp=[read Seccomp BPF filter from FILE]: :_files' \
- '--no-build-details[do not add build details such as time stamps]' \
- '--no-desktop[do not load a saved desktop]' \
- '(-q --no-init-file)'{-q,--no-init-file}'[loader neither ~/.emacs nor default.el]' \
- '(-nl --no-loadup)'{-nl,--no-loadup}'[do not load loadup.el]' \
- '--no-site-file[do not load site-start.el]' \
- '--no-x-resources[do not load X resources]' \
- '(-nsl --no-site-lisp)'{-nsl,--no-site-lisp}'[do not add site-lisp directories to load-path]' \
- '--no-splash[do not display a splash screen on startup]' \
- '(-nw --no-window-system)'{-nw,--no-window-system}'[do not communicate with X, ignoring $DISPLAY]' \
- '--init-directory=[use DIR when looking for the Emacs init files]: :_files -/' \
- '(-Q --quick)'{-Q,--quick}'[equivalent to -q --no-site-file --no-site-lisp --no-splash --no-x-resources]' \
- '--script[run FILE as an Emacs Lisp script]: :_files' \
- '-x[same as -Q --script]' \
- '(-t --terminal)'{-t,--terminal}'[use DEVICE for terminal IO]: :_files' \
- '(-u --user)'{-u,--user}'[load ~USER/.emacs instead of your own]' \
- \*{-L,--directory}'[prepend DIR to load-path(with :DIR, append DIR)]: :_files -/' \
- {--eval,--execute}'[evaluate Emacs Lisp expression EXPR]:expr' \
- \*{--file,--find-file,--visit}'[visit FILE]: :_files' \
- \*{-f,--funcall}'[call Emacs Lisp function FUNC with no arguments]:func' \
- '--insert[insert contents of FILE into current buffer]:file:_files' \
- '--kill[exit without asking for confirmation]' \
- \*{-l,--load}'[load Emacs Lisp FILE using the load function]: :_files' \
- '(-bg --background-color)'{-bg,--background-color}'[window background color]:color' \
- '(-D --basic-display)'{-D,--basic-display}'[disable many display features]' \
- '(-bg --border-color)'{-bg,--border-color}'[main border color]:color' \
- '(-bw --border-width)'{-bw,--border-width}'[width of main border]' \
- '--color=-[override color mode for character terminals]:: :(auto never always ansi8)' \
- '(-cr --cursor-color)'{-cr,--cursor-color}'[color of the Emacs cursor indicating point]:color' \
- '(-fn --font)'{-fn,--font}'[default font, must be fixed-width]:font' \
- '(-fg --foreground)'{-fg,--foreground}'[window foreground color]:color' \
- '(-fh --fullheight)'{-fh,--fullheight}'[make the first frame high as the screen]' \
- '(-fs --fullscreen)'{-fs,--fullscreen}'[make the first frame fullscreen]' \
- '(-fw --fullwidth)'{-fw,--fullwidth}'[make the first frame wide as the screen]' \
- '(-mm --maximized)'{-mm,--maximized}'[make the first frame maximized]' \
- '(-g --geometry)'{-g,--geometry}'[window geometry]:geometry' \
- '(-nbi --no-bitmap-icon)'{-nbi,--no-bitmap-icon}'[do not use picture of gnu for Emacs icon]' \
- '--iconic[start Emacs in iconified state]' \
- '(-ib --internal-border)'{-ib,--internal-border}'[width between text and main border]:width' \
- '(-lsp --line-spacing)'{-lsp,--line-spacing}'[additional space to put between lines]' \
- '(-ms --mouse-color)'{-ms,--mouse-color}'[mouse cursor color in Emacs window]:color' \
- '(-T --name --title)'{-T,--name,--title}'[title for initial Emacs frame]:title' \
- '(-nbc,--no-blinking-cursor)'{-nbc,--no-blinking-cursor}'[disable blinking cursor]' \
- '(-r -rv --reverse-video)'{-r,-rv,--reverse-video}'[switch foreground and background]' \
- '(-vb --vertical-scroll-bars)'{-vb,--vertical-scroll-bars}'[enable vertical scroll bars]' \
- '--xrm[set additional X resources]:resource' \
- '--parent-id[set parent window]:xid' \
- '(- *)--help{display help and exit}' \
- '(- *)--version[output version information and exit]' \
- '*: :_files'
+case $service in
+ (emacs)
+ _arguments -s \
+ '--batch[do not do interactive display, implies -q]' \
+ '--chdir[change to directory]:dir:_files -/' \
+ '(--daemon --bg-daemon --fg-daemon)'{--daemon,--bg-daemon=-}'[start a named server in the background]::name' \
+ '(--daemon --bg-daemon --fg-daemon)--fg-daemon=-[start a named server in the foreground]::name' \
+ '--debug-init[enable Emacs Lisp debugger for init file]' \
+ '(-d --display)'{-d,--display}'[use X server DISPLAY]:display:_x_display' \
+ '--module-assertions[assert behavior of dynamic modules]' \
+ '--dump-file[read dumped state from FILE]:file:_files' \
+ '(- *)--fingerprint[output fingerprint and exit]' \
+ '--seccomp=[read Seccomp BPF filter from FILE]: :_files' \
+ '--no-build-details[do not add build details such as time stamps]' \
+ '--no-desktop[do not load a saved desktop]' \
+ '(-q --no-init-file)'{-q,--no-init-file}'[load neither ~/.emacs nor default.el]' \
+ '(-nl --no-loadup)'{-nl,--no-loadup}'[do not load loadup.el]' \
+ '--no-site-file[do not load site-start.el]' \
+ '--no-x-resources[do not load X resources]' \
+ '(-nsl --no-site-lisp)'{-nsl,--no-site-lisp}'[do not add site-lisp directories to load-path]' \
+ '--no-splash[do not display a splash screen on startup]' \
+ '(-nw --no-window-system)'{-nw,--no-window-system}'[do not communicate with X, ignoring $DISPLAY]' \
+ '--init-directory=[use DIR when looking for the Emacs init files]:dir:_files -/' \
+ '(-Q --quick)'{-Q,--quick}'[equivalent to -q --no-site-file --no-site-lisp --no-splash --no-x-resources]' \
+ '--script[run FILE as an Emacs Lisp script]:file:_files' \
+ '-x[same as -Q --script]' \
+ '(-t --terminal)'{-t,--terminal}'[use DEVICE for terminal IO]: :_files' \
+ '(-u --user)'{-u,--user}"[load user's init file instead of your own]" \
+ \*{-L,--directory}'[prepend DIR to load-path(with :DIR, append DIR)]: :_files -/' \
+ {--eval,--execute}'[evaluate Emacs Lisp expression EXPR]:expr' \
+ \*{--file,--find-file,--visit}'[visit FILE]: :_files' \
+ \*{-f,--funcall}'[call Emacs Lisp function FUNC with no arguments]:func' \
+ '--insert[insert contents of FILE into current buffer]:file:_files' \
+ '--kill[exit without asking for confirmation]' \
+ \*{-l,--load}'[load Emacs Lisp FILE using the load function]: :_files' \
+ '(-bg --background-color)'{-bg,--background-color}'[window background color]:color' \
+ '(-D --basic-display)'{-D,--basic-display}'[disable many display features]' \
+ '(-bd --border-color)'{-bd,--border-color}'[main border color]:color' \
+ '(-bw --border-width)'{-bw,--border-width}'[width of main border]' \
+ '--color=-[override color mode for character terminals]:: :(auto never always ansi8)' \
+ '(-cr --cursor-color)'{-cr,--cursor-color}'[color of the Emacs cursor indicating point]:color' \
+ '(-fn --font)'{-fn,--font}'[default font, must be fixed-width]:font' \
+ '(-fg --foreground)'{-fg,--foreground}'[window foreground color]:color' \
+ '(-fh --fullheight)'{-fh,--fullheight}'[make the first frame as high as the screen]' \
+ '(-fs --fullscreen)'{-fs,--fullscreen}'[make the first frame fullscreen]' \
+ '(-fw --fullwidth)'{-fw,--fullwidth}'[make the first frame as wide as the screen]' \
+ '(-mm --maximized)'{-mm,--maximized}'[make the first frame maximized]' \
+ '(-g --geometry)'{-g,--geometry}'[window geometry]:geometry' \
+ '(-nbi --no-bitmap-icon)'{-nbi,--no-bitmap-icon}'[do not use picture of gnu for Emacs icon]' \
+ '--iconic[start Emacs in iconified state]' \
+ '(-ib --internal-border)'{-ib,--internal-border}'[width between text and main border]:width' \
+ '(-lsp --line-spacing)'{-lsp,--line-spacing}'[additional space to put between lines]' \
+ '(-ms --mouse-color)'{-ms,--mouse-color}'[mouse cursor color in Emacs window]:color' \
+ '(-T --name --title)'{-T,--name,--title}'[title for initial Emacs frame]:title' \
+ '(-nbc --no-blinking-cursor)'{-nbc,--no-blinking-cursor}'[disable blinking cursor]' \
+ '(-r -rv --reverse-video)'{-r,-rv,--reverse-video}'[switch foreground and background]' \
+ '(-vb --vertical-scroll-bars)'{-vb,--vertical-scroll-bars}'[enable vertical scroll bars]' \
+ '--xrm[set additional X resources]:resource' \
+ '--parent-id[set parent window]:xid' \
+ '(- *)--help[display help and exit]' \
+ '(- *)--version[output version information and exit]' \
+ '*:: :_files'
+ ;;
+ (emacsclient)
+ _arguments -s -n : \
+ '(- *)'{-V,--version}'[Print version information and exit]' \
+ '(- *)'{-H,--help}'[Print this usage information message and exit]' \
+ '(-nw -t --tty --no-window-system)'{-nw,-t,--tty,--no-window-system}'[Open a new Emacs frame on the current terminal]' \
+ '(-c --create-frame)'{-c,--create-frame}'[Create a new frame instead of trying to use the current Emacs frame]' \
+ '(-r --reuse-frame)'{-r,--reuse-frame}'[Create a new frame if none exists, otherwise use the current Emacs frame]' \
+ '(-F --frame-parameters)'{-F,--frame-parameters=}'[Set the parameters of a new frame]:alist' \
+ '(-e --eval)'{-e,--eval}'[Evaluate as Emacs Lisp expressions]' \
+ '(-n --no-wait)'{-n,--no-wait}'[Return immediately without waiting for the server to finish]' \
+ '(-w --timeout)'{-w,--timeout=}'[Seconds to wait before timing out]:seconds' \
+ '(-q --quiet)'{-q,--quiet}"[Don't display messages on success]" \
+ '(-u --suppress-output)'{-u,--suppress-output}"[Don't display return values from the server]" \
+ '(-d --display)'{-d,--display=}'[Tell the server to display the files on the given display]:display:_x_display' \
+ '--parent-id=[Open in parent window ID, via XEmbed]' \
+ '(-s --socket-name)'{-s,--socket-name=}'[Set filename of the UNIX socket for communication]: :_files' \
+ '(-f --server-file)'{-f,--server-file=}'[Set filename of the TCP authentication file]: :_files' \
+ '(-a --alternate-editor)'{-a,--alternate-editor=}'[Editor to fall back to if the server is not running; if empty, run "emacs --daemon" and retry]:editor:_files' \
+ '(-T --tramp)'{-T,--tramp=}'[PREFIX to prepend to filenames sent by emacsclient for locating files remotely via Tramp]:prefix' \
+ '*:: :_files'
+ ;;
+esac
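+
+# Dispatch note: since this file is declared "#compdef emacs emacsclient",
+# zsh sets $service to the command actually being completed, and the case
+# statement above picks the matching set of specs.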
+
# Local Variables:
# mode: Shell-Script
diff --git a/src/_emacsclient b/src/_emacsclient
deleted file mode 100644
index 47ef577..0000000
--- a/src/_emacsclient
+++ /dev/null
@@ -1,66 +0,0 @@
-#compdef emacsclient
-#
-# ------------------------------------------------------------------------------
-# Copyright (c) 2022 Github zsh-users - https://github.com/zsh-users
-# All rights reserved.
-#
-# Permission is hereby granted, free of charge, to any person obtaining
-# a copy of this software and associated documentation files (the
-# "Software"), to deal in the Software without restriction, including
-# without limitation the rights to use, copy, modify, merge, publish,
-# distribute, sublicense, and/or sell copies of the Software, and to
-# permit persons to whom the Software is furnished to do so, subject to
-# the following conditions:
-#
-# The above copyright notice and this permission notice shall be included
-# in all copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
-# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
-# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
-# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
-# OTHER DEALINGS IN THE SOFTWARE.
-# ------------------------------------------------------------------------------
-# Description
-# -----------
-#
-# Completion script for emacsclient 29
-# - https://www.gnu.org/software/emacs/manual/html_node/emacs/Invoking-emacsclient.html
-#
-# ------------------------------------------------------------------------------
-# Authors
-# -------
-#
-# * Shohei YOSHIDA (https://github.com/syohex)
-#
-# ------------------------------------------------------------------------------
-
-_arguments -s -n : \
- '(- *)'{-V,--version}'[Print version information and exit]' \
- '(- *)'{-H,--help}'[Print this usage information message and exit]' \
- '(-nw -t --tty)'{-nw,-t,--tty}'[Open a new Emacs frame on the current terminal]' \
- '(-c --create-frame)'{-c,--create-frame}'[Create a new frame instead of trying to use the current Emacs frame]' \
- '(-r --reuse-frame)'{-r,--reuse-frame}'[Create a new frame if none exists, otherwise use the current Emacs frame]' \
- '(-F --frame-parameters)'{-F,--frame-parameters=}'[Set the parameters of a new frame]:alist' \
- '(-e --eval)'{-e,--eval}'[Evaluate as Emacs Lisp expressions]' \
- '(-n --no-wait)'{-n,--no-wait}'[Returns immediately without waiting for finish]' \
- '(-w --timeout)'{-w,--timeout=}'[Seconds to wait before timing out]:seconds' \
- '(-q --quiet)'{-q,--quiet}"[Don't display messages on success]" \
- '(-u --suppress-output)'{-u,--suppress-output}"[Don't display return values from the server]" \
- '(-d --display)'{-d,--display=}'[Tell the server to display the files on the given display]:display:_x_display' \
- '--parent-id=[Open in parent window ID, via XEmbed]' \
- '(-s --socket-name)'{-s,--socket-name=}'[Set filename of the UNIX socket for communication]: :_files' \
- '(-f --server-file)'{-f,--server-file=}'[Set filename of the TCP authentication file]: :_files' \
- '(-a --alternate-editor)'{-a,--alternate-editor=}'[Editor to fallback to if the server is not running if EDITOR is the empty string]:editor:_files' \
- '(-T --tramp)'{-T,--tramp=}'[PREFIX to prepend to filenames sent by emacsclient for locating files remotely via Tramp]:prefix' \
- '*: :_files'
-
-# Local Variables:
-# mode: Shell-Script
-# sh-indentation: 2
-# indent-tabs-mode: nil
-# sh-basic-offset: 2
-# End:
-# vim: ft=zsh sw=2 ts=2 et
diff --git a/src/_fleetctl b/src/_fleetctl
deleted file mode 100644
index 542d7cf..0000000
--- a/src/_fleetctl
+++ /dev/null
@@ -1,123 +0,0 @@
-#compdef fleetctl
-# ------------------------------------------------------------------------------
-# Copyright (c) 2009-2015 Robby Russell and contributors (see
-# https://github.com/ohmyzsh/ohmyzsh/graphs/contributors)
-#
-# Permission is hereby granted, free of charge, to any person obtaining a copy
-# of this software and associated documentation files (the "Software"), to deal
-# in the Software without restriction, including without limitation the rights
-# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-# copies of the Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice shall be included in
-# all copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-# THE SOFTWARE.
-# ------------------------------------------------------------------------------
-# Description
-# -----------
-#
-# Completion script for fleetctl (https://github.com/coreos/fleet).
-#
-#
-# ------------------------------------------------------------------------------
-# Authors
-# -------
-#
-# * Remi Paulmier (https://github.com/shtouff)
-#
-# ------------------------------------------------------------------------------
-
-# fleetctl zsh completion
-
-local -a _1st_arguments
-_1st_arguments=(
- 'cat:Output the contents of a submitted unit'
- 'destroy:Destroy one or more units in the cluster'
- 'fd-forward:Proxy stdin and stdout to a unix domain socket'
- 'help:Show a list of commands or help for one command'
- 'journal:Print the journal of a unit in the cluster to stdout'
- 'list-machines:Enumerate the current hosts in the cluster'
- 'list-unit-files:List the units that exist in the cluster.'
- 'list-units:List the current state of units in the cluster'
- 'load:Schedule one or more units in the cluster, first submitting them if necessary.'
- 'ssh:Open interactive shell on a machine in the cluster'
- 'start:Instruct systemd to start one or more units in the cluster, first submitting and loading if necessary.'
- 'status:Output the status of one or more units in the cluster'
- 'stop:Instruct systemd to stop one or more units in the cluster.'
- 'submit:Upload one or more units to the cluster without starting them'
- 'unload:Unschedule one or more units in the cluster.'
- 'version:Print the version and exit'
-)
-
-__task_list ()
-{
- local expl
- declare -a tasks
-
- tasks=(cat destroy fd-forward help journal list-machines list-unit-files \
- list-units load ssh start status stop submit unload version)
-
- _wanted tasks expl 'help' compadd $tasks
-}
-
-__unit_list ()
-{
- _wanted application expl 'command' compadd $(command fleetctl list-units | \
- tail -n +2 | awk '{print $1}')
-}
-
-local expl
-
-local curcontext="$curcontext" state line
-local -A opt_args
-
-_arguments -C \
- ':command:->command' \
- '*::options:->options'
-
-case $state in
- (command)
- _describe -t commands "gem subcommand" _1st_arguments
- return
- ;;
-
- (options)
- case $line[1] in
- (help)
- _arguments ':feature:__task_list'
- ;;
-
- (destroy|journal|start|status|stop|unload|cat)
- _arguments '*:feature:__unit_list'
- ;;
-
- (load|submit)
- _arguments '*:file:_files -g *.service'
- ;;
-
- (ssh)
- _arguments '*:host:_hosts'
- ;;
-
- (*)
- _arguments '*:file:_files'
- ;;
- esac
- ;;
-esac
-
-# Local Variables:
-# mode: Shell-Script
-# sh-indentation: 2
-# indent-tabs-mode: nil
-# sh-basic-offset: 2
-# End:
-# vim: ft=zsh sw=2 ts=2 et
diff --git a/src/_flutter b/src/_flutter
index 3ea5408..6b721b8 100644
--- a/src/_flutter
+++ b/src/_flutter
@@ -1073,6 +1073,7 @@ _flutter_pub_token() {
_arguments \
'(- *)'{-h,--help}'[Print this usage information]' \
&& ret=0
+ ;;
esac
;;
esac
diff --git a/src/_git-pulls b/src/_git-pulls
index 8dfc117..9778590 100644
--- a/src/_git-pulls
+++ b/src/_git-pulls
@@ -3,7 +3,7 @@
# Description
# -----------
#
-# Completion script for git-pulls 0.3.1 (https://git-pulls.com/schacon/git-pulls).
+# Completion script for git-pulls 0.3.1 (https://github.com/schacon/git-pulls).
#
# ------------------------------------------------------------------------------
# Authors
diff --git a/src/_google b/src/_google
deleted file mode 100644
index 4a5e567..0000000
--- a/src/_google
+++ /dev/null
@@ -1,94 +0,0 @@
-#compdef google
-# ------------------------------------------------------------------------------
-# Copyright (c) 2016 Github zsh-users - https://github.com/zsh-users
-# All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are met:
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above copyright
-# notice, this list of conditions and the following disclaimer in the
-# documentation and/or other materials provided with the distribution.
-# * Neither the name of the zsh-users nor the
-# names of its contributors may be used to endorse or promote products
-# derived from this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
-# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-# DISCLAIMED. IN NO EVENT SHALL ZSH-USERS BE LIABLE FOR ANY
-# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
-# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
-# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-# ------------------------------------------------------------------------------
-# Description
-# -----------
-#
-# Completion script for googlecl (https://code.google.com/p/googlecl/)
-#
-# Source: https://raw.github.com/dadrc/zsh-cfg/master/completions/_google
-#
-# ------------------------------------------------------------------------------
-# Authors
-# -------
-#
-# * dadrc (https://github.com/dadrc)
-# * Ben O'Hara (https://github.com/benohara)
-#
-# ------------------------------------------------------------------------------
-
-_google() {
- # init variables
- local curcontext="$curcontext" state line
- typeset -A opt_args
-
- # init state
- _arguments \
- '1: :->service'\
- '2: :->task'
-
- case $state in
- service)
- _arguments '1:service:(picasa blogger youtube docs contacts calendar finance)'
- ;;
- *)
- case $words[2] in
- picasa)
- compadd "$@" get create list list-albums tag post delete
- ;;
- blogger)
- compadd "$@" post tag list delete
- ;;
- youtube)
- compadd "$@" post tag list delete
- ;;
- docs)
- compadd "$@" edit delete list upload get
- ;;
- contacts)
- compadd "$@" list list-groups add add-groups delete-groups delete
- ;;
- calendar)
- compadd "$@" add list today delete
- ;;
- finance)
- compadd "$@" list-txn delete-pos create-pos delete-txn create create-txn list list-pos delete
- ;;
- *)
- esac
- esac
-}
-
-_google "$@"
-
-# Local Variables:
-# mode: Shell-Script
-# sh-indentation: 2
-# indent-tabs-mode: nil
-# sh-basic-offset: 2
-# End:
-# vim: ft=zsh sw=2 ts=2 et
diff --git a/src/_jmeter b/src/_jmeter
index 4d7b01d..6a60aee 100644
--- a/src/_jmeter
+++ b/src/_jmeter
@@ -5,8 +5,6 @@
#
# Completion script for JMeter (https://jmeter.apache.org/).
#
-# Status: incomplete
-#
# ------------------------------------------------------------------------------
# Authors
# -------
@@ -15,7 +13,6 @@
#
# ------------------------------------------------------------------------------
-
_arguments \
'(- 1 *)--?[print command line options and exit]' \
'(- 1 *)'{-h,--help}'[print usage information and exit]' \
@@ -29,7 +26,6 @@ _arguments \
{-j,--jmeterlogfile}'[jmeter run file]: :_files -g "*.log"' \
{-n,--nongui}'[run JMeter in nongui mode]' \
{-s,--server}'[run the JMeter server]' \
- {-E,--proxyScheme}'[set a proxy scheme to use for the proxy server]:scheme' \
{-H,--proxyHost}'[set a proxy server for JMeter to use]: :_hosts' \
{-P,--proxyPort}'[set proxy server port for JMeter to use]:number' \
{-N,--nonProxyHosts}'[set non proxy host list]:host' \
@@ -45,7 +41,7 @@ _arguments \
{-R,--remotestart}'[start these remote servers (overrides remote_hosts)]:remote servers list' \
{-d,--homedir}'[the JMeter home directory to use]: :_files -/' \
{-X,--remoteexit}'[exit the remote servers at end of test (non-GUI)]' \
- {-g,--removeonly}'[generate report dashboard only, from a test results file]: :_files' \
+ {-g,--reportonly}'[generate report dashboard only, from a test results file]: :_files' \
{-e,--reportatendofloadtests}'[generate report dashboard after load test]' \
{-o,--reportoutputfolder}'[output folder for report dashboard]: :_files -/'
diff --git a/src/_knife b/src/_knife
deleted file mode 100644
index 959668e..0000000
--- a/src/_knife
+++ /dev/null
@@ -1,324 +0,0 @@
-#compdef knife
-# ------------------------------------------------------------------------------
-# Copyright (c) 2009-2015 Robby Russell and contributors (see
-# https://github.com/ohmyzsh/ohmyzsh/graphs/contributors)
-#
-# Permission is hereby granted, free of charge, to any person obtaining a copy
-# of this software and associated documentation files (the "Software"), to deal
-# in the Software without restriction, including without limitation the rights
-# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-# copies of the Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice shall be included in
-# all copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-# THE SOFTWARE.
-# ------------------------------------------------------------------------------
-# Description
-# -----------
-#
-# Completion script for Chef's knife (https://www.chef.io/).
-#
-# Source: https://github.com/ohmyzsh/ohmyzsh/blob/22fed4f/plugins/knife/_knife
-#
-# ------------------------------------------------------------------------------
-# Authors
-# -------
-#
-# * Frank Louwers (https://github.com/franklouwers)
-# * Mark Cornick (https://github.com/markcornick)
-#
-# ------------------------------------------------------------------------------
-
-
-# You can override the path to knife.rb and your cookbooks by setting
-# KNIFE_CONF_PATH=/path/to/my/.chef/knife.rb
-# KNIFE_COOKBOOK_PATH=/path/to/my/chef/cookbooks
-# If you want your local cookbooks path to be calculated relative to where you are then
-# set the below option
-# KNIFE_RELATIVE_PATH=true
-# Read around where these are used for more detail.
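# For example (hypothetical paths), these can be exported from ~/.zshrc:
#   export KNIFE_CONF_PATH=~/chef-repo/.chef/knife.rb
#   export KNIFE_COOKBOOK_PATH=~/chef-repo/cookbooks
#   export KNIFE_RELATIVE_PATH=true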
-
-# knife has a very special syntax, some example calls are:
-# knife status
-# knife cookbook list
-# knife role show ROLENAME
-# knife data bag show DATABAGNAME
-# knife role show ROLENAME --attribute ATTRIBUTENAME
-# knife cookbook show COOKBOOKNAME COOKBOOKVERSION recipes
-
-# The -Q switch in compadd allows completions of things like "data bag" without having to go through two rounds of completion, and avoids zsh inserting a \ for escaping spaces
-_knife() {
- # These flags should be available everywhere according to man knife
- local -a knife_general_flags; knife_general_flags=(--help --server-url --key --config --editor --format --log_level --logfile --no-editor --user --print-after --version --yes)
-
- local curcontext="$curcontext" state line
- typeset -A opt_args
- local -a cloudproviders; cloudproviders=(bluebox ec2 rackspace slicehost terremark)
- _arguments \
- '1: :->knifecmd' \
- '2: :->knifesubcmd' \
- '3: :->knifesubcmd2' \
- '4: :->knifesubcmd3' \
- '5: :->knifesubcmd4' \
- '6: :->knifesubcmd5'
-
- case $state in
- knifecmd)
- compadd -Q "$@" bootstrap client configure cookbook "cookbook site" "data bag" diff exec environment user index node recipe role search solo ssh status upload vault windows "$cloudproviders[@]"
- ;;
- knifesubcmd)
- case $words[2] in
- bluebox|ec2|rackspace|slicehost|terremark)
- compadd "$@" server images
- ;;
- client)
- compadd -Q "$@" "bulk delete" list create show delete edit reregister
- ;;
- configure)
- compadd "$@" client
- ;;
- cookbook)
- compadd -Q "$@" test list create download delete "metadata from" show "bulk delete" metadata upload
- ;;
- diff)
- _arguments '*:file or directory:_files -g "*"'
- ;;
- environment)
- compadd -Q "$@" list create delete edit show "from file"
- ;;
- user)
- compadd -Q "$@" create delete edit list reregister show
- ;;
- node)
- compadd -Q "$@" "from file" create show edit delete list run_list "bulk delete"
- ;;
- recipe)
- compadd "$@" list
- ;;
- role)
- compadd -Q "$@" "bulk delete" create delete edit "from file" list show
- ;;
- solo)
- compadd "$@" bootstrap clean cook init prepare
- ;;
- upload)
- _arguments '*:file or directory:_files -g "*"'
- ;;
- vault)
- compadd -Q "$@" create decrypt delete edit remove "rotate all keys" "rotate keys" show update
- ;;
- windows)
- compadd "$@" bootstrap
- ;;
- *)
- _arguments '2:Subsubcommands:($(_knife_options1))'
- ;;
- esac
- ;;
- knifesubcmd2)
- case $words[3] in
- server)
- compadd "$@" list create delete
- ;;
- images)
- compadd "$@" list
- ;;
- site)
- compadd "$@" vendor show share search download list unshare
- ;;
- show|delete|edit|update)
- _arguments '3:Subsubcommands:($(_knife_list_remote "$words[2]"))'
- ;;
- upload|test)
- _arguments '3:Subsubcommands:($(_call_function - "_knife_list_local_$words[2]s") --all)'
- ;;
- list)
- compadd -a "$@" knife_general_flags
- ;;
- bag)
- compadd -Q "$@" show edit list "from file" create delete
- ;;
- bootstrap|clean|cook|prepare)
- compadd "$@" nodes/*.json(N:t:r)
- ;;
- init)
- compadd "$@" ./*(/N:t)
- ;;
- *)
- _arguments '3:Subsubcommands:($(_knife_options2))'
- ;;
- esac
- ;;
- knifesubcmd3)
- case "$words[3]" in
- show)
- case "$words[2]" in
- cookbook)
- versioncomp=1
- _arguments '4:Cookbookversions:($(_knife_cookbook_versions) latest)'
- ;;
- node|client|role)
- compadd "$@" --attribute
- ;;
- vault)
- _arguments '4:Keys:($(_knife_list_remote "$words[2]" "$words[4]"))'
- ;;
- esac
- ;;
- update)
- case "$words[2]" in
- vault)
- _arguments '4:Keys:($(_knife_list_remote "$words[2]" "$words[4]"))'
- ;;
- esac
- ;;
- esac
- case "$words[4]" in
- show|edit)
- _arguments '4:Subsubsubcommands:($(_knife_list_remote "$words[2]" "$words[3]"))'
- ;;
- file)
- case "$words[2]" in
- environment)
- _arguments '*:files:_path_files -g "*.(rb|json)" -W "$(_knife_root)/environments"'
- ;;
- node)
- _arguments '*:files:_path_files -g "*.(rb|json)" -W "$(_knife_root)/nodes"'
- ;;
- role)
- _arguments '*:files:_path_files -g "*.(rb|json)" -W "$(_knife_root)/roles"'
- ;;
- *)
- _arguments '*:Subsubcommands:($(_knife_options3))'
- ;;
- esac
- ;;
- list)
- compadd -a "$@" knife_general_flags
- ;;
- *)
- _arguments '*:Subsubcommands:($(_knife_options3))'
- ;;
- esac
- ;;
- knifesubcmd4)
- if ((versioncomp > 0)); then
- compadd "$@" attributes definitions files libraries providers recipes resources templates
- else
- case "$words[5]" in
- file)
- _arguments '*:directory:_path_files -/ -W "$(_knife_root)/data_bags" -qS \ '
- ;;
- *) _arguments '*:Subsubcommands:($(_knife_options2))' ;;
- esac
- fi
- ;;
- knifesubcmd5)
- case "$words[5]" in
- file)
- _arguments '*:files:_path_files -g "*.json" -W "$(_knife_root)/data_bags/$words[6]"'
- ;;
- *)
- _arguments '*:Subsubcommands:($(_knife_options3))'
- ;;
- esac
- ;;
- esac
-}
-
-# Helper functions to provide the argument completion for several depths of commands
-_knife_options1() {
- local line
- for line in $(_call_program commands knife "$words[2]" --help | grep -v "^knife"); do
- echo $line | grep "\-\-"
- done
-}
-
-_knife_options2() {
- local line
- for line in $(_call_program commands knife "$words[2]" "$words[3]" --help | grep -v "^knife"); do
- echo $line | grep "\-\-"
- done
-}
-
-_knife_options3() {
- local line
- for line in $(_call_program commands knife "$words[2]" "$words[3]" "$words[4]" --help | grep -v "^knife"); do
- echo $line | grep "\-\-"
- done
-}
-
-# get a list of objects of type x on the server
-_knife_list_remote() {
- case "$*" in
- role|client|node|cookbook|"cookbook site"|"data bag"|environment|user|vault)
- _call_program commands knife "$@" list --format json \
- | grep \" \
- | awk '{print $1}' \
- | awk -F"," '{print $1}' \
- | awk -F"\"" '{print $2}'
- ;;
- "vault "*)
- _call_program commands knife vault show "$2" --format json \
- | grep \" \
- | awk '{print $1}' \
- | awk -F"," '{print $1}' \
- | awk -F"\"" '{print $2}'
- ;;
- esac
-}
-
-# The chef_x_local functions use the knife config to find the paths of relevant objects x to be uploaded to the server
-_knife_list_local_cookbooks() {
- if [ $KNIFE_RELATIVE_PATH ]; then
- local cookbook_path="$(_knife_root)/cookbooks"
- else
- local knife_rb="${KNIFE_CONF_PATH:-${HOME}/.chef/knife.rb}"
- if [ -f ./.chef/knife.rb ]; then
- knife_rb="./.chef/knife.rb"
- fi
- local cookbook_path="${KNIFE_COOKBOOK_PATH:-$(grep -s cookbook_path "$knife_rb" | awk 'BEGIN {FS = "[" }; {print $2}' | sed 's/\,//g' | sed "s/'//g" | sed 's/\(.*\)]/\1/' | cut -d '"' -f2)}"
- fi
-
- local i
- for i in $cookbook_path; do
- ls $i
- done
-}
-
-# This function extracts the available cookbook versions on the chef server
-_knife_cookbook_versions() {
- _call_program commands knife cookbook show "$words[4]" \
- | grep -v "$words[4]" \
- | grep -v -E '\]|\[|\{|\}' \
- | sed 's/ //g' \
- | sed 's/"//g'
-}
-
-# Searches up from current directory to find the closest folder that has a .chef folder
-# Useful for the knife upload/from file commands
-_knife_root() {
- local directory="$PWD"
- while [ $directory != '/' ]; do
- test -e "$directory/.chef" && echo "$directory" && return
- directory="${directory:h}"
- done
-}
-
-_knife "$@"
-
-# Local Variables:
-# mode: Shell-Script
-# sh-indentation: 2
-# indent-tabs-mode: nil
-# sh-basic-offset: 2
-# End:
-# vim: ft=zsh sw=2 ts=2 et
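A minimal sketch of the compadd -Q behaviour described in the deleted script's comment (illustrative function, not part of any completion here): without -Q, a match such as "data bag" would be inserted as "data\ bag".

    _demo_multiword() {
      # -Q adds the matches unquoted, so the embedded space is kept verbatim
      compadd -Q -- "data bag" "cookbook site"
    }
    compdef _demo_multiword demo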
diff --git a/src/_l3build b/src/_l3build
index 29d2d60..eb311e7 100644
--- a/src/_l3build
+++ b/src/_l3build
@@ -3,7 +3,7 @@
# Description
# -----------
#
-# Completion script for l3build (https://github.com/latex3/l3build/).
+# Completion script for l3build 2025-09-03 (https://github.com/latex3/l3build/).
# Modified from rejected https://github.com/latex3/l3build/pull/267
#
# ------------------------------------------------------------------------------
@@ -14,49 +14,60 @@
#
# ------------------------------------------------------------------------------
-__l3build() {
+_l3build_targets() {
local targets=(
- 'check:Run\ all\ automated\ tests'
- 'clean:Clean\ out\ directory\ tree'
- 'ctan:Create\ CTAN-ready\ archive'
- 'doc:Typesets\ all\ documentation\ files'
- 'install:Installs\ files\ into\ the\ local\ texmf\ tree'
- 'manifest:Creates\ a\ manifest\ file'
- 'save:Saves\ test\ validation\ log'
- 'tag:Updates\ release\ tags\ in\ files'
- 'uninstall:Uninstalls\ files\ from\ the\ local\ texmf\ tree'
- 'unpack:Unpacks\ the\ source\ files\ into\ the\ build\ tree'
- 'upload:Send\ archive\ to\ CTAN\ for\ public\ release'
+ 'check:Run all automated tests'
+ 'clean:Clean out directory tree'
+ 'ctan:Create CTAN-ready archive'
+ 'doc:Typesets all documentation files'
+ 'install:Installs files into the local texmf tree'
+ 'manifest:Creates a manifest file'
+ 'save:Saves test validation log'
+ 'tag:Updates release tags in files'
+ 'uninstall:Uninstalls files from the local texmf tree'
+ 'unpack:Unpacks the source files into the build tree'
+ 'upload:Send archive to CTAN for public release'
)
- local options=(
- {--config,-c}'[Sets the config(s) used for running tests]':lua_file:'_files -g "*.lua"'
- --date'[Sets the date to insert into sources]':
- --debug'[Runs target in debug mode]'
- --dirty'[Skip cleaning up the test area]'
- --dry-run'[Dry run for install or upload]'
- --email'[Email address of CTAN uploader]':
- {--engine,-e}'[Sets the engine(s) to use for running test]':engine:'(pdftex xetex luatex ptex uptex)'
- --epoch'[Sets the epoch for tests and typesetting]':
- {--file,-F}'[Take the upload announcement from the given file]':file:_files
- --first'[Name of first test to run]':
- {--force,-f}'[Force tests to run if engine is not set up]'
- --full'[Install all files]'
- {--halt-on-error,-H}'[Stops running tests after the first failure]'
- '(- : *)'{--help,-h}'[Print this message and exit]'
- --last'[Name of last test to run]':
- {--message,-m}'[Text for upload announcement message]':
- {--quiet,-q}'[Suppresses TeX output when unpacking]'
- --rerun'[Skip setup\: simply rerun tests]'
- --show-log-on-error'[Show the full log of the failure with '\''halt-on-error'\'']'
- {--show-saves,-S}'[Show the invocation to update failing .tlg files]'
- --shuffle'[Shuffle order of tests]'
- --texmfhome'[Location of user texmf tree]':
- '(- : *)'--version'[Print version information and exit]'
- )
- _arguments -s -S $options "1:target:(($targets))"
+
+ _describe -t targets 'target' targets "$@"
+}
+
+_l3build() {
+ local ret=1
+ local -a engines=(pdftex xetex luatex ptex uptex)
+
+ _arguments -s -S \
+ '(-c --config)'\*{-c,--config}'[Set the config(s) used for running tests]:lua_file:_files -g "*.(lua|tlg)"' \
+ '(-d --date)'{-d,--date}'[Set the date to insert into sources]:date' \
+ '--debug[Run target in debug mode]' \
+    '--dev[Use the development LaTeX format]' \
+ '--dirty[Skip cleaning up the test area]' \
+ '--dry-run[Dry run for install or upload]' \
+ '--email[Email address of CTAN uploader]:email' \
+    '(-e --engine)'{--engine,-e}'[Set the engine(s) to use for running tests]:engine:(($engines))' \
+    '--epoch[Set the epoch for tests and typesetting]:epoch' \
+ '(-F --file)'{--file,-F}'[Take the upload announcement from the given file]:file:_files' \
+ '--first[Name of first test to run]:name' \
+ '--full[Install all files]' \
+    '(-H --halt-on-error)'{-H,--halt-on-error}'[Stop running tests after the first failure]' \
+ '(- : *)'{-h,--help}'[Print this message and exit]' \
+ '--last[Name of last test to run]:name' \
+ '(-m --message)'{-m,--message}'[Text for upload announcement message]:message' \
+    '(-q --quiet)'{-q,--quiet}'[Suppress TeX output when unpacking]' \
+ '--rerun[Skip setup\: simply rerun tests]' \
+ '--show-log-on-error[Show the full log of the failure with "halt-on-error"]' \
+ '(-S --show-saves)'{-S,--show-saves}'[Show the invocation to update failing .tlg files]' \
+ '--shuffle[Shuffle order of tests]' \
+ '(-s --stdengine)'{-s,--stdengine}'[Run tests only with the standard engine]' \
+ '--texmfhome[Location of user texmf tree]:location:_files' \
+ '(- : *)--version[Print version information and exit]' \
+ "1:target:_l3build_targets" \
+ && ret=0
+
+ return ret
}
-__l3build
+_l3build "$@"
# Local Variables:
# mode: Shell-Script
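Moving from the escaped name:description array passed as "(($targets))" to _describe is what removes the backslash-escaped spaces above: _describe splits each element on the first colon itself. A minimal sketch of the pattern (abbreviated target list):

    local -a targets=(
      'check:Run all automated tests'
      'doc:Typesets all documentation files'
    )
    _describe -t targets 'target' targets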
diff --git a/src/_mussh b/src/_mussh
index d52cb40..2fe47b1 100644
--- a/src/_mussh
+++ b/src/_mussh
@@ -38,44 +38,32 @@
#
# ------------------------------------------------------------------------------
-local curcontext="$curcontext" state line ret=1
-typeset -A opt_args
-
-# TODO:
-# -i: admit more than one use
-# -d, -v, -t: control input
-# -m: Accept number (but also empty)
-# -h: Accept multiple hosts, also more than one use
-# -H: Accept multiple files, also more than one use
-
-_arguments -C \
- '--help[display this help message]' \
- '-V[print version info and exit]' \
- '-d[Verbose debug]:level (from 0 to 2)' \
- '-v[SSH debug level]:level (from 0 to 3)' \
- '-m[Run concurrently]' \
- '(-b -B)-b[Print each hosts output in a block without mingling with other hosts output]' \
- '(-b -B)-B[Allow hosts output to mingle. (default)]' \
+_arguments \
+ '(- *)--help[display this help message]' \
+ '-d-[Verbose debug]:level:(0 1 2)' \
+ '-v-[SSH debug levels]:level:(0 1 2 3)' \
+  '-m-[Run concurrently on the given number of hosts at a time; "0" means no limit]:count' \
+ '-q[No output unless necessary]' \
+ '*-i[Load an identity file. May be used more than once]:identity:_files' \
+ '-o[Args to pass to ssh with -o option]:ssh-args' \
'(-a -A)-a[Force loading ssh-agent]' \
'(-a -A)-A[Do NOT load ssh-agent]' \
+  '(-b -B)-b[Print the output of each host in a block, without mingling it with output from other hosts]' \
+  '(-b -B)-B[Allow output from different hosts to mingle (default)]' \
'(-u -U)-u[Unique. Eliminate duplicate hosts. (default)]' \
'(-u -U)-U[Do NOT make host list unique]' \
'-P[Do NOT fall back to passwords on any host. This will skip hosts where keys fail]' \
- '-i[Load an identity file. May be used more than once]:identity' \
- '-o[Args to pass to ssh with -o option]:ssh-args' \
'(-l -L)-l[Use _login_ when no other is specified with the hostname]:login' \
'(-l -L)-L[Force use of _login_ on all hosts]:login' \
'-s[Path to shell on remote host]:shell' \
'-t[Timeout setting for each session]:timeout' \
- '-p[Host to use as proxy]:[user@]host' \
+ '(- *)-V[print version info and exit]' \
+ '-p[Host to use as proxy]:[user@]host:_hosts' \
'-po[Args to pass to ssh on proxy with -o option]:ssh-args' \
- '(-h -H)-h[Add a host to list of hosts]:[user@]host' \
- '(-h -H)-H[Add contents of file to list of hosts]:host file:_files' \
+ '*-h[Add a host to list of hosts]:user@host' \
+ '*-H[Add contents of file to list of hosts]:host file:_files' \
'(-c -C)-c[Add a command or quoted list of commands to list of commands to be executed on each host]:command' \
- '(-c -C)-C[Add file contents to list of commands to be executed on each host]:commands file:_files' \
- '(-q)-q[No output unless necessary]' && ret=0
-
-return ret
+ '(-c -C)-C[Add file contents to list of commands to be executed on each host]:commands file:_files'
# Local Variables:
# mode: Shell-Script
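For reference, an invocation exercising the repeatable -h and the -m count as modelled by the spec above (host names are placeholders):

    mussh -m 2 -h web1 -h web2 -c 'uptime'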
diff --git a/src/_networkQuality b/src/_networkQuality
index 136edd0..e278b3c 100644
--- a/src/_networkQuality
+++ b/src/_networkQuality
@@ -34,10 +34,10 @@
#
# ------------------------------------------------------------------------------
-_arguments \
+_arguments -s \
'-b[Show Bonjour advertised networkQuality servers]' \
'-B[Run against specified Bonjour instance]:name' \
- '-C[Use custom configuration URL or path]:url_or_path' \
+ '-C[Use custom configuration URL or path]:url_or_path:_urls' \
'-f[Force usage of a specific protocol selection]:option:(h1 h2 h3 L4S noL4S)' \
'-I[Bind test to interface]:interface:_net_interfaces' \
'-M[Maximum runtime in seconds]:seconds' \
diff --git a/src/_node b/src/_node
index b67807b..21a2ab8 100644
--- a/src/_node
+++ b/src/_node
@@ -69,182 +69,190 @@ _node_scripts() {
_describe 'scripts' scripts
}
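# _node_scripts above backs the --run= spec further down. Assuming
# package.json defines a "build" script (illustrative name):
#   node --run build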
-_arguments \
- '-[script read from stdin (default; interactive mode if a tty)]' \
- '--[indicate the end of node options]' \
- '--abort-on-uncaught-exception[aborting instead of exiting causes a core file to be generated for analysis]' \
- '--allow-addons[allow use of addons when any permissions are set]' \
- '--allow-child-process[allow use of child process when any permissions are set]' \
- '--allow-fs-read=[allow permissions to read the filesystem]: :_files' \
- '--allow-fs-write=[allow permissions to write in the filesystem]:_files' \
- '--allow-inspector[allow use of inspector when any permissions are set]' \
- '--allow-net[allow use of network when any permissions are set]' \
- '--allow-wasi[allow wasi when any permissions are set]' \
- '--allow-worker[allow worker threads when any permissions are set]' \
- '--build-snapshot[generate a snapshot blob when the process exits]' \
- '--build-snapshot-config=[generate a snapshot blob when the process exits using a JSON configuration in the specified path]:path:_files' \
- {-c,--check}'[syntax check script without executing]' \
- '--completion-bash[print source-able bash completion script]' \
- '*'{-C,--conditions=}'[additional user conditions for conditional exports and imports]:condition' \
- '--cpu-prof[Start the V8 CPU profiler on start up]' \
-  '--cpu-prof-dir=[directory where the V8 profiles generated by --cpu-prof are placed]:dir:_files -/' \
- '--cpu-prof-interval=[sampling interval in microseconds for the V8 CPU profiler]:number' \
- '--cpu-prof-name=[file name of the V8 profile generated with --cpu-prof]: :_files' \
-  '--diagnostic-dir=[set dir for all output files (default: current working directory)]:dir:_files -/' \
- '--disable-proto=[disable Object.prototype.__proto__]:mode:(delete throw)' \
- '--disable-sigusr1[disable inspector thread to be listening for SIGUSR1 signal]' \
- '*--disable-warning=[silence specific process warnings]:warn type' \
- '--disable-wasm-trap-handler[disable trap-handler-based WebAssembly bound checks]' \
- '--disallow-code-generation-from-strings[disallow eval and friends]' \
- '--dns-result-order=[set default value of verbatim in dns.lookup]: :(ipv4first verbatim)' \
- '--enable-etw-stack-walking[provides heap data to ETW Windows native tracing]' \
- '--enable-fips[enable FIPS crypto at startup]' \
- '--enable-source-maps[source map support for stack traces]' \
- '--entry-url[treat the entrypoint as a URL]' \
- '*--env-file=[set environment variables from supplied file]:envfile:_files' \
- '*--env-file-if-exists=[set environment variables from supplied file if exists]:envfile:_files' \
- '(- 1 *)'{-e,--eval}'[evaluate script]:inline JavaScript' \
- '--experimental-addon-modules[enable experimental import support for addons]' \
- '--experimental-config-file=[set config file from supplied file]:file:_files' \
- '--experimental-default-config-file[set config file from default config file]' \
- '--experimental-eventsource[enable experimental EventSource API]' \
- '--experimental-import-meta-resolve[experimental ES Module import.meta.resolve() support]' \
-  '--experimental-inspector-network-resource[enable experimental loading of network resources via the inspector]' \
- '(--loader --experimental-loader)'{--loader,--experimental-loader}'=[Specify the module of a custom ECMAScript Module loader]: :_files' \
- '--experimental-network-inspection[enable experimental network inspection support]' \
- '--experimental-print-required-tla[print pending top-level await]' \
- '--experimental-quic[experimental QUIC support]' \
- '--experimental-sea-config=[generate a blob that can be embedded into the single executable application]: :_files' \
- '--experimental-test-coverage[enable code coverage in the test runner]' \
- '--experimental-test-module-mocks[enable module mocking in the test runner]' \
-  '--experimental-transform-types[enable transformation of TypeScript-only syntax into JavaScript code]' \
- '--experimental-vm-modules[experimental ES Module support in vm module]' \
- '--experimental-worker-inspection[experimental worker inspection support]' \
- '--expose-gc[expose gc extension]' \
- '--force-context-aware[disable loading non-context-aware addons]' \
- '--force-fips[force FIPS crypto]' \
- '--force-node-api-uncaught-exceptions-policy[enforces "uncaughtException" event on Node API asynchronous callbacks]' \
- '--frozen-intrinsics[experimental frozen intrinsics support]' \
- '--heap-prof[Start the V8 heap profiler on start up]' \
-  '--heap-prof-dir=[Directory where the V8 profiles generated by --heap-prof are placed]: :_files -/' \
- '--heap-prof-interval=[sampling interval in bytes for the V8 heap profile]: :number' \
- '--heap-prof-name=[file name of the V8 heap profile generated]: :_files' \
- '--heapsnapshot-near-heap-limit=[Generate heapsnapshots whenever V8 is approaching the heap limit]:limit' \
- '--heapsnapshot-signal=[Generate heap snapshot on specified signal]:signals:_signals -s' \
- '--icu-data-dir=[set ICU data load path to dir (overrides NODE_ICU_DATA) note: linked-in ICU data is present]: :_directories' \
- '--import=[ES module to preload]:module:_node_files' \
-  '--input-type=[set module type for string input]:module type:(commonjs module)' \
- '--insecure-http-parser[Use an insecure HTTP parser that accepts invalid HTTP headers]' \
- '--inspect=-[activate inspector on host:port (default: 127.0.0.1:9229)]:host_port' \
- '--inspect-brk=-[activate inspector on host:port and break at start of user script]:host_port' \
- '(--debug-port --inspect-port)'{--debug-port,--inspect-port}'=[set host:port for inspector]:host_port' \
- '--inspect-publish-uid=[comma separated list of destinations for inspector uid]' \
- '--inspect-wait=-[activate inspector on host:port and wait for debugger to be attached]::host_port' \
- {-i,--interactive}'[always enter the REPL even if stdin does not appear to be a terminal]' \
- '--interpreted-frames-native-stack[help system profilers to translate JavaScript interpreted frames]' \
- '--jitless[Disable runtime allocation of executable memory]' \
- '--localstorage-file=[file used to persist localStorage data]:file:_files' \
- '--max-http-header-size=[set the maximum size of HTTP headers]: :number' \
- "--max-old-space-size-percentage=[set V8's max old space size as a percentage of available memory]:memory" \
- '--network-family-autoselection-attempt-timeout=[sets the default value for the network family autoselection attempt timeout]:timeout' \
- '--no-addons[disable loading native addons]' \
- '--no-async-context-frame[improve AsyncLocalStorage performance with AsyncContextFrame]' \
- '--no-deprecation[silence deprecation warnings]' \
- '--no-experimental-detect-module[when ambiguous modules fail to evaluate, try again to evaluate them as ES modules]' \
- '--no-experimental-global-navigator[expose experimental Navigator API on the global scope]' \
- '--no-experimental-repl-await[disable experimental await keyword support in REPL]' \
- '--no-experimental-require-module[allow loading synchronous ES Modules in require()]' \
- '--no-experimental-sqlite[disable experimental node sqlite module]' \
- '--no-experimental-strip-types[disable experimental type-stripping for TypeScript files]' \
- '--no-experimental-websocket[experimental WebSocket API (currently set)]' \
- '--no-extra-info-on-fatal-exception[hide extra information on fatal exception that causes exit]' \
- '--no-force-async-hooks-checks[disable checks for async_hooks]' \
- '--no-global-search-paths[disable global module search paths]' \
- '--no-network-family-autoselection[disable network address family autodetection algorithm]' \
- '--no-warnings[silence all process warnings]' \
- '--no-webstorage[disable Web Storage API]' \
- '--node-memory-debug[run with extra debug checks for memory leaks in Node.js itself]' \
- '--openssl-config=[load OpenSSL configuration from the specified file (overrides OPENSSL_CONF)]:file:_files' \
- '--openssl-legacy-provider[enable OpenSSL 3.0 legacy provider]' \
- '--openssl-shared-config[enable OpenSSL shared configuration]' \
- '--pending-deprecation[emit pending deprecation warnings]' \
- '--preserve-symlinks[preserve symbolic links when resolving]' \
- '--preserve-symlinks-main[preserve symbolic links when resolving the main module]' \
- '(- 1 *)'{-p,--print}'[evaluate script and print result]:inline JavaScript' \
- '--prof[generate V8 profiler output]' \
- '--prof-process[process V8 profiler output generated using --prof]' \
- '--redirect-warnings=[write warnings to file instead of stderr]: :_files' \
- '--report-compact[output compact single-line JSON]' \
- '--report-directory=[custom report path]: :_files -/' \
-  '--report-exclude-network[exclude network interface diagnostics (default: false)]' \
- '--report-filename=[custom report file name]: :_files' \
- '--report-on-fatalerror[generate diagnostic report on fatal (internal) errors]' \
- '--report-on-signal=[generate diagnostic report upon receiving signals]' \
- '--report-signal=[causes diagnostic report to be produced on provided signal]:signals:_signals -s' \
- '--report-uncaught-exception[generate diagnostic report on uncaught exceptions]' \
- '*'{-r,--require}'[module to preload (option can be repeated)]: :_node_files' \
- '--run=[run a script specified in package.json]:script:_node_scripts' \
- '--secure-heap=[total size of the OpenSSL secure heap]: :number' \
- '--secure-heap-min=[minimum allocation size from the OpenSSL secure heap]' \
- '--snapshot-blob=[path to the snapshot blob that is used to restore the application state]:snapshot:_files' \
- '--test[launch test runner on startup]' \
- '--test-concurrency=[specify test runner concurrency]:concurrency' \
- '--test-coverage-branches=[the branch coverage minimum threshold]:threshold' \
- '--test-coverage-exclude=[exclude files from coverage report that match this glob pattern]:pattern' \
- '--test-coverage-functions=[the function coverage minimum threshold]:threshold' \
- '--test-coverage-include=[include files from coverage report that match this glob pattern]:pattern' \
- '--test-coverage-lines=[the line coverage minimum threshold]:threshold' \
- '--test-force-exit[force test runner to exit upon completion]' \
- '--test-global-setup=[specify the path to the global setup file]:file:_files' \
-  '--test-isolation=[configures the type of test isolation used in the test runner]:isolation' \
- '--test-name-pattern=[run tests whose name matches this regular expression]:pattern' \
- '--test-only[run tests with "only" option set]' \
- '*--test-reporter=[report test output using the given reporter]:reporter:(tap spec dot)' \
- '*--test-reporter-destination=[report given reporter to the given destination]:destination:_files' \
- '--test-rerun-failures=[specify the path to the rerun state file]:state file:_files' \
- '--test-shard=[run test at specific shard]:shard' \
-  '--test-skip-pattern=[run tests whose names do not match this regular expression]' \
- '--test-timeout=[specify test runner timeout]' \
- '--test-update-snapshots[regenerate test snapshots]' \
- '--throw-deprecation[throw an exception on deprecations]' \
- '--title=[the process title to use on startup]:process title' \
- '--tls-cipher-list=[use an alternative default TLS cipher list]:cipher list string' \
- '--tls-keylog=[log TLS decryption keys to named file for traffic analysis]: :_files' \
- '(--tls-max-v1.3)--tls-max-v1.2[set default TLS maximum to TLSv1.2]' \
- '(--tls-max-v1.2)--tls-max-v1.3[set default TLS maximum to TLSv1.3]' \
- '(--tls-min-v1.1 --tls-min-v1.2 --tls-min-v1.3)--tls-min-v1.0[set default TLS minimum to TLSv1.0]' \
- '(--tls-min-v1.0 --tls-min-v1.2 --tls-min-v1.3)--tls-min-v1.1[set default TLS minimum to TLSv1.1]' \
- '(--tls-min-v1.0 --tls-min-v1.1 --tls-min-v1.3)--tls-min-v1.2[set default TLS minimum to TLSv1.2]' \
- '(--tls-max-v1.2 --tls-min-v1.0 --tls-min-v1.1 --tls-min-v1.2)--tls-min-v1.3[set default TLS minimum to TLSv1.3]' \
- '--trace-deprecation[show stack traces on deprecations]' \
- '--trace-env[print accesses to the environment variables]' \
- '--trace-env-js-stack[print accesses to the environment variables and the JavaScript stack trace]' \
-  '--trace-env-native-stack[print accesses to the environment variables and the native stack trace]' \
- '--trace-event-categories[comma separated list of trace event categories to record]: :{_values -s , categories node node.async_hooks node.bootstrap node.perf node.perf.usertiming node.perf.timerify node.fs.sync node.vm.script v8}' \
- '--trace-event-file-pattern[Template string specifying the filepath for the trace-events data, it supports ${rotation} and ${pid} log-rotation id. %2$u is the pid.]:template string' \
- '--trace-exit[show stack trace when an environment exits]' \
- '--trace-promises[show stack traces on promise initialization and resolution]' \
- '--trace-sigint[prints a stack trace on SIGINT]' \
- '--trace-sync-io[show stack trace when use of sync IO is detected after the first tick]' \
- '--trace-tls[prints TLS packet trace information to stderr]' \
- '--trace-uncaught[show stack traces for the throw behind uncaught exceptions]' \
- '--trace-warnings[show stack traces on process warnings]' \
- '--track-heap-objects[track heap object allocations for heap snapshots]' \
- '--unhandled-rejections=[define unhandled rejections behavior]:rejection behavior:(strict warn none)' \
- '--use-bundled-ca[use bundled CA store (default)]' \
- '--use-largepages=[re-map the Node.js static code to large memory pages at startup]:mode:(off on silent)' \
- "--use-openssl-ca[use OpenSSL's default CA store]" \
- "--use-system-ca[Use system's CA store]" \
- '(- 1 *)--v8-options[print v8 command line options]' \
- "--v8-pool-size=[set v8's thread pool size]:number" \
- "--watch[run in watch mode]" \
- '--watch-kill-signal=[kill signal to send to the process on watch mode restarts]:signal:_signals -s' \
- "--watch-path=[path to watch]: :_node_files" \
- '--watch-preserve-output[preserve outputs on watch mode restart]' \
- '--zero-fill-buffers[automatically zero-fill all newly allocated Buffer and SlowBuffer instances]' \
- '(- 1 *)'{-h,--help}'[print node command line options]' \
- '(- 1 *)'{-v,--version}'[print Node.js version]' \
- '*: :_node_args'
+_node() {
+ local ret=1
+
+ _arguments \
+ '-[script read from stdin (default; interactive mode if a tty)]' \
+ '--[indicate the end of node options]' \
+ '--abort-on-uncaught-exception[aborting instead of exiting causes a core file to be generated for analysis]' \
+ '--allow-addons[allow use of addons when any permissions are set]' \
+ '--allow-child-process[allow use of child process when any permissions are set]' \
+ '--allow-fs-read=[allow permissions to read the filesystem]: :_files' \
+ '--allow-fs-write=[allow permissions to write in the filesystem]:_files' \
+ '--allow-inspector[allow use of inspector when any permissions are set]' \
+ '--allow-net[allow use of network when any permissions are set]' \
+ '--allow-wasi[allow wasi when any permissions are set]' \
+ '--allow-worker[allow worker threads when any permissions are set]' \
+ '--build-snapshot[generate a snapshot blob when the process exits]' \
+ '--build-snapshot-config=[generate a snapshot blob when the process exits using a JSON configuration in the specified path]:path:_files' \
+ {-c,--check}'[syntax check script without executing]' \
+ '--completion-bash[print source-able bash completion script]' \
+ '*'{-C,--conditions=}'[additional user conditions for conditional exports and imports]:condition' \
+ '--cpu-prof[Start the V8 CPU profiler on start up]' \
+    '--cpu-prof-dir=[directory where the V8 profiles generated by --cpu-prof are placed]:dir:_files -/' \
+ '--cpu-prof-interval=[sampling interval in microseconds for the V8 CPU profiler]:number' \
+ '--cpu-prof-name=[file name of the V8 profile generated with --cpu-prof]: :_files' \
+    '--diagnostic-dir=[set dir for all output files (default: current working directory)]:dir:_files -/' \
+ '--disable-proto=[disable Object.prototype.__proto__]:mode:(delete throw)' \
+ '--disable-sigusr1[disable inspector thread to be listening for SIGUSR1 signal]' \
+ '*--disable-warning=[silence specific process warnings]:warn type' \
+ '--disable-wasm-trap-handler[disable trap-handler-based WebAssembly bound checks]' \
+ '--disallow-code-generation-from-strings[disallow eval and friends]' \
+ '--dns-result-order=[set default value of verbatim in dns.lookup]: :(ipv4first verbatim)' \
+ '--enable-etw-stack-walking[provides heap data to ETW Windows native tracing]' \
+ '--enable-fips[enable FIPS crypto at startup]' \
+ '--enable-source-maps[source map support for stack traces]' \
+ '--entry-url[treat the entrypoint as a URL]' \
+ '*--env-file=[set environment variables from supplied file]:envfile:_files' \
+ '*--env-file-if-exists=[set environment variables from supplied file if exists]:envfile:_files' \
+ '(- 1 *)'{-e,--eval}'[evaluate script]:inline JavaScript' \
+ '--experimental-addon-modules[enable experimental import support for addons]' \
+ '--experimental-config-file=[set config file from supplied file]:file:_files' \
+ '--experimental-default-config-file[set config file from default config file]' \
+ '--experimental-eventsource[enable experimental EventSource API]' \
+ '--experimental-import-meta-resolve[experimental ES Module import.meta.resolve() support]' \
+    '--experimental-inspector-network-resource[enable experimental loading of network resources via the inspector]' \
+ '(--loader --experimental-loader)'{--loader,--experimental-loader}'=[Specify the module of a custom ECMAScript Module loader]: :_files' \
+ '--experimental-network-inspection[enable experimental network inspection support]' \
+ '--experimental-print-required-tla[print pending top-level await]' \
+ '--experimental-quic[experimental QUIC support]' \
+ '--experimental-sea-config=[generate a blob that can be embedded into the single executable application]: :_files' \
+ '--experimental-test-coverage[enable code coverage in the test runner]' \
+ '--experimental-test-module-mocks[enable module mocking in the test runner]' \
+    '--experimental-transform-types[enable transformation of TypeScript-only syntax into JavaScript code]' \
+ '--experimental-vm-modules[experimental ES Module support in vm module]' \
+ '--experimental-worker-inspection[experimental worker inspection support]' \
+ '--expose-gc[expose gc extension]' \
+ '--force-context-aware[disable loading non-context-aware addons]' \
+ '--force-fips[force FIPS crypto]' \
+ '--force-node-api-uncaught-exceptions-policy[enforces "uncaughtException" event on Node API asynchronous callbacks]' \
+ '--frozen-intrinsics[experimental frozen intrinsics support]' \
+ '--heap-prof[Start the V8 heap profiler on start up]' \
+    '--heap-prof-dir=[Directory where the V8 profiles generated by --heap-prof are placed]: :_files -/' \
+ '--heap-prof-interval=[sampling interval in bytes for the V8 heap profile]: :number' \
+ '--heap-prof-name=[file name of the V8 heap profile generated]: :_files' \
+ '--heapsnapshot-near-heap-limit=[Generate heapsnapshots whenever V8 is approaching the heap limit]:limit' \
+ '--heapsnapshot-signal=[Generate heap snapshot on specified signal]:signals:_signals -s' \
+ '--icu-data-dir=[set ICU data load path to dir (overrides NODE_ICU_DATA) note: linked-in ICU data is present]: :_directories' \
+ '--import=[ES module to preload]:module:_node_files' \
+    '--input-type=[set module type for string input]:module type:(commonjs module)' \
+ '--insecure-http-parser[Use an insecure HTTP parser that accepts invalid HTTP headers]' \
+ '--inspect=-[activate inspector on host:port (default: 127.0.0.1:9229)]:host_port' \
+ '--inspect-brk=-[activate inspector on host:port and break at start of user script]:host_port' \
+ '(--debug-port --inspect-port)'{--debug-port,--inspect-port}'=[set host:port for inspector]:host_port' \
+ '--inspect-publish-uid=[comma separated list of destinations for inspector uid]' \
+ '--inspect-wait=-[activate inspector on host:port and wait for debugger to be attached]::host_port' \
+ {-i,--interactive}'[always enter the REPL even if stdin does not appear to be a terminal]' \
+ '--interpreted-frames-native-stack[help system profilers to translate JavaScript interpreted frames]' \
+ '--jitless[Disable runtime allocation of executable memory]' \
+ '--localstorage-file=[file used to persist localStorage data]:file:_files' \
+ '--max-http-header-size=[set the maximum size of HTTP headers]: :number' \
+ "--max-old-space-size-percentage=[set V8's max old space size as a percentage of available memory]:memory" \
+ '--network-family-autoselection-attempt-timeout=[sets the default value for the network family autoselection attempt timeout]:timeout' \
+ '--no-addons[disable loading native addons]' \
+ '--no-async-context-frame[improve AsyncLocalStorage performance with AsyncContextFrame]' \
+ '--no-deprecation[silence deprecation warnings]' \
+ '--no-experimental-detect-module[when ambiguous modules fail to evaluate, try again to evaluate them as ES modules]' \
+ '--no-experimental-global-navigator[expose experimental Navigator API on the global scope]' \
+ '--no-experimental-repl-await[disable experimental await keyword support in REPL]' \
+ '--no-experimental-require-module[allow loading synchronous ES Modules in require()]' \
+ '--no-experimental-sqlite[disable experimental node sqlite module]' \
+ '--no-experimental-strip-types[disable experimental type-stripping for TypeScript files]' \
+ '--no-experimental-websocket[experimental WebSocket API (currently set)]' \
+ '--no-extra-info-on-fatal-exception[hide extra information on fatal exception that causes exit]' \
+ '--no-force-async-hooks-checks[disable checks for async_hooks]' \
+ '--no-global-search-paths[disable global module search paths]' \
+ '--no-network-family-autoselection[disable network address family autodetection algorithm]' \
+ '--no-warnings[silence all process warnings]' \
+ '--no-webstorage[disable Web Storage API]' \
+ '--node-memory-debug[run with extra debug checks for memory leaks in Node.js itself]' \
+ '--openssl-config=[load OpenSSL configuration from the specified file (overrides OPENSSL_CONF)]:file:_files' \
+ '--openssl-legacy-provider[enable OpenSSL 3.0 legacy provider]' \
+ '--openssl-shared-config[enable OpenSSL shared configuration]' \
+ '--pending-deprecation[emit pending deprecation warnings]' \
+ '--preserve-symlinks[preserve symbolic links when resolving]' \
+ '--preserve-symlinks-main[preserve symbolic links when resolving the main module]' \
+ '(- 1 *)'{-p,--print}'[evaluate script and print result]:inline JavaScript' \
+ '--prof[generate V8 profiler output]' \
+ '--prof-process[process V8 profiler output generated using --prof]' \
+ '--redirect-warnings=[write warnings to file instead of stderr]: :_files' \
+ '--report-compact[output compact single-line JSON]' \
+ '--report-directory=[custom report path]: :_files -/' \
+    '--report-exclude-network[exclude network interface diagnostics (default: false)]' \
+ '--report-filename=[custom report file name]: :_files' \
+ '--report-on-fatalerror[generate diagnostic report on fatal (internal) errors]' \
+ '--report-on-signal=[generate diagnostic report upon receiving signals]' \
+ '--report-signal=[causes diagnostic report to be produced on provided signal]:signals:_signals -s' \
+ '--report-uncaught-exception[generate diagnostic report on uncaught exceptions]' \
+ '*'{-r,--require}'[module to preload (option can be repeated)]: :_node_files' \
+ '--run=[run a script specified in package.json]:script:_node_scripts' \
+ '--secure-heap=[total size of the OpenSSL secure heap]: :number' \
+ '--secure-heap-min=[minimum allocation size from the OpenSSL secure heap]' \
+ '--snapshot-blob=[path to the snapshot blob that is used to restore the application state]:snapshot:_files' \
+ '--test[launch test runner on startup]' \
+ '--test-concurrency=[specify test runner concurrency]:concurrency' \
+ '--test-coverage-branches=[the branch coverage minimum threshold]:threshold' \
+ '--test-coverage-exclude=[exclude files from coverage report that match this glob pattern]:pattern' \
+ '--test-coverage-functions=[the function coverage minimum threshold]:threshold' \
+ '--test-coverage-include=[include files from coverage report that match this glob pattern]:pattern' \
+ '--test-coverage-lines=[the line coverage minimum threshold]:threshold' \
+ '--test-force-exit[force test runner to exit upon completion]' \
+ '--test-global-setup=[specify the path to the global setup file]:file:_files' \
+    '--test-isolation=[configures the type of test isolation used in the test runner]:isolation' \
+ '--test-name-pattern=[run tests whose name matches this regular expression]:pattern' \
+ '--test-only[run tests with "only" option set]' \
+ '*--test-reporter=[report test output using the given reporter]:reporter:(tap spec dot)' \
+ '*--test-reporter-destination=[report given reporter to the given destination]:destination:_files' \
+ '--test-rerun-failures=[specify the path to the rerun state file]:state file:_files' \
+ '--test-shard=[run test at specific shard]:shard' \
+    '--test-skip-pattern=[run tests whose names do not match this regular expression]' \
+ '--test-timeout=[specify test runner timeout]' \
+ '--test-update-snapshots[regenerate test snapshots]' \
+ '--throw-deprecation[throw an exception on deprecations]' \
+ '--title=[the process title to use on startup]:process title' \
+ '--tls-cipher-list=[use an alternative default TLS cipher list]:cipher list string' \
+ '--tls-keylog=[log TLS decryption keys to named file for traffic analysis]: :_files' \
+ '(--tls-max-v1.3)--tls-max-v1.2[set default TLS maximum to TLSv1.2]' \
+ '(--tls-max-v1.2)--tls-max-v1.3[set default TLS maximum to TLSv1.3]' \
+ '(--tls-min-v1.1 --tls-min-v1.2 --tls-min-v1.3)--tls-min-v1.0[set default TLS minimum to TLSv1.0]' \
+ '(--tls-min-v1.0 --tls-min-v1.2 --tls-min-v1.3)--tls-min-v1.1[set default TLS minimum to TLSv1.1]' \
+ '(--tls-min-v1.0 --tls-min-v1.1 --tls-min-v1.3)--tls-min-v1.2[set default TLS minimum to TLSv1.2]' \
+ '(--tls-max-v1.2 --tls-min-v1.0 --tls-min-v1.1 --tls-min-v1.2)--tls-min-v1.3[set default TLS minimum to TLSv1.3]' \
+ '--trace-deprecation[show stack traces on deprecations]' \
+ '--trace-env[print accesses to the environment variables]' \
+ '--trace-env-js-stack[print accesses to the environment variables and the JavaScript stack trace]' \
+    '--trace-env-native-stack[print accesses to the environment variables and the native stack trace]' \
+ '--trace-event-categories[comma separated list of trace event categories to record]: :{_values -s , categories node node.async_hooks node.bootstrap node.perf node.perf.usertiming node.perf.timerify node.fs.sync node.vm.script v8}' \
+ '--trace-event-file-pattern[Template string specifying the filepath for the trace-events data, it supports ${rotation} and ${pid} log-rotation id. %2$u is the pid.]:template string' \
+ '--trace-exit[show stack trace when an environment exits]' \
+ '--trace-promises[show stack traces on promise initialization and resolution]' \
+ '--trace-sigint[prints a stack trace on SIGINT]' \
+ '--trace-sync-io[show stack trace when use of sync IO is detected after the first tick]' \
+ '--trace-tls[prints TLS packet trace information to stderr]' \
+ '--trace-uncaught[show stack traces for the throw behind uncaught exceptions]' \
+ '--trace-warnings[show stack traces on process warnings]' \
+ '--track-heap-objects[track heap object allocations for heap snapshots]' \
+ '--unhandled-rejections=[define unhandled rejections behavior]:rejection behavior:(strict warn none)' \
+ '--use-bundled-ca[use bundled CA store (default)]' \
+ '--use-largepages=[re-map the Node.js static code to large memory pages at startup]:mode:(off on silent)' \
+ "--use-openssl-ca[use OpenSSL's default CA store]" \
+ "--use-system-ca[Use system's CA store]" \
+ '(- 1 *)--v8-options[print v8 command line options]' \
+ "--v8-pool-size=[set v8's thread pool size]:number" \
+ "--watch[run in watch mode]" \
+ '--watch-kill-signal=[kill signal to send to the process on watch mode restarts]:signal:_signals -s' \
+ "--watch-path=[path to watch]: :_node_files" \
+ '--watch-preserve-output[preserve outputs on watch mode restart]' \
+ '--zero-fill-buffers[automatically zero-fill all newly allocated Buffer and SlowBuffer instances]' \
+ '(- 1 *)'{-h,--help}'[print node command line options]' \
+ '(- 1 *)'{-v,--version}'[print Node.js version]' \
+ '*: :_node_args' && ret=0
+
+ return ret
+}
+
+_node "$@"
# Local Variables:
# mode: Shell-Script
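The change above is a pure refactor: the flat option spec is wrapped in a function so the completer can return a meaningful status and keep its state local. The general shape of the pattern (illustrative command and option names):

    _mycmd() {
      local ret=1
      # _arguments returns 0 when it added matches
      _arguments '--flag[an example option]' && ret=0
      return ret
    }
    _mycmd "$@"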
diff --git a/src/_pgsql_utils b/src/_pgsql_utils
deleted file mode 100644
index b6e8b59..0000000
--- a/src/_pgsql_utils
+++ /dev/null
@@ -1,590 +0,0 @@
-#compdef psql pg_dump pg_dumpall pg_restore createdb dropdb vacuumdb createuser dropuser initdb
-# ------------------------------------------------------------------------------
-# Copyright (c) 2016 Github zsh-users - https://github.com/zsh-users, Dominic Mitchell, Johann 'Myrkraverk' Oskarsson, Daniel Serodio, J Smith
-# All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are met:
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above copyright
-# notice, this list of conditions and the following disclaimer in the
-# documentation and/or other materials provided with the distribution.
-# * Neither the name of the zsh-users nor the
-# names of its contributors may be used to endorse or promote products
-# derived from this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
-# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-# DISCLAIMED. IN NO EVENT SHALL ZSH-USERS BE LIABLE FOR ANY
-# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
-# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
-# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-# ------------------------------------------------------------------------------
-# Description
-# -----------
-#
-# Completion script for PostgreSQL utils (https://www.postgresql.org/).
-#
-# Source: https://www.zsh.org/mla/users/2004/msg01006.html
-#
-# ------------------------------------------------------------------------------
-# Authors
-# -------
-#
-# * Dominic Mitchell <dom+zsh@happygiraffe.net>
-#
-# * Johann 'Myrkraverk' Oskarsson <johann@2ndquadrant.com>
-#
-# * Daniel Serodio <dserodio@gmail.com> pg_dumpall completion
-#
-# * J Smith <dark.panda@gmail.com> various completion additions
-#
-# ------------------------------------------------------------------------------
-
-
-_pgsql_get_identity () {
- _pgsql_user=${(v)opt_args[(i)-U|--username]}
- _pgsql_port=${(v)opt_args[(i)-p|--port]}
- _pgsql_host=${(v)opt_args[(i)-h|--host]}
-
- _pgsql_params=(
- ${_pgsql_user:+"--username=$_pgsql_user"}
- ${_pgsql_port:+"--port=$_pgsql_port"}
- ${_pgsql_host:+"--host=$_pgsql_host"}
- )
-}
-
-# Postgres allows specifying the path to the directory containing the
-# socket as well as a hostname.
-_pgsql_host_or_dir() {
- _alternative \
- 'hosts:host:_hosts' \
- 'directories:directory:_directories'
-}
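# Both forms are accepted by the clients' -h/--host option, e.g. (with a
# placeholder database name):
#   psql -h localhost mydb
#   psql -h /var/run/postgresql mydb   # socket directory instead of a host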
-
-# This creates a port completion list based on socket files on the
-# local computer. By default, Postgres puts them in /tmp/, but Debian
-# changed that to /var/run/postgresql/ in their packages.
-_pgsql_ports() {
- compadd "$@" - /tmp/.s.PGSQL.<->(N:e) /var/run/postgresql/.s.PGSQL.<->(N:e)
-}
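# The glob above combines three zsh features: <-> matches any run of digits,
# (N) enables NULL_GLOB for just this pattern, and :e keeps only the
# "extension", i.e. the digits after the final dot. To inspect the result:
#   print -l /tmp/.s.PGSQL.<->(N:e)   # one port number per local socket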
-
-_pgsql_users () {
- local _pgsql_user _pgsql_port _pgsql_host _pgsql_params
- local _pgsql_user_sql
- _pgsql_get_identity
-
- # We use _pgsql_port and _pgsql_host directly here instead of
- # _pgsql_params so as to not pick up a partially completed
- # username.
- _pgsql_params=(
- ${_pgsql_port:+"--port=$_pgsql_port"}
- ${_pgsql_host:+"--host=$_pgsql_host"}
- )
-
- _pgsql_user_sql='select r.rolname from pg_catalog.pg_roles r where r.rolcanlogin = true'
-
- compadd "$@" - $( psql $_pgsql_params[@] -XAqt -c $_pgsql_user_sql template1 2>/dev/null )
-
-}
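# The psql switches used for these lookups keep the output machine-readable:
# -X skips ~/.psqlrc, -A disables aligned formatting, -q suppresses extra
# messages, and -t prints tuples only (no headers or row counts), e.g.:
#   psql -XAqt -c 'select 1'   # prints just "1"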
-
-_pgsql_tables () {
- local _pgsql_user _pgsql_port _pgsql_host _pgsql_params
- _pgsql_get_identity
-
- # Need to pull out the database name from the existing arguments.
- # This is going to vary between commands. Thankfully, it's only
- # used by pg_dump, which always has the dbname in arg1. If it's
- # not present it defaults to ${PGDATABASE:-$LOGNAME}, which
- # matches (I think) the PostgreSQL behaviour.
-
- local db
- db=${line[1]:-${PGDATABASE:-$LOGNAME}}
-
- ## Instead of parsing the output of the psql \ commands, we look
- ## up the tables ourselves. The following query has been tested
- ## with Postgres 8.2 - 9.2.
-
- local _pgsql_table_sql
- _pgsql_table_sql="select n.nspname || '.' || c.relname \
- from pg_catalog.pg_class c \
- left join pg_catalog.pg_namespace n on n.oid = c.relnamespace \
- where c.relkind in ('r', '') \
- and n.nspname <> 'pg_catalog' \
- and n.nspname <> 'information_schema' \
- and n.nspname !~ '^pg_toast' \
- and pg_catalog.pg_table_is_visible( c.oid ) \
- order by 1"
-
- compadd "$@" - \
- $( psql $_pgsql_params[@] -AXqt -c $_pgsql_table_sql $db 2>/dev/null )
-}
-
-_pgsql_schemas () {
- local _pgsql_user _pgsql_port _pgsql_host _pgsql_params
- _pgsql_get_identity
-
- local db
- db=${line[1]:-${PGDATABASE:-$LOGNAME}}
-
- local _pgsql_schema_sql="select n.nspname \
- from pg_catalog.pg_namespace n \
- where n.nspname !~ '^pg_' \
- and n.nspname <> 'information_schema' \
- order by 1;"
-
- compadd "$@" - \
- $( psql $_pgsql_params[@] -AXqt -c $_pgsql_schema_sql $db 2>/dev/null )
-}
-
-_pgsql_databases () {
- local _pgsql_user _pgsql_port _pgsql_host _pgsql_params
- _pgsql_get_identity
-
- local _pgsql_services _pgsql_service_files
- _pgsql_service_files=(~/.pg_service.conf)
- (( $+commands[pg_config] )) && _pgsql_service_files+=$(pg_config --sysconfdir)/pg_service.conf
-
- _pgsql_services=$( grep -h '^\[.*\]' $_pgsql_service_files 2>/dev/null \
- | sed -e 's/^\[/service=/' -e 's/\].*$//' )
-
- local _pgsql_db_sql
- _pgsql_db_sql="select d.datname from pg_catalog.pg_database d \
- where d.datname <> 'template0'"
-
- compadd "$@" - \
- ${(f)_pgsql_services} \
- $( psql $_pgsql_params[@] -AXtq -c $_pgsql_db_sql template1 2>/dev/null )
-}
-
-_pgsql_encodings () {
- local _pgsql_user
- _pgsql_get_identity
-
- local _pgsql_db_sql
- _pgsql_db_sql="select pg_encoding_to_char(i) from generate_series(0,100) i;"
-
- compadd "$@" - $( psql $_pgsql_params[@] -AXtq -c $_pgsql_db_sql template1 )
-}
-
-
-##
-## The actual completion code for the commands
-##
-
-_psql () {
- local curcontext="$curcontext" state line expl
- typeset -A opt_args
-
- _arguments -C -s "-*" \
- "$_pgsql_common_opts[@]" \
- {-c+,--command=}':execute SQL command:' \
- {-d+,--dbname=}':database to connect to:_pgsql_databases' \
- {-f+,--file=}':SQL file to read:_files' \
- {-l,--list}'[list databases]' \
- {-v+,--set=,--variable=}':set SQL variable:' \
- {-V,--version}'[output version information, then exit]' \
- {-X,--no-psqlrc}'[don'\''t read ~/.psqlrc]' \
- {-1,--single-transaction}'[restore as a single transaction]' \
- {-\?,--help=}':display help:' \
- \
- {-a,--echo-all}'[print commands read]' \
- {-b,--echo-errors}'[echo failed commands]' \
- {-e,--echo-queries}'[display queries submitted]' \
- {-E,--echo-hidden}'[display hidden queries]' \
- {-L,--log-file=}'[send session log to file]' \
- {-n,--no-readline}'[disable enhanced command line editing (readline)]' \
- {-o+,--output=}':query output:_files' \
- {-q,--quiet}'[non verbose mode]' \
- {-s,--single-step}'[prompt before each query]' \
- {-S,--single-line}'[newline sends query]' \
- \
- {-A,--no-align}'[unaligned output mode]' \
- --csv'[CSV (Comma-Separated Values) table output mode]' \
- {-F+,--field-separator=}':field separator char:' \
- {-H,--html}'[HTML output]' \
- {-P+,--pset=}':set psql variable:' \
- {-R+,--record-separator=}':record separator char:' \
- {-t,--tuples-only}'[don'\''t display header/footer]' \
- {-T+,--table-attr=}':HTML table options:' \
- {-x,--expanded}'[one column per line]' \
- {-z,--field-separator-zero}'[set field separator for unaligned output to zero byte]' \
- {-0,--record-separator-zero}'[set record separator for unaligned output to zero byte]' \
- -u'[prompt for username/password]' \
- ':PostgreSQL database:_pgsql_databases' \
- ':PostgreSQL user:_pgsql_users'
-}
-
-_pg_dump () {
- local curcontext="$curcontext" state line expl
- typeset -A opt_args
-
- _arguments -C -s \
- "$_pgsql_common_opts[@]" \
- {-f+,--file=}':output file:_files' \
- {-F+,--format=}':output format:_values "format" "p[plain text]" "t[tar]" "c[custom]"' \
- {-j,--jobs=}'[use this many parallel jobs to dump]' \
- {-v,--verbose}'[verbose mode]' \
- {-V,--version}'[output version information, then exit]' \
- {-Z+,--compress=}':compression level:_values "level" 9 8 7 6 5 4 3 2 1 0' \
- --lock-wait-timeout='[fail after waiting TIMEOUT for a table lock]' \
- --no-sync'[do not wait for changes to be written safely to disk]' \
- {-\?,--help}'[display help]' \
- \
- {-a,--data-only}'[dump only data]' \
- {-b,--blobs}'[dump blobs as well]' \
- {-B,--no-blobs}'[exclude large objects in dump]' \
- {-c,--clean}'[include clean cmds in dump]' \
- {-C,--create}'[include createdb cmds in dump]' \
- {-e+,--extension=}'[dump the specified extension(s) only]' \
- {-E+,--encoding=}':database encoding:_pgsql_encodings' \
- {-n+,--schema=}':schema to dump:_pgsql_schemas' \
- {-N+,--exclude-schema=}':schema to NOT dump:_pgsql_schemas' \
- {-O,--no-owner}'[don'\''t recreate as same owner]' \
- {-s,--schema-only}'[no data, only schema]' \
- {-S+,--superuser=}':superuser name:_pgsql_users' \
- {-t+,--table=}':table to dump:_pgsql_tables' \
- {-T+,--exclude-table=}':table to NOT dump:_pgsql_tables' \
- {-x,--no-{acl,privileges}}'[don'\''t dump ACLs]' \
- --binary-upgrade'[for use by upgrade utilities only]' \
- {-D,--{attribute,column}-inserts}'[use INSERT (cols) not COPY]' \
- --disable-dollar-quoting'[disable dollar quoting, use SQL standard quoting]' \
- --disable-triggers'[disable triggers during data-only restore]' \
- --enable-row-security'[enable row security (dump only content user has access to)]' \
- --exclude-table-data='[do NOT dump data for the named table(s)]' \
- --if-exists'[use IF EXISTS when dropping objects]' \
- --include-foreign-data='[include data of foreign servers]' \
- --inserts'[dump data as INSERT commands, rather than COPY]' \
- --load-via-partition-root'[load partitions via the root table]' \
- --no-comments'[do not dump comments]' \
- --no-publications'[do not dump publications]' \
- --no-security-labels'[do not dump security label assignments]' \
- --no-subscriptions'[do not dump subscriptions]' \
- --no-synchronized-snapshots'[do not use synchronized snapshots in parallel jobs]' \
- --no-tablespaces'[do not dump tablespace assignments]' \
- --no-toast-compression'[do not dump TOAST compression methods]' \
- --no-unlogged-table-data'[do not dump unlogged table data]' \
- --on-conflict-do-nothing'[add ON CONFLICT DO NOTHING to INSERT commands]' \
- --quote-all-identifiers'[quote all identifiers, even if not key words]' \
-    --rows-per-insert='[number of rows per INSERT]' \
- --section=':dump named section:_values "section" pre-data data post-data' \
- --serializable-deferrable'[wait until the dump can run without anomalies]' \
- --snapshot='[use given snapshot for the dump]' \
- --strict-names'[require table and/or schema include patterns to match at least one entity each]' \
- --use-set-session-authorization'[use SET SESSION AUTHORIZATION commands instead of ALTER OWNER]' \
- \
- {-i,--ignore-version}'[ignore version mismatch]' \
- {-o,--oids}'[dump objects identifiers for every table]' \
- {-R,--no-reconnect}'[don'\''t output connect]' \
- -X+':option:_values "option" use-set-session-authorization disable-triggers' \
- ':PostgreSQL database:_pgsql_databases'
-}
-
-_pg_restore () {
- local curcontext="$curcontext" state line expl
- typeset -A opt_args
-
- _arguments -C -s \
- "$_pgsql_common_opts[@]" \
- {-d+,--dbname=}':database to connect to:_pgsql_databases' \
- {-f+,--file=}':output file:_files' \
- {-F+,--format=}':output format:_values "format" "p[plain text]" "t[tar]" "c[custom]"' \
- {-l,--list}'[list databases]' \
- {-v,--verbose}'[verbose mode]' \
- {-V,--version}'[output version information, then exit]' \
- {-\?,--help}'[display help]' \
- \
-    {-a,--data-only}'[restore only the data, no schema]' \
- {-c,--clean}'[include clean (drop) cmds before recreating]' \
-    {-C,--create}'[create the target database before restoring into it]' \
- {-e,--exit-on-error}'[exit on error, default is to continue]' \
- {-I,--index=}':index name:' \
- {-j,--jobs=}':use this many parallel jobs to restore:' \
- {-L,--use-list=}':use table of contents from this file for selecting/ordering output:' \
- {-n,--schema=}':restore only objects in this schema:' \
- {-O,--no-owner}'[skip restoration of object ownership]' \
- {-P,--function=}':restore named function:' \
- {-s,--schema-only}'[restore only the schema, no data]' \
- {-S,--superuser=}':superuser user name to use for disabling triggers:' \
- {-t,--table=}':restore named table:' \
- {-T,--trigger=}':restore named trigger:' \
- {-x,--no-privileges}'[skip restoration of access privileges (grant/revoke)]' \
- {-1,--single-transaction}'[restore as a single transaction]' \
- --disable-triggers'[disable triggers during data-only restore]' \
- --enable-row-security'[enable row security]' \
- --if-exists'[use IF EXISTS when dropping objects]' \
- --no-comments'[do not restore comments]' \
- --no-data-for-failed-tables'[do not restore data of tables that could not be created]' \
- --no-publications'[do not restore publications]' \
- --no-security-labels'[do not restore security labels]' \
- --no-subscriptions'[do not restore subscriptions]' \
- --no-tablespaces'[do not restore tablespace assignments]' \
-    --section=':restore named section:_values "section" pre-data data post-data' \
- --strict-names'[require table and/or schema include patterns to match at least one entity each]' \
- --use-set-session-authorization'[use SET SESSION AUTHORIZATION commands instead of ALTER OWNER commands to set ownership]' \
- \
- {-b,--blobs}'[include large objects in dump]' \
- {-B,--no-blobs}'[exclude large objects in dump]' \
- \
- "1: :_files"
-}
-
-_pg_dumpall () {
- local curcontext="$curcontext" state line expl
- typeset -A opt_args
-
- _arguments -C -s \
- "$_pgsql_common_opts[@]" \
- {-f+,--file=}':output file:_files' \
- {-v,--verbose}'[verbose mode]' \
- {-V,--version}'[output version information, then exit]' \
- --lock-wait-timeout='[fail after waiting TIMEOUT for a table lock]' \
- {-\?,--help}'[display help]' \
- \
- {-a,--data-only}'[dump only data]' \
- {-c,--clean}'[include clean (drop) cmds before recreating]' \
-    {-E,--encoding=}'[dump the data in the given encoding]' \
- {-g,--globals-only}'[dump only global objects, no databases]' \
- {-O,--no-owner}'[don'\''t recreate as same owner]' \
- {-r,--roles-only}'[no databases or tablespaces, only roles]' \
- {-s,--schema-only}'[no data, only schema]' \
- {-S+,--superuser=}':superuser name:_pgsql_users' \
- {-t,--tablespaces-only}'[no databases or roles, only tablespaces]' \
- {-x,--no-privileges}'[don'\''t dump ACLs]' \
- --binary-upgrade'[for use by upgrade utilities only]' \
- --column-inserts'[use INSERT with column names not COPY]' \
- --disable-dollar-quoting'[disable dollar quoting, use SQL standard quoting]' \
- --disable-triggers'[disable triggers during data-only restore]' \
- --exclude-database=':exclude databases:_pgsql_databases' \
- --extra-float-digits='[override default setting for extra_float_digits]' \
- --if-exists'[use IF EXISTS when dropping objects]' \
- --inserts'[use INSERT not COPY]' \
-    --load-via-partition-root'[load partitions via the root table]' \
- --no-comments'[do not dump comments]' \
- --no-publications'[do not dump publications]' \
- --no-role-passwords'[do not dump passwords for roles]' \
- --no-security-labels'[do not dump security label assignments]' \
- --no-subscriptions'[do not dump subscriptions]' \
- --no-sync'[do not wait for changes to be written safely to disk]' \
- --no-tablespaces'[do not dump tablespace assignments]' \
- --no-toast-compression'[do not dump TOAST compression methods]' \
- --no-unlogged-table-data'[do not dump unlogged table data]' \
- --on-conflict-do-nothing'[add ON CONFLICT DO NOTHING to INSERT commands]' \
- --quote-all-identifiers'[quote all identifiers, even if not key words]' \
- --rows-per-insert='[number of rows per INSERT]' \
- --use-set-session-authorization'[use SET SESSION AUTHORIZATION cmds instead of ALTER OWNER]' \
-    {-o,--oids}'[dump object identifiers (OIDs) for every table]'
-}
-
-_createdb () {
- local curcontext="$curcontext" state line expl
- typeset -A opt_args
-
- _arguments -C -s \
- "$_pgsql_common_opts[@]" \
- {-D+,--tablespace=}'[default tablespace for the database]' \
- {-e,--echo}'[display SQL queries]' \
- {-E+,--encoding=}':database encoding:_pgsql_encodings' \
- {-l+,--locale=}'[locale settings for the database]' \
- --lc-collate='[LC_COLLATE setting for the database]' \
- --lc-ctype='[LC_CTYPE setting for the database]' \
- {-O+,--owner=}':database user to own the new database:_pgsql_users' \
- {-T+,--template=}':database template:_pgsql_databases' \
- '--version[output version information, then exit]' \
- {-\?,--help}'[display help]' \
- \
- --maintenance-db=':alternate maintenance database:_pgsql_databases' \
-    {-q,--quiet}'[non-verbose mode]' \
- --location=':database location (unsupported since PostgreSQL 8.0):_directories' \
- ':PostgreSQL database:' \
- ':comment:'
-}
-
-_dropdb () {
- local curcontext="$curcontext" state line expl
- typeset -A opt_args
-
- _arguments -C -s \
- "$_pgsql_common_opts[@]" \
- {-e,--echo}'[display SQL queries]' \
- {-f,--force}'[try to terminate other connections before dropping]' \
- {-i,--interactive}'[confirm before drop]' \
- {-V,--version}'[output version information, then exit]' \
-    --if-exists'[don'\''t report error if database doesn'\''t exist]' \
- --maintenance-db=':alternate maintenance database:_pgsql_databases' \
-    {-q,--quiet}'[non-verbose mode]' \
- ':PostgreSQL database:_pgsql_databases'
-}
-
-_vacuumdb () {
- local curcontext="$curcontext" state line expl
- typeset -A opt_args
-
- _arguments -C -s \
- "$_pgsql_common_opts[@]" \
- {-a,--all}'[vacuum all databases]' \
- {-d+,--dbname=}':database to connect to:_pgsql_databases' \
- --disable-page-skipping'[disable all page-skipping behavior]' \
- {-e,--echo}'[show the commands being sent to the server]' \
- {-f,--full}'[do full vacuuming]' \
- {-F,--freeze}'[freeze row transaction information]' \
- --force-index-cleanup'[always remove index entries that point to dead tuples]' \
- {-j,--jobs=}'[use this many concurrent connections to vacuum]' \
- '--min-mxid-age=[minimum multixact ID age of tables to vacuum]' \
- '--min-xid-age=[minimum transaction ID age of tables to vacuum]' \
- --no-index-cleanup'[don'\''t remove index entries that point to dead tuples]' \
- --no-process-toast'[skip the TOAST table associated with the table to vacuum]' \
- --no-truncate'[don'\''t truncate empty pages at the end of the table]' \
- {-P+,--parallel=}'[use this many background workers for vacuum, if available]' \
- {-q,--quiet}'[do not write any messages]' \
- '--skip-locked[skip relations that cannot be immediately locked]' \
- {-t+,--table=}':table to dump:_pgsql_tables' \
- {-v,--verbose}'[write a lot of output]' \
- {-V,--version}'[output version information, then exit]' \
- {-z,--analyze}'[update optimizer hints]' \
- {-Z,--analyze-only}'[only update optimizer statistics; no vacuum]' \
- --analyze-in-stages'[only update optimizer statistics, in multiple stages for faster results; no vacuum]' \
- {-\?,--help}'[display help]' \
- --maintenance-db='[alternate maintenance database]' \
- '1:PostgreSQL database:_pgsql_databases'
-}
-
-_createuser () {
- local curcontext="$curcontext" state line expl
- typeset -A opt_args
-
- _arguments -C -s \
- "$_pgsql_common_opts[@]" \
- {-c,--connection-limit=}'[connection limit for role (default: no limit)]' \
- {-d,--createdb}'[role can create new databases]' \
- {-D,--no-createdb}'[role cannot create databases]' \
- {-e,--echo}'[display SQL queries]' \
- {-g,--role=}'[new role will be a member of this role]' \
- {-i,--inherit}'[role inherits privileges of roles it is a member of (default)]' \
- {-I,--no-inherit}'[role does not inherit privileges]' \
- {-l,--login}'[role can login (default)]' \
- {-L,--no-login}'[role cannot login]' \
- {-P,--pwprompt}'[assign a password to new role]' \
- {-r,--createrole}'[role can create new roles]' \
- {-R,--no-createrole}'[role cannot create roles]' \
- {-s,--superuser}'[role will be superuser]' \
- {-S,--no-superuser}'[role will not be superuser]' \
- --interactive'[prompt for missing role name and attributes rather than using defaults]' \
- --replication'[role can initiate replication]' \
- --no-replication'[role cannot initiate replication]' \
- {-E,--encrypted}'[encrypt stored password]' \
- {-N,--unencrypted}'[do not encrypt stored password]' \
- {-\?,--help}'[display help]'
-}
-
-_dropuser () {
- local curcontext="$curcontext" state line expl
- typeset -A opt_args
-
- _arguments -C -s \
- "$_pgsql_common_opts[@]" \
- {-e,--echo}'[display SQL queries]' \
-    {-q,--quiet}'[non-verbose mode]' \
- {-i,--interactive}'[prompt before deleting anything, and prompt for role name if not specified]' \
- {-V,--version}'[output version information, then exit]' \
- --if-exists'[don'\''t report error if user doesn'\''t exist]' \
- ':PostgreSQL user:_pgsql_users'
-}
-
-_initdb () {
- local curcontext="$curcontext" state line expl
- typeset -A opt_args
-
- _arguments -C -s \
- {--auth=,-A+}':default authentication method for local connections:_values "auth methods" $_pgsql_auth_methods[@]' \
- --auth-host=':default authentication method for local TCP/IP connections:_values "auth methods" $_pgsql_auth_methods[@]' \
- --auth-local=':default authentication method for local-socket connections:_values "auth methods" $_pgsql_auth_methods[@]' \
- {-D+,--pgdata=}':location for this database cluster:_files' \
- {-E+,--encoding=}':set default encoding for new databases:_pgsql_encodings' \
-    {-g,--allow-group-access}'[allow group read/execute on data directory]' \
-    {-k,--data-checksums}'[use data page checksums]' \
- --locale=':set default locale for new databases:' \
- --lc-collate=':set the default locale for collate:' \
- --lc-ctype=':set the default locale for ctype:' \
- --lc-messages=':set the default locale for messages:' \
- --lc-monetary=':set the default locale for monetary:' \
- --lc-numeric=':set the default locale for numeric:' \
-    --lc-time=':set the default locale for time:' \
- --no-locale'[equivalent to --locale=C]' \
- --pwfile=':read password for the new superuser from file:_files' \
- {-T+,--text-search-config=}'[default text search configuration]' \
-    {-U+,--username=}':database superuser name:' \
- {-W,--pwprompt}'[prompt for a password for the new superuser]' \
- {-X+,--waldir=}':location for the write-ahead log directory:_files' \
- --xlogdir=':location for the transaction log directory (unsupported since PostgreSQL 10):_files' \
- --wal-segsize='[size of WAL segments, in megabytes]' \
- {-d,--debug}'[generate lots of debugging output]' \
- --discard-caches'[set debug_discard_caches=1]' \
- -L+':where to find the input files:_files' \
- {-n,--no-clean}'[do not clean up after errors]' \
-    {-N,--no-sync}'[do not wait for changes to be written safely to disk]' \
-    --no-instructions'[do not print instructions for next steps]' \
- {-s,--show}'[show internal settings]' \
- {-S,--sync-only}'[only sync data directory]' \
- {-V,--version}'[output version information, then exit]' \
- {-\?,--help}'[display help]' \
- ':location for this database cluster:_files'
-}
-
-_pgsql_utils () {
- local _pgsql_common_opts _pgsql_auth_methods
-
- _pgsql_common_opts=(
- {-\?,--help}'[display help]'
- {-h+,--host=}':database host:_pgsql_host_or_dir'
- {-p+,--port=}':database port number:_pgsql_ports'
- {-U+,--username=}':connect as user:_pgsql_users'
- {-W,--password}'[prompt for password]'
- {-w,--no-password}'[never prompt for password]'
-    --role='[do SET ROLE before the operation]'
- )
-
- _pgsql_auth_methods=(
- trust
- reject
- md5
- password
- gss
- sspi
- krb5
- ident
- peer
- ldap
- radius
- cert
- pam
- )
-
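-  # $service names the command actually being completed (psql, pg_dump, ...),
-  # which lets this one script dispatch for every PostgreSQL client tool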
- case "$service" in
- psql) _psql "$@" ;;
- pg_dump) _pg_dump "$@" ;;
- pg_dumpall) _pg_dumpall "$@" ;;
- pg_restore) _pg_restore "$@" ;;
- createdb) _createdb "$@" ;;
- dropdb) _dropdb "$@" ;;
- vacuumdb) _vacuumdb "$@" ;;
- createuser) _createuser "$@" ;;
- dropuser) _dropuser "$@" ;;
- initdb) _initdb "$@" ;;
- esac
-}
-
-_pgsql_utils "$@"
-
-# Local Variables:
-# mode: Shell-Script
-# sh-indentation: 2
-# indent-tabs-mode: nil
-# sh-basic-offset: 2
-# End:
-# vim: ft=zsh sw=2 ts=2 et
diff --git a/src/_phing b/src/_phing
index eab4f98..83ce274 100644
--- a/src/_phing
+++ b/src/_phing
@@ -23,7 +23,7 @@
# Description
# -----------
#
-# Completion script for Phing (https://www.phing.info/).
+# Completion script for Phing 3.1.0 (https://www.phing.info/).
#
# ------------------------------------------------------------------------------
# Authors
@@ -39,36 +39,53 @@ _phing() {
typeset -A opt_args
# Follow https://www.phing.info/guide/chunkhtml/sec.commandlineargs.html for more information
- _arguments \
+ _arguments -C \
'(-h -help)'{-h,-help}'[display the help screen]' \
'(-v -version)'{-v,-version}'[print version information and exit]' \
- '(-l -list)'{-l,-list}'[list all available targets in buildfile (excluding targets that have their hidden attribute set to true)]' \
+ '(-l -list)'{-l,-list}'[list all available targets in buildfile]' \
+ '(-i -init)'{-i,-init}'[generates an initial buildfile]:file:_files' \
'(-q -quiet)'{-q,-quiet}'[quiet operation, no output at all]' \
+ '(-S -silent)'{-S,-silent}'[print nothing but task outputs and build failures]' \
'-verbose[verbose, give some more output]' \
'-debug[output debug information]' \
- '-logfile [use given file for log]:file:_files' \
- '-D[set the property to the specified value to be used in the buildfile]' \
- '-find []:file:_files' \
- '-buildfile [specify an alternate buildfile name. Default is build.xml]:file:_files' \
- '-logger [specify an alternate logger. Default is phing.listener.DefaultLogger. Other options include phing.listener.NoBannerLogger, phing.listener.AnsiColorLogger, phing.listener.XmlLogger, phing.listener.TargetLogger and phing.listener.HtmlColorLogger]' \
- '-propertyfile [load properties from the specified file]:file:_files' \
- '(-v --version)'{-v,--version}'[show version]' \
+ '(-e -emacs)'{-e,-emacs}'[produce logging information without adornments]' \
+ '-diagnostics[print diagnostics information]' \
+ '(-strict -no-strict)-strict[run build in strict mode]' \
+ '(-strict -no-strict)-no-strict[run build normally]' \
+ '-longtargets[show target descriptions during build]' \
+ '-logfile[use given file for log]:file:_files' \
+ '-logger[the class which is to perform logging]:class' \
+ '*-listener[add an instance of class as a project listener]:class' \
+ '(-f -buildfile)'{-f,-buildfile}'[build file]:file:_files' \
+ '*-D[set the property to the specified value to be used in the buildfile]' \
+ '(-k -keep-going)'{-k,-keep-going}'[execute all targets that do not depend on failed target(s)]' \
+ '-propertyfile[load all properties from the specified file]:file:_files' \
+ '-propertyfileoverride[values in property file override existing values]' \
+ '-find[search for buildfile towards the root of the filesystem and use it]:file:_files' \
+ '-inputhandler[the class to use to handle user input]:class' \
+ '(- *)'{-v,-version}'[show version]' \
'1: :->targets' \
'*:: :->args' \
&& ret=0
case $state in
- targets)
- local buildfile; buildfile=build.xml
+ (targets)
+ local buildfile=build.xml
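+      # prefer an explicitly given buildfile (-buildfile or -f) over the default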
+ if (( $+opt_args[-buildfile] )); then
+ buildfile=${opt_args[-buildfile]}
+      elif (( $+opt_args[-f] )); then
+ buildfile=${opt_args[-f]}
+ fi
+
if [[ ! -f $buildfile ]]
then
ret=0
else
- local targets; targets=($(sed -nE "/<target /s/.*name=[\"'](\w+)[\"'].*/\1/p" $buildfile))
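+        # e.g. a buildfile line such as <target name="build" ...> yields "build";
+        # note \w+ only matches plain word-character names (no dots or dashes)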
+ local -a targets=($(sed -nE "/<target /s/.*name=[\"'](\w+)[\"'].*/\1/p" $buildfile))
_describe -t 'targets' 'target' targets && ret=0
fi
;;
- args)
+ (args)
if [[ CURRENT -eq NORMARG && ${+opt_args[--match]} -eq 0 ]]
then
# If the current argument is the first non-option argument
diff --git a/src/_play b/src/_play
deleted file mode 100644
index 6b35899..0000000
--- a/src/_play
+++ /dev/null
@@ -1,190 +0,0 @@
-#compdef play
-# ------------------------------------------------------------------------------
-# Description
-# -----------
-#
-# Completion script for Play! framework 1.2.2 (https://www.playframework.com/).
-#
-# ------------------------------------------------------------------------------
-# Authors
-# -------
-#
-# * Julien Nicoulaud <julien.nicoulaud@gmail.com>
-# * Mario Fernandez (https://github.com/sirech)
-#
-# ------------------------------------------------------------------------------
-
-
-_play() {
- local context curcontext="$curcontext" state line
- typeset -A opt_args
-
- local ret=1
-
- _arguments -C \
- '1: :_play_cmds' \
- '*::arg:->args' \
- && ret=0
-
- case $state in
- (args)
- curcontext="${curcontext%:*:*}:play-cmd-$words[1]:"
- case $line[1] in
- (build-module|list-modules|lm|check|id)
- _message 'no more arguments' && ret=0
- ;;
- (dependencies|deps)
- _arguments \
- '1:: :_play_apps' \
- '(--debug)--debug[Debug mode (even more information logged than in verbose mode)]' \
- '(--jpda)--jpda[Listen for JPDA connection. The process will be suspended until a client is plugged to the JPDA port.]' \
- '(--sync)--sync[Keep lib/ and modules/ directory synced. Delete unknown dependencies.]' \
- '(--verbose)--verbose[Verbose Mode]' \
- && ret=0
- ;;
- (clean|javadoc|jd|out|pid|secret|stop)
- _arguments '1:: :_play_apps' && ret=0
- ;;
- (help)
- _arguments '1: :_play_cmds -F "(cp deps ec idea jd st lm nb nm help antify evolutions evolutions:apply evolutions:markApplied evolutions:resolve)"' && ret=0
- ;;
- (status|st)
- _arguments \
- '1:: :_play_apps' \
- '(--url)--url[If you want to monitor an application running on a remote server, specify the application URL using this option]:URL:_urls' \
- '(--secret)--secret[You can provide your own secret key using this option]:Secret key' \
- && ret=0
- ;;
- (new)
- _arguments \
- '1: :_play_apps' \
- '(--with)--with[Automatically enable this set of module for the newly created application]:Modules list:_play_modules_list' \
- && ret=0
- ;;
- (install)
- _arguments '1:Play! module:_play_modules_dash_versions' && ret=0
- ;;
- (new-module)
- _arguments '1:Module directory:_files -/' && ret=0
- ;;
- (test|precompile|run|start|war|auto-test|classpath|cp|eclipsify|ec|idealize|idea|modules|netbeansify|nb)
- local cmd_args; cmd_args=(
- '1:: :_play_apps'
- '(--deps)--deps[Resolve and install dependencies before running the command]'
- )
- case $line[1] in
- (precompile|run|start|restart|war)
- local app_dir="$line[2]"
- [[ -d "$app_dir" ]] || app_dir=.
- [[ -f "$app_dir/conf/application.conf" ]] && cmd_args+=('--'${(u)${(M)$(<$app_dir/conf/application.conf):#%*}%%.*}'[Use this ID to run the application (override the default framework ID)]')
- ;|
- (test|run)
- cmd_args+=('(-f)-f[Disable the JPDA port checking and force the jpda.port value]')
- ;|
- (war)
- cmd_args+=(
- '(-o --output)'{-o,--output}'[The path where the WAR directory will be created. The contents of this directory will first be deleted]:output directory:_files -/'
- '(--zip)--zip[By default, the script creates an exploded WAR. If you want a zipped archive, specify the --zip option]'
- '(--exclude)--exclude[Excludes a list of colon separated directories]:excluded directories list:_play_colon_dirs_list'
- )
- ;|
- (test|run|start|restart|war)
- cmd_args+=('*:Java option')
- ;;
- esac
- _arguments "$cmd_args[@]" && ret=0
- ;;
- *)
- _call_function ret _play_cmd_$words[1] && ret=0
- (( ret )) && _message 'no more arguments'
- ;;
- esac
- ;;
- esac
-}
-
-# FIXME Completes only core commands, some modules add commands too (eg Maven). Where do we get them ?
-# FIXME Parse 'play help' and 'play help <command>' (for aliases) instead of hard-coding.
-(( $+functions[_play_cmds] )) ||
-_play_cmds() {
- local commands; commands=(
- 'antify:Create a build.xml file for this project'
- 'auto-test:Automatically run all application tests'
- 'build-module:Build and package a module'
- 'check:Check for a release newer than the current one'
- {classpath,cp}':Display the computed classpath'
- 'clean:Delete temporary files (including the bytecode cache)'
- {dependencies,deps}':Resolve and retrieve project dependencies'
- {eclipsify,ec}':Create all Eclipse configuration files'
- 'evolutions:Run the evolution check'
- 'evolutions\:apply:Automatically apply pending evolutions'
-    'evolutions\:markApplied:Mark pending evolutions as manually applied'
- 'evolutions\:resolve:Resolve partially applied evolution'
- 'help:Display help on a specific command'
- 'id:Define the framework ID'
- {idealize,idea}':Create all IntelliJ Idea configuration files'
- 'install:Install a module'
- {javadoc,jd}':Generate your application Javadoc'
- {list-modules,lm}':List modules available from the central modules repository'
- 'modules:Display the computed modules list'
- {netbeansify,nb}':Create all NetBeans configuration files'
- 'new:Create a new application'
- {new-module,nm}':Create a module'
- 'out:Follow logs/system.out file'
- 'pid:Show the PID of the running application'
- 'precompile:Precompile all Java sources and templates to speed up application start-up'
- 'restart:Restart the running application'
- 'run:Run the application in the current shell'
- 'secret:Generate a new secret key'
- 'start:Start the application in the background'
- {status,st}':Display the running application status'
- 'stop:Stop the running application'
- 'test:Run the application in test mode in the current shell'
- 'war:Export the application as a standalone WAR archive'
- )
- _describe -t commands 'Play! command' commands "$@"
-}
-
-(( $+functions[_play_apps] )) ||
-_play_apps() {
- _wanted application expl 'Play! application directory' _files -/
-}
-
-(( $+functions[_play_modules] )) ||
-_play_modules() {
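-  # scrape module names (the bracketed [name] headers) out of the
-  # `play list-modules` output and feed them to _describe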
- local modules; modules=(${(ps:,:)${${${(S)${(f)$(_call_program modules $service list-modules)}//\]*\[/,}%%\]*}##*\[}})
- _describe -t modules 'Play! module' modules "$@"
-}
-
-(( $+functions[_play_modules_dash_versions] )) ||
-_play_modules_dash_versions() {
- local ret=1
- if compset -P '*-'; then
- local versions; versions=(${(ps:,:)${${${${${(f)$(_call_program versions $service list-modules)}##*${IPREFIX%-}\]}#*Versions:}%%"~"*}//[[:space:]]/}})
- _describe -t module-versions "${IPREFIX%-} module versions" versions && ret=0
- else
- _wanted modules expl 'Play! module' _play_modules -qS- && ret=0
- fi
-}
-
-(( $+functions[_play_modules_list] )) ||
-_play_modules_list() {
- compset -P '*,'; compset -S ',*'
- _wanted module-list expl 'Play! modules list' _play_modules -qS,
-}
-
-(( $+functions[_play_colon_dirs_list] )) ||
-_play_colon_dirs_list() {
- compset -P '*:'; compset -S ':*'
- _wanted directories-list expl 'Directories list' _files -/ -qS:
-}
-
-_play "$@"
-
-# Local Variables:
-# mode: Shell-Script
-# sh-indentation: 2
-# indent-tabs-mode: nil
-# sh-basic-offset: 2
-# End:
-# vim: ft=zsh sw=2 ts=2 et
diff --git a/src/_rkt b/src/_rkt
deleted file mode 100644
index d4ce021..0000000
--- a/src/_rkt
+++ /dev/null
@@ -1,369 +0,0 @@
-#compdef rkt
-# ------------------------------------------------------------------------------
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are met:
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above copyright
-# notice, this list of conditions and the following disclaimer in the
-# documentation and/or other materials provided with the distribution.
-# * Neither the name of the zsh-users nor the
-# names of its contributors may be used to endorse or promote products
-# derived from this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
-# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-# DISCLAIMED. IN NO EVENT SHALL ZSH-USERS BE LIABLE FOR ANY
-# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
-# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
-# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-# ------------------------------------------------------------------------------
-# Description
-# -----------
-#
-# Completion script for rkt (https://coreos.com/rkt/).
-#
-# ------------------------------------------------------------------------------
-# Authors
-# -------
-#
-# * Massimiliano Torromeo <massimiliano.torromeo@gmail.com>
-#
-# ------------------------------------------------------------------------------
-
-typeset -A opt_args
-autoload -U regexp-replace
-
-_rkt() {
- _arguments \
- '--debug[print out more debug information to stderr]' \
- '--dir=[rkt data directory]:data directory:_files -/' \
- '--insecure-options=[comma-separated list of security features to disable]:option:{_values -s , none image tls ondisk http all}' \
- '--local-config=[local configuration directory]:configuration directory:_files -/' \
- '--system-config=[system configuration directory]:configuration directory:_files -/' \
- '--trust-keys-from-https[automatically trust gpg keys fetched from https]' \
- '--user-config=[user configuration directory]:configuration directory:_files -/' \
- '--help' \
- '1: :_rkt_cmds' \
- '*:: :->rkt_cmd_args'
-
- case $state in
- rkt_cmd_args)
- case $words[1] in
- help)
- _arguments \
- '1: :_rkt_cmds' \
- '*:: :->rkt_help_args' \
- ;;
-
- api-service)
- _arguments \
- '--listen=[address to listen for client API requests]:address' \
- ;;
-
- cat-manifest)
- _arguments \
- '--pretty-print[apply indent to format the output]' \
- '--uuid-file=[read pod UUID from file instead of argument]:uuid file:_files' \
- '1:POD:_rkt_pods' \
- ;;
-
- config)
- _arguments \
- '--pretty-print[apply indent to format the output]' \
- ;;
-
- enter)
- _arguments \
- '--app=:appname' \
- '1:POD:_rkt_pods' \
- ;;
-
- export)
- _arguments \
- '--app=:appname' \
- '--overwrite[overwrite output ACI]' \
- '1:POD:_rkt_pods' \
- '2:OUTPUT_ACI_FILE:_files' \
- ;;
-
- fetch)
- _arguments \
- '--full[print the full image hash after fetching]' \
- '--no-store[fetch images ignoring the local store]' \
- '--signature=[local signature file to use in validating the preceding image]:signature:_files' \
- '--store-only[use only available images in the store]' \
- ;;
-
- gc)
- _arguments \
- '--grace-period=[duration to wait before discarding inactive pods from garbage]:duration' \
- '--expire-prepared=[duration to wait before expiring prepared pods]:duration' \
- '--mark-only[move to the garbage directories without actually deleting]' \
- ;;
-
- image)
- _arguments \
- '1: :_rkt_image_cmds' \
- '*:: :->rkt_image_args'
- ;;
-
- list)
- _arguments \
- '--full[use long output format]' \
- '--no-legend[suppress a legend with the list]' \
- ;;
-
- metadata-service)
- _arguments \
- '--listen-port=[listen port]:port' \
- ;;
-
- prepare)
- # TODO: autocomplete stage1 images
- _arguments \
- '--caps-remove=[capability to remove]:capability' \
- '--caps-retain=[capability to retain]:capability' \
- '--cpu=[cpu limit for the preceding image]:cpu limit' \
- '--cpu-shares=[assign the specified CPU time share weight]:weight' \
- "--environment=[set the app's environment variables]:variable key=value" \
- '--exec=[override the exec command for the preceding image]:command' \
- '--group=[group override for the preceding image]:group' \
- '--inherit-env[inherit all environment variables not set by apps]' \
- '--memory=[memory limit for the preceding image]:memory' \
- '--mount=[mount point binding a volume to a path within an app]:mount point' \
- '--name=[set the name of the app]:name' \
- '--no-overlay[disable overlay filesystem]' \
- '--oom-score-adj=[oom-score-adj isolator override]:oom-score-adj' \
- '--pod-manifest=[the path to the pod manifest]:manifest:_files' \
- '--port=[ports to expose on the host]:NAME\:HOSTPORT' \
- '--private-users[run within user namespaces]' \
- '--quiet[suppress superfluous output on stdout, print only the UUID on success]' \
- '--readonly-rootfs=[mount rootfs read-only]:fs' \
- '--set-env=[an environment variable to set for apps]:NAME=VALUE' \
- '--set-env-file=[the path to an environment variables file]:file:_files' \
- '--signature=[local signature file to use in validating the preceding image]:signature:_files' \
- '--stage1-from-dir=[a filename of an image in stage1 images directory to use as stage1]:image' \
- '--stage1-hash=[a hash of an image to use as stage1]:image hash' \
- '--stage1-name=[a name of an image to use as stage1]:image name' \
- '--stage1-path=[a path to an image to use as stage1]:image path:_files' \
- '--stage1-url=[a URL to an image to use as stage1]:image url' \
- '--supplementary-gids=[supplementary group IDs override for the preceding image]:group IDs' \
- '--user=[user override for the preceding image]:user' \
- "--user-annotation=[set the app's annotations]:annotation key=value" \
- "--user-label=[set the app's labels]:label key=value" \
- '--volume=[volumes to make available in the pod]:volume' \
- '--working-dir=[override the working directory of the preceding image]:working directory:_files -/' \
- '1:IMAGE:_rkt_images' \
- ;;
-
- rm)
- _arguments \
- '--uuid-file=[read pod UUID from file instead of argument]:uuid file:_files' \
- '1:POD:_rkt_pods' \
- ;;
-
- run)
- _arguments \
- '--caps-remove=[capability to remove]:capability' \
- '--caps-retain=[capability to retain]:capability' \
- '--cpu=[cpu limit for the preceding image]:cpu limit' \
- '--cpu-shares=[assign the specified CPU time share weight]:weight' \
- '--dns=[name servers to write in /etc/resolv.conf]:name servers' \
- '--dns-domain=[DNS domain to write in]:domain' \
- '--dns-opt=[DNS options to write in /etc/resolv.conf]:dns options' \
- '--dns-search=[DNS search domains to write in /etc/resolv.conf]:search domains' \
- "--environment=[set the app's environment variables]:variable key=value" \
- '--exec=[override the exec command for the preceding image]:command' \
- '--group=[group override for the preceding image]:group' \
- "--hostname=[pod's hostname]:hostname" \
- "--hosts-entry=[entries to add to the pod-wide /etc/hosts. Pass 'host' to use the host's /etc/hosts]:hosts entry" \
- '--inherit-env[inherit all environment variables not set by apps]' \
- '--interactive[run pod interactively]' \
- '--mds-register[register pod with metadata service]' \
- '--memory=[memory limit for the preceding image]:memory limit' \
- '--mount=[mount point binding a volume to a path within an app]:mount point' \
- '--name=[set the name of the app]:name' \
- "--net=[configure the pod's networking]:networks" \
- '--no-overlay[disable overlay filesystem]' \
- '--pod-manifest=[the path to the pod manifest]:manifest:_files' \
- '--port=[ports to expose on the host]:NAME\:HOSTPORT' \
- '--private-users[run within user namespaces]' \
- '--set-env=[an environment variable to set for apps]:NAME=VALUE' \
- '--set-env-file=[the path to an environment variables file]:file:_files' \
- '--signature=[local signature file to use in validating the preceding image]:signature:_files' \
- '--stage1-from-dir=[a filename of an image in stage1 images directory to use as stage1]:image' \
- '--stage1-hash=[a hash of an image to use as stage1]:image hash' \
- '--stage1-name=[a name of an image to use as stage1]:image name' \
- '--stage1-path=[a path to an image to use as stage1]:image path:_files' \
- '--stage1-url=[a URL to an image to use as stage1]:image url' \
- '--supplementary-gids=[supplementary group IDs override for the preceding image]:group IDs' \
- '--user=[user override for the preceding image]:user' \
- "--user-annotation=[set the app's annotations]:annotation key=value" \
- "--user-label=[set the app's labels]:label key=value" \
- '--uuid-file-save=[write out pod UUID to specified file]:uuid file:_files' \
- '--volume=[volumes to make available in the pod]:volume' \
- '--working-dir=[override the working directory of the preceding image]:working directory:_files -/' \
- '1:IMAGE:_rkt_images' \
- ;;
-
- run-prepared)
- _arguments \
- '--dns=[name servers to write in /etc/resolv.conf]:name servers' \
- '--dns-domain=[DNS domain to write in]:domain' \
- '--dns-opt=[DNS options to write in /etc/resolv.conf]:dns options' \
- '--dns-search=[DNS search domains to write in /etc/resolv.conf]:search domains' \
- "--hostname=[pod's hostname]:hostname" \
- "--hosts-entry=[entries to add to the pod-wide /etc/hosts. Pass 'host' to use the host's /etc/hosts]:hosts entry" \
- '--interactive[run pod interactively]' \
- '--mds-register[register pod with metadata service]' \
- "--net=[configure the pod's networking]:networks" \
- '1:POD:_rkt_pods' \
- ;;
-
- status)
- _arguments \
- '--format=[choose the output format]:format:(json json-pretty)' \
- '--wait[toggles waiting for the pod to exit]' \
- '--wait-ready[toggles waiting until the pod is ready]' \
- '1:POD:_rkt_pods' \
- ;;
-
- stop)
- _arguments \
- '--force[forced stopping]' \
- '--uuid-file=[read pod UUID from file instead of argument]:uuid file:_files' \
- '1:POD:_rkt_pods' \
- ;;
-
- trust)
- _arguments \
- '--insecure-allow-http[allow HTTP use for key discovery and/or retrieval]' \
- '--prefix=[prefix to limit trust to]:prefix' \
- '--root[add root key from filesystem without a prefix]' \
- '--skip-fingerprint-review[accept key without fingerprint confirmation]' \
- ;;
- esac
- ;;
- esac
-
- case $state in
- rkt_help_args)
- case $words[1] in
- image)
- _arguments \
- '1: :_rkt_image_cmds'
- ;;
- esac
- ;;
-
- rkt_image_args)
- case $words[1] in
- cat-manifest)
- _arguments \
- '--pretty-print[apply indent to format the output]' \
- '1:IMAGE:_rkt_images' \
- ;;
-
- export)
- _arguments \
- '--overwrite[overwrite output ACI]' \
- '1:IMAGE:_rkt_images' \
- '2:OUTPUT_ACI_FILE:_files' \
- ;;
-
- extract|render)
- _arguments \
- '--overwrite[overwrite output ACI]' \
- '--rootfs-only[extract rootfs only]' \
- '1:IMAGE:_rkt_images' \
- '2:OUTPUT_DIR:_files -/' \
- ;;
-
- gc)
- _arguments \
- '--grace-period=[duration to wait before discarding inactive pods from garbage]:duration' \
- ;;
-
- list)
- _arguments \
- '--fields=[comma-separated list of fields to display]:fields:{_values -s , id name importtime lastused size latest}' \
- '--full[use long output format]' \
- '--no-legend[suppress a legend with the list]' \
- ;;
-
- rm)
- _arguments \
- '*:IMAGE:_rkt_images' \
- ;;
- esac
- ;;
- esac
-}
-
-_rkt_cmds() {
- local -a commands
- commands=(
- 'api-service:Run API service'
- 'cat-manifest:Inspect and print the pod manifest'
- 'config:Print configuration for each stage in JSON format'
- 'enter:Enter the namespaces of an app within a rkt pod'
- 'export:Export an app from an exited pod to an ACI file'
- 'fetch:Fetch image(s) and store them in the local store'
- 'gc:Garbage collect rkt pods no longer in use'
- 'image:Operate on image(s) in the local store'
- 'list:List pods'
- 'metadata-service:Run metadata service'
- 'prepare:Prepare to run image(s) in a pod in rkt'
- 'rm:Remove all files and resources associated with an exited pod'
- 'run:Run image(s) in a pod in rkt'
- 'run-prepared:Run a prepared application pod in rkt'
- 'status:Check the status of a rkt pod'
- 'stop:Stop a pod'
- 'trust:Trust a key for image verification'
- 'version:Print the version and exit'
- 'help:Help about any command'
- )
- _describe 'command' commands
-}
-
-_rkt_image_cmds() {
- local -a commands
- commands=(
- 'cat-manifest:Inspect and print the image manifest'
- 'export:Export a stored image to an ACI file'
- 'extract:Extract a stored image to a directory'
- 'gc:Garbage collect local store'
- 'list:List images in the local store'
- 'render:Render a stored image to a directory with all its dependencies'
- 'rm:Remove image(s) with the given ID(s) or name(s) from the local store'
- )
- _describe 'command' commands
-}
-
-_rkt_images() {
- local -a images
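-  # offer each image under both its id and its name: sed splits the two
-  # tab-separated fields onto separate lines and escapes literal colons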
- images=($(rkt image list --fields id,name --no-legend | sed 's/\t/\n/;s/:/\\:/g' | sort | uniq))
- _describe 'IMAGE' images
-}
-
-_rkt_pods() {
- local -a pods
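-  # one candidate per line of `rkt list`: escape literal colons, turn the first
-  # tab into the uuid:description separator, and flatten the rest to spaces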
-  local IFS=$'\n'
- pods=($(rkt list --full --no-legend | sed 's/:/\\:/g;s/\t/:/;s/\t/ /g'))
- _describe 'POD' pods
-}
-
-_rkt "$@"
-
-# Local Variables:
-# mode: Shell-Script
-# sh-indentation: 2
-# indent-tabs-mode: nil
-# sh-basic-offset: 2
-# End:
-# vim: ft=zsh sw=2 ts=2 et
diff --git a/src/_rslsync b/src/_rslsync
index c72fb75..f3907ba 100644
--- a/src/_rslsync
+++ b/src/_rslsync
@@ -28,7 +28,7 @@
# Description
# -----------
#
-# Completion script for resilio sync 2.7.3 (https://www.resilio.com/individuals/).
+# Completion script for resilio sync 3.1.2 (https://www.resilio.com/individuals/).
#
# ------------------------------------------------------------------------------
# Authors
@@ -38,30 +38,21 @@
#
# ------------------------------------------------------------------------------
-_rslsync(){
- integer ret=1
- local -a args
- args+=(
- '(- *)--help[Print help]'
- '--config[Use a configuration file]:file:_files'
- '--storage[Storage path for identity and license]:path:_files -/'
- '--identity[Creates user identity]:name:'
- '--license[Apply owner license]:file:_files'
- '--decrypt[Decrypt encrypted folder]:'
- '--upgradedb[Upgrade databases in specified storage or upgrade a single db]:db:_files'
- '--nodaemon[Do not daemonize]'
- '--dump-sample-config[Print a sample configuration file]'
- '--log[Set log file]:file:_files'
- '(--help)--webui.listen[Set the webui listening interface]:ip\:port:'
- '--generate-secret[Generate a read/write key]::version:(2)'
- '--get-ro-secret[Get the read-only key associated to a read/write key]:key:'
- '--server[Set Management Console address]:ip\:port:'
- )
- _arguments $args[@] && ret=0
- return ret
-}
-
-_rslsync
+_arguments \
+ '(- *)--help[Print help]' \
+ '--config[Use a configuration file]:file:_files' \
+ '--storage[Storage path for identity and license]:path:_files -/' \
+  '--identity[Create user identity]:name' \
+ '--license[Apply owner license]:file:_files' \
+ '--decrypt[Decrypt encrypted folder]:secret_dbpath_encrypted_folder_output_folder' \
+ '--upgradedb[Upgrade databases in specified storage or upgrade a single db]:db:_files' \
+ '--nodaemon[Do not daemonize]' \
+ '--dump-sample-config[Print a sample configuration file]' \
+ '--log[Set log file]:file:_files' \
+ '(--help)--webui.listen[Set the webui listening interface]:ip_port:' \
+ '--generate-secret[Generate a read/write key]::version:(2)' \
+ '--get-ro-secret[Get the read-only key associated to a read/write key]:key' \
+ '--server[Set Management Console address]:ip_port'
# Local Variables:
# mode: Shell-Script
diff --git a/src/_rsvm b/src/_rsvm
deleted file mode 100644
index 3840eb2..0000000
--- a/src/_rsvm
+++ /dev/null
@@ -1,88 +0,0 @@
-#compdef rsvm
-# ------------------------------------------------------------------------------
-# Copyright (c) 2016 Github zsh-users - https://github.com/zsh-users
-# All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are met:
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above copyright
-# notice, this list of conditions and the following disclaimer in the
-# documentation and/or other materials provided with the distribution.
-# * Neither the name of the zsh-users nor the
-# names of its contributors may be used to endorse or promote products
-# derived from this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
-# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-# DISCLAIMED. IN NO EVENT SHALL ZSH-USERS BE LIABLE FOR ANY
-# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
-# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
-# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-# ------------------------------------------------------------------------------
-# Description
-# -----------
-#
-# Completion script for rsvm (https://github.com/sdepold/rsvm).
-# Adapted from Docker Machine completion by hhatto (https://github.com/ilkka)
-#
-# ------------------------------------------------------------------------------
-# Authors
-# -------
-#
-# * michaelmior (https://github.com/michaelmior)
-#
-# ------------------------------------------------------------------------------
-
-# helper function for completing available rusts
-__rusts() {
- declare -a rusts_cmd
- rusts_cmd=($(ls "$HOME/.rsvm/versions"))
- _describe 'rusts' rusts_cmd
-}
-
-# subcommands
-local -a _rsvm_cmds
-
-_rsvm_cmds=(
- 'help:Show a help message' \
- 'install:Download and install a version' \
- 'uninstall:Uninstall a version' \
- 'use:Activate a version for now and the future' \
- 'ls:List all installed versions of rust' \
- 'ls-remote:List remote versions available for install' \
- 'ls-channel:Print a channel version available for install' \
-)
-
-# subcommand completion functions
-__uninstall() {
- __rusts
-}
-
-__use() {
- __rusts
-}
-
-# common args
-_arguments \
- '--help[show help]' \
- '--version[print the version]' \
- '*:: :->command'
-
-# start rusts!
-if (( CURRENT == 1 )); then
- _describe -t commands 'rsvm command' _rsvm_cmds
-fi
-
-local -a _command_args
-case "$words[1]" in
- uninstall)
- __uninstall ;;
- use)
- __use ;;
-esac
diff --git a/src/_screencapture b/src/_screencapture
index 43b4830..874d8b1 100644
--- a/src/_screencapture
+++ b/src/_screencapture
@@ -34,7 +34,7 @@
#
# ------------------------------------------------------------------------------
-_arguments \
+_arguments -s \
'-c[force screen capture to go to the clipboard]' \
'(-i -J -s -w -W)-b[capture touch bar only non interactive modes]' \
'(-b -i -J -s -w -W)-C[capture the cursor as well as the screen only in non interactive modes]' \
diff --git a/src/_sfdx b/src/_sfdx
deleted file mode 100644
index 613fc9e..0000000
--- a/src/_sfdx
+++ /dev/null
@@ -1,935 +0,0 @@
-#compdef sfdx
-
-# DESCRIPTION: Zsh completion script for the Salesforce CLI
-# AUTHOR: Wade Wegner (@WadeWegner)
-# REPO: https://github.com/wadewegner/salesforce-cli-zsh-completion
-# LICENSE: https://github.com/wadewegner/salesforce-cli-zsh-completion/blob/master/LICENSE
-
-local -a _1st_arguments
-
-_1st_arguments=(
- "force\:alias\:list":"list username aliases for the Salesforce CLI"
- "force\:alias\:set":"set username aliases for the Salesforce CLI"
- "force\:apex\:class\:create":"create an Apex class"
- "force\:apex\:execute":"execute anonymous Apex code"
- "force\:apex\:log\:get":"fetch a debug log"
- "force\:apex\:log\:list":"list debug logs"
- "force\:apex\:test\:report":"display test results"
- "force\:apex\:test\:run":"invoke Apex tests"
- "force\:apex\:trigger\:create":"create an Apex trigger"
- "force\:auth\:jwt\:grant":"authorize an org using the JWT flow"
- "force\:auth\:sfdxurl\:store":"authorize an org using an SFDX auth URL"
- "force\:auth\:web\:login":"authorize an org using the web login flow"
- "force\:config\:get":"get config var values for given names"
- "force\:config\:list":"list config vars for the Salesforce CLI"
- "force\:config\:set":"set config vars for the Salesforce CLI"
- "force\:data\:bulk\:delete":"bulk delete records from a csv file"
- "force\:data\:bulk\:status":"view the status of a bulk data load job or batch"
- "force\:data\:bulk\:upsert":"bulk upsert records from a CSV file"
- "force\:data\:record\:create":"create a record"
- "force\:data\:record\:delete":"delete a record"
- "force\:data\:record\:get":"view a record"
- "force\:data\:record\:update":"update a record"
- "force\:data\:soql\:query":"execute a SOQL query"
- "force\:data\:tree\:export":"export data from an org into sObject tree format for force:data:tree:import consumption"
- "force\:data\:tree\:import":"import data into an org using SObject Tree Save API"
- "force\:doc\:commands\:display":"display help for force commands"
- "force\:doc\:commands\:list":"list the force commands"
- "force\:lightning\:app\:create":"create a Lightning app"
- "force\:lightning\:component\:create":"create a Lightning component"
- "force\:lightning\:event\:create":"create a Lightning event"
- "force\:lightning\:interface\:create":"create a Lightning interface"
- "force\:lightning\:lint":"analyse (lint) Lightning component code"
- "force\:lightning\:test\:create":"create a Lightning test"
- "force\:lightning\:test\:install":"install Lightning Testing Service unmanaged package in your org"
- "force\:lightning\:test\:run":"invoke Lightning component tests"
- "force\:limits\:api\:display":"display current org’s limits"
- "force\:mdapi\:convert":"convert Metadata API source into the Salesforce DX source format"
- "force\:mdapi\:deploy":"deploy metadata to an org using Metadata API"
- "force\:mdapi\:deploy\:report":"check the status of a metadata deployment"
- "force\:mdapi\:retrieve":"retrieve metadata from an org using Metadata API"
- "force\:mdapi\:retrieve\:report":"check the status of a metadata retrieval"
- "force\:org\:create":"create a scratch org"
- "force\:org\:delete":"mark a scratch org for deletion"
- "force\:org\:display":"get org description"
- "force\:org\:list":"list all orgs you’ve created or authenticated to"
- "force\:org\:open":"open an org in your browser"
- "force\:org\:shape\:create":"create a snapshot of org edition, features, and licenses"
- "force\:org\:shape\:delete":"delete all org shapes for a target org"
- "force\:org\:shape\:list":"list all org shapes you’ve created"
- "force\:package1\:version\:create":"create a first-generation package version in the release org"
- "force\:package1\:version\:create\:get":"retrieve the status of a package version creation request"
- "force\:package1\:version\:display":"display details about a first-generation package version"
- "force\:package1\:version\:list":"list package versions for the specified first-generation package or for the org"
- "force\:package2\:create":"create a second-generation package"
- "force\:package2\:list":"list all second-generation packages in the Dev Hub org"
- "force\:package2\:update":"update a second-generation package"
- "force\:package2\:version\:create":"create a second-generation package version"
- "force\:package2\:version\:create\:get":"retrieve a package version creation request"
- "force\:package2\:version\:create\:list":"list package version creation requests"
- "force\:package2\:version\:get":"retrieve a package version in the Dev Hub org"
- "force\:package2\:version\:list":"list all package versions in the Dev Hub org"
- "force\:package2\:version\:update":"update a second-generation package version"
- "force\:package\:install":"install a package in the target org"
- "force\:package\:install\:get":"retrieve the status of a package installation request"
- "force\:package\:installed\:list":"list the org’s installed packages"
- "force\:package\:uninstall":"uninstall a second-generation package from the target org"
- "force\:package\:uninstall\:get":"retrieve status of package uninstall request"
- "force\:project\:create":"create a new SFDX project"
- "force\:project\:upgrade":"update project config files to the latest format"
- "force\:schema\:sobject\:describe":"describe an object"
- "force\:schema\:sobject\:list":"list all objects of a specified category"
- "force\:source\:convert":"convert Salesforce DX source into the Metadata API source format"
- "force\:source\:open":"edit a Lightning Page with Lightning App Builder"
- "force\:source\:pull":"pull source from the scratch org to the project"
- "force\:source\:push":"push source to an org from the project"
- "force\:source\:status":"list local changes and/or changes in a scratch org"
- "force\:user\:create":"create a user for a scratch org"
- "force\:user\:display":"displays information about a user of a scratch org"
- "force\:user\:list":"lists all users of a scratch org"
- "force\:user\:password\:generate":"generate a password for scratch org users"
- "force\:user\:permset\:assign":"assign a permission set to one or more users of an org"
- "force\:visualforce\:component\:create":"create a Visualforce component"
- "force\:visualforce\:page\:create":"create a Visualforce page"
-)
-
-_arguments '*:: :->command'
-
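-# with the '*:: :->command' spec, CURRENT is renumbered relative to the
-# remaining words, so CURRENT == 1 means the command word itself is being completed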
-if (( CURRENT == 1 )); then
- _describe -t commands "sfdx command" _1st_arguments
- return
-fi
-
-local -a _command_args
-case "$words[1]" in
- force:limits:api:display)
- _command_args=(
- '(-u|--targetusername)'{-u,--targetusername}'[username or alias for the target org; overrides default target org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:lightning:app:create)
- _command_args=(
- '(-n|--appname)'{-n,--appname}'[name of the generated Lightning app]' \
- '(-t|--template)'{-t,--template}'[template to use for file creation (DefaultLightningApp*)]' \
- '(-d|--outputdir)'{-d,--outputdir}'[folder for saving the created files]' \
- '(-r|--reflect)'{-r,--reflect}'[switch to return flag detailed information]' \
- '(-a|--apiversion)'{-a,--apiversion}'[API version number (41.0*,40.0)]' \
- '(--json)--json[JSON output]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:data:bulk:delete)
- _command_args=(
- '(-s|--sobjecttype)'{-s,--sobjecttype}'[the sObject type of the records you’re deleting]' \
- '(-f|--csvfile)'{-f,--csvfile}'[the path to the CSV file containing the ids of the records to delete]:file:_files' \
- '(-w|--wait)'{-w,--wait}'[the number of minutes to wait for the command to complete before displaying the results]' \
- '(-u|--targetusername)'{-u,--targetusername}'[username or alias for the target org; overrides default target org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:data:bulk:status)
- _command_args=(
- '(-i|--jobid)'{-i,--jobid}'[the ID of the job you want to view or of the job whose batch you want to view]' \
- '(-b|--batchid)'{-b,--batchid}'[the ID of the batch whose status you want to view]' \
- '(-u|--targetusername)'{-u,--targetusername}'[username or alias for the target org; overrides default target org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:data:bulk:upsert)
- _command_args=(
- '(-s|--sobjecttype)'{-s,--sobjecttype}'[the sObject type of the records you want to upsert]' \
- '(-f|--csvfile)'{-f,--csvfile}'[the path to the CSV file that defines the records to upsert]:file:_files' \
- '(-i|--externalid)'{-i,--externalid}'[the column name of the external ID; if not provided, an arbitrary ID is used]' \
- '(-w|--wait)'{-w,--wait}'[the number of minutes to wait for the command to complete before displaying the results]' \
- '(-u|--targetusername)'{-u,--targetusername}'[username or alias for the target org; overrides default target org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:apex:class:create)
- _command_args=(
- '(-n|--classname)'{-n,--classname}'[name of the generated Apex class]' \
- '(-t|--template)'{-t,--template}'[template to use for file creation (DefaultApexClass*,ApexException,ApexUnitTest,InboundEmailService)]' \
- '(-d|--outputdir)'{-d,--outputdir}'[folder for saving the created files]' \
- '(-r|--reflect)'{-r,--reflect}'[switch to return flag detailed information]' \
- '(-a|--apiversion)'{-a,--apiversion}'[API version number (41.0*,40.0)]' \
- '(--json)--json[JSON output]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:doc:commands:display)
- _command_args=(
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:doc:commands:list)
- _command_args=(
- '(-u|--usage)'{-u,--usage}'[list only docopts usage strings]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:visualforce:component:create)
- _command_args=(
- '(-t|--template)'{-t,--template}'[template to use for file creation (DefaultVFComponent*)]' \
- '(-d|--outputdir)'{-d,--outputdir}'[folder for saving the created files]' \
- '(-r|--reflect)'{-r,--reflect}'[switch to return flag detailed information]' \
- '(-n|--componentname)'{-n,--componentname}'[name of the generated Visualforce component]' \
- '(-a|--apiversion)'{-a,--apiversion}'[API version number (41.0*,40.0)]' \
- '(-l|--label)'{-l,--label}'[Visualforce component label]' \
- '(--json)--json[JSON output]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:lightning:component:create)
- _command_args=(
- '(-n|--componentname)'{-n,--componentname}'[name of the generated Lightning component]' \
- '(-t|--template)'{-t,--template}'[template to use for file creation (DefaultLightningCmp*)]' \
- '(-d|--outputdir)'{-d,--outputdir}'[folder for saving the created files]' \
- '(-r|--reflect)'{-r,--reflect}'[switch to return flag detailed information]' \
- '(-a|--apiversion)'{-a,--apiversion}'[API version number (41.0*,40.0)]' \
- '(--json)--json[JSON output]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:mdapi:convert)
- _command_args=(
- '(-r|--rootdir)'{-r,--rootdir}'[the root directory containing the Metadata API source]:file:_files' \
- '(-d|--outputdir)'{-d,--outputdir}'[the output directory to store the sfdx source]:file:_files' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:source:convert)
- _command_args=(
- '(-r|--rootdir)'{-r,--rootdir}'[the source directory for the source to be converted]:file:_files' \
- '(-d|--outputdir)'{-d,--outputdir}'[the output directory to export the Metadata API source to]:file:_files' \
- '(-n|--packagename)'{-n,--packagename}'[the name of the package to associate with the Metadata API source]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:org:create)
- _command_args=(
- '(-f|--definitionfile)'{-f,--definitionfile}'[path to a scratch org definition file]:file:_files' \
- '(-j|--definitionjson)'{-j,--definitionjson}'[scratch org definition in json format ]' \
- '(-n|--nonamespace)'{-n,--nonamespace}'[creates the scratch org with no namespace]' \
- '(-c|--noancestors)'{-c,--noancestors}'[do not include second-generation package ancestors in the scratch org]' \
- '(-i|--clientid)'{-i,--clientid}'[connected app consumer key]' \
- '(-s|--setdefaultusername)'{-s,--setdefaultusername}'[set the created org as the default username]' \
- '(-a|--setalias)'{-a,--setalias}'[set an alias for the created scratch org]' \
- '(-e|--env)'{-e,--env}'[environment where the scratch org is created: \[sandbox*,virtual,prototype\] (sandbox*,virtual,prototype)]' \
- '(-w|--wait)'{-w,--wait}'[the streaming client socket timeout (in minutes) (default:6, min:2)]' \
- '(-d|--durationdays)'{-d,--durationdays}'[duration of the scratch org (in days) (default:7, min:1, max:30)]' \
- '(-v|--targetdevhubusername)'{-v,--targetdevhubusername}'[username or alias for the dev hub org; overrides default dev hub org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:package2:create)
- _command_args=(
- '(-n|--name)'{-n,--name}'[package name]' \
- '(-o|--containeroptions)'{-o,--containeroptions}'[\[*Managed | Unlocked | Locked\] container options for the package (Managed=DeveloperManagedSubscriberManaged, Unlocked=DeveloperControlledSubscriberEditable, Locked=DeveloperControlledSubscriberLocked)]' \
- '(-d|--description)'{-d,--description}'[package description]' \
- '(-e|--nonamespace)'{-e,--nonamespace}'[creates the package with no namespace; available only for developer-controlled packages.]' \
- '(-v|--targetdevhubusername)'{-v,--targetdevhubusername}'[username or alias for the dev hub org; overrides default dev hub org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:user:create)
- _command_args=(
- '(-f|--definitionfile)'{-f,--definitionfile}'[file path to a user definition]:file:_files' \
- '(-a|--setalias)'{-a,--setalias}'[set an alias for the created username to reference within the CLI]' \
- '(-u|--targetusername)'{-u,--targetusername}'[username or alias for the target org; overrides default target org]' \
- '(-v|--targetdevhubusername)'{-v,--targetdevhubusername}'[username or alias for the dev hub org; overrides default dev hub org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:project:create)
- _command_args=(
- '(-n|--projectname)'{-n,--projectname}'[name of the generated project]' \
- '(-t|--template)'{-t,--template}'[template to use for file creation (Defaultsfdx-project.json*)]' \
- '(-d|--outputdir)'{-d,--outputdir}'[folder for saving the created files]' \
- '(-r|--reflect)'{-r,--reflect}'[switch to return flag detailed information]' \
- '(-l|--loginurl)'{-l,--loginurl}'[Salesforce instance login URL (https://login.salesforce.com*)]' \
- '(-x|--sourceapiversion)'{-x,--sourceapiversion}'[source API version number (41.0*)]' \
- '(-s|--namespace)'{-s,--namespace}'[project associated namespace]' \
- '(-p|--defaultpackagedir)'{-p,--defaultpackagedir}'[default package directory name (force-app*)]:file:_files' \
- '(--json)--json[JSON output]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:org:delete)
- _command_args=(
- '(-p|--noprompt)'{-p,--noprompt}'[no prompt to confirm deletion]' \
- '(-u|--targetusername)'{-u,--targetusername}'[username or alias for the target org]' \
- '(-v|--targetdevhubusername)'{-v,--targetdevhubusername}'[username or alias for the dev hub org; overrides default dev hub org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:mdapi:deploy)
- _command_args=(
- '(-c|--checkonly)'{-c,--checkonly}'[validate deploy but don’t save to the org (default:false)]' \
- '(-d|--deploydir)'{-d,--deploydir}'[root of directory tree of files to deploy]:file:_files' \
- '(-w|--wait)'{-w,--wait}'[wait time for command to finish in minutes (default: 0)]' \
- '(-i|--jobid)'{-i,--jobid}'[WARNING: The flag "jobid" has been deprecated and will be removed in v41.01.0 or later. Instead, use "sfdx force:mdapi:deploy:report -i <jobId>".]' \
- '(-l|--testlevel)'{-l,--testlevel}'[deployment testing level (NoTestRun,RunSpecifiedTests,RunLocalTests,RunAllTestsInOrg)]' \
- '(-r|--runtests)'{-r,--runtests}'[tests to run if --testlevel RunSpecifiedTests]' \
- '(-e|--rollbackonerror)'{-e,--rollbackonerror}'[WARNING: The flag "rollbackonerror" has been deprecated and will be removed in v41.01.0 or later. Instead, use "ignoreerrors".]' \
- '(-o|--ignoreerrors)'{-o,--ignoreerrors}'[ignore any errors and do not roll back deployment (default:false)]' \
- '(-g|--ignorewarnings)'{-g,--ignorewarnings}'[whether a warning will allow a deployment to complete successfully (default:false)]' \
- '(-f|--zipfile)'{-f,--zipfile}'[path to .zip file of metadata to deploy]:file:_files' \
- '(-u|--targetusername)'{-u,--targetusername}'[username or alias for the target org; overrides default target org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- '(--verbose)--verbose[verbose output of deploy results]' \
- )
- ;;
- force:mdapi:deploy:report)
- _command_args=(
- '(-w|--wait)'{-w,--wait}'[wait time for command to finish in minutes (default: 0)]' \
- '(-i|--jobid)'{-i,--jobid}'[job ID of the deployment you want to check]' \
- '(-u|--targetusername)'{-u,--targetusername}'[username or alias for the target org; overrides default target org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- '(--verbose)--verbose[verbose output of deploy results]' \
- )
- ;;
- force:org:display)
- _command_args=(
- '(-u|--targetusername)'{-u,--targetusername}'[username or alias for the target org; overrides default target org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- '(--verbose)--verbose[emit additional command output to stdout]' \
- )
- ;;
- force:user:display)
- _command_args=(
- '(-u|--targetusername)'{-u,--targetusername}'[username or alias for the target org; overrides default target org]' \
- '(-v|--targetdevhubusername)'{-v,--targetdevhubusername}'[username or alias for the dev hub org; overrides default dev hub org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:lightning:event:create)
- _command_args=(
- '(-n|--eventname)'{-n,--eventname}'[name of the generated Lightning event]' \
- '(-t|--template)'{-t,--template}'[template to use for file creation (DefaultLightningEvt*)]' \
- '(-d|--outputdir)'{-d,--outputdir}'[folder for saving the created files]' \
- '(-r|--reflect)'{-r,--reflect}'[switch to return flag detailed information]' \
- '(-a|--apiversion)'{-a,--apiversion}'[API version number (41.0*,40.0)]' \
- '(--json)--json[JSON output]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:apex:execute)
- _command_args=(
- '(-f|--apexcodefile)'{-f,--apexcodefile}'[path to a local file containing Apex code]:file:_files' \
- '(-u|--targetusername)'{-u,--targetusername}'[username or alias for the target org; overrides default target org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:config:get)
- _command_args=(
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- '(--verbose)--verbose[emit additional command output to stdout]' \
- )
- ;;
- force:package:install)
- _command_args=(
- '(-i|--id)'{-i,--id}'[ID of the package version to install (starts with 04t)]' \
- '(-w|--wait)'{-w,--wait}'[number of minutes to wait for installation status]' \
- '(-k|--installationkey)'{-k,--installationkey}'[installation key for key-protected package (default: null)]' \
- '(-p|--publishwait)'{-p,--publishwait}'[number of minutes to wait for subscriber package version ID to become available in the target org]' \
- '(-u|--targetusername)'{-u,--targetusername}'[username or alias for the target org; overrides default target org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:package:install:get)
- _command_args=(
- '(-i|--requestid)'{-i,--requestid}'[ID of the package install request you want to check]' \
- '(-u|--targetusername)'{-u,--targetusername}'[username or alias for the target org; overrides default target org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:package:installed:list)
- _command_args=(
- '(-u|--targetusername)'{-u,--targetusername}'[username or alias for the target org; overrides default target org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:lightning:interface:create)
- _command_args=(
- '(-n|--interfacename)'{-n,--interfacename}'[name of the generated Lightning interface]' \
- '(-t|--template)'{-t,--template}'[template to use for file creation (DefaultLightningIntf*)]' \
- '(-d|--outputdir)'{-d,--outputdir}'[folder for saving the created files]' \
- '(-r|--reflect)'{-r,--reflect}'[switch to return flag detailed information]' \
- '(-a|--apiversion)'{-a,--apiversion}'[API version number (41.0*,40.0)]' \
- '(--json)--json[JSON output]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:auth:jwt:grant)
- _command_args=(
- '(-u|--username)'{-u,--username}'[authentication username]' \
- '(-f|--jwtkeyfile)'{-f,--jwtkeyfile}'[path to a file containing the private key]:file:_files' \
- '(-i|--clientid)'{-i,--clientid}'[OAuth client ID (sometimes called the consumer key)]' \
- '(-r|--instanceurl)'{-r,--instanceurl}'[the login URL of the instance the org lives on]' \
- '(-d|--setdefaultdevhubusername)'{-d,--setdefaultdevhubusername}'[set the authenticated org as the default dev hub org for scratch org creation]' \
- '(-s|--setdefaultusername)'{-s,--setdefaultusername}'[set the authenticated org as the default username that all commands run against]' \
- '(-a|--setalias)'{-a,--setalias}'[set an alias for the authenticated org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:lightning:lint)
- _command_args=(
- '(-i|--ignore)'{-i,--ignore}'[pattern used to ignore some folders]' \
- '(--files)--files[pattern used to include specific files]:file:_files' \
- '(-j|--json)'{-j,--json}'[format output as JSON]' \
- '(--config)--config[path to a custom ESLint configuration file]:file:_files' \
- '(--verbose)--verbose[report warnings in addition to errors]' \
- '(--exit)--exit[exit with error code 1 if there are lint issues]' \
- )
- ;;
- force:alias:list)
- _command_args=(
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:config:list)
- _command_args=(
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:org:list)
- _command_args=(
- '(--all)--all[include expired, deleted, and unknown-status scratch orgs]' \
- '(--clean)--clean[remove all local org authorizations for non-active orgs]' \
- '(-p|--noprompt)'{-p,--noprompt}'[do not prompt for confirmation]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- '(--verbose)--verbose[list more information about each org]' \
- )
- ;;
- force:package2:list)
- _command_args=(
- '(-v|--targetdevhubusername)'{-v,--targetdevhubusername}'[username or alias for the dev hub org; overrides default dev hub org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:user:list)
- _command_args=(
- '(-u|--targetusername)'{-u,--targetusername}'[username or alias for the target org; overrides default target org]' \
- '(-v|--targetdevhubusername)'{-v,--targetdevhubusername}'[username or alias for the dev hub org; overrides default dev hub org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:apex:log:get)
- _command_args=(
- '(-i|--logid)'{-i,--logid}'[ID of the log to display]' \
- '(-u|--targetusername)'{-u,--targetusername}'[username or alias for the target org; overrides default target org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:apex:log:list)
- _command_args=(
- '(-u|--targetusername)'{-u,--targetusername}'[username or alias for the target org; overrides default target org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:org:open)
- _command_args=(
- '(-p|--path)'{-p,--path}'[navigation URL path]:file:_files' \
- '(-r|--urlonly)'{-r,--urlonly}'[display navigation URL, but don’t launch browser]' \
- '(-u|--targetusername)'{-u,--targetusername}'[username or alias for the target org; overrides default target org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:source:open)
- _command_args=(
- '(-f|--sourcefile)'{-f,--sourcefile}'[file to edit]:file:_files' \
- '(-r|--urlonly)'{-r,--urlonly}'[generate a navigation URL; don’t launch the editor]' \
- '(-u|--targetusername)'{-u,--targetusername}'[username or alias for the target org; overrides default target org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:visualforce:page:create)
- _command_args=(
- '(-t|--template)'{-t,--template}'[template to use for file creation (DefaultVFPage*)]' \
- '(-d|--outputdir)'{-d,--outputdir}'[folder for saving the created files]' \
- '(-r|--reflect)'{-r,--reflect}'[switch to return flag detailed information]' \
- '(-n|--pagename)'{-n,--pagename}'[name of the generated Visualforce page]' \
- '(-a|--apiversion)'{-a,--apiversion}'[API version number (41.0*,40.0)]' \
- '(-l|--label)'{-l,--label}'[Visualforce page label]' \
- '(--json)--json[JSON output]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:user:password:generate)
- _command_args=(
- '(-o|--onbehalfof)'{-o,--onbehalfof}'[comma-separated list of usernames for which to generate passwords]' \
- '(-u|--targetusername)'{-u,--targetusername}'[username or alias for the target org; overrides default target org]' \
- '(-v|--targetdevhubusername)'{-v,--targetdevhubusername}'[username or alias for the dev hub org; overrides default dev hub org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:user:permset:assign)
- _command_args=(
- '(-n|--permsetname)'{-n,--permsetname}'[the name of the permission set to assign]' \
- '(-o|--onbehalfof)'{-o,--onbehalfof}'[comma-separated list of usernames or aliases to assign the permission set to]' \
- '(-u|--targetusername)'{-u,--targetusername}'[username or alias for the target org; overrides default target org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:source:pull)
- _command_args=(
- '(-w|--wait)'{-w,--wait}'[wait time for command to finish in minutes (default: 33) (default:33, min:1)]' \
- '(-f|--forceoverwrite)'{-f,--forceoverwrite}'[ignore conflict warnings and overwrite changes to the project]' \
- '(-u|--targetusername)'{-u,--targetusername}'[username or alias for the target org; overrides default target org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:source:push)
- _command_args=(
- '(-f|--forceoverwrite)'{-f,--forceoverwrite}'[ignore conflict warnings and overwrite changes to scratch org]' \
- '(-g|--ignorewarnings)'{-g,--ignorewarnings}'[deploy changes even if warnings are generated]' \
- '(-w|--wait)'{-w,--wait}'[wait time for command to finish in minutes (default: 33) (default:33, min:1)]' \
- '(-u|--targetusername)'{-u,--targetusername}'[username or alias for the target org; overrides default target org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:data:record:create)
- _command_args=(
- '(-s|--sobjecttype)'{-s,--sobjecttype}'[the type of the record you’re creating]' \
- '(-v|--values)'{-v,--values}'[the <fieldName>=<value> pairs you’re creating]' \
- '(-t|--usetoolingapi)'{-t,--usetoolingapi}'[create the record with tooling api]' \
- '(-u|--targetusername)'{-u,--targetusername}'[username or alias for the target org; overrides default target org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:data:record:delete)
- _command_args=(
- '(-s|--sobjecttype)'{-s,--sobjecttype}'[the type of the record you’re deleting]' \
- '(-i|--sobjectid)'{-i,--sobjectid}'[the ID of the record you’re deleting]' \
- '(-w|--where)'{-w,--where}'[a list of <fieldName>=<value> pairs to search for]' \
- '(-t|--usetoolingapi)'{-t,--usetoolingapi}'[delete the record with Tooling API]' \
- '(-u|--targetusername)'{-u,--targetusername}'[username or alias for the target org; overrides default target org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:data:record:get)
- _command_args=(
- '(-s|--sobjecttype)'{-s,--sobjecttype}'[the type of the record you’re retrieving]' \
- '(-i|--sobjectid)'{-i,--sobjectid}'[the ID of the record you’re retrieving]' \
- '(-w|--where)'{-w,--where}'[a list of <fieldName>=<value> pairs to search for]' \
- '(-t|--usetoolingapi)'{-t,--usetoolingapi}'[retrieve the record with Tooling API]' \
- '(-u|--targetusername)'{-u,--targetusername}'[username or alias for the target org; overrides default target org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:data:record:update)
- _command_args=(
- '(-s|--sobjecttype)'{-s,--sobjecttype}'[the type of the record you’re updating]' \
- '(-i|--sobjectid)'{-i,--sobjectid}'[the ID of the record you’re updating]' \
- '(-w|--where)'{-w,--where}'[a list of <fieldName>=<value> pairs to search for]' \
- '(-v|--values)'{-v,--values}'[the <fieldName>=<value> pairs you’re updating]' \
- '(-t|--usetoolingapi)'{-t,--usetoolingapi}'[update the record with Tooling API]' \
- '(-u|--targetusername)'{-u,--targetusername}'[username or alias for the target org; overrides default target org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:mdapi:retrieve)
- _command_args=(
- '(-a|--apiversion)'{-a,--apiversion}'[target API version for the retrieve (default 41.0)]' \
- '(-w|--wait)'{-w,--wait}'[wait time for command to finish in minutes (default: -1 (no limit))]' \
- '(-r|--retrievetargetdir)'{-r,--retrievetargetdir}'[directory root for the retrieved files]:file:_files' \
- '(-k|--unpackaged)'{-k,--unpackaged}'[file path of manifest of components to retrieve]:file:_files' \
-        '(-d|--sourcedir)'{-d,--sourcedir}'[source dir to use instead of the default package dir in sfdx-project.json]' \
- '(-p|--packagenames)'{-p,--packagenames}'[a comma-separated list of packages to retrieve]' \
- '(-s|--singlepackage)'{-s,--singlepackage}'[a single-package retrieve (default: false)]' \
- '(-i|--jobid)'{-i,--jobid}'[WARNING: The flag "jobid" has been deprecated and will be removed in v41.01.0 or later. Instead, use "sfdx force:mdapi:retrieve:report -i <jobId> -r <targetDir>".]' \
- '(-u|--targetusername)'{-u,--targetusername}'[username or alias for the target org; overrides default target org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- '(--verbose)--verbose[verbose output of retrieve result]' \
- )
- ;;
- force:mdapi:retrieve:report)
- _command_args=(
- '(-w|--wait)'{-w,--wait}'[wait time for command to finish in minutes (default: -1 (no limit))]' \
- '(-r|--retrievetargetdir)'{-r,--retrievetargetdir}'[directory root for the retrieved files]:file:_files' \
- '(-i|--jobid)'{-i,--jobid}'[job ID of the retrieve you want to check]' \
- '(-u|--targetusername)'{-u,--targetusername}'[username or alias for the target org; overrides default target org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- '(--verbose)--verbose[verbose output of retrieve result]' \
- )
- ;;
- force:alias:set)
- _command_args=(
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:config:set)
- _command_args=(
- '(-g|--global)'{-g,--global}'[set config var globally (to be used from any directory)]:file:_files' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:auth:sfdxurl:store)
- _command_args=(
- '(-f|--sfdxurlfile)'{-f,--sfdxurlfile}'[path to a file containing the sfdx url]:file:_files' \
- '(-d|--setdefaultdevhubusername)'{-d,--setdefaultdevhubusername}'[set the authenticated org as the default dev hub org for scratch org creation]' \
- '(-s|--setdefaultusername)'{-s,--setdefaultusername}'[set the authenticated org as the default username that all commands run against]' \
- '(-a|--setalias)'{-a,--setalias}'[set an alias for the authenticated org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:org:shape:create)
- _command_args=(
- '(-u|--targetusername)'{-u,--targetusername}'[username or alias for the target org; overrides default target org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:org:shape:delete)
- _command_args=(
- '(-p|--noprompt)'{-p,--noprompt}'[do not prompt for confirmation]' \
- '(-u|--targetusername)'{-u,--targetusername}'[username for the target org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:org:shape:list)
- _command_args=(
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- '(--verbose)--verbose[list more information about each org shape]' \
- )
- ;;
- force:schema:sobject:describe)
- _command_args=(
- '(-s|--sobjecttype)'{-s,--sobjecttype}'[the API name of the object to describe]' \
- '(-u|--targetusername)'{-u,--targetusername}'[username or alias for the target org; overrides default target org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:schema:sobject:list)
- _command_args=(
- '(-c|--sobjecttypecategory)'{-c,--sobjecttypecategory}'[the type of objects to list (all|custom|standard)]' \
- '(-u|--targetusername)'{-u,--targetusername}'[username or alias for the target org; overrides default target org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:data:soql:query)
- _command_args=(
- '(-q|--query)'{-q,--query}'[SOQL query to execute]' \
- '(-t|--usetoolingapi)'{-t,--usetoolingapi}'[execute query with Tooling API]' \
- '(-r|--resultformat)'{-r,--resultformat}'[query result format emitted to stdout; --json flag overrides this parameter (human*,csv,json)]' \
- '(-u|--targetusername)'{-u,--targetusername}'[username or alias for the target org; overrides default target org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:source:status)
- _command_args=(
- '(-a|--all)'{-a,--all}'[list all the changes that have been made]' \
- '(-l|--local)'{-l,--local}'[list the changes that have been made locally]' \
- '(-r|--remote)'{-r,--remote}'[list the changes that have been made in the scratch org]' \
- '(-u|--targetusername)'{-u,--targetusername}'[username or alias for the target org; overrides default target org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:lightning:test:create)
- _command_args=(
- '(-n|--testname)'{-n,--testname}'[name of the generated Lightning test]' \
- '(-t|--template)'{-t,--template}'[template to use for file creation (DefaultLightningTest*)]' \
- '(-d|--outputdir)'{-d,--outputdir}'[folder for saving the created files]' \
- '(-r|--reflect)'{-r,--reflect}'[switch to return flag detailed information]' \
- '(--json)--json[JSON output]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:lightning:test:install)
- _command_args=(
- '(-w|--wait)'{-w,--wait}'[number of minutes to wait for installation status (default:2)]' \
- '(-r|--releaseversion)'{-r,--releaseversion}'[release version of Lightning Testing Service (default:latest)]' \
-        '(-t|--packagetype)'{-t,--packagetype}'[type of unmanaged package. The "full" option contains both jasmine and mocha, plus examples (full*,jasmine,mocha)]' \
- '(-u|--targetusername)'{-u,--targetusername}'[username or alias for the target org; overrides default target org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:apex:test:report)
- _command_args=(
- '(-i|--testrunid)'{-i,--testrunid}'[ID of test run]' \
- '(-c|--codecoverage)'{-c,--codecoverage}'[retrieve code coverage results]' \
- '(-d|--outputdir)'{-d,--outputdir}'[directory to store test run files]:file:_files' \
- '(-r|--resultformat)'{-r,--resultformat}'[test result format emitted to stdout; --json flag overrides this parameter (human*,tap,junit,json)]' \
- '(-w|--wait)'{-w,--wait}'[the streaming client socket timeout (in minutes) (default:6, min:2)]' \
- '(-u|--targetusername)'{-u,--targetusername}'[username or alias for the target org; overrides default target org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- '(--verbose)--verbose[display Apex test processing details]' \
- )
- ;;
- force:apex:test:run)
- _command_args=(
- '(-n|--classnames)'{-n,--classnames}'[comma-separated list of Apex test class names to execute]' \
- '(-s|--suitenames)'{-s,--suitenames}'[comma-separated list of Apex test suite names to execute]' \
- '(-c|--codecoverage)'{-c,--codecoverage}'[retrieve code coverage results]' \
- '(-d|--outputdir)'{-d,--outputdir}'[directory to store test run files]:file:_files' \
- '(-l|--testlevel)'{-l,--testlevel}'[testlevel enum value (RunLocalTests,RunAllTestsInOrg,RunSpecifiedTests)]' \
- '(-r|--resultformat)'{-r,--resultformat}'[test result format emitted to stdout; --json flag overrides this parameter (human*,tap,junit,json)]' \
- '(-w|--wait)'{-w,--wait}'[the streaming client socket timeout (in minutes) (default:6, min:2)]' \
- '(--precompilewait)--precompilewait[how long to wait (in minutes) for Apex pre-compilation (default:3, min:3)]' \
- '(-y|--synchronous)'{-y,--synchronous}'[run tests from a single class synchronously]' \
- '(-u|--targetusername)'{-u,--targetusername}'[username or alias for the target org; overrides default target org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- '(--verbose)--verbose[display Apex test processing details]' \
- )
- ;;
- force:lightning:test:run)
- _command_args=(
- '(-a|--appname)'{-a,--appname}'[name of your Lightning test application]' \
- '(-d|--outputdir)'{-d,--outputdir}'[directory path to store test run artifacts: for example, log files and test results]:file:_files' \
- '(-r|--resultformat)'{-r,--resultformat}'[test result format emitted to stdout; --json flag overrides this parameter (human*,tap,junit,json)]' \
- '(-f|--configfile)'{-f,--configfile}'[path to config file for the test]:file:_files' \
- '(-o|--leavebrowseropen)'{-o,--leavebrowseropen}'[leave browser open]' \
- '(-t|--timeout)'{-t,--timeout}'[time (ms) to wait for results element in dom (default:60000)]' \
- '(-u|--targetusername)'{-u,--targetusername}'[username or alias for the target org; overrides default target org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:data:tree:export)
- _command_args=(
- '(-q|--query)'{-q,--query}'[soql query, or filepath of file containing a soql query, to retrieve records]:file:_files' \
- '(-p|--plan)'{-p,--plan}'[generate multiple sObject tree files and a plan definition file for aggregated import]' \
- '(-x|--prefix)'{-x,--prefix}'[prefix of generated files]' \
- '(-d|--outputdir)'{-d,--outputdir}'[directory to store files]:file:_files' \
- '(-u|--targetusername)'{-u,--targetusername}'[username or alias for the target org; overrides default target org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:data:tree:import)
- _command_args=(
- '(-f|--sobjecttreefiles)'{-f,--sobjecttreefiles}'[comma-delimited, ordered paths of json files containing collection of record trees to insert]:file:_files' \
- '(-p|--plan)'{-p,--plan}'[path to plan to insert multiple data files that have master-detail relationships]:file:_files' \
- '(-c|--contenttype)'{-c,--contenttype}'[if data file extension is not .json, provide content type (applies to all files)]' \
- '(--confighelp)--confighelp[display schema information for the --plan configuration file to stdout; if you use this option, all other options except --json are ignored]' \
- '(-u|--targetusername)'{-u,--targetusername}'[username or alias for the target org; overrides default target org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:apex:trigger:create)
- _command_args=(
- '(-n|--triggername)'{-n,--triggername}'[name of the generated Apex trigger]' \
- '(-t|--template)'{-t,--template}'[template to use for file creation (ApexTrigger*)]' \
- '(-d|--outputdir)'{-d,--outputdir}'[folder for saving the created files]' \
- '(-r|--reflect)'{-r,--reflect}'[switch to return flag detailed information]' \
- '(-a|--apiversion)'{-a,--apiversion}'[API version number (41.0*,40.0)]' \
- '(-s|--sobject)'{-s,--sobject}'[sObject to create a trigger on (SOBJECT*)]' \
-        '(-e|--triggerevents)'{-e,--triggerevents}'[events that fire the trigger (before insert*,before update,before delete,after insert,after update,after delete,after undelete)]' \
- '(--json)--json[JSON output]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:package:uninstall)
- _command_args=(
- '(-i|--id)'{-i,--id}'[ID of the package to uninstall (starts with 04t)]' \
- '(-w|--wait)'{-w,--wait}'[number of minutes to wait for uninstall status]' \
- '(-u|--targetusername)'{-u,--targetusername}'[username or alias for the target org; overrides default target org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:package:uninstall:get)
- _command_args=(
- '(-i|--requestid)'{-i,--requestid}'[ID of the package uninstall request you want to check]' \
- '(-u|--targetusername)'{-u,--targetusername}'[username or alias for the target org; overrides default target org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:package2:update)
- _command_args=(
- '(-i|--package2id)'{-i,--package2id}'[id of the package (starts with 0Ho)]' \
- '(-n|--name)'{-n,--name}'[package name]' \
- '(-d|--description)'{-d,--description}'[package description]' \
- '(-v|--targetdevhubusername)'{-v,--targetdevhubusername}'[username or alias for the dev hub org; overrides default dev hub org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:project:upgrade)
- _command_args=(
- '(-f|--forceupgrade)'{-f,--forceupgrade}'[run all upgrades even if project has already been upgraded]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:package1:version:create)
- _command_args=(
- '(-i|--packageid)'{-i,--packageid}'[ID of the metadata package (starts with 033) of which you’re creating a new version]' \
- '(-n|--name)'{-n,--name}'[package version name]' \
- '(-d|--description)'{-d,--description}'[package version description]' \
- '(-v|--version)'{-v,--version}'[package version in major.minor format, for example, 3.2]' \
- '(-m|--managedreleased)'{-m,--managedreleased}'[create a managed package version]' \
- '(-r|--releasenotesurl)'{-r,--releasenotesurl}'[release notes URL]' \
- '(-p|--postinstallurl)'{-p,--postinstallurl}'[post install URL]' \
- '(-k|--installationkey)'{-k,--installationkey}'[installation key for key-protected package (default: null)]' \
- '(-w|--wait)'{-w,--wait}'[minutes to wait for the package version to be created (default: 2 minutes)]' \
- '(-u|--targetusername)'{-u,--targetusername}'[username or alias for the target org; overrides default target org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:package2:version:create)
- _command_args=(
- '(-i|--package2id)'{-i,--package2id}'[ID of the parent package (starts with 0Ho)]' \
- '(-d|--directory)'{-d,--directory}'[path to directory that contains the contents of the package version]:file:_files' \
- '(-b|--branch)'{-b,--branch}'[the package version’s branch]' \
- '(-t|--tag)'{-t,--tag}'[the package version’s tag]' \
- '(-k|--installationkey)'{-k,--installationkey}'[installation key for key-protected package (default: null)]' \
- '(-p|--preserve)'{-p,--preserve}'[temp files are preserved that would otherwise be deleted]' \
- '(-j|--validateschema)'{-j,--validateschema}'[sfdx-project.json is validated against JSON schema]' \
- '(-w|--wait)'{-w,--wait}'[minutes to wait for the package version to be created (default:0)]' \
- '(-v|--targetdevhubusername)'{-v,--targetdevhubusername}'[username or alias for the dev hub org; overrides default dev hub org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:package1:version:create:get)
- _command_args=(
- '(-i|--requestid)'{-i,--requestid}'[PackageUploadRequest ID]' \
- '(-u|--targetusername)'{-u,--targetusername}'[username or alias for the target org; overrides default target org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:package2:version:create:get)
- _command_args=(
- '(-i|--package2createrequestid)'{-i,--package2createrequestid}'[package2 version creation request ID (starts with 08c)]' \
- '(-v|--targetdevhubusername)'{-v,--targetdevhubusername}'[username or alias for the dev hub org; overrides default dev hub org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:package2:version:create:list)
- _command_args=(
- '(-c|--createdlastdays)'{-c,--createdlastdays}'[created in the last specified number of days (starting at 00:00:00 of first day to now; 0 for today)]' \
- '(-s|--status)'{-s,--status}'[filter the list by version creation request status (Queued,InProgress,Success,Error)]' \
- '(-v|--targetdevhubusername)'{-v,--targetdevhubusername}'[username or alias for the dev hub org; overrides default dev hub org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:package1:version:display)
- _command_args=(
- '(-i|--packageversionid)'{-i,--packageversionid}'[metadata package version ID (starts with 04t)]' \
- '(-u|--targetusername)'{-u,--targetusername}'[username or alias for the target org; overrides default target org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:package2:version:get)
- _command_args=(
- '(-i|--package2versionid)'{-i,--package2versionid}'[the package version ID (starts with 05i)]' \
- '(-v|--targetdevhubusername)'{-v,--targetdevhubusername}'[username or alias for the dev hub org; overrides default dev hub org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:package1:version:list)
- _command_args=(
- '(-i|--packageid)'{-i,--packageid}'[metadata package ID (starts with 033)]' \
- '(-u|--targetusername)'{-u,--targetusername}'[username or alias for the target org; overrides default target org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:package2:version:list)
- _command_args=(
- '(-c|--createdlastdays)'{-c,--createdlastdays}'[created in the last specified number of days (starting at 00:00:00 of first day to now; 0 for today)]' \
- '(-m|--modifiedlastdays)'{-m,--modifiedlastdays}'[list items modified in the specified last number of days (starting at 00:00:00 of first day to now; 0 for today)]' \
- '(-i|--package2ids)'{-i,--package2ids}'[filter results on specified comma-delimited package2 ids (start with 0Ho)]' \
- '(-r|--released)'{-r,--released}'[display released versions only]' \
- '(-o|--orderby)'{-o,--orderby}'[order by the specified package2 version fields]' \
- '(-v|--targetdevhubusername)'{-v,--targetdevhubusername}'[username or alias for the dev hub org; overrides default dev hub org]' \
- '(--concise)--concise[display limited package2 version details]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- '(--verbose)--verbose[display extended package2 versions detail]' \
- )
- ;;
- force:package2:version:update)
- _command_args=(
- '(-i|--package2versionid)'{-i,--package2versionid}'[the package version ID (starts with 05i)]' \
- '(-n|--name)'{-n,--name}'[the package version name]' \
- '(-d|--description)'{-d,--description}'[the package version description]' \
- '(-b|--branch)'{-b,--branch}'[the package version branch]' \
- '(-t|--tag)'{-t,--tag}'[the package version tag]' \
- '(-k|--installationkey)'{-k,--installationkey}'[installation key for key-protected package (default: null)]' \
- '(-s|--setasreleased)'{-s,--setasreleased}'[set the package version as released (can’t be undone)]' \
- '(-v|--targetdevhubusername)'{-v,--targetdevhubusername}'[username or alias for the dev hub org; overrides default dev hub org]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- force:auth:web:login)
- _command_args=(
- '(-i|--clientid)'{-i,--clientid}'[OAuth client ID (sometimes called the consumer key)]' \
- '(-r|--instanceurl)'{-r,--instanceurl}'[the login URL of the instance the org lives on]' \
- '(-d|--setdefaultdevhubusername)'{-d,--setdefaultdevhubusername}'[set the authenticated org as the default dev hub org for scratch org creation]' \
- '(-s|--setdefaultusername)'{-s,--setdefaultusername}'[set the authenticated org as the default username that all commands run against]' \
- '(-a|--setalias)'{-a,--setalias}'[set an alias for the authenticated org]' \
- '(--disablemasking)--disablemasking[disable masking of user input (for use with problematic terminals)]' \
- '(--json)--json[format output as json]' \
- '(--loglevel)--loglevel[logging level for this command invocation (error*,trace,debug,info,warn,fatal)]' \
- )
- ;;
- esac
-
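-# Finally, hand the per-subcommand specs collected in $_command_args to
-# _arguments to perform the actual completion.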
-_arguments \
- $_command_args \
- && return 0
diff --git a/src/_sslscan b/src/_sslscan
new file mode 100644
index 0000000..6a4cb49
--- /dev/null
+++ b/src/_sslscan
@@ -0,0 +1,98 @@
+#compdef sslscan
+# ------------------------------------------------------------------------------
+# Copyright (c) 2025 Github zsh-users - https://github.com/zsh-users
+# All rights reserved.
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+# OTHER DEALINGS IN THE SOFTWARE.
+# ------------------------------------------------------------------------------
+# Description
+# -----------
+#
+# Completion script for sslscan 2.2.0. (https://github.com/rbsec/sslscan)
+#
+# ------------------------------------------------------------------------------
+# Authors
+# -------
+#
+# * Shohei YOSHIDA (https://github.com/syohex)
+#
+# ------------------------------------------------------------------------------
+
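+# Each spec below follows the standard _arguments format: an optional list of
+# mutually exclusive options in parentheses, the option name (a trailing "="
+# marks an option that takes a value), a bracketed description and, where a
+# value is taken, a ":message:action" pair describing how to complete it.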
+_arguments \
+ '--targets=[a file containing a list of hosts to check]:file:_files' \
+ '--sni-name=[hostname for SNI]:name' \
+ '(-4 --ipv4 -6 --ipv6)'{-4,--ipv4}'[only use IPv4]' \
+ '(-4 --ipv4 -6 --ipv6)'{-6,--ipv6}'[only use IPv6]' \
+ '--show-certificate[show full certificate information]' \
+  '--show-certificates[show full certificate information for the whole chain]' \
+ '--show-client-cas[show trusted CAs for TLS client auth]' \
+ '--no-check-certificate[do not warn about weak certificate algorithm or keys]' \
+ '--ocsp[request OCSP response from server]' \
+ '--pk=[a file containing the private key or a PKCS#12 file]:file:_files' \
+ '--pkpass=[the password for private key or PKCS#12 file]:password' \
+  '--certs=[a file containing PEM/ASN1 formatted client certificates]:file:_files' \
+ '--ssl2[only check if SSLv2 is enabled]' \
+ '--ssl3[only check if SSLv3 is enabled]' \
+ '--tls10[only check TLSv1.0 ciphers]' \
+ '--tls11[only check TLSv1.1 ciphers]' \
+ '--tls12[only check TLSv1.2 ciphers]' \
+ '--tls13[only check TLSv1.3 ciphers]' \
+  '--tlsall[only check TLS ciphers (all versions)]' \
+ '--show-ciphers[show supported client ciphers]' \
+ '--show-cipher-ids[show cipher IDs]' \
+ '--iana-names[use IANA/RFC cipher names rather than OpenSSL ones]' \
+ '--show-times[show handshake times in milliseconds]' \
+ '--no-cipher-details[disable EC curve names and EDH/RSA key lengths output]' \
+ '--no-ciphersuites[do not check for supported ciphersuites]' \
+  '--no-compression[do not check for TLS compression (CRIME)]' \
+ '--no-fallback[do not check for TLS Fallback SCSV]' \
+ '--no-groups[do not enumerate key exchange groups]' \
+  '--no-heartbleed[do not check for OpenSSL Heartbleed (CVE-2014-0160)]' \
+ '--no-renegotiation[do not check for TLS renegotiation]' \
+ '--show-sigs[enumerate signature algorithms]' \
+ '--starttls-ftp[STARTTLS setup for FTP]' \
+ '--starttls-imap[STARTTLS setup for IMAP]' \
+ '--starttls-irc[STARTTLS setup for IRC]' \
+ '--starttls-ldap[STARTTLS setup for LDAP]' \
+ '--starttls-mysql[STARTTLS setup for MYSQL]' \
+ '--starttls-pop3[STARTTLS setup for POP3]' \
+ '--starttls-psql[STARTTLS setup for PostgreSQL]' \
+ '--starttls-smtp[STARTTLS setup for SMTP]' \
+ '--starttls-xmpp[STARTTLS setup for XMPP]' \
+ '--xmpp-server[use a server-to-server XMPP handshake]' \
+ '--rdp[send RDP preamble before starting scan]' \
+ '--bugs[enable SSL implementation bug work-arounds]' \
+ '--no-colour[disable coloured output]' \
+  '--sleep=[pause between connection requests (msecs)]:msecs' \
+  '--timeout=[set socket timeout in seconds (default: 3s)]:seconds' \
+  '--connect-timeout=[set connect timeout in seconds (default: 75s)]:seconds' \
+ '--verbose[display verbose output]' \
+ '--version[display the program version]' \
+ '--xml=[output results to an XML file. Use - for STDOUT]:file:_files' \
+ '(- *)--help[display the help text]' \
+ '*::host:_urls'
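+
+# Example: once this function is loaded, "sslscan --starttls-<TAB>" offers
+# the protocol-specific STARTTLS flags above, and any non-option words are
+# completed as target hosts via the '*::host:_urls' spec.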
+
+# Local Variables:
+# mode: Shell-Script
+# sh-indentation: 2
+# indent-tabs-mode: nil
+# sh-basic-offset: 2
+# End:
+# vim: ft=zsh sw=2 ts=2 et
diff --git a/src/_stack b/src/_stack
deleted file mode 100644
index 847328c..0000000
--- a/src/_stack
+++ /dev/null
@@ -1,134 +0,0 @@
-#compdef stack
-# ------------------------------------------------------------------------------
-# Copyright (c) 2015 Github zsh-users - https://github.com/zsh-users
-# All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are met:
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above copyright
-# notice, this list of conditions and the following disclaimer in the
-# documentation and/or other materials provided with the distribution.
-# * Neither the name of the zsh-users nor the
-# names of its contributors may be used to endorse or promote products
-# derived from this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
-# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-# DISCLAIMED. IN NO EVENT SHALL ZSH-USERS BE LIABLE FOR ANY
-# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
-# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
-# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-# ------------------------------------------------------------------------------
-# Description
-# ------------------------------------------------------------------------------
-#
-# Completion script for stack (https://github.com/commercialhaskell/stack).
-#
-# ------------------------------------------------------------------------------
-# Authors
-# ------------------------------------------------------------------------------
-#
-# * Toshiki Teramura <toshiki.teramura@gmail.com>
-# * Nikita Ursol <nikita20001116@gmail.com>
-#
-# ------------------------------------------------------------------------------
-
-_stack () {
- _arguments \
- --help'[show usage information]' \
- --version'[show version]' \
- --numeric-version'[show only version number]' \
- --hpack-numeric-version"[show only hpack's version number]" \
- '--docker[enable using a Docker container, run "stack --docker-help" for details]' \
- '--no-docker[disable using a Docker container, run "stack --docker-help" for details]' \
- '--nix[enable use of a Nix-shell, run "stack --nix-help" for details]' \
- '--no-nix[disable use of a Nix-shell, run "stack --nix-help" for details]' \
- --verbosity'[verbosity: silent, error, warn, info, debug]' \
- {-v,--verbose}'[enable verbose mode: verbosity level "debug"]' \
- --silent'[enable silent mode: verbosity level "silent"]' \
- --time-in-log'[enable inclusion of timings in logs, to use diff with logs]' \
- --no-time-in-log'[disable inclusion of timings in logs, to use diff with logs]' \
- --stack-root'[absolute path to the global stack root directory]' \
- --work-dir'[relative path of work directory]' \
- --system-ghc'[enable using the system installed GHC if available and a matching version]' \
- --no-system-ghc'[disable using the system installed GHC if available and a matching version]' \
- --install-ghc'[enable downloading and installing GHC if necessary]' \
- --no-install-ghc'[disable downloading and installing GHC if necessary]' \
- --arch'[system architecture, e.g. i386, x86_64]' \
- --ghc-variant'[specialized GHC variant, e.g. integersimple (incompatible with --system-ghc)]' \
- --ghc-build'[specialized GHC build, e.g. "gmp4" or "standard" (usually auto-detected)]' \
- {-j,--jobs}'[number of concurrent jobs to run]' \
- --extra-include-dirs'[extra directories to check for C header files]' \
- --extra-lib-dirs'[extra directories to check for libraries]' \
- --with-gcc'[use custom path to gcc executable]' \
- --with-hpack'[use custom path to hpack executable]' \
- --skip-ghc-check'[enable skipping the GHC version and architecture check]' \
- --no-skip-ghc-check'[disable skipping the GHC version and architecture check]' \
- --skip-msys'[enable skipping the local MSYS installation (Windows only)]' \
- --no-skip-msys'[disable skipping the local MSYS installation (Windows only)]' \
- --local-bin-path'[install binaries to specified location]' \
- --setup-info-yaml'[alternate URL or relative / absolute path for stack dependencies]' \
- --modify-code-page'[enable setting the codepage to support UTF-8 (Windows only)]' \
- --no-modify-code-page'[disable setting the codepage to support UTF-8 (Windows only)]' \
- --allow-different-user'[enable permission for non-owners to use a stack installation (POSIX only)]' \
- --no-allow-different-user'[disable permission for non-owners to use a stack installation (POSIX only)]' \
- --dump-logs'[enable dump the build output logs for local packages to the console]' \
- --no-dump-logs'[disable dump the build output logs for local packages to the console]' \
- {--color,--colour}'[specify when to use color in output; accepts "always", "never", "auto"]' \
- --resolver'[override resolver in project file]' \
- --terminal'[enable overriding terminal detection in the case of running in a false terminal]' \
- --no-terminal'[disable overriding terminal detection in the case of running in a false terminal]' \
- {--stack-colors,--stack-colours}"[specify stack's output styles]" \
- --terminal-width'[specify the width of the terminal, used for pretty-print messages]' \
- --stack-yaml'[override project stack.yaml file]' \
- --lock-file'[specify how to interact with lock files.]' \
- '*: :__stack_modes'
-}
-
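-# Helper offering the stack subcommands; _values takes a description of the
-# set ('subcommand') followed by "name[description]" entries.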
-__stack_modes () {
- _values \
- 'subcommand' \
- 'build[build the project(s) in this directory/configuration]' \
- 'install[build executables and install to a user path]' \
- 'test[build and test the project(s) in this directory/configuration]' \
- 'bench[build and benchmark the project(s) in this directory/configuration]' \
- 'haddock[generate haddocks for the project(s) in this directory/configuration]' \
- 'new[create a brand new project]' \
- 'templates[show how to find templates available for "stack new".]' \
- 'init[create stack project config from cabal or hpack package specifications]' \
- 'setup[get the appropriate ghc for your project]' \
- 'path[print out handy path information]' \
- "ls[list command. (supports snapshots, dependencies and stack's styles)]" \
- 'unpack[unpack one or more packages locally]' \
- 'update[update the package index]' \
- 'upgrade[upgrade to the latest stack]' \
- 'upload[upload a package to Hackage]' \
- 'sdist[create source distribution tarballs]' \
-    "dot[visualize your project's dependency graph using Graphviz dot]" \
- 'ghc[run ghc]' \
- 'hoogle[run hoogle, the Haskell API search engine.]' \
- 'exec[execute a command]' \
- 'run[build and run an executable.]' \
- 'ghci[run ghci in the context of package(s) (experimental)]' \
- "repl[run ghci in the context of package(s) (experimental) (alias for 'ghci')]" \
- 'runghc[run runghc]' \
- "runhaskell[run runghc (alias for 'runghc')]" \
- 'script[run a Stack Script]' \
- 'eval[evaluate some haskell code inline.]' \
- 'clean[delete build artefacts for the project packages.]' \
- 'purge[delete the project stack working directories.]' \
- 'query[query general build information (experimental)]' \
- 'ide[ide-specific commands]' \
- 'docker[subcommands specific to Docker use]' \
- 'config[subcommands for accessing and modifying configuration values]' \
- 'hpc[subcommands specific to Haskell Program Coverage]'
-
-}
-
-_stack "$@"
diff --git a/src/_supervisorctl b/src/_supervisorctl
deleted file mode 100644
index 4c5c42b..0000000
--- a/src/_supervisorctl
+++ /dev/null
@@ -1,237 +0,0 @@
-#compdef supervisorctl
-# ------------------------------------------------------------------------------
-# Copyright (c) 2015 Github zsh-users - https://github.com/zsh-users
-# All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are met:
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above copyright
-# notice, this list of conditions and the following disclaimer in the
-# documentation and/or other materials provided with the distribution.
-# * Neither the name of the zsh-users nor the
-# names of its contributors may be used to endorse or promote products
-# derived from this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
-# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-# DISCLAIMED. IN NO EVENT SHALL ZSH-USERS BE LIABLE FOR ANY
-# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
-# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
-# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-# ------------------------------------------------------------------------------
-# Description
-# -----------
-#
-# Completion script for supervisorctl 4.2.5 from Supervisord (https://github.com/Supervisor/supervisor)
-#
-# ------------------------------------------------------------------------------
-# Authors
-# -------
-#
-# * Matt Black (https://github.com/mafrosis)
-# * dongweiming (https://github.com/dongweiming)
-# * Shohei Yoshida (https://github.com/syohex)
-#
-# ------------------------------------------------------------------------------
-
-_supervisorctl() {
- local -a procs
- typeset -A opt_args
- local context state line
- local curcontext="$curcontext"
- local ret=1
-
- _arguments -C \
- {--configuration,-c}='[configuration file path (default /etc/supervisor.conf)]:filename:_files' \
- '(- *)'{--help,-h}'[print usage message and exit]:' \
- {--interactive,-i}'[start an interactive shell after executing commands]' \
- {--serverurl,-s}='[URL on which supervisord server is listening (default "http://localhost:9001")]:url:_urls' \
- {--username,-u}='[username to use for authentication with server]:username:_users' \
- {--password,-p}='[password to use for authentication with server]:password' \
- {--history-file,-r}'[keep a readline history (if readline is available)]:filename:_files' \
- '1: :_supervisorctl_subcommands' \
- '*:: :->subcmds' && ret=0
-
- case $state in
- (subcmds)
- case "$words[1]" in
- (help)
- _arguments \
- '1: :_supervisorctl_subcommands' \
- && ret=0
- ;;
- (add|remove)
- _arguments \
- '1: :_supervisorctl_procs_groups' \
- && ret=0
- ;;
- (fg)
- _arguments \
- '1: :_supervisorctl_processes' \
- && ret=0
- ;;
- (pid|clear)
- _arguments \
- '*: :_supervisorctl_processes_all' \
- && ret=0
- ;;
- (restart|status)
- _arguments \
- '*:process_or_group:_supervisorctl_procs_and_group_prefixes' \
- && ret=0
- ;;
- (update)
- _arguments \
- '*: :_supervisorctl_groups' \
- && ret=0
- ;;
- (stop)
- _arguments \
- '*:running process or group:_supervisorctl_running_procs' \
- && ret=0
- ;;
- (start)
- _arguments \
- '*:stopped process or group:_supervisorctl_stopped_procs' \
- && ret=0
- ;;
- (signal)
- _arguments \
- '1:signal:_signals -s' \
- '*:running process or group:_supervisorctl_running_procs' \
- && ret=0
- ;;
- (tail)
- _arguments \
- '-f[Continuous tail of named process stdout]' \
- '-[last N *bytes* of process stdout]:number' \
- '1: :_supervisorctl_processes' \
- '2:output:(stdout stderr)' \
- && ret=0
- ;;
- (maintail)
- _arguments \
- '-f[Continuous tail of named process stdout]' \
- '-[last N *bytes* of process stdout]:number' \
- && ret=0
- ;;
- esac
- esac
-
- return 0
-}
-
-(( $+functions[_supervisorctl_subcommands] )) ||
-_supervisorctl_subcommands() {
- local -a commands=(
- 'add:Activates any updates in config for process/group'
- 'avail:Display all configured processes'
- 'clear:Clear single/multiple/all process log files'
- 'exit:Exit the supervisor shell'
- 'fg:Connect to a process in foreground mode'
- 'maintail:tail of supervisor main log file'
- 'open:Connect to a remote supervisord process. (for UNIX domain socket, use unix:///socket/path)'
- 'pid:Get the PID of process/supervisord'
- 'quit:Exit the supervisor shell'
- 'reload:Restart the remote supervisord'
- 'remove:Removes process/group from active config'
- "reread:Reload the daemon's configuration files"
- 'restart:Restart process, group or all'
- 'signal:Send signal to a process'
- 'shutdown:Shut the remote supervisord down'
- 'start:Start process, group or all'
- 'status:Get process/group status info'
- 'stop:Stop process, group or all'
- 'tail:tail of process stdout'
- 'update:Reload config and add/remove as necessary'
- 'version:Show the version of the remote supervisord process'
- 'help:Show help'
- )
-
- _describe -t commands 'command' commands "$@"
-}
-
-(( $+functions[_supervisorctl_processes] )) ||
-_supervisorctl_processes() {
- local -a procs
- procs=(${(f)"$(_call_program processes supervisorctl avail | awk '{gsub(":","\\:", $1); print $1 }')"})
- if [[ "$1" = 'all' ]]; then
- procs+=(all)
- fi
- _describe 'processes' procs
-}
-
-(( $+functions[_supervisorctl_processes_all] )) ||
-_supervisorctl_processes_all() {
- _supervisorctl_processes all
-}
-
-(( $+functions[_supervisorctl_procs_groups] )) ||
-_supervisorctl_procs_groups() {
- local -a procs
- procs=(${(f)"$(_call_program processes supervisorctl status \
- | awk '{n=$1;gsub(":","\\:",n); printf "%s\n%s\n",n,substr($1,1,index($1,":")-1)}' \
- | uniq)"})
- _describe 'process and groups' procs
-}
-
-(( $+functions[_supervisorctl_procs_and_group_prefixes] )) ||
-_supervisorctl_procs_and_group_prefixes() {
- _supervisorctl_collect_procs '.'
-}
-
-(( $+functions[_supervisorctl_running_procs] )) ||
-_supervisorctl_running_procs() {
- _supervisorctl_collect_procs 'RUNNING'
-}
-
-(( $+functions[_supervisorctl_stopped_procs] )) ||
-_supervisorctl_stopped_procs() {
- _supervisorctl_collect_procs 'STOPPED'
-}
-
-(( $+functions[_supervisorctl_collect_procs] )) ||
-_supervisorctl_collect_procs() {
- if (( $words[(I)all] )); then
- return
- fi
-
- local pattern=$1
-
- local -a procs
- procs=(${(f)"$(_call_program processes supervisorctl status \
- | awk "/$pattern/"'{n=$1;gsub(":","\\:",n); printf "%s\n%s\\:\n",n,substr($1,1,index($1,":")-1)}' \
- | uniq)"})
- procs+=(all)
- _describe 'stooped processes or groups' procs
-}
-
-(( $+functions[_supervisorctl_groups] )) ||
-_supervisorctl_groups() {
- if (( $words[(I)all] )); then
- return
- fi
-
- local -a groups
- groups=(${(f)"$(_call_program processes supervisorctl status \
- | awk '{printf "%s\n",substr($1,1,index($1,":")-1)}' \
- | uniq)"})
- groups+=(all)
- _describe 'groups' groups
-}
-
-_supervisorctl "$@"
-
-# Local Variables:
-# mode: Shell-Script
-# sh-indentation: 2
-# indent-tabs-mode: nil
-# sh-basic-offset: 2
-# End:
-# vim: ft=zsh sw=2 ts=2 et
diff --git a/src/_supervisord b/src/_supervisord
index 98210a0..50f9c0f 100644
--- a/src/_supervisord
+++ b/src/_supervisord
@@ -1,4 +1,4 @@
-#compdef supervisord
+#compdef supervisord supervisorctl pidproxy
# ------------------------------------------------------------------------------
# Copyright (c) 2023 Github zsh-users - https://github.com/zsh-users
# All rights reserved.
@@ -25,36 +25,237 @@
# Description
# -----------
#
-# Completion script for supervisord. (https://github.com/Supervisor/supervisor)
+# Completion script for supervisord tools v4.3.0 (https://github.com/Supervisor/supervisor)
#
# ------------------------------------------------------------------------------
# Authors
# -------
#
+# * Matt Black (https://github.com/mafrosis)
+# * dongweiming (https://github.com/dongweiming)
# * Shohei YOSHIDA (https://github.com/syohex)
#
# ------------------------------------------------------------------------------
-_arguments \
- '(-c --configuration)'{-c,--configuration}'[configuration file path]:config file:_files' \
- '(-n --nodaemon)'{-n,--nodaemon}'[run in the foreground]' \
- '(-s --silent)'{-s,--silent}'[no longs to stdout]' \
- '(- *)'{-h,--help}'[print this usage message and exit]' \
- '(- *)'{-v,--version}'[print supervisord version number and exit]' \
- '(-u --user)'{-u,--user}'[run supervisord as given user]:user:_users' \
- '(-m --umask)'{-m,--umask}'[use given umask for daemon subprocess]:umask' \
- '(-d --directory)'{-d,--directory}'[directory to chdir to when daemonized]:directory:_files -/' \
- '(-l --logfile)'{-l,--logfile}'[logfile path]:logfile:_files' \
- '(-y --logfile_maxbytes)'{-y,--logfile_maxbytes}'[limit the max size of logfile]:max bytes' \
- '(-z --logfile_backups)'{-z,--logfile_backups}'[number of backups to keep when max bytes reached]:number of backups' \
- '(-e --loglevel)'{-e,--loglevel}'[log level]:level:(debug info warn error critical)' \
- '(-j --pidfile)'{-j,--pidfile}'[pid file path]:pid file:_files' \
- '(-i --identifier)'{-i,--identifier}'[identifier used for this instance of supervisord]:id' \
- '(-q --childlogdir)'{-q,--childlogdir}'[the log directory for child process logs]:log dir:_files -/' \
- '(-k --nocleanup)'{-k,--nocleanup}'[prevent the process from performing cleanup]' \
- '(-a --minfds)'{-m,--minfds}'[the minimum number of file descriptors for start success]:min fds' \
- '(-t --strip_ansi)'{-t,--strip_ansi}'[strip ansi escape codes from process output]' \
- '--profile_options[profile options]:profile option:_values -s , "field" cumulative calls callers'
+_supervisorctl() {
+ local -a procs
+ typeset -A opt_args
+ local context state line
+ local curcontext="$curcontext"
+ local ret=1
+
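+  # -C lets the states below refine $curcontext; '*:: :->subcmds' re-enters
+  # with the words after the subcommand in $words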
+ _arguments -C \
+ {--configuration,-c}='[configuration file path (default /etc/supervisor.conf)]:filename:_files' \
+    '(- *)'{--help,-h}'[print usage message and exit]' \
+ {--interactive,-i}'[start an interactive shell after executing commands]' \
+ {--serverurl,-s}='[URL on which supervisord server is listening (default "http://localhost:9001")]:url:_urls' \
+ {--username,-u}='[username to use for authentication with server]:username:_users' \
+ {--password,-p}='[password to use for authentication with server]:password' \
+ {--history-file,-r}'[keep a readline history (if readline is available)]:filename:_files' \
+ '1: :_supervisorctl_subcommands' \
+ '*:: :->subcmds' && ret=0
+
+ case $state in
+ (subcmds)
+ case "$words[1]" in
+ (help)
+ _arguments \
+ '1: :_supervisorctl_subcommands' \
+ && ret=0
+ ;;
+ (add|remove)
+ _arguments \
+ '1: :_supervisorctl_procs_groups' \
+ && ret=0
+ ;;
+ (fg)
+ _arguments \
+ '1: :_supervisorctl_processes' \
+ && ret=0
+ ;;
+ (pid|clear)
+ _arguments \
+ '*: :_supervisorctl_processes_all' \
+ && ret=0
+ ;;
+ (restart|status)
+ _arguments \
+ '*:process_or_group:_supervisorctl_procs_and_group_prefixes' \
+ && ret=0
+ ;;
+ (update)
+ _arguments \
+ '*: :_supervisorctl_groups' \
+ && ret=0
+ ;;
+ (stop)
+ _arguments \
+ '*:running process or group:_supervisorctl_running_procs' \
+ && ret=0
+ ;;
+ (start)
+ _arguments \
+ '*:stopped process or group:_supervisorctl_stopped_procs' \
+ && ret=0
+ ;;
+ (signal)
+ _arguments \
+ '1:signal:_signals -s' \
+ '*:running process or group:_supervisorctl_running_procs' \
+ && ret=0
+ ;;
+ (tail)
+ _arguments \
+ '-f[Continuous tail of named process stdout]' \
+ '-[last N *bytes* of process stdout]:number' \
+ '1: :_supervisorctl_processes' \
+ '2:output:(stdout stderr)' \
+ && ret=0
+ ;;
+ (maintail)
+ _arguments \
+          '-f[Continuous tail of supervisor main log file]' \
+          '-[last N *bytes* of supervisor main log]:number' \
+ && ret=0
+ ;;
+ esac
+ esac
+
+  return ret
+}
+
+(( $+functions[_supervisorctl_subcommands] )) ||
+_supervisorctl_subcommands() {
+ local -a commands=(
+ 'add:Activates any updates in config for process/group'
+ 'avail:Display all configured processes'
+ 'clear:Clear single/multiple/all process log files'
+ 'exit:Exit the supervisor shell'
+ 'fg:Connect to a process in foreground mode'
+ 'maintail:tail of supervisor main log file'
+ 'open:Connect to a remote supervisord process. (for UNIX domain socket, use unix:///socket/path)'
+ 'pid:Get the PID of process/supervisord'
+ 'quit:Exit the supervisor shell'
+ 'reload:Restart the remote supervisord'
+ 'remove:Removes process/group from active config'
+ "reread:Reload the daemon's configuration files"
+ 'restart:Restart process, group or all'
+ 'signal:Send signal to a process'
+ 'shutdown:Shut the remote supervisord down'
+ 'start:Start process, group or all'
+ 'status:Get process/group status info'
+ 'stop:Stop process, group or all'
+ 'tail:tail of process stdout'
+ 'update:Reload config and add/remove as necessary'
+ 'version:Show the version of the remote supervisord process'
+ 'help:Show help'
+ )
+
+ _describe -t commands 'command' commands "$@"
+}
+
+(( $+functions[_supervisorctl_processes] )) ||
+_supervisorctl_processes() {
+ local -a procs
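+  # (f) splits the command output on newlines; awk escapes ':' in group:name
+  # entries so _describe does not take it for a description separator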
+ procs=(${(f)"$(_call_program processes supervisorctl avail | awk '{gsub(":","\\:", $1); print $1 }')"})
+ if [[ "$1" = 'all' ]]; then
+ procs+=(all)
+ fi
+ _describe 'processes' procs
+}
+
+(( $+functions[_supervisorctl_processes_all] )) ||
+_supervisorctl_processes_all() {
+ _supervisorctl_processes all
+}
+
+(( $+functions[_supervisorctl_procs_groups] )) ||
+_supervisorctl_procs_groups() {
+ local -a procs
+ procs=(${(f)"$(_call_program processes supervisorctl status \
+ | awk '{n=$1;gsub(":","\\:",n); printf "%s\n%s\n",n,substr($1,1,index($1,":")-1)}' \
+ | uniq)"})
+  _describe 'processes and groups' procs
+}
+
+(( $+functions[_supervisorctl_procs_and_group_prefixes] )) ||
+_supervisorctl_procs_and_group_prefixes() {
+ _supervisorctl_collect_procs '.'
+}
+
+(( $+functions[_supervisorctl_running_procs] )) ||
+_supervisorctl_running_procs() {
+ _supervisorctl_collect_procs 'RUNNING'
+}
+
+(( $+functions[_supervisorctl_stopped_procs] )) ||
+_supervisorctl_stopped_procs() {
+ _supervisorctl_collect_procs 'STOPPED'
+}
+
+(( $+functions[_supervisorctl_collect_procs] )) ||
+_supervisorctl_collect_procs() {
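+  # the (I) subscript flag gives the index of 'all' in $words (0 if absent);
+  # once 'all' is on the command line there is nothing left to complete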
+ if (( $words[(I)all] )); then
+ return
+ fi
+
+ local pattern=$1
+
+ local -a procs
+ procs=(${(f)"$(_call_program processes supervisorctl status \
+ | awk "/$pattern/"'{n=$1;gsub(":","\\:",n); printf "%s\n%s\\:\n",n,substr($1,1,index($1,":")-1)}' \
+ | uniq)"})
+ procs+=(all)
+  _describe 'processes or groups' procs
+}
+
+(( $+functions[_supervisorctl_groups] )) ||
+_supervisorctl_groups() {
+ if (( $words[(I)all] )); then
+ return
+ fi
+
+ local -a groups
+ groups=(${(f)"$(_call_program processes supervisorctl status \
+ | awk '{printf "%s\n",substr($1,1,index($1,":")-1)}' \
+ | uniq)"})
+ groups+=(all)
+ _describe 'groups' groups
+}
+
+case $service in
+ (supervisord)
+ _arguments \
+ '(-c --configuration)'{-c,--configuration}'[configuration file path]:config file:_files' \
+ '(-n --nodaemon)'{-n,--nodaemon}'[run in the foreground]' \
+      '(-s --silent)'{-s,--silent}'[no logs to stdout]' \
+ '(- *)'{-h,--help}'[print this usage message and exit]' \
+ '(- *)'{-v,--version}'[print supervisord version number and exit]' \
+ '(-u --user)'{-u,--user}'[run supervisord as given user]:user:_users' \
+ '(-m --umask)'{-m,--umask}'[use given umask for daemon subprocess]:umask' \
+ '(-d --directory)'{-d,--directory}'[directory to chdir to when daemonized]:directory:_files -/' \
+ '(-l --logfile)'{-l,--logfile}'[logfile path]:logfile:_files' \
+ '(-y --logfile_maxbytes)'{-y,--logfile_maxbytes}'[limit the max size of logfile]:max bytes' \
+ '(-z --logfile_backups)'{-z,--logfile_backups}'[number of backups to keep when max bytes reached]:number of backups' \
+ '(-e --loglevel)'{-e,--loglevel}'[log level]:level:(debug info warn error critical)' \
+ '(-j --pidfile)'{-j,--pidfile}'[pid file path]:pid file:_files' \
+ '(-i --identifier)'{-i,--identifier}'[identifier used for this instance of supervisord]:id' \
+ '(-q --childlogdir)'{-q,--childlogdir}'[the log directory for child process logs]:log dir:_files -/' \
+ '(-k --nocleanup)'{-k,--nocleanup}'[prevent the process from performing cleanup]' \
+      '(-a --minfds)'{-a,--minfds}'[the minimum number of file descriptors for start success]:min fds' \
+ '(-t --strip_ansi)'{-t,--strip_ansi}'[strip ansi escape codes from process output]' \
+ '--profile_options[profile options]:profile option:_values -s , "field" cumulative calls callers'
+ ;;
+ (supervisorctl)
+ _supervisorctl "$@"
+ ;;
+ (pidproxy)
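+    # usage: pidproxy <pidfile name> <command> [<cmdarg> ...]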
+ _arguments \
+ '1:pid_file:_files' \
+ '2:cmd:_command_names -e' \
+ '*::argument:_normal'
+ ;;
+esac
# Local Variables:
# mode: Shell-Script
diff --git a/src/_svm b/src/_svm
index 7782397..1fc6475 100644
--- a/src/_svm
+++ b/src/_svm
@@ -28,7 +28,7 @@
# Description
# -----------
#
-# Completion script for svm (https://github.com/yuroyoro/svm)
+# Completion script for svm, Scala2 version manager (https://github.com/yuroyoro/svm)
#
# ------------------------------------------------------------------------------
# Authors
@@ -38,130 +38,120 @@
#
# ------------------------------------------------------------------------------
-local context curcontext="$curcontext" state line ret=1
-typeset -A opt_args
+_svm() {
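+  # the first word completes a command; per-command arguments are handled
+  # in the (args) state below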
+ local context curcontext="$curcontext" state line ret=1
+ typeset -A opt_args
+ _arguments -C \
+ '(- *)-h[show this usage information]' \
+    '-c[show the currently used scala version]' \
+    "-l[show the scala versions installed in svm_path (default is ${HOME}/.svm)]" \
+    '-v[show the available scala versions not installed]' \
+    '-i[install a specific scala version]: :_svm_not_installed_scala_versions' \
+    '-r[uninstall a specific scala version and remove its sources]: :_svm_installed_scala_versions' \
+    '(-s -u)'{-s,-u}'[set up to use a specific scala version]: :_svm_not_selected_scala_versions' \
+ '1: :_svm_commands' \
+ '*:: :->args' && ret=0
-local -a _1st_arguments
-_1st_arguments=(
- 'help:show this usage information'
- 'current:show the currently use scala version'
- "list:show the scala version installed in svm_path(default is ${HOME}/.svm)"
- "versions:show the available scala version not installed"
- 'install:install specific scala version'
- 'remove:uninstall specific scala version and remove their sources'
- 'switch:setup to use a specific scala version'
- 'update-latest:install or update nightly build scala version'
- 'latest:setup to use nightly build scala version'
- 'stable:setup to use stable(x.x.x.final) scala version'
- 'self-update:update svm itself'
-)
+ case $state in
+ (args)
+ # scala version number
+ case $words[1] in
+ (install)
+ # install not installed version
+ _arguments \
+ '--docs[download scala-devel-docs]' \
+ '--sources[download scala-sources]' \
+ '1: :_svm_not_installed_scala_versions' \
+ && ret=0
+ ;;
+ (update-latest)
+ # update nightly build scala version
+ _arguments \
+ '--docs[download scala-devel-docs]' \
+ '--sources[download scala-sources]' \
+ && ret=0
+ ;;
+ (remove|uninstall)
+ # remove installed version
+ _arguments \
+ '1: :_svm_installed_scala_versions' \
+ && ret=0
+ ;;
+ (switch|use)
+ # use installed version
+ _arguments \
+ '1: :_svm_not_selected_scala_versions' \
+ && ret=0
+ ;;
+ esac
-_arguments -C \
- '(-)-h[show this usage information]' \
- '-c[show the currently use scala version]' \
- "-l[show the scala version installed in svm_path(default is ${HOME}/.svm)]" \
- '-v[show the available scala version not installed]' \
- '-i[install specific scala version]: :_svm_completion_not_installed_scala_versions' \
- '-r[uninstall specific scala version and remove their sources]: :_svm_completion_installed_scala_versions' \
- '(-s -u)'{-s,-u}'[setup to use a specific scala version]: :_svm_completion_not_selected_scala_versions' \
- '1: :->cmds' \
- '*:: :->args' && ret=0
+ ;; # end args
+ esac
+ return ret
+}
-# installed scala versions
-(( $+functions[_svm_completion_installed_scala_versions] )) ||
-_svm_completion_installed_scala_versions() {
- local -a _installed_versions
- _current_version="${$(_call_program installed svm current)#currently version is[[:space:]]*}"
+(( $+functions[_svm_commands] )) ||
+_svm_commands() {
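+  # $PREFIX is what has been typed so far; offer the uninstall/use/
+  # update-latest synonyms only while they can still match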
+ case $PREFIX in
+ (u*)
+ local -a synonyms=(
+        'uninstall:uninstall a specific scala version and remove its sources'
+        'use:set up to use a specific scala version'
+ 'update-latest:install or update nightly build scala version'
+ )
- # collect lines starts with digit
- _installed_versions=( ${(M)${(@f)"$(_call_program installed svm list)"}:#[[:digit:]]*} )
+ _describe -t actions 'svm actions' synonyms
+ ;;
+ (*)
+ local -a commands=(
+ 'help:show this usage information'
+        'current:show the currently used scala version'
+        "list:show the scala versions installed in svm_path (default is ${HOME}/.svm)"
+        "versions:show the available scala versions not installed"
+        'install:install a specific scala version'
+        'remove:uninstall a specific scala version and remove its sources'
+        'switch:set up to use a specific scala version'
+        'update-latest:install or update the nightly build scala version'
+        'latest:set up to use the nightly build scala version'
+        'stable:set up to use the stable (x.x.x.final) scala version'
+ 'self-update:update svm itself'
+ )
- _describe -t installed "installed versions" _installed_versions
+ _describe -t actions 'svm actions' commands
+ ;;
+ esac
}
-# installed and not selected scala versions
-(( $+functions[_svm_completion_not_selected_scala_versions] )) ||
-_svm_completion_not_selected_scala_versions() {
- local _current_version
- local -a _not_selected_versions
-
- _current_version="${$(_call_program installed svm current)#currently version is[[:space:]]*}"
-
+# installed scala versions
+(( $+functions[_svm_installed_scala_versions] )) ||
+_svm_installed_scala_versions() {
# collect lines starts with digit
- _not_selected_versions=( ${(M)${(@f)"$(_call_program installed svm list)"}:#[[:digit:]]*} )
+ local -a installed_versions=( ${(M)${(@f)"$(_call_program installed svm list)"}:#[[:digit:]]*} )
+ _describe -t installed "installed versions" installed_versions
+}
+
+# installed and not selected scala versions
+(( $+functions[_svm_not_selected_scala_versions] )) ||
+_svm_not_selected_scala_versions() {
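+  # svm prints 'currently version is <version>'; strip the prefix to keep
+  # only the version number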
+ local current_version=$(_call_program current svm current | sed 's/currently version is //')
+ local -a not_selected_versions=( ${(M)${(@f)"$(_call_program installed svm list)"}:#[[:digit:]]*} )
# remove current version
- _not_selected_versions=( ${_not_selected_versions:#$_current_version})
- _describe -t installed "not selected versions" _not_selected_versions
+ not_selected_versions=( ${not_selected_versions:#$current_version})
+ _describe -t installed "not selected versions" not_selected_versions
}
# not installed scala versions
-(( $+functions[_svm_completion_not_installed_scala_versions] )) ||
-_svm_completion_not_installed_scala_versions() {
- local -a _not_installed_versions
- # collect lines starts with digit
- _not_installed_versions=( ${(M)${(@f)"$(_call_program installed svm versions)"}:#[[:digit:]]*} )
+(( $+functions[_svm_not_installed_scala_versions] )) ||
+_svm_not_installed_scala_versions() {
+ local -a not_installed_versions=( ${(M)${(@f)"$(_call_program installed svm versions)"}:#[[:digit:]]*} )
- _describe -t notinstalled "not installed versions" _not_installed_versions
+ _describe -t notinstalled "not installed versions" not_installed_versions
}
-
-case $state in
- cmds)
- # action
- case $PREFIX in
- u*)
- # complete command synonyms
- local -a _synonym_arguments
- _synonym_arguments=(
- 'uninstall:uninstall specific scala version and remove their sources'
- 'use:setup to use a specific scala version'
- 'update-latest:install or update nightly build scala version'
- )
- _describe -t actions 'svm actions' _synonym_arguments && ret=0
- ;;
-
- *)
- _describe -t actions 'svm actions' _1st_arguments
- _svm_completion_not_selected_scala_versions && ret=0
- ;;
- esac
- ;; # end action
-
- args)
- # scala version number
- case $words[1] in
- (install)
- # install not installed version
- _arguments \
- '1: :_svm_completion_not_installed_scala_versions' \
- '--docs[download scala-devel-docs]' \
- '--sources[download scala-sources]' && ret=0
- ;;
- (update-latest)
- # update nightly build scala version
- _arguments \
- '--docs[download scala-devel-docs]' \
- '--sources[download scala-sources]' && ret=0
- ;;
- (remove|uninstall)
- # remove installed version
- _arguments \
- '1: :_svm_completion_installed_scala_versions' && ret=0
- ;;
- (switch|use)
- # use installed version
- _arguments \
- '1: :_svm_completion_not_selected_scala_versions' && ret=0
- ;;
- esac
-
- ;; # end args
-esac
-
-return ret
+_svm "$@"
# Local Variables:
# mode: Shell-Script
diff --git a/src/_textutil b/src/_textutil
index 938222e..abfc75c 100644
--- a/src/_textutil
+++ b/src/_textutil
@@ -39,15 +39,15 @@ local -a format=(txt html rtf rtfd doc docx wordml odt webarchive)
_arguments -S \
'-help[Show the usage information for the command and exit]' \
'-info[Display information about the specified files]' \
- '-convert[Convert the specified files to the indicated format and write]:format:'"($format)" \
- '-cat[Read the specified files, concatenate them in the indicated format]:format:'"($format)" \
+ '-convert[Convert the specified files to the indicated format and write]:format:(($format))' \
+ '-cat[Read the specified files, concatenate them in the indicated format]:format:(($format))' \
'-extension[Specify an extension to be used for output files]:ext' \
'-output[Specify the file name to be used for the first output file]:path:_files' \
'-stdin[Specify that input should be read from stdin rather than from files]' \
'-stdout[Specify that the first output file should go to stdout]' \
- '-encoding[Specify the encoding to be used for plain text or HTML output files]:encode' \
+ '-encoding[Specify the encoding to be used for plain text or HTML output files]:name' \
'-inputencoding[Force all plain text input files to be interpreted using the specified encoding]' \
- '-format[Force all input files to be interpreted using the indicated format]:format:'"($format)" \
+ '-format[Force all input files to be interpreted using the indicated format]:format:(($format))' \
'-font[Specify the name of the font to be used for converting plain to rich text]:font' \
'-fontsize[Specify the size in points of the font to be used for converting plain to rich text]:size' \
'-noload[Do not load subsidiary resources]' \
diff --git a/src/_tsx b/src/_tsx
new file mode 100644
index 0000000..e54c772
--- /dev/null
+++ b/src/_tsx
@@ -0,0 +1,58 @@
+#compdef tsx
+# ------------------------------------------------------------------------------
+# Copyright (c) 2025 Github zsh-users - https://github.com/zsh-users
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+# OTHER DEALINGS IN THE SOFTWARE.
+# ------------------------------------------------------------------------------
+# Description
+# -----------
+#
+# Completion script for tsx v4.21.0. (https://github.com/privatenumber/tsx)
+#
+# ------------------------------------------------------------------------------
+# Authors
+# -------
+#
+# * Shohei Yoshida (https://github.com/syohex) <syohex@gmail.com>
+#
+# ------------------------------------------------------------------------------
+
+_tsx_completion() {
+ _arguments \
+ '(- *)'{-h,--help}'[show help]' \
+ '(- *)'{-v,--version}'[show version]' \
+ '--no-cache[disable caching]' \
+ '--tsconfig[custom tsconfig.json path]:path:_files -g "*.json"' \
+ '1: :(watch)'
+}
+
+_tsx() {
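+  # offer tsx's own options alongside ordinary node completion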
+ _alternative 'tsx: :_tsx_completion' 'node: :_node'
+}
+
+_tsx "$@"
+
+# Local Variables:
+# mode: Shell-Script
+# sh-indentation: 2
+# indent-tabs-mode: nil
+# sh-basic-offset: 2
+# End:
+# vim: ft=zsh sw=2 ts=2 et
diff --git a/src/_zcash-cli b/src/_zcash-cli
index 5859b2f..6f21fe2 100644
--- a/src/_zcash-cli
+++ b/src/_zcash-cli
@@ -28,7 +28,7 @@
# Description
# -----------
#
-# Completion script for zcash-cli (https://z.cash).
+# Completion script for zcash-cli v6.10.0 (https://z.cash).
#
# ------------------------------------------------------------------------------
# Authors
@@ -38,139 +38,148 @@
#
# ------------------------------------------------------------------------------
-local state line curcontext="$curcontext" ret=1
+_zcash_cli_commands() {
+ local -a commands=(
+ 'addmultisigaddress:Add a nrequired-to-sign transparent multisignature address to the wallet'
+ 'addnode:Attempts to add or remove a node from the addnode list'
+ 'backupwallet:Safely copies current wallet file to destination filename'
+ 'clearbanned:Clear all banned IPs'
+ 'createmultisig:Creates a multi-signature address with n signature of m keys required'
+ 'decoderawtransaction:Return a JSON object representing the serialized, hex-encoded transaction'
+ 'decodescript:Decode a hex-encoded script'
+ 'disconnectnode:Immediately disconnects from the specified node'
+ 'dumpprivkey:Reveals the private key corresponding to "t-addr"'
+ 'encryptwallet:Encrypts the wallet with "passphrase"'
+ 'generate:Mine blocks immediately'
+ 'getaddednodeinfo:Returns information about the given added node, or all added nodes'
+ 'getaddressbalance:Returns the balance for addresses'
+ 'getaddressdeltas:Returns all changes for an address'
+ 'getaddressmempool:Returns all mempool deltas for an address'
+    'getaddresstxids:Returns the txids for given transparent addresses within the given block height range'
+ 'getaddressutxos:Returns all unspent outputs for an address'
+ "getbalance:Returns the wallet's available transparent balance"
+ 'getbestblockhash:Returns the hash of the best (tip) block in the longest block chain'
+ 'getblock:return data for the block'
+ 'getblockchaininfo:Returns an object containing various state info regarding block chain processing'
+ 'getblockcount:Returns the height of the most recent block in the best valid block chain'
+ 'getblockdeltas:Returns information about the given block and its transactions'
+ 'getblockhash:Returns hash of block in best-block-chain at index provided'
+ 'getblockhashes:Returns array of hashes of blocks within the timestamp range provided'
+ 'getblockheader:return block header'
+ 'getblocksubsidy:Returns block subsidy reward'
+ 'getblocktemplate:get block template'
+ 'getchaintips:Return information about all known tips in the block tree'
+ 'getconnectioncount:Returns the number of connections to other nodes'
+ 'getdeprecationinfo:Returns an object containing current version and deprecation block height'
+ 'getdifficulty:Returns the proof-of-work difficulty as a multiple of the minimum difficulty'
+ 'getexperimentalfeatures:Returns enabled experimental features'
+ 'getgenerate:Return if the server is set to generate coins or not'
+ 'getinfo:Returns an object containing various state info'
+ 'getlocalsolps:Returns the average local solutions per second since this node was started'
+ 'getmemoryinfo:Returns an object containing information about memory usage'
+ 'getmempoolinfo:Returns details on the active state of the TX memory pool'
+ 'getmininginfo:Returns a json object containing mining-related information'
+ 'getnettotals:Returns information about network traffic'
+ 'getnetworkinfo:Returns an object containing various state info regarding P2P networking'
+ 'getnetworksolps:Returns the estimated network solutions per second based on the last n blocks'
+ 'getnewaddress:Returns a new transparent Zcash address'
+ 'getpeerinfo:Returns data about each connected network node as a json array of objects'
+ 'getrawchangeaddress:Returns a new transparent Zcash address for receiving change'
+ 'getrawmempool:Returns all transaction ids in memory pool as a json array of string transaction ids'
+ 'getrawtransaction:Return the raw transaction data'
+ 'getreceivedbyaddress:Returns the total amount received by the given transparent Zcash address'
+ 'getspentinfo:Returns the txid and index where an output is spent'
+ 'gettransaction:Returns detailed information about in-wallet transaction'
+ 'gettxout:Returns details about an unspent transaction output'
+ 'gettxoutproof:Returns a hex-encoded proof that "txid" was included in a block'
+ 'gettxoutsetinfo:Returns statistics about the unspent transaction output set'
+ "getunconfirmedbalance:Returns the server's total unconfirmed transparent balance"
+ 'getwalletinfo:Returns wallet state information'
+ 'help:List all commands, or get help for a specified command'
+ 'importaddress:import address'
+ 'importprivkey:Adds a private key to your wallet'
+ 'importpubkey:Adds a public key'
+ 'importwallet:Imports taddr keys from a wallet dump file'
+ 'listaddresses:Lists the addresses managed by this wallet by source'
+ 'listaddressgroupings:Lists groups of transparent addresses'
+ 'listbanned:List all banned IPs/Subnets'
+ 'listlockunspent:Returns list of temporarily unspendable transparent outputs'
+ 'listreceivedbyaddress:List balances by transparent receiving address'
+ 'listsinceblock:Get all transactions in blocks'
+ "listtransactions:Returns up to 'count' of the most recent transactions"
+ 'listunspent:Returns array of unspent transparent transaction outputs'
+ 'lockunspent:Updates list of temporarily unspendable outputs'
+ 'ping:Requests that a ping be sent to all other nodes, to measure ping time'
+ 'prioritisetransaction:Accepts the transaction into mined blocks at a higher/lower priority'
+    'sendmany:Send to multiple transparent recipients'
+ 'sendrawtransaction:Submits raw transaction to local node and network'
+ 'sendtoaddress:Send an amount to a given transparent address'
+ 'setban:Attempts to add or remove an IP/Subnet from the banned list'
+ 'setgenerate:Set "generate" true or false to turn generation on or off'
+ 'setlogfilter:Sets the filter to be used for selecting events to log'
+ 'signmessage:Sign a message with the private key of a t-addr'
+ 'stop:Stop Zcash server'
+ 'submitblock:Attempts to submit new block to network'
+ 'validateaddress:Return information about the given Zcash address'
+ 'verifychain:Verifies blockchain database'
+ 'verifymessage:Verify a signed message'
+ 'verifytxoutproof:Verifies that a proof points to a transaction in a block'
+ 'walletconfirmbackup:Notify the wallet that the user has backed up the emergency recovery phrase'
+ 'z_converttex:Converts a transparent Zcash address to a TEX address'
+ 'z_exportkey:Reveals the zkey corresponding to "zaddr"'
+ 'z_exportviewingkey:Returns the full viewing key corresponding to "zaddr"'
+ 'z_exportwallet:Exports all wallet keys, for taddr and zaddr, in a human-readable format'
+ 'z_getaddressforaccount:get address for account'
+ "z_getbalance:Returns the balance of a taddr or zaddr belonging to the node's wallet"
+ "z_getbalanceforaccount:Returns the account's spendable balance for each value pool"
+ 'z_getbalanceforviewingkey:Returns the balance viewable by a full viewing key'
+ 'z_getmigrationstatus:Returns information about the status of the Sprout to Sapling migration'
+ 'z_getnewaccount:Prepares and returns a new account'
+ 'z_getnewaddress:Returns a new shielded address for receiving payments'
+ 'z_getnotescount:Returns the number of notes available in the wallet for each shielded value pool'
+ 'z_getoperationresult:Retrieve the result and status of an operation which has finished'
+ 'z_getoperationstatus:Get operation status and any associated result or error data'
+ 'z_getpaymentdisclosure:Generate a payment disclosure for a given joinsplit output'
+ "z_getsubtreesbyindex:Returns roots of subtrees of the given pool's note commitment tree"
+ "z_gettreestate:Return information about the given block's tree state"
+ 'z_importkey:Adds a zkey (as returned by z_exportkey) to your wallet'
+ 'z_importviewingkey:Adds a viewing key to your wallet'
+ 'z_importwallet:Imports taddr and zaddr keys from a wallet export file'
+ 'z_listaccounts:Returns the list of accounts created with z_getnewaccount'
+ 'z_listaddresses:Returns the list of shielded addresses belonging to the wallet'
+ 'z_listoperationids:Returns the list of operation ids currently known to the wallet'
+ "z_listreceivedbyaddress:Return a list of amounts received by a zaddr belonging to the node's wallet"
+ 'z_listunifiedreceivers:Returns a record of the individual receivers contained within the provided UA'
+ 'z_listunspent:Returns an array of unspent shielded notes'
+ 'z_mergetoaddress:Merge multiple UTXOs and notes into a single UTXO or note'
+ 'z_sendmany:Send a transaction with multiple recipients'
+ "z_setmigration:attempt to migrate all funds from this wallet's Sprout addresses"
+ 'z_shieldcoinbase:Shield transparent coinbase funds by sending to a shielded zaddr'
+ 'z_validateaddress:Return information about the given address'
+ 'z_validatepaymentdisclosure:Validates a payment disclosure'
+ 'z_viewtransaction:Get detailed shielded information about in-wallet transaction'
+ 'zcbenchmark:Runs a benchmark of the selected benchmark type samplecount times'
+ 'zcsamplejoinsplit:Perform a joinsplit and return the JSDescription'
+ )
-_arguments -C \
- '-?[display usage information]' \
- -conf='[specify configuration file]:file [zcash.conf]:_files' \
- -datadir='[specify data directory]:directory:_directories' \
- -testnet'[use the test network]' \
- -regtest'[enter regression test mode, which uses a special chain in which blocks can be solved instantly. This is intended for regression testing tools and app development.]' \
- -rpcconnect='[send commands to node running on specified ip]:rpcconnect [127.0.0.1]:_hosts' \
- -rpcport='[connect to JSON-RPC on specified port]: :_guard "[[\:digit\:]]#" "port [8232 or testnet\: 18232]"' \
- -rpcwait'[wait for RPC server to start]' \
- -rpcuser='[username for JSON-RPC connections]:rpcuser' \
- -rpcpassword='[password for JSON-RPC connections]:rpcpassword' \
- -rpcclienttimeout='[specify timeout during HTTP requests, or 0 for no timeout]: :_guard "[[\:digit\:]]#" "timeout (seconds) [900]"' \
- ':subcommand:->subcommand' && ret=0
+ _describe -t commands 'command' commands "$@"
+}
-case $state in
- subcommand)
- subcommands=(
- 'getbestblockhash'
- 'getblock'
- 'getblockchaininfo'
- 'getblockcount'
- 'getblockhash'
- 'getblockheader'
- 'getchaintips'
- 'getdifficulty'
- 'getmempoolinfo'
- 'getrawmempool'
- 'gettxout'
- 'gettxoutproof'
- 'gettxoutsetinfo'
- 'verifychain'
- 'verifytxoutproof'
- 'getinfo'
- 'help'
- 'stop'
- 'generate'
- 'getgenerate'
- 'setgenerate'
- 'getblocksubsidy'
- 'getblocktemplate'
- 'getlocalsolps'
- 'getmininginfo'
- 'getnetworkhashps'
- 'getnetworksolps'
- 'prioritisetransaction'
- 'submitblock'
- 'addnode'
- 'clearbanned'
- 'disconnectnode'
- 'getaddednodeinfo'
- 'getconnectioncount'
- 'getnettotals'
- 'getnetworkinfo'
- 'getpeerinfo'
- 'listbanned'
- 'ping'
- 'setban'
- 'createrawtransaction'
- 'decoderawtransaction'
- 'decodescript'
- 'fundrawtransaction'
- 'getrawtransaction'
- 'sendrawtransaction'
- 'signrawtransaction'
- 'createmultisig'
- 'estimatefee'
- 'estimatepriority'
- 'validateaddress'
- 'verifymessage'
- 'z_validateaddress'
- 'addmultisigaddress'
- 'backupwallet'
- 'dumpprivkey'
- 'dumpwallet'
- 'encryptwallet'
- 'getaccount'
- 'getaccountaddress'
- 'getaddressesbyaccount'
- 'getbalance'
- 'getnewaddress'
- 'getrawchangeaddress'
- 'getreceivedbyaccount'
- 'getreceivedbyaddress'
- 'gettransaction'
- 'getunconfirmedbalance'
- 'getwalletinfo'
- 'importaddress'
- 'importprivkey'
- 'importwallet'
- 'keypoolrefill'
- 'listaccounts'
- 'listaddressgroupings'
- 'listlockunspent'
- 'listreceivedbyaccount'
- 'listreceivedbyaddress'
- 'listsinceblock'
- 'listtransactions'
- 'listunspent'
- 'lockunspent'
- 'move'
- 'sendfrom'
- 'sendmany'
- 'sendtoaddress'
- 'setaccount'
- 'settxfee'
- 'signmessage'
- 'z_exportkey'
- 'z_exportwallet'
- 'z_getbalance'
- 'z_getnewaddress'
- 'z_getoperationresult'
- 'z_getoperationstatus'
- 'z_gettotalbalance'
- 'z_importkey'
- 'z_importwallet'
- 'z_listaddresses'
- 'z_listoperationids'
- 'z_listreceivedbyaddress'
- 'z_sendmany'
- 'zcbenchmark'
- 'zcrawjoinsplit'
- 'zcrawkeygen'
- 'zcrawreceive'
- 'zcsamplejoinsplit'
- )
-
- _describe -t subcommands 'zcash-cli subcommand' subcommands && ret=0
- ;;
-esac
-
-return ret
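+# global options; the first non-option word completes an RPC command and
+# remaining words fall back to filename completion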
+_arguments \
+ '(- *)-?[display usage information]' \
+ '(- *)-version[display version and exit]' \
+ '-conf=[specify configuration file]:file [zcash.conf]:_files' \
+ '-datadir=[specify data directory]:directory:_files -/' \
+ '-testnet[use the test network]' \
+ '-regtest[enter regression test mode]' \
+ '-rpcconnect=[send commands to node running on specified ip]:rpcconnect [127.0.0.1]:_hosts' \
+ '-rpcport=[connect to JSON-RPC on specified port]: :_guard "[[\:digit\:]]#" "port [8232 or testnet\: 18232]"' \
+ '-rpcwait[wait for RPC server to start]' \
+ '-rpcuser=[username for JSON-RPC connections]:rpcuser' \
+ '-rpcpassword=[password for JSON-RPC connections]:rpcpassword' \
+ '-rpcclienttimeout=[specify timeout during HTTP requests, or 0 for no timeout]: :_guard "[[\:digit\:]]#" "timeout (seconds) [900]"' \
+ '1: :_zcash_cli_commands' \
+ '*:: :_files'
# Local Variables:
# mode: Shell-Script