aboutsummaryrefslogtreecommitdiffstats
path: root/src/_concourse
diff options
context:
space:
mode:
authorJulien Nicoulaud <julien.nicoulaud@gmail.com>2019-06-18 19:55:00 +0200
committerJulien Nicoulaud <julien.nicoulaud@gmail.com>2019-06-18 19:55:11 +0200
commitb512d57b6d0d2b85368a8068ec1a13288a93d267 (patch)
treedcca899393104a71aeaf6db09d68d733edfe9134 /src/_concourse
parentMerge pull request #638 from dark-panda/fix-pg-createuser (diff)
downloadzsh-completions-b512d57b6d0d2b85368a8068ec1a13288a93d267.tar
zsh-completions-b512d57b6d0d2b85368a8068ec1a13288a93d267.tar.gz
zsh-completions-b512d57b6d0d2b85368a8068ec1a13288a93d267.tar.bz2
zsh-completions-b512d57b6d0d2b85368a8068ec1a13288a93d267.tar.lz
zsh-completions-b512d57b6d0d2b85368a8068ec1a13288a93d267.tar.xz
zsh-completions-b512d57b6d0d2b85368a8068ec1a13288a93d267.tar.zst
zsh-completions-b512d57b6d0d2b85368a8068ec1a13288a93d267.zip
#610: add compdef for concourse 5.2.0 (https://concourse-ci.org)
Diffstat (limited to 'src/_concourse')
-rw-r--r--src/_concourse1517
1 files changed, 1517 insertions, 0 deletions
diff --git a/src/_concourse b/src/_concourse
new file mode 100644
index 0000000..2bc1240
--- /dev/null
+++ b/src/_concourse
@@ -0,0 +1,1517 @@
+#compdef concourse fly
+# ------------------------------------------------------------------------------
+# Description
+# -----------
+#
+# Completion script for concourse 5.2.0 (https://concourse-ci.org).
+#
+# ------------------------------------------------------------------------------
+# Authors
+# -------
+#
+# * Julien Nicoulaud <https://github.com/nicoulaj>
+#
+# ------------------------------------------------------------------------------
+
+local _concourse_fly_target \
+ _concourse_fly_pipeline \
+ _concourse_fly_pipeline_config \
+ _concourse_fly_job \
+ _concourse_fly_resource \
+ _concourse_fly_resource_type
+
+(( $+functions[_concourse_fly] )) ||
+_concourse_fly() {
+
+ local context state state_descr line ret=1
+ typeset -A opt_args
+
+ _arguments -C \
+ '(- : *)'{-h,--help}'[display help information]' \
+ '(- : *)'{-v,--version}'[print the version of Fly and exit]' \
+ {-t,--target=}'[concourse target name]: :_concourse_fly_targets' \
+ --verbose'[print API requests and responses]' \
+ --print-table-headers'[print table headers even for redirected output]' \
+ '(-): :->command' \
+ '(-)*:: :->arguments' \
+ && ret=0
+
+ case $state in
+ (command)
+ _concourse_fly_commands
+ ;;
+ (arguments)
+ curcontext=${curcontext%:*:*}:concourse-fly-$words[1]:
+ if (( $+functions[_concourse_fly_${words[1]}_args] )); then
+ _concourse_fly_target=${(v)opt_args[(i)-t|--target]}
+ _concourse_fly_${words[1]}_args && ret=0
+ else
+ _message "unknown command ${words[1]}" && ret=1
+ fi
+ ;;
+ esac
+
+ return ret
+}
+
+(( $+functions[_concourse_fly_commands] )) ||
+_concourse_fly_commands() {
+ local commands=(
+ {ab,abort-build}":abort a build"
+ {bs,builds}":list builds data"
+ {cr,check-resource}":check a resource"
+ {crt,check-resource-type}":check a resource-type"
+ {cl,checklist}":print a Checkfile of the given pipeline"
+ {ctc,clear-task-cache}":clears cache from a task container"
+ {cs,containers}":print the active containers"
+ {c,curl}":curl the api"
+ {dtg,delete-target}":delete target"
+ {dp,destroy-pipeline}":destroy a pipeline"
+ {dt,destroy-team}":destroy a team and delete all of its data"
+ {etg,edit-target}":edit a target"
+ {e,execute}":execute a one-off build using local bits"
+ {ep,expose-pipeline}":make a pipeline publicly viewable"
+ {fp,format-pipeline}":format a pipeline config"
+ {gp,get-pipeline}":get a pipeline's current configuration"
+ {gt,get-team}":show team configuration"
+ "help:print help message"
+ {hp,hide-pipeline}":hide a pipeline from the public"
+ {i,intercept,hijack}":execute a command in a container"
+ {js,jobs}":list the jobs in the pipelines"
+ {lw,land-worker}":land a worker"
+ {l,login}":authenticate with the target"
+ {o,logout}":release authentication with the target"
+ {op,order-pipelines}":orders pipelines"
+ {pj,pause-job}":pause a job"
+ {pp,pause-pipeline}":pause a pipeline"
+ {ps,pipelines}":list the configured pipelines"
+ {pw,prune-worker}":prune a stalled, landing, landed, or retiring worker"
+ {rp,rename-pipeline}":rename a pipeline"
+ {rt,rename-team}":rename a team"
+ {rvs,resource-versions}":list the versions of a resource"
+ {rs,resources}":list the resources in the pipeline"
+ {sp,set-pipeline}":create or update a pipeline's configuration"
+ {st,set-team}":create or modify a team to have the given credentials"
+ "status:login status"
+ {s,sync}":download and replace the current fly from the target"
+ {ts,targets}":list saved targets"
+ {t,teams}":list the configured teams"
+ {tj,trigger-job}":start a job in a pipeline"
+ {uj,unpause-job}":unpause a job"
+ {up,unpause-pipeline}":un-pause a pipeline"
+ "userinfo:user information"
+ {vp,validate-pipeline}":validate a pipeline config"
+ {vs,volumes}":list the active volumes"
+ {w,watch}":stream a build's output"
+ {ws,workers}":list the registered workers"
+ )
+ _describe -t commands commands commands
+}
+
+(( $+functions[_concourse_fly_ab_args] )) ||
+_concourse_fly_ab_args() {
+ _concourse_fly_abort-build_args
+}
+
+(( $+functions[_concourse_fly_bs_args] )) ||
+_concourse_fly_bs_args() {
+ _concourse_fly_builds_args
+}
+
+(( $+functions[_concourse_fly_cl_args] )) ||
+_concourse_fly_cl_args() {
+ _concourse_fly_checklist_args
+}
+
+(( $+functions[_concourse_fly_cr_args] )) ||
+_concourse_fly_cr_args() {
+ _concourse_fly_check-resource_args
+}
+
+(( $+functions[_concourse_fly_crt_args] )) ||
+_concourse_fly_crt_args() {
+ _concourse_fly_check-resource-type_args
+}
+
+(( $+functions[_concourse_fly_ctc_args] )) ||
+_concourse_fly_ctc_args() {
+ _concourse_fly_clear-task-cache_args
+}
+
+(( $+functions[_concourse_fly_cs_args] )) ||
+_concourse_fly_cs_args() {
+ _concourse_fly_containers_args
+}
+
+(( $+functions[_concourse_fly_c_args] )) ||
+_concourse_fly_c_args() {
+ _concourse_fly_curl_args
+}
+
+(( $+functions[_concourse_fly_dtg_args] )) ||
+_concourse_fly_dtg_args() {
+ _concourse_fly_delete-target_args
+}
+
+(( $+functions[_concourse_fly_dp_args] )) ||
+_concourse_fly_dp_args() {
+ _concourse_fly_destroy-pipeline_args
+}
+
+(( $+functions[_concourse_fly_dt_args] )) ||
+_concourse_fly_dt_args() {
+ _concourse_fly_destroy-team_args
+}
+
+(( $+functions[_concourse_fly_etg_args] )) ||
+_concourse_fly_etg_args() {
+ _concourse_fly_edit-target_args
+}
+
+(( $+functions[_concourse_fly_e_args] )) ||
+_concourse_fly_e_args() {
+ _concourse_fly_execute_args
+}
+
+(( $+functions[_concourse_fly_ep_args] )) ||
+_concourse_fly_ep_args() {
+ _concourse_fly_expose-pipeline_args
+}
+
+(( $+functions[_concourse_fly_fp_args] )) ||
+_concourse_fly_fp_args() {
+ _concourse_fly_format-pipeline_args
+}
+
+(( $+functions[_concourse_fly_gp_args] )) ||
+_concourse_fly_gp_args() {
+ _concourse_fly_get-pipeline_args
+}
+
+(( $+functions[_concourse_fly_gt_args] )) ||
+_concourse_fly_gt_args() {
+ _concourse_fly_get-team_args
+}
+
+(( $+functions[_concourse_fly_hp_args] )) ||
+_concourse_fly_hp_args() {
+ _concourse_fly_hide-pipeline_args
+}
+
+(( $+functions[_concourse_fly_hijack_args] )) ||
+_concourse_fly_hijack_args() {
+ _concourse_fly_intercept_args
+}
+
+(( $+functions[_concourse_fly_i_args] )) ||
+_concourse_fly_i_args() {
+ _concourse_fly_intercept_args
+}
+
+(( $+functions[_concourse_fly_js_args] )) ||
+_concourse_fly_js_args() {
+ _concourse_fly_jobs_args
+}
+
+(( $+functions[_concourse_fly_lw_args] )) ||
+_concourse_fly_lw_args() {
+ _concourse_fly_land-worker_args
+}
+
+(( $+functions[_concourse_fly_l_args] )) ||
+_concourse_fly_l_args() {
+ _concourse_fly_login_args
+}
+
+(( $+functions[_concourse_fly_o_args] )) ||
+_concourse_fly_o_args() {
+ _concourse_fly_logout_args
+}
+
+(( $+functions[_concourse_fly_op_args] )) ||
+_concourse_fly_op_args() {
+ _concourse_fly_order-pipelines_args
+}
+
+(( $+functions[_concourse_fly_pj_args] )) ||
+_concourse_fly_pj_args() {
+ _concourse_fly_pause-job_args
+}
+
+(( $+functions[_concourse_fly_pp_args] )) ||
+_concourse_fly_pp_args() {
+ _concourse_fly_pause-pipeline_args
+}
+
+(( $+functions[_concourse_fly_ps_args] )) ||
+_concourse_fly_ps_args() {
+ _concourse_fly_pipelines_args
+}
+
+(( $+functions[_concourse_fly_pw_args] )) ||
+_concourse_fly_pw_args() {
+ _concourse_fly_prune-worker_args
+}
+
+(( $+functions[_concourse_fly_rp_args] )) ||
+_concourse_fly_rp_args() {
+ _concourse_fly_rename-pipeline_args
+}
+
+(( $+functions[_concourse_fly_rt_args] )) ||
+_concourse_fly_rt_args() {
+ _concourse_fly_rename-team_args
+}
+
+(( $+functions[_concourse_fly_rs_args] )) ||
+_concourse_fly_rs_args() {
+ _concourse_fly_resources_args
+}
+
+(( $+functions[_concourse_fly_rvs_args] )) ||
+_concourse_fly_rvs_args() {
+ _concourse_fly_resource-versions_args
+}
+
+(( $+functions[_concourse_fly_sp_args] )) ||
+_concourse_fly_sp_args() {
+ _concourse_fly_set-pipeline_args
+}
+
+(( $+functions[_concourse_fly_st_args] )) ||
+_concourse_fly_st_args() {
+ _concourse_fly_set-team_args
+}
+
+(( $+functions[_concourse_fly_s_args] )) ||
+_concourse_fly_s_args() {
+ _concourse_fly_sync_args
+}
+
+(( $+functions[_concourse_fly_ts_args] )) ||
+_concourse_fly_ts_args() {
+ _concourse_fly_targets_args
+}
+
+(( $+functions[_concourse_fly_t_args] )) ||
+_concourse_fly_t_args() {
+ _concourse_fly_teams_args
+}
+
+(( $+functions[_concourse_fly_tj_args] )) ||
+_concourse_fly_tj_args() {
+ _concourse_fly_trigger-job_args
+}
+
+(( $+functions[_concourse_fly_uj_args] )) ||
+_concourse_fly_uj_args() {
+ _concourse_fly_unpause-job_args
+}
+
+(( $+functions[_concourse_fly_up_args] )) ||
+_concourse_fly_up_args() {
+ _concourse_fly_unpause-pipeline_args
+}
+
+(( $+functions[_concourse_fly_vp_args] )) ||
+_concourse_fly_vp_args() {
+ _concourse_fly_validate-pipeline_args
+}
+
+(( $+functions[_concourse_fly_vs_args] )) ||
+_concourse_fly_vs_args() {
+ _concourse_fly_volumes_args
+}
+
+(( $+functions[_concourse_fly_w_args] )) ||
+_concourse_fly_w_args() {
+ _concourse_fly_watch_args
+}
+
+(( $+functions[_concourse_fly_ws_args] )) ||
+_concourse_fly_ws_args() {
+ _concourse_fly_workers_args
+}
+
+(( $+functions[_concourse_fly_help_args] )) ||
+_concourse_fly_help_args() {
+ _arguments -C \
+ '(- : *)'{-h,--help}'[display help information]'
+}
+
+(( $+functions[_concourse_fly_status_args] )) ||
+_concourse_fly_status_args() {
+ _arguments -C \
+ '(- : *)'{-h,--help}'[display help information]'
+}
+
+(( $+functions[_concourse_fly_userinfo_args] )) ||
+_concourse_fly_userinfo_args() {
+ _arguments -C \
+ '(- : *)'{-h,--help}'[display help information]' \
+ '--json[print command result as JSON]'
+}
+
+(( $+functions[_concourse_fly_abort-build_args] )) ||
+_concourse_fly_abort-build_args() {
+ _arguments -C \
+ '(- : *)'{-h,--help}'[display help information]' \
+ '(-j --job)'{-j,--job=}'[name of a job to cancel]: :_concourse_fly_pipeline_slash_jobs' \
+ '(-b --build)'{-b,--build=}'[job build number to cancel, or build id]: :_concourse_fly_builds'
+}
+
+(( $+functions[_concourse_fly_builds_args] )) ||
+_concourse_fly_builds_args() {
+ _arguments -C \
+ '(- : *)'{-h,--help}'[display help information]' \
+ '(-a --all-teams)'{-a,--all-teams}'[show builds for the all teams that user has access to]' \
+ '(-c --count)'{-c,--count=}'[number of builds you want to limit the return to]: :number' \
+ '--current-team[show builds for the currently targeted team]' \
+ '(-j --job -p --pipeline)'{-j,--job=}'[name of a job to get builds for]: :_concourse_fly_pipeline_slash_jobs' \
+ '--json[print command result as JSON]' \
+ '(-j --job -p --pipeline)'{-p,--pipeline=}'[name of a pipeline to get builds for]: :_concourse_fly_pipelines' \
+ '--since=[start of the range to filter builds]: :_concourse_fly_dates' \
+ '--until=[end of the range to filter builds]: :_concourse_fly_dates'
+}
+
+(( $+functions[_concourse_fly_checklist_args] )) ||
+_concourse_fly_checklist_args() {
+ _arguments -C \
+ '(- : *)'{-h,--help}'[display help information]' \
+ '(-p --pipeline)'{-p,--pipeline=}'[the pipeline from which to generate the Checkfile]: :_concourse_fly_pipelines'
+}
+
+(( $+functions[_concourse_fly_check-resource_args] )) ||
+_concourse_fly_check-resource_args() {
+
+ local context state state_descr line ret=1
+ typeset -A opt_args
+
+ _arguments -C \
+ '(- : *)'{-h,--help}'[display help information]' \
+ '(-r --resource)'{-r,--resource=}'[name of a resource to check]: :_concourse_fly_pipeline_slash_resources' \
+ '(-f --from)'{-f,--from=}'[version of the resource to check from]: :->version' \
+ && ret=0
+
+ case $state in
+ (version)
+ _concourse_fly_resource=${(v)opt_args[(i)-r|--resource]}
+ _concourse_fly_pipeline_resource_versions && ret=0
+ ;;
+ esac
+
+ return ret
+}
+
+(( $+functions[_concourse_fly_check-resource-type_args] )) ||
+_concourse_fly_check-resource-type_args() {
+
+ local context state state_descr line ret=1
+ typeset -A opt_args
+
+ _arguments -C \
+ '(- : *)'{-h,--help}'[display help information]' \
+ '(-r --resource-type)'{-r,--resource-type=}'[name of a resource type to check]: :_concourse_fly_pipeline_slash_resource_types' \
+ '(-f --from)'{-f,--from=}'[version of the resource type to check from]: :->version' \
+ && ret=0
+
+ case $state in
+ (version)
+ _concourse_fly_resource_type=${(v)opt_args[(i)-r|--resource-type]}
+ _concourse_fly_pipeline_resource_type_versions && ret=0
+ ;;
+ esac
+
+ return ret
+}
+
+(( $+functions[_concourse_fly_clear-task-cache_args] )) ||
+_concourse_fly_clear-task-cache_args() {
+  _arguments -C \
+    '(- : *)'{-h,--help}'[display help information]' \
+    '(-j --job)'{-j,--job=}'[name of a job to clear cache from]: :_concourse_fly_pipeline_slash_jobs' \
+    '(-s --step)'{-s,--step=}'[step name to clear cache from]:task step' \
+    '(-c --cache-path)'{-c,--cache-path=}'[cache directory to clear out]: :_files -/' \
+    '(-n --non-interactive)'{-n,--non-interactive}'[destroy the task cache(s) without confirmation]'
+}
+
+(( $+functions[_concourse_fly_containers_args] )) ||
+_concourse_fly_containers_args() {
+ _arguments -C \
+ '(- : *)'{-h,--help}'[display help information]' \
+ '--json[print command result as JSON]'
+}
+
+(( $+functions[_concourse_fly_curl_args] )) ||
+_concourse_fly_curl_args() {
+ _arguments -C \
+ '(- : *)'{-h,--help}'[display help information]' \
+ '--print-and-exit[print curl command and exit]'
+}
+
+(( $+functions[_concourse_fly_delete-target_args] )) ||
+_concourse_fly_delete-target_args() {
+ _arguments -C \
+ '(- : *)'{-h,--help}'[display help information]' \
+ '(-a --all)'{-a,--all}'[delete all targets]'
+}
+
+(( $+functions[_concourse_fly_destroy-pipeline_args] )) ||
+_concourse_fly_destroy-pipeline_args() {
+ _arguments -C \
+ '(- : *)'{-h,--help}'[display help information]' \
+ '(-p --pipeline)'{-p,--pipeline=}'[the pipeline to destroy]: :_concourse_fly_pipelines' \
+ '(-n --non-interactive)'{-n,--non-interactive}'[destroy the pipeline without confirmation]'
+}
+
+(( $+functions[_concourse_fly_destroy-team_args] )) ||
+_concourse_fly_destroy-team_args() {
+  _arguments -C \
+    '(- : *)'{-h,--help}'[display help information]' \
+    '(-n --team-name)'{-n,--team-name=}'[the team to delete]: :_concourse_fly_teams' \
+    '--non-interactive[force apply configuration]'
+}
+
+(( $+functions[_concourse_fly_edit-target_args] )) ||
+_concourse_fly_edit-target_args() {
+  _arguments -C \
+    '(- : *)'{-h,--help}'[display help information]' \
+    '--target-name=[update target name]: :_concourse_fly_targets' \
+    '(-u --concourse-url)'{-u,--concourse-url=}'[update concourse URL]: :_urls' \
+    '(-n --team-name)'{-n,--team-name=}'[update team name]: :_concourse_fly_teams'
+}
+
+(( $+functions[_concourse_fly_execute_args] )) ||
+_concourse_fly_execute_args() {
+
+ local context state state_descr line ret=1
+ typeset -A opt_args
+
+ _arguments -C \
+ '(- : *)'{-h,--help}'[display help information]' \
+ '(-c --config)'{-c,--config=}'[the task config to execute]: :_concourse_config_files' \
+ '(-p --privileged)'{-p,--privileged}'[run the task with full privileges]' \
+ '--include-ignored[disregard .gitignore entries and uploads everything]' \
+ '*'{-i,--input=}'[an input to provide to the task]: :->input' \
+ '*'{-m,--input-mapping=}'[map a resource to a different name as task input]: :->input-mapping' \
+ '(-j --inputs-from)'{-j,--inputs-from=}'[a job to base the inputs on]: :_concourse_fly_pipeline_slash_jobs' \
+ '*'{-o,--output=}'[an output to fetch from the task]: :->output' \
+ '--image=[image resource for the one-off build]: :_concourse_fly_images' \
+ '*--tag=[a tag for a specific environment]: :_concourse_fly_tags' \
+ '*'{-v,--var=}'[specify a string value to set for a variable in the pipeline]: :->var' \
+ '*'{-y,--yaml-var=}'[specify a YAML value to set for a variable in the pipeline]: :->var' \
+ '(-l --load-vars-from)'{-l,--load-vars-from=}'[variable flag that can be used for filling in template values in configuration from a YAML file]: :_files' \
+ && ret=0
+
+ _concourse_fly_pipeline_config=${(v)opt_args[(i)-c|--config]}
+
+ case $state in
+ (input-mapping)
+ # TODO complete --input-mapping
+ _message 'input mapping'
+ ;;
+ (input)
+ _concourse_fly_input_equal_paths && ret=0
+ ;;
+ (output)
+ _concourse_fly_output_equal_paths && ret=0
+ ;;
+ (var)
+ _concourse_fly_var_equal_values && ret=0
+ ;;
+ esac
+
+ return ret
+}
+
+(( $+functions[_concourse_fly_expose-pipeline_args] )) ||
+_concourse_fly_expose-pipeline_args() {
+ _arguments -C \
+ '(- : *)'{-h,--help}'[display help information]' \
+ '(-p --pipeline)'{-p,--pipeline=}'[pipeline to expose]: :_concourse_fly_pipelines'
+}
+
+(( $+functions[_concourse_fly_format-pipeline_args] )) ||
+_concourse_fly_format-pipeline_args() {
+ _arguments -C \
+ '(- : *)'{-h,--help}'[display help information]' \
+ '(-c --config)'{-c,--config=}'[pipeline configuration file]: :_concourse_config_files' \
+ '(-w --write)'{-w,--write}'[do not print to stdout, overwrite the file in place]'
+}
+
+(( $+functions[_concourse_fly_get-pipeline_args] )) ||
+_concourse_fly_get-pipeline_args() {
+ _arguments -C \
+ '(- : *)'{-h,--help}'[display help information]' \
+ '(-p --pipeline)'{-p,--pipeline=}'[get configuration of this pipeline]: :_concourse_fly_pipelines' \
+ '(-j --json)'{-j,--json}'[print config as json instead of yaml]'
+}
+
+(( $+functions[_concourse_fly_get-team_args] )) ||
+_concourse_fly_get-team_args() {
+ _arguments -C \
+ '(- : *)'{-h,--help}'[display help information]' \
+ '(-n --team)'{-n,--team=}'[get configuration of this team]: :_concourse_fly_teams' \
+ '(-j --json)'{-j,--json}'[print config as json instead of yaml]'
+}
+
+(( $+functions[_concourse_fly_hide-pipeline_args] )) ||
+_concourse_fly_hide-pipeline_args() {
+ _arguments -C \
+ '(- : *)'{-h,--help}'[display help information]' \
+ '(-p --pipeline)'{-p,--pipeline=}'[pipeline to hide]: :_concourse_fly_pipelines'
+}
+
+(( $+functions[_concourse_fly_intercept_args] )) ||
+_concourse_fly_intercept_args() {
+ # TODO complete --handle
+ # TODO complete --check
+ # TODO complete --step
+ # TODO complete --step-type
+ _arguments -C \
+ '(- : *)'{-h,--help}'[display help information]' \
+ '(-j --job --handle -c --check -u --url)'{-j,--job=}'[name of a job to hijack]: :_concourse_fly_pipeline_slash_jobs' \
+ '(-j --job --handle -c --check -u --url)--handle=[handle id of a job to hijack]:job handle' \
+ '(-j --job --handle -c --check -u --url)'{-c,--check=}'[name of a resource'\''s checking container to hijack]:name' \
+ '(-j --job --handle -c --check -u --url)'{-u,--url=}'[URL for the build, job, or check container to hijack]: :_urls' \
+ '(-b --build)'{-b,--build=}'[build number within the job, or global build ID]: :_concourse_fly_builds' \
+ '(-s --step)'{-s,--step=}'[name of step to hijack]:step' \
+ '--step-type=[type of step to hijack]:step type' \
+ '(-a --attempt)'{-a,--attempt=}'[attempt number of step to hijack]: :_values -s, "number" 1 2 3 4 5 6 7 8 9' \
+ '(-):command name: _command_names -e' \
+ '*::arguments:_normal'
+}
+
+(( $+functions[_concourse_fly_jobs_args] )) ||
+_concourse_fly_jobs_args() {
+ _arguments -C \
+ '(- : *)'{-h,--help}'[display help information]' \
+ '(-p --pipeline)'{-p,--pipeline=}'[get jobs in this pipeline]: :_concourse_fly_pipelines' \
+ '--json[print command result as JSON]'
+}
+
+(( $+functions[_concourse_fly_land-worker_args] )) ||
+_concourse_fly_land-worker_args() {
+ _arguments -C \
+ '(- : *)'{-h,--help}'[display help information]' \
+ '(-w --worker)'{-w,--worker=}'[worker to land]: :_concourse_fly_workers'
+}
+
+(( $+functions[_concourse_fly_login_args] )) ||
+_concourse_fly_login_args() {
+ _arguments -C \
+ '(- : *)'{-h,--help}'[display help information]' \
+ '(-c --concourse-url)'{-c,--concourse-url=}'[concourse URL to authenticate with]: :_urls' \
+ '(-k --insecure)'{-k,--insecure}'[skip verification of the endpoint'\''s SSL certificate]' \
+ '(-u --username)'{-u,--username=}'[username for basic auth]: :_users' \
+ '(-p --password)'{-p,--password=}'[password for basic auth]:password' \
+ '(-n --team-name)'{-n,--team-name=}'[team to authenticate with]: :_concourse_fly_teams' \
+ '--ca-cert=[path to Concourse PEM-encoded CA certificate file]: :_files -g "*.pem"' \
+ '(-b --open-browser)'{-b,--open-browser}'[open browser to the auth endpoint]'
+}
+
+(( $+functions[_concourse_fly_logout_args] )) ||
+_concourse_fly_logout_args() {
+ _arguments -C \
+ '(- : *)'{-h,--help}'[display help information]' \
+ '(-a --all)'{-a,--all}'[logout of all targets]'
+}
+
+(( $+functions[_concourse_fly_order-pipelines_args] )) ||
+_concourse_fly_order-pipelines_args() {
+ _arguments -C \
+ '(- : *)'{-h,--help}'[display help information]' \
+ '(-p --pipeline)'{-p,--pipeline=}'[name of pipeline to order]: :_concourse_fly_pipelines'
+}
+
+(( $+functions[_concourse_fly_pause-job_args] )) ||
+_concourse_fly_pause-job_args() {
+ _arguments -C \
+ '(- : *)'{-h,--help}'[display help information]' \
+ '(-j --job)'{-j,--job=}'[name of a job to pause]: :_concourse_fly_pipeline_slash_jobs'
+}
+
+(( $+functions[_concourse_fly_pause-pipeline_args] )) ||
+_concourse_fly_pause-pipeline_args() {
+ _arguments -C \
+ '(- : *)'{-h,--help}'[display help information]' \
+ '(-p --pipeline)'{-p,--pipeline=}'[pipeline to pause]: :_concourse_fly_pipelines'
+}
+
+(( $+functions[_concourse_fly_pipelines_args] )) ||
+_concourse_fly_pipelines_args() {
+ _arguments -C \
+ '(- : *)'{-h,--help}'[display help information]' \
+ '(-a --all)'{-a,--all}'[show all pipelines]' \
+ '--json[print command result as JSON]'
+}
+
+(( $+functions[_concourse_fly_prune-worker_args] )) ||
+_concourse_fly_prune-worker_args() {
+ _arguments -C \
+ '(- : *)'{-h,--help}'[display help information]' \
+ '(-w --worker)'{-w,--worker=}'[worker to prune]: :_concourse_fly_workers' \
+ '(-a --all-stalled)'{-a,--all-stalled}'[prune all stalled workers]'
+}
+
+(( $+functions[_concourse_fly_rename-pipeline_args] )) ||
+_concourse_fly_rename-pipeline_args() {
+  _arguments -C \
+    '(- : *)'{-h,--help}'[display help information]' \
+    '(-o --old-name)'{-o,--old-name=}'[pipeline to rename]: :_concourse_fly_pipelines' \
+    '(-n --new-name)'{-n,--new-name=}'[name to set as pipeline name]:new pipeline name'
+}
+
+(( $+functions[_concourse_fly_rename-team_args] )) ||
+_concourse_fly_rename-team_args() {
+  _arguments -C \
+    '(- : *)'{-h,--help}'[display help information]' \
+    '(-o --old-name)'{-o,--old-name=}'[current team name]: :_concourse_fly_teams' \
+    '(-n --new-name)'{-n,--new-name=}'[new team name]:new team name'
+}
+
+(( $+functions[_concourse_fly_resources_args] )) ||
+_concourse_fly_resources_args() {
+ _arguments -C \
+ '(- : *)'{-h,--help}'[display help information]' \
+ '(-p --pipeline)'{-p,--pipeline=}'[get resources in this pipeline]: :_concourse_fly_pipelines' \
+ '--json[print command result as JSON]'
+}
+
+(( $+functions[_concourse_fly_resource-versions_args] )) ||
+_concourse_fly_resource-versions_args() {
+ _arguments -C \
+ '(- : *)'{-h,--help}'[display help information]' \
+ '(-c --count)'{-c,--count=}'[number of builds you want to limit the return to]:number' \
+ '(-r --resource)'{-r,--resource=}'[name of a resource to get versions for]: :_concourse_fly_pipeline_slash_resources' \
+ '--json[print command result as JSON]'
+}
+
+(( $+functions[_concourse_fly_set-pipeline_args] )) ||
+_concourse_fly_set-pipeline_args() {
+
+ local context state state_descr line ret=1
+ typeset -A opt_args
+
+ _arguments -C \
+ '(- : *)'{-h,--help}'[display help information]' \
+ '(-n --non-interactive)'{-n,--non-interactive}'[skips interactions, uses default values]' \
+ '--no-color[disable color output]' \
+ '--check-creds[validate credential variables against credential manager]' \
+ '(-p --pipeline)'{-p,--pipeline=}'[pipeline to configure]: :_concourse_fly_pipelines' \
+ '(-c --config)'{-c,--config=}'[pipeline configuration file]: :_concourse_config_files' \
+ '*'{-v,--var=}'[specify a string value to set for a variable in the pipeline]: :->var' \
+ '*'{-y,--yaml-var=}'[specify a YAML value to set for a variable in the pipeline]: :->var' \
+ '(-l --load-vars-from)'{-l,--load-vars-from=}'[variable flag that can be used for filling in template values in configuration from a YAML file]: :_files' \
+ && ret=0
+
+ _concourse_fly_pipeline_config=${(v)opt_args[(i)-c|--config]}
+
+ case $state in
+ (var)
+ _concourse_fly_var_equal_values && ret=0
+ ;;
+ esac
+
+ return ret
+}
+
+(( $+functions[_concourse_fly_set-team_args] )) ||
+_concourse_fly_set-team_args() {
+ _arguments -C \
+ '(- : *)'{-h,--help}'[display help information]' \
+ '(-n --team-name)'{-n,--team-name=}'[the team to create or modify]: :_concourse_fly_teams' \
+ '--non-interactive[force apply configuration]' \
+ '*--local-user=[list of whitelisted local concourse users]: :_users' \
+ '(-c --config)'{-c,--config=}'[configuration file for specifying team params]: :_concourse_config_files' \
+ '*--bitbucket-cloud-user=[list of whitelisted Bitbucket Cloud users]:user name' \
+ '*--bitbucket-cloud-team=[list of whitelisted Bitbucket Cloud teams]:team name' \
+ '*--cf-user=[list of whitelisted CloudFoundry users]:user name' \
+ '*--cf-org=[list of whitelisted CloudFoundry orgs]:org name' \
+ '*--cf-space=[list of whitelisted CloudFoundry spaces]:space name' \
+ '*--github-user=[list of whitelisted GitHub users]:user name' \
+ '*--github-org=[list of whitelisted GitHub orgs]:org name' \
+ '*--github-team=[list of whitelisted GitHub teams]:team name' \
+ '*--gitlab-user=[list of whitelisted GitLab users]:user name' \
+ '*--gitlab-group=[list of whitelisted GitLab groups]:group name' \
+ '*--ldap-user=[list of whitelisted LDAP users]:user name' \
+ '*--ldap-group=[list of whitelisted LDAP groups]:group name' \
+ '*--oauth-user=[list of whitelisted OAuth2 users]:user name' \
+ '*--oauth-group=[list of whitelisted OAuth2 groups]:group name' \
+ '*--oidc-user=[list of whitelisted OIDC users]:user name' \
+ '*--oidc-group=[list of whitelisted OIDC groups]:group name'
+}
+
+(( $+functions[_concourse_fly_sync_args] )) ||
+_concourse_fly_sync_args() {
+ _arguments -C \
+ '(- : *)'{-h,--help}'[display help information]' \
+ '(-f --force)'{-f,--force}'[sync even if versions already match]'
+}
+
+(( $+functions[_concourse_fly_targets_args] )) ||
+_concourse_fly_targets_args() {
+ _arguments -C \
+ '(- : *)'{-h,--help}'[display help information]'
+}
+
+(( $+functions[_concourse_fly_teams_args] )) ||
+_concourse_fly_teams_args() {
+ _arguments -C \
+ '(- : *)'{-h,--help}'[display help information]' \
+ '(-d --details)'{-d,--details}'[print authentication configuration]' \
+ '--json[print command result as JSON]'
+}
+
+(( $+functions[_concourse_fly_trigger-job_args] )) ||
+_concourse_fly_trigger-job_args() {
+ _arguments -C \
+ '(- : *)'{-h,--help}'[display help information]' \
+ '(-j --job)'{-j,--job=}'[name of a job to trigger]: :_concourse_fly_pipeline_slash_jobs' \
+ '(-w --watch)'{-w,--watch}'[start watching the build output]'
+}
+
+(( $+functions[_concourse_fly_unpause-job_args] )) ||
+_concourse_fly_unpause-job_args() {
+ _arguments -C \
+ '(- : *)'{-h,--help}'[display help information]' \
+ '(-j --job)'{-j,--job=}'[name of a job to unpause]: :_concourse_fly_pipeline_slash_jobs'
+}
+
+(( $+functions[_concourse_fly_unpause-pipeline_args] )) ||
+_concourse_fly_unpause-pipeline_args() {
+ _arguments -C \
+ '(- : *)'{-h,--help}'[display help information]' \
+ '(-p --pipeline)'{-p,--pipeline=}'[pipeline to unpause]: :_concourse_fly_pipelines'
+}
+
+(( $+functions[_concourse_fly_validate-pipeline_args] )) ||
+_concourse_fly_validate-pipeline_args() {
+
+ local context state state_descr line ret=1
+ typeset -A opt_args
+
+ _arguments -C \
+ '(- : *)'{-h,--help}'[display help information]' \
+ '(-c --config)'{-c,--config=}'[pipeline configuration file]: :_concourse_config_files' \
+ '(-s --strict)'{-s,--strict}'[fail on warnings]' \
+ '(-o --output)'{-o,--output}'[output templated pipeline to stdout]' \
+ '*'{-v,--var=}'[specify a string value to set for a variable in the pipeline]: :->var' \
+ '*'{-y,--yaml-var=}'[specify a YAML value to set for a variable in the pipeline]: :->var' \
+ '(-l --load-vars-from)'{-l,--load-vars-from=}'[variable flag that can be used for filling in template values in configuration from a YAML file]: :_files' \
+ && ret=0
+
+ _concourse_fly_pipeline_config=${(v)opt_args[(i)-c|--config]}
+
+ case $state in
+ (var)
+ _concourse_fly_var_equal_values && ret=0
+ ;;
+ esac
+
+ return ret
+}
+
+(( $+functions[_concourse_fly_volumes_args] )) ||
+_concourse_fly_volumes_args() {
+ _arguments -C \
+ '(- : *)'{-h,--help}'[display help information]' \
+ '(-d --details)'{-d,--details}'[print additional information for each volume]' \
+ '--json[print command result as JSON]'
+}
+
+(( $+functions[_concourse_fly_watch_args] )) ||
+_concourse_fly_watch_args() {
+ _arguments -C \
+ '(- : *)'{-h,--help}'[display help information]' \
+ '(-j --job)'{-j,--job=}'[watches builds of the given job]: :_concourse_fly_pipeline_slash_jobs' \
+ '(-b --build)'{-b,--build=}'[watches a specific build]: :_concourse_fly_builds' \
+ '(-t --timestamps)'{-t,--timestamps}'[print with local timestamp]'
+}
+
+(( $+functions[_concourse_fly_workers_args] )) ||
+_concourse_fly_workers_args() {
+ _arguments -C \
+ '(- : *)'{-h,--help}'[display help information]' \
+ '(-d --details)'{-d,--details}'[print additional information for each worker]' \
+ '--json[print command result as JSON]'
+}
+
+(( $+functions[_concourse_fly_targets] )) ||
+_concourse_fly_targets() {
+ local targets=($(_call_program targets $service targets | awk '{print $1}'))
+ _describe -t targets 'target' targets $@ || _message 'no target found'
+}
+
+(( $+functions[_concourse_fly_teams] )) ||
+_concourse_fly_teams() {
+ if [[ -n ${_concourse_fly_target} ]]; then
+ local teams=($(_call_program teams $service -t ${_concourse_fly_target} teams | awk '{print $1}'))
+ _describe -t teams 'team' teams $@ || _message 'no team found'
+ else
+ _message 'team'
+ fi
+}
+
+(( $+functions[_concourse_fly_pipelines] )) ||
+_concourse_fly_pipelines() {
+ if [[ -n ${_concourse_fly_target} ]]; then
+ local pipelines=($(_call_program pipelines $service -t ${_concourse_fly_target} pipelines | awk '{print $1}'))
+ _describe -t pipelines 'pipeline' pipelines $@ || _message 'no pipeline found'
+ else
+ _message 'pipeline'
+ fi
+}
+
+(( $+functions[_concourse_fly_pipeline_jobs] )) ||
+_concourse_fly_pipeline_jobs() {
+ if [[ -n ${_concourse_fly_target} ]] && [[ -n ${_concourse_fly_pipeline} ]]; then
+ local jobs=($(_call_program jobs $service -t ${_concourse_fly_target} jobs -p ${_concourse_fly_pipeline} 2>&1 | awk '{print $1}'))
+ _describe -t jobs "${_concourse_fly_pipeline} job" jobs $@ || _message 'no job found'
+ else
+ _message 'job'
+ fi
+}
+
+(( $+functions[_concourse_fly_pipeline_resources] )) ||
+_concourse_fly_pipeline_resources() {
+  # Offer resource names for the selected pipeline on the selected target.
+  if [[ -z ${_concourse_fly_target} || -z ${_concourse_fly_pipeline} ]]; then
+    _message 'resource'
+    return
+  fi
+  local -a resources
+  resources=($(_call_program resources $service -t ${_concourse_fly_target} resources -p ${_concourse_fly_pipeline} | awk '{print $1}'))
+  _describe -t resources 'resource' resources $@ || _message 'no resource found'
+}
+
+(( $+functions[_concourse_fly_pipeline_resource_types] )) ||
+_concourse_fly_pipeline_resource_types() {
+  # Offer resource types (second column of `fly resources`) for the selected pipeline.
+  if [[ -z ${_concourse_fly_target} || -z ${_concourse_fly_pipeline} ]]; then
+    _message 'resource type'
+    return
+  fi
+  local -a resource_types
+  resource_types=($(_call_program resource-types $service -t ${_concourse_fly_target} resources -p ${_concourse_fly_pipeline} | awk '{print $2}'))
+  _describe -t resource-types 'resource type' resource_types $@ || _message 'no resource type found'
+}
+
+(( $+functions[_concourse_fly_workers] )) ||
+_concourse_fly_workers() {
+  # Offer worker names from the currently selected target, if any.
+  if [[ -z ${_concourse_fly_target} ]]; then
+    _message 'worker'
+    return
+  fi
+  local -a workers
+  workers=($(_call_program workers $service -t ${_concourse_fly_target} workers | awk '{print $1}'))
+  _describe -t workers 'worker' workers $@ || _message 'no worker found'
+}
+
+(( $+functions[_concourse_fly_builds] )) ||
+_concourse_fly_builds() {
+  # Offer build identifiers from the currently selected target, if any.
+  if [[ -z ${_concourse_fly_target} ]]; then
+    _message 'build'
+    return
+  fi
+  local -a builds
+  builds=($(_call_program builds $service -t ${_concourse_fly_target} builds | awk '{print $1}'))
+  _describe -t builds 'build' builds $@ || _message 'no build found'
+}
+
+(( $+functions[_concourse_fly_pipeline_resource_versions] )) ||
+_concourse_fly_pipeline_resource_versions() {
+  # Offer versions (second column of `fly resource-versions`) for the selected resource.
+  if [[ -z ${_concourse_fly_target} || -z ${_concourse_fly_resource} ]]; then
+    _message 'resource version'
+    return
+  fi
+  local -a resource_versions
+  resource_versions=($(_call_program resource-versions $service -t ${_concourse_fly_target} resource-versions -r ${_concourse_fly_resource} | awk '{print $2}'))
+  _describe -t resource-versions 'resource version' resource_versions $@ || _message 'no version found'
+}
+
+(( $+functions[_concourse_fly_pipeline_config_vars] )) ||
+_concourse_fly_pipeline_config_vars() {
+  # Offer ((var)) placeholder names found in the pipeline config file.
+  # NOTE(review): `grep -P` (PCRE) is GNU-specific — confirm availability on BSD/macOS.
+  if [[ -z ${_concourse_fly_pipeline_config} ]]; then
+    _message 'variable'
+    return
+  fi
+  local -a variables
+  variables=($(grep -Po '(?<=\(\()[^\)]+' ${_concourse_fly_pipeline_config}))
+  _describe -t variables 'variables' variables $@ || _message 'no variable found'
+}
+
+(( $+functions[_concourse_fly_pipeline_config_inputs] )) ||
+_concourse_fly_pipeline_config_inputs() {
+  # Offer task input names extracted from the pipeline config file (requires yq).
+  if [[ -n ${_concourse_fly_pipeline_config} ]]; then
+    if (( $+commands[yq] )); then
+      # Discard stderr (instead of merging it into stdout) so yq error text is
+      # never offered as a completion candidate; matches the outputs completer.
+      local inputs=($(yq -r '.. | .inputs? | arrays | .[].name' ${_concourse_fly_pipeline_config} 2>/dev/null))
+      _describe -t inputs 'input' inputs $@ || _message 'no input found'
+    else
+      _message 'install yq (https://github.com/kislyuk/yq) to get completion of inputs'
+    fi
+  else
+    _message 'input'
+  fi
+}
+
+(( $+functions[_concourse_fly_pipeline_config_outputs] )) ||
+_concourse_fly_pipeline_config_outputs() {
+  # Offer task output names extracted from the pipeline config file (requires yq).
+  if [[ -z ${_concourse_fly_pipeline_config} ]]; then
+    _message 'output'
+    return
+  fi
+  if (( ! $+commands[yq] )); then
+    _message 'install yq (https://github.com/kislyuk/yq) to get completion of outputs'
+    return
+  fi
+  local -a outputs
+  outputs=($(yq -r '.. | .outputs? | arrays | .[].name' ${_concourse_fly_pipeline_config}))
+  _describe -t outputs 'output' outputs $@ || _message 'no output found'
+}
+
+(( $+functions[_concourse_fly_pipeline_resource_type_versions] )) ||
+_concourse_fly_pipeline_resource_type_versions() {
+  # Placeholder completer: only shows a hint, offers no candidates.
+  # seems like there is no command for listing resource type versions...
+  _message 'resource type version'
+}
+
+(( $+functions[_concourse_fly_tags] )) ||
+_concourse_fly_tags() {
+  # Placeholder completer: only shows a hint, offers no candidates.
+  # seems like there is no command for listing tags...
+  _message 'tag'
+}
+
+(( $+functions[_concourse_fly_dates] )) ||
+_concourse_fly_dates() {
+  # Complete timestamps in the 'YYYY-MM-DD HH:MM:SS' format fly expects.
+  # _dates completer does not seem to work on zsh 5.7.1
+  _dates -f '%Y-%m-%d %H:%M:%S'
+}
+
+(( $+functions[_concourse_fly_pipeline_slash_jobs] )) ||
+_concourse_fly_pipeline_slash_jobs() {
+  # Complete PIPELINE/JOB: pipeline names (suffixed with /) first, then jobs.
+  if compset -P '*/'; then
+    # Remember the pipeline typed before the slash for the job lookup.
+    _concourse_fly_pipeline="${${IPREFIX%/}##*=}"
+    _concourse_fly_pipeline_jobs
+  else
+    _concourse_fly_pipelines -qS/
+  fi
+}
+
+(( $+functions[_concourse_fly_pipeline_slash_resources] )) ||
+_concourse_fly_pipeline_slash_resources() {
+  # Complete PIPELINE/RESOURCE: pipeline names (suffixed with /) first, then resources.
+  if compset -P '*/'; then
+    # Remember the pipeline typed before the slash for the resource lookup.
+    _concourse_fly_pipeline="${${IPREFIX%/}##*=}"
+    _concourse_fly_pipeline_resources
+  else
+    _concourse_fly_pipelines -qS/
+  fi
+}
+
+(( $+functions[_concourse_fly_pipeline_slash_resource_types] )) ||
+_concourse_fly_pipeline_slash_resource_types() {
+  # Complete PIPELINE/RESOURCE-TYPE: pipeline names (suffixed with /) first, then types.
+  if compset -P '*/'; then
+    # Remember the pipeline typed before the slash for the resource-type lookup.
+    _concourse_fly_pipeline="${${IPREFIX%/}##*=}"
+    _concourse_fly_pipeline_resource_types
+  else
+    _concourse_fly_pipelines -qS/
+  fi
+}
+
+(( $+functions[_concourse_fly_var_equal_values] )) ||
+_concourse_fly_var_equal_values() {
+  # Complete VAR=VALUE: variable names (suffixed with =) first, then a free-form value.
+  if compset -P '*='; then
+    _message 'value'
+  else
+    _concourse_fly_pipeline_config_vars -qS=
+  fi
+}
+
+(( $+functions[_concourse_fly_input_equal_paths] )) ||
+_concourse_fly_input_equal_paths() {
+  # Complete INPUT=PATH: input names (suffixed with =) first, then filesystem paths.
+  if compset -P '*='; then
+    _files
+  else
+    _concourse_fly_pipeline_config_inputs -qS=
+  fi
+}
+
+(( $+functions[_concourse_fly_output_equal_paths] )) ||
+_concourse_fly_output_equal_paths() {
+  # Complete OUTPUT=PATH: output names (suffixed with =) first, then filesystem paths.
+  if compset -P '*='; then
+    _files
+  else
+    _concourse_fly_pipeline_config_outputs -qS=
+  fi
+}
+
+(( $+functions[_concourse_server] )) ||
+_concourse_server() {
+
+  # Top-level completion for the `concourse` server binary: completes the
+  # global options, then the subcommand name, then dispatches to the matching
+  # _concourse_<command>_args function for that subcommand's options.
+  local context state state_descr line ret=1
+  typeset -A opt_args
+
+  _arguments -C \
+    '(- : *)'{-h,--help}'[display help information]' \
+    '(- : *)'{-v,--version}'[print the version of Concourse and exit]' \
+    '(-): :->command' \
+    '(-)*:: :->arguments' \
+    && ret=0
+
+  case $state in
+    (command)
+      _concourse_commands && ret=0
+      ;;
+    (arguments)
+      # Narrow the completion context to the chosen subcommand.
+      curcontext=${curcontext%:*:*}:concourse-$words[1]:
+      if (( $+functions[_concourse_${words[1]}_args] )); then
+        _concourse_${words[1]}_args && ret=0
+      else
+        _message "unknown command ${words[1]}" && ret=1
+      fi
+      ;;
+  esac
+
+  return ret
+}
+
+(( $+functions[_concourse_commands] )) ||
+_concourse_commands() {
+  # Describe the subcommands of the `concourse` server binary.
+  local commands=(
+    "generate-key:generate RSA key for use with Concourse components"
+    "land-worker:safely drain a worker's assignments for temporary downtime"
+    "migrate:run database migrations"
+    "quickstart:run both 'web' and 'worker' together, auto-wired"
+    "retire-worker:safely remove a worker from the cluster permanently"
+    "web:run the web UI and build scheduler"
+    "worker:run and register a worker"
+  )
+  _describe -t commands commands commands
+}
+
+(( $+functions[_concourse_generate-key_args] )) ||
+_concourse_generate-key_args() {
+  # Option completion for `concourse generate-key`.
+  _arguments -C \
+    '(- : *)'{-h,--help}'[display help information]' \
+    '(-t --type)'{-t,--type=}'[the type of key to generate]:key type:(rsa ssh)' \
+    '(-f --filename)'{-f,--filename=}'[file path where the key shall be created. When generating ssh keys, the public key will be stored in a file with the same name but with .pub appended]: :_files' \
+    '(-b --bits)'{-b,--bits=}'[the number of bits in the key to create]:integer'
+}
+
+(( $+functions[_concourse_land-worker_args] )) ||
+_concourse_land-worker_args() {
+  # Option completion for `concourse land-worker`.
+  _arguments -C \
+    '(- : *)'{-h,--help}'[display help information]' \
+    '--name=[the name of the worker you wish to land]:worker name' \
+    '*--tsa-host=[TSA host to forward the worker through]: :_concourse_host_colon_ports' \
+    '--tsa-public-key=[file containing a public key to expect from the TSA]: :_files' \
+    '--tsa-worker-private-key=[file containing the private key to use when authenticating to the TSA]: :_files'
+}
+
+(( $+functions[_concourse_migrate_args] )) ||
+_concourse_migrate_args() {
+  # Option completion for `concourse migrate` (database migration flags).
+  _arguments -C \
+    '(- : *)'{-h,--help}'[display help information]' \
+    '(- : *)--current-db-version[print the current database version and exit]' \
+    '(- : *)--supported-db-version[print the max supported database version and exit]' \
+    '(- : *)--migrate-db-to-version=[migrate to the specified database version and exit]:database version' \
+    '--encryption-key=[a 16 or 32 length key used to encrypt sensitive information before storing it in the database]:encryption key' \
+    '--postgres-host=[the host to connect to]: :_hosts' \
+    '--postgres-port=[the port to connect to]: :_concourse_ports' \
+    '--postgres-socket=[path to a UNIX domain socket to connect to]: :_files' \
+    '--postgres-user=[the user to sign in as]: :_users' \
+    '--postgres-password=[the user'\''s password]:password' \
+    '--postgres-sslmode=[whether or not to use SSL]:SSL mode:((disable require verify-ca verify-full))' \
+    '--postgres-ca-cert=[CA cert file location, to verify when connecting with SSL]: :_files' \
+    '--postgres-client-cert=[client cert file location]: :_files' \
+    '--postgres-client-key=[client key file location]: :_files' \
+    '--postgres-connect-timeout=[dialing timeout]:duration' \
+    '--postgres-database=[the name of the database to use]:database name'
+}
+
+(( $+functions[_concourse_retire-worker_args] )) ||
+_concourse_retire-worker_args() {
+  # Option completion for `concourse retire-worker`.
+  _arguments -C \
+    '(- : *)'{-h,--help}'[display help information]' \
+    '--name=[the name of the worker you wish to retire]:worker name' \
+    '*--tsa-host=[TSA host to forward the worker through]: :_concourse_host_colon_ports' \
+    '--tsa-public-key=[file containing a public key to expect from the TSA]: :_files' \
+    '--tsa-worker-private-key=[file containing the private key to use when authenticating to the TSA]: :_files'
+}
+
+(( $+functions[_concourse_web_args] )) ||
+_concourse_web_args() {
+  # Option completion for `concourse web` (ATC + TSA flags).
+  _arguments -C \
+    '(- : *)'{-h,--help}'[display help information]' \
+    '--peer-address=[network address of this web node, reachable by other web nodes]: :_concourse_host_colon_ports' \
+    '--log-level=[minimum level of logs to see]: :_concourse_log_levels' \
+    '--bind-ip=[IP address on which to listen for web traffic]: :_concourse_ip_addresses' \
+    '--bind-port=[port on which to listen for HTTP traffic]: :_concourse_ports' \
+    '--tls-bind-port=[port on which to listen for HTTPS traffic]: :_concourse_ports' \
+    '--tls-cert=[file containing an SSL certificate]: :_files' \
+    '--tls-key=[file containing an RSA private key, used to encrypt HTTPS traffic]: :_files' \
+    '--external-url=[URL used to reach any ATC from the outside world]: :_urls' \
+    '--encryption-key=[a 16 or 32 length key used to encrypt sensitive information before storing it in the database]:encryption key' \
+    '--old-encryption-key=[encryption key previously used for encrypting sensitive information]:encryption key' \
+    '--debug-bind-ip=[IP address on which to listen for the pprof debugger endpoints]: :_concourse_ip_addresses' \
+    '--debug-bind-port=[port on which to listen for the pprof debugger endpoints]: :_concourse_ports' \
+    '--intercept-idle-timeout=[length of time for a intercepted session to be idle before terminating]: :_concourse_durations' \
+    '--enable-global-resources[enable equivalent resources across pipelines and teams to share a single version history]' \
+    '--global-resource-check-timeout=[time limit on checking for new versions of resources]: :_concourse_durations' \
+    '--resource-checking-interval=[interval on which to check for new versions of resources]: :_concourse_durations' \
+    '--resource-type-checking-interval=[interval on which to check for new versions of resource types]: :_concourse_durations' \
+    '--container-placement-strategy=[method by which a worker is selected during container placement]:strategy:((volume-locality random fewest-build-containers))' \
+    '--baggageclaim-response-header-timeout=[how long to wait for Baggageclaim to send the response header]: :_concourse_durations' \
+    '--cli-artifacts-dir=[directory containing downloadable CLI binaries]: :_files -/' \
+    '--log-db-queries[log database queries]' \
+    '--build-tracker-interval=[interval on which to run build tracking]: :_concourse_durations' \
+    '--default-build-logs-to-retain=[default build logs to retain, 0 means all]:number' \
+    '--max-build-logs-to-retain=[maximum build logs to retain, 0 means not specified]:number' \
+    '--default-days-to-retain-build-logs=[default days to retain build logs. 0 means unlimited]:number' \
+    '--max-days-to-retain-build-logs=[maximum days to retain build logs, 0 means not specified]:number' \
+    '--default-task-cpu-limit=[default max number of cpu shares per task, 0 means unlimited]:number' \
+    '--default-task-memory-limit=[default maximum memory per task, 0 means unlimited]:number' \
+    '--enable-build-auditing[enable auditing for all api requests connected to builds]' \
+    '--enable-container-auditing[enable auditing for all api requests connected to containers]' \
+    '--enable-job-auditing[enable auditing for all api requests connected to jobs]' \
+    '--enable-pipeline-auditing[enable auditing for all api requests connected to pipelines]' \
+    '--enable-resource-auditing[enable auditing for all api requests connected to resources]' \
+    '--enable-system-auditing[enable auditing for all api requests connected to system transactions]' \
+    '--enable-team-auditing[enable auditing for all api requests connected to teams]' \
+    '--enable-worker-auditing[enable auditing for all api requests connected to workers]' \
+    '--enable-volume-auditing[enable auditing for all api requests connected to volumes]' \
+    '--postgres-host=[the host to connect to]: :_hosts' \
+    '--postgres-port=[the port to connect to]: :_concourse_ports' \
+    '--postgres-socket=[path to a UNIX domain socket to connect to]: :_files' \
+    '--postgres-user=[the user to sign in as]: :_users' \
+    '--postgres-password=[the user'\''s password]:password' \
+    '--postgres-sslmode=[whether or not to use SSL]:SSL mode:((disable require verify-ca verify-full))' \
+    '--postgres-ca-cert=[CA cert file location, to verify when connecting with SSL]: :_files' \
+    '--postgres-client-cert=[client cert file location]: :_files' \
+    '--postgres-client-key=[client key file location]: :_files' \
+    '--postgres-connect-timeout=[dialing timeout]: :_concourse_durations' \
+    '--postgres-database=[the name of the database to use]:database name' \
+    '--secret-retry-attempts=[the number of attempts secret will be retried to be fetched, in case a retryable error happens]:number' \
+    '--secret-retry-interval=[the interval between secret retry retrieval attempts]: :_concourse_durations' \
+    '--secret-cache-enabled[enable in-memory cache for secrets]' \
+    '--secret-cache-duration=[if the cache is enabled, secret values will be cached for not longer than this duration]: :_concourse_durations' \
+    '--secret-cache-purge-interval=[if the cache is enabled, expired items will be removed on this internal]: :_concourse_durations' \
+    '--credhub-url=[CredHub server address used to access secrets]: :_urls' \
+    '--credhub-path-prefix=[path under which to namespace credential lookup]:path' \
+    '--credhub-ca-cert=[path to PEM-encoded CA cert files to use to verify the CredHub server SSL cert]: :_files' \
+    '--credhub-client-cert=[path to the client certificate for mutual TLS authorization]: :_files' \
+    '--credhub-client-key=[path to the client private key for mutual TLS authorization]: :_files' \
+    '--credhub-insecure-skip-verify[enable insecure SSL verification]' \
+    '--credhub-client-id=[client ID for CredHub authorization]:client ID' \
+    '--credhub-client-secret=[client secret for CredHub authorization]:client secret' \
+    '--kubernetes-in-cluster[enables the in-cluster client]' \
+    '--kubernetes-config-path=[path to Kubernetes config when running ATC outside Kubernetes]: :_files' \
+    '--kubernetes-namespace-prefix=[prefix to use for Kubernetes namespaces under which secrets will be looked up]:prefex' \
+    '--aws-secretsmanager-access-key=[AWS Access key ID]:access key' \
+    '--aws-secretsmanager-secret-key=[AWS Secret Access Key]:secret key' \
+    '--aws-secretsmanager-session-token=[AWS Session Token]:session token' \
+    '--aws-secretsmanager-region=[AWS region to send requests to]:region' \
+    '--aws-secretsmanager-pipeline-secret-template=[AWS Secrets Manager secret identifier template used for pipeline specific parameter]:template' \
+    '--aws-secretsmanager-team-secret-template=[AWS Secrets Manager secret identifier template used for team specific parameter]:template' \
+    '--aws-ssm-access-key=[AWS Access key ID]:access key' \
+    '--aws-ssm-secret-key=[AWS Secret Access Key]:secret key' \
+    '--aws-ssm-session-token=[AWS Session Token]:session token' \
+    '--aws-ssm-region=[AWS region to send requests to]:region' \
+    '--aws-ssm-pipeline-secret-template=[AWS SSM parameter name template used for pipeline specific parameter]:template' \
+    '--aws-ssm-team-secret-template=[AWS SSM parameter name template used for team specific parameter]:template' \
+    '--vault-url=[vault server address used to access secrets]: :_urls' \
+    '--vault-path-prefix=[path under which to namespace credential lookup]:prefix' \
+    '--vault-shared-path=[path under which to lookup shared credentials]:path' \
+    '--vault-ca-cert=[path to a PEM-encoded CA cert file to use to verify the vault server SSL cert]: :_files' \
+    '--vault-ca-path=[path to a directory of PEM-encoded CA cert files to verify the vault server SSL cert]: :_files -/' \
+    '--vault-client-cert=[path to the client certificate for Vault authorization]: :_files' \
+    '--vault-client-key=[path to the client private key for Vault authorization]: :_files' \
+    '--vault-server-name=[if set, is used to set the SNI host when connecting via TLS]:server name' \
+    '--vault-insecure-skip-verify[enable insecure SSL verification]' \
+    '--vault-client-token=[client token for accessing secrets within the Vault server]:client token' \
+    '--vault-auth-backend=[auth backend to use for logging in to Vault]:auth backend' \
+    '--vault-auth-backend-max-ttl=[time after which to force a re-login]: :_concourse_durations' \
+    '--vault-retry-max=[the maximum time between retries when logging in or re-authing a secret]: :_concourse_durations' \
+    '--vault-retry-initial=[the initial time between retries when logging in or re-authing a secret]: :_concourse_durations' \
+    '*--vault-auth-param=[parameter to pass when logging in via the backend]: :_concourse_name_colon_values' \
+    '(-n --noop)'{-n,--noop}'[don'\''t actually do any automatic scheduling or checking]' \
+    '--worker-garden-url=[a Garden API endpoint to register as a worker]: :_urls' \
+    '--worker-baggageclaim-url=[a Baggageclaim API endpoint to register with the worker]: :_urls' \
+    '*--worker-resource=[a resource type to advertise for the worker]: :_concourse_type_colon_images' \
+    '--metrics-host-name=[host string to attach to emitted metrics]: :_hosts' \
+    '*--metrics-attribute=[a key-value attribute to attach to emitted metrics]: :_concourse_name_colon_values' \
+    '--capture-error-metrics[enable capturing of error log metrics]' \
+    '--datadog-agent-host=[datadog agent host to expose dogstatsd metrics]: :_hosts' \
+    '--datadog-agent-port=[datadog agent port to expose dogstatsd metrics]: :_concourse_ports' \
+    '--datadog-prefix=[prefix for all metrics to easily find them in Datadog]:prefix' \
+    '--influxdb-url=[influxDB server address to emit points to]: :_urls' \
+    '--influxdb-database=[influxDB database to write points to]:database name' \
+    '--influxdb-username=[influxDB server username]: :_users' \
+    '--influxdb-password=[influxDB server password]:password' \
+    '--influxdb-insecure-skip-verify[skip SSL verification when emitting to InfluxDB]' \
+    '--emit-to-logs[emit metrics to logs]' \
+    '--newrelic-account-id=[new Relic Account ID]:account ID' \
+    '--newrelic-api-key=[new Relic Insights API Key]:API key' \
+    '--newrelic-service-prefix=[an optional prefix for emitted New Relic events]:prefix' \
+    '--prometheus-bind-ip=[IP to listen on to expose Prometheus metrics]: :_concourse_ip_addresses' \
+    '--prometheus-bind-port=[port to listen on to expose Prometheus metrics]: :_concourse_ports' \
+    '--riemann-host=[riemann server address to emit metrics to]: :_hosts' \
+    '--riemann-port=[port of the Riemann server to emit metrics to]: :_concourse_ports' \
+    '--riemann-service-prefix=[an optional prefix for emitted Riemann services]:prefix' \
+    '*--riemann-tag=[tag to attach to emitted metrics]:tag' \
+    '--x-frame-options=[the value to set for X-Frame-Options]:options' \
+    '--cluster-name=[a name for this Concourse cluster, to be displayed on the dashboard page]:name' \
+    '--gc-interval=[interval on which to perform garbage collection]: :_concourse_durations' \
+    '--gc-one-off-grace-period=[period after which one-off build containers will be garbage-collected]: :_concourse_durations' \
+    '--gc-missing-grace-period=[period after which to reap containers and volumes that were created but went missing from the worker]: :_concourse_durations' \
+    '--syslog-hostname=[client hostname with which the build logs will be sent to the syslog server]: :_hosts' \
+    '--syslog-address=[remote syslog server address with port]: :_concourse_host_colon_ports' \
+    '--syslog-transport=[transport protocol for syslog messages]:protocol:((tcp udp tls))' \
+    '--syslog-drain-interval=[interval over which checking is done for new build logs to send to syslog server]: :_concourse_durations' \
+    '--syslog-ca-cert=[paths to PEM-encoded CA cert files to use to verify the Syslog server SSL cert]: :_files' \
+    '--cookie-secure[force sending secure flag on http cookies]' \
+    '--auth-duration=[length of time for which tokens are valid]: :_concourse_durations' \
+    '--session-signing-key=[file containing an RSA private key, used to sign auth tokens]: :_files' \
+    '*--add-local-user=[list of username:password combinations for all your local users]: :_concourse_username_colon_passwords' \
+    '*--main-team-local-user=[list of whitelisted local concourse users]: :_users' \
+    '(-c --main-team-config)'{-c,--main-team-config=}'[configuration file for specifying team params]: :_concourse_config_files' \
+    '*--main-team-bitbucket-cloud-user=[list of whitelisted Bitbucket Cloud users]: :_users' \
+    '*--main-team-bitbucket-cloud-team=[list of whitelisted Bitbucket Cloud teams]:team' \
+    '*--main-team-cf-user=[list of whitelisted CloudFoundry users]: :_users' \
+    '*--main-team-cf-org=[list of whitelisted CloudFoundry orgs]:org name' \
+    '*--main-team-cf-space=[list of whitelisted CloudFoundry spaces]:space name' \
+    '*--main-team-github-user=[list of whitelisted GitHub users]: :_users' \
+    '*--main-team-github-org=[list of whitelisted GitHub orgs]:org name' \
+    '*--main-team-github-team=[list of whitelisted GitHub teams]:team name' \
+    '*--main-team-gitlab-user=[list of whitelisted GitLab users]: :_users' \
+    '*--main-team-gitlab-group=[list of whitelisted GitLab groups]:group name' \
+    '*--main-team-ldap-user=[list of whitelisted LDAP users]: :_users' \
+    '*--main-team-ldap-group=[list of whitelisted LDAP groups]:group name' \
+    '*--main-team-oauth-user=[list of whitelisted OAuth2 users]: :_users' \
+    '*--main-team-oauth-group=[list of whitelisted OAuth2 groups]:group name' \
+    '*--main-team-oidc-user=[list of whitelisted OIDC users]: :_users' \
+    '*--main-team-oidc-group=[list of whitelisted OIDC groups]:group name' \
+    '--bitbucket-cloud-client-id=[client id]:client ID' \
+    '--bitbucket-cloud-client-secret=[client secret]:client secret' \
+    '--cf-client-id=[client id]:client ID' \
+    '--cf-client-secret=[client secret]:client secret' \
+    '--cf-api-url=[the base API URL of your CF deployment]: :_urls' \
+    '--cf-ca-cert=[CA Certificate]: :_files' \
+    '--cf-skip-ssl-validation[skip SSL validation]' \
+    '--github-client-id=[client id]:client ID' \
+    '--github-client-secret=[client secret]:client secret' \
+    '--github-host=[hostname of GitHub Enterprise deployment]: :_hosts' \
+    '--github-ca-cert=[CA certificate of GitHub Enterprise deployment]: :_files' \
+    '--gitlab-client-id=[client id]:client ID' \
+    '--gitlab-client-secret=[client secret]:client secret' \
+    '--gitlab-host=[hostname of Gitlab Enterprise deployment]: :_hosts' \
+    '--ldap-display-name=[the auth provider name displayed to users on the login page]:display name' \
+    '--ldap-host=[the host and optional port of the LDAP server]: :_hosts' \
+    '--ldap-bind-dn=[bind DN for searching LDAP users and groups]:bind DN' \
+    '--ldap-bind-pw=[bind Password for the user specified by bind-dn]:bind password' \
+    '--ldap-insecure-no-ssl[required if LDAP host does not use TLS]' \
+    '--ldap-insecure-skip-verify[skip certificate verification]' \
+    '--ldap-start-tls[start on insecure port, then negotiate TLS]' \
+    '--ldap-ca-cert=[CA certificate]: :_files' \
+    '--ldap-user-search-base-dn=[baseDN to start the search from]:baseDN' \
+    '--ldap-user-search-filter=[optional filter to apply when searching the directory]:filter' \
+    '--ldap-user-search-username=[attribute to match against the inputted username]:attribute' \
+    '--ldap-user-search-scope=[can either be: '\''sub'\'' - search the whole sub tree or '\''one'\'' - only search one level]:scope:((sub one))' \
+    '--ldap-user-search-id-attr=[a mapping of attributes on the user entry to claims]:attribute mapping' \
+    '--ldap-user-search-email-attr=[a mapping of attributes on the user entry to claims]:attribute mapping' \
+    '--ldap-user-search-name-attr=[a mapping of attributes on the user entry to claims]:attribute mapping' \
+    '--ldap-group-search-base-dn=[baseDN to start the search from]:baseDN' \
+    '--ldap-group-search-filter=[optional filter to apply when searching the directory]:filter' \
+    '--ldap-group-search-scope=[can either be: '\''sub'\'' - search the whole sub tree or '\''one'\'' - only search one level]:scope:((sub one))' \
+    '--ldap-group-search-user-attr=[adds an additional requirement to the filter that an attribute in the group match the user'\''s attribute value]:attribute' \
+    '--ldap-group-search-group-attr=[adds an additional requirement to the filter that an attribute in the group match the user'\''s attribute value]:attribute' \
+    '--ldap-group-search-name-attr=[the attribute of the group that represents its name]:attribute' \
+    '--oauth-display-name=[the auth provider name displayed to users on the login page]:display name' \
+    '--oauth-client-id=[client id]:client ID' \
+    '--oauth-client-secret=[client secret]:client secret' \
+    '--oauth-auth-url=[Authorization URL]: :_urls' \
+    '--oauth-token-url=[Token URL]: :_urls' \
+    '--oauth-userinfo-url=[UserInfo URL]: :_urls' \
+    '*--oauth-scope=[any additional scopes that need to be requested during authorization]:scope' \
+    '--oauth-groups-key=[the groups key indicates which claim to use to map external groups to Concourse teams]:group key' \
+    '--oauth-user-id-key=[the user id key indicates which claim to use to map an external user id to a Concourse user id]:id key' \
+    '--oauth-user-name-key=[the user name key indicates which claim to use to map an external user name to a Concourse user name]:name key' \
+    '--oauth-ca-cert=[CA Certificate]: :_files' \
+    '--oauth-skip-ssl-validation[skip SSL validation]' \
+    '--oidc-display-name=[the auth provider name displayed to users on the login page]:display name' \
+    '--oidc-issuer=[An OIDC issuer URL that will be used to discover provider configuration]: :_urls' \
+    '--oidc-client-id=[client id]:client ID' \
+    '--oidc-client-secret=[client secret]:client secret' \
+    '*--oidc-scope=[any additional scopes that need to be requested during authorization]:scope' \
+    '--oidc-groups-key=[the groups key indicates which claim to use to map external groups to Concourse teams]:group key' \
+    '--oidc-user-name-key=[the user name key indicates which claim to use to map an external user name to a Concourse user name]:user name key' \
+    '*--oidc-hosted-domains=[list of whitelisted domains when using Google, only users from a listed domain will be allowed to log in]:domain' \
+    '--oidc-ca-cert=[CA Certificate]: :_files' \
+    '--oidc-skip-ssl-validation[skip SSL validation]' \
+    '--tsa-log-level=[minimum level of logs to see]: :_concourse_log_levels' \
+    '--tsa-bind-ip=[IP address on which to listen for SSH]: :_concourse_ip_addresses' \
+    '--tsa-peer-address=[network address of this web node, reachable by other web nodes]: :_urls' \
+    '--tsa-bind-port=[port on which to listen for SSH]: :_concourse_ports' \
+    '--tsa-debug-bind-ip=[IP address on which to listen for the pprof debugger endpoints]: :_concourse_ip_addresses' \
+    '--tsa-debug-bind-port=[port on which to listen for the pprof debugger endpoints]: :_concourse_ports' \
+    '--tsa-host-key=[path to private key to use for the SSH server]: :_files' \
+    '--tsa-authorized-keys=[path to file containing keys to authorize, in SSH authorized_keys format]: :_files' \
+    '--tsa-team-authorized-keys=[path to file containing keys to authorize, in SSH authorized_keys format]: :_concourse_name_colon_paths' \
+    '--tsa-atc-url=[ATC API endpoints to which workers will be registered]: :_urls' \
+    '--tsa-session-signing-key=[path to private key to use when signing tokens in reqests to the ATC during registration]: :_files' \
+    '--tsa-heartbeat-interval=[interval on which to heartbeat workers to the ATC]: :_concourse_durations'
+}
+
+(( $+functions[_concourse_worker_args] )) ||
+_concourse_worker_args() {
+  # Option completion for `concourse worker` (Garden + Baggageclaim flags).
+  _arguments -C \
+    '(- : *)'{-h,--help}'[display help information]' \
+    '--name=[the name to set for the worker during registration]:name' \
+    '*--tag=[a tag to set during registration]:tag' \
+    '--team=[the name of the team that this worker will be assigned to]:team name' \
+    '--http-proxy=[HTTP proxy endpoint to use for containers]: :_urls' \
+    '--https-proxy=[HTTPS proxy endpoint to use for containers]: :_urls' \
+    '*--no-proxy=[blacklist of addresses to skip the proxy when reaching]: :_urls' \
+    '--ephemeral[if set, the worker will be immediately removed upon stalling]' \
+    '--certs-dir=[directory to use when creating the resource certificates volume]: :_files -/' \
+    '--work-dir=[directory in which to place container data]: :_files -/' \
+    '--bind-ip=[IP address on which to listen for the Garden server]: :_concourse_ip_addresses' \
+    '--bind-port=[port on which to listen for the Garden server]: :_concourse_ports' \
+    '--debug-bind-ip=[IP address on which to listen for the pprof debugger endpoints]: :_concourse_ip_addresses' \
+    '--debug-bind-port=[port on which to listen for the pprof debugger endpoints]: :_concourse_ports' \
+    '--healthcheck-bind-ip=[IP address on which to listen for health checking requests]: :_concourse_ip_addresses' \
+    '--healthcheck-bind-port=[port on which to listen for health checking requests]: :_concourse_ports' \
+    '--healthcheck-timeout=[HTTP timeout for the full duration of health checking]: :_concourse_durations' \
+    '--sweep-interval=[interval on which containers and volumes will be garbage collected from the worker]: :_concourse_durations' \
+    '--volume-sweeper-max-in-flight=[maximum number of volumes which can be swept in parallel]:number' \
+    '--container-sweeper-max-in-flight=[maximum number of containers which can be swept in parallel]:number' \
+    '--rebalance-interval=[duration after which the registration should be swapped to another random SSH gateway]: :_concourse_durations' \
+    '--connection-drain-timeout=[duration after which a worker should give up draining forwarded connections on shutdown]: :_concourse_durations' \
+    '--external-garden-url=[API endpoint of an externally managed Garden server to use instead of running the embedded Garden server]: :_urls' \
+    '--resource-types=[path to directory containing resource types the worker should advertise]: :_files -/' \
+    '--log-level=[minimum level of logs to see]: :_concourse_log_levels' \
+    '*--tsa-host=[TSA host to forward the worker through]: :_hosts' \
+    '--tsa-public-key=[file containing a public key to expect from the TSA]: :_files' \
+    '--tsa-worker-private-key=[file containing the private key to use when authenticating to the TSA]: :_files' \
+    '--garden-use-houdini[use the insecure Houdini Garden backend]' \
+    '--garden-bin=[path to gdn executable (or leave as gdn to find it in $PATH)]: :_files' \
+    '--garden-config=[path to a config file to use for Garden]: :_files' \
+    '--garden-dns-proxy-enable[enable proxy DNS server]' \
+    '--baggageclaim-log-level=[minimum level of logs to see]: :_concourse_log_levels' \
+    '--baggageclaim-bind-ip=[IP address on which to listen for API traffic]: :_concourse_ip_addresses' \
+    '--baggageclaim-bind-port=[port on which to listen for API traffic]: :_concourse_ports' \
+    '--baggageclaim-debug-bind-ip=[IP address on which to listen for the pprof debugger endpoints]: :_concourse_ip_addresses' \
+    '--baggageclaim-debug-bind-port=[port on which to listen for the pprof debugger endpoints]: :_concourse_ports' \
+    '--baggageclaim-volumes=[directory in which to place volume data]: :_files -/' \
+    '--baggageclaim-driver=[driver to use for managing volumes]:driver:((detect naive btrfs overlay))' \
+    '--baggageclaim-btrfs-bin=[path to btrfs binary]: :_files' \
+    '--baggageclaim-mkfs-bin=[path to mkfs.btrfs binary]: :_files' \
+    '--baggageclaim-overlays-dir=[path to directory in which to store overlay data]: :_files -/' \
+    '--baggageclaim-disable-user-namespaces[disable remapping of user/group IDs in unprivileged volumes]'
+}
+
+(( $+functions[_concourse_config_files] )) ||
+_concourse_config_files() {
+  # Complete paths to Concourse configuration files, restricted to YAML
+  # (*.yml / *.yaml) since that is the only format pipeline configs use.
+  _files -g "*.(yml|yaml)"
+}
+
+(( $+functions[_concourse_ip_addresses] )) ||
+_concourse_ip_addresses() {
+  # No enumerable candidates; just describe the expected argument to the user.
+  _message 'IP address'
+}
+
+(( $+functions[_concourse_ports] )) ||
+_concourse_ports() {
+  # No enumerable candidates; just describe the expected argument to the user.
+  _message 'port number'
+}
+
+(( $+functions[_concourse_host_colon_ports] )) ||
+_concourse_host_colon_ports() {
+  # Complete HOST:PORT arguments: a host name or IP address before the
+  # colon, a port number after it.
+  if compset -P '*:'; then
+    # The "host:" prefix has been consumed: only the port remains.
+    _concourse_ports && return 0
+  else
+    _alternative \
+      'hosts: :_hosts -qS:' \
+      'ip-addresses: :_guard "[[:digit:]]*" "IP address"' \
+      && return 0
+  fi
+  return 1
+}
+
+(( $+functions[_concourse_type_colon_images] )) ||
+_concourse_type_colon_images() {
+  # Complete TYPE:IMAGE arguments: the type before the colon, the image
+  # after it. (Branch order follows the TYPE:IMAGE naming, consistent with
+  # _concourse_host_colon_ports: compset -P '*:' succeeding means the part
+  # before the colon is already typed, so what remains is the image.)
+  local ret=1
+  if compset -P '*:'; then
+    _message 'image' && ret=0
+  else
+    _message 'type' && ret=0
+  fi
+  return ret
+}
+
+(( $+functions[_concourse_name_colon_values] )) ||
+_concourse_name_colon_values() {
+  # Complete NAME:VALUE arguments: the name before the colon, the value
+  # after it. (compset -P '*:' succeeding means the "name:" prefix has
+  # already been typed, so the remaining word is the value — same pattern
+  # as _concourse_host_colon_ports.)
+  local ret=1
+  if compset -P '*:'; then
+    _message 'value' && ret=0
+  else
+    _message 'name' && ret=0
+  fi
+  return ret
+}
+
+(( $+functions[_concourse_username_colon_passwords] )) ||
+_concourse_username_colon_passwords() {
+  # Complete USERNAME:PASSWORD arguments: the username before the colon,
+  # the password after it. (compset -P '*:' succeeding means "username:"
+  # is already typed, so what remains is the password — same pattern as
+  # _concourse_host_colon_ports.)
+  local ret=1
+  if compset -P '*:'; then
+    _message 'password' && ret=0
+  else
+    _message 'username' && ret=0
+  fi
+  return ret
+}
+
+(( $+functions[_concourse_name_colon_paths] )) ||
+_concourse_name_colon_paths() {
+  # Complete NAME:PATH arguments: the name before the colon, a file path
+  # after it. (compset -P '*:' succeeding means the "name:" prefix has
+  # already been typed, so the remaining word is the path and should get
+  # file completion — same pattern as _concourse_host_colon_ports.)
+  local ret=1
+  if compset -P '*:'; then
+    _files && ret=0
+  else
+    _message 'name' && ret=0
+  fi
+  return ret
+}
+
+(( $+functions[_concourse_durations] )) ||
+_concourse_durations() {
+  # Concourse is written in Go and parses durations with time.ParseDuration,
+  # which accepts "ns", "us", "ms", "s", "m" and "h" (combinable, e.g.
+  # "1h30m") but has no day unit — so do not advertise "5d" as valid.
+  _message 'duration, eg: "5s", "5m", "5h", "1h30m"'
+}
+
+(( $+functions[_concourse_log_levels] )) ||
+_concourse_log_levels() {
+  # Offer the log levels accepted by the --log-level family of options,
+  # each with a short description shown next to the candidate.
+  local -a log_levels
+  log_levels=(
+    'debug:debug traces'
+    'info:normal log level'
+    'error:log only errors'
+    'fatal:log only fatal errors'
+  )
+  _describe -t log-levels 'log level' log_levels
+}
+
+# Dispatch to the right completer: $service is set by the completion system
+# to the command name this function was invoked for (see the #compdef line,
+# which registers both "concourse" and "fly").
+case $service in
+  concourse) _concourse_server "$@" ;;
+  fly) _concourse_fly "$@" ;;
+  *) _message "unknown command ${service}" && ret=1 ;;
+esac
+
+# Local Variables:
+# mode: Shell-Script
+# sh-indentation: 2
+# indent-tabs-mode: nil
+# sh-basic-offset: 2
+# End:
+# vim: ft=zsh sw=2 ts=2 et