-rw-r--r--  src/_age          |   39
-rw-r--r--  src/_concourse    | 1038
-rw-r--r--  src/_console      |   72
-rw-r--r--  src/_flutter      |    1
-rw-r--r--  src/_git-pulls    |    2
-rw-r--r--  src/_google       |   94
-rw-r--r--  src/_jmeter       |    6
-rw-r--r--  src/_knife        |  324
-rw-r--r--  src/_l3build      |   91
-rw-r--r--  src/_pgsql_utils  |  590
-rw-r--r--  src/_phing        |   45
-rw-r--r--  src/_play         |  190
12 files changed, 110 insertions(+), 2382 deletions(-)
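The first hunk below merges age-keygen support into _age by dispatching on zsh's $service parameter, which holds the name of the command actually being completed when one script is registered for several commands. As a reference point, here is a condensed sketch of that pattern — only two option specs per branch, copied from the hunk itself; the full flag set is in the diff, not here:

#compdef age age-keygen
# Condensed sketch of the $service dispatch used by the updated _age.
case $service in
  (age)
    _arguments \
      '(- *)'{-h,--help}'[show help message and exit]' \
      '(-o --output)'{-o,--output=}'[write the result to the given file]:output:_files' \
      ':input file:_files'
    ;;
  (age-keygen)
    _arguments \
      '(-o --output)'{-o,--output}'[write the result to the given file]:file:_files' \
      '-y[convert an identity file to a recipients file]'
    ;;
esac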
@@ -1,9 +1,9 @@ -#compdef age +#compdef age age-keygen # ------------------------------------------------------------------------------ # Description # ----------- # -# Completion script for age 1.1.1 (https://github.com/FiloSottile/age). +# Completion script for age 1.2.1 (https://github.com/FiloSottile/age). # # ------------------------------------------------------------------------------ # Authors @@ -13,19 +13,28 @@ # # ------------------------------------------------------------------------------ -_arguments \ - -A '-*' \ - '(- *)'{-h,--help}'[show help message and exit]' \ - '(-e --encrypt -d --decrypt)'{-e,--encrypt}'[Encrypt INPUT to OUTPUT]' \ - '(-e --encrypt -d --decrypt -a --armor -p --passphrase -r --recipient -R --recipients-file)'{-d,--decrypt}'[Decrypt INPUT to OUTPUT]' \ - \*{-i,--identity=}'[Encrypt/Decrypt using the identities at PATH]:IDENTITY:_files' \ - '(-o --output)'{-o,--output=}'[Write encrypted/decrypted file to OUTPUT]:OUTPUT:_files' \ - '(-j --plugin)'{-j,--plugin=}'[Encrypt/Decrypt using the data-less PLUGIN]:PLUGIN:' \ - '(-d --decrypt)'\*{-r,--recipient=}'[Encrypt to the explicitly specified RECIPIENT]:RECIPIENT:' \ - '(-d --decrypt)'\*{-R,--recipients-file=}'[Encrypt to the RECIPIENTS listed in the file at PATH]:RECIPIENTS_FILE:_files' \ - '(-a --armor -d --decrypt)'{-a,--armor}'[Encrypt to an ASCII-only "armored" encoding]' \ - '(-p --passphrase -d --decrypt)'{-p,--passphrase}'[Encrypt with a passphrase]' \ - :INPUT:_files +case $service in + (age) + _arguments \ + -A '-*' \ + '(- *)'{-h,--help}'[show help message and exit]' \ + '(-e --encrypt -d --decrypt)'{-e,--encrypt}'[Encrypt the input to the output. Default if omitted]' \ + '(-e --encrypt -d --decrypt -a --armor -p --passphrase -r --recipient -R --recipients-file)'{-d,--decrypt}'[Decrypt the input to the output]' \ + '(-o --output)'{-o,--output=}'[Write the result to the given file]:OUTPUT:_files' \ + '(-a --armor -d --decrypt)'{-a,--armor}'[Encrypt to a PEM encoded format]' \ + '(-p --passphrase -d --decrypt)'{-p,--passphrase}'[Encrypt with a passphrase]' \ + '(-d --decrypt)'\*{-r,--recipient=}'[Encrypt to the explicitly specified RECIPIENT]:RECIPIENT:' \ + '(-d --decrypt)'\*{-R,--recipients-file=}'[Encrypt to the RECIPIENTS listed in the file at PATH]:RECIPIENTS_FILE:_files' \ + \*{-i,--identity=}'[Use the given identify file]:IDENTITY:_files' \ + :INPUT:_files + ;; + (age-keygen) + _arguments \ + '(-o --output)'{-o,--output}'[write the result to the given file]:file:_files' \ + '-y[convert an identity file to a recipients file]' \ + '*:input' + ;; +esac # Local Variables: # mode: Shell-Script diff --git a/src/_concourse b/src/_concourse index 4d0cbc6..802b9d1 100644 --- a/src/_concourse +++ b/src/_concourse @@ -1,4 +1,4 @@ -#compdef concourse fly +#compdef concourse # ------------------------------------------------------------------------------ # Description # ----------- @@ -13,1041 +13,6 @@ # # ------------------------------------------------------------------------------ -local _concourse_fly_target \ - _concourse_fly_pipeline \ - _concourse_fly_pipeline_config \ - _concourse_fly_job \ - _concourse_fly_resource \ - _concourse_fly_resource_type - -(( $+functions[_concourse_fly] )) || -_concourse_fly() { - - local context state state_descr line ret=1 - typeset -A opt_args - - _arguments -C \ - '(- : *)'{-h,--help}'[display help information]' \ - '(- : *)'{-v,--version}'[print the version of Fly and exit]' \ - {-t,--target=}'[concourse target name]: :_concourse_fly_targets' \ - --verbose'[print 
API requests and responses]' \ - --print-table-headers'[print table headers even for redirected output]' \ - '(-): :->command' \ - '(-)*:: :->arguments' \ - && ret=0 - - case $state in - (command) - _concourse_fly_commands - ;; - (arguments) - curcontext=${curcontext%:*:*}:concourse-fly-$words[1]: - if (( $+functions[_concourse_fly_${words[1]}_args] )); then - _concourse_fly_target=${(v)opt_args[(i)-t|--target]} - _concourse_fly_${words[1]}_args && ret=0 - else - _message "unknown command ${words[1]}" && ret=1 - fi - ;; - esac - - return ret -} - -(( $+functions[_concourse_fly_commands] )) || -_concourse_fly_commands() { - local commands=( - {ab,abort-build}":abort a build" - {bs,builds}":list builds data" - {cr,check-resource}":check a resource" - {crt,check-resource-type}":check a resource-type" - {cl,checklist}":print a Checkfile of the given pipeline" - {ctc,clear-task-cache}":clears cache from a task container" - {cs,containers}":print the active containers" - {c,curl}":curl the api" - {dtg,delete-target}":delete target" - {dp,destroy-pipeline}":destroy a pipeline" - {dt,destroy-team}":destroy a team and delete all of its data" - {etg,edit-target}":edit a target" - {e,execute}":execute a one-off build using local bits" - {ep,expose-pipeline}":make a pipeline publicly viewable" - {fp,format-pipeline}":format a pipeline config" - {gp,get-pipeline}":get a pipeline's current configuration" - {gt,get-team}":show team configuration" - "help:print help message" - {hp,hide-pipeline}":hide a pipeline from the public" - {i,intercept,hijack}":execute a command in a container" - {js,jobs}":list the jobs in the pipelines" - {lw,land-worker}":land a worker" - {l,login}":authenticate with the target" - {o,logout}":release authentication with the target" - {op,order-pipelines}":orders pipelines" - {pj,pause-job}":pause a job" - {pp,pause-pipeline}":pause a pipeline" - {ps,pipelines}":list the configured pipelines" - {pw,prune-worker}":prune a stalled, landing, landed, or retiring worker" - {rp,rename-pipeline}":rename a pipeline" - {rt,rename-team}":rename a team" - {rvs,resource-versions}":list the versions of a resource" - {rs,resources}":list the resources in the pipeline" - {sp,set-pipeline}":create or update a pipeline's configuration" - {st,set-team}":create or modify a team to have the given credentials" - "status:login status" - {s,sync}":download and replace the current fly from the target" - {ts,targets}":list saved targets" - {t,teams}":list the configured teams" - {tj,trigger-job}":start a job in a pipeline" - {uj,unpause-job}":unpause a job" - {up,unpause-pipeline}":un-pause a pipeline" - "userinfo:user information" - {vp,validate-pipeline}":validate a pipeline config" - {vs,volumes}":list the active volumes" - {w,watch}":stream a build's output" - {ws,workers}":list the registered workers" - ) - _describe -t commands commands commands -} - -(( $+functions[_concourse_fly_ab_args] )) || -_concourse_fly_ab_args() { - _concourse_fly_abort-build_args -} - -(( $+functions[_concourse_fly_bs_args] )) || -_concourse_fly_bs_args() { - _concourse_fly_builds_args -} - -(( $+functions[_concourse_fly_cl_args] )) || -_concourse_fly_cl_args() { - _concourse_fly_checklist_args -} - -(( $+functions[_concourse_fly_cr_args] )) || -_concourse_fly_cr_args() { - _concourse_fly_check-resource_args -} - -(( $+functions[_concourse_fly_crt_args] )) || -_concourse_fly_crt_args() { - _concourse_fly_check-resource-type_args -} - -(( $+functions[_concourse_fly_ctc_args] )) || -_concourse_fly_ctc_args() { - 
_concourse_fly_clear-task-cache_args -} - -(( $+functions[_concourse_fly_cs_args] )) || -_concourse_fly_cs_args() { - _concourse_fly_containers_args -} - -(( $+functions[_concourse_fly_c_args] )) || -_concourse_fly_c_args() { - _concourse_fly_curl_args -} - -(( $+functions[_concourse_fly_dtg_args] )) || -_concourse_fly_dtg_args() { - _concourse_fly_delete-target_args -} - -(( $+functions[_concourse_fly_dp_args] )) || -_concourse_fly_dp_args() { - _concourse_fly_destroy-pipeline_args -} - -(( $+functions[_concourse_fly_dt_args] )) || -_concourse_fly_dt_args() { - _concourse_fly_destroy-team_args -} - -(( $+functions[_concourse_fly_etg_args] )) || -_concourse_fly_etg_args() { - _concourse_fly_edit-target_args -} - -(( $+functions[_concourse_fly_e_args] )) || -_concourse_fly_e_args() { - _concourse_fly_execute_args -} - -(( $+functions[_concourse_fly_ep_args] )) || -_concourse_fly_ep_args() { - _concourse_fly_expose-pipeline_args -} - -(( $+functions[_concourse_fly_fp_args] )) || -_concourse_fly_fp_args() { - _concourse_fly_format-pipeline_args -} - -(( $+functions[_concourse_fly_gp_args] )) || -_concourse_fly_gp_args() { - _concourse_fly_get-pipeline_args -} - -(( $+functions[_concourse_fly_gt_args] )) || -_concourse_fly_gt_args() { - _concourse_fly_get-team_args -} - -(( $+functions[_concourse_fly_hp_args] )) || -_concourse_fly_hp_args() { - _concourse_fly_hide-pipeline_args -} - -(( $+functions[_concourse_fly_hijack_args] )) || -_concourse_fly_hijack_args() { - _concourse_fly_intercept_args -} - -(( $+functions[_concourse_fly_i_args] )) || -_concourse_fly_i_args() { - _concourse_fly_intercept_args -} - -(( $+functions[_concourse_fly_js_args] )) || -_concourse_fly_js_args() { - _concourse_fly_jobs_args -} - -(( $+functions[_concourse_fly_lw_args] )) || -_concourse_fly_lw_args() { - _concourse_fly_land-worker_args -} - -(( $+functions[_concourse_fly_l_args] )) || -_concourse_fly_l_args() { - _concourse_fly_login_args -} - -(( $+functions[_concourse_fly_o_args] )) || -_concourse_fly_o_args() { - _concourse_fly_logout_args -} - -(( $+functions[_concourse_fly_op_args] )) || -_concourse_fly_op_args() { - _concourse_fly_order-pipelines_args -} - -(( $+functions[_concourse_fly_pj_args] )) || -_concourse_fly_pj_args() { - _concourse_fly_pause-job_args -} - -(( $+functions[_concourse_fly_pp_args] )) || -_concourse_fly_pp_args() { - _concourse_fly_pause-pipeline_args -} - -(( $+functions[_concourse_fly_ps_args] )) || -_concourse_fly_ps_args() { - _concourse_fly_pipelines_args -} - -(( $+functions[_concourse_fly_pw_args] )) || -_concourse_fly_pw_args() { - _concourse_fly_prune-worker_args -} - -(( $+functions[_concourse_fly_rp_args] )) || -_concourse_fly_rp_args() { - _concourse_fly_rename-pipeline_args -} - -(( $+functions[_concourse_fly_rt_args] )) || -_concourse_fly_rt_args() { - _concourse_fly_rename-team_args -} - -(( $+functions[_concourse_fly_rs_args] )) || -_concourse_fly_rs_args() { - _concourse_fly_resources_args -} - -(( $+functions[_concourse_fly_rvs_args] )) || -_concourse_fly_rvs_args() { - _concourse_fly_resource-versions_args -} - -(( $+functions[_concourse_fly_sp_args] )) || -_concourse_fly_sp_args() { - _concourse_fly_set-pipeline_args -} - -(( $+functions[_concourse_fly_st_args] )) || -_concourse_fly_st_args() { - _concourse_fly_set-team_args -} - -(( $+functions[_concourse_fly_s_args] )) || -_concourse_fly_s_args() { - _concourse_fly_sync_args -} - -(( $+functions[_concourse_fly_ts_args] )) || -_concourse_fly_ts_args() { - _concourse_fly_targets_args -} - -(( 
$+functions[_concourse_fly_t_args] )) || -_concourse_fly_t_args() { - _concourse_fly_teams_args -} - -(( $+functions[_concourse_fly_tj_args] )) || -_concourse_fly_tj_args() { - _concourse_fly_trigger-job_args -} - -(( $+functions[_concourse_fly_uj_args] )) || -_concourse_fly_uj_args() { - _concourse_fly_unpause-job_args -} - -(( $+functions[_concourse_fly_up_args] )) || -_concourse_fly_up_args() { - _concourse_fly_unpause-pipeline_args -} - -(( $+functions[_concourse_fly_vp_args] )) || -_concourse_fly_vp_args() { - _concourse_fly_validate-pipeline_args -} - -(( $+functions[_concourse_fly_vs_args] )) || -_concourse_fly_vs_args() { - _concourse_fly_volumes_args -} - -(( $+functions[_concourse_fly_w_args] )) || -_concourse_fly_w_args() { - _concourse_fly_watch_args -} - -(( $+functions[_concourse_fly_ws_args] )) || -_concourse_fly_ws_args() { - _concourse_fly_workers_args -} - -(( $+functions[_concourse_fly_help_args] )) || -_concourse_fly_help_args() { - _arguments -C \ - '(- : *)'{-h,--help}'[display help information]' -} - -(( $+functions[_concourse_fly_status_args] )) || -_concourse_fly_status_args() { - _arguments -C \ - '(- : *)'{-h,--help}'[display help information]' -} - -(( $+functions[_concourse_fly_userinfo_args] )) || -_concourse_fly_userinfo_args() { - _arguments -C \ - '(- : *)'{-h,--help}'[display help information]' \ - '--json[print command result as JSON]' -} - -(( $+functions[_concourse_fly_abort-build_args] )) || -_concourse_fly_abort-build_args() { - _arguments -C \ - '(- : *)'{-h,--help}'[display help information]' \ - '(-j --job)'{-j,--job=}'[name of a job to cancel]: :_concourse_fly_pipeline_slash_jobs' \ - '(-b --build)'{-b,--build=}'[job build number to cancel, or build id]: :_concourse_fly_builds' -} - -(( $+functions[_concourse_fly_builds_args] )) || -_concourse_fly_builds_args() { - _arguments -C \ - '(- : *)'{-h,--help}'[display help information]' \ - '(-a --all-teams)'{-a,--all-teams}'[show builds for the all teams that user has access to]' \ - '(-c --count)'{-c,--count=}'[number of builds you want to limit the return to]: :number' \ - '--current-team[show builds for the currently targeted team]' \ - '(-j --job -p --pipeline)'{-j,--job=}'[name of a job to get builds for]: :_concourse_fly_pipeline_slash_jobs' \ - '--json[print command result as JSON]' \ - '(-j --job -p --pipeline)'{-p,--pipeline=}'[name of a pipeline to get builds for]: :_concourse_fly_pipelines' \ - '--since=[start of the range to filter builds]: :_concourse_fly_dates' \ - '--until=[end of the range to filter builds]: :_concourse_fly_dates' -} - -(( $+functions[_concourse_fly_checklist_args] )) || -_concourse_fly_checklist_args() { - _arguments -C \ - '(- : *)'{-h,--help}'[display help information]' \ - '(-p --pipeline)'{-p,--pipeline=}'[the pipeline from which to generate the Checkfile]: :_concourse_fly_pipelines' -} - -(( $+functions[_concourse_fly_check-resource_args] )) || -_concourse_fly_check-resource_args() { - - local context state state_descr line ret=1 - typeset -A opt_args - - _arguments -C \ - '(- : *)'{-h,--help}'[display help information]' \ - '(-r --resource)'{-r,--resource=}'[name of a resource to check]: :_concourse_fly_pipeline_slash_resources' \ - '(-f --from)'{-f,--from=}'[version of the resource to check from]: :->version' \ - && ret=0 - - case $state in - (version) - _concourse_fly_resource=${(v)opt_args[(i)-r|--resource]} - _concourse_fly_pipeline_resource_versions && ret=0 - ;; - esac - - return ret -} - -(( $+functions[_concourse_fly_check-resource-type_args] )) || 
-_concourse_fly_check-resource-type_args() { - - local context state state_descr line ret=1 - typeset -A opt_args - - _arguments -C \ - '(- : *)'{-h,--help}'[display help information]' \ - '(-r --resource-type)'{-r,--resource-type=}'[name of a resource type to check]: :_concourse_fly_pipeline_slash_resource_types' \ - '(-f --from)'{-f,--from=}'[version of the resource type to check from]: :->version' \ - && ret=0 - - case $state in - (version) - _concourse_fly_resource_type=${(v)opt_args[(i)-r|--resource-type]} - _concourse_fly_pipeline_resource_type_versions && ret=0 - ;; - esac - - return ret -} - -(( $+functions[_concourse_fly_clear-task-cache_args] )) || -_concourse_fly_clear-task-cache_args() { - _arguments -C \ - '(- : *)'{-h,--help}'[display help information]' \ - '(-j --job)'{-j,--job=}'[name of a job to cancel]: :_concourse_fly_pipeline_slash_jobs' \ - '(-s --step)'{-s,--step=}'[step name to clear cache from]:task step' \ - '(-c --cache-path)'{-c,--cache-path=}'[cache directory to clear out]: :_files -/' \ - '(-n --non-interactive)'{-n,--non-interactive=}'[destroy the task cache(s) without confirmation]' -} - -(( $+functions[_concourse_fly_containers_args] )) || -_concourse_fly_containers_args() { - _arguments -C \ - '(- : *)'{-h,--help}'[display help information]' \ - '--json[print command result as JSON]' -} - -(( $+functions[_concourse_fly_curl_args] )) || -_concourse_fly_curl_args() { - _arguments -C \ - '(- : *)'{-h,--help}'[display help information]' \ - '--print-and-exit[print curl command and exit]' -} - -(( $+functions[_concourse_fly_delete-target_args] )) || -_concourse_fly_delete-target_args() { - _arguments -C \ - '(- : *)'{-h,--help}'[display help information]' \ - '(-a --all)'{-a,--all}'[delete all targets]' -} - -(( $+functions[_concourse_fly_destroy-pipeline_args] )) || -_concourse_fly_destroy-pipeline_args() { - _arguments -C \ - '(- : *)'{-h,--help}'[display help information]' \ - '(-p --pipeline)'{-p,--pipeline=}'[the pipeline to destroy]: :_concourse_fly_pipelines' \ - '(-n --non-interactive)'{-n,--non-interactive}'[destroy the pipeline without confirmation]' -} - -(( $+functions[_concourse_fly_destroy-team_args] )) || -_concourse_fly_destroy-team_args() { - _arguments -C \ - '(- : *)'{-h,--help}'[display help information]' \ - '(-n --team-name)'{-n,--team-name=}'[the team to delete]: :_concourse_fly_teams' \ - '(-n --non-interactive)'{-n,--non-interactive}'[force apply configuration]' -} - -(( $+functions[_concourse_fly_edit-target_args] )) || -_concourse_fly_edit-target_args() { - _arguments -C \ - '(- : *)'{-h,--help}'[display help information]' \ - '--target-name=[update target name]: :_concourse_fly_targets' \ - '(-u --concourse-url)'{-u,--concourse-url=}'[update concourse URL]: :_urls' \ - '(-n --team-name)'{-n,--team-name=}'[update concourse URL]: :_concourse_fly_teams' -} - -(( $+functions[_concourse_fly_execute_args] )) || -_concourse_fly_execute_args() { - - local context state state_descr line ret=1 - typeset -A opt_args - - _arguments -C \ - '(- : *)'{-h,--help}'[display help information]' \ - '(-c --config)'{-c,--config=}'[the task config to execute]: :_concourse_config_files' \ - '(-p --privileged)'{-p,--privileged}'[run the task with full privileges]' \ - '--include-ignored[disregard .gitignore entries and uploads everything]' \ - '*'{-i,--input=}'[an input to provide to the task]: :->input' \ - '*'{-m,--input-mapping=}'[map a resource to a different name as task input]: :->input-mapping' \ - '(-j --inputs-from)'{-j,--inputs-from=}'[a job to base 
the inputs on]: :_concourse_fly_pipeline_slash_jobs' \ - '*'{-o,--output=}'[an output to fetch from the task]: :->output' \ - '--image=[image resource for the one-off build]: :_concourse_fly_images' \ - '*--tag=[a tag for a specific environment]: :_concourse_fly_tags' \ - '*'{-v,--var=}'[specify a string value to set for a variable in the pipeline]: :->var' \ - '*'{-y,--yaml-var=}'[specify a YAML value to set for a variable in the pipeline]: :->var' \ - '(-l --load-vars-from)'{-l,--load-vars-from=}'[variable flag that can be used for filling in template values in configuration from a YAML file]: :_files' \ - && ret=0 - - _concourse_fly_pipeline_config=${(v)opt_args[(i)-c|--config]} - - case $state in - (input-mapping) - # TODO complete --input-mapping - _message 'input mapping' - ;; - (input) - _concourse_fly_input_equal_paths && ret=0 - ;; - (output) - _concourse_fly_output_equal_paths && ret=0 - ;; - (var) - _concourse_fly_var_equal_values && ret=0 - ;; - esac - - return ret -} - -(( $+functions[_concourse_fly_expose-pipeline_args] )) || -_concourse_fly_expose-pipeline_args() { - _arguments -C \ - '(- : *)'{-h,--help}'[display help information]' \ - '(-p --pipeline)'{-p,--pipeline=}'[pipeline to expose]: :_concourse_fly_pipelines' -} - -(( $+functions[_concourse_fly_format-pipeline_args] )) || -_concourse_fly_format-pipeline_args() { - _arguments -C \ - '(- : *)'{-h,--help}'[display help information]' \ - '(-c --config)'{-c,--config=}'[pipeline configuration file]: :_concourse_config_files' \ - '(-w --write)'{-w,--write}'[do not print to stdout, overwrite the file in place]' -} - -(( $+functions[_concourse_fly_get-pipeline_args] )) || -_concourse_fly_get-pipeline_args() { - _arguments -C \ - '(- : *)'{-h,--help}'[display help information]' \ - '(-p --pipeline)'{-p,--pipeline=}'[get configuration of this pipeline]: :_concourse_fly_pipelines' \ - '(-j --json)'{-j,--json}'[print config as json instead of yaml]' -} - -(( $+functions[_concourse_fly_get-team_args] )) || -_concourse_fly_get-team_args() { - _arguments -C \ - '(- : *)'{-h,--help}'[display help information]' \ - '(-n --team)'{-n,--team=}'[get configuration of this team]: :_concourse_fly_teams' \ - '(-j --json)'{-j,--json}'[print config as json instead of yaml]' -} - -(( $+functions[_concourse_fly_hide-pipeline_args] )) || -_concourse_fly_hide-pipeline_args() { - _arguments -C \ - '(- : *)'{-h,--help}'[display help information]' \ - '(-p --pipeline)'{-p,--pipeline=}'[pipeline to hide]: :_concourse_fly_pipelines' -} - -(( $+functions[_concourse_fly_intercept_args] )) || -_concourse_fly_intercept_args() { - # TODO complete --handle - # TODO complete --check - # TODO complete --step - # TODO complete --step-type - _arguments -C \ - '(- : *)'{-h,--help}'[display help information]' \ - '(-j --job --handle -c --check -u --url)'{-j,--job=}'[name of a job to hijack]: :_concourse_fly_pipeline_slash_jobs' \ - '(-j --job --handle -c --check -u --url)--handle=[handle id of a job to hijack]:job handle' \ - '(-j --job --handle -c --check -u --url)'{-c,--check=}'[name of a resource'\''s checking container to hijack]:name' \ - '(-j --job --handle -c --check -u --url)'{-u,--url=}'[URL for the build, job, or check container to hijack]: :_urls' \ - '(-b --build)'{-b,--build=}'[build number within the job, or global build ID]: :_concourse_fly_builds' \ - '(-s --step)'{-s,--step=}'[name of step to hijack]:step' \ - '--step-type=[type of step to hijack]:step type' \ - '(-a --attempt)'{-a,--attempt=}'[attempt number of step to hijack]: :_values -s, 
"number" 1 2 3 4 5 6 7 8 9' \ - '(-):command name: _command_names -e' \ - '*::arguments:_normal' -} - -(( $+functions[_concourse_fly_jobs_args] )) || -_concourse_fly_jobs_args() { - _arguments -C \ - '(- : *)'{-h,--help}'[display help information]' \ - '(-p --pipeline)'{-p,--pipeline=}'[get jobs in this pipeline]: :_concourse_fly_pipelines' \ - '--json[print command result as JSON]' -} - -(( $+functions[_concourse_fly_land-worker_args] )) || -_concourse_fly_land-worker_args() { - _arguments -C \ - '(- : *)'{-h,--help}'[display help information]' \ - '(-w --worker)'{-w,--worker=}'[worker to land]: :_concourse_fly_workers' -} - -(( $+functions[_concourse_fly_login_args] )) || -_concourse_fly_login_args() { - _arguments -C \ - '(- : *)'{-h,--help}'[display help information]' \ - '(-c --concourse-url)'{-c,--concourse-url=}'[concourse URL to authenticate with]: :_urls' \ - '(-k --insecure)'{-k,--insecure}'[skip verification of the endpoint'\''s SSL certificate]' \ - '(-u --username)'{-u,--username=}'[username for basic auth]: :_users' \ - '(-p --password)'{-p,--password=}'[password for basic auth]:password' \ - '(-n --team-name)'{-n,--team-name=}'[team to authenticate with]: :_concourse_fly_teams' \ - '--ca-cert=[path to Concourse PEM-encoded CA certificate file]: :_files -g "*.pem"' \ - '(-b --open-browser)'{-b,--open-browser}'[open browser to the auth endpoint]' -} - -(( $+functions[_concourse_fly_logout_args] )) || -_concourse_fly_logout_args() { - _arguments -C \ - '(- : *)'{-h,--help}'[display help information]' \ - '(-a --all)'{-a,--all}'[logout of all targets]' -} - -(( $+functions[_concourse_fly_order-pipelines_args] )) || -_concourse_fly_order-pipelines_args() { - _arguments -C \ - '(- : *)'{-h,--help}'[display help information]' \ - '(-p --pipeline)'{-p,--pipeline=}'[name of pipeline to order]: :_concourse_fly_pipelines' -} - -(( $+functions[_concourse_fly_pause-job_args] )) || -_concourse_fly_pause-job_args() { - _arguments -C \ - '(- : *)'{-h,--help}'[display help information]' \ - '(-j --job)'{-j,--job=}'[name of a job to pause]: :_concourse_fly_pipeline_slash_jobs' -} - -(( $+functions[_concourse_fly_pause-pipeline_args] )) || -_concourse_fly_pause-pipeline_args() { - _arguments -C \ - '(- : *)'{-h,--help}'[display help information]' \ - '(-p --pipeline)'{-p,--pipeline=}'[pipeline to pause]: :_concourse_fly_pipelines' -} - -(( $+functions[_concourse_fly_pipelines_args] )) || -_concourse_fly_pipelines_args() { - _arguments -C \ - '(- : *)'{-h,--help}'[display help information]' \ - '(-a --all)'{-a,--all}'[show all pipelines]' \ - '--json[print command result as JSON]' -} - -(( $+functions[_concourse_fly_prune-worker_args] )) || -_concourse_fly_prune-worker_args() { - _arguments -C \ - '(- : *)'{-h,--help}'[display help information]' \ - '(-w --worker)'{-w,--worker=}'[worker to prune]: :_concourse_fly_workers' \ - '(-a --all-stalled)'{-a,--all-stalled}'[prune all stalled workers]' -} - -(( $+functions[_concourse_fly_rename-pipeline_args] )) || -_concourse_fly_rename-pipeline_args() { - _arguments -C \ - '(- : *)'{-h,--help}'[display help information]' \ - '(-o --old-name)'{-o,--old-name=}'[pipeline to rename]: :_concourse_fly_pipelines' \ - '(-n --new-name)'{-n,--new-name=}'[name to set as pipeline name]: :_concourse_fly_pipelines' -} - -(( $+functions[_concourse_fly_rename-team_args] )) || -_concourse_fly_rename-team_args() { - _arguments -C \ - '(- : *)'{-h,--help}'[display help information]' \ - '(-o --old-name)'{-o,--old-name=}'[current team name]: :_concourse_fly_teams' \ - 
'(-n --new-name)'{-n,--new-name=}'[new team name]: :_concourse_fly_teams' -} - -(( $+functions[_concourse_fly_resources_args] )) || -_concourse_fly_resources_args() { - _arguments -C \ - '(- : *)'{-h,--help}'[display help information]' \ - '(-p --pipeline)'{-p,--pipeline=}'[get resources in this pipeline]: :_concourse_fly_pipelines' \ - '--json[print command result as JSON]' -} - -(( $+functions[_concourse_fly_resource-versions_args] )) || -_concourse_fly_resource-versions_args() { - _arguments -C \ - '(- : *)'{-h,--help}'[display help information]' \ - '(-c --count)'{-c,--count=}'[number of builds you want to limit the return to]:number' \ - '(-r --resource)'{-r,--resource=}'[name of a resource to get versions for]: :_concourse_fly_pipeline_slash_resources' \ - '--json[print command result as JSON]' -} - -(( $+functions[_concourse_fly_set-pipeline_args] )) || -_concourse_fly_set-pipeline_args() { - - local context state state_descr line ret=1 - typeset -A opt_args - - _arguments -C \ - '(- : *)'{-h,--help}'[display help information]' \ - '(-n --non-interactive)'{-n,--non-interactive}'[skips interactions, uses default values]' \ - '--no-color[disable color output]' \ - '--check-creds[validate credential variables against credential manager]' \ - '(-p --pipeline)'{-p,--pipeline=}'[pipeline to configure]: :_concourse_fly_pipelines' \ - '(-c --config)'{-c,--config=}'[pipeline configuration file]: :_concourse_config_files' \ - '*'{-v,--var=}'[specify a string value to set for a variable in the pipeline]: :->var' \ - '*'{-y,--yaml-var=}'[specify a YAML value to set for a variable in the pipeline]: :->var' \ - '(-l --load-vars-from)'{-l,--load-vars-from=}'[variable flag that can be used for filling in template values in configuration from a YAML file]: :_files' \ - && ret=0 - - _concourse_fly_pipeline_config=${(v)opt_args[(i)-c|--config]} - - case $state in - (var) - _concourse_fly_var_equal_values && ret=0 - ;; - esac - - return ret -} - -(( $+functions[_concourse_fly_set-team_args] )) || -_concourse_fly_set-team_args() { - _arguments -C \ - '(- : *)'{-h,--help}'[display help information]' \ - '(-n --team-name)'{-n,--team-name=}'[the team to create or modify]: :_concourse_fly_teams' \ - '--non-interactive[force apply configuration]' \ - '*--local-user=[list of whitelisted local concourse users]: :_users' \ - '(-c --config)'{-c,--config=}'[configuration file for specifying team params]: :_concourse_config_files' \ - '*--bitbucket-cloud-user=[list of whitelisted Bitbucket Cloud users]:user name' \ - '*--bitbucket-cloud-team=[list of whitelisted Bitbucket Cloud teams]:team name' \ - '*--cf-user=[list of whitelisted CloudFoundry users]:user name' \ - '*--cf-org=[list of whitelisted CloudFoundry orgs]:org name' \ - '*--cf-space=[list of whitelisted CloudFoundry spaces]:space name' \ - '*--github-user=[list of whitelisted GitHub users]:user name' \ - '*--github-org=[list of whitelisted GitHub orgs]:org name' \ - '*--github-team=[list of whitelisted GitHub teams]:team name' \ - '*--gitlab-user=[list of whitelisted GitLab users]:user name' \ - '*--gitlab-group=[list of whitelisted GitLab groups]:group name' \ - '*--ldap-user=[list of whitelisted LDAP users]:user name' \ - '*--ldap-group=[list of whitelisted LDAP groups]:group name' \ - '*--oauth-user=[list of whitelisted OAuth2 users]:user name' \ - '*--oauth-group=[list of whitelisted OAuth2 groups]:group name' \ - '*--oidc-user=[list of whitelisted OIDC users]:user name' \ - '*--oidc-group=[list of whitelisted OIDC groups]:group name' -} - -(( 
$+functions[_concourse_fly_sync_args] )) || -_concourse_fly_sync_args() { - _arguments -C \ - '(- : *)'{-h,--help}'[display help information]' \ - '(-f --force)'{-f,--force}'[sync even if versions already match]' -} - -(( $+functions[_concourse_fly_targets_args] )) || -_concourse_fly_targets_args() { - _arguments -C \ - '(- : *)'{-h,--help}'[display help information]' -} - -(( $+functions[_concourse_fly_teams_args] )) || -_concourse_fly_teams_args() { - _arguments -C \ - '(- : *)'{-h,--help}'[display help information]' \ - '(-d --details)'{-d,--details}'[print authentication configuration]' \ - '--json[print command result as JSON]' -} - -(( $+functions[_concourse_fly_trigger-job_args] )) || -_concourse_fly_trigger-job_args() { - _arguments -C \ - '(- : *)'{-h,--help}'[display help information]' \ - '(-j --job)'{-j,--job=}'[name of a job to trigger]: :_concourse_fly_pipeline_slash_jobs' \ - '(-w --watch)'{-w,--watch}'[start watching the build output]' -} - -(( $+functions[_concourse_fly_unpause-job_args] )) || -_concourse_fly_unpause-job_args() { - _arguments -C \ - '(- : *)'{-h,--help}'[display help information]' \ - '(-j --job)'{-j,--job=}'[name of a job to unpause]: :_concourse_fly_pipeline_slash_jobs' -} - -(( $+functions[_concourse_fly_unpause-pipeline_args] )) || -_concourse_fly_unpause-pipeline_args() { - _arguments -C \ - '(- : *)'{-h,--help}'[display help information]' \ - '(-p --pipeline)'{-p,--pipeline=}'[pipeline to unpause]: :_concourse_fly_pipelines' -} - -(( $+functions[_concourse_fly_validate-pipeline_args] )) || -_concourse_fly_validate-pipeline_args() { - - local context state state_descr line ret=1 - typeset -A opt_args - - _arguments -C \ - '(- : *)'{-h,--help}'[display help information]' \ - '(-c --config)'{-c,--config=}'[pipeline configuration file]: :_concourse_config_files' \ - '(-s --strict)'{-s,--strict}'[fail on warnings]' \ - '(-o --output)'{-o,--output}'[output templated pipeline to stdout]' \ - '*'{-v,--var=}'[specify a string value to set for a variable in the pipeline]: :->var' \ - '*'{-y,--yaml-var=}'[specify a YAML value to set for a variable in the pipeline]: :->var' \ - '(-l --load-vars-from)'{-l,--load-vars-from=}'[variable flag that can be used for filling in template values in configuration from a YAML file]: :_files' \ - && ret=0 - - _concourse_fly_pipeline_config=${(v)opt_args[(i)-c|--config]} - - case $state in - (var) - _concourse_fly_var_equal_values && ret=0 - ;; - esac - - return ret -} - -(( $+functions[_concourse_fly_volumes_args] )) || -_concourse_fly_volumes_args() { - _arguments -C \ - '(- : *)'{-h,--help}'[display help information]' \ - '(-d --details)'{-d,--details}'[print additional information for each volume]' \ - '--json[print command result as JSON]' -} - -(( $+functions[_concourse_fly_watch_args] )) || -_concourse_fly_watch_args() { - _arguments -C \ - '(- : *)'{-h,--help}'[display help information]' \ - '(-j --job)'{-j,--job=}'[watches builds of the given job]: :_concourse_fly_pipeline_slash_jobs' \ - '(-b --build)'{-b,--build=}'[watches a specific build]: :_concourse_fly_builds' \ - '(-t --timestamps)'{-t,--timestamps}'[print with local timestamp]' -} - -(( $+functions[_concourse_fly_workers_args] )) || -_concourse_fly_workers_args() { - _arguments -C \ - '(- : *)'{-h,--help}'[display help information]' \ - '(-d --details)'{-d,--details}'[print additional information for each worker]' \ - '--json[print command result as JSON]' -} - -(( $+functions[_concourse_fly_targets] )) || -_concourse_fly_targets() { - local 
targets=($(_call_program targets $service targets | awk '{print $1}')) - _describe -t targets 'target' targets $@ || _message 'no target found' -} - -(( $+functions[_concourse_fly_teams] )) || -_concourse_fly_teams() { - if [[ -n ${_concourse_fly_target} ]]; then - local teams=($(_call_program teams $service -t ${_concourse_fly_target} teams | awk '{print $1}')) - _describe -t teams 'team' teams $@ || _message 'no team found' - else - _message 'team' - fi -} - -(( $+functions[_concourse_fly_pipelines] )) || -_concourse_fly_pipelines() { - if [[ -n ${_concourse_fly_target} ]]; then - local pipelines=($(_call_program pipelines $service -t ${_concourse_fly_target} pipelines | awk '{print $1}')) - _describe -t pipelines 'pipeline' pipelines $@ || _message 'no pipeline found' - else - _message 'pipeline' - fi -} - -(( $+functions[_concourse_fly_pipeline_jobs] )) || -_concourse_fly_pipeline_jobs() { - if [[ -n ${_concourse_fly_target} ]] && [[ -n ${_concourse_fly_pipeline} ]]; then - local jobs=($(_call_program jobs $service -t ${_concourse_fly_target} jobs -p ${_concourse_fly_pipeline} 2>&1 | awk '{print $1}')) - _describe -t jobs "${_concourse_fly_pipeline} job" jobs $@ || _message 'no job found' - else - _message 'job' - fi -} - -(( $+functions[_concourse_fly_pipeline_resources] )) || -_concourse_fly_pipeline_resources() { - if [[ -n ${_concourse_fly_target} ]] && [[ -n ${_concourse_fly_pipeline} ]]; then - local resources=($(_call_program resources $service -t ${_concourse_fly_target} resources -p ${_concourse_fly_pipeline} | awk '{print $1}')) - _describe -t resources 'resource' resources $@ || _message 'no resource found' - else - _message 'resource' - fi -} - -(( $+functions[_concourse_fly_pipeline_resource_types] )) || -_concourse_fly_pipeline_resource_types() { - if [[ -n ${_concourse_fly_target} ]] && [[ -n ${_concourse_fly_pipeline} ]]; then - local resource_types=($(_call_program resource-types $service -t ${_concourse_fly_target} resources -p ${_concourse_fly_pipeline} | awk '{print $2}')) - _describe -t resource-types 'resource type' resource_types $@ || _message 'no resource type found' - else - _message 'resource type' - fi -} - -(( $+functions[_concourse_fly_workers] )) || -_concourse_fly_workers() { - if [[ -n ${_concourse_fly_target} ]]; then - local workers=($(_call_program workers $service -t ${_concourse_fly_target} workers | awk '{print $1}')) - _describe -t workers 'worker' workers $@ || _message 'no worker found' - else - _message 'worker' - fi -} - -(( $+functions[_concourse_fly_builds] )) || -_concourse_fly_builds() { - if [[ -n ${_concourse_fly_target} ]]; then - local builds=($(_call_program builds $service -t ${_concourse_fly_target} builds | awk '{print $1}')) - _describe -t builds 'build' builds $@ || _message 'no build found' - else - _message 'build' - fi -} - -(( $+functions[_concourse_fly_pipeline_resource_versions] )) || -_concourse_fly_pipeline_resource_versions() { - if [[ -n ${_concourse_fly_target} ]] && [[ -n ${_concourse_fly_resource} ]]; then - local resource_versions=($(_call_program resource-versions $service -t ${_concourse_fly_target} resource-versions -r ${_concourse_fly_resource} | awk '{print $2}')) - _describe -t resource-versions 'resource version' resource_versions $@ || _message 'no version found' - else - _message 'resource version' - fi -} - -(( $+functions[_concourse_fly_pipeline_config_vars] )) || -_concourse_fly_pipeline_config_vars() { - if [[ -n ${_concourse_fly_pipeline_config} ]]; then - local variables=($(grep -Po '(?<=\(\()[^\)]+' 
${_concourse_fly_pipeline_config})) - _describe -t variables 'variables' variables $@ || _message 'no variable found' - else - _message 'variable' - fi -} - -(( $+functions[_concourse_fly_pipeline_config_inputs] )) || -_concourse_fly_pipeline_config_inputs() { - if [[ -n ${_concourse_fly_pipeline_config} ]]; then - if (( $+commands[yq] )); then - local inputs=($(yq -r '.. | .inputs? | arrays | .[].name' ${_concourse_fly_pipeline_config} 2>&1)) - _describe -t inputs 'input' inputs $@ || _message 'no input found' - else - _message 'install yq (https://github.com/kislyuk/yq) to get completion of inputs' - fi - else - _message 'input' - fi -} - -(( $+functions[_concourse_fly_pipeline_config_outputs] )) || -_concourse_fly_pipeline_config_outputs() { - if [[ -n ${_concourse_fly_pipeline_config} ]]; then - if (( $+commands[yq] )); then - local outputs=($(yq -r '.. | .outputs? | arrays | .[].name' ${_concourse_fly_pipeline_config})) - _describe -t outputs 'output' outputs $@ || _message 'no output found' - else - _message 'install yq (https://github.com/kislyuk/yq) to get completion of outputs' - fi - else - _message 'output' - fi -} - -(( $+functions[_concourse_fly_pipeline_resource_type_versions] )) || -_concourse_fly_pipeline_resource_type_versions() { - # seems like there is no command for listing resource type versions... - _message 'resource type version' -} - -(( $+functions[_concourse_fly_tags] )) || -_concourse_fly_tags() { - # seems like there is no command for listing tags... - _message 'tag' -} - -(( $+functions[_concourse_fly_dates] )) || -_concourse_fly_dates() { - # _dates completer does not seem to work on zsh 5.7.1 - _dates -f '%Y-%m-%d %H:%M:%S' -} - -(( $+functions[_concourse_fly_pipeline_slash_jobs] )) || -_concourse_fly_pipeline_slash_jobs() { - local ret=1 - if compset -P '*/'; then - _concourse_fly_pipeline="${${IPREFIX%/}##*=}" - _concourse_fly_pipeline_jobs && ret=0 - else - _concourse_fly_pipelines -qS/ && ret=0 - fi - return ret -} - -(( $+functions[_concourse_fly_pipeline_slash_resources] )) || -_concourse_fly_pipeline_slash_resources() { - local ret=1 - if compset -P '*/'; then - _concourse_fly_pipeline="${${IPREFIX%/}##*=}" - _concourse_fly_pipeline_resources && ret=0 - else - _concourse_fly_pipelines -qS/ && ret=0 - fi - return ret -} - -(( $+functions[_concourse_fly_pipeline_slash_resource_types] )) || -_concourse_fly_pipeline_slash_resource_types() { - local ret=1 - if compset -P '*/'; then - _concourse_fly_pipeline="${${IPREFIX%/}##*=}" - _concourse_fly_pipeline_resource_types && ret=0 - else - _concourse_fly_pipelines -qS/ && ret=0 - fi - return ret -} - -(( $+functions[_concourse_fly_var_equal_values] )) || -_concourse_fly_var_equal_values() { - local ret=1 - if compset -P '*='; then - _message 'value' && ret=0 - else - _concourse_fly_pipeline_config_vars -qS= && ret=0 - fi - return ret -} - -(( $+functions[_concourse_fly_input_equal_paths] )) || -_concourse_fly_input_equal_paths() { - local ret=1 - if compset -P '*='; then - _files && ret=0 - else - _concourse_fly_pipeline_config_inputs -qS= && ret=0 - fi - return ret -} - -(( $+functions[_concourse_fly_output_equal_paths] )) || -_concourse_fly_output_equal_paths() { - local ret=1 - if compset -P '*='; then - _files && ret=0 - else - _concourse_fly_pipeline_config_outputs -qS= && ret=0 - fi - return ret -} - (( $+functions[_concourse_server] )) || _concourse_server() { @@ -1504,7 +469,6 @@ _concourse_log_levels() { case $service in concourse) _concourse_server "$@" ;; - fly) _concourse_fly "$@" ;; *) _message 
"unknown command ${service}" && ret=1 ;; esac diff --git a/src/_console b/src/_console deleted file mode 100644 index 3ed8dd7..0000000 --- a/src/_console +++ /dev/null @@ -1,72 +0,0 @@ -#compdef console -# ------------------------------------------------------------------------------ -# Copyright (c) 2011 Github zsh-users - https://github.com/zsh-users -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are met: -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in the -# documentation and/or other materials provided with the distribution. -# * Neither the name of the zsh-users nor the -# names of its contributors may be used to endorse or promote products -# derived from this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -# DISCLAIMED. IN NO EVENT SHALL ZSH-USERS BE LIABLE FOR ANY -# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -# ------------------------------------------------------------------------------ -# Description -# ----------- -# -# Completion script for symfony console (https://github.com/symfony/Console). -# -# ------------------------------------------------------------------------------ -# Authors -# ------- -# -# * loranger (https://github.com/loranger) -# * Yohan Tamb«² (https://github.com/Cronos87) -# -# ------------------------------------------------------------------------------ - -_console_find_console() { - echo "php $(find . -maxdepth 2 -mindepth 1 -name 'console' -type f | head -n 1)" -} - -_console_get_command_list() { - IFS=" " - `_console_find_console` --no-ansi | \ - sed "1,/Available commands/d" | \ - awk '/ [a-z]+/ { print $0 }' | \ - sed -E 's/^[ ]+//g' | \ - sed -E 's/[:]+/\\:/g' | \ - sed -E 's/[ ]{2,}/\:/g' -} - -_console() { - local -a commands - IFS=$'\n' - commands=(`_console_get_command_list`) - _describe 'commands' commands -} - -compdef _console php console -compdef _console console - -# Local Variables: -# mode: Shell-Script -# sh-indentation: 2 -# indent-tabs-mode: nil -# sh-basic-offset: 2 -# End: -# vim: ft=zsh sw=2 ts=2 et diff --git a/src/_flutter b/src/_flutter index 3ea5408..6b721b8 100644 --- a/src/_flutter +++ b/src/_flutter @@ -1073,6 +1073,7 @@ _flutter_pub_token() { _arguments \ '(- *)'{-h,--help}'[Print this usage information]' \ && ret=0 + ;; esac ;; esac diff --git a/src/_git-pulls b/src/_git-pulls index 8dfc117..9778590 100644 --- a/src/_git-pulls +++ b/src/_git-pulls @@ -3,7 +3,7 @@ # Description # ----------- # -# Completion script for git-pulls 0.3.1 (https://git-pulls.com/schacon/git-pulls). 
+# Completion script for git-pulls 0.3.1 (https://github.com/schacon/git-pulls). # # ------------------------------------------------------------------------------ # Authors diff --git a/src/_google b/src/_google deleted file mode 100644 index 4a5e567..0000000 --- a/src/_google +++ /dev/null @@ -1,94 +0,0 @@ -#compdef google -# ------------------------------------------------------------------------------ -# Copyright (c) 2016 Github zsh-users - https://github.com/zsh-users -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are met: -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in the -# documentation and/or other materials provided with the distribution. -# * Neither the name of the zsh-users nor the -# names of its contributors may be used to endorse or promote products -# derived from this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -# DISCLAIMED. IN NO EVENT SHALL ZSH-USERS BE LIABLE FOR ANY -# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
-# ------------------------------------------------------------------------------ -# Description -# ----------- -# -# Completion script for googlecl (https://code.google.com/p/googlecl/) -# -# Source: https://raw.github.com/dadrc/zsh-cfg/master/completions/_google -# -# ------------------------------------------------------------------------------ -# Authors -# ------- -# -# * dadrc (https://github.com/dadrc) -# * Ben O'Hara (https://github.com/benohara) -# -# ------------------------------------------------------------------------------ - -_google() { - # init variables - local curcontext="$curcontext" state line - typeset -A opt_args - - # init state - _arguments \ - '1: :->service'\ - '2: :->task' - - case $state in - service) - _arguments '1:service:(picasa blogger youtube docs contacts calendar finance)' - ;; - *) - case $words[2] in - picasa) - compadd "$@" get create list list-albums tag post delete - ;; - blogger) - compadd "$@" post tag list delete - ;; - youtube) - compadd "$@" post tag list delete - ;; - docs) - compadd "$@" edit delete list upload get - ;; - contacts) - compadd "$@" list list-groups add add-groups delete-groups delete - ;; - calendar) - compadd "$@" add list today delete - ;; - finance) - compadd "$@" list-txn delete-pos create-pos delete-txn create create-txn list list-pos delete - ;; - *) - esac - esac -} - -_google "$@" - -# Local Variables: -# mode: Shell-Script -# sh-indentation: 2 -# indent-tabs-mode: nil -# sh-basic-offset: 2 -# End: -# vim: ft=zsh sw=2 ts=2 et diff --git a/src/_jmeter b/src/_jmeter index 4d7b01d..6a60aee 100644 --- a/src/_jmeter +++ b/src/_jmeter @@ -5,8 +5,6 @@ # # Completion script for JMeter (https://jmeter.apache.org/). # -# Status: incomplete -# # ------------------------------------------------------------------------------ # Authors # ------- @@ -15,7 +13,6 @@ # # ------------------------------------------------------------------------------ - _arguments \ '(- 1 *)--?[print command line options and exit]' \ '(- 1 *)'{-h,--help}'[print usage information and exit]' \ @@ -29,7 +26,6 @@ _arguments \ {-j,--jmeterlogfile}'[jmeter run file]: :_files -g "*.log"' \ {-n,--nongui}'[run JMeter in nongui mode]' \ {-s,--server}'[run the JMeter server]' \ - {-E,--proxyScheme}'[set a proxy scheme to use for the proxy server]:scheme' \ {-H,--proxyHost}'[set a proxy server for JMeter to use]: :_hosts' \ {-P,--proxyPort}'[set proxy server port for JMeter to use]:number' \ {-N,--nonProxyHosts}'[set non proxy host list]:host' \ @@ -45,7 +41,7 @@ _arguments \ {-R,--remotestart}'[start these remote servers (overrides remote_hosts)]:remote servers list' \ {-d,--homedir}'[the JMeter home directory to use]: :_files -/' \ {-X,--remoteexit}'[exit the remote servers at end of test (non-GUI)]' \ - {-g,--removeonly}'[generate report dashboard only, from a test results file]: :_files' \ + {-g,--reportonly}'[generate report dashboard only, from a test results file]: :_files' \ {-e,--reportatendofloadtests}'[generate report dashboard after load test]' \ {-o,--reportoutputfolder}'[output folder for report dashboard]: :_files -/' diff --git a/src/_knife b/src/_knife deleted file mode 100644 index 959668e..0000000 --- a/src/_knife +++ /dev/null @@ -1,324 +0,0 @@ -#compdef knife -# ------------------------------------------------------------------------------ -# Copyright (c) 2009-2015 Robby Russell and contributors (see -# https://github.com/ohmyzsh/ohmyzsh/graphs/contributors) -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of 
this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -# THE SOFTWARE. -# ------------------------------------------------------------------------------ -# Description -# ----------- -# -# Completion script for Chef's knife (https://www.chef.io/). -# -# Source: https://github.com/ohmyzsh/ohmyzsh/blob/22fed4f/plugins/knife/_knife -# -# ------------------------------------------------------------------------------ -# Authors -# ------- -# -# * Frank Louwers (https://github.com/franklouwers) -# * Mark Cornick (https://github.com/markcornick) -# -# ------------------------------------------------------------------------------ - - -# You can override the path to knife.rb and your cookbooks by setting -# KNIFE_CONF_PATH=/path/to/my/.chef/knife.rb -# KNIFE_COOKBOOK_PATH=/path/to/my/chef/cookbooks -# If you want your local cookbooks path to be calculated relative to where you are then -# set the below option -# KNIFE_RELATIVE_PATH=true -# Read around where these are used for more detail. 
- -# knife has a very special syntax, some example calls are: -# knife status -# knife cookbook list -# knife role show ROLENAME -# knife data bag show DATABAGNAME -# knife role show ROLENAME --attribute ATTRIBUTENAME -# knife cookbook show COOKBOOKNAME COOKBOOKVERSION recipes - -# The -Q switch in compadd allow for completions of things like "data bag" without having to go through two rounds of completion and avoids zsh inserting a \ for escaping spaces -_knife() { - # These flags should be available everywhere according to man knife - local -a knife_general_flags; knife_general_flags=(--help --server-url --key --config --editor --format --log_level --logfile --no-editor --user --print-after --version --yes) - - local curcontext="$curcontext" state line - typeset -A opt_args - local -a cloudproviders; cloudproviders=(bluebox ec2 rackspace slicehost terremark) - _arguments \ - '1: :->knifecmd' \ - '2: :->knifesubcmd' \ - '3: :->knifesubcmd2' \ - '4: :->knifesubcmd3' \ - '5: :->knifesubcmd4' \ - '6: :->knifesubcmd5' - - case $state in - knifecmd) - compadd -Q "$@" bootstrap client configure cookbook "cookbook site" "data bag" diff exec environment user index node recipe role search solo ssh status upload vault windows "$cloudproviders[@]" - ;; - knifesubcmd) - case $words[2] in - bluebox|ec2|rackspace|slicehost|terremark) - compadd "$@" server images - ;; - client) - compadd -Q "$@" "bulk delete" list create show delete edit reregister - ;; - configure) - compadd "$@" client - ;; - cookbook) - compadd -Q "$@" test list create download delete "metadata from" show "bulk delete" metadata upload - ;; - diff) - _arguments '*:file or directory:_files -g "*"' - ;; - environment) - compadd -Q "$@" list create delete edit show "from file" - ;; - user) - compadd -Q "$@" create delete edit list reregister show - ;; - node) - compadd -Q "$@" "from file" create show edit delete list run_list "bulk delete" - ;; - recipe) - compadd "$@" list - ;; - role) - compadd -Q "$@" "bulk delete" create delete edit "from file" list show - ;; - solo) - compadd "$@" bootstrap clean cook init prepare - ;; - upload) - _arguments '*:file or directory:_files -g "*"' - ;; - vault) - compadd -Q "$@" create decrypt delete edit remove "rotate all keys" "rotate keys" show update - ;; - windows) - compadd "$@" bootstrap - ;; - *) - _arguments '2:Subsubcommands:($(_knife_options1))' - ;; - esac - ;; - knifesubcmd2) - case $words[3] in - server) - compadd "$@" list create delete - ;; - images) - compadd "$@" list - ;; - site) - compadd "$@" vendor show share search download list unshare - ;; - show|delete|edit|update) - _arguments '3:Subsubcommands:($(_knife_list_remote "$words[2]"))' - ;; - upload|test) - _arguments '3:Subsubcommands:($(_call_function - "_knife_list_local_$words[2]s") --all)' - ;; - list) - compadd -a "$@" knife_general_flags - ;; - bag) - compadd -Q "$@" show edit list "from file" create delete - ;; - bootstrap|clean|cook|prepare) - compadd "$@" nodes/*.json(N:t:r) - ;; - init) - compadd "$@" ./*(/N:t) - ;; - *) - _arguments '3:Subsubcommands:($(_knife_options2))' - ;; - esac - ;; - knifesubcmd3) - case "$words[3]" in - show) - case "$words[2]" in - cookbook) - versioncomp=1 - _arguments '4:Cookbookversions:($(_knife_cookbook_versions) latest)' - ;; - node|client|role) - compadd "$@" --attribute - ;; - vault) - _arguments '4:Keys:($(_knife_list_remote "$words[2]" "$words[4]"))' - ;; - esac - ;; - update) - case "$words[2]" in - vault) - _arguments '4:Keys:($(_knife_list_remote "$words[2]" "$words[4]"))' - ;; - 
esac - ;; - esac - case "$words[4]" in - show|edit) - _arguments '4:Subsubsubcommands:($(_knife_list_remote "$words[2]" "$words[3]"))' - ;; - file) - case "$words[2]" in - environment) - _arguments '*:files:_path_files -g "*.(rb|json)" -W "$(_knife_root)/environments"' - ;; - node) - _arguments '*:files:_path_files -g "*.(rb|json)" -W "$(_knife_root)/nodes"' - ;; - role) - _arguments '*:files:_path_files -g "*.(rb|json)" -W "$(_knife_root)/roles"' - ;; - *) - _arguments '*:Subsubcommands:($(_knife_options3))' - ;; - esac - ;; - list) - compadd -a "$@" knife_general_flags - ;; - *) - _arguments '*:Subsubcommands:($(_knife_options3))' - ;; - esac - ;; - knifesubcmd4) - if ((versioncomp > 0)); then - compadd "$@" attributes definitions files libraries providers recipes resources templates - else - case "$words[5]" in - file) - _arguments '*:directory:_path_files -/ -W "$(_knife_root)/data_bags" -qS \ ' - ;; - *) _arguments '*:Subsubcommands:($(_knife_options2))' ;; - esac - fi - ;; - knifesubcmd5) - case "$words[5]" in - file) - _arguments '*:files:_path_files -g "*.json" -W "$(_knife_root)/data_bags/$words[6]"' - ;; - *) - _arguments '*:Subsubcommands:($(_knife_options3))' - ;; - esac - ;; - esac -} - -# Helper functions to provide the argument completion for several depths of commands -_knife_options1() { - local line - for line in $(_call_program commands knife "$words[2]" --help | grep -v "^knife"); do - echo $line | grep "\-\-" - done -} - -_knife_options2() { - local line - for line in $(_call_program commands knife "$words[2]" "$words[3]" --help | grep -v "^knife"); do - echo $line | grep "\-\-" - done -} - -_knife_options3() { - local line - for line in $(_call_program commands knife "$words[2]" "$words[3]" "$words[4]" --help | grep -v "^knife"); do - echo $line | grep "\-\-" - done -} - -# get a list of objects of type x on the server -_knife_list_remote() { - case "$*" in - role|client|node|cookbook|"cookbook site"|"data bag"|environment|user|vault) - _call_program commands knife "$@" list --format json \ - | grep \" \ - | awk '{print $1}' \ - | awk -F"," '{print $1}' \ - | awk -F"\"" '{print $2}' - ;; - "vault "*) - _call_program commands knife vault show "$2" --format json \ - | grep \" \ - | awk '{print $1}' \ - | awk -F"," '{print $1}' \ - | awk -F"\"" '{print $2}' - ;; - esac -} - -# The chef_x_local functions use the knife config to find the paths of relevant objects x to be uploaded to the server -_knife_list_local_cookbooks() { - if [ $KNIFE_RELATIVE_PATH ]; then - local cookbook_path="$(_knife_root)/cookbooks" - else - local knife_rb="${KNIFE_CONF_PATH:-${HOME}/.chef/knife.rb}" - if [ -f ./.chef/knife.rb ]; then - knife_rb="./.chef/knife.rb" - fi - local cookbook_path="${KNIFE_COOKBOOK_PATH:-$(grep -s cookbook_path "$knife_rb" | awk 'BEGIN {FS = "[" }; {print $2}' | sed 's/\,//g' | sed "s/'//g" | sed 's/\(.*\)]/\1/' | cut -d '"' -f2)}" - fi - - local i - for i in $cookbook_path; do - ls $i - done -} - -# This function extracts the available cookbook versions on the chef server -_knife_cookbook_versions() { - _call_program commands knife cookbook show "$words[4]" \ - | grep -v "$words[4]" \ - | grep -v -E '\]|\[|\{|\}' \ - | sed 's/ //g' \ - | sed 's/"//g' -} - -# Searches up from current directory to find the closest folder that has a .chef folder -# Useful for the knife upload/from file commands -_knife_root() { - local directory="$PWD" - while [ $directory != '/' ]; do - test -e "$directory/.chef" && echo "$directory" && return - directory="${directory:h}" - done -} - 
-_knife "$@"
-
-# Local Variables:
-# mode: Shell-Script
-# sh-indentation: 2
-# indent-tabs-mode: nil
-# sh-basic-offset: 2
-# End:
-# vim: ft=zsh sw=2 ts=2 et
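The header comment of the removed _knife script points at a zsh detail that is easy to miss: compadd -Q lets multi-word candidates such as "data bag" complete in a single round, without zsh escaping the space as data\ bag. A minimal sketch of that behaviour, using an invented _demo_multiword/demo-multiword pair rather than anything from the script above:

  _demo_multiword() {
    local -a subcmds=("data bag" "cookbook site" status)
    # -Q disables quoting, so the inserted word stays `data bag` instead of `data\ bag`.
    # -a means the remaining argument names an array of candidates.
    compadd -Q -a subcmds
  }
  compdef _demo_multiword demo-multiword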
diff --git a/src/_l3build b/src/_l3build
index 29d2d60..eb311e7 100644
--- a/src/_l3build
+++ b/src/_l3build
@@ -3,7 +3,7 @@
 # Description
 # -----------
 #
-# Completion script for l3build (https://github.com/latex3/l3build/).
+# Completion script for l3build 2025-09-03 (https://github.com/latex3/l3build/).
 # Modified from rejected https://github.com/latex3/l3build/pull/267
 #
 # ------------------------------------------------------------------------------
@@ -14,49 +14,60 @@
 #
 # ------------------------------------------------------------------------------
-__l3build() {
+_l3build_targets() {
   local targets=(
-    'check:Run\ all\ automated\ tests'
-    'clean:Clean\ out\ directory\ tree'
-    'ctan:Create\ CTAN-ready\ archive'
-    'doc:Typesets\ all\ documentation\ files'
-    'install:Installs\ files\ into\ the\ local\ texmf\ tree'
-    'manifest:Creates\ a\ manifest\ file'
-    'save:Saves\ test\ validation\ log'
-    'tag:Updates\ release\ tags\ in\ files'
-    'uninstall:Uninstalls\ files\ from\ the\ local\ texmf\ tree'
-    'unpack:Unpacks\ the\ source\ files\ into\ the\ build\ tree'
-    'upload:Send\ archive\ to\ CTAN\ for\ public\ release'
+    'check:Run all automated tests'
+    'clean:Clean out directory tree'
+    'ctan:Create CTAN-ready archive'
+    'doc:Typesets all documentation files'
+    'install:Installs files into the local texmf tree'
+    'manifest:Creates a manifest file'
+    'save:Saves test validation log'
+    'tag:Updates release tags in files'
+    'uninstall:Uninstalls files from the local texmf tree'
+    'unpack:Unpacks the source files into the build tree'
+    'upload:Send archive to CTAN for public release'
   )
-  local options=(
-    {--config,-c}'[Sets the config(s) used for running tests]':lua_file:'_files -g "*.lua"'
-    --date'[Sets the date to insert into sources]':
-    --debug'[Runs target in debug mode]'
-    --dirty'[Skip cleaning up the test area]'
-    --dry-run'[Dry run for install or upload]'
-    --email'[Email address of CTAN uploader]':
-    {--engine,-e}'[Sets the engine(s) to use for running test]':engine:'(pdftex xetex luatex ptex uptex)'
-    --epoch'[Sets the epoch for tests and typesetting]':
-    {--file,-F}'[Take the upload announcement from the given file]':file:_files
-    --first'[Name of first test to run]':
-    {--force,-f}'[Force tests to run if engine is not set up]'
-    --full'[Install all files]'
-    {--halt-on-error,-H}'[Stops running tests after the first failure]'
-    '(- : *)'{--help,-h}'[Print this message and exit]'
-    --last'[Name of last test to run]':
-    {--message,-m}'[Text for upload announcement message]':
-    {--quiet,-q}'[Suppresses TeX output when unpacking]'
-    --rerun'[Skip setup\: simply rerun tests]'
-    --show-log-on-error'[Show the full log of the failure with '\''halt-on-error'\'']'
-    {--show-saves,-S}'[Show the invocation to update failing .tlg files]'
-    --shuffle'[Shuffle order of tests]'
-    --texmfhome'[Location of user texmf tree]':
-    '(- : *)'--version'[Print version information and exit]'
-  )
-  _arguments -s -S $options "1:target:(($targets))"
+
+  _describe -t targets 'target' targets "$@"
+}
+
+_l3build() {
+  local ret=1
+  local -a engines=(pdftex xetex luatex ptex uptex)
+
+  _arguments -s -S \
+    '(-c --config)'\*{-c,--config}'[Set the config(s) used for running tests]:lua_file:_files -g "*.(lua|tlg)"' \
+    '(-d --date)'{-d,--date}'[Set the date to insert into sources]:date' \
+    '--debug[Run target in debug mode]' \
+    '--dev[Use the development LaTeX format]' \
+    '--dirty[Skip cleaning up the test area]' \
+    '--dry-run[Dry run for install or upload]' \
+    '--email[Email address of CTAN uploader]:email' \
+    '(-e --engine)'{--engine,-e}'[Set the engine(s) to use for running test]:engine:(($engines))' \
+    '--epoch[Set the epoch for tests and typesetting]:epoch' \
+    '(-F --file)'{--file,-F}'[Take the upload announcement from the given file]:file:_files' \
+    '--first[Name of first test to run]:name' \
+    '--full[Install all files]' \
+    '(-H --halt-on-error)'{-H,--halt-on-error}'[Stop running tests after the first failure]' \
+    '(- : *)'{-h,--help}'[Print this message and exit]' \
+    '--last[Name of last test to run]:name' \
+    '(-m --message)'{-m,--message}'[Text for upload announcement message]:message' \
+    '(-q --quiet)'{-q,--quiet}'[Suppress TeX output when unpacking]' \
+    '--rerun[Skip setup\: simply rerun tests]' \
+    '--show-log-on-error[Show the full log of the failure with "halt-on-error"]' \
+    '(-S --show-saves)'{-S,--show-saves}'[Show the invocation to update failing .tlg files]' \
+    '--shuffle[Shuffle order of tests]' \
+    '(-s --stdengine)'{-s,--stdengine}'[Run tests only with the standard engine]' \
+    '--texmfhome[Location of user texmf tree]:location:_files' \
+    '(- : *)--version[Print version information and exit]' \
+    "1:target:_l3build_targets" \
+    && ret=0
+
+  return ret
 }
 
-__l3build
+_l3build "$@"
 
 # Local Variables:
 # mode: Shell-Script
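The rewritten _l3build above follows a common pattern: move the target list into a small helper that calls _describe, then reference that helper from the positional spec in _arguments. A stripped-down sketch of the same pattern, with a made-up _mytool/mytool command standing in for l3build:

  _mytool_targets() {
    local -a targets=(
      'build:Compile the project'
      'clean:Remove generated files'
    )
    # -t tags the matches so zstyle tag-order etc. can address them.
    _describe -t targets 'target' targets "$@"
  }

  _mytool() {
    _arguments -s -S \
      '(- : *)--help[Print help and exit]' \
      '1:target:_mytool_targets'
  }
  compdef _mytool mytool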
diff --git a/src/_pgsql_utils b/src/_pgsql_utils
deleted file mode 100644
index b6e8b59..0000000
--- a/src/_pgsql_utils
+++ /dev/null
@@ -1,590 +0,0 @@
-#compdef psql pg_dump pg_dumpall pg_restore createdb dropdb vacuumdb createuser dropuser initdb
-# ------------------------------------------------------------------------------
-# Copyright (c) 2016 Github zsh-users - https://github.com/zsh-users, Dominic Mitchell, Johann 'Myrkraverk' Oskarsson, Daniel Serodio, J Smith
-# All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are met:
-#   * Redistributions of source code must retain the above copyright
-#     notice, this list of conditions and the following disclaimer.
-#   * Redistributions in binary form must reproduce the above copyright
-#     notice, this list of conditions and the following disclaimer in the
-#     documentation and/or other materials provided with the distribution.
-#   * Neither the name of the zsh-users nor the
-#     names of its contributors may be used to endorse or promote products
-#     derived from this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
-# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-# DISCLAIMED. IN NO EVENT SHALL ZSH-USERS BE LIABLE FOR ANY
-# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
-# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
-# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-# ------------------------------------------------------------------------------
-# Description
-# -----------
-#
-# Completion script for PostgreSQL utils (https://www.postgresql.org/).
-#
-# Source: https://www.zsh.org/mla/users/2004/msg01006.html
-#
-# ------------------------------------------------------------------------------
-# Authors
-# -------
-#
-# * Dominic Mitchell <dom+zsh@happygiraffe.net>
-#
-# * Johann 'Myrkraverk' Oskarsson <johann@2ndquadrant.com>
-#
-# * Daniel Serodio <dserodio@gmail.com> pg_dumpall completion
-#
-# * J Smith <dark.panda@gmail.com> various completion additions
-#
-# ------------------------------------------------------------------------------
-
-
-_pgsql_get_identity () {
-  _pgsql_user=${(v)opt_args[(i)-U|--username]}
-  _pgsql_port=${(v)opt_args[(i)-p|--port]}
-  _pgsql_host=${(v)opt_args[(i)-h|--host]}
-
-  _pgsql_params=(
-    ${_pgsql_user:+"--username=$_pgsql_user"}
-    ${_pgsql_port:+"--port=$_pgsql_port"}
-    ${_pgsql_host:+"--host=$_pgsql_host"}
-  )
-}
-
-# Postgres allows specifying the path to the directory containing the
-# socket as well as a hostname.
-_pgsql_host_or_dir() {
-  _alternative \
-    'hosts:host:_hosts' \
-    'directories:directory:_directories'
-}
-
-# This creates a port completion list based on socket files on the
-# local computer. By default, Postgres puts them in /tmp/ but Debian
-# changed that to /var/run/postgresql/ in their packages.
-_pgsql_ports() {
-  compadd "$@" - /tmp/.s.PGSQL.<->(N:e) /var/run/postgresql/.s.PGSQL.<->(N:e)
-}
-
-_pgsql_users () {
-  local _pgsql_user _pgsql_port _pgsql_host _pgsql_params
-  local _pgsql_user_sql
-  _pgsql_get_identity
-
-  # We use _pgsql_port and _pgsql_host directly here instead of
-  # _pgsql_params so as to not pick up a partially completed
-  # username.
-  _pgsql_params=(
-    ${_pgsql_port:+"--port=$_pgsql_port"}
-    ${_pgsql_host:+"--host=$_pgsql_host"}
-  )
-
-  _pgsql_user_sql='select r.rolname from pg_catalog.pg_roles r where r.rolcanlogin = true'
-
-  compadd "$@" - $( psql $_pgsql_params[@] -XAqt -c $_pgsql_user_sql template1 2>/dev/null )
-
-}
-
-_pgsql_tables () {
-  local _pgsql_user _pgsql_port _pgsql_host _pgsql_params
-  _pgsql_get_identity
-
-  # Need to pull out the database name from the existing arguments.
-  # This is going to vary between commands. Thankfully, it's only
-  # used by pg_dump, which always has the dbname in arg1. If it's
-  # not present it defaults to ${PGDATABASE:-$LOGNAME}, which
-  # matches (I think) the PostgreSQL behaviour.
-
-  local db
-  db=${line[1]:-${PGDATABASE:-$LOGNAME}}
-
-  ## Instead of parsing the output of the psql \ commands, we look
-  ## up the tables ourselves. The following query has been tested
-  ## with Postgres 8.2 - 9.2.
-
-  local _pgsql_table_sql
-  _pgsql_table_sql="select n.nspname || '.' || c.relname \
-    from pg_catalog.pg_class c \
-    left join pg_catalog.pg_namespace n on n.oid = c.relnamespace \
-    where c.relkind in ('r', '') \
-    and n.nspname <> 'pg_catalog' \
-    and n.nspname <> 'information_schema' \
-    and n.nspname !~ '^pg_toast' \
-    and pg_catalog.pg_table_is_visible( c.oid ) \
-    order by 1"
-
-  compadd "$@" - \
-    $( psql $_pgsql_params[@] -AXqt -c $_pgsql_table_sql $db 2>/dev/null )
-}
-
-_pgsql_schemas () {
-  local _pgsql_user _pgsql_port _pgsql_host _pgsql_params
-  _pgsql_get_identity
-
-  local db
-  db=${line[1]:-${PGDATABASE:-$LOGNAME}}
-
-  local _pgsql_schema_sql="select n.nspname \
-    from pg_catalog.pg_namespace n \
-    where n.nspname !~ '^pg_' \
-    and n.nspname <> 'information_schema' \
-    order by 1;"
-
-  compadd "$@" - \
-    $( psql $_pgsql_params[@] -AXqt -c $_pgsql_schema_sql $db 2>/dev/null )
-}
-
-_pgsql_databases () {
-  local _pgsql_user _pgsql_port _pgsql_host _pgsql_params
-  _pgsql_get_identity
-
-  local _pgsql_services _pgsql_service_files
-  _pgsql_service_files=(~/.pg_service.conf)
-  (( $+commands[pg_config] )) && _pgsql_service_files+=$(pg_config --sysconfdir)/pg_service.conf
-
-  _pgsql_services=$( grep -h '^\[.*\]' $_pgsql_service_files 2>/dev/null \
-    | sed -e 's/^\[/service=/' -e 's/\].*$//' )
-
-  local _pgsql_db_sql
-  _pgsql_db_sql="select d.datname from pg_catalog.pg_database d \
-    where d.datname <> 'template0'"
-
-  compadd "$@" - \
-    ${(f)_pgsql_services} \
-    $( psql $_pgsql_params[@] -AXtq -c $_pgsql_db_sql template1 2>/dev/null )
-}
-
-_pgsql_encodings () {
-  local _pgsql_user
-  _pgsql_get_identity
-
-  local _pgsql_db_sql
-  _pgsql_db_sql="select pg_encoding_to_char(i) from generate_series(0,100) i;"
-
-  compadd "$@" - $( psql $_pgsql_params[@] -AXtq -c $_pgsql_db_sql template1 )
-}
-
-
-##
-## The actual completion code for the commands
-##
-
-_psql () {
-  local curcontext="$curcontext" state line expl
-  typeset -A opt_args
-
-  _arguments -C -s "-*" \
-    "$_pgsql_common_opts[@]" \
-    {-c+,--command=}':execute SQL command:' \
-    {-d+,--dbname=}':database to connect to:_pgsql_databases' \
-    {-f+,--file=}':SQL file to read:_files' \
-    {-l,--list}'[list databases]' \
-    {-v+,--set=,--variable=}':set SQL variable:' \
-    {-V,--version}'[output version information, then exit]' \
-    {-X,--no-psqlrc}'[don'\''t read ~/.psqlrc]' \
-    {-1,--single-transaction}'[restore as a single transaction]' \
-    {-\?,--help=}':display help:' \
-    \
-    {-a,--echo-all}'[print commands read]' \
-    {-b,--echo-errors}'[echo failed commands]' \
-    {-e,--echo-queries}'[display queries submitted]' \
-    {-E,--echo-hidden}'[display hidden queries]' \
-    {-L,--log-file=}'[send session log to file]' \
-    {-n,--no-readline}'[disable enhanced command line editing (readline)]' \
-    {-o+,--output=}':query output:_files' \
-    {-q,--quiet}'[non verbose mode]' \
-    {-s,--single-step}'[prompt before each query]' \
-    {-S,--single-line}'[newline sends query]' \
-    \
-    {-A,--no-align}'[unaligned output mode]' \
-    --csv'[CSV (Comma-Separated Values) table output mode]' \
-    {-F+,--field-separator=}':field separator char:' \
-    {-H,--html}'[HTML output]' \
-    {-P+,--pset=}':set psql variable:' \
-    {-R+,--record-separator=}':record separator char:' \
-    {-t,--tuples-only}'[don'\''t display header/footer]' \
-    {-T+,--table-attr=}':HTML table options:' \
-    {-x,--expanded}'[one column per line]' \
-    {-z,--field-separator-zero}'[set field separator for unaligned output to zero byte]' \
-    {-0,--record-separator-zero}'[set record separator for unaligned output to zero byte]' \
-    -u'[prompt for username/password]' \
-    ':PostgreSQL database:_pgsql_databases' \
-    ':PostgreSQL user:_pgsql_users'
-}
-
-_pg_dump () {
-  local curcontext="$curcontext" state line expl
-  typeset -A opt_args
-
-  _arguments -C -s \
-    "$_pgsql_common_opts[@]" \
-    {-f+,--file=}':output file:_files' \
-    {-F+,--format=}':output format:_values "format" "p[plain text]" "t[tar]" "c[custom]"' \
-    {-j,--jobs=}'[use this many parallel jobs to dump]' \
-    {-v,--verbose}'[verbose mode]' \
-    {-V,--version}'[output version information, then exit]' \
-    {-Z+,--compress=}':compression level:_values "level" 9 8 7 6 5 4 3 2 1 0' \
-    --lock-wait-timeout='[fail after waiting TIMEOUT for a table lock]' \
-    --no-sync'[do not wait for changes to be written safely to disk]' \
-    {-\?,--help}'[display help]' \
-    \
-    {-a,--data-only}'[dump only data]' \
-    {-b,--blobs}'[dump blobs as well]' \
-    {-B,--no-blobs}'[exclude large objects in dump]' \
-    {-c,--clean}'[include clean cmds in dump]' \
-    {-C,--create}'[include createdb cmds in dump]' \
-    {-e+,--extension=}'[dump the specified extension(s) only]' \
-    {-E+,--encoding=}':database encoding:_pgsql_encodings' \
-    {-n+,--schema=}':schema to dump:_pgsql_schemas' \
-    {-N+,--exclude-schema=}':schema to NOT dump:_pgsql_schemas' \
-    {-O,--no-owner}'[don'\''t recreate as same owner]' \
-    {-s,--schema-only}'[no data, only schema]' \
-    {-S+,--superuser=}':superuser name:_pgsql_users' \
-    {-t+,--table=}':table to dump:_pgsql_tables' \
-    {-T+,--exclude-table=}':table to NOT dump:_pgsql_tables' \
-    {-x,--no-{acl,privileges}}'[don'\''t dump ACLs]' \
-    --binary-upgrade'[for use by upgrade utilities only]' \
-    {-D,--{attribute,column}-inserts}'[use INSERT (cols) not COPY]' \
-    --disable-dollar-quoting'[disable dollar quoting, use SQL standard quoting]' \
-    --disable-triggers'[disable triggers during data-only restore]' \
-    --enable-row-security'[enable row security (dump only content user has access to)]' \
-    --exclude-table-data='[do NOT dump data for the named table(s)]' \
-    --if-exists'[use IF EXISTS when dropping objects]' \
-    --include-foreign-data='[include data of foreign servers]' \
-    --inserts'[dump data as INSERT commands, rather than COPY]' \
-    --load-via-partition-root'[load partitions via the root table]' \
-    --no-comments'[do not dump comments]' \
-    --no-publications'[do not dump publications]' \
-    --no-security-labels'[do not dump security label assignments]' \
-    --no-subscriptions'[do not dump subscriptions]' \
-    --no-synchronized-snapshots'[do not use synchronized snapshots in parallel jobs]' \
-    --no-tablespaces'[do not dump tablespace assignments]' \
-    --no-toast-compression'[do not dump TOAST compression methods]' \
-    --no-unlogged-table-data'[do not dump unlogged table data]' \
-    --on-conflict-do-nothing'[add ON CONFLICT DO NOTHING to INSERT commands]' \
-    --quote-all-identifiers'[quote all identifiers, even if not key words]' \
-    --rows-per-insert='[number of rows per INSERT]' \
-    --section=':dump named section:_values "section" pre-data data post-data' \
-    --serializable-deferrable'[wait until the dump can run without anomalies]' \
-    --snapshot='[use given snapshot for the dump]' \
-    --strict-names'[require table and/or schema include patterns to match at least one entity each]' \
-    --use-set-session-authorization'[use SET SESSION AUTHORIZATION commands instead of ALTER OWNER]' \
-    \
-    {-i,--ignore-version}'[ignore version mismatch]' \
-    {-o,--oids}'[dump objects identifiers for every table]' \
-    {-R,--no-reconnect}'[don'\''t output connect]' \
-    -X+':option:_values "option" use-set-session-authorization disable-triggers' \
-    ':PostgreSQL database:_pgsql_databases'
-}
-
-_pg_restore () {
-  local curcontext="$curcontext" state line expl
-  typeset -A opt_args
-
-  _arguments -C -s \
-    "$_pgsql_common_opts[@]" \
-    {-d+,--dbname=}':database to connect to:_pgsql_databases' \
-    {-f+,--file=}':output file:_files' \
-    {-F+,--format=}':output format:_values "format" "p[plain text]" "t[tar]" "c[custom]"' \
-    {-l,--list}'[list databases]' \
-    {-v,--verbose}'[verbose mode]' \
-    {-V,--version}'[output version information, then exit]' \
-    {-\?,--help}'[display help]' \
-    \
-    {-a,--data-only}'[dump only data]' \
-    {-c,--clean}'[include clean (drop) cmds before recreating]' \
-    {-C,--create}'[include createdb cmds in dump]' \
-    {-e,--exit-on-error}'[exit on error, default is to continue]' \
-    {-I,--index=}':index name:' \
-    {-j,--jobs=}':use this many parallel jobs to restore:' \
-    {-L,--use-list=}':use table of contents from this file for selecting/ordering output:' \
-    {-n,--schema=}':restore only objects in this schema:' \
-    {-O,--no-owner}'[skip restoration of object ownership]' \
-    {-P,--function=}':restore named function:' \
-    {-s,--schema-only}'[restore only the schema, no data]' \
-    {-S,--superuser=}':superuser user name to use for disabling triggers:' \
-    {-t,--table=}':restore named table:' \
-    {-T,--trigger=}':restore named trigger:' \
-    {-x,--no-privileges}'[skip restoration of access privileges (grant/revoke)]' \
-    {-1,--single-transaction}'[restore as a single transaction]' \
-    --disable-triggers'[disable triggers during data-only restore]' \
-    --enable-row-security'[enable row security]' \
-    --if-exists'[use IF EXISTS when dropping objects]' \
-    --no-comments'[do not restore comments]' \
-    --no-data-for-failed-tables'[do not restore data of tables that could not be created]' \
-    --no-publications'[do not restore publications]' \
-    --no-security-labels'[do not restore security labels]' \
-    --no-subscriptions'[do not restore subscriptions]' \
-    --no-tablespaces'[do not restore tablespace assignments]' \
-    --section=':dump named section:_values "section" pre-data data post-data' \
-    --strict-names'[require table and/or schema include patterns to match at least one entity each]' \
-    --use-set-session-authorization'[use SET SESSION AUTHORIZATION commands instead of ALTER OWNER commands to set ownership]' \
-    \
-    {-b,--blobs}'[include large objects in dump]' \
-    {-B,--no-blobs}'[exclude large objects in dump]' \
-    \
-    "1: :_files"
-}
-
-_pg_dumpall () {
-  local curcontext="$curcontext" state line expl
-  typeset -A opt_args
-
-  _arguments -C -s \
-    "$_pgsql_common_opts[@]" \
-    {-f+,--file=}':output file:_files' \
-    {-v,--verbose}'[verbose mode]' \
-    {-V,--version}'[output version information, then exit]' \
-    --lock-wait-timeout='[fail after waiting TIMEOUT for a table lock]' \
-    {-\?,--help}'[display help]' \
-    \
-    {-a,--data-only}'[dump only data]' \
-    {-c,--clean}'[include clean (drop) cmds before recreating]' \
-    {-E,--encoding=}'[dump the data in encoding]' \
-    {-g,--globals-only}'[dump only global objects, no databases]' \
-    {-O,--no-owner}'[don'\''t recreate as same owner]' \
-    {-r,--roles-only}'[no databases or tablespaces, only roles]' \
-    {-s,--schema-only}'[no data, only schema]' \
-    {-S+,--superuser=}':superuser name:_pgsql_users' \
-    {-t,--tablespaces-only}'[no databases or roles, only tablespaces]' \
-    {-x,--no-privileges}'[don'\''t dump ACLs]' \
-    --binary-upgrade'[for use by upgrade utilities only]' \
-    --column-inserts'[use INSERT with column names not COPY]' \
-    --disable-dollar-quoting'[disable dollar quoting, use SQL standard quoting]' \
-    --disable-triggers'[disable triggers during data-only restore]' \
-    --exclude-database=':exclude databases:_pgsql_databases' \
-    --extra-float-digits='[override default setting for extra_float_digits]' \
-    --if-exists'[use IF EXISTS when dropping objects]' \
-    --inserts'[use INSERT not COPY]' \
-    --load-via-partition-root'[load partitions via the root table]' \
-    --no-comments'[do not dump comments]' \
-    --no-publications'[do not dump publications]' \
-    --no-role-passwords'[do not dump passwords for roles]' \
-    --no-security-labels'[do not dump security label assignments]' \
-    --no-subscriptions'[do not dump subscriptions]' \
-    --no-sync'[do not wait for changes to be written safely to disk]' \
-    --no-tablespaces'[do not dump tablespace assignments]' \
-    --no-toast-compression'[do not dump TOAST compression methods]' \
-    --no-unlogged-table-data'[do not dump unlogged table data]' \
-    --on-conflict-do-nothing'[add ON CONFLICT DO NOTHING to INSERT commands]' \
-    --quote-all-identifiers'[quote all identifiers, even if not key words]' \
-    --rows-per-insert='[number of rows per INSERT]' \
-    --use-set-session-authorization'[use SET SESSION AUTHORIZATION cmds instead of ALTER OWNER]' \
-    {-o,--oids}'[dump objects identifiers for every table]' \
-}
-
-_createdb () {
-  local curcontext="$curcontext" state line expl
-  typeset -A opt_args
-
-  _arguments -C -s \
-    "$_pgsql_common_opts[@]" \
-    {-D+,--tablespace=}'[default tablespace for the database]' \
-    {-e,--echo}'[display SQL queries]' \
-    {-E+,--encoding=}':database encoding:_pgsql_encodings' \
-    {-l+,--locale=}'[locale settings for the database]' \
-    --lc-collate='[LC_COLLATE setting for the database]' \
-    --lc-ctype='[LC_CTYPE setting for the database]' \
-    {-O+,--owner=}':database user to own the new database:_pgsql_users' \
-    {-T+,--template=}':database template:_pgsql_databases' \
-    '--version[output version information, then exit]' \
-    {-\?,--help}'[display help]' \
-    \
-    --maintenance-db=':alternate maintenance database:_pgsql_databases' \
-    {-q,--quiet}'[non verbose mode]' \
-    --location=':database location (unsupported since PostgreSQL 8.0):_directories' \
-    ':PostgreSQL database:' \
-    ':comment:'
-}
-
-_dropdb () {
-  local curcontext="$curcontext" state line expl
-  typeset -A opt_args
-
-  _arguments -C -s \
-    "$_pgsql_common_opts[@]" \
-    {-e,--echo}'[display SQL queries]' \
-    {-f,--force}'[try to terminate other connections before dropping]' \
-    {-i,--interactive}'[confirm before drop]' \
-    {-V,--version}'[output version information, then exit]' \
-    --if-exists'[don'\''t report error if database doesn'\''t exist]' \
-    --maintenance-db=':alternate maintenance database:_pgsql_databases' \
-    {-q,--quiet}'[non verbose mode]' \
-    ':PostgreSQL database:_pgsql_databases'
-}
-
-_vacuumdb () {
-  local curcontext="$curcontext" state line expl
-  typeset -A opt_args
-
-  _arguments -C -s \
-    "$_pgsql_common_opts[@]" \
-    {-a,--all}'[vacuum all databases]' \
-    {-d+,--dbname=}':database to connect to:_pgsql_databases' \
-    --disable-page-skipping'[disable all page-skipping behavior]' \
-    {-e,--echo}'[show the commands being sent to the server]' \
-    {-f,--full}'[do full vacuuming]' \
-    {-F,--freeze}'[freeze row transaction information]' \
-    --force-index-cleanup'[always remove index entries that point to dead tuples]' \
-    {-j,--jobs=}'[use this many concurrent connections to vacuum]' \
-    '--min-mxid-age=[minimum multixact ID age of tables to vacuum]' \
-    '--min-xid-age=[minimum transaction ID age of tables to vacuum]' \
-    --no-index-cleanup'[don'\''t remove index entries that point to dead tuples]' \
-    --no-process-toast'[skip the TOAST table associated with the table to vacuum]' \
-    --no-truncate'[don'\''t truncate empty pages at the end of the table]' \
-    {-P+,--parallel=}'[use this many background workers for vacuum, if available]' \
-    {-q,--quiet}'[do not write any messages]' \
-    '--skip-locked[skip relations that cannot be immediately locked]' \
-    {-t+,--table=}':table to dump:_pgsql_tables' \
-    {-v,--verbose}'[write a lot of output]' \
-    {-V,--version}'[output version information, then exit]' \
-    {-z,--analyze}'[update optimizer hints]' \
-    {-Z,--analyze-only}'[only update optimizer statistics; no vacuum]' \
-    --analyze-in-stages'[only update optimizer statistics, in multiple stages for faster results; no vacuum]' \
-    {-\?,--help}'[display help]' \
-    --maintenance-db='[alternate maintenance database]' \
-    '1:PostgreSQL database:_pgsql_databases'
-}
-
-_createuser () {
-  local curcontext="$curcontext" state line expl
-  typeset -A opt_args
-
-  _arguments -C -s \
-    "$_pgsql_common_opts[@]" \
-    {-c,--connection-limit=}'[connection limit for role (default: no limit)]' \
-    {-d,--createdb}'[role can create new databases]' \
-    {-D,--no-createdb}'[role cannot create databases]' \
-    {-e,--echo}'[display SQL queries]' \
-    {-g,--role=}'[new role will be a member of this role]' \
-    {-i,--inherit}'[role inherits privileges of roles it is a member of (default)]' \
-    {-I,--no-inherit}'[role does not inherit privileges]' \
-    {-l,--login}'[role can login (default)]' \
-    {-L,--no-login}'[role cannot login]' \
-    {-P,--pwprompt}'[assign a password to new role]' \
-    {-r,--createrole}'[role can create new roles]' \
-    {-R,--no-createrole}'[role cannot create roles]' \
-    {-s,--superuser}'[role will be superuser]' \
-    {-S,--no-superuser}'[role will not be superuser]' \
-    --interactive'[prompt for missing role name and attributes rather than using defaults]' \
-    --replication'[role can initiate replication]' \
-    --no-replication'[role cannot initiate replication]' \
-    {-E,--encrypted}'[encrypt stored password]' \
-    {-N,--unencrypted}'[do not encrypt stored password]' \
-    {-\?,--help}'[display help]'
-}
-
-_dropuser () {
-  local curcontext="$curcontext" state line expl
-  typeset -A opt_args
-
-  _arguments -C -s \
-    "$_pgsql_common_opts[@]" \
-    {-e,--echo}'[display SQL queries]' \
-    {-q,--quiet}'[non verbose mode]' \
-    {-i,--interactive}'[prompt before deleting anything, and prompt for role name if not specified]' \
-    {-V,--version}'[output version information, then exit]' \
-    --if-exists'[don'\''t report error if user doesn'\''t exist]' \
-    ':PostgreSQL user:_pgsql_users'
-}
-
-_initdb () {
-  local curcontext="$curcontext" state line expl
-  typeset -A opt_args
-
-  _arguments -C -s \
-    {--auth=,-A+}':default authentication method for local connections:_values "auth methods" $_pgsql_auth_methods[@]' \
-    --auth-host=':default authentication method for local TCP/IP connections:_values "auth methods" $_pgsql_auth_methods[@]' \
-    --auth-local=':default authentication method for local-socket connections:_values "auth methods" $_pgsql_auth_methods[@]' \
-    {-D+,--pgdata=}':location for this database cluster:_files' \
-    {-E+,--encoding=}':set default encoding for new databases:_pgsql_encodings' \
-    {-g,--allow-group-access}'[allow group read/execute on data directory]' \
-    {-k,--data-checksums}':use data page checksums:' \
-    --locale=':set default locale for new databases:' \
-    --lc-collate=':set the default locale for collate:' \
-    --lc-ctype=':set the default locale for ctype:' \
-    --lc-messages=':set the default locale for messages:' \
-    --lc-monetary=':set the default locale for monetary:' \
-    --lc-numeric=':set the default locale for numeric:' \
-    --lc-time=':set the default locale for time:' \
-    --no-locale'[equivalent to --locale=C]' \
-    --pwfile=':read password for the new superuser from file:_files' \
-    {-T+,--text-search-config=}'[default text search configuration]' \
-    {-U+,--username=NAME}':database superuser name:' \
-    {-W,--pwprompt}'[prompt for a password for the new superuser]' \
-    {-X+,--waldir=}':location for the write-ahead log directory:_files' \
-    --xlogdir=':location for the transaction log directory (unsupported since PostgreSQL 10):_files' \
-    --wal-segsize='[size of WAL segments, in megabytes]' \
-    {-d,--debug}'[generate lots of debugging output]' \
-    --discard-caches'[set debug_discard_caches=1]' \
-    -L+':where to find the input files:_files' \
-    {-n,--no-clean}'[do not clean up after errors]' \
-    {-N,--no-sync}':do not wait for changes to be written safely to disk:' \
-    --instructions'[do not print instructions for next steps]' \
-    {-s,--show}'[show internal settings]' \
-    {-S,--sync-only}'[only sync data directory]' \
-    {-V,--version}'[output version information, then exit]' \
-    {-\?,--help}'[display help]' \
-    ':location for this database cluster:_files'
-}
-
-_pgsql_utils () {
-  local _pgsql_common_opts _pgsql_auth_methods
-
-  _pgsql_common_opts=(
-    {-\?,--help}'[display help]'
-    {-h+,--host=}':database host:_pgsql_host_or_dir'
-    {-p+,--port=}':database port number:_pgsql_ports'
-    {-U+,--username=}':connect as user:_pgsql_users'
-    {-W,--password}'[prompt for password]'
-    {-w,--no-password}'[never prompt for password]'
-    --role='[do SET ROLE before restore]'
-  )
-
-  _pgsql_auth_methods=(
-    trust
-    reject
-    md5
-    password
-    gss
-    sspi
-    krb5
-    ident
-    peer
-    ldap
-    radius
-    cert
-    pam
-  )
-
-  case "$service" in
-    psql) _psql "$@" ;;
-    pg_dump) _pg_dump "$@" ;;
-    pg_dumpall) _pg_dumpall "$@" ;;
-    pg_restore) _pg_restore "$@" ;;
-    createdb) _createdb "$@" ;;
-    dropdb) _dropdb "$@" ;;
-    vacuumdb) _vacuumdb "$@" ;;
-    createuser) _createuser "$@" ;;
-    dropuser) _dropuser "$@" ;;
-    initdb) _initdb "$@" ;;
-  esac
-}
-
-_pgsql_utils "$@"
-
-# Local Variables:
-# mode: Shell-Script
-# sh-indentation: 2
-# indent-tabs-mode: nil
-# sh-basic-offset: 2
-# End:
-# vim: ft=zsh sw=2 ts=2 et
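The deleted _pgsql_utils helpers all rely on the same trick: at completion time, run psql with -A/-X/-q/-t so the query result is nothing but bare names, and fall back silently if the server is unreachable. A stand-alone sketch of that pattern (the _mydb_tables helper and its query are illustrative, not part of the removed script):

  _mydb_tables() {
    local -a tables
    # -A unaligned, -X skip psqlrc, -q quiet, -t tuples only: output is one name per line.
    tables=( ${(f)"$(psql -AXqt -c 'select tablename from pg_catalog.pg_tables' 2>/dev/null)"} )
    compadd "$@" -a tables
  }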
@@ -23,7 +23,7 @@
 # Description
 # -----------
 #
-# Completion script for Phing (https://www.phing.info/).
+# Completion script for Phing 3.1.0 (https://www.phing.info/).
 #
 # ------------------------------------------------------------------------------
 # Authors
 # -------
@@ -39,36 +39,53 @@ _phing() {
   typeset -A opt_args
 
   # Follow https://www.phing.info/guide/chunkhtml/sec.commandlineargs.html for more information
-  _arguments \
+  _arguments -C \
     '(-h -help)'{-h,-help}'[display the help screen]' \
     '(-v -version)'{-v,-version}'[print version information and exit]' \
-    '(-l -list)'{-l,-list}'[list all available targets in buildfile (excluding targets that have their hidden attribute set to true)]' \
+    '(-l -list)'{-l,-list}'[list all available targets in buildfile]' \
+    '(-i -init)'{-i,-init}'[generate an initial buildfile]:file:_files' \
     '(-q -quiet)'{-q,-quiet}'[quiet operation, no output at all]' \
+    '(-S -silent)'{-S,-silent}'[print nothing but task outputs and build failures]' \
     '-verbose[verbose, give some more output]' \
     '-debug[output debug information]' \
-    '-logfile [use given file for log]:file:_files' \
-    '-D[set the property to the specified value to be used in the buildfile]' \
-    '-find []:file:_files' \
-    '-buildfile [specify an alternate buildfile name. Default is build.xml]:file:_files' \
-    '-logger [specify an alternate logger. Default is phing.listener.DefaultLogger. Other options include phing.listener.NoBannerLogger, phing.listener.AnsiColorLogger, phing.listener.XmlLogger, phing.listener.TargetLogger and phing.listener.HtmlColorLogger]' \
-    '-propertyfile [load properties from the specified file]:file:_files' \
-    '(-v --version)'{-v,--version}'[show version]' \
+    '(-e -emacs)'{-e,-emacs}'[produce logging information without adornments]' \
+    '-diagnostics[print diagnostics information]' \
+    '(-strict -no-strict)-strict[run build in strict mode]' \
+    '(-strict -no-strict)-no-strict[run build normally]' \
+    '-longtargets[show target descriptions during build]' \
+    '-logfile[use given file for log]:file:_files' \
+    '-logger[the class which is to perform logging]:class' \
+    '*-listener[add an instance of class as a project listener]:class' \
+    '(-f -buildfile)'{-f,-buildfile}'[build file]:file:_files' \
+    '*-D[set the property to the specified value to be used in the buildfile]' \
+    '(-k -keep-going)'{-k,-keep-going}'[execute all targets that do not depend on failed target(s)]' \
+    '-propertyfile[load all properties from the specified file]:file:_files' \
+    '-propertyfileoverride[values in property file override existing values]' \
+    '-find[search for buildfile towards the root of the filesystem and use it]:file:_files' \
+    '-inputhandler[the class to use to handle user input]:class' \
+    '(- *)'{-v,-version}'[show version]' \
     '1: :->targets' \
     '*:: :->args' \
     && ret=0
 
   case $state in
-    targets)
-      local buildfile; buildfile=build.xml
+    (targets)
+      local buildfile=build.xml
+      if (( $+opt_args[-buildfile] )); then
+        buildfile=${opt_args[-buildfile]}
+      elif (( $+opt_args[-f] )); then
+        buildfile=${opt_args[-f]}
+      fi
+
+      if [[ ! -f $buildfile ]]
       then
         ret=0
       else
-        local targets; targets=($(sed -nE "/<target /s/.*name=[\"'](\w+)[\"'].*/\1/p" $buildfile))
+        local -a targets=($(sed -nE "/<target /s/.*name=[\"'](\w+)[\"'].*/\1/p" $buildfile))
         _describe -t 'targets' 'target' targets && ret=0
       fi
       ;;
-    args)
+    (args)
       if [[ CURRENT -eq NORMARG && ${+opt_args[--match]} -eq 0 ]]
       then
         # If the current argument is the first non-option argument
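The new target completion for Phing shown above does two things: it honours a buildfile given earlier on the command line by reading opt_args, and it scrapes target names out of the XML with a single sed expression before handing them to _describe. Roughly the same logic as a stand-alone helper (the _demo_buildfile_targets name is invented; like the original, the pattern only matches single-word target names):

  _demo_buildfile_targets() {
    local buildfile=${opt_args[-buildfile]:-${opt_args[-f]:-build.xml}}
    [[ -f $buildfile ]] || return 1
    local -a targets
    targets=( $(sed -nE "/<target /s/.*name=[\"'](\w+)[\"'].*/\1/p" $buildfile) )
    _describe -t targets 'target' targets
  }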
diff --git a/src/_play b/src/_play
deleted file mode 100644
index 6b35899..0000000
--- a/src/_play
+++ /dev/null
@@ -1,190 +0,0 @@
-#compdef play
-# ------------------------------------------------------------------------------
-# Description
-# -----------
-#
-# Completion script for Play! framework 1.2.2 (https://www.playframework.com/).
-#
-# ------------------------------------------------------------------------------
-# Authors
-# -------
-#
-# * Julien Nicoulaud <julien.nicoulaud@gmail.com>
-# * Mario Fernandez (https://github.com/sirech)
-#
-# ------------------------------------------------------------------------------
-
-
-_play() {
-  local context curcontext="$curcontext" state line
-  typeset -A opt_args
-
-  local ret=1
-
-  _arguments -C \
-    '1: :_play_cmds' \
-    '*::arg:->args' \
-    && ret=0
-
-  case $state in
-    (args)
-      curcontext="${curcontext%:*:*}:play-cmd-$words[1]:"
-      case $line[1] in
-        (build-module|list-modules|lm|check|id)
-          _message 'no more arguments' && ret=0
-          ;;
-        (dependencies|deps)
-          _arguments \
-            '1:: :_play_apps' \
-            '(--debug)--debug[Debug mode (even more information logged than in verbose mode)]' \
-            '(--jpda)--jpda[Listen for JPDA connection. The process will be suspended until a client is plugged to the JPDA port.]' \
-            '(--sync)--sync[Keep lib/ and modules/ directory synced. Delete unknown dependencies.]' \
-            '(--verbose)--verbose[Verbose Mode]' \
-            && ret=0
-          ;;
-        (clean|javadoc|jd|out|pid|secret|stop)
-          _arguments '1:: :_play_apps' && ret=0
-          ;;
-        (help)
-          _arguments '1: :_play_cmds -F "(cp deps ec idea jd st lm nb nm help antify evolutions evolutions:apply evolutions:markApplied evolutions:resolve)"' && ret=0
-          ;;
-        (status|st)
-          _arguments \
-            '1:: :_play_apps' \
-            '(--url)--url[If you want to monitor an application running on a remote server, specify the application URL using this option]:URL:_urls' \
-            '(--secret)--secret[You can provide your own secret key using this option]:Secret key' \
-            && ret=0
-          ;;
-        (new)
-          _arguments \
-            '1: :_play_apps' \
-            '(--with)--with[Automatically enable this set of module for the newly created application]:Modules list:_play_modules_list' \
-            && ret=0
-          ;;
-        (install)
-          _arguments '1:Play! module:_play_modules_dash_versions' && ret=0
-          ;;
-        (new-module)
-          _arguments '1:Module directory:_files -/' && ret=0
-          ;;
-        (test|precompile|run|start|war|auto-test|classpath|cp|eclipsify|ec|idealize|idea|modules|netbeansify|nb)
-          local cmd_args; cmd_args=(
-            '1:: :_play_apps'
-            '(--deps)--deps[Resolve and install dependencies before running the command]'
-          )
-          case $line[1] in
-            (precompile|run|start|restart|war)
-              local app_dir="$line[2]"
-              [[ -d "$app_dir" ]] || app_dir=.
-              [[ -f "$app_dir/conf/application.conf" ]] && cmd_args+=('--'${(u)${(M)$(<$app_dir/conf/application.conf):#%*}%%.*}'[Use this ID to run the application (override the default framework ID)]')
-              ;|
-            (test|run)
-              cmd_args+=('(-f)-f[Disable the JPDA port checking and force the jpda.port value]')
-              ;|
-            (war)
-              cmd_args+=(
-                '(-o --output)'{-o,--output}'[The path where the WAR directory will be created. The contents of this directory will first be deleted]:output directory:_files -/'
-                '(--zip)--zip[By default, the script creates an exploded WAR. If you want a zipped archive, specify the --zip option]'
-                '(--exclude)--exclude[Excludes a list of colon separated directories]:excluded directories list:_play_colon_dirs_list'
-              )
-              ;|
-            (test|run|start|restart|war)
-              cmd_args+=('*:Java option')
-              ;;
-          esac
-          _arguments "$cmd_args[@]" && ret=0
-          ;;
-        *)
-          _call_function ret _play_cmd_$words[1] && ret=0
-          (( ret )) && _message 'no more arguments'
-          ;;
-      esac
-      ;;
-  esac
-}
-
-# FIXME Completes only core commands, some modules add commands too (eg Maven). Where do we get them ?
-# FIXME Parse 'play help' and 'play help <command>' (for aliases) instead of hard-coding.
-(( $+functions[_play_cmds] )) ||
-_play_cmds() {
-  local commands; commands=(
-    'antify:Create a build.xml file for this project'
-    'auto-test:Automatically run all application tests'
-    'build-module:Build and package a module'
-    'check:Check for a release newer than the current one'
-    {classpath,cp}':Display the computed classpath'
-    'clean:Delete temporary files (including the bytecode cache)'
-    {dependencies,deps}':Resolve and retrieve project dependencies'
-    {eclipsify,ec}':Create all Eclipse configuration files'
-    'evolutions:Run the evolution check'
-    'evolutions\:apply:Automatically apply pending evolutions'
-    'evolutions\:markApplied:Mark pending evolutions as manually applied'
-    'evolutions\:resolve:Resolve partially applied evolution'
-    'help:Display help on a specific command'
-    'id:Define the framework ID'
-    {idealize,idea}':Create all IntelliJ Idea configuration files'
-    'install:Install a module'
-    {javadoc,jd}':Generate your application Javadoc'
-    {list-modules,lm}':List modules available from the central modules repository'
-    'modules:Display the computed modules list'
-    {netbeansify,nb}':Create all NetBeans configuration files'
-    'new:Create a new application'
-    {new-module,nm}':Create a module'
-    'out:Follow logs/system.out file'
-    'pid:Show the PID of the running application'
-    'precompile:Precompile all Java sources and templates to speed up application start-up'
-    'restart:Restart the running application'
-    'run:Run the application in the current shell'
-    'secret:Generate a new secret key'
-    'start:Start the application in the background'
-    {status,st}':Display the running application status'
-    'stop:Stop the running application'
-    'test:Run the application in test mode in the current shell'
-    'war:Export the application as a standalone WAR archive'
-  )
-  _describe -t commands 'Play! command' commands "$@"
-}
-
-(( $+functions[_play_apps] )) ||
-_play_apps() {
-  _wanted application expl 'Play! application directory' _files -/
-}
-
-(( $+functions[_play_modules] )) ||
-_play_modules() {
-  local modules; modules=(${(ps:,:)${${${(S)${(f)$(_call_program modules $service list-modules)}//\]*\[/,}%%\]*}##*\[}})
-  _describe -t modules 'Play! module' modules "$@"
-}
-
-(( $+functions[_play_modules_dash_versions] )) ||
-_play_modules_dash_versions() {
-  local ret=1
-  if compset -P '*-'; then
-    local versions; versions=(${(ps:,:)${${${${${(f)$(_call_program versions $service list-modules)}##*${IPREFIX%-}\]}#*Versions:}%%"~"*}//[[:space:]]/}})
-    _describe -t module-versions "${IPREFIX%-} module versions" versions && ret=0
-  else
-    _wanted modules expl 'Play! module' _play_modules -qS- && ret=0
-  fi
-}
-
-(( $+functions[_play_modules_list] )) ||
-_play_modules_list() {
-  compset -P '*,'; compset -S ',*'
-  _wanted module-list expl 'Play! modules list' _play_modules -qS,
-}
-
-(( $+functions[_play_colon_dirs_list] )) ||
-_play_colon_dirs_list() {
-  compset -P '*:'; compset -S ':*'
-  _wanted directories-list expl 'Directories list' _files -/ -qS:
-}
-
-_play "$@"
-
-# Local Variables:
-# mode: Shell-Script
-# sh-indentation: 2
-# indent-tabs-mode: nil
-# sh-basic-offset: 2
-# End:
-# vim: ft=zsh sw=2 ts=2 et
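The removed _play script is also the clearest example in this change of the '*:: :->args' dispatch idiom: the first positional argument selects a subcommand, and the completer then narrows curcontext and completes the remaining words per subcommand. A generic sketch of that shape, with a made-up demo command and subcommands:

  _demo() {
    local context curcontext="$curcontext" state line ret=1
    typeset -A opt_args

    _arguments -C \
      '1: :(run test clean)' \
      '*::arg:->args' \
      && ret=0

    if [[ $state == args ]]; then
      # Narrow the context so per-subcommand styles and functions apply.
      curcontext="${curcontext%:*:*}:demo-cmd-$words[1]:"
      case $line[1] in
        (run) _arguments '--port[listen port]:port' && ret=0 ;;
        (*) _message 'no more arguments' ;;
      esac
    fi
    return ret
  }
  compdef _demo demo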
