Merge branch 'master' of https://github.com/robbyrussell/oh-my-zsh
Conflicts: .gitignore lib/aliases.zsh lib/completion.zsh lib/functions.zsh lib/git.zsh lib/key-bindings.zsh lib/misc.zsh lib/spectrum.zsh plugins/git/git.plugin.zsh plugins/osx/_man-preview plugins/osx/osx.plugin.zsh plugins/svn/svn.plugin.zsh templates/zshrc.zsh-template themes/lukerandall.zsh-theme themes/philips.zsh-theme tools/upgrade.sh
This commit is contained in:
parent
0e284f6c2a
commit
ba9900a488
76
plugins/archlinux/archlinux.plugin.zsh
Normal file
76
plugins/archlinux/archlinux.plugin.zsh
Normal file
|
@ -0,0 +1,76 @@
|
|||
# Archlinux zsh aliases and functions
|
||||
# Usage is also described at https://github.com/robbyrussell/oh-my-zsh/wiki/Plugins
|
||||
|
||||
# Look for yaourt, and add some useful functions if we have it.
|
||||
if [[ -x `which yaourt` ]]; then
|
||||
upgrade () {
|
||||
yaourt -Syu
|
||||
}
|
||||
alias yaconf='yaourt -C' # Fix all configuration files with vimdiff
|
||||
# Pacman - https://wiki.archlinux.org/index.php/Pacman_Tips
|
||||
alias yaupg='yaourt -Syu' # Synchronize with repositories before upgrading packages that are out of date on the local system.
|
||||
alias yain='yaourt -S' # Install specific package(s) from the repositories
|
||||
alias yains='yaourt -U' # Install specific package not from the repositories but from a file
|
||||
alias yare='yaourt -R' # Remove the specified package(s), retaining its configuration(s) and required dependencies
|
||||
alias yarem='yaourt -Rns' # Remove the specified package(s), its configuration(s) and unneeded dependencies
|
||||
alias yarep='yaourt -Si' # Display information about a given package in the repositories
|
||||
alias yareps='yaourt -Ss' # Search for package(s) in the repositories
|
||||
alias yaloc='yaourt -Qi' # Display information about a given package in the local database
|
||||
alias yalocs='yaourt -Qs' # Search for package(s) in the local database
|
||||
# Additional yaourt alias examples
|
||||
if [[ -x `which abs` ]]; then
|
||||
alias yaupd='yaourt -Sy && sudo abs' # Update and refresh the local package and ABS databases against repositories
|
||||
else
|
||||
alias yaupd='yaourt -Sy' # Update and refresh the local package and ABS databases against repositories
|
||||
fi
|
||||
alias yainsd='yaourt -S --asdeps' # Install given package(s) as dependencies of another package
|
||||
alias yamir='yaourt -Syy' # Force refresh of all package lists after updating /etc/pacman.d/mirrorlist
|
||||
else
|
||||
upgrade() {
|
||||
sudo pacman -Syu
|
||||
}
|
||||
fi
|
||||
|
||||
# Pacman - https://wiki.archlinux.org/index.php/Pacman_Tips
|
||||
alias pacupg='sudo pacman -Syu' # Synchronize with repositories before upgrading packages that are out of date on the local system.
|
||||
alias pacin='sudo pacman -S' # Install specific package(s) from the repositories
|
||||
alias pacins='sudo pacman -U' # Install specific package not from the repositories but from a file
|
||||
alias pacre='sudo pacman -R' # Remove the specified package(s), retaining its configuration(s) and required dependencies
|
||||
alias pacrem='sudo pacman -Rns' # Remove the specified package(s), its configuration(s) and unneeded dependencies
|
||||
alias pacrep='pacman -Si' # Display information about a given package in the repositories
|
||||
alias pacreps='pacman -Ss' # Search for package(s) in the repositories
|
||||
alias pacloc='pacman -Qi' # Display information about a given package in the local database
|
||||
alias paclocs='pacman -Qs' # Search for package(s) in the local database
|
||||
# Additional pacman alias examples
|
||||
if [[ -x `which abs` ]]; then
|
||||
alias pacupd='sudo pacman -Sy && sudo abs' # Update and refresh the local package and ABS databases against repositories
|
||||
else
|
||||
alias pacupd='sudo pacman -Sy' # Update and refresh the local package and ABS databases against repositories
|
||||
fi
|
||||
alias pacinsd='sudo pacman -S --asdeps' # Install given package(s) as dependencies of another package
|
||||
alias pacmir='sudo pacman -Syy' # Force refresh of all package lists after updating /etc/pacman.d/mirrorlist
|
||||
|
||||
# https://bbs.archlinux.org/viewtopic.php?id=93683
|
||||
paclist() {
|
||||
sudo pacman -Qei $(pacman -Qu|cut -d" " -f 1)|awk ' BEGIN {FS=":"}/^Name/{printf("\033[1;36m%s\033[1;37m", $2)}/^Description/{print $2}'
|
||||
}
|
||||
|
||||
alias paclsorphans='sudo pacman -Qdt'
|
||||
alias pacrmorphans='sudo pacman -Rs $(pacman -Qtdq)'
|
||||
|
||||
pacdisowned() {
|
||||
tmp=${TMPDIR-/tmp}/pacman-disowned-$UID-$$
|
||||
db=$tmp/db
|
||||
fs=$tmp/fs
|
||||
|
||||
mkdir "$tmp"
|
||||
trap 'rm -rf "$tmp"' EXIT
|
||||
|
||||
pacman -Qlq | sort -u > "$db"
|
||||
|
||||
find /bin /etc /lib /sbin /usr \
|
||||
! -name lost+found \
|
||||
\( -type d -printf '%p/\n' -o -print \) | sort > "$fs"
|
||||
|
||||
comm -23 "$fs" "$db"
|
||||
}
|
|
@ -1,3 +1,37 @@
|
|||
alias be="bundle exec"
|
||||
alias bi="bundle install"
|
||||
alias bl="bundle list"
|
||||
alias bu="bundle update"
|
||||
alias bp="bundle package"
|
||||
|
||||
# The following is based on https://github.com/gma/bundler-exec
|
||||
|
||||
bundled_commands=(cap capify cucumber guard heroku rackup rails rake rspec ruby shotgun spec spork thin unicorn unicorn_rails)
|
||||
|
||||
## Functions
|
||||
|
||||
_bundler-installed() {
|
||||
which bundle > /dev/null 2>&1
|
||||
}
|
||||
|
||||
_within-bundled-project() {
|
||||
local check_dir=$PWD
|
||||
while [ "$(dirname $check_dir)" != "/" ]; do
|
||||
[ -f "$check_dir/Gemfile" ] && return
|
||||
check_dir="$(dirname $check_dir)"
|
||||
done
|
||||
false
|
||||
}
|
||||
|
||||
_run-with-bundler() {
|
||||
if _bundler-installed && _within-bundled-project; then
|
||||
bundle exec $@
|
||||
else
|
||||
$@
|
||||
fi
|
||||
}
|
||||
|
||||
## Main program
|
||||
for cmd in $bundled_commands; do
|
||||
alias $cmd="_run-with-bundler $cmd"
|
||||
done
|
||||
|
|
32
plugins/cake/cake.plugin.zsh
Normal file
32
plugins/cake/cake.plugin.zsh
Normal file
|
@ -0,0 +1,32 @@
|
|||
# Set this to 1 if you want to cache the tasks
|
||||
cache_task_list=1
|
||||
|
||||
# Cache filename
|
||||
cache_file='.cake_task_cache'
|
||||
|
||||
_cake_does_target_list_need_generating () {
|
||||
|
||||
if [ $cache_task_list -eq 0 ]; then
|
||||
return 1;
|
||||
fi
|
||||
|
||||
if [ ! -f $cache_file ]; then return 0;
|
||||
else
|
||||
accurate=$(stat -f%m $cache_file)
|
||||
changed=$(stat -f%m Cakefile)
|
||||
return $(expr $accurate '>=' $changed)
|
||||
fi
|
||||
}
|
||||
|
||||
_cake () {
|
||||
if [ -f Cakefile ]; then
|
||||
if _cake_does_target_list_need_generating; then
|
||||
cake | sed -e "s/cake \([^ ]*\) .*/\1/" | grep -v '^$' > $cache_file
|
||||
compadd `cat $cache_file`
|
||||
else
|
||||
compadd `cake | sed -e "s/cake \([^ ]*\) .*/\1/" | grep -v '^$'`
|
||||
fi
|
||||
fi
|
||||
}
|
||||
|
||||
compdef _cake cake
|
2
plugins/cloudapp/cloudapp.plugin.zsh
Normal file
2
plugins/cloudapp/cloudapp.plugin.zsh
Normal file
|
@ -0,0 +1,2 @@
|
|||
alias cloudapp="${0:r:r}.rb"
|
||||
|
60
plugins/cloudapp/cloudapp.rb
Executable file
60
plugins/cloudapp/cloudapp.rb
Executable file
|
@ -0,0 +1,60 @@
|
|||
#!/usr/bin/env ruby
|
||||
#
|
||||
# cloudapp
|
||||
# Zach Holman / @holman
|
||||
#
|
||||
# Uploads a file from the command line to CloudApp, drops it into your
|
||||
# clipboard (on a Mac, at least).
|
||||
#
|
||||
# Example:
|
||||
#
|
||||
# cloudapp drunk-blake.png
|
||||
#
|
||||
# This requires Aaron Russell's cloudapp_api gem:
|
||||
#
|
||||
# gem install cloudapp_api
|
||||
#
|
||||
# Requires you set your CloudApp credentials in ~/.cloudapp as a simple file of:
|
||||
#
|
||||
# email
|
||||
# password
|
||||
|
||||
require 'rubygems'
|
||||
begin
|
||||
require 'cloudapp_api'
|
||||
rescue LoadError
|
||||
puts "You need to install cloudapp_api: gem install cloudapp_api"
|
||||
exit!(1)
|
||||
end
|
||||
|
||||
config_file = "#{ENV['HOME']}/.cloudapp"
|
||||
unless File.exist?(config_file)
|
||||
puts "You need to type your email and password (one per line) into "+
|
||||
"`~/.cloudapp`"
|
||||
exit!(1)
|
||||
end
|
||||
|
||||
email,password = File.read(config_file).split("\n")
|
||||
|
||||
class HTTParty::Response
|
||||
# Apparently HTTPOK.ok? IS NOT OKAY WTFFFFFFFFFFUUUUUUUUUUUUUU
|
||||
# LETS MONKEY PATCH IT I FEEL OKAY ABOUT IT
|
||||
def ok? ; true end
|
||||
end
|
||||
|
||||
if ARGV[0].nil?
|
||||
puts "You need to specify a file to upload."
|
||||
exit!(1)
|
||||
end
|
||||
|
||||
CloudApp.authenticate(email,password)
|
||||
url = CloudApp::Item.create(:upload, {:file => ARGV[0]}).url
|
||||
|
||||
# Say it for good measure.
|
||||
puts "Uploaded to #{url}."
|
||||
|
||||
# Get the embed link.
|
||||
url = "#{url}/#{ARGV[0].split('/').last}"
|
||||
|
||||
# Copy it to your (Mac's) clipboard.
|
||||
`echo '#{url}' | tr -d "\n" | pbcopy`
|
222
plugins/django/django.plugin.zsh
Normal file
222
plugins/django/django.plugin.zsh
Normal file
|
@ -0,0 +1,222 @@
|
|||
#compdef manage.py
|
||||
|
||||
typeset -ga nul_args
|
||||
nul_args=(
|
||||
'--settings=-[the Python path to a settings module.]:file:_files'
|
||||
'--pythonpath=-[a directory to add to the Python path.]::directory:_directories'
|
||||
'--traceback[print traceback on exception.]'
|
||||
"--version[show program's version number and exit.]"
|
||||
{-h,--help}'[show this help message and exit.]'
|
||||
)
|
||||
|
||||
_managepy-adminindex(){
|
||||
_arguments -s : \
|
||||
$nul_args \
|
||||
'*::directory:_directories' && ret=0
|
||||
}
|
||||
|
||||
_managepy-createcachetable(){
|
||||
_arguments -s : \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-dbshell(){
|
||||
_arguments -s : \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-diffsettings(){
|
||||
_arguments -s : \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-dumpdata(){
|
||||
_arguments -s : \
|
||||
'--format=-[specifies the output serialization format for fixtures.]:format:(json yaml xml)' \
|
||||
'--indent=-[specifies the indent level to use when pretty-printing output.]:' \
|
||||
$nul_args \
|
||||
'*::appname:_applist' && ret=0
|
||||
}
|
||||
|
||||
_managepy-flush(){
|
||||
_arguments -s : \
|
||||
'--verbosity=-[verbosity level; 0=minimal output, 1=normal output, 2=all output.]:Verbosity:((0\:minimal 1\:normal 2\:all))' \
|
||||
'--noinput[tells Django to NOT prompt the user for input of any kind.]' \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-help(){
|
||||
_arguments -s : \
|
||||
'*:command:_managepy_cmds' \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy_cmds(){
|
||||
local line
|
||||
local -a cmd
|
||||
_call_program help-command ./manage.py help \
|
||||
|& sed -n '/^ /s/[(), ]/ /gp' \
|
||||
| while read -A line; do cmd=($line $cmd) done
|
||||
_describe -t managepy-command 'manage.py command' cmd
|
||||
}
|
||||
|
||||
_managepy-inspectdb(){
|
||||
_arguments -s : \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-loaddata(){
|
||||
_arguments -s : \
|
||||
'--verbosity=-[verbosity level; 0=minimal output, 1=normal output, 2=all output.]:Verbosity:((0\:minimal 1\:normal 2\:all))' \
|
||||
'*::file:_files' \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-reset(){
|
||||
_arguments -s : \
|
||||
'--noinput[tells Django to NOT prompt the user for input of any kind.]' \
|
||||
'*::appname:_applist' \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-runfcgi(){
|
||||
local state
|
||||
|
||||
local fcgi_opts
|
||||
fcgi_opts=(
|
||||
'protocol[fcgi, scgi, ajp, ... (default fcgi)]:protocol:(fcgi scgi ajp)'
|
||||
'host[hostname to listen on..]:'
|
||||
'port[port to listen on.]:'
|
||||
'socket[UNIX socket to listen on.]::file:_files'
|
||||
'method[prefork or threaded (default prefork)]:method:(prefork threaded)'
|
||||
'maxrequests[number of requests a child handles before it is killed and a new child is forked (0 = no limit).]:'
|
||||
'maxspare[max number of spare processes / threads.]:'
|
||||
'minspare[min number of spare processes / threads.]:'
|
||||
'maxchildren[hard limit number of processes / threads.]:'
|
||||
'daemonize[whether to detach from terminal.]:boolean:(False True)'
|
||||
'pidfile[write the spawned process-id to this file.]:file:_files'
|
||||
'workdir[change to this directory when daemonizing.]:directory:_files'
|
||||
'outlog[write stdout to this file.]:file:_files'
|
||||
'errlog[write stderr to this file.]:file:_files'
|
||||
)
|
||||
|
||||
_arguments -s : \
|
||||
$nul_args \
|
||||
'*: :_values "FCGI Setting" $fcgi_opts' && ret=0
|
||||
}
|
||||
|
||||
_managepy-runserver(){
|
||||
_arguments -s : \
|
||||
'--noreload[tells Django to NOT use the auto-reloader.]' \
|
||||
'--adminmedia[specifies the directory from which to serve admin media.]:directory:_files' \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-shell(){
|
||||
_arguments -s : \
|
||||
'--plain[tells Django to use plain Python, not IPython.]' \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-sql(){}
|
||||
_managepy-sqlall(){}
|
||||
_managepy-sqlclear(){}
|
||||
_managepy-sqlcustom(){}
|
||||
_managepy-sqlflush(){}
|
||||
_managepy-sqlindexes(){}
|
||||
_managepy-sqlinitialdata(){}
|
||||
_managepy-sqlreset(){}
|
||||
_managepy-sqlsequencereset(){}
|
||||
_managepy-startapp(){}
|
||||
|
||||
_managepy-syncdb() {
|
||||
_arguments -s : \
|
||||
'--verbosity=-[verbosity level; 0=minimal output, 1=normal output, 2=all output.]:Verbosity:((0\:minimal 1\:normal 2\:all))' \
|
||||
'--noinput[tells Django to NOT prompt the user for input of any kind.]' \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-test() {
|
||||
_arguments -s : \
|
||||
'--verbosity=-[verbosity level; 0=minimal output, 1=normal output, 2=all output.]:Verbosity:((0\:minimal 1\:normal 2\:all))' \
|
||||
'--noinput[tells Django to NOT prompt the user for input of any kind.]' \
|
||||
'*::appname:_applist' \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-testserver() {
|
||||
_arguments -s : \
|
||||
'--verbosity=-[verbosity level; 0=minimal output, 1=normal output, 2=all output.]:Verbosity:((0\:minimal 1\:normal 2\:all))' \
|
||||
'--addrport=-[port number or ipaddr:port to run the server on.]' \
|
||||
'*::fixture:_files' \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-validate() {
|
||||
_arguments -s : \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-commands() {
|
||||
local -a commands
|
||||
|
||||
commands=(
|
||||
'adminindex:prints the admin-index template snippet for the given app name(s).'
|
||||
'createcachetable:creates the table needed to use the SQL cache backend.'
|
||||
'dbshell:runs the command-line client for the current DATABASE_ENGINE.'
|
||||
"diffsettings:displays differences between the current settings.py and Django's default settings."
|
||||
'dumpdata:Output the contents of the database as a fixture of the given format.'
|
||||
'flush:Executes ``sqlflush`` on the current database.'
|
||||
'help:manage.py help.'
|
||||
'inspectdb:Introspects the database tables in the given database and outputs a Django model module.'
|
||||
'loaddata:Installs the named fixture(s) in the database.'
|
||||
'reset:Executes ``sqlreset`` for the given app(s) in the current database.'
|
||||
'runfcgi:Run this project as a fastcgi (or some other protocol supported by flup) application,'
|
||||
'runserver:Starts a lightweight Web server for development.'
|
||||
'shell:Runs a Python interactive interpreter.'
|
||||
'sql:Prints the CREATE TABLE SQL statements for the given app name(s).'
|
||||
'sqlall:Prints the CREATE TABLE, custom SQL and CREATE INDEX SQL statements for the given model module name(s).'
|
||||
'sqlclear:Prints the DROP TABLE SQL statements for the given app name(s).'
|
||||
'sqlcustom:Prints the custom table modifying SQL statements for the given app name(s).'
|
||||
'sqlflush:Returns a list of the SQL statements required to return all tables in the database to the state they were in just after they were installed.'
|
||||
'sqlindexes:Prints the CREATE INDEX SQL statements for the given model module name(s).'
|
||||
"sqlinitialdata:RENAMED: see 'sqlcustom'"
|
||||
'sqlreset:Prints the DROP TABLE SQL, then the CREATE TABLE SQL, for the given app name(s).'
|
||||
'sqlsequencereset:Prints the SQL statements for resetting sequences for the given app name(s).'
|
||||
"startapp:Creates a Django app directory structure for the given app name in this project's directory."
|
||||
"syncdb:Create the database tables for all apps in INSTALLED_APPS whose tables haven't already been created."
|
||||
'test:Runs the test suite for the specified applications, or the entire site if no apps are specified.'
|
||||
'testserver:Runs a development server with data from the given fixture(s).'
|
||||
'validate:Validates all installed models.'
|
||||
)
|
||||
|
||||
_describe -t commands 'manage.py command' commands && ret=0
|
||||
}
|
||||
|
||||
_applist() {
|
||||
local line
|
||||
local -a apps
|
||||
_call_program help-command "python -c \"import os.path as op, re, django.conf, sys;\\
|
||||
bn=op.basename(op.abspath(op.curdir));[sys\\
|
||||
.stdout.write(str(re.sub(r'^%s\.(.*?)$' %
|
||||
bn, r'\1', i)) + '\n') for i in django.conf.settings.\\
|
||||
INSTALLED_APPS if re.match(r'^%s' % bn, i)]\"" \
|
||||
| while read -A line; do apps=($line $apps) done
|
||||
_values 'Application' $apps && ret=0
|
||||
}
|
||||
|
||||
_managepy() {
|
||||
local curcontext=$curcontext ret=1
|
||||
|
||||
if ((CURRENT == 2)); then
|
||||
_managepy-commands
|
||||
else
|
||||
shift words
|
||||
(( CURRENT -- ))
|
||||
curcontext="${curcontext%:*:*}:managepy-$words[1]:"
|
||||
_call_function ret _managepy-$words[1]
|
||||
fi
|
||||
}
|
||||
|
||||
compdef _managepy manage.py
|
||||
compdef _managepy django
|
34
plugins/gas/_gas
Normal file
34
plugins/gas/_gas
Normal file
|
@ -0,0 +1,34 @@
|
|||
#compdef gas
|
||||
|
||||
local curcontext="$curcontext" state line cmds ret=1
|
||||
|
||||
_arguments -C \
|
||||
'(- 1 *)'{-v,--version}'[display version information]' \
|
||||
'(-h|--help)'{-h,--help}'[show help information]' \
|
||||
'1: :->cmds' \
|
||||
'*: :->args' && ret=0
|
||||
|
||||
case $state in
|
||||
cmds)
|
||||
cmds=(
|
||||
"version:Prints Gas's version"
|
||||
"use:Uses author"
|
||||
"show:Shows your current user"
|
||||
"list:Lists your authors"
|
||||
"import:Imports current user to gasconfig"
|
||||
"help:Describe available tasks or one specific task"
|
||||
"delete:Deletes author"
|
||||
"add:Adds author to gasconfig"
|
||||
)
|
||||
_describe -t commands 'gas command' cmds && ret=0
|
||||
;;
|
||||
args)
|
||||
case $line[1] in
|
||||
(use|delete)
|
||||
_values -S , 'authors' $(cat ~/.gas | sed -n -e 's/^\[\(.*\)\]/\1/p') && ret=0
|
||||
;;
|
||||
esac
|
||||
;;
|
||||
esac
|
||||
|
||||
return ret
|
158
plugins/heroku/_heroku
Normal file
158
plugins/heroku/_heroku
Normal file
|
@ -0,0 +1,158 @@
|
|||
#compdef heroku
|
||||
|
||||
# Heroku Autocomplete plugin for Oh-My-Zsh
|
||||
# Requires: The Heroku client gem (https://github.com/heroku/heroku)
|
||||
# Author: Ali B. (http://awhitebox.com)
|
||||
|
||||
local -a _1st_arguments
|
||||
_1st_arguments=(
|
||||
"account\:confirm_billing":"Confirm that your account can be billed at the end of the month"
|
||||
"addons":"list installed addons"
|
||||
"addons\:list":"list all available addons"
|
||||
"addons\:add":"install an addon"
|
||||
"addons\:upgrade":"upgrade an existing addon"
|
||||
"addons\:downgrade":"downgrade an existing addon"
|
||||
"addons\:remove":"uninstall an addon"
|
||||
"addons\:open":"open an addon's dashboard in your browser"
|
||||
"apps":"list your apps"
|
||||
"apps\:info":"show detailed app information"
|
||||
"apps\:create":"create a new app"
|
||||
"apps\:rename":"rename the app"
|
||||
"apps\:open":"open the app in a web browser"
|
||||
"apps\:destroy":"permanently destroy an app"
|
||||
"auth\:login":"log in with your heroku credentials"
|
||||
"auth\:logout":"clear local authentication credentials"
|
||||
"config":"display the config vars for an app"
|
||||
"config\:add":"add one or more config vars"
|
||||
"config\:remove":"remove a config var"
|
||||
"db\:push":"push local data up to your app"
|
||||
"db\:pull":"pull heroku data down into your local database"
|
||||
"domains":"list custom domains for an app"
|
||||
"domains\:add":"add a custom domain to an app"
|
||||
"domains\:remove":"remove a custom domain from an app"
|
||||
"domains\:clear":"remove all custom domains from an app"
|
||||
"help":"list available commands or display help for a specific command"
|
||||
"keys":"display keys for the current user"
|
||||
"keys\:add":"add a key for the current user"
|
||||
"keys\:remove":"remove a key from the current user"
|
||||
"keys\:clear":"remove all authentication keys from the current user"
|
||||
"logs":"display recent log output"
|
||||
"logs\:cron":"DEPRECATED: display cron logs from legacy logging"
|
||||
"logs\:drains":"manage syslog drains"
|
||||
"maintenance\:on":"put the app into maintenance mode"
|
||||
"maintenance\:off":"take the app out of maintenance mode"
|
||||
"pg\:info":"display database information"
|
||||
"pg\:ingress":"allow direct connections to the database from this IP for one minute"
|
||||
"pg\:promote":"sets DATABASE as your DATABASE_URL"
|
||||
"pg\:psql":"open a psql shell to the database"
|
||||
"pg\:reset":"delete all data in DATABASE"
|
||||
"pg\:unfollow":"stop a replica from following and make it a read/write database"
|
||||
"pg\:wait":"monitor database creation, exit when complete"
|
||||
"pgbackups":"list captured backups"
|
||||
"pgbackups\:url":"get a temporary URL for a backup"
|
||||
"pgbackups\:capture":"capture a backup from a database id"
|
||||
"pgbackups\:restore":"restore a backup to a database"
|
||||
"pgbackups\:destroy":"destroys a backup"
|
||||
"plugins":"list installed plugins"
|
||||
"plugins\:install":"install a plugin"
|
||||
"plugins\:uninstall":"uninstall a plugin"
|
||||
"ps\:dynos":"scale to QTY web processes"
|
||||
"ps\:workers":"scale to QTY background processes"
|
||||
"ps":"list processes for an app"
|
||||
"ps\:restart":"restart an app process"
|
||||
"ps\:scale":"scale processes by the given amount"
|
||||
"releases":"list releases"
|
||||
"releases\:info":"view detailed information for a release"
|
||||
"rollback":"roll back to an older release"
|
||||
"run":"run an attached process"
|
||||
"run\:rake":"remotely execute a rake command"
|
||||
"run\:console":"open a remote console session"
|
||||
"sharing":"list collaborators on an app"
|
||||
"sharing\:add":"add a collaborator to an app"
|
||||
"sharing\:remove":"remove a collaborator from an app"
|
||||
"sharing\:transfer":"transfer an app to a new owner"
|
||||
"ssl":"list certificates for an app"
|
||||
"ssl\:add":"add an ssl certificate to an app"
|
||||
"ssl\:remove":"remove an ssl certificate from an app"
|
||||
"ssl\:clear":"remove all ssl certificates from an app"
|
||||
"stack":"show the list of available stacks"
|
||||
"stack\:migrate":"prepare migration of this app to a new stack"
|
||||
"version":"show heroku client version"
|
||||
)
|
||||
|
||||
_arguments '*:: :->command'
|
||||
|
||||
if (( CURRENT == 1 )); then
|
||||
_describe -t commands "heroku command" _1st_arguments
|
||||
return
|
||||
fi
|
||||
|
||||
local -a _command_args
|
||||
case "$words[1]" in
|
||||
apps:info)
|
||||
_command_args=(
|
||||
'(-r|--raw)'{-r,--raw}'[output info as raw key/value pairs]' \
|
||||
)
|
||||
;;
|
||||
apps:create)
|
||||
_command_args=(
|
||||
'(-a|--addons)'{-a,--addons}'[a list of addons to install]' \
|
||||
'(-r|--remote)'{-r,--remote}'[the git remote to create, default "heroku"]' \
|
||||
'(-s|--stack)'{-s,--stack}'[the stack on which to create the app]' \
|
||||
)
|
||||
;;
|
||||
config)
|
||||
_command_args=(
|
||||
'(-s|--shell)'{-s,--shell}'[output config vars in shell format]' \
|
||||
)
|
||||
;;
|
||||
db:push)
|
||||
_command_args=(
|
||||
'(-c|--chunksize)'{-c,--chunksize}'[specify the number of rows to send in each batch]' \
|
||||
'(-d|--debug)'{-d,--debug}'[enable debugging output]' \
|
||||
'(-e|--exclude)'{-e,--exclude}'[exclude the specified tables from the push]' \
|
||||
'(-f|--filter)'{-f,--filter}'[only push certain tables]' \
|
||||
'(-r|--resume)'{-r,--resume}'[resume transfer described by a .dat file]' \
|
||||
'(-t|--tables)'{-t,--tables}'[only push the specified tables]' \
|
||||
)
|
||||
;;
|
||||
db:pull)
|
||||
_command_args=(
|
||||
'(-c|--chunksize)'{-c,--chunksize}'[specify the number of rows to send in each batch]' \
|
||||
'(-d|--debug)'{-d,--debug}'[enable debugging output]' \
|
||||
'(-e|--exclude)'{-e,--exclude}'[exclude the specified tables from the pull]' \
|
||||
'(-f|--filter)'{-f,--filter}'[only pull certain tables]' \
|
||||
'(-r|--resume)'{-r,--resume}'[resume transfer described by a .dat file]' \
|
||||
'(-t|--tables)'{-t,--tables}'[only pull the specified tables]' \
|
||||
)
|
||||
;;
|
||||
keys)
|
||||
_command_args=(
|
||||
'(-l|--long)'{-l,--long}'[display extended information for each key]' \
|
||||
)
|
||||
;;
|
||||
logs)
|
||||
_command_args=(
|
||||
'(-n|--num)'{-n,--num}'[the number of lines to display]' \
|
||||
'(-p|--ps)'{-p,--ps}'[only display logs from the given process]' \
|
||||
'(-s|--source)'{-s,--source}'[only display logs from the given source]' \
|
||||
'(-t|--tail)'{-t,--tail}'[continually stream logs]' \
|
||||
)
|
||||
;;
|
||||
pgbackups:capture)
|
||||
_command_args=(
|
||||
'(-e|--expire)'{-e,--expire}'[if no slots are available to capture, delete the oldest backup to make room]' \
|
||||
)
|
||||
;;
|
||||
stack)
|
||||
_command_args=(
|
||||
'(-a|--all)'{-a,--all}'[include deprecated stacks]' \
|
||||
)
|
||||
;;
|
||||
esac
|
||||
|
||||
_arguments \
|
||||
$_command_args \
|
||||
'(--app)--app[the app name]' \
|
||||
&& return 0
|
||||
|
7
plugins/history-substring-search/README
Normal file
7
plugins/history-substring-search/README
Normal file
|
@ -0,0 +1,7 @@
|
|||
To activate this script, load it into an interactive ZSH session:
|
||||
|
||||
% source history-substring-search.zsh
|
||||
|
||||
See the "history-substring-search.zsh" file for more information:
|
||||
|
||||
% sed -n '2,/^$/s/^#//p' history-substring-search.zsh | more
|
|
@ -0,0 +1,12 @@
|
|||
# This file integrates the history-substring-search script into oh-my-zsh.
|
||||
|
||||
source "${0:r:r}.zsh"
|
||||
|
||||
if test "$CASE_SENSITIVE" = true; then
|
||||
unset HISTORY_SUBSTRING_SEARCH_GLOBBING_FLAGS
|
||||
fi
|
||||
|
||||
if test "$DISABLE_COLOR" = true; then
|
||||
unset HISTORY_SUBSTRING_SEARCH_HIGHLIGHT_FOUND
|
||||
unset HISTORY_SUBSTRING_SEARCH_HIGHLIGHT_NOT_FOUND
|
||||
fi
|
642
plugins/history-substring-search/history-substring-search.zsh
Normal file
642
plugins/history-substring-search/history-substring-search.zsh
Normal file
|
@ -0,0 +1,642 @@
|
|||
#!/usr/bin/env zsh
|
||||
#
|
||||
# This is a clean-room implementation of the Fish[1] shell's history search
|
||||
# feature, where you can type in any part of any previously entered command
|
||||
# and press the UP and DOWN arrow keys to cycle through the matching commands.
|
||||
#
|
||||
#-----------------------------------------------------------------------------
|
||||
# Usage
|
||||
#-----------------------------------------------------------------------------
|
||||
#
|
||||
# 1. Load this script into your interactive ZSH session:
|
||||
#
|
||||
# % source history-substring-search.zsh
|
||||
#
|
||||
# If you want to use the zsh-syntax-highlighting[6] script along with this
|
||||
# script, then make sure that you load it *before* you load this script:
|
||||
#
|
||||
# % source zsh-syntax-highlighting.zsh
|
||||
# % source history-substring-search.zsh
|
||||
#
|
||||
# 2. Type any part of any previous command and then:
|
||||
#
|
||||
# * Press the UP arrow key to select the nearest command that (1) contains
|
||||
# your query and (2) is older than the current command in the command
|
||||
# history.
|
||||
#
|
||||
# * Press the DOWN arrow key to select the nearest command that (1)
|
||||
# contains your query and (2) is newer than the current command in the
|
||||
# command history.
|
||||
#
|
||||
# * Press ^U (the Control and U keys simultaneously) to abort the search.
|
||||
#
|
||||
# 3. If a matching command spans more than one line of text, press the LEFT
|
||||
# arrow key to move the cursor away from the end of the command, and then:
|
||||
#
|
||||
# * Press the UP arrow key to move the cursor to the line above. When the
|
||||
# cursor reaches the first line of the command, pressing the UP arrow
|
||||
# key again will cause this script to perform another search.
|
||||
#
|
||||
# * Press the DOWN arrow key to move the cursor to the line below. When
|
||||
# the cursor reaches the last line of the command, pressing the DOWN
|
||||
# arrow key again will cause this script to perform another search.
|
||||
#
|
||||
#-----------------------------------------------------------------------------
|
||||
# Configuration
|
||||
#-----------------------------------------------------------------------------
|
||||
#
|
||||
# This script defines the following global variables. You may override their
|
||||
# default values only after having loaded this script into your ZSH session.
|
||||
#
|
||||
# * HISTORY_SUBSTRING_SEARCH_HIGHLIGHT_FOUND is a global variable that defines
|
||||
# how the query should be highlighted inside a matching command. Its default
|
||||
# value causes this script to highlight using bold, white text on a magenta
|
||||
# background. See the "Character Highlighting" section in the zshzle(1) man
|
||||
# page to learn about the kinds of values you may assign to this variable.
|
||||
#
|
||||
# * HISTORY_SUBSTRING_SEARCH_HIGHLIGHT_NOT_FOUND is a global variable that
|
||||
# defines how the query should be highlighted when no commands in the
|
||||
# history match it. Its default value causes this script to highlight using
|
||||
# bold, white text on a red background. See the "Character Highlighting"
|
||||
# section in the zshzle(1) man page to learn about the kinds of values you
|
||||
# may assign to this variable.
|
||||
#
|
||||
# * HISTORY_SUBSTRING_SEARCH_GLOBBING_FLAGS is a global variable that defines
|
||||
# how the command history will be searched for your query. Its default value
|
||||
# causes this script to perform a case-insensitive search. See the "Globbing
|
||||
# Flags" section in the zshexpn(1) man page to learn about the kinds of
|
||||
# values you may assign to this variable.
|
||||
#
|
||||
#-----------------------------------------------------------------------------
|
||||
# History
|
||||
#-----------------------------------------------------------------------------
|
||||
#
|
||||
# This script was originally written by Peter Stephenson[2], who published it
|
||||
# to the ZSH users mailing list (thereby making it public domain) in September
|
||||
# 2009. It was later revised by Guido van Steen and released under the BSD
|
||||
# license (see below) as part of the fizsh[3] project in January 2011.
|
||||
#
|
||||
# It was later extracted from fizsh[3] release 1.0.1, refactored heavily, and
|
||||
# repackaged as both an oh-my-zsh plugin[4] and as an independently loadable
|
||||
# ZSH script[5] by Suraj N. Kurapati in 2011.
|
||||
#
|
||||
# It was further developed[4] by Guido van Steen, Suraj N. Kurapati, Sorin
|
||||
# Ionescu, and Vincent Guerci in 2011.
|
||||
#
|
||||
# [1]: http://fishshell.com
|
||||
# [2]: http://www.zsh.org/mla/users/2009/msg00818.html
|
||||
# [3]: http://sourceforge.net/projects/fizsh/
|
||||
# [4]: https://github.com/robbyrussell/oh-my-zsh/pull/215
|
||||
# [5]: https://github.com/sunaku/zsh-history-substring-search
|
||||
# [6]: https://github.com/nicoulaj/zsh-syntax-highlighting
|
||||
#
|
||||
##############################################################################
|
||||
#
|
||||
# Copyright (c) 2009 Peter Stephenson
|
||||
# Copyright (c) 2011 Guido van Steen
|
||||
# Copyright (c) 2011 Suraj N. Kurapati
|
||||
# Copyright (c) 2011 Sorin Ionescu
|
||||
# Copyright (c) 2011 Vincent Guerci
|
||||
# All rights reserved.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
#
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following
|
||||
# disclaimer in the documentation and/or other materials provided
|
||||
# with the distribution.
|
||||
#
|
||||
# * Neither the name of the FIZSH nor the names of its contributors
|
||||
# may be used to endorse or promote products derived from this
|
||||
# software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
#
|
||||
##############################################################################
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# configuration variables
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
HISTORY_SUBSTRING_SEARCH_HIGHLIGHT_FOUND='bg=magenta,fg=white,bold'
|
||||
HISTORY_SUBSTRING_SEARCH_HIGHLIGHT_NOT_FOUND='bg=red,fg=white,bold'
|
||||
HISTORY_SUBSTRING_SEARCH_GLOBBING_FLAGS='i'
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# the main ZLE widgets
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
function history-substring-search-up() {
|
||||
_history-substring-search-begin
|
||||
|
||||
_history-substring-search-up-history ||
|
||||
_history-substring-search-up-buffer ||
|
||||
_history-substring-search-up-search
|
||||
|
||||
_history-substring-search-end
|
||||
}
|
||||
|
||||
function history-substring-search-down() {
|
||||
_history-substring-search-begin
|
||||
|
||||
_history-substring-search-down-history ||
|
||||
_history-substring-search-down-buffer ||
|
||||
_history-substring-search-down-search
|
||||
|
||||
_history-substring-search-end
|
||||
}
|
||||
|
||||
zle -N history-substring-search-up
|
||||
zle -N history-substring-search-down
|
||||
|
||||
bindkey '\e[A' history-substring-search-up
|
||||
bindkey '\e[B' history-substring-search-down
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# implementation details
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
setopt extendedglob
|
||||
zmodload -F zsh/parameter
|
||||
|
||||
#
|
||||
# We have to "override" some keys and widgets if the
|
||||
# zsh-syntax-highlighting plugin has not been loaded:
|
||||
#
|
||||
# https://github.com/nicoulaj/zsh-syntax-highlighting
|
||||
#
|
||||
if [[ $+functions[_zsh_highlight] -eq 0 ]]; then
|
||||
#
|
||||
# Dummy implementation of _zsh_highlight()
|
||||
# that simply removes existing highlights
|
||||
#
|
||||
function _zsh_highlight() {
|
||||
region_highlight=()
|
||||
}
|
||||
|
||||
#
|
||||
# Remove existing highlights when the user
|
||||
# inserts printable characters into $BUFFER
|
||||
#
|
||||
function ordinary-key-press() {
|
||||
if [[ $KEYS == [[:print:]] ]]; then
|
||||
region_highlight=()
|
||||
fi
|
||||
zle .self-insert
|
||||
}
|
||||
zle -N self-insert ordinary-key-press
|
||||
|
||||
#
|
||||
# Override ZLE widgets to invoke _zsh_highlight()
|
||||
#
|
||||
# https://github.com/nicoulaj/zsh-syntax-highlighting/blob/
|
||||
# bb7fcb79fad797a40077bebaf6f4e4a93c9d8163/zsh-syntax-highlighting.zsh#L121
|
||||
#
|
||||
#--------------8<-------------------8<-------------------8<-----------------
|
||||
#
|
||||
# Copyright (c) 2010-2011 zsh-syntax-highlighting contributors
|
||||
# All rights reserved.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
#
|
||||
# * Redistributions in binary form must reproduce the above copyright
|
||||
# notice, this list of conditions and the following disclaimer in the
|
||||
# documentation and/or other materials provided with the distribution.
|
||||
#
|
||||
# * Neither the name of the zsh-syntax-highlighting contributors nor the
|
||||
# names of its contributors may be used to endorse or promote products
|
||||
# derived from this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
|
||||
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
|
||||
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
|
||||
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
|
||||
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
||||
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
|
||||
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
||||
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
# Load ZSH module zsh/zleparameter, needed to override user defined widgets.
|
||||
zmodload zsh/zleparameter 2>/dev/null || {
|
||||
echo 'zsh-syntax-highlighting: failed loading zsh/zleparameter, exiting.' >&2
|
||||
return -1
|
||||
}
|
||||
|
||||
# Override ZLE widgets to make them invoke _zsh_highlight.
|
||||
for event in ${${(f)"$(zle -la)"}:#(_*|orig-*|.run-help|.which-command)}; do
|
||||
if [[ "$widgets[$event]" == completion:* ]]; then
|
||||
eval "zle -C orig-$event ${${${widgets[$event]}#*:}/:/ } ; $event() { builtin zle orig-$event && _zsh_highlight } ; zle -N $event"
|
||||
else
|
||||
case $event in
|
||||
accept-and-menu-complete)
|
||||
eval "$event() { builtin zle .$event && _zsh_highlight } ; zle -N $event"
|
||||
;;
|
||||
|
||||
# The following widgets should NOT remove any previously
|
||||
# applied highlighting. Therefore we do not remap them.
|
||||
.forward-char|.backward-char|.up-line-or-history|.down-line-or-history)
|
||||
;;
|
||||
|
||||
.*)
|
||||
clean_event=$event[2,${#event}] # Remove the leading dot in the event name
|
||||
case ${widgets[$clean_event]-} in
|
||||
(completion|user):*)
|
||||
;;
|
||||
*)
|
||||
eval "$clean_event() { builtin zle $event && _zsh_highlight } ; zle -N $clean_event"
|
||||
;;
|
||||
esac
|
||||
;;
|
||||
*)
|
||||
;;
|
||||
esac
|
||||
fi
|
||||
done
|
||||
unset event clean_event
|
||||
#-------------->8------------------->8------------------->8-----------------
|
||||
fi
|
||||
|
||||
function _history-substring-search-begin() {
|
||||
_history_substring_search_move_cursor_eol=false
|
||||
_history_substring_search_query_highlight=
|
||||
|
||||
#
|
||||
# Continue using the previous $_history_substring_search_result by default,
|
||||
# unless the current query was cleared or a new/different query was entered.
|
||||
#
|
||||
if [[ -z $BUFFER || $BUFFER != $_history_substring_search_result ]]; then
|
||||
#
|
||||
# For the purpose of highlighting we will also keep
|
||||
# a version without doubly-escaped meta characters.
|
||||
#
|
||||
_history_substring_search_query=$BUFFER
|
||||
|
||||
#
|
||||
# $BUFFER contains the text that is in the command-line currently.
|
||||
# we put an extra "\\" before meta characters such as "\(" and "\)",
|
||||
# so that they become "\\\(" and "\\\)".
|
||||
#
|
||||
_history_substring_search_query_escaped=${BUFFER//(#m)[\][()|\\*?#<>~^]/\\$MATCH}
|
||||
|
||||
#
|
||||
# Find all occurrences of the search query in the history file.
|
||||
#
|
||||
# (k) turns it an array of line numbers.
|
||||
#
|
||||
# (on) seems to remove duplicates, which are default
|
||||
# options. They can be turned off by (ON).
|
||||
#
|
||||
_history_substring_search_matches=(${(kon)history[(R)(#$HISTORY_SUBSTRING_SEARCH_GLOBBING_FLAGS)*${_history_substring_search_query_escaped}*]})
|
||||
|
||||
#
|
||||
# Define the range of values that $_history_substring_search_match_index
|
||||
# can take: [0, $_history_substring_search_matches_count_plus].
|
||||
#
|
||||
_history_substring_search_matches_count=$#_history_substring_search_matches
|
||||
_history_substring_search_matches_count_plus=$(( _history_substring_search_matches_count + 1 ))
|
||||
_history_substring_search_matches_count_sans=$(( _history_substring_search_matches_count - 1 ))
|
||||
|
||||
#
|
||||
# If $_history_substring_search_match_index is equal to
|
||||
# $_history_substring_search_matches_count_plus, this indicates that we
|
||||
# are beyond the beginning of $_history_substring_search_matches.
|
||||
#
|
||||
# If $_history_substring_search_match_index is equal to 0, this indicates
|
||||
# that we are beyond the end of $_history_substring_search_matches.
|
||||
#
|
||||
# If we have initially pressed "up" we have to initialize
|
||||
# $_history_substring_search_match_index to
|
||||
# $_history_substring_search_matches_count_plus so that it will be
|
||||
# decreased to $_history_substring_search_matches_count.
|
||||
#
|
||||
# If we have initially pressed "down" we have to initialize
|
||||
# $_history_substring_search_match_index to
|
||||
# $_history_substring_search_matches_count so that it will be increased to
|
||||
# $_history_substring_search_matches_count_plus.
|
||||
#
|
||||
if [[ $WIDGET == history-substring-search-down ]]; then
|
||||
_history_substring_search_match_index=$_history_substring_search_matches_count
|
||||
else
|
||||
_history_substring_search_match_index=$_history_substring_search_matches_count_plus
|
||||
fi
|
||||
fi
|
||||
}
|
||||
|
||||
function _history-substring-search-end() {
|
||||
_history_substring_search_result=$BUFFER
|
||||
|
||||
# move the cursor to the end of the command line
|
||||
if [[ $_history_substring_search_move_cursor_eol == true ]]; then
|
||||
CURSOR=${#BUFFER}
|
||||
fi
|
||||
|
||||
# highlight command line using zsh-syntax-highlighting
|
||||
_zsh_highlight
|
||||
|
||||
# highlight the search query inside the command line
|
||||
if [[ -n $_history_substring_search_query_highlight && -n $_history_substring_search_query ]]; then
|
||||
#
|
||||
# The following expression yields a variable $MBEGIN, which
|
||||
# indicates the begin position + 1 of the first occurrence
|
||||
# of _history_substring_search_query_escaped in $BUFFER.
|
||||
#
|
||||
: ${(S)BUFFER##(#m$HISTORY_SUBSTRING_SEARCH_GLOBBING_FLAGS)($_history_substring_search_query##)}
|
||||
local begin=$(( MBEGIN - 1 ))
|
||||
local end=$(( begin + $#_history_substring_search_query ))
|
||||
region_highlight+=("$begin $end $_history_substring_search_query_highlight")
|
||||
fi
|
||||
|
||||
# For debugging purposes:
|
||||
# zle -R "mn: "$_history_substring_search_match_index" m#: "${#_history_substring_search_matches}
|
||||
# read -k -t 200 && zle -U $REPLY
|
||||
|
||||
# Exit successfully from the history-substring-search-* widgets.
|
||||
true
|
||||
}
|
||||
|
||||
function _history-substring-search-up-buffer() {
|
||||
#
|
||||
# Check if the UP arrow was pressed to move the cursor within a multi-line
|
||||
# buffer. This amounts to three tests:
|
||||
#
|
||||
# 1. $#buflines -gt 1.
|
||||
#
|
||||
# 2. $CURSOR -ne $#BUFFER.
|
||||
#
|
||||
# 3. Check if we are on the first line of the current multi-line buffer.
|
||||
# If so, pressing UP would amount to leaving the multi-line buffer.
|
||||
#
|
||||
# We check this by adding an extra "x" to $LBUFFER, which makes
|
||||
# sure that xlbuflines is always equal to the number of lines
|
||||
# until $CURSOR (including the line with the cursor on it).
|
||||
#
|
||||
local buflines XLBUFFER xlbuflines
|
||||
buflines=(${(f)BUFFER})
|
||||
XLBUFFER=$LBUFFER"x"
|
||||
xlbuflines=(${(f)XLBUFFER})
|
||||
|
||||
if [[ $#buflines -gt 1 && $CURSOR -ne $#BUFFER && $#xlbuflines -ne 1 ]]; then
|
||||
zle up-line-or-history
|
||||
return true
|
||||
fi
|
||||
|
||||
false
|
||||
}
|
||||
|
||||
function _history-substring-search-down-buffer() {
|
||||
#
|
||||
# Check if the DOWN arrow was pressed to move the cursor within a multi-line
|
||||
# buffer. This amounts to three tests:
|
||||
#
|
||||
# 1. $#buflines -gt 1.
|
||||
#
|
||||
# 2. $CURSOR -ne $#BUFFER.
|
||||
#
|
||||
# 3. Check if we are on the last line of the current multi-line buffer.
|
||||
# If so, pressing DOWN would amount to leaving the multi-line buffer.
|
||||
#
|
||||
# We check this by adding an extra "x" to $RBUFFER, which makes
|
||||
# sure that xrbuflines is always equal to the number of lines
|
||||
# from $CURSOR (including the line with the cursor on it).
|
||||
#
|
||||
local buflines XRBUFFER xrbuflines
|
||||
buflines=(${(f)BUFFER})
|
||||
XRBUFFER="x"$RBUFFER
|
||||
xrbuflines=(${(f)XRBUFFER})
|
||||
|
||||
if [[ $#buflines -gt 1 && $CURSOR -ne $#BUFFER && $#xrbuflines -ne 1 ]]; then
|
||||
zle down-line-or-history
|
||||
return true
|
||||
fi
|
||||
|
||||
false
|
||||
}
|
||||
|
||||
function _history-substring-search-up-history() {
|
||||
#
|
||||
# Behave like up in ZSH, except clear the $BUFFER
|
||||
# when beginning of history is reached like in Fish.
|
||||
#
|
||||
if [[ -z $_history_substring_search_query ]]; then
|
||||
|
||||
# we have reached the absolute top of history
|
||||
if [[ $HISTNO -eq 1 ]]; then
|
||||
BUFFER=
|
||||
|
||||
# going up from somewhere below the top of history
|
||||
else
|
||||
zle up-history
|
||||
fi
|
||||
|
||||
return true
|
||||
fi
|
||||
|
||||
false
|
||||
}
|
||||
|
||||
function _history-substring-search-down-history() {
|
||||
#
|
||||
# Behave like down-history in ZSH, except clear the
|
||||
# $BUFFER when end of history is reached like in Fish.
|
||||
#
|
||||
if [[ -z $_history_substring_search_query ]]; then
|
||||
|
||||
# going down from the absolute top of history
|
||||
if [[ $HISTNO -eq 1 && -z $BUFFER ]]; then
|
||||
BUFFER=${history[1]}
|
||||
_history_substring_search_move_cursor_eol=true
|
||||
|
||||
# going down from somewhere above the bottom of history
|
||||
else
|
||||
zle down-history
|
||||
fi
|
||||
|
||||
return true
|
||||
fi
|
||||
|
||||
false
|
||||
}
|
||||
|
||||
function _history-substring-search-up-search() {
|
||||
_history_substring_search_move_cursor_eol=true
|
||||
|
||||
#
|
||||
# Highlight matches during history-substring-up-search:
|
||||
#
|
||||
# The following constants have been initialized in
|
||||
# _history-substring-search-up/down-search():
|
||||
#
|
||||
# $_history_substring_search_matches is the current list of matches
|
||||
# $_history_substring_search_matches_count is the current number of matches
|
||||
# $_history_substring_search_matches_count_plus is the current number of matches + 1
|
||||
# $_history_substring_search_matches_count_sans is the current number of matches - 1
|
||||
# $_history_substring_search_match_index is the index of the current match
|
||||
#
|
||||
# The range of values that $_history_substring_search_match_index can take
|
||||
# is: [0, $_history_substring_search_matches_count_plus]. A value of 0
|
||||
# indicates that we are beyond the end of
|
||||
# $_history_substring_search_matches. A value of
|
||||
# $_history_substring_search_matches_count_plus indicates that we are beyond
|
||||
# the beginning of $_history_substring_search_matches.
|
||||
#
|
||||
# In _history-substring-search-up-search() the initial value of
|
||||
# $_history_substring_search_match_index is
|
||||
# $_history_substring_search_matches_count_plus. This value is set in
|
||||
# _history-substring-search-begin(). _history-substring-search-up-search()
|
||||
# will initially decrease it to $_history_substring_search_matches_count.
|
||||
#
|
||||
if [[ $_history_substring_search_match_index -ge 2 ]]; then
|
||||
#
|
||||
# Highlight the next match:
|
||||
#
|
||||
# 1. Decrease the value of $_history_substring_search_match_index.
|
||||
#
|
||||
# 2. Use $HISTORY_SUBSTRING_SEARCH_HIGHLIGHT_FOUND
|
||||
# to highlight the current buffer.
|
||||
#
|
||||
(( _history_substring_search_match_index-- ))
|
||||
BUFFER=$history[$_history_substring_search_matches[$_history_substring_search_match_index]]
|
||||
_history_substring_search_query_highlight=$HISTORY_SUBSTRING_SEARCH_HIGHLIGHT_FOUND
|
||||
|
||||
elif [[ $_history_substring_search_match_index -eq 1 ]]; then
|
||||
#
|
||||
# We will move beyond the end of $_history_substring_search_matches:
|
||||
#
|
||||
# 1. Decrease the value of $_history_substring_search_match_index.
|
||||
#
|
||||
# 2. Save the current buffer in $_history_substring_search_old_buffer,
|
||||
# so that it can be retrieved by
|
||||
# _history-substring-search-down-search() later.
|
||||
#
|
||||
# 3. Make $BUFFER equal to $_history_substring_search_query.
|
||||
#
|
||||
# 4. Use $HISTORY_SUBSTRING_SEARCH_HIGHLIGHT_NOT_FOUND
|
||||
# to highlight the current buffer.
|
||||
#
|
||||
(( _history_substring_search_match_index-- ))
|
||||
_history_substring_search_old_buffer=$BUFFER
|
||||
BUFFER=$_history_substring_search_query
|
||||
_history_substring_search_query_highlight=$HISTORY_SUBSTRING_SEARCH_HIGHLIGHT_NOT_FOUND
|
||||
|
||||
elif [[ $_history_substring_search_match_index -eq $_history_substring_search_matches_count_plus ]]; then
|
||||
#
|
||||
# We were beyond the beginning of $_history_substring_search_matches but
|
||||
# UP makes us move back to $_history_substring_search_matches:
|
||||
#
|
||||
# 1. Decrease the value of $_history_substring_search_match_index.
|
||||
#
|
||||
# 2. Restore $BUFFER from $_history_substring_search_old_buffer.
|
||||
#
|
||||
# 3. Use $HISTORY_SUBSTRING_SEARCH_HIGHLIGHT_FOUND
|
||||
# to highlight the current buffer.
|
||||
#
|
||||
(( _history_substring_search_match_index-- ))
|
||||
BUFFER=$_history_substring_search_old_buffer
|
||||
_history_substring_search_query_highlight=$HISTORY_SUBSTRING_SEARCH_HIGHLIGHT_FOUND
|
||||
fi
|
||||
}
|
||||
|
||||
function _history-substring-search-down-search() {
|
||||
_history_substring_search_move_cursor_eol=true
|
||||
|
||||
#
|
||||
# Highlight matches during history-substring-up-search:
|
||||
#
|
||||
# The following constants have been initialized in
|
||||
# _history-substring-search-up/down-search():
|
||||
#
|
||||
# $_history_substring_search_matches is the current list of matches
|
||||
# $_history_substring_search_matches_count is the current number of matches
|
||||
# $_history_substring_search_matches_count_plus is the current number of matches + 1
|
||||
# $_history_substring_search_matches_count_sans is the current number of matches - 1
|
||||
# $_history_substring_search_match_index is the index of the current match
|
||||
#
|
||||
# The range of values that $_history_substring_search_match_index can take
|
||||
# is: [0, $_history_substring_search_matches_count_plus]. A value of 0
|
||||
# indicates that we are beyond the end of
|
||||
# $_history_substring_search_matches. A value of
|
||||
# $_history_substring_search_matches_count_plus indicates that we are beyond
|
||||
# the beginning of $_history_substring_search_matches.
|
||||
#
|
||||
# In _history-substring-search-down-search() the initial value of
|
||||
# $_history_substring_search_match_index is
|
||||
# $_history_substring_search_matches_count. This value is set in
|
||||
# _history-substring-search-begin().
|
||||
# _history-substring-search-down-search() will initially increase it to
|
||||
# $_history_substring_search_matches_count_plus.
|
||||
#
|
||||
if [[ $_history_substring_search_match_index -le $_history_substring_search_matches_count_sans ]]; then
|
||||
#
|
||||
# Highlight the next match:
|
||||
#
|
||||
# 1. Increase $_history_substring_search_match_index by 1.
|
||||
#
|
||||
# 2. Use $HISTORY_SUBSTRING_SEARCH_HIGHLIGHT_FOUND
|
||||
# to highlight the current buffer.
|
||||
#
|
||||
(( _history_substring_search_match_index++ ))
|
||||
BUFFER=$history[$_history_substring_search_matches[$_history_substring_search_match_index]]
|
||||
_history_substring_search_query_highlight=$HISTORY_SUBSTRING_SEARCH_HIGHLIGHT_FOUND
|
||||
|
||||
elif [[ $_history_substring_search_match_index -eq $_history_substring_search_matches_count ]]; then
|
||||
#
|
||||
# We will move beyond the beginning of $_history_substring_search_matches:
|
||||
#
|
||||
# 1. Increase $_history_substring_search_match_index by 1.
|
||||
#
|
||||
# 2. Save the current buffer in $_history_substring_search_old_buffer, so
|
||||
# that it can be retrieved by _history-substring-search-up-search()
|
||||
# later.
|
||||
#
|
||||
# 3. Make $BUFFER equal to $_history_substring_search_query.
|
||||
#
|
||||
# 4. Use $HISTORY_SUBSTRING_SEARCH_HIGHLIGHT_NOT_FOUND
|
||||
# to highlight the current buffer.
|
||||
#
|
||||
(( _history_substring_search_match_index++ ))
|
||||
_history_substring_search_old_buffer=$BUFFER
|
||||
BUFFER=$_history_substring_search_query
|
||||
_history_substring_search_query_highlight=$HISTORY_SUBSTRING_SEARCH_HIGHLIGHT_NOT_FOUND
|
||||
|
||||
elif [[ $_history_substring_search_match_index -eq 0 ]]; then
|
||||
#
|
||||
# We were beyond the end of $_history_substring_search_matches but DOWN
|
||||
# makes us move back to the $_history_substring_search_matches:
|
||||
#
|
||||
# 1. Increase $_history_substring_search_match_index by 1.
|
||||
#
|
||||
# 2. Restore $BUFFER from $_history_substring_search_old_buffer.
|
||||
#
|
||||
# 3. Use $HISTORY_SUBSTRING_SEARCH_HIGHLIGHT_FOUND
|
||||
# to highlight the current buffer.
|
||||
#
|
||||
(( _history_substring_search_match_index++ ))
|
||||
BUFFER=$_history_substring_search_old_buffer
|
||||
_history_substring_search_query_highlight=$HISTORY_SUBSTRING_SEARCH_HIGHLIGHT_FOUND
|
||||
fi
|
||||
}
|
||||
|
||||
# -*- mode: zsh; sh-indentation: 2; indent-tabs-mode: nil; sh-basic-offset: 2; -*-
|
||||
# vim: ft=zsh sw=2 ts=2 et
|
9
plugins/kate/kate.plugin.zsh
Normal file
9
plugins/kate/kate.plugin.zsh
Normal file
|
@ -0,0 +1,9 @@
|
|||
|
||||
# Kate
|
||||
# Start kate always silent
|
||||
alias kate='kate >/dev/null 2>&1'
|
||||
|
||||
function kt () {
|
||||
cd $1
|
||||
kate $1
|
||||
}
|
174
plugins/knife/_knife
Normal file
174
plugins/knife/_knife
Normal file
|
@ -0,0 +1,174 @@
|
|||
#compdef knife

# These flags should be available everywhere according to man knife
knife_general_flags=( --help --server-url --key --config --editor --format --log_level --logfile --no-editor --user --print-after --version --yes )

# knife has a very special syntax; some example calls are:
#   knife status
#   knife cookbook list
#   knife role show ROLENAME
#   knife data bag show DATABAGNAME
#   knife role show ROLENAME --attribute ATTRIBUTENAME
#   knife cookbook show COOKBOOKNAME COOKBOOKVERSION recipes

# The -Q switch in compadd allows completion of multi-word entries such as
# "data bag" in a single round and keeps zsh from inserting a backslash to
# escape the space.
_knife() {
  local curcontext="$curcontext" state line
  typeset -A opt_args
  cloudproviders=(bluebox ec2 rackspace slicehost terremark)
  _arguments \
    '1: :->knifecmd' \
    '2: :->knifesubcmd' \
    '3: :->knifesubcmd2' \
    '4: :->knifesubcmd3' \
    '5: :->knifesubcmd4' \
    '6: :->knifesubcmd5'

  case $state in
    knifecmd)
      compadd -Q "$@" bootstrap client configure cookbook "cookbook site" "data bag" exec index node recipe role search ssh status windows $cloudproviders
      ;;
    knifesubcmd)
      case $words[2] in
        (bluebox|ec2|rackspace|slicehost|terremark)
          compadd "$@" server images
          ;;
        client)
          compadd -Q "$@" "bulk delete" list create show delete edit reregister
          ;;
        configure)
          compadd "$@" client
          ;;
        cookbook)
          compadd -Q "$@" test list create download delete "metadata from" show "bulk delete" metadata upload
          ;;
        node)
          compadd -Q "$@" "from file" create show edit delete list run_list "bulk delete"
          ;;
        recipe)
          compadd "$@" list
          ;;
        role)
          compadd -Q "$@" "bulk delete" create delete edit "from file" list show
          ;;
        windows)
          compadd "$@" bootstrap
          ;;
        *)
          _arguments '2:Subsubcommands:($(_knife_options1))'
      esac
      ;;
    knifesubcmd2)
      case $words[3] in
        server)
          compadd "$@" list create delete
          ;;
        images)
          compadd "$@" list
          ;;
        site)
          compadd "$@" vendor show share search download list unshare
          ;;
        (show|delete|edit)
          _arguments '3:Subsubcommands:($(_chef_$words[2]s_remote))'
          ;;
        (upload|test)
          _arguments '3:Subsubcommands:($(_chef_$words[2]s_local) --all)'
          ;;
        list)
          compadd -a "$@" knife_general_flags
          ;;
        bag)
          compadd -Q "$@" show edit list "from file" create delete
          ;;
        *)
          _arguments '3:Subsubcommands:($(_knife_options2))'
      esac
      ;;
    knifesubcmd3)
      case $words[3] in
        show)
          case $words[2] in
            cookbook)
              versioncomp=1
              _arguments '4:Cookbookversions:($(_cookbook_versions) latest)'
              ;;
            (node|client|role)
              compadd "$@" --attribute
          esac
      esac
      case $words[4] in
        (show|edit)
          _arguments '4:Subsubsubcommands:($(_chef_$words[2]_$words[3]s_remote))'
          ;;
        file)
          _arguments '*:file or directory:_files -g "*.(rb|json)"'
          ;;
        list)
          compadd -a "$@" knife_general_flags
          ;;
        *)
          _arguments '*:Subsubcommands:($(_knife_options3))'
      esac
      ;;
    knifesubcmd4)
      if (( versioncomp > 0 )); then
        compadd "$@" attributes definitions files libraries providers recipes resources templates
      else
        _arguments '*:Subsubcommands:($(_knife_options2))'
      fi
      ;;
    knifesubcmd5)
      _arguments '*:Subsubcommands:($(_knife_options3))'
  esac
}

# Helper functions to provide the argument completion for several depths of commands
_knife_options1() {
  ( for line in $( knife $words[2] --help | grep -v "^knife" ); do echo $line | grep "\-\-"; done )
}

_knife_options2() {
  ( for line in $( knife $words[2] $words[3] --help | grep -v "^knife" ); do echo $line | grep "\-\-"; done )
}

_knife_options3() {
  ( for line in $( knife $words[2] $words[3] $words[4] --help | grep -v "^knife" ); do echo $line | grep "\-\-"; done )
}

# The chef_x_remote functions use knife to get a list of objects of type x on the server
_chef_roles_remote() {
  (knife role list | grep \" | awk '{print $1}' | awk -F"," '{print $1}' | awk -F"\"" '{print $2}')
}

_chef_clients_remote() {
  (knife client list | grep \" | awk '{print $1}' | awk -F"," '{print $1}' | awk -F"\"" '{print $2}')
}

_chef_nodes_remote() {
  (knife node list | grep \" | awk '{print $1}' | awk -F"," '{print $1}' | awk -F"\"" '{print $2}')
}

_chef_cookbooks_remote() {
  (knife cookbook list | grep \" | awk '{print $1}' | awk -F"," '{print $1}' | awk -F"\"" '{print $2}')
}

_chef_sitecookbooks_remote() {
  (knife cookbook site list | grep \" | awk '{print $1}' | awk -F"," '{print $1}' | awk -F"\"" '{print $2}')
}

_chef_data_bags_remote() {
  (knife data bag list | grep \" | awk '{print $1}' | awk -F"," '{print $1}' | awk -F"\"" '{print $2}')
}

# The chef_x_local functions use the knife config to find the paths of relevant objects x to be uploaded to the server
_chef_cookbooks_local() {
  (for i in $( grep cookbook_path $HOME/.chef/knife.rb | awk 'BEGIN {FS = "[" }; {print $2}' | sed 's/\,//g' | sed "s/'//g" | sed 's/\(.*\)]/\1/' ); do ls $i; done)
}

# This function extracts the available cookbook versions on the chef server
_cookbook_versions() {
  (knife cookbook show $words[4] | grep -v $words[4] | grep -v -E '\]|\[|\{|\}' | sed 's/ //g' | sed 's/"//g')
}

_knife "$@"
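For orientation, and as an assumption rather than something shown in this diff: `knife role list` and the similar list commands of that era printed quoted names, which the _chef_*_remote helpers above strip down to bare words for compadd, roughly:

#   $ knife role list          (hypothetical output)
#   [
#     "webserver",
#     "database"
#   ]
#   -> the grep/awk pipeline yields: webserver database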
4 plugins/powder/_powder Normal file
@@ -0,0 +1,4 @@
#compdef powder
#autoload

compadd `powder help | grep powder | cut -d " " -f 4`
@@ -1,13 +1,5 @@
# Rails 3 aliases, backwards-compatible with Rails 2.

function _bundle_command {
  if command -v bundle && [ -e "Gemfile" ]; then
    bundle exec $@
  else
    $@
  fi
}

function _rails_command () {
  if [ -e "script/server" ]; then
    ruby script/$@

@@ -25,6 +17,3 @@ alias rp='_rails_command plugin'
alias rs='_rails_command server'
alias rsd='_rails_command server --debugger'
alias devlog='tail -f log/development.log'

alias rspec='_bundle_command rspec'
alias cuke='_bundle_command cucumber'
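For illustration only (not in the diff): further shortcuts can reuse the same wrapper pattern; the alias below is a hypothetical example.

alias rg='_rails_command generate'   # hypothetical: wraps any other rails subcommand the same way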
@@ -51,7 +51,7 @@ _1st_arguments=(
  'keys:find all keys matching the given pattern'
  'lastsave:get the UNIX timestamp of the last successful save to disk'
  'lindex:get an element from a list by its index'
  'linset:insert an element before or after another element in a list'
  'linsert:insert an element before or after another element in a list'
  'llen:get the length of a list'
  'lpop:remove and get the first element in a list'
  'lpush:prepend a value to a list'
@@ -1,23 +1,62 @@
#
# INSTRUCTIONS
#
# To enable agent forwarding support, add the following to
# your .zshrc file:
#
#   zstyle :omz:plugins:ssh-agent agent-forwarding on
#
# To load multiple identities, use the identities style. For
# example:
#
#   zstyle :omz:plugins:ssh-agent identities id_rsa id_rsa2 id_github
#
#
# CREDITS
#
# Based on code from Joseph M. Reagle
# http://www.cygwin.com/ml/cygwin/2001-06/msg00537.html
#
# Agent forwarding support based on ideas from
# Florent Thoumie and Jonas Pfenniger
#

local SSH_ENV=$HOME/.ssh/environment-$HOST
local _plugin__ssh_env=$HOME/.ssh/environment-$HOST
local _plugin__forwarding

function start_agent {
  /usr/bin/env ssh-agent | sed 's/^echo/#echo/' > ${SSH_ENV}
  chmod 600 ${SSH_ENV}
  . ${SSH_ENV} > /dev/null
  /usr/bin/ssh-add;
function _plugin__start_agent()
{
  local -a identities

  # start ssh-agent and set up the environment
  /usr/bin/env ssh-agent | sed 's/^echo/#echo/' > ${_plugin__ssh_env}
  chmod 600 ${_plugin__ssh_env}
  . ${_plugin__ssh_env} > /dev/null

  # load identities
  zstyle -a :omz:plugins:ssh-agent identities identities
  echo starting...
  /usr/bin/ssh-add $HOME/.ssh/${^identities}
}

# Source SSH settings, if applicable
# test if agent forwarding is enabled
zstyle -b :omz:plugins:ssh-agent agent-forwarding _plugin__forwarding
if [[ ${_plugin__forwarding} == "yes" && -n "$SSH_AUTH_SOCK" ]]; then
  # Add a nifty symlink for screen/tmux if agent forwarding is enabled
  [[ -L $SSH_AUTH_SOCK ]] || ln -sf "$SSH_AUTH_SOCK" /tmp/ssh-agent-$USER-screen

if [ -f "${SSH_ENV}" ]; then
  . ${SSH_ENV} > /dev/null
elif [ -f "${_plugin__ssh_env}" ]; then
  # Source SSH settings, if applicable
  . ${_plugin__ssh_env} > /dev/null
  ps -ef | grep ${SSH_AGENT_PID} | grep ssh-agent$ > /dev/null || {
    start_agent;
    _plugin__start_agent;
  }
else
  start_agent;
  _plugin__start_agent;
fi

# tidy up after ourselves
unfunction _plugin__start_agent
unset _plugin__forwarding
unset _plugin__ssh_env
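Pulling the configuration comments above together, a minimal ~/.zshrc sketch (the identity file names are placeholders):

zstyle :omz:plugins:ssh-agent agent-forwarding on
zstyle :omz:plugins:ssh-agent identities id_rsa id_rsa2 id_github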
@@ -1,5 +1,5 @@
function svn_prompt_info {
  if [[ -d .svn ]]; then
  if [ in_svn ]; then
    echo "$ZSH_PROMPT_BASE_COLOR$ZSH_THEME_SVN_PROMPT_PREFIX\
$ZSH_THEME_REPO_NAME_COLOR$(svn_get_repo_name)$ZSH_PROMPT_BASE_COLOR$ZSH_THEME_SVN_PROMPT_SUFFIX$ZSH_PROMPT_BASE_COLOR$(svn_dirty)$ZSH_PROMPT_BASE_COLOR"
  fi

@@ -28,7 +28,7 @@ function svn_get_rev_nr {

function svn_dirty_choose {
  if [ in_svn ]; then
  s=$(svn status 2>/dev/null)
  s=$(svn status|grep -E '^\s*[ACDIM!?L]' 2>/dev/null)
  if [ $s ]; then
    echo $1
  else
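An aside, not part of the diff: in the new svn_dirty_choose line the 2>/dev/null is attached to grep rather than svn, so svn's own warnings still reach the terminal. A hedged alternative silences svn itself:

s=$(svn status 2>/dev/null | grep -E '^\s*[ACDIM!?L]')   # sketch: redirect svn's stderr, then filter status flags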
280 plugins/taskwarrior/_task Normal file
@@ -0,0 +1,280 @@
#compdef task
#
# zsh completion for taskwarrior
#
# Copyright 2010 - 2011 Johannes Schlatow
# Copyright 2009 P.C. Shyamshankar
# All rights reserved.
#
# This script is part of the taskwarrior project.
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 2 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the
#
# Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor,
# Boston, MA
# 02110-1301
# USA
#
typeset -g _task_cmds _task_projects _task_tags _task_config _task_modifiers
_task_projects=($(task _projects))
_task_tags=($(task _tags))
_task_ids=($(task _ids))
_task_config=($(task _config))
_task_modifiers=(
  'before' \
  'after' \
  'none' \
  'any' \
  'is' \
  'isnt' \
  'has' \
  'hasnt' \
  'startswith' \
  'endswith' \
  'word' \
  'noword'
)
_task_cmds=($(task _commands))
_task_zshcmds=( ${(f)"$(task _zshcommands)"} )


_task_idCmds=(
  'append' \
  'prepend' \
  'annotate' \
  'denotate' \
  'edit' \
  'duplicate' \
  'info' \
  'start' \
  'stop' \
  'done'
)

_task_idCmdsDesc=(
  'append:Appends more description to an existing task.' \
  'prepend:Prepends more description to an existing task.' \
  'annotate:Adds an annotation to an existing task.' \
  'denotate:Deletes an annotation of an existing task.' \
  'edit:Launches an editor to let you modify a task directly.' \
  'duplicate:Duplicates the specified task, and allows modifications.' \
  'info:Shows all data, metadata for specified task.' \
  'start:Marks specified task as started.' \
  'stop:Removes the start time from a task.' \
  'done:Marks the specified task as completed.'
)

_task() {
  _arguments -s -S \
    "*::task command:_task_commands"
  return 0
}

local -a reply args word
word=$'[^\0]#\0'

# priorities
local -a task_priorities
_regex_words values 'task priorities' \
  'H:High' \
  'M:Middle' \
  'L:Low'
task_priorities=("$reply[@]")

# projects
local -a task_projects
task_projects=(
  /"$word"/
  ":values:task projects:compadd -a _task_projects"
)

local -a _task_dates
_regex_words values 'task dates' \
  'tod*ay:Today' \
  'yes*terday:Yesterday' \
  'tom*orrow:Tomorrow' \
  'sow:Start of week' \
  'soww:Start of work week' \
  'socw:Start of calendar week' \
  'som:Start of month' \
  'soy:Start of year' \
  'eow:End of week' \
  'eoww:End of work week' \
  'eocw:End of calendar week' \
  'eom:End of month' \
  'eoy:End of year' \
  'mon:Monday' \
  'tue:Tuesday' \
  'wed:Wednesday' \
  'thu:Thursday' \
  'fri:Friday' \
  'sat:Saturday' \
  'sun:Sunday'
_task_dates=("$reply[@]")

local -a _task_reldates
_regex_words values 'task reldates' \
  'hrs:n hours' \
  'day:n days' \
  '1st:first' \
  '2nd:second' \
  '3rd:third' \
  'th:4th, 5th, etc.' \
  'wks:weeks'
_task_reldates=("$reply[@]")

task_dates=(
  \( "$_task_dates[@]" \|
     \( /$'[0-9][0-9]#'/- \( "$_task_reldates[@]" \) \)
  \)
)

_regex_words values 'task frequencies' \
  'daily:Every day' \
  'day:Every day' \
  'weekdays:Every day skipping weekend days' \
  'weekly:Every week' \
  'biweekly:Every two weeks' \
  'fortnight:Every two weeks' \
  'quarterly:Every three months' \
  'semiannual:Every six months' \
  'annual:Every year' \
  'yearly:Every year' \
  'biannual:Every two years' \
  'biyearly:Every two years'
_task_freqs=("$reply[@]")

local -a _task_frequencies
_regex_words values 'task frequencies' \
  'd:days' \
  'w:weeks' \
  'q:quarters' \
  'y:years'
_task_frequencies=("$reply[@]")

task_freqs=(
  \( "$_task_freqs[@]" \|
     \( /$'[0-9][0-9]#'/- \( "$_task_frequencies[@]" \) \)
  \)
)

# attributes
local -a task_attributes
_regex_words -t ':' default 'task attributes' \
  'pro*ject:Project name:$task_projects' \
  'du*e:Due date:$task_dates' \
  'wa*it:Date until task becomes pending:$task_dates' \
  're*cur:Recurrence frequency:$task_freqs' \
  'pri*ority:priority:$task_priorities' \
  'un*til:Recurrence end date:$task_dates' \
  'fg:Foreground color' \
  'bg:Background color' \
  'li*mit:Desired number of rows in report'
task_attributes=("$reply[@]")

args=(
  \( "$task_attributes[@]" \|
     \( /'(project|due|wait|recur|priority|until|fg|bg|limit).'/- \( /$'[^:]#:'/ ":default:modifiers:compadd -S ':' -a _task_modifiers" \) \) \|
     \( /'(rc).'/- \( /$'[^:]#:'/ ":arguments:config:compadd -S ':' -a _task_config" \) \) \|
     \( /'(+|-)'/- \( /"$word"/ ":values:remove tag:compadd -a _task_tags" \) \) \|
     \( /"$word"/ \)
  \) \#
)
_regex_arguments _task_attributes "${args[@]}"

## task commands

# default completion
(( $+functions[_task_default] )) ||
_task_default() {
  _task_attributes "$@"
}

# commands expecting an ID
(( $+functions[_task_id] )) ||
_task_id() {
  if (( CURRENT < 3 )); then
    # update IDs
    _task_zshids=( ${(f)"$(task _zshids)"} )
    _describe -t values 'task IDs' _task_zshids
  else
    _task_attributes "$@"
  fi
}

# merge completion
(( $+functions[_task_merge] )) ||
_task_merge() {
  # TODO match URIs in .taskrc
  _files
}

# push completion
(( $+functions[_task_push] )) ||
_task_push() {
  # TODO match URIs in .taskrc
  _files
}

# pull completion
(( $+functions[_task_pull] )) ||
_task_pull() {
  # TODO match URIs in .taskrc
  _files
}


# modify (task [0-9]* ...) completion
(( $+functions[_task_modify] )) ||
_task_modify() {
  _describe -t commands 'task command' _task_idCmdsDesc
  _task_attributes "$@"
}

## first level completion => task sub-command completion
(( $+functions[_task_commands] )) ||
_task_commands() {
  local cmd ret=1
  if (( CURRENT == 1 )); then
    # update IDs
    _task_zshids=( ${(f)"$(task _zshids)"} )

    _describe -t commands 'task command' _task_zshcmds
    _describe -t values 'task IDs' _task_zshids
    # TODO match more than one ID
  elif [[ $words[1] =~ ^[0-9]*$ ]] then
    _call_function ret _task_modify
    return ret
  else
    # local curcontext="${curcontext}"
    # cmd="${_task_cmds[(r)$words[1]:*]%%:*}"
    cmd="${_task_cmds[(r)$words[1]]}"
    idCmd="${(M)_task_idCmds[@]:#$words[1]}"
    if (( $#cmd )); then
      # curcontext="${curcontext%:*:*}:task-${cmd}"

      if (( $#idCmd )); then
        _call_function ret _task_id
      else
        _call_function ret _task_${cmd} ||
          _call_function ret _task_default ||
            _message "No command remaining."
      fi
    else
      _message "Unknown subcommand ${cmd}"
    fi
    return ret
  fi
}
21 plugins/taskwarrior/taskwarrior.plugin.zsh Normal file
@@ -0,0 +1,21 @@
################################################################################
# Author: Pete Clark
# Email: pete[dot]clark[at]gmail[dot]com
# Version: 0.1 (05/24/2011)
# License: WTFPL <http://sam.zoy.org/wtfpl/>
#
# This oh-my-zsh plugin adds smart tab completion for
# TaskWarrior <http://taskwarrior.org/>. It uses the zsh tab completion
# script (_task) distributed with TaskWarrior for the completion definitions.
#
# Typing task[tabtab] will give you a list of current tasks, task 66[tabtab]
# gives a list of available modifications for that task, etc.
################################################################################

zstyle ':completion:*:*:task:*' verbose yes
zstyle ':completion:*:*:task:*:descriptions' format '%U%B%d%b%u'

zstyle ':completion:*:*:task:*' group-name ''

alias t=task
compdef _task t=task
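Not shown in the diff: a hedged sketch of enabling the plugin in ~/.zshrc (the other plugin name is illustrative); the _task completion file above is shipped alongside it and is picked up once the plugin is active.

plugins=(git taskwarrior)
# task <Tab><Tab>     -> list of current tasks
# task 66 <Tab><Tab>  -> available modifications for task 66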