#!/bin/bash

# Settings, functions and aliases for commands
# =============================================================================

# Enable simple aliases to be sudo'ed. ("sudone"?)
# http://www.gnu.org/software/bash/manual/bashref.html#Aliases says: "If the
# last character of the alias value is a space or tab character, then the next
# command word following the alias is also checked for alias expansion."
alias sudo='sudo ';

# Paths and binaries
# -----------------------------------------------------------------------------

# List the directories we want to add to the PATH variable, if they exist.
paths_to_add=(
	# Private "bin"
	"$HOME/bin"
	# Homebrew, Rudix (and various other distributions and local installations)
	/usr/local/{,s}bin
	# MacPorts
	/opt/local/{,s}bin
	# System
	/{,s}bin
	/usr/{,s}bin
	# X binaries
	/usr/X11/bin
);

# Create an array of directories currently in the PATH variable.
oldIFS="$IFS";
IFS=:;
set -- $PATH;
IFS="$oldIFS";
unset oldIFS;
old_paths=("$@");

# Construct an array of the directories in the new PATH, preferring our paths
# to the predefined ones.
new_paths=();
for path_to_add in "${paths_to_add[@]}"; do
	[ -d "$path_to_add" ] && new_paths+=("$path_to_add");
done;
for old_path in "${old_paths[@]}"; do
	[ -d "$old_path" ] || continue;
	for new_path in "${new_paths[@]}"; do
		[ "${old_path%%/}" = "${new_path%%/}" ] && continue 2;
	done;
	new_paths+=("$old_path");
done;

# Now implode everything into the new PATH variable.
printf -v PATH "%s:" "${new_paths[@]}";
export PATH="${PATH%:}";
unset {old,new}_path{,s} path{s,}_to_add;

# Prefer Homebrew's netcat
[ -x /usr/local/bin/netcat ] && hash -p /usr/local/bin/netcat nc;

# OS X's file system is case-insensitive by default, so use aliases to get
# "GET"/"HEAD"/… working. (Otherwise "HEAD" would execute "/usr/bin/head".)
[[ "$OSTYPE" =~ ^darwin ]] && for method in GET HEAD POST PUT DELETE TRACE OPTIONS; do
	alias "$method"="/usr/bin/lwp-request -m $method";
done;
unset method;

# Confirm before overwriting
# -----------------------------------------------------------------------------

# I know it is bad practice to override the default commands, but this is for
# my own safety. If you really want the original "instakill" versions, you can
# use "command rm", "\rm", or "/bin/rm" inside your own commands, aliases, or
# shell functions. Note that separate scripts are not affected by the aliases
# defined here.
alias cp='cp -i';
alias mv='mv -i';
alias rm='rm -i';

# Editors and pagers
# -----------------------------------------------------------------------------
alias nano='nano -w';
alias pico='nano';
alias vi='vim';
export EDITOR='vim';

# Make sure "view" as-is works when stdin is not a terminal and prevent the
# normal ensuing keyboard input chaos.
function view {
	local args=("$@");
	if ! [ -t 0 ] && ! (($#)); then
		echo 'Warning: Input is not from a terminal. Forcing "view -".';
		args=('-');
	fi;
	command view "${args[@]}";
}
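
# For example (a minimal illustration of the wrapper above, with a made-up
# file name):
#
#   $ view file.txt        # opens the file read-only, as usual
#   $ echo hello | view    # now reads the piped input instead of the keyboard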

# Make less the default pager, and specify some useful defaults.
less_options=(
	# If the entire text fits on one screen, just show it and quit. (Be more
	# like "cat" and less like "more".)
	--quit-if-one-screen
	# Do not clear the screen first.
	--no-init
	# Like "smartcase" in Vim: ignore case unless the search pattern is mixed.
	--ignore-case
	# Do not automatically wrap long lines.
	--chop-long-lines
	# Allow ANSI colour escapes, but no other escapes.
	--RAW-CONTROL-CHARS
	# Do not ring the bell when trying to scroll past the end of the buffer.
	--quiet
	# Do not complain when we are on a dumb terminal.
	--dumb
);
export LESS="${less_options[*]}";
unset less_options;
export PAGER='less';

# Make "less" transparently unpack archives etc.
if [ -x /usr/bin/lesspipe ]; then
	eval $(/usr/bin/lesspipe);
elif command -v lesspipe.sh > /dev/null; then
	# MacPorts recommended "/opt/local/bin/lesspipe.sh", but this is more
	# portable for people that have it in another directory in their $PATH.
	eval $(lesspipe.sh);
fi;

# Edit and reload the profile.
function pro {
	vi +3tabn -p ~/.bash_profile ~/.bash/{shell,commands,prompt,extra};
	source ~/.bash_profile;
}

# Colors
# -----------------------------------------------------------------------------

# Always use colour output for "ls".
if [[ "$OSTYPE" =~ ^darwin ]]; then
	alias ls='command ls -G';
else
	alias ls='command ls --color=auto';
	export LS_COLORS='no=00:fi=00:di=01;34:ln=01;36:pi=40;33:so=01;35:do=01;35:bd=40;33;01:cd=40;33;01:or=40;31;01:ex=01;32:*.tar=01;31:*.tgz=01;31:*.arj=01;31:*.taz=01;31:*.lzh=01;31:*.zip=01;31:*.z=01;31:*.Z=01;31:*.gz=01;31:*.bz2=01;31:*.deb=01;31:*.rpm=01;31:*.jar=01;31:*.jpg=01;35:*.jpeg=01;35:*.gif=01;35:*.bmp=01;35:*.pbm=01;35:*.pgm=01;35:*.ppm=01;35:*.tga=01;35:*.xbm=01;35:*.xpm=01;35:*.tif=01;35:*.tiff=01;35:*.png=01;35:*.mov=01;35:*.mpg=01;35:*.mpeg=01;35:*.avi=01;35:*.fli=01;35:*.gl=01;35:*.dl=01;35:*.xcf=01;35:*.xwd=01;35:*.ogg=01;35:*.mp3=01;35:*.wav=01;35:';
fi;

# Autocompletion
# -----------------------------------------------------------------------------
complete -d cd pushd rmdir;
complete -u finger mail;
complete -v set unset;
complete -A command wtfis;
[ -d ~/.bash/completion ] && for file in ~/.bash/completion/*; do
	[ -f "$file" ] && source "$file";
done;
unset file;

# Utilities
# -----------------------------------------------------------------------------

# Open the manual page for the last command you executed.
function lman {
	set -- $(fc -nl -1);
	while [ $# -gt 0 -a '(' "sudo" = "$1" -o "-" = "${1:0:1}" ')' ]; do
		shift;
	done;
	cmd="$(basename "$1")";
	man "$cmd" || help "$cmd";
}

# Clipboard access. I created these aliases to have the same command on
# Cygwin, Linux and OS X.
if command -v pbpaste >/dev/null; then
	alias getclip='pbpaste';
	alias putclip='pbcopy';
elif command -v xclip >/dev/null; then
	alias getclip='xclip --out';
	alias putclip='xclip --in';
fi;

# Make "ctags" recurse by default.
if command -v ctags > /dev/null; then
	alias ctags='ctags --exclude={docs,cache,tiny_mce,layout} --recurse';
fi;

# OS X has no "md5sum", so use "md5" as a fall-back.
command -v md5sum > /dev/null || alias md5sum='md5';

# Make the "sudo" prompt more useful, without requiring access to "visudo".
export SUDO_PROMPT='[sudo] password for %u on %h: ';

# Version control
# -----------------------------------------------------------------------------

# Use "colordiff" or "highlight" to colour diffs.
if command -v colordiff > /dev/null; then
	alias difflight='colordiff | less -XFIRd';
elif command -v highlight > /dev/null; then
	alias difflight='highlight --dark-red ^-.* | highlight --dark-green ^+.* | highlight --yellow ^Only.* | highlight --yellow ^Files.*differ$ | less -XFIRd';
else
	alias difflight='less -XFIRd';
fi;

# Git autocompletion.
# TODO: move this to a separate .bash/completion/git or some such.
[ -f ~/git-completion.bash ] && source ~/git-completion.bash;

# Show the current Git branch, if any.
# (This is useful in the shell prompt.)
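# For example (the branch name is, of course, just an illustration):
#
#   $ git-show-branch
#   (master)
#
# and, with a detached HEAD:
#
#   $ git-show-branch
#   (no branch)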
function git-show-branch {
	branch="$(git symbolic-ref -q HEAD 2>/dev/null)";
	ret=$?;
	case $ret in
		0) echo "(${branch##*/})";;
		1) echo '(no branch)';;
		128) echo -n;;
		*) echo "[unknown git exit code: $ret]";;
	esac;
	return $ret;
}

# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
# THE FUNCTIONS BELOW SHOULD BE MOVED TO SEPARATE SCRIPTS INSIDE ~/bin.
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------

# Utilities
# -----------------------------------------------------------------------------

# Show a unified diff, colourised if possible and paged if necessary.
function udiff {
	if command -v colordiff > /dev/null; then
		colordiff -wU4 "$@" | $PAGER;
		return ${PIPESTATUS[0]};
	elif command -v git > /dev/null && ! [[ " $* " =~ \ /dev/fd ]]; then
		git diff --no-index "$@";
		return $?;
	fi;
	diff -wU4 -x .svn "$@" | $PAGER;
	return ${PIPESTATUS[0]};
}

# Move the given file(s) to the Trash.
function trash {
	local path;
	for path; do
		# Make relative paths absolute.
		[ "${path:0:1}" = '/' ] || path="$PWD/$path";

		# Execute the AppleScript to nudge Finder.
		echo "$(cat <<-EOD
			tell application "Finder"
				delete POSIX file "${path//\"/\\\"}"
			end
		EOD
		)" | osascript;
	done;
}

# Use "hd" for the canonical hex dump. (Some systems have this symlinked.)
command -v hd > /dev/null || alias hd='hexdump -C';

# Processes
# -----------------------------------------------------------------------------

# Show a one-line process tree of the given process, defaulting to the current
# shell. By specifying this as a function instead of a separate script, we
# avoid the extra shell process.
function process-tree {
	pid="${1:-$$}";
	orig_pid="$pid";
	local commands=();
	while [ "$pid" != "$ppid" ]; do
		# Read the parent's process ID and the current process's command line.
		{ read -d ' ' ppid; read command; } < <(ps c -p "$pid" -o ppid= -o command= | sed 's/^ *//');
		# XXX This does not quite work yet with screen on OS X. Find out why.
		# echo "PID: $pid // PPID: $ppid // CMD: $command" 1>&2;

		# Stop when we have reached the first process, or an sshd/login process.
		if [ -z "$ppid" ] || [ "$ppid" -eq 0 -o "$ppid" -eq 1 -o "$command" = 'login' -o "$command" = 'sshd' ]; then
			# Include screen/xterm as the "root" process.
			if [ "$command" = 'screen' -o "$command" = 'xterm' ]; then
				commands=("$command" "${commands[@]}");
			fi;
			break;
		fi;

		# Insert the command in the front of the process array.
		commands=("$command" "${commands[@]}");

		# Prepare for the next iteration.
		pid="$ppid";
		ppid=;
	done;

	# Hide the first bash process.
	set -- "${commands[@]}";
	if [ "$1" = '-bash' -o "$1" = 'bash' ]; then
		shift;
		commands=("$@");
	fi;

	# Print the tree with the specified separator.
	separator='→';
	output="$(IFS="$separator"; echo "${commands[*]}")";
	echo "${output//$separator/ $separator }";
}

# Show the uptime (including load) and the top 10 processes by CPU usage.
function top10 {
	uptime;
	if [[ "$OSTYPE" =~ ^darwin ]]; then
		ps waux -r;
	else
		ps waux --sort='-%cpu';
	fi | head -n 11 | cut -c "1-${COLUMNS:-80}";
}

# Sort the "du" output and use human-readable units.
function duh {
	du -sk "$@" | sort -n | while read size fname; do
		for unit in KiB MiB GiB TiB PiB EiB ZiB YiB; do
			if [ "$size" -lt 1024 ]; then
				echo -e "${size} ${unit}\t${fname}";
				break;
			fi;
			size=$((size/1024));
		done;
	done;
}

# Compare one or more URLs "before" and "after".
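# For example (the URLs are placeholders): each URL is fetched once for
# "before" and once for "after" (you are prompted before each round), and the
# differences are then shown per URL:
#
#   $ diff-url http://example.com/ http://example.com/news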
function diff-url () {(
	cd /tmp || return $?;
	local urls=() before=() after=();

	# Remember all URLs.
	local url;
	for url; do
		urls+=("$url");
	done;

	# Save all URLs to "before and after".
	local curr;
	for curr in {before,after}; do
		read -p "Press Enter to save $curr...";

		local i;
		for ((i = 0; i < ${#urls[@]}; i++)); do
			url="${urls[$i]}";
			num="$(printf '%02d' "$i")";
			file="$curr-$num.html";
			[ "$curr" = 'before' ] \
				&& before+=("$file") \
				|| after+=("$file");
			wget -qO- "$url" | sed 's/m=[0-9]*//g; s/[0-9a-f]\{32\}//g; s/[0-9]* keer bekeken//' > "$file";
			[ -z "$prev_x" ] && prev_x="$x";
		done;
	done;

	# Loop through all URLs to diff their before and after.
	for ((i = 0; i < ${#urls[@]}; i++)); do
		url="${urls[$i]}";
		num="$(printf '%02d' "$i")";
		before="${before[$i]}";
		after="${after[$i]}";
		udiff --label="$url (before)" --label="$url (after)" "$before" "$after";
		rm -f "$before" "$after";
	done | colordiff | less -XFIRd;
)}

# Edit the files found with the previous "ack" command using Vim (or the
# default editor).
function vack {
	local cmd='';
	if [ $# -eq 0 ]; then
		cmd="$(fc -nl -1)";
		cmd="${cmd:2}";
	else
		cmd='ack';
		for x; do
			cmd="$cmd $(printf '%q' "$x")";
		done;
		echo "$cmd";
	fi;
	if [ "${cmd:0:4}" != 'ack ' ]; then
		$cmd;
		return $?;
	fi;
	declare -a files;
	while read -r file; do
		echo "$file";
		files+=("$file");
	done < <(bash -c "${cmd/ack/ack -l}");
	vim -p "${files[@]}";
}

# Convert the parameters or STDIN to lowercase.
function lc {
	if [ $# -eq 0 ]; then
		tr '[:upper:]' '[:lower:]';
	else
		tr '[:upper:]' '[:lower:]' <<< "$@";
	fi;
}

# Convert the parameters or STDIN to uppercase.
function uc {
	if [ $# -eq 0 ]; then
		tr '[:lower:]' '[:upper:]';
	else
		tr '[:lower:]' '[:upper:]' <<< "$@";
	fi;
}

# Highlight STDIN based on PCRE patterns.
function highlight {
	local color=33;
	local perl_regex='';
	while [ $# -gt 0 ]; do
		local brightness=1;
		local param="$1";
		if [ "${param:0:2}" = "--" ]; then
			if [ "${param:2:5}" == "dark-" ]; then
				brightness=0;
				param="--${param:7}";
			elif [ "${param:2:6}" == "light-" ]; then
				brightness=1;
				param="--${param:8}";
			fi;
			case "${param:2}" in
				'black' | 'red' | 'green' | 'yellow' | 'blue' | 'magenta' | 'pink' | 'cyan' | 'white')
					param="--color=${param:2}";
					;;
			esac;
		fi;
		if [[ "${param:0:8}" = '--color=' ]]; then
			case ${param:8} in
				'black') color=30;;
				'red') color=31;;
				'green') color=32;;
				'yellow') color=33;;
				'blue') color=34;;
				'magenta' | 'pink') color=35;;
				'cyan') color=36;;
				'white') color=37;;
				*) echo default;;
			esac
			shift;
		fi
		perl_regex="$perl_regex;s@${1//@/\\@}@$(echo -n $'\e['$brightness';'$color'm$&'$'\e[m')@g";
		shift;
	done;
	perl -p -e "select(STDOUT); $| = 1; ${perl_regex:1}";
}

# Show what a given command really is. It is a combination of "type", "file"
# and "ls". Unlike "which", it does not only take $PATH into account. This
# means it works for aliases and hashes, too. (The name "whatis" was taken,
# and I did not want to overwrite "which", hence "wtfis".)
# The return value is the result of "type" for the last command specified.
function wtfis {
	local cmd type i=1 ret=0;
	if [ $# -eq 0 ]; then
		# Use "fc" to get the last command, and use that when no command
		# was given as a parameter to "wtfis".
		set -- $(fc -nl -1);
		while [ $# -gt 0 -a '(' "sudo" = "$1" -o "-" = "${1:0:1}" ')' ]; do
			# Ignore "sudo" and options ("-x" or "--bla").
			shift;
		done;
		# Replace the positional parameter array with the last command name.
		set -- "$1";
	fi;
	for cmd; do
		type="$(type "$cmd")";
		ret=$?;
		if [ $ret -eq 0 ]; then
			# Try to get the physical path. This works for hashes and
			# "normal" binaries.
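			# (For example, "type -p ls" would typically print something
			# like "/bin/ls", whereas it prints nothing for aliases,
			# functions and builtins.)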
			local path="$(type -p "$cmd")";
			if [ -z "$path" ]; then
				# Show the output from "type" without ANSI escapes.
				echo "${type//$'\e'/\\033}";

				case "$(type -t "$cmd")" in
					'alias')
						local alias_="$(alias "$cmd")";
						# The output looks like "alias foo='bar'"; so
						# strip everything except the body.
						alias_="${alias_#*\'}";
						alias_="${alias_%\'}";
						# Use "read" to process escapes. E.g. 'test\ it'
						# will be read as 'test it'. This allows for
						# spaces inside command names.
						read -d ' ' alias_ <<< "$alias_";
						# Recurse and indent the output.
						# TODO: prevent infinite recursion
						wtfis "$alias_" 2>&1 | sed 's/^/ /';
						;;
					'keyword' | 'builtin')
						# Get the one-line description from the built-in
						# help, if available. Note that this does not
						# guarantee anything useful, though. Look at the
						# output for "help set", for instance.
						help "$cmd" 2> /dev/null | {
							local buf line;
							read -r line;
							while read -r line; do
								buf="$buf${line/. */.} ";
								if [[ "$buf" =~ \.\ $ ]]; then
									echo "$buf";
									break;
								fi;
							done;
						};
						;;
				esac;
			else
				# For physical paths, get some more info.
				# First, get the one-line description from the man page.
				# ("col -b" gets rid of the backspaces used by OS X's man
				# to get a "bold" font.)
				(COLUMNS=10000 man "$(basename "$path")" 2>/dev/null) | col -b | \
					awk '/^NAME$/,/^$/' | {
					local buf line;
					read -r line;
					while read -r line; do
						buf="$buf${line/. */.} ";
						if [[ "$buf" =~ \.\ $ ]]; then
							echo "$buf";
							buf='';
							break;
						fi;
					done;
					[ -n "$buf" ] && echo "$buf";
				}

				# Get the absolute path for the binary.
				local full_path="$(
					cd "$(dirname "$path")" \
						&& echo "$PWD/$(basename "$path")" \
						|| echo "$path"
				)";

				# Then, combine the output of "type" and "file".
				local fileinfo="$(file "$full_path")";
				echo "${type%$path}${fileinfo}";

				# Finally, show it using "ls" and highlight the path.
				# If the path is a symlink, keep going until we find the
				# final destination. (This assumes there are no circular
				# references.)
				local paths=("$path") target_path="$path";
				while [ -L "$target_path" ]; do
					target_path="$(readlink "$target_path")";
					paths+=("$(
						# Do some relative path resolving for systems
						# without readlink --canonicalize.
						cd "$(dirname "$path")";
						cd "$(dirname "$target_path")";
						echo "$PWD/$(basename "$target_path")"
					)");
				done;
				local ls="$(command ls -fdalF "${paths[@]}")";
				echo "${ls/$path/$'\e[7m'${path}$'\e[27m'}";
			fi;
		fi;

		# Separate the output for all but the last command with blank lines.
		[ $i -lt $# ] && echo;
		let i++;
	done;
	return $ret;
}

# Try to make sense of the date. It supports everything GNU date knows how to
# parse, as well as UNIX timestamps. It formats the given date using the
# default GNU date format, which you can override using "--format='%x %y %z'".
#
# Examples of input and output:
#
#   $ whenis 1234567890                       # UNIX timestamps
#   Sat Feb 14 00:31:30 CET 2009
#
#   $ whenis +1 year -3 months                # relative dates
#   Fri Jul 20 21:51:27 CEST 2012
#
#   $ whenis 2011-10-09 08:07:06              # MySQL DATETIME strings
#   Sun Oct 9 08:07:06 CEST 2011
#
#   $ whenis 1979-10-14T12:00:00.001-04:00    # HTML5 global date and time
#   Sun Oct 14 17:00:00 CET 1979
#
#   $ TZ=America/Vancouver whenis             # Current time in Vancouver
#   Thu Oct 20 13:04:20 PDT 2011
#
# For more info, check out http://kak.be/gnudateformats.
function whenis {
	local error='Unable to parse that using http://kak.be/gnudateformats';

	# Default GNU date format as seen in date.c from GNU coreutils.
	local format='%a %b %e %H:%M:%S %Z %Y';
	if [[ "$1" =~ ^--format= ]]; then
		format="${1#--format=}";
		shift;
	fi;
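
	# (So "whenis --format='%Y-%m-%d' 1234567890" would print just the date
	# part of the first example above, e.g. "2009-02-14" in a CET time zone.)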

	# Concatenate all arguments as one string specifying the date.
	local date="$*";
	if [[ "$date" =~ ^[[:space:]]*$ ]]; then
		date='now';
	elif [[ "$date" =~ ^[0-9]{13}$ ]]; then
		# Cut off the milliseconds part.
		date="${date:0:10}";
	fi;

	if [[ "$OSTYPE" =~ ^darwin ]]; then
		# Use PHP on OS X, where "date" is not GNU's.
		php -r '
			error_reporting(-1);
			$format = $_SERVER["argv"][1];
			$date = $_SERVER["argv"][2];
			if (!is_numeric($date)) {
				$date = strtotime($date);
				if ($date === false) {
					fputs(STDERR, $_SERVER["argv"][3] . PHP_EOL);
					exit(1);
				}
			}
			echo strftime($format, $date), PHP_EOL;
		' -- "$format" "$date" "$error";
	else
		# Use GNU date in all other situations.
		[[ "$date" =~ ^[0-9]+$ ]] && date="@$date";
		date -d "$date" +"$format";
	fi;
}

# Start your editor ($EDITOR, defaulting to "vim") on the last file specified.
# This is useful to quickly view the last in a series of timestamped files,
# e.g.:
#   $ ls -1 *.sql
#   20111021-112318.sql
#   20111021-112328.sql
#   20111021-112403.sql
#   20111021-112500.sql
#   20111021-112704.sql
#   20111021-112724.sql
#   20111021-112729.sql
#   20111021-113949.sql
#   $ vilast *.sql        # will edit 20111021-113949.sql
function vilast {
	(($#)) && ${EDITOR:-vim} "${!#}";
}

# A quick way to invoke a read-only Vim on the last file. See "vilast".
function viewlast {
	(EDITOR=view vilast "$@");
}

# A quick way to show the last file in the Finder. See "vilast".
function showlast {
	(($#)) && show "${!#}";
}

# A quick way to "tail -f" the last file. See "vilast".
function taillast {
	(($#)) && tail -f "${!#}";
}

# A quick way to "cd" to the last directory. See "vilast".
function cdlast {
	for ((i = $#; i > 0; i--)); do
		if [ -d "${!i}" ]; then
			cd "${!i}";
			return;
		fi;
	done;
}

# Keep executing the given command until it returns 0. If it does not,
# sleep for 1 second and try again. If still not successful, sleep for 2
# seconds, and try again. Next, sleep for 4, 8, 16, … seconds (capped at
# BB_MAX_DELAY, which defaults to 900, or 15 minutes), until the maximum
# number of tries (BB_MAX_TRIES, defaults to unlimited) has been reached.
#
# Note that this is not "true" binary exponential back-off, as the delay is
# not chosen randomly, but simply doubled each time.
#
# An example usage is trying to establish an SSH connection while the network
# is acting up. Binary back-off ensures that we do not keep retrying
# unnecessarily often, and will patiently wait for longer amounts of time
# between retries:
#
#   binary-backoff ssh -f -N -L 10022:intranet.example.com:22 extranet.example.com
#
# This will immediately try to set up an SSH tunnel, retrying until successful.
# If the first try does not succeed, it pauses for 1 second. Then, if still not
# successful, for 2 seconds. Next, 4 seconds, 8 seconds, and so on. The maximum
# delay can be specified using the --max-delay=N option or the BB_MAX_DELAY
# environment variable. It defaults to 15 minutes. You can also set the maximum
# number of tries using the --max-tries option or the BB_MAX_TRIES environment
# variable.
function binary-backoff {
	# Configuration.
	local max_delay="${BB_MAX_DELAY:-900}";
	local max_tries="${BB_MAX_TRIES:-0}";
	local can_expect_options=true;
	local command=();
	local arg;
	for arg; do
		if [ "$arg" = '--' ]; then
			can_expect_options=false;
		elif [ "${arg:0:2}" = '--' ] && $can_expect_options; then
			if [[ "$arg" =~ ^--max-delay=(.*) ]]; then
				max_delay="${BASH_REMATCH[1]}";
			elif [[ "$arg" =~ ^--max-tries=(.+) ]]; then
				max_tries="${BASH_REMATCH[1]}";
			else
				echo "Invalid option: \"$arg\"." 1>&2;
				return 1;
			fi;
		else
			command+=("$arg");
			can_expect_options=false;
		fi;
	done;
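
	# (At this point, a call like "binary-backoff --max-tries=5 -- rsync -a
	# src/ dst/" would have set max_tries to 5 and command to
	# (rsync -a src/ dst/); "rsync" here is just an illustrative command.)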
	if ! ((${#command[@]})); then
		echo 'You have to specify a command to execute.' 1>&2;
		return 127;
	elif ! [[ "$max_delay" =~ ^[1-9][0-9]*$ ]]; then
		echo '--max-delay/BB_MAX_DELAY has to be a positive integer.' 1>&2;
		return 1;
	elif [ -n "$max_tries" ] && ! [[ "$max_tries" =~ ^[0-9]+$ ]]; then
		echo '--max-tries/BB_MAX_TRIES has to be a non-negative integer.' 1>&2;
		return 1;
	fi;

	# Start trying.
	local delay=1;
	local num_tries=0;
	while [ $max_tries -eq 0 ] || [ $num_tries -lt $max_tries ]; do
		# If successful, stop immediately.
		"${command[@]}" && return;

		# Remember the non-zero exit status.
		exit_status=$?;
		exit_msg="Exit status: $exit_status.";

		# Check if we have reached the maximum number of tries.
		let num_tries++;
		if [ $max_tries -gt 0 ] && [ $num_tries -ge $max_tries ]; then
			echo "$(date +'[%Y-%m-%d %H:%M:%S]') Stopping after $num_tries tries…" 1>&2;
			return $exit_status;
		fi;

		# Sleep for a while.
		echo;
		local move_up="$(tput cuu1)" clear_line="$(tput el)";
		local num_seconds_left="$delay";
		local start_timestamp="$(date +'%s')";
		while [ $num_seconds_left -gt 0 ]; do
			[ $num_seconds_left -eq 1 ] \
				&& local time_unit=second \
				|| local time_unit=seconds;
			echo "${move_up}${clear_line}$(date +'[%Y-%m-%d %H:%M:%S]') ${exit_msg} Waiting for $num_seconds_left $time_unit after $num_tries tries… (Press Enter to retry immediately, or Ctrl+C to cancel entirely.)" 1>&2;
			if read -t 1; then
				num_seconds_left=0;
			else
				let num_seconds_left--;
			fi;
		done;

		# Show how long we actually waited before retrying.
		local end_timestamp="$(date +'%s')";
		local num_seconds_waited="$((end_timestamp - start_timestamp))";
		[ $num_seconds_waited -eq 1 ] \
			&& local time_unit=second \
			|| local time_unit=seconds;
		echo "${move_up}$(date +'[%Y-%m-%d %H:%M:%S]') ${clear_line}${exit_msg} Waited for $num_seconds_waited $time_unit. Retrying…" 1>&2;

		# Double the delay, but cap it at the maximum.
		let delay*=2;
		[ $delay -gt $max_delay ] && delay=$max_delay;
	done;
}

# Do a mysqldump via ssh and store it in a local timestamped file. This
# function assumes proper setup in your local ~/.ssh/config and the remote
# ~/.my.cnf.
function ssh-mysqldump {
	local timestamp="$(date +'%Y%m%d-%H%M%S')";
	if [ -z "$1" -o "$1" = '--help' ]; then
		local filename="$timestamp-ssh_host-database_name.sql";
		echo "Usage: ${FUNCNAME[0]} ssh_host [database_name]";
		echo "This will create a file called $filename in this directory.";
		# Do not return an error when the user explicitly requested help.
		[ "$1" = '--help' ];
		return $?;
	fi >&2;

	# Determine the host and the database, with the database name defaulting
	# to the host name (or SSH config alias).
	local host="$1";
	local database="$host";
	shift;
	if (($#)) && [ "${1:0:1}" != '-' ]; then
		database="$1";
		shift;
	fi;

	# I prefer one INSERT statement per record, so by default, disable the
	# extended INSERT syntax. Unless there is an explicit option in the
	# arguments.
	local has_extended_insert_option=false;

	# By default, do not lock the tables while dumping. This is slightly less
	# accurate, but locking tables used in production can wreak havoc. Prefer
	# uptime to correctness.
	local has_lock_all_tables_option=false;
	local has_lock_tables_option=false;

	local arg opts=() args=();
	for arg; do
		# Escape the meta-characters in the arguments so they are safe for
		# the remote shell.
		if [ "${arg:0:1}" = '-' ]; then
			opts+=("$(printf '%q' "$arg")");
		else
			args+=("$(printf '%q' "$arg")");
		fi;
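
		# (For example, a database name like "my db" would be passed along
		# to the remote shell as "my\ db".)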

		# See if our default options are overridden.
		if [[ "$arg" =~ ^--(skip-)?extended-insert$ ]]; then
			has_extended_insert_option=true;
		elif [[ "$arg" =~ ^--(skip-)?lock-all-tables$ ]]; then
			has_lock_all_tables_option=true;
		elif [[ "$arg" =~ ^--(skip-)?lock-tables$ ]]; then
			has_lock_tables_option=true;
		fi;
	done;
	$has_extended_insert_option || opts+=('--skip-extended-insert');
	$has_lock_all_tables_option || opts+=('--skip-lock-all-tables');
	$has_lock_tables_option || opts+=('--skip-lock-tables');

	local filename="$timestamp-${host//\//-}-${database//\//-}.sql";

	# Finally, run the remote mysqldump and store the result locally.
	# If you want compression, set it in your ~/.ssh/config. I quote the man
	# page: "[ Compression ] will only slow down things on fast networks."
	if [ -t 1 ]; then
		ssh -- "$host" mysqldump "${opts[@]}" "$database" "${args[@]}" > "$filename";
	else
		echo "Stdout is not a terminal, so I won't save to $filename." >&2;
		ssh -- "$host" mysqldump "${opts[@]}" "${args[@]}" "$database";
	fi;
	local result=$?;
	if [ -s "$filename" ]; then
		ls -dalF "$filename";
	else
		rm -f "$filename";
	fi;
	return $result;
}
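
# For example (the SSH alias "web01" and the database name "shop" are made-up
# names; the timestamp in the file name will obviously differ):
#
#   $ ssh-mysqldump web01 shop
#
# would store the remote dump in something like "20111021-113949-web01-shop.sql"
# in the current directory. Any further arguments are passed through to the
# remote mysqldump.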