Bash scripts have a reputation for being fragile. They don’t have to be. Here are the patterns that separate scripts that work from scripts that work reliably.
Start Every Script Right#
1
2
3
| #!/usr/bin/env bash
# Abort on any command failure (-e), on use of unset variables (-u),
# and when any stage of a pipeline fails (-o pipefail).
set -euo pipefail
# Word-split only on newline and tab, never on plain spaces.
IFS=$'\n\t'
|
What each does:
- set -e - Exit on any command failure
- set -u - Error on undefined variables
- set -o pipefail - Pipelines fail if any command fails
- IFS=$'\n\t' - Safer word splitting (no space splitting)
Error Handling#
Basic Trap#
1
2
3
4
5
6
7
8
9
10
11
12
#!/usr/bin/env bash
set -euo pipefail

# Remove the temp file on ANY exit: normal end, error (set -e), or interrupt.
cleanup() {
  echo "Cleaning up..."
  # ${TEMP_FILE:-} keeps 'set -u' from aborting this handler if the
  # script dies before mktemp ever ran (the trap is installed first).
  if [[ -n "${TEMP_FILE:-}" ]]; then
    rm -f -- "$TEMP_FILE"
  fi
}
trap cleanup EXIT

TEMP_FILE=$(mktemp)
# Script continues...
# cleanup runs automatically on exit, error, or interrupt
|
Detailed Error Reporting#
1
2
3
4
5
6
7
8
9
10
11
12
#!/usr/bin/env bash
# -E (errtrace) makes the ERR trap fire inside functions, command
# substitutions, and subshells too; plain 'set -e' alone would not.
set -Eeuo pipefail

# Report the failing line number and exit status to stderr,
# then propagate that status.
error_handler() {
  local line=$1
  local exit_code=$2
  echo "Error on line $line: exit code $exit_code" >&2
  exit "$exit_code"
}
trap 'error_handler $LINENO $?' ERR

# Now errors report their line number
|
Log and Exit#
1
2
3
4
5
6
7
# Print an error message to stderr and abort the script with status 1.
die() {
  printf 'ERROR: %s\n' "$*" >&2
  exit 1
}

# Usage
[[ -f "$CONFIG_FILE" ]] || die "Config file not found: $CONFIG_FILE"
|
Argument Parsing#
Simple Positional#
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
#!/usr/bin/env bash
set -euo pipefail

# Print usage information and stop with a non-zero status.
usage() {
  cat <<EOF
Usage: $0 <environment> <version>
 environment: staging|production
 version: semver (e.g., 1.2.3)
EOF
  exit 1
}

(( $# == 2 )) || usage

ENVIRONMENT=$1
VERSION=$2

# Validate both values up front, before doing any work.
[[ "$ENVIRONMENT" =~ ^(staging|production)$ ]] || die "Invalid environment"
[[ "$VERSION" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]] || die "Invalid version format"
|
Flags with getopts#
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
#!/usr/bin/env bash
set -euo pipefail

VERBOSE=false
DRY_RUN=false
OUTPUT=""

usage() {
  cat <<EOF
Usage: $0 [options] <file>
Options:
 -v Verbose output
 -n Dry run (don't make changes)
 -o FILE Output file
 -h Show this help
EOF
  exit 1
}

# Leading ':' puts getopts in silent mode, so WE control the error
# messages: the ':' case fires when an option is missing its required
# argument, '\?' when the option itself is unknown.
while getopts ":vno:h" opt; do
  case $opt in
    v) VERBOSE=true ;;
    n) DRY_RUN=true ;;
    o) OUTPUT=$OPTARG ;;
    h) usage ;;
    :) echo "Option -$OPTARG requires an argument" >&2; usage ;;
    \?) echo "Unknown option: -$OPTARG" >&2; usage ;;
  esac
done
shift $((OPTIND - 1))

[[ $# -eq 1 ]] || usage
FILE=$1
|
Long Options (Manual Parsing)#
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
#!/usr/bin/env bash
set -euo pipefail

VERBOSE=false
CONFIG=""

while [[ $# -gt 0 ]]; do
  case $1 in
    -v|--verbose)
      VERBOSE=true
      shift
      ;;
    -c|--config)
      # Guard against a missing value: with a trailing '-c', $2 would
      # trip 'set -u' and 'shift 2' would fail outright.
      [[ $# -ge 2 ]] || die "Option $1 requires an argument"
      CONFIG=$2
      shift 2
      ;;
    -h|--help)
      usage
      ;;
    --)
      # Explicit end of options; everything after is positional.
      shift
      break
      ;;
    -*)
      die "Unknown option: $1"
      ;;
    *)
      break
      ;;
  esac
done
|
Variable Safety#
Default Values#
1
2
3
4
5
6
7
8
# Use a default if NAME is unset OR empty (the ':' makes empty count as unset)
NAME=${NAME:-"default"}

# Same test, but ALSO assigns the default back to NAME
NAME=${NAME:="default"}

# Use a default only if NAME is unset (an empty string is kept as-is)
NAME=${NAME-"default"}

# Abort with an error if unset or empty
: "${REQUIRED_VAR:?'REQUIRED_VAR must be set'}"
|
Safe Variable Expansion#
1
2
3
4
5
6
7
8
| # Always quote variables
rm "$FILE" # Good - expands to exactly one word; spaces and globs survive
rm $FILE # Bad - word-splits on spaces, expands globs, vanishes when empty
# Check before using: "${VAR:-}" is safe under 'set -u' even when VAR is unset
if [[ -n "${VAR:-}" ]]; then
echo "$VAR"
fi
|
Arrays#
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
FILES=("file1.txt" "file with spaces.txt" "file3.txt")

# Quoted "${FILES[@]}" expands each element as its own word,
# so names containing spaces survive intact.
for file in "${FILES[@]}"; do
  printf 'Processing: %s\n' "$file"
done

# The same expansion hands each name to the command as one argument.
cp "${FILES[@]}" /destination/

# Number of elements
printf 'Count: %s\n' "${#FILES[@]}"

# Grow the array in place
FILES+=("another.txt")
|
File Operations#
Safe Temporary Files#
1
2
3
4
5
# NOTE: a later 'trap ... EXIT' REPLACES an earlier one - it does not
# stack. Registering two separate handlers here would leak $TEMP_DIR.
# Create everything first, then install ONE handler that removes it all.
TEMP_DIR=$(mktemp -d)
TEMP_FILE=$(mktemp)
trap 'rm -rf "$TEMP_DIR" "$TEMP_FILE"' EXIT
|
Check Before Acting#
1
2
3
4
5
6
7
8
9
10
11
12
13
14
# Precondition guards: fail fast with a clear message before doing work.

# Regular file exists
if ! [[ -f "$FILE" ]]; then die "File not found: $FILE"; fi

# Directory exists
if ! [[ -d "$DIR" ]]; then die "Directory not found: $DIR"; fi

# Readable by the current user
if ! [[ -r "$FILE" ]]; then die "Cannot read: $FILE"; fi

# Writable by the current user
if ! [[ -w "$FILE" ]]; then die "Cannot write: $FILE"; fi

# Executable by the current user
if ! [[ -x "$FILE" ]]; then die "Cannot execute: $FILE"; fi
|
Safe File Writing#
1
2
3
4
5
6
7
8
9
10
# Atomically replace $2 with content $1.
# rename(2) is only atomic WITHIN one filesystem; mktemp's default /tmp
# may be a different filesystem from the destination, in which case mv
# degrades to copy+delete. So the temp file must live next to the dest.
write_config() {
  local content=$1
  local dest=$2
  local temp
  temp=$(mktemp "$(dirname "$dest")/.write_config.XXXXXX")
  # printf, not echo: content like '-n' or backslashes is written verbatim
  printf '%s\n' "$content" > "$temp"
  mv -- "$temp" "$dest" # Atomic: same filesystem by construction
}
|
Command Execution#
Check Command Exists#
1
2
3
4
5
6
7
# Abort (via die) when a required executable cannot be found on PATH.
# 'command -v' is the portable check; 'which' is not.
require_command() {
  if ! command -v "$1" >/dev/null 2>&1; then
    die "Required command not found: $1"
  fi
}

require_command jq
require_command aws
require_command docker
|
Capture Output and Exit Code#
1
2
3
4
5
6
7
8
9
10
11
# Capture stdout and stderr together
output=$(some_command 2>&1)

# Capture the exit status without aborting under 'set -e':
# the '|| exit_code=$?' guard swallows the failure and records it.
exit_code=0
output=$(some_command 2>&1) || exit_code=$?

if (( exit_code != 0 )); then
  printf 'Command failed with code %s\n' "$exit_code"
  printf 'Output: %s\n' "$output"
fi
|
Retry Logic#
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
# retry <max_attempts> <delay_seconds> <command> [args...]
# Run the command until it succeeds, at most max_attempts times.
# Returns 0 on the first success, 1 if every attempt fails.
retry() {
  local max_attempts=$1
  local delay=$2
  shift 2
  local cmd=("$@")
  local attempt
  for (( attempt = 1; attempt <= max_attempts; attempt++ )); do
    if "${cmd[@]}"; then
      return 0
    fi
    # Don't sleep after the FINAL failure - there is no next attempt.
    # Diagnostics go to stderr so callers can still capture stdout.
    if (( attempt < max_attempts )); then
      echo "Attempt $attempt/$max_attempts failed. Retrying in ${delay}s..." >&2
      sleep "$delay"
    fi
  done
  return 1
}

# Usage:
#   retry 3 5 curl -f https://api.example.com/health
|
Logging#
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
LOG_FILE="/var/log/myscript.log"

# Emit one timestamped, leveled line to stdout AND append it to $LOG_FILE.
log() {
  local level=$1
  shift
  printf '[%s] [%s] %s\n' "$(date '+%Y-%m-%d %H:%M:%S')" "$level" "$*" | tee -a "$LOG_FILE"
}

# Thin level wrappers; error() additionally routes the line to stderr.
info()  { log INFO "$@"; }
warn()  { log WARN "$@"; }
error() { log ERROR "$@" >&2; }

# Usage
info "Starting deployment"
warn "Deprecated config option used"
error "Failed to connect to database"
|
Confirmation Prompts#
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
# Ask a yes/no question; the default answer is No.
# Returns 0 only on an explicit y/Y. EOF (closed stdin) counts as No.
confirm() {
  local prompt=${1:-"Continue?"}
  local response
  read -r -p "$prompt [y/N] " response || response=""
  [[ "$response" =~ ^[Yy]$ ]]
}

# Usage
# ${DIR:?} aborts instead of letting an unset DIR expand 'rm -rf ""/*'
# into 'rm -rf /*'.
if confirm "Delete all files in $DIR?"; then
  rm -rf -- "${DIR:?DIR must be set}"/*
fi

# With default yes: anything except an explicit n/N counts as Yes -
# but EOF still answers No, so a script with closed stdin can never
# blunder into the destructive branch.
confirm_yes() {
  local prompt=${1:-"Continue?"}
  local response
  read -r -p "$prompt [Y/n] " response || return 1
  [[ ! "$response" =~ ^[Nn]$ ]]
}
|
Parallel Execution#
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
# Simple background jobs: fan out, then barrier on all of them.
for server in server1 server2 server3; do
  deploy_to "$server" &
done
wait # Wait for all background jobs

# With job limiting ('wait -n' needs bash 4.3+)
MAX_JOBS=4
job_count=0
for item in "${ITEMS[@]}"; do
  process_item "$item" &
  # NOT '((job_count++))': the post-increment returns the OLD value,
  # so it is "false" when job_count is 0 and would kill a 'set -e'
  # script on the very first iteration. Arithmetic assignment is safe.
  job_count=$((job_count + 1))
  if [[ $job_count -ge $MAX_JOBS ]]; then
    wait -n # Wait for any one job to complete
    job_count=$((job_count - 1))
  fi
done
wait # Wait for remaining jobs
|
Complete Script Template#
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
#!/usr/bin/env bash
#
# Description: What this script does
# Usage: ./script.sh [options] <args>
#
set -euo pipefail
IFS=$'\n\t'

# Constants.
# Declaration is split from assignment: 'readonly VAR="$(cmd)"' would
# mask a failure of the command substitution (ShellCheck SC2155).
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
readonly SCRIPT_DIR
SCRIPT_NAME="$(basename "${BASH_SOURCE[0]}")"
readonly SCRIPT_NAME

# Defaults
VERBOSE=false
DRY_RUN=false

# Logging helpers - warnings/errors go to stderr, progress to stdout.
log() { echo "[$(date '+%Y-%m-%d %H:%M:%S')] $*"; }
info() { log "INFO: $*"; }
warn() { log "WARN: $*" >&2; }
error() { log "ERROR: $*" >&2; }
die() { error "$@"; exit 1; }

# Cleanup - runs on every exit path (normal, error, interrupt).
cleanup() {
  # Add cleanup tasks here
  :
}
trap cleanup EXIT

# Print help; optional $1 is the exit status (default 0).
usage() {
  cat <<EOF
Usage: $SCRIPT_NAME [options] <argument>
Description:
 What this script does in more detail.
Options:
 -v, --verbose Enable verbose output
 -n, --dry-run Show what would be done
 -h, --help Show this help message
Examples:
 $SCRIPT_NAME -v input.txt
 $SCRIPT_NAME --dry-run config.yml
EOF
  exit "${1:-0}"
}

# Parse command line into the globals VERBOSE, DRY_RUN, ARGUMENT.
parse_args() {
  while [[ $# -gt 0 ]]; do
    case $1 in
      -v|--verbose) VERBOSE=true; shift ;;
      -n|--dry-run) DRY_RUN=true; shift ;;
      -h|--help) usage 0 ;;
      --) shift; break ;;
      -*) die "Unknown option: $1" ;;
      *) break ;;
    esac
  done
  [[ $# -ge 1 ]] || die "Missing required argument"
  ARGUMENT=$1
}

# Main logic
main() {
  parse_args "$@"
  info "Starting with argument: $ARGUMENT"
  # Test the flag's VALUE - 'if $VERBOSE' would execute the variable's
  # contents as a command.
  if [[ "$VERBOSE" == true ]]; then
    info "Verbose mode enabled"
  fi
  if [[ "$DRY_RUN" == true ]]; then
    info "Dry run - no changes will be made"
  fi
  # Your script logic here
  info "Done"
}

main "$@"
|
Bash scripts don’t need to be fragile. set -euo pipefail catches most accidents. Proper argument parsing makes scripts usable. Traps ensure cleanup happens. These patterns transform one-off hacks into reliable automation. Use the template, adapt as needed, and stop being afraid of your own scripts.
💬 Get the Newsletter
Weekly insights on DevOps, automation, and CLI mastery. No spam, unsubscribe anytime.