Python Patterns for Command-Line Scripts

Python is the go-to language for automation scripts. Here’s how to write CLI tools that are reliable and user-friendly. Basic Script Structure 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 #!/usr/bin/env python3 """One-line description of what this script does.""" import argparse import sys def main(): parser = argparse.ArgumentParser(description=__doc__) parser.add_argument('input', help='Input file path') parser.add_argument('-o', '--output', help='Output file path') parser.add_argument('-v', '--verbose', action='store_true') args = parser.parse_args() # Your logic here process(args.input, args.output, args.verbose) if __name__ == '__main__': main() Argument Parsing with argparse Positional Arguments 1 2 3 4 parser.add_argument('filename') # Required parser.add_argument('files', nargs='+') # One or more parser.add_argument('files', nargs='*') # Zero or more parser.add_argument('config', nargs='?') # Optional positional Optional Arguments 1 2 3 4 5 parser.add_argument('-v', '--verbose', action='store_true') parser.add_argument('-q', '--quiet', action='store_false', dest='verbose') parser.add_argument('-n', '--count', type=int, default=10) parser.add_argument('-f', '--format', choices=['json', 'csv', 'table']) parser.add_argument('--config', type=argparse.FileType('r')) Subcommands 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 parser = argparse.ArgumentParser() subparsers = parser.add_subparsers(dest='command', required=True) # 'init' command init_parser = subparsers.add_parser('init', help='Initialize project') init_parser.add_argument('--force', action='store_true') # 'run' command run_parser = subparsers.add_parser('run', help='Run the application') run_parser.add_argument('--port', type=int, default=8080) args = parser.parse_args() if args.command == 'init': do_init(args.force) elif args.command == 'run': do_run(args.port) Error Handling 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 import sys def main(): try: result = process() return 0 except 
FileNotFoundError as e: print(f"Error: File not found: {e.filename}", file=sys.stderr) return 1 except PermissionError: print("Error: Permission denied", file=sys.stderr) return 1 except KeyboardInterrupt: print("\nInterrupted", file=sys.stderr) return 130 except Exception as e: print(f"Error: {e}", file=sys.stderr) return 1 if __name__ == '__main__': sys.exit(main()) Logging 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 import logging def setup_logging(verbose=False): level = logging.DEBUG if verbose else logging.INFO logging.basicConfig( level=level, format='%(asctime)s - %(levelname)s - %(message)s', datefmt='%Y-%m-%d %H:%M:%S' ) def main(): args = parse_args() setup_logging(args.verbose) logging.info("Starting process") logging.debug("Detailed info here") logging.warning("Something might be wrong") logging.error("Something went wrong") Log to File and Console 1 2 3 4 5 6 7 8 9 10 11 def setup_logging(verbose=False, log_file=None): handlers = [logging.StreamHandler()] if log_file: handlers.append(logging.FileHandler(log_file)) logging.basicConfig( level=logging.DEBUG if verbose else logging.INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', handlers=handlers ) Progress Indicators Simple Progress 1 2 3 4 5 6 7 8 import sys def process_items(items): total = len(items) for i, item in enumerate(items, 1): process(item) print(f"\rProcessing: {i}/{total}", end='', flush=True) print() # Newline at end With tqdm 1 2 3 4 5 6 7 8 9 10 from tqdm import tqdm for item in tqdm(items, desc="Processing"): process(item) # Or wrap any iterable with tqdm(total=100) as pbar: for i in range(100): do_work() pbar.update(1) Reading Input From File or Stdin 1 2 3 4 5 6 7 8 9 10 import sys def read_input(filepath=None): if filepath: with open(filepath) as f: return f.read() elif not sys.stdin.isatty(): return sys.stdin.read() else: raise ValueError("No input provided") Line by Line 1 2 3 4 5 import fileinput # Reads from files in args or stdin for line in 
fileinput.input(): process(line.strip()) Output Formatting JSON Output 1 2 3 4 5 6 7 import json def output_json(data, pretty=False): if pretty: print(json.dumps(data, indent=2, default=str)) else: print(json.dumps(data, default=str)) Table Output 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 def print_table(headers, rows): # Calculate column widths widths = [len(h) for h in headers] for row in rows: for i, cell in enumerate(row): widths[i] = max(widths[i], len(str(cell))) # Print header header_line = ' | '.join(h.ljust(widths[i]) for i, h in enumerate(headers)) print(header_line) print('-' * len(header_line)) # Print rows for row in rows: print(' | '.join(str(cell).ljust(widths[i]) for i, cell in enumerate(row))) With tabulate 1 2 3 4 5 6 7 from tabulate import tabulate data = [ ['Alice', 30, 'Engineer'], ['Bob', 25, 'Designer'], ] print(tabulate(data, headers=['Name', 'Age', 'Role'], tablefmt='grid')) Configuration Files YAML Config 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 import yaml from pathlib import Path def load_config(config_path=None): paths = [ config_path, Path.home() / '.myapp.yaml', Path('/etc/myapp/config.yaml'), ] for path in paths: if path and Path(path).exists(): with open(path) as f: return yaml.safe_load(f) return {} # Defaults Environment Variables 1 2 3 4 5 6 7 8 import os def get_config(): return { 'api_key': os.environ.get('API_KEY'), 'debug': os.environ.get('DEBUG', '').lower() in ('true', '1', 'yes'), 'timeout': int(os.environ.get('TIMEOUT', '30')), } Running External Commands 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 import subprocess def run_command(cmd, check=True): """Run command and return output.""" result = subprocess.run( cmd, shell=isinstance(cmd, str), capture_output=True, text=True, check=check ) return result.stdout.strip() # Usage output = run_command(['git', 'status', '--short']) output = run_command('ls -la | head -5') With Timeout 1 2 3 4 5 6 7 8 9 try: result = subprocess.run( ['slow-command'], timeout=30, capture_output=True, text=True 
) except subprocess.TimeoutExpired: print("Command timed out") Temporary Files 1 2 3 4 5 6 7 8 9 10 11 12 13 import tempfile from pathlib import Path # Temporary file with tempfile.NamedTemporaryFile(mode='w', suffix='.json', delete=False) as f: f.write('{"data": "value"}') temp_path = f.name # Temporary directory with tempfile.TemporaryDirectory() as tmpdir: work_file = Path(tmpdir) / 'work.txt' work_file.write_text('working...') # Directory deleted when context exits Path Handling 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 from pathlib import Path def process_files(directory): base = Path(directory) # Find files for path in base.glob('**/*.py'): print(f"Processing: {path}") # Path operations print(f" Name: {path.name}") print(f" Stem: {path.stem}") print(f" Suffix: {path.suffix}") print(f" Parent: {path.parent}") # Read/write content = path.read_text() path.with_suffix('.bak').write_text(content) Complete Example 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 #!/usr/bin/env python3 """Process log files and output statistics.""" import argparse import json import logging import sys from collections import Counter from pathlib import Path def setup_logging(verbose): logging.basicConfig( level=logging.DEBUG if verbose else logging.INFO, format='%(levelname)s: %(message)s' ) def parse_args(): parser = argparse.ArgumentParser( description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter ) parser.add_argument( 'logfiles', nargs='+', type=Path, help='Log files to process' ) parser.add_argument( '-o', '--output', type=argparse.FileType('w'), default=sys.stdout, help='Output file (default: stdout)' ) parser.add_argument( '-f', '--format', choices=['json', 'text'], default='text', help='Output format' ) parser.add_argument( '-v', '--verbose', 
action='store_true', help='Enable verbose output' ) return parser.parse_args() def analyze_logs(logfiles): stats = Counter() for logfile in logfiles: logging.info(f"Processing {logfile}") if not logfile.exists(): logging.warning(f"File not found: {logfile}") continue for line in logfile.read_text().splitlines(): if 'ERROR' in line: stats['errors'] += 1 elif 'WARNING' in line: stats['warnings'] += 1 stats['total'] += 1 return dict(stats) def output_results(stats, output, fmt): if fmt == 'json': json.dump(stats, output, indent=2) output.write('\n') else: for key, value in stats.items(): output.write(f"{key}: {value}\n") def main(): args = parse_args() setup_logging(args.verbose) try: stats = analyze_logs(args.logfiles) output_results(stats, args.output, args.format) return 0 except Exception as e: logging.error(f"Failed: {e}") return 1 if __name__ == '__main__': sys.exit(main()) Usage: ...

February 28, 2026 · 6 min · 1202 words · Rob Washington

systemd Timers: The Modern Alternative to Cron

Cron works. It’s also from 1975. systemd timers offer logging integration, dependency handling, and more flexible scheduling. Here’s how to use them. Why Timers Over Cron? Logging: Output goes to journald automatically Dependencies: Wait for network, mounts, or other services Flexibility: Calendar events, monotonic timers, randomized delays Visibility: systemctl list-timers shows everything Consistency: Same management as other systemd units Basic Structure A timer needs two files: A .timer unit (the schedule) A .service unit (the job) Place them in /etc/systemd/system/ (system-wide) or ~/.config/systemd/user/ (user). ...

February 28, 2026 · 5 min · 944 words · Rob Washington

Makefiles for Modern Development Workflows

Makefiles are ancient. They’re also incredibly useful for modern development. Here’s how to use them as your project’s command center. Why Make in 2026? Every project has commands you run repeatedly: Start development servers Run tests Build containers Deploy to environments Format and lint code You could remember them all. Or document them in a README that gets stale. Or put them in a Makefile where they’re executable documentation. 1 2 3 4 5 6 7 8 9 10 11 12 13 14 .PHONY: dev test build deploy dev: docker-compose up -d npm run dev test: pytest -v build: docker build -t myapp:latest . deploy: kubectl apply -f k8s/ Now make dev starts everything. New team member? Run make help. ...

February 28, 2026 · 6 min · 1075 words · Rob Washington

Practical Patterns for Building Autonomous AI Agents

The gap between “AI demo” and “AI that runs reliably” is enormous. Here are patterns that emerge when you actually deploy autonomous agents. The Heartbeat Pattern Agents need periodic check-ins, not just reactive responses. A heartbeat system provides: 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 @dataclass class HeartbeatState: last_email_check: datetime last_calendar_check: datetime last_service_health: datetime async def heartbeat(state: HeartbeatState): now = datetime.now() if (now - state.last_service_health).hours >= 2: await check_services() state.last_service_health = now if (now - state.last_email_check).hours >= 4: await check_inbox() state.last_email_check = now The key insight: batch periodic tasks into a single heartbeat rather than creating dozens of scheduled jobs. This reduces API calls and keeps context coherent. ...

February 28, 2026 · 4 min · 642 words · Rob Washington

find: The Swiss Army Knife You're Underusing

Every developer knows find . -name "*.txt". Few know that find can replace half your shell scripts. Beyond Basic Search 1 2 3 4 5 6 7 8 9 10 11 # Find by name (case-insensitive) find . -iname "readme*" # Find by extension find . -name "*.py" # Find by exact name find . -name "Makefile" # Find excluding directories find . -name "*.js" -not -path "./node_modules/*" The -not (or !) operator is your friend for excluding noise. ...

February 27, 2026 · 6 min · 1166 words · Rob Washington

xargs: Turn Any Output Into Parallel Commands

You have a list of files. You need to process each one. The naive approach: 1 2 3 for file in $(cat files.txt); do process "$file" done This works until it doesn’t — filenames with spaces break it, and it’s sequential. Enter xargs. The Basics xargs reads input and converts it into arguments for a command: 1 2 3 4 5 # Delete files listed in a file cat files.txt | xargs rm # Same thing, more efficient xargs rm < files.txt Without xargs, you’d need a loop. With xargs, one line. ...

February 27, 2026 · 5 min · 1033 words · Rob Washington

Git Hooks: Automate Quality Before It's Too Late

Code review catches problems. CI catches problems. But the fastest feedback loop? Catching problems before you even commit. Git hooks run scripts at key points in your workflow. Use them to lint, test, and validate — automatically. Where Hooks Live 1 2 3 4 5 6 ls .git/hooks/ # applypatch-msg.sample pre-commit.sample # commit-msg.sample pre-push.sample # post-update.sample pre-rebase.sample # pre-applypatch.sample prepare-commit-msg.sample # pre-merge-commit.sample update.sample Remove .sample to activate. Hooks must be executable (chmod +x). ...

February 27, 2026 · 6 min · 1159 words · Rob Washington

Cron Jobs That Don't Wake You Up at Night

Cron is deceptively simple. Five fields, a command, done. Until your job runs twice simultaneously, silently fails for a week, or fills your disk with output nobody reads. Here’s how to write cron jobs that actually work in production. The Basics Done Right 1 2 3 4 5 6 7 8 # Bad: No logging, no error handling 0 * * * * /opt/scripts/backup.sh # Better: Redirect output, capture errors 0 * * * * /opt/scripts/backup.sh >> /var/log/backup.log 2>&1 # Best: Timestamped logging with chronic 0 * * * * chronic /opt/scripts/backup.sh chronic (from moreutils) only outputs when the command fails. Perfect for cron — silent success, loud failure. ...

February 27, 2026 · 5 min · 896 words · Rob Washington

Shell Scripting Patterns That Prevent 3 AM Pages

Every ops engineer has a story about a shell script that worked perfectly — until it didn’t. Usually at 3 AM. Usually in production. These patterns won’t make your scripts bulletproof, but they’ll stop the most common failures. Start Every Script Right 1 2 3 #!/usr/bin/env bash set -euo pipefail IFS=$'\n\t' This preamble should be muscle memory: set -e: Exit on any error (non-zero return code) set -u: Exit on undefined variables set -o pipefail: Catch errors in pipes (not just the last command) IFS=$'\n\t': Safer word splitting (no spaces) Without these, a typo like rm -rf $UNSET_VAR/ could wipe your root filesystem. ...

February 27, 2026 · 5 min · 1001 words · Rob Washington

Mastering systemd Service Units: From First Service to Production-Ready

If you’re running services on Linux, you’re almost certainly using systemd. But there’s a gap between knowing systemctl start nginx and actually writing your own robust service units. Let’s close that gap. The Anatomy of a Service Unit A systemd service unit lives in /etc/systemd/system/ and has three main sections: 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 [Unit] Description=My Application Service After=network.target Wants=network-online.target [Service] Type=simple User=appuser Group=appgroup WorkingDirectory=/opt/myapp ExecStart=/opt/myapp/bin/server Restart=on-failure RestartSec=5 [Install] WantedBy=multi-user.target Let’s break down what matters: ...

February 27, 2026 · 4 min · 715 words · Rob Washington