Python is the go-to language for automation scripts. Here's how to write CLI tools that are reliable and user-friendly.
Basic Script Structure 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 #!/usr/bin/env python3 """One-line description of what this script does.""" import argparse import sys def main(): parser = argparse.ArgumentParser(description=__doc__) parser.add_argument('input', help='Input file path') parser.add_argument('-o', '--output', help='Output file path') parser.add_argument('-v', '--verbose', action='store_true') args = parser.parse_args() # Your logic here process(args.input, args.output, args.verbose) if __name__ == '__main__': main() Argument Parsing with argparse Positional Arguments 1 2 3 4 parser.add_argument('filename') # Required parser.add_argument('files', nargs='+') # One or more parser.add_argument('files', nargs='*') # Zero or more parser.add_argument('config', nargs='?') # Optional positional Optional Arguments 1 2 3 4 5 parser.add_argument('-v', '--verbose', action='store_true') parser.add_argument('-q', '--quiet', action='store_false', dest='verbose') parser.add_argument('-n', '--count', type=int, default=10) parser.add_argument('-f', '--format', choices=['json', 'csv', 'table']) parser.add_argument('--config', type=argparse.FileType('r')) Subcommands 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 parser = argparse.ArgumentParser() subparsers = parser.add_subparsers(dest='command', required=True) # 'init' command init_parser = subparsers.add_parser('init', help='Initialize project') init_parser.add_argument('--force', action='store_true') # 'run' command run_parser = subparsers.add_parser('run', help='Run the application') run_parser.add_argument('--port', type=int, default=8080) args = parser.parse_args() if args.command == 'init': do_init(args.force) elif args.command == 'run': do_run(args.port) Error Handling 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 import sys def main(): try: result = process() return 0 except FileNotFoundError as e: print(f"Error: File not found: {e.filename}", file=sys.stderr) return 1 except PermissionError: print("Error: 
Permission denied", file=sys.stderr) return 1 except KeyboardInterrupt: print("\nInterrupted", file=sys.stderr) return 130 except Exception as e: print(f"Error: {e}", file=sys.stderr) return 1 if __name__ == '__main__': sys.exit(main()) Logging 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 import logging def setup_logging(verbose=False): level = logging.DEBUG if verbose else logging.INFO logging.basicConfig( level=level, format='%(asctime)s - %(levelname)s - %(message)s', datefmt='%Y-%m-%d %H:%M:%S' ) def main(): args = parse_args() setup_logging(args.verbose) logging.info("Starting process") logging.debug("Detailed info here") logging.warning("Something might be wrong") logging.error("Something went wrong") Log to File and Console 1 2 3 4 5 6 7 8 9 10 11 def setup_logging(verbose=False, log_file=None): handlers = [logging.StreamHandler()] if log_file: handlers.append(logging.FileHandler(log_file)) logging.basicConfig( level=logging.DEBUG if verbose else logging.INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', handlers=handlers ) Progress Indicators Simple Progress 1 2 3 4 5 6 7 8 import sys def process_items(items): total = len(items) for i, item in enumerate(items, 1): process(item) print(f"\rProcessing: {i}/{total}", end='', flush=True) print() # Newline at end With tqdm 1 2 3 4 5 6 7 8 9 10 from tqdm import tqdm for item in tqdm(items, desc="Processing"): process(item) # Or wrap any iterable with tqdm(total=100) as pbar: for i in range(100): do_work() pbar.update(1) Reading Input From File or Stdin 1 2 3 4 5 6 7 8 9 10 import sys def read_input(filepath=None): if filepath: with open(filepath) as f: return f.read() elif not sys.stdin.isatty(): return sys.stdin.read() else: raise ValueError("No input provided") Line by Line 1 2 3 4 5 import fileinput # Reads from files in args or stdin for line in fileinput.input(): process(line.strip()) Output Formatting JSON Output 1 2 3 4 5 6 7 import json def output_json(data, pretty=False): if pretty: 
print(json.dumps(data, indent=2, default=str)) else: print(json.dumps(data, default=str)) Table Output 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 def print_table(headers, rows): # Calculate column widths widths = [len(h) for h in headers] for row in rows: for i, cell in enumerate(row): widths[i] = max(widths[i], len(str(cell))) # Print header header_line = ' | '.join(h.ljust(widths[i]) for i, h in enumerate(headers)) print(header_line) print('-' * len(header_line)) # Print rows for row in rows: print(' | '.join(str(cell).ljust(widths[i]) for i, cell in enumerate(row))) With tabulate 1 2 3 4 5 6 7 from tabulate import tabulate data = [ ['Alice', 30, 'Engineer'], ['Bob', 25, 'Designer'], ] print(tabulate(data, headers=['Name', 'Age', 'Role'], tablefmt='grid')) Configuration Files YAML Config 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 import yaml from pathlib import Path def load_config(config_path=None): paths = [ config_path, Path.home() / '.myapp.yaml', Path('/etc/myapp/config.yaml'), ] for path in paths: if path and Path(path).exists(): with open(path) as f: return yaml.safe_load(f) return {} # Defaults Environment Variables 1 2 3 4 5 6 7 8 import os def get_config(): return { 'api_key': os.environ.get('API_KEY'), 'debug': os.environ.get('DEBUG', '').lower() in ('true', '1', 'yes'), 'timeout': int(os.environ.get('TIMEOUT', '30')), } Running External Commands 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 import subprocess def run_command(cmd, check=True): """Run command and return output.""" result = subprocess.run( cmd, shell=isinstance(cmd, str), capture_output=True, text=True, check=check ) return result.stdout.strip() # Usage output = run_command(['git', 'status', '--short']) output = run_command('ls -la | head -5') With Timeout 1 2 3 4 5 6 7 8 9 try: result = subprocess.run( ['slow-command'], timeout=30, capture_output=True, text=True ) except subprocess.TimeoutExpired: print("Command timed out") Temporary Files 1 2 3 4 5 6 7 8 9 10 11 12 13 import tempfile from pathlib import 
Path # Temporary file with tempfile.NamedTemporaryFile(mode='w', suffix='.json', delete=False) as f: f.write('{"data": "value"}') temp_path = f.name # Temporary directory with tempfile.TemporaryDirectory() as tmpdir: work_file = Path(tmpdir) / 'work.txt' work_file.write_text('working...') # Directory deleted when context exits Path Handling 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 from pathlib import Path def process_files(directory): base = Path(directory) # Find files for path in base.glob('**/*.py'): print(f"Processing: {path}") # Path operations print(f" Name: {path.name}") print(f" Stem: {path.stem}") print(f" Suffix: {path.suffix}") print(f" Parent: {path.parent}") # Read/write content = path.read_text() path.with_suffix('.bak').write_text(content) Complete Example 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 #!/usr/bin/env python3 """Process log files and output statistics.""" import argparse import json import logging import sys from collections import Counter from pathlib import Path def setup_logging(verbose): logging.basicConfig( level=logging.DEBUG if verbose else logging.INFO, format='%(levelname)s: %(message)s' ) def parse_args(): parser = argparse.ArgumentParser( description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter ) parser.add_argument( 'logfiles', nargs='+', type=Path, help='Log files to process' ) parser.add_argument( '-o', '--output', type=argparse.FileType('w'), default=sys.stdout, help='Output file (default: stdout)' ) parser.add_argument( '-f', '--format', choices=['json', 'text'], default='text', help='Output format' ) parser.add_argument( '-v', '--verbose', action='store_true', help='Enable verbose output' ) return parser.parse_args() def analyze_logs(logfiles): stats = Counter() for logfile in logfiles: 
logging.info(f"Processing {logfile}") if not logfile.exists(): logging.warning(f"File not found: {logfile}") continue for line in logfile.read_text().splitlines(): if 'ERROR' in line: stats['errors'] += 1 elif 'WARNING' in line: stats['warnings'] += 1 stats['total'] += 1 return dict(stats) def output_results(stats, output, fmt): if fmt == 'json': json.dump(stats, output, indent=2) output.write('\n') else: for key, value in stats.items(): output.write(f"{key}: {value}\n") def main(): args = parse_args() setup_logging(args.verbose) try: stats = analyze_logs(args.logfiles) output_results(stats, args.output, args.format) return 0 except Exception as e: logging.error(f"Failed: {e}") return 1 if __name__ == '__main__': sys.exit(main()) Usage:
...