curl Mastery: HTTP Requests from the Command Line

curl is the universal HTTP client. It’s installed everywhere, works with any API, and once mastered, becomes your go-to tool for testing, debugging, and scripting HTTP interactions. Basic Requests 1 2 3 4 5 6 7 8 # GET (default) curl https://api.example.com/users # Explicit methods curl -X POST https://api.example.com/users curl -X PUT https://api.example.com/users/1 curl -X DELETE https://api.example.com/users/1 curl -X PATCH https://api.example.com/users/1 Adding Headers 1 2 3 4 5 6 7 8 # Single header curl -H "Authorization: Bearer token123" https://api.example.com/me # Multiple headers curl -H "Authorization: Bearer token123" \ -H "Content-Type: application/json" \ -H "Accept: application/json" \ https://api.example.com/users Sending Data JSON Body 1 2 3 4 5 6 7 8 9 10 11 12 13 14 # Inline JSON curl -X POST https://api.example.com/users \ -H "Content-Type: application/json" \ -d '{"name": "Alice", "email": "alice@example.com"}' # From file curl -X POST https://api.example.com/users \ -H "Content-Type: application/json" \ -d @payload.json # From stdin echo '{"name": "Alice"}' | curl -X POST https://api.example.com/users \ -H "Content-Type: application/json" \ -d @- Form Data 1 2 3 4 5 6 7 8 # URL-encoded (default for -d without Content-Type) curl -X POST https://api.example.com/login \ -d "username=alice&password=secret" # Multipart form (file uploads) curl -X POST https://api.example.com/upload \ -F "file=@document.pdf" \ -F "description=My document" Response Handling Show Headers 1 2 3 4 5 6 7 8 # Response headers only curl -I https://api.example.com/health # Headers + body curl -i https://api.example.com/users # Verbose (request + response headers) curl -v https://api.example.com/users Output Control 1 2 3 4 5 6 7 8 9 10 11 12 13 14 # Save to file curl -o response.json https://api.example.com/users # Save with remote filename curl -O https://example.com/file.zip # Silent (no progress bar) curl -s https://api.example.com/users # Silent but show errors curl -sS 
https://api.example.com/users # Only output body (suppress all else) curl -s https://api.example.com/users | jq '.' Extract Specific Info 1 2 3 4 5 6 7 8 9 10 11 12 13 14 # HTTP status code only curl -s -o /dev/null -w "%{http_code}" https://api.example.com/health # Multiple variables curl -s -o /dev/null -w "Status: %{http_code}\nTime: %{time_total}s\nSize: %{size_download} bytes\n" \ https://api.example.com/users # Available variables # %{http_code} - HTTP status code # %{time_total} - Total time in seconds # %{time_connect} - Time to establish connection # %{time_starttransfer} - Time to first byte # %{size_download} - Downloaded bytes # %{url_effective} - Final URL after redirects Authentication 1 2 3 4 5 6 7 8 9 10 11 # Basic auth curl -u username:password https://api.example.com/secure # Bearer token curl -H "Authorization: Bearer eyJhbG..." https://api.example.com/me # API key in header curl -H "X-API-Key: abc123" https://api.example.com/data # API key in query string curl "https://api.example.com/data?api_key=abc123" Following Redirects 1 2 3 4 5 6 7 8 # Follow redirects (disabled by default) curl -L https://short.url/abc # Limit redirect count curl -L --max-redirs 5 https://example.com # Show redirect chain curl -L -v https://short.url/abc 2>&1 | grep "< location" Timeouts and Retries 1 2 3 4 5 6 7 8 9 10 11 # Connection timeout (seconds) curl --connect-timeout 5 https://api.example.com # Total operation timeout curl --max-time 30 https://api.example.com/slow-endpoint # Retry on failure curl --retry 3 --retry-delay 2 https://api.example.com # Retry on specific HTTP codes curl --retry 3 --retry-all-errors https://api.example.com SSL/TLS Options 1 2 3 4 5 6 7 8 # Skip certificate verification (development only!) 
curl -k https://self-signed.example.com # Use specific CA certificate curl --cacert /path/to/ca.crt https://api.example.com # Client certificate authentication curl --cert client.crt --key client.key https://api.example.com Cookies 1 2 3 4 5 6 7 8 9 10 11 # Send cookies curl -b "session=abc123; token=xyz" https://api.example.com # Save cookies to file curl -c cookies.txt https://api.example.com/login -d "user=alice&pass=secret" # Load cookies from file curl -b cookies.txt https://api.example.com/dashboard # Both (maintain session) curl -b cookies.txt -c cookies.txt https://api.example.com/action Useful Patterns Health Check Script 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 #!/bin/bash check_health() { local url=$1 local status=$(curl -s -o /dev/null -w "%{http_code}" --max-time 5 "$url") if [ "$status" = "200" ]; then echo "✓ $url" return 0 else echo "✗ $url (HTTP $status)" return 1 fi } check_health "https://api.example.com/health" check_health "https://web.example.com" API Testing 1 2 3 4 5 6 7 8 9 10 # Create resource and capture ID ID=$(curl -s -X POST https://api.example.com/users \ -H "Content-Type: application/json" \ -d '{"name": "Test User"}' | jq -r '.id') # Use captured ID curl -s https://api.example.com/users/$ID | jq '.' 
# Delete curl -X DELETE https://api.example.com/users/$ID Download with Progress 1 2 3 4 5 # Show progress bar curl -# -O https://example.com/large-file.zip # Resume interrupted download curl -C - -O https://example.com/large-file.zip Parallel Requests 1 2 3 4 5 6 7 8 # Using xargs echo -e "url1\nurl2\nurl3" | xargs -P 4 -I {} curl -s {} -o /dev/null -w "{}: %{http_code}\n" # Using curl's parallel feature (7.68+) curl --parallel --parallel-immediate \ https://api1.example.com \ https://api2.example.com \ https://api3.example.com Debugging Trace All Details 1 2 # Full trace including SSL handshake curl -v --trace-ascii debug.txt https://api.example.com Common Issues 1 2 3 4 5 6 7 8 9 # DNS resolution problems curl -v --resolve api.example.com:443:1.2.3.4 https://api.example.com # Force IPv4 or IPv6 curl -4 https://api.example.com # IPv4 only curl -6 https://api.example.com # IPv6 only # Use specific interface curl --interface eth0 https://api.example.com Config File Save common options in ~/.curlrc: ...

February 26, 2026 · 6 min · 1117 words · Rob Washington

jq: The Swiss Army Knife for JSON on the Command Line

If you work with APIs, logs, or configuration files, you work with JSON. And if you work with JSON from the command line, jq is indispensable. Here are the patterns I use daily. The Basics 1 2 3 4 5 6 7 8 9 10 # Pretty print echo '{"name":"alice","age":30}' | jq '.' # Extract a field echo '{"name":"alice","age":30}' | jq '.name' # "alice" # Raw output (no quotes) echo '{"name":"alice","age":30}' | jq -r '.name' # alice The -r flag is your friend. Use it whenever you want the actual value, not a JSON string. ...

February 26, 2026 · 6 min · 1200 words · Rob Washington

AWS CLI Essentials: Patterns for Daily Operations

The AWS CLI is the fastest path from question to answer. These patterns cover the operations you’ll use daily. Setup and Configuration 1 2 3 4 5 6 7 8 9 10 11 12 13 14 # Configure default profile aws configure # Configure named profile aws configure --profile production # Use specific profile aws --profile production ec2 describe-instances # Or set environment variable export AWS_PROFILE=production # Verify identity aws sts get-caller-identity Multiple Accounts 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 # ~/.aws/credentials [default] aws_access_key_id = AKIA... aws_secret_access_key = ... [production] aws_access_key_id = AKIA... aws_secret_access_key = ... # ~/.aws/config [default] region = us-east-1 output = json [profile production] region = us-west-2 output = json EC2 Operations List Instances 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 # All instances aws ec2 describe-instances # Just the essentials aws ec2 describe-instances \ --query 'Reservations[].Instances[].[InstanceId,State.Name,InstanceType,PrivateIpAddress,Tags[?Key==`Name`].Value|[0]]' \ --output table # Running instances only aws ec2 describe-instances \ --filters "Name=instance-state-name,Values=running" \ --query 'Reservations[].Instances[].[InstanceId,PrivateIpAddress]' \ --output text # By tag aws ec2 describe-instances \ --filters "Name=tag:Environment,Values=production" # By instance ID aws ec2 describe-instances --instance-ids i-1234567890abcdef0 Start/Stop/Reboot 1 2 3 4 5 6 7 8 9 10 11 # Stop aws ec2 stop-instances --instance-ids i-1234567890abcdef0 # Start aws ec2 start-instances --instance-ids i-1234567890abcdef0 # Reboot aws ec2 reboot-instances --instance-ids i-1234567890abcdef0 # Terminate (careful!) 
aws ec2 terminate-instances --instance-ids i-1234567890abcdef0 Get Console Output 1 aws ec2 get-console-output --instance-id i-1234567890abcdef0 --output text SSH Key Pairs 1 2 3 4 5 6 7 8 9 # List aws ec2 describe-key-pairs # Create aws ec2 create-key-pair --key-name mykey --query 'KeyMaterial' --output text > mykey.pem chmod 400 mykey.pem # Delete aws ec2 delete-key-pair --key-name mykey S3 Operations List and Navigate 1 2 3 4 5 6 7 8 9 10 11 # List buckets aws s3 ls # List bucket contents aws s3 ls s3://mybucket/ # Recursive listing aws s3 ls s3://mybucket/ --recursive # With human-readable sizes aws s3 ls s3://mybucket/ --recursive --human-readable --summarize Copy and Sync 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 # Upload file aws s3 cp myfile.txt s3://mybucket/ # Download file aws s3 cp s3://mybucket/myfile.txt ./ # Upload directory aws s3 cp ./mydir s3://mybucket/mydir --recursive # Sync (only changed files) aws s3 sync ./local s3://mybucket/remote # Sync with delete (mirror) aws s3 sync ./local s3://mybucket/remote --delete # Exclude patterns aws s3 sync ./local s3://mybucket/remote --exclude "*.log" --exclude ".git/*" Delete 1 2 3 4 5 6 7 8 # Single file aws s3 rm s3://mybucket/myfile.txt # Directory aws s3 rm s3://mybucket/mydir/ --recursive # Empty bucket aws s3 rm s3://mybucket/ --recursive Presigned URLs 1 2 3 4 5 # Generate download URL (expires in 1 hour) aws s3 presign s3://mybucket/myfile.txt --expires-in 3600 # Upload URL aws s3 presign s3://mybucket/upload.txt --expires-in 3600 Bucket Operations 1 2 3 4 5 6 7 8 # Create bucket aws s3 mb s3://mynewbucket # Delete bucket (must be empty) aws s3 rb s3://mybucket # Force delete (removes contents first) aws s3 rb s3://mybucket --force IAM Operations Users 1 2 3 4 5 6 7 8 9 10 11 # List users aws iam list-users # Create user aws iam create-user --user-name newuser # Delete user aws iam delete-user --user-name olduser # List user's access keys aws iam list-access-keys --user-name myuser Roles 1 2 3 4 5 
6 7 8 # List roles aws iam list-roles # Get role details aws iam get-role --role-name MyRole # List attached policies aws iam list-attached-role-policies --role-name MyRole Policies 1 2 3 4 5 6 7 # List policies aws iam list-policies --scope Local # Get policy document aws iam get-policy-version \ --policy-arn arn:aws:iam::123456789012:policy/MyPolicy \ --version-id v1 CloudWatch Logs 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 # List log groups aws logs describe-log-groups # List log streams aws logs describe-log-streams --log-group-name /aws/lambda/myfunction # Get recent logs aws logs get-log-events \ --log-group-name /aws/lambda/myfunction \ --log-stream-name '2024/01/01/[$LATEST]abc123' \ --limit 100 # Tail logs (requires aws-cli v2) aws logs tail /aws/lambda/myfunction --follow # Filter logs aws logs filter-log-events \ --log-group-name /aws/lambda/myfunction \ --filter-pattern "ERROR" \ --start-time $(date -d '1 hour ago' +%s)000 Lambda 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 # List functions aws lambda list-functions # Invoke function aws lambda invoke \ --function-name myfunction \ --payload '{"key": "value"}' \ output.json # Get function config aws lambda get-function-configuration --function-name myfunction # Update function code aws lambda update-function-code \ --function-name myfunction \ --zip-file fileb://function.zip # View recent invocations aws lambda get-function --function-name myfunction RDS 1 2 3 4 5 6 7 8 9 10 11 12 13 14 # List instances aws rds describe-db-instances # Instance details aws rds describe-db-instances --db-instance-identifier mydb \ --query 'DBInstances[0].[DBInstanceIdentifier,DBInstanceStatus,Endpoint.Address]' # Create snapshot aws rds create-db-snapshot \ --db-instance-identifier mydb \ --db-snapshot-identifier mydb-snapshot-$(date +%Y%m%d) # List snapshots aws rds describe-db-snapshots --db-instance-identifier mydb Secrets Manager 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 # List secrets aws secretsmanager 
list-secrets # Get secret value aws secretsmanager get-secret-value --secret-id mysecret \ --query 'SecretString' --output text # Create secret aws secretsmanager create-secret \ --name mysecret \ --secret-string '{"username":"admin","password":"secret"}' # Update secret aws secretsmanager put-secret-value \ --secret-id mysecret \ --secret-string '{"username":"admin","password":"newsecret"}' SSM Parameter Store 1 2 3 4 5 6 7 8 9 10 11 12 # Get parameter aws ssm get-parameter --name /myapp/database/password --with-decryption # Put parameter aws ssm put-parameter \ --name /myapp/database/password \ --value "mysecret" \ --type SecureString \ --overwrite # List parameters by path aws ssm get-parameters-by-path --path /myapp/ --recursive --with-decryption Query and Filter Patterns JMESPath Queries 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 # Select specific fields aws ec2 describe-instances \ --query 'Reservations[].Instances[].[InstanceId,State.Name]' # Filter in query aws ec2 describe-instances \ --query 'Reservations[].Instances[?State.Name==`running`].[InstanceId]' # First result only aws ec2 describe-instances \ --query 'Reservations[0].Instances[0].InstanceId' # Flatten nested arrays aws ec2 describe-instances \ --query 'Reservations[].Instances[].Tags[?Key==`Name`].Value[]' Output Formats 1 2 3 4 5 6 7 8 9 10 11 # JSON (default) aws ec2 describe-instances --output json # Table (human readable) aws ec2 describe-instances --output table # Text (tab-separated, good for scripts) aws ec2 describe-instances --output text # YAML aws ec2 describe-instances --output yaml Scripting Patterns Loop Through Resources 1 2 3 4 5 6 7 8 # Stop all instances with specific tag for id in $(aws ec2 describe-instances \ --filters "Name=tag:Environment,Values=dev" \ --query 'Reservations[].Instances[].InstanceId' \ --output text); do echo "Stopping $id" aws ec2 stop-instances --instance-ids "$id" done Wait for State 1 2 3 4 5 6 7 8 # Wait for instance to be running aws ec2 wait 
instance-running --instance-ids i-1234567890abcdef0 # Wait for instance to stop aws ec2 wait instance-stopped --instance-ids i-1234567890abcdef0 # Wait for snapshot completion aws ec2 wait snapshot-completed --snapshot-ids snap-1234567890abcdef0 Pagination 1 2 3 4 5 6 7 # Auto-pagination (default in CLI v2) aws s3api list-objects-v2 --bucket mybucket # Manual pagination aws s3api list-objects-v2 --bucket mybucket --max-items 100 # Use NextToken from output for next page aws s3api list-objects-v2 --bucket mybucket --starting-token "token..." Useful Aliases 1 2 3 4 5 6 7 8 9 10 # ~/.bashrc or ~/.zshrc # Quick instance list alias ec2ls='aws ec2 describe-instances --query "Reservations[].Instances[].[InstanceId,State.Name,InstanceType,PrivateIpAddress,Tags[?Key==\`Name\`].Value|[0]]" --output table' # Who am I? alias awswho='aws sts get-caller-identity' # S3 bucket sizes alias s3sizes='aws s3 ls | while read _ _ bucket; do aws s3 ls s3://$bucket --recursive --summarize 2>/dev/null | tail -1; echo $bucket; done' Troubleshooting 1 2 3 4 5 6 7 8 9 10 11 # Debug mode aws ec2 describe-instances --debug # Dry run (check permissions without executing) aws ec2 run-instances --dry-run --image-id ami-12345 --instance-type t2.micro # Check CLI version aws --version # Clear credential cache rm -rf ~/.aws/cli/cache/* The AWS CLI rewards muscle memory. Start with the operations you do daily, build aliases for common patterns, and gradually expand. ...

February 25, 2026 · 7 min · 1350 words · Rob Washington

curl Deep Dive: HTTP Requests from the Command Line

curl is the universal language of HTTP. Every API doc includes curl examples. Every debugging session starts with “can you curl it?” If you’re not comfortable with curl, you’re missing the most portable tool in your kit. Basic Requests 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 # GET (default) curl https://api.example.com/users # With headers shown curl -i https://api.example.com/users # Headers only curl -I https://api.example.com/users # Silent (no progress bar) curl -s https://api.example.com/users # Follow redirects curl -L https://example.com/redirect # Verbose (debug mode) curl -v https://api.example.com/users HTTP Methods 1 2 3 4 5 6 7 8 9 10 11 12 13 14 # POST curl -X POST https://api.example.com/users # PUT curl -X PUT https://api.example.com/users/1 # PATCH curl -X PATCH https://api.example.com/users/1 # DELETE curl -X DELETE https://api.example.com/users/1 # HEAD (headers only, like -I) curl -X HEAD https://api.example.com/users Sending Data Form Data 1 2 3 4 5 6 7 # URL-encoded form curl -X POST https://api.example.com/login \ -d "username=admin&password=secret" # From file curl -X POST https://api.example.com/login \ -d @credentials.txt JSON Data 1 2 3 4 5 6 7 8 9 10 11 12 # Inline JSON curl -X POST https://api.example.com/users \ -H "Content-Type: application/json" \ -d '{"name":"Alice","email":"alice@example.com"}' # From file curl -X POST https://api.example.com/users \ -H "Content-Type: application/json" \ -d @user.json # Using --json (curl 7.82+) curl --json '{"name":"Alice"}' https://api.example.com/users File Upload 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 # Single file curl -X POST https://api.example.com/upload \ -F "file=@document.pdf" # Multiple files curl -X POST https://api.example.com/upload \ -F "file1=@doc1.pdf" \ -F "file2=@doc2.pdf" # File with custom filename curl -X POST https://api.example.com/upload \ -F "file=@localname.pdf;filename=remote.pdf" # File with content type curl -X POST https://api.example.com/upload \ -F 
"file=@image.png;type=image/png" # Mixed form data and files curl -X POST https://api.example.com/upload \ -F "title=My Document" \ -F "file=@document.pdf" Headers 1 2 3 4 5 6 7 8 9 10 11 12 13 # Custom header curl -H "X-Custom-Header: value" https://api.example.com # Multiple headers curl -H "Accept: application/json" \ -H "X-API-Version: 2" \ https://api.example.com # User agent curl -A "MyApp/1.0" https://api.example.com # Referer curl -e "https://example.com" https://api.example.com Authentication Basic Auth 1 2 3 4 5 6 7 8 # Username and password curl -u username:password https://api.example.com # Prompt for password curl -u username https://api.example.com # In URL (not recommended) curl https://username:password@api.example.com Bearer Token 1 curl -H "Authorization: Bearer YOUR_TOKEN" https://api.example.com API Key 1 2 3 4 5 # In header curl -H "X-API-Key: YOUR_KEY" https://api.example.com # In query string curl "https://api.example.com?api_key=YOUR_KEY" OAuth 2.0 Flow 1 2 3 4 5 6 7 8 9 # Get access token curl -X POST https://auth.example.com/oauth/token \ -d "grant_type=client_credentials" \ -d "client_id=YOUR_CLIENT_ID" \ -d "client_secret=YOUR_SECRET" # Use token TOKEN="eyJ..." 
curl -H "Authorization: Bearer $TOKEN" https://api.example.com/resource Digest Auth 1 curl --digest -u username:password https://api.example.com Output Options 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 # Save to file curl -o output.html https://example.com # Save with remote filename curl -O https://example.com/file.zip # Save multiple files curl -O https://example.com/file1.zip -O https://example.com/file2.zip # Append to file curl https://example.com >> output.txt # Write headers to file curl -D headers.txt https://example.com # Output to stdout and file curl https://example.com | tee output.html Timeouts and Retries 1 2 3 4 5 6 7 8 9 10 11 12 13 14 # Connection timeout (seconds) curl --connect-timeout 5 https://api.example.com # Max time for entire operation curl -m 30 https://api.example.com # Retry on failure curl --retry 3 https://api.example.com # Retry with delay curl --retry 3 --retry-delay 5 https://api.example.com # Retry on specific errors curl --retry 3 --retry-all-errors https://api.example.com SSL/TLS 1 2 3 4 5 6 7 8 9 10 11 12 # Skip certificate verification (insecure!) 
curl -k https://self-signed.example.com # Use specific CA certificate curl --cacert /path/to/ca.crt https://api.example.com # Client certificate curl --cert client.crt --key client.key https://api.example.com # Force TLS version curl --tlsv1.2 https://api.example.com curl --tlsv1.3 https://api.example.com Proxy 1 2 3 4 5 6 7 8 9 10 11 # HTTP proxy curl -x http://proxy:8080 https://api.example.com # SOCKS5 proxy curl --socks5 localhost:1080 https://api.example.com # Proxy with auth curl -x http://user:pass@proxy:8080 https://api.example.com # No proxy for specific hosts curl --noproxy "localhost,*.internal" https://api.example.com Cookies 1 2 3 4 5 6 7 8 9 10 11 # Send cookie curl -b "session=abc123" https://api.example.com # Send cookies from file curl -b cookies.txt https://api.example.com # Save cookies to file curl -c cookies.txt https://api.example.com/login # Full session (save and send) curl -b cookies.txt -c cookies.txt https://api.example.com Response Inspection 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 # HTTP status code only curl -s -o /dev/null -w "%{http_code}" https://api.example.com # Response time curl -s -o /dev/null -w "%{time_total}s" https://api.example.com # Detailed timing curl -s -o /dev/null -w " DNS: %{time_namelookup}s Connect: %{time_connect}s TLS: %{time_appconnect}s Start: %{time_starttransfer}s Total: %{time_total}s Size: %{size_download} bytes Speed: %{speed_download} bytes/sec " https://api.example.com # Content type curl -s -o /dev/null -w "%{content_type}" https://api.example.com Scripting Patterns Health Check 1 2 3 4 5 6 7 8 9 10 11 #!/bin/bash URL="https://api.example.com/health" STATUS=$(curl -s -o /dev/null -w "%{http_code}" "$URL") if [ "$STATUS" -eq 200 ]; then echo "OK" exit 0 else echo "FAIL: HTTP $STATUS" exit 1 fi API Wrapper 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 #!/bin/bash API_BASE="https://api.example.com" API_KEY="${API_KEY:?API_KEY required}" api_get() { curl -s -H "Authorization: Bearer $API_KEY" 
"$API_BASE$1" } api_post() { curl -s -X POST \ -H "Authorization: Bearer $API_KEY" \ -H "Content-Type: application/json" \ -d "$2" \ "$API_BASE$1" } # Usage api_get "/users" | jq '.' api_post "/users" '{"name":"Alice"}' Retry with Backoff 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 #!/bin/bash URL="$1" MAX_RETRIES=5 RETRY_DELAY=1 for i in $(seq 1 $MAX_RETRIES); do RESPONSE=$(curl -s -w "\n%{http_code}" "$URL") STATUS=$(echo "$RESPONSE" | tail -1) BODY=$(echo "$RESPONSE" | sed '$d') if [ "$STATUS" -eq 200 ]; then echo "$BODY" exit 0 fi echo "Attempt $i failed (HTTP $STATUS), retrying in ${RETRY_DELAY}s..." >&2 sleep $RETRY_DELAY RETRY_DELAY=$((RETRY_DELAY * 2)) done echo "Failed after $MAX_RETRIES attempts" >&2 exit 1 Parallel Requests 1 2 3 4 5 # Using xargs cat urls.txt | xargs -P 10 -I {} curl -s -o /dev/null -w "{}: %{http_code}\n" {} # Using GNU parallel parallel -j 10 curl -s -o /dev/null -w "{}: %{http_code}\n" ::: $(cat urls.txt) Config Files Create ~/.curlrc for defaults: ...

February 25, 2026 · 8 min · 1541 words · Rob Washington

jq Mastery: JSON Processing on the Command Line

Every API returns JSON. Every config file is JSON. If you’re not fluent in jq, you’re copying data by hand like it’s 1995. The Basics 1 2 3 4 5 6 7 8 9 10 # Pretty print echo '{"name":"test","value":42}' | jq '.' # Extract a field echo '{"name":"test","value":42}' | jq '.name' # "test" # Raw output (no quotes) echo '{"name":"test","value":42}' | jq -r '.name' # test Working with APIs 1 2 3 4 5 6 7 8 # GitHub API curl -s https://api.github.com/users/torvalds | jq '.login, .public_repos' # Extract specific fields curl -s https://api.github.com/repos/stedolan/jq | jq '{name, stars: .stargazers_count, language}' # AWS CLI (already outputs JSON) aws ec2 describe-instances | jq '.Reservations[].Instances[] | {id: .InstanceId, state: .State.Name}' Array Operations 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 # Sample data DATA='[{"name":"alice","age":30},{"name":"bob","age":25},{"name":"carol","age":35}]' # First element echo $DATA | jq '.[0]' # Last element echo $DATA | jq '.[-1]' # Slice echo $DATA | jq '.[0:2]' # All names echo $DATA | jq '.[].name' # Array of names echo $DATA | jq '[.[].name]' # Length echo $DATA | jq 'length' Filtering 1 2 3 4 5 6 7 8 9 10 11 # Select by condition echo $DATA | jq '.[] | select(.age > 28)' # Multiple conditions echo $DATA | jq '.[] | select(.age > 25 and .name != "carol")' # Contains echo '[{"tags":["web","api"]},{"tags":["cli"]}]' | jq '.[] | select(.tags | contains(["api"]))' # Has key echo '{"a":1,"b":null}' | jq 'has("a"), has("c")' Transformation 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 # Add/modify fields echo '{"name":"test"}' | jq '. + {status: "active", count: 0}' # Update existing field echo '{"count":5}' | jq '.count += 1' # Delete field echo '{"a":1,"b":2,"c":3}' | jq 'del(.b)' # Rename key echo '{"old_name":"value"}' | jq '{new_name: .old_name}' # Map over array echo '[1,2,3,4,5]' | jq 'map(. 
* 2)' # Map with objects echo $DATA | jq 'map({username: .name, birth_year: (2026 - .age)})' String Operations 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 # Concatenation echo '{"first":"John","last":"Doe"}' | jq '.first + " " + .last' # String interpolation echo '{"name":"test","ver":"1.0"}' | jq '"\(.name)-\(.ver).tar.gz"' # Split echo '{"path":"/usr/local/bin"}' | jq '.path | split("/")' # Join echo '["a","b","c"]' | jq 'join(",")' # Upper/lower echo '"Hello World"' | jq 'ascii_downcase' echo '"Hello World"' | jq 'ascii_upcase' # Test regex echo '{"email":"test@example.com"}' | jq '.email | test("@")' # Replace echo '"hello world"' | jq 'gsub("world"; "jq")' Conditionals 1 2 3 4 5 6 7 8 9 10 11 # If-then-else echo '{"status":200}' | jq 'if .status == 200 then "ok" else "error" end' # Alternative operator (default value) echo '{"a":1}' | jq '.b // "default"' # Null handling echo '{"a":null}' | jq '.a // "was null"' # Error handling echo '{}' | jq '.missing.nested // "not found"' Grouping and Aggregation 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 LOGS='[ {"level":"error","msg":"failed"}, {"level":"info","msg":"started"}, {"level":"error","msg":"timeout"}, {"level":"info","msg":"completed"} ]' # Group by field echo $LOGS | jq 'group_by(.level)' # Count per group echo $LOGS | jq 'group_by(.level) | map({level: .[0].level, count: length})' # Unique values echo $LOGS | jq '[.[].level] | unique' # Sort echo $DATA | jq 'sort_by(.age)' # Reverse sort echo $DATA | jq 'sort_by(.age) | reverse' # Min/max echo '[5,2,8,1,9]' | jq 'min, max' # Sum echo '[1,2,3,4,5]' | jq 'add' # Average echo '[1,2,3,4,5]' | jq 'add / length' Constructing Output 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 # Build new object curl -s https://api.github.com/users/torvalds | jq '{ username: .login, repos: .public_repos, profile: .html_url }' # Build array echo '{"users":[{"name":"a"},{"name":"b"}]}' | jq '[.users[].name]' # Multiple 
outputs to array echo '{"a":1,"b":2}' | jq '[.a, .b, .a + .b]' # Key-value pairs echo '{"a":1,"b":2}' | jq 'to_entries' # [{"key":"a","value":1},{"key":"b","value":2}] # Back to object echo '[{"key":"a","value":1}]' | jq 'from_entries' # Transform keys echo '{"old_a":1,"old_b":2}' | jq 'with_entries(.key |= ltrimstr("old_"))' Real-World Examples Parse AWS Instance List 1 2 3 4 5 aws ec2 describe-instances | jq -r ' .Reservations[].Instances[] | [.InstanceId, .State.Name, (.Tags[]? | select(.Key=="Name") | .Value) // "unnamed"] | @tsv ' Filter Docker Containers 1 2 3 4 5 6 docker inspect $(docker ps -q) | jq '.[] | { name: .Name, image: .Config.Image, status: .State.Status, ip: .NetworkSettings.IPAddress }' Process Log Files 1 2 3 4 5 6 7 # Count errors by type cat app.log | jq -s 'group_by(.error_type) | map({type: .[0].error_type, count: length}) | sort_by(.count) | reverse' # Extract errors from last hour cat app.log | jq --arg cutoff "$(date -d '1 hour ago' -Iseconds)" ' select(.timestamp > $cutoff and .level == "error") ' Transform Config Files 1 2 3 4 5 6 7 8 # Merge configs jq -s '.[0] * .[1]' base.json override.json # Update nested value jq '.database.host = "newhost.example.com"' config.json # Add to array jq '.allowed_ips += ["10.0.0.5"]' config.json Generate Reports 1 2 3 4 5 6 # Kubernetes pod status kubectl get pods -o json | jq -r ' .items[] | [.metadata.name, .status.phase, (.status.containerStatuses[0].restartCount // 0)] | @tsv ' | column -t Useful Flags 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 # Compact output (no pretty print) jq -c '.' # Raw output (no quotes on strings) jq -r '.name' # Raw input (treat input as string, not JSON) jq -R 'split(",")' # Slurp (read all inputs into array) cat *.json | jq -s '.' 
# Pass variable jq --arg name "test" '.name = $name' # Pass JSON variable jq --argjson count 42 '.count = $count' # Read from file jq --slurpfile users users.json '.users = $users' # Exit with error if output is null/false jq -e '.important_field' && echo "exists" # Sort keys in output jq -S '.' Output Formats 1 2 3 4 5 6 7 8 9 10 11 12 13 14 # Tab-separated echo $DATA | jq -r '.[] | [.name, .age] | @tsv' # CSV echo $DATA | jq -r '.[] | [.name, .age] | @csv' # URI encoding echo '{"q":"hello world"}' | jq -r '.q | @uri' # Base64 echo '{"data":"secret"}' | jq -r '.data | @base64' # Shell-safe echo '{"cmd":"echo hello"}' | jq -r '.cmd | @sh' Debugging 1 2 3 4 5 6 7 8 9 10 11 # Show type echo '{"a":[1,2,3]}' | jq '.a | type' # Show keys echo '{"a":1,"b":2}' | jq 'keys' # Debug output (shows intermediate values) echo '{"x":{"y":{"z":1}}}' | jq '.x | debug | .y | debug | .z' # Path to value echo '{"a":{"b":{"c":1}}}' | jq 'path(.. | select(. == 1))' Quick Reference 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 # Identity . # Field access .field .field.nested # Array access .[0] .[-1] .[2:5] # Iterate array .[] # Pipe .[] | .name # Collect into array [.[] | .name] # Object construction {newkey: .oldkey} # Conditionals if COND then A else B end VALUE // DEFAULT # Comparison ==, !=, <, >, <=, >= and, or, not # Array functions map(f), select(f), sort_by(f), group_by(f), unique, length, first, last, nth(n), flatten, reverse, contains(x), inside(x), add, min, max # String functions split(s), join(s), test(re), match(re), gsub(re;s), ascii_downcase, ascii_upcase, ltrimstr(s), rtrimstr(s), startswith(s), endswith(s) # Object functions keys, values, has(k), in(o), to_entries, from_entries, with_entries(f) # Type functions type, isnumber, isstring, isnull, isboolean, isarray, isobject jq turns JSON from a data format into a query language. 
Once you internalize the pipe-and-filter model, you’ll wonder how you ever survived without it. ...

February 25, 2026 · 7 min · 1346 words · Rob Washington

tar: Creating and Extracting Archives

tar (tape archive) bundles files and directories into a single file. Combined with compression, it’s the standard way to package and distribute files on Unix systems. The Basics 1 2 3 4 5 6 7 8 # Create archive tar -cvf archive.tar /path/to/files # Extract archive tar -xvf archive.tar # List contents tar -tvf archive.tar Understanding the Flags 1 2 3 4 5 c = Create archive x = Extract archive t = List contents v = Verbose (show files) f = File (next arg is filename) So tar -cvf = Create, Verbose, File. ...

February 25, 2026 · 5 min · 875 words · Rob Washington

watch: Repeat Commands and See Changes

watch runs a command every N seconds and displays the output. It’s the simplest form of real-time monitoring — no setup, no configuration, just instant feedback loops. Basic Usage 1 2 3 4 5 6 7 8 # Run command every 2 seconds (default) watch date # Run every 5 seconds watch -n 5 date # Run every 0.5 seconds watch -n 0.5 date Highlight Changes 1 2 3 4 5 # Highlight differences between updates watch -d df -h # Highlight changes permanently (cumulative) watch -d=cumulative df -h Common Options 1 2 3 4 5 6 7 -n, --interval Seconds between updates (default: 2) -d, --differences Highlight changes -t, --no-title Hide the header -b, --beep Beep on command error -e, --errexit Exit on command error -c, --color Interpret ANSI color sequences -x, --exec Pass command to exec instead of sh -c Practical Examples Disk Space 1 2 3 4 5 6 7 8 # Watch disk usage watch df -h # Watch specific mount watch 'df -h | grep /dev/sda1' # Watch directory size watch 'du -sh /var/log' Memory 1 2 3 4 5 # Memory stats watch free -h # Memory with buffers/cache detail watch 'free -h && echo && cat /proc/meminfo | head -10' Processes 1 2 3 4 5 6 7 8 # Process count watch 'ps aux | wc -l' # Specific process watch 'ps aux | grep nginx' # Process memory watch 'ps aux --sort=-%mem | head -10' Network 1 2 3 4 5 6 7 8 9 10 11 # Network connections watch 'netstat -tuln' # Connection count watch 'netstat -an | wc -l' # Active connections watch 'ss -s' # Interface stats watch 'cat /proc/net/dev' Files and Directories 1 2 3 4 5 6 7 8 9 10 11 # File list watch ls -la # Directory size changes watch 'du -sh *' # File count watch 'find . 
-type f | wc -l' # Recent files watch 'ls -lt | head -10' Docker 1 2 3 4 5 6 7 8 # Container status watch docker ps # Container stats watch 'docker stats --no-stream' # Image list watch docker images Kubernetes 1 2 3 4 5 6 7 8 # Pod status watch kubectl get pods # All resources watch 'kubectl get pods,svc,deploy' # Pod logs (last 5 lines) watch 'kubectl logs -l app=myapp --tail=5' Git 1 2 3 4 5 6 7 8 # Branch status watch git status # Log (one line per commit) watch 'git log --oneline -10' # Diff stats watch git diff --stat Logs 1 2 3 4 5 6 7 8 # Last log lines watch 'tail -5 /var/log/syslog' # Error count watch 'grep -c ERROR /var/log/app.log' # Recent errors watch 'grep ERROR /var/log/app.log | tail -5' APIs and Services 1 2 3 4 5 6 7 8 # HTTP health check watch 'curl -s localhost:8080/health' # API response time watch 'curl -s -w "%{time_total}\n" -o /dev/null http://localhost/api' # Service status watch systemctl status nginx Database 1 2 3 4 5 6 7 8 # PostgreSQL connections watch 'psql -c "SELECT count(*) FROM pg_stat_activity"' # MySQL process list watch 'mysql -e "SHOW PROCESSLIST"' # Table row count watch 'psql -c "SELECT count(*) FROM users"' Quoting and Complex Commands For commands with pipes or special characters, quote the entire command: ...

February 25, 2026 · 5 min · 949 words · Rob Washington

xargs: Turning Output into Arguments

Many commands output lists. Many commands need arguments. xargs connects them. It reads input and runs a command with that input as arguments. Basic Usage 1 2 3 4 5 6 7 8 9 # Without xargs (doesn't work) find . -name "*.txt" | rm # rm doesn't read stdin # With xargs (works) find . -name "*.txt" | xargs rm # What's happening echo "file1 file2 file3" | xargs rm # Becomes: rm file1 file2 file3 Input Handling 1 2 3 4 5 6 7 8 9 10 11 12 13 # Default: split on whitespace echo "a b c" | xargs echo # Output: a b c # One item per line echo -e "a\nb\nc" | xargs echo # Output: a b c # Handle spaces in filenames (-0 with null delimiter) find . -name "*.txt" -print0 | xargs -0 rm # Treat each line as one argument cat list.txt | xargs -d '\n' command Argument Placement 1 2 3 4 5 6 7 8 9 10 # Default: append to end echo "file.txt" | xargs wc -l # Becomes: wc -l file.txt # Custom placement with -I echo "file.txt" | xargs -I {} cp {} {}.bak # Becomes: cp file.txt file.txt.bak # Multiple uses of placeholder echo "file.txt" | xargs -I {} sh -c 'echo "Processing {}"; wc -l {}' Limiting Arguments 1 2 3 4 5 6 7 8 9 10 11 12 13 # One argument per command execution find . -name "*.txt" | xargs -n 1 rm # Runs: rm file1.txt, rm file2.txt, rm file3.txt (separately) # Two arguments per execution echo "a b c d e f" | xargs -n 2 echo # Output: # a b # c d # e f # Limit by size (bytes) echo "a b c d e f" | xargs -s 10 echo Parallel Execution 1 2 3 4 5 6 7 8 # Run 4 processes in parallel find . -name "*.jpg" | xargs -P 4 -I {} convert {} -resize 800x600 {}.resized.jpg # All available CPUs find . -name "*.log" | xargs -P 0 gzip # Combined with -n cat urls.txt | xargs -n 1 -P 10 curl -O Confirmation (-p) and Verbose (-t) 1 2 3 4 5 # Ask before each execution find . -name "*.bak" | xargs -p rm # Show command before running find . -name "*.txt" | xargs -t wc -l Handling Empty Input 1 2 3 4 # Don't run if no input find . -name "*.missing" | xargs --no-run-if-empty rm # Short form: find . 
-name "*.missing" | xargs -r rm Practical Examples Bulk File Operations 1 2 3 4 5 6 7 8 9 10 11 # Delete files matching pattern find . -name "*.tmp" -print0 | xargs -0 rm -f # Move files to directory find . -name "*.jpg" -print0 | xargs -0 -I {} mv {} ./images/ # Change permissions find . -type f -name "*.sh" | xargs chmod +x # Compress multiple files find . -name "*.log" -mtime +7 | xargs gzip Search and Process 1 2 3 4 5 6 7 8 # Search in found files find . -name "*.py" | xargs grep "import os" # Count lines in all matching files find . -name "*.js" | xargs wc -l # Replace text in multiple files find . -name "*.txt" | xargs sed -i 's/old/new/g' Git Operations 1 2 3 4 5 6 7 8 # Add specific files git status --short | awk '{print $2}' | xargs git add # Remove deleted files from tracking git ls-files --deleted | xargs git rm # Checkout specific files echo "file1.txt file2.txt" | xargs git checkout -- Download Multiple URLs 1 2 3 4 5 6 7 8 # Download sequentially cat urls.txt | xargs -n 1 curl -O # Download in parallel (10 at a time) cat urls.txt | xargs -n 1 -P 10 curl -O # wget version cat urls.txt | xargs -n 1 -P 5 wget -q Docker Operations 1 2 3 4 5 6 7 8 9 10 11 # Stop all containers docker ps -q | xargs docker stop # Remove all stopped containers docker ps -aq | xargs docker rm # Remove images by pattern docker images | grep "none" | awk '{print $3}' | xargs docker rmi # Pull multiple images echo "nginx redis postgres" | xargs -n 1 docker pull Process Management 1 2 3 4 5 6 7 8 # Kill processes by name pgrep -f "pattern" | xargs kill # Kill processes by name (safer) pgrep -f "my-app" | xargs -r kill -9 # Send signal to multiple PIDs cat pids.txt | xargs kill -HUP Package Management 1 2 3 4 5 6 7 8 # Install multiple packages echo "vim git curl wget" | xargs sudo apt install -y # Uninstall packages from list cat remove.txt | xargs sudo apt remove -y # pip install from list cat requirements.txt | xargs -n 1 pip install Combining with Other Tools With find 1 2 3 4 
5 # Process found files find . -name "*.md" -print0 | xargs -0 -I {} pandoc {} -o {}.html # Archive old files find /var/log -name "*.log" -mtime +30 -print0 | xargs -0 tar -czvf old-logs.tar.gz With grep 1 2 3 4 5 # Files containing pattern -> process grep -l "TODO" *.py | xargs -I {} echo "File with TODOs: {}" # Extract matches and process grep -oh "https://[^ ]*" urls.txt | xargs -n 1 -P 5 curl -sI | grep "HTTP" With awk 1 2 3 4 5 # Select column and process ps aux | awk '$3 > 50 {print $2}' | xargs kill # Format output for xargs cat data.csv | awk -F, '{print $1}' | xargs -I {} echo "ID: {}" Error Handling 1 2 3 4 5 6 7 8 9 # Continue on errors find . -name "*.txt" | xargs -I {} sh -c 'command {} || true' # Default: xargs keeps going on errors and exits 123 if any invocation failed (only exit status 255 stops it immediately) find . -name "*.txt" | xargs command # Exit codes echo "a b c" | xargs false echo $? # Non-zero Performance Comparison 1 2 3 4 5 6 7 8 # Slow: one process per file for f in *.txt; do wc -l "$f"; done # Faster: batch arguments ls *.txt | xargs wc -l # Fastest: parallel execution ls *.txt | xargs -P 4 -n 10 wc -l GNU Parallel Alternative For complex parallel jobs, GNU Parallel offers more features: ...

February 24, 2026 · 6 min · 1237 words · Rob Washington

sed: Stream Editing for Text Transformation

sed (stream editor) processes text line by line, applying transformations as data flows through. It’s the scalpel to awk’s Swiss army knife — focused on text substitution and line manipulation. Basic Substitution 1 2 3 4 5 6 7 8 9 10 11 # Replace first occurrence per line sed 's/old/new/' file.txt # Replace all occurrences per line sed 's/old/new/g' file.txt # Case insensitive sed 's/old/new/gi' file.txt # Replace Nth occurrence sed 's/old/new/2' file.txt # Second occurrence only Delimiters When patterns contain slashes, use different delimiters: ...

February 24, 2026 · 5 min · 1016 words · Rob Washington

awk One-Liners: Text Processing Power

awk is a programming language disguised as a command-line tool. It processes text line by line, splitting each into fields. Most tasks need just one line. The Basics 1 2 3 4 5 6 7 8 9 10 11 12 13 14 # Print entire line awk '{print}' file.txt # Print specific field (space-delimited) awk '{print $1}' file.txt # First field awk '{print $2}' file.txt # Second field awk '{print $NF}' file.txt # Last field awk '{print $(NF-1)}' file.txt # Second to last # Print multiple fields awk '{print $1, $3}' file.txt # Custom output format awk '{print $1 " -> " $2}' file.txt Field Separators 1 2 3 4 5 6 7 8 9 10 11 # Colon-separated (like /etc/passwd) awk -F: '{print $1}' /etc/passwd # Tab-separated awk -F'\t' '{print $2}' data.tsv # Multiple separators awk -F'[,;]' '{print $1}' file.txt # Set output separator awk -F: 'BEGIN{OFS=","} {print $1,$3}' /etc/passwd Filtering Lines 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 # Lines matching pattern awk '/error/' logfile.txt # Lines NOT matching pattern awk '!/debug/' logfile.txt # Field matches value awk '$3 == "ERROR"' logfile.txt # Numeric comparison awk '$2 > 100' data.txt # Multiple conditions awk '$2 > 100 && $3 == "active"' data.txt # Line number range awk 'NR >= 10 && NR <= 20' file.txt Built-in Variables 1 2 3 4 5 6 7 NR # Current line number (total) NF # Number of fields in current line FNR # Line number in current file FS # Field separator (input) OFS # Output field separator RS # Record separator (default: newline) ORS # Output record separator 1 2 3 4 5 6 7 8 # Print line numbers awk '{print NR, $0}' file.txt # Print lines with more than 3 fields awk 'NF > 3' file.txt # Print total lines at end awk 'END{print NR}' file.txt Arithmetic 1 2 3 4 5 6 7 8 9 10 11 # Sum a column awk '{sum += $2} END{print sum}' data.txt # Average awk '{sum += $2; count++} END{print sum/count}' data.txt # Min/Max awk 'NR==1{min=max=$2} $2>max{max=$2} $2<min{min=$2} END{print min, max}' data.txt # Calculate percentage awk '{print $1, $2, ($2/$3)*100 
"%"}' data.txt String Operations 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 # Length of field awk '{print length($1)}' file.txt # Substring awk '{print substr($1, 1, 3)}' file.txt # First 3 chars # Convert case awk '{print toupper($1)}' file.txt awk '{print tolower($1)}' file.txt # String concatenation awk '{print $1 $2}' file.txt # No space awk '{print $1 " " $2}' file.txt # With space # Split string awk '{split($1, arr, "-"); print arr[1]}' file.txt Conditional Logic 1 2 3 4 5 6 7 8 9 10 11 12 13 # If-else awk '{if ($2 > 100) print "high"; else print "low"}' data.txt # Ternary awk '{print ($2 > 100 ? "high" : "low")}' data.txt # Multiple conditions awk '{ if ($2 > 100) status = "high" else if ($2 > 50) status = "medium" else status = "low" print $1, status }' data.txt BEGIN and END 1 2 3 4 5 # Header and footer awk 'BEGIN{print "Name\tScore"} {print $1"\t"$2} END{print "---\nTotal: " NR}' data.txt # Initialize variables awk 'BEGIN{count=0} /error/{count++} END{print count " errors"}' logfile.txt Practical One-Liners Log Analysis 1 2 3 4 5 6 7 8 9 10 11 # Count occurrences of each status code awk '{print $9}' access.log | sort | uniq -c | sort -rn # Or all in awk awk '{count[$9]++} END{for (code in count) print count[code], code}' access.log # Requests per IP awk '{count[$1]++} END{for (ip in count) print count[ip], ip}' access.log | sort -rn | head # Slow requests (response time > 1s) awk '$NF > 1.0 {print $7, $NF}' access.log CSV Processing 1 2 3 4 5 6 7 8 9 10 11 # Print specific columns awk -F, '{print $1","$3}' data.csv # Skip header awk -F, 'NR > 1 {print $2}' data.csv # Sum a column awk -F, 'NR > 1 {sum += $3} END{print sum}' data.csv # Filter by value awk -F, '$4 == "active"' data.csv System Administration 1 2 3 4 5 6 7 8 9 10 11 # Disk usage over 80% df -h | awk '$5+0 > 80 {print $6, $5}' # Memory by process ps aux | awk '{mem[$11] += $6} END{for (proc in mem) print mem[proc], proc}' | sort -rn | head # Users with bash shell awk -F: '$7 ~ /bash/ {print $1}' 
/etc/passwd # Show listening ports netstat -tlnp | awk '$6 == "LISTEN" {print $4}' Data Transformation 1 2 3 4 5 6 7 8 9 10 11 # Transpose rows to columns awk '{for (i=1; i<=NF; i++) a[i,NR]=$i} END{for (i=1; i<=NF; i++) {for (j=1; j<=NR; j++) printf a[i,j] " "; print ""}}' file.txt # Remove duplicate lines (preserving order) awk '!seen[$0]++' file.txt # Print unique values from column awk '{print $2}' file.txt | awk '!seen[$0]++' # Join lines with comma awk '{printf "%s%s", sep, $0; sep=","} END{print ""}' file.txt Text Manipulation 1 2 3 4 5 6 7 8 9 10 11 12 13 14 # Remove blank lines awk 'NF' file.txt # Remove leading/trailing whitespace awk '{$1=$1}1' file.txt # Replace field value awk '{$2 = "REDACTED"; print}' file.txt # Add line numbers awk '{print NR": "$0}' file.txt # Print every Nth line awk 'NR % 5 == 0' file.txt Combining with Other Tools 1 2 3 4 5 6 7 8 # Filter then process grep "ERROR" logfile.txt | awk '{print $5}' # Process then sort awk -F: '{print $3, $1}' /etc/passwd | sort -n # Use in pipeline cat data.txt | awk '{print $2}' | sort | uniq -c Multi-line Scripts For complex logic, use a script file: ...

February 24, 2026 · 6 min · 1151 words · Rob Washington