Error Handling Patterns
--max-procs resource limiting — cap concurrency to avoid overwhelming targets
# -P2 limits to 2 concurrent processes even with many inputs
# Pass each item as a positional argument ($1) instead of substituting {} into
# the sh -c command string — substituted text would be parsed as shell code,
# which is an injection hazard with untrusted input. The '_' fills $0.
seq 1 20 | xargs -P2 -I{} sh -c 'echo "start $1"; sleep 1; echo "done $1"' _ {}
--max-args memory protection — process in small batches to avoid ARG_MAX
# -n50 passes 50 args per invocation instead of all at once
# -print0 / -0 NUL-delimit the names so paths containing spaces or newlines
# survive intact; xargs re-invokes ls as many times as needed, keeping each
# argv comfortably below the kernel's ARG_MAX limit.
find /usr -type f -name '*.so' -print0 | xargs -0 -n50 ls -la
Exit code propagation — xargs exits 123 when any invocation of the command fails with a status of 1–125
# Demonstrate: one failure causes xargs to return 123
# Quote "$0" so the incoming word is run as a single command name and is not
# re-split or glob-expanded by the child shell.
printf 'true\nfalse\ntrue\n' | xargs -n1 sh -c '"$0"'
echo "xargs exit code: $?"
Timeout per command — prevent any single invocation from hanging
# Each item is passed as $1 rather than spliced into the sh -c command string
# (splicing lets input be executed as shell code), and `timeout 5` bounds the
# whole per-item shell so no single check can hang. The trailing \n on -w
# keeps the parallel outputs from running together on one line.
cat <<'EOF' | xargs -P4 -I{} timeout 5 sh -c 'echo "checking $1"; curl -s -o /dev/null -w "%{http_code}\n" "https://$1"' _ {}
archlinux.org
github.com
example.com
EOF
Dry-run pattern — echo the command before executing
# Preview what xargs would execute without running it
find /tmp/demo -name '*.tmp' -print0 | xargs -0 -I{} printf 'would run: rm %s\n' {}
# Drop the printf wrapper (leaving just: xargs -0 rm) to execute for real
Log failures while continuing — sh -c wrapper captures errors per item
printf 'alpha\n/nonexistent\ngamma\n' | xargs -n1 -I{} sh -c 'ls {} 2>/dev/null || echo "FAILED: {}" >&2'
Retry pattern — an inline sh -c wrapper retries each command up to 3 times
# Up to 3 attempts per target, one second apart; success short-circuits with
# exit 0 so xargs sees the item as handled. The '_' placeholder fills $0.
printf 'archlinux.org\nexample.invalid\ngithub.com\n' | xargs -P2 -I{} sh -c '
target="$1"; tries=3
for attempt in $(seq 1 "$tries"); do
  curl -sf -o /dev/null "https://$target" && echo "OK: $target" && exit 0
  sleep 1
done
echo "FAILED after $tries attempts: $target" >&2
' _ {}
Combine -P and timeout for resilient parallel operations
# Each host is passed as $1 — nothing from the input list is parsed as shell
# syntax — and `timeout 10` bounds the whole attempt per host. BatchMode=yes
# makes ssh fail immediately instead of waiting on a password prompt, so an
# unreachable or misconfigured host costs ConnectTimeout, not the full 10s.
cat <<'EOF' | xargs -P4 -I{} sh -c 'timeout 10 ssh -o BatchMode=yes -o ConnectTimeout=3 "$1" uptime 2>/dev/null || echo "UNREACHABLE: $1" >&2' _ {}
192.168.1.10
192.168.1.11
192.168.1.12
192.168.1.13
EOF