DataConnect SQL Cookbook

Overview

Production-tested SQL patterns combined with jq/awk post-processing for colorized, formatted output.

Setup

# Point the DataConnect CLI at the dev/network d000 source.
dsource d000 dev/network

# Shorthand aliases that keep ANSI color codes when output is piped.
# NOTE(review): aliases expand only in interactive shells; scripts need
# `shopt -s expand_aliases` (or shell functions) for these to work.
alias jqc='jq -C'  # jq with forced colorized output
alias yqc='yq -C'  # yq with forced colorized output

# ANSI escape sequences for manual formatting. The variables hold literal
# backslash sequences, so emit them with `printf '%b'` or `echo -e`.
NC='\033[0m'  # reset / no color
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
CYAN='\033[0;36m'

Authentication Analytics

Success/Failure with Color Indicators

# Colorized success rate by hour
# Rolls up today's RADIUS auths into hourly buckets and prints one line per
# hour with a status glyph: green check at >= 95% pass rate, yellow warning
# at >= 80%, red cross below that.
# NOTE(review): assumes `pct` arrives as a JSON number — confirm the netapi
# JSON encoder does not stringify numeric columns.
netapi ise dc --format json query "
  SELECT
    TO_CHAR(TRUNC(TIMESTAMP_TIMEZONE, 'HH24'), 'HH24:MI') as hour,
    COUNT(*) as total,
    SUM(CASE WHEN PASSED = 1 THEN 1 ELSE 0 END) as passed,
    SUM(CASE WHEN PASSED = 0 THEN 1 ELSE 0 END) as failed,
    ROUND(SUM(CASE WHEN PASSED = 1 THEN 1 ELSE 0 END) * 100.0 / COUNT(*), 1) as pct
  FROM RADIUS_AUTHENTICATIONS
  WHERE TIMESTAMP_TIMEZONE > TRUNC(SYSDATE)
  GROUP BY TRUNC(TIMESTAMP_TIMEZONE, 'HH24')
  ORDER BY hour
" | jq -r '
  .[] |
  if .pct >= 95 then "\u001b[32m✓\u001b[0m"  # Green
  elif .pct >= 80 then "\u001b[33m⚠\u001b[0m"  # Yellow
  else "\u001b[31m✗\u001b[0m" end              # Red
  + " \(.hour) | Total: \(.total) | Pass: \(.passed) | Fail: \(.failed) | \(.pct)%"
'

Top Failures with Severity Coloring

# Top failures with severity coloring
# Last-24h failure reasons colored by volume: bold red > 100, red > 50,
# yellow > 10, default otherwise. The jq slice idiom
# ("     " + count)[-5:] right-aligns the count in a 5-character field
# (sufficient for counts up to 99999).
netapi ise dc --format json query "
  SELECT
    FAILURE_REASON,
    COUNT(*) as count,
    COUNT(DISTINCT CALLING_STATION_ID) as unique_macs
  FROM RADIUS_AUTHENTICATIONS
  WHERE PASSED = 0
  AND TIMESTAMP_TIMEZONE > SYSDATE - 1
  GROUP BY FAILURE_REASON
  ORDER BY count DESC
  FETCH FIRST 15 ROWS ONLY
" | jq -r '
  .[] |
  (if .count > 100 then "\u001b[31;1m"      # Bold red (critical)
   elif .count > 50 then "\u001b[31m"       # Red (high)
   elif .count > 10 then "\u001b[33m"       # Yellow (medium)
   else "\u001b[0m" end) +                  # Normal (low)
  "[\(.count | tostring | ("     " + .)[-5:])] \(.unique_macs) MACs | \(.FAILURE_REASON)\u001b[0m"
'

EAP Method Distribution (Bar Chart)

# EAP Method distribution with bar chart
# 7-day share per auth method drawn as a 50-character bar (one block per
# 2%). Color keys the method: green EAP-TLS (strongest), yellow PEAP,
# blue MAB, default otherwise.
# Note: in jq, "string * 0" yields null; that is harmless here because
# null is the identity for the + (concatenation) operator.
netapi ise dc --format json query "
  SELECT
    AUTHENTICATION_METHOD,
    COUNT(*) as count,
    ROUND(COUNT(*) * 100.0 / SUM(COUNT(*)) OVER(), 1) as pct
  FROM RADIUS_AUTHENTICATIONS
  WHERE TIMESTAMP_TIMEZONE > SYSDATE - 7
  GROUP BY AUTHENTICATION_METHOD
  ORDER BY count DESC
" | jq -r '
  .[] |
  # Color by method type
  (if .AUTHENTICATION_METHOD == "EAP-TLS" then "\u001b[32m"      # Green (best)
   elif .AUTHENTICATION_METHOD == "PEAP" then "\u001b[33m"       # Yellow
   elif .AUTHENTICATION_METHOD == "MAB" then "\u001b[34m"        # Blue
   else "\u001b[0m" end) +
  # Bar chart
  (.pct / 2 | floor | . as $bars | ("█" * $bars) + ("░" * (50 - $bars))) +
  " \(.pct)% \(.AUTHENTICATION_METHOD) (\(.count))\u001b[0m"
'

Session Analytics with awk

Pivot Table: Auth Method by Policy Set

# Pivot table: Auth Method by Policy Set (awk)
# Note: netapi dc doesn't have --format csv, use JSON + jq conversion.
#
# pivot_csv: read "policy,method,count" CSV on stdin and print a pivot
# table — one row per policy set, one column per auth method, plus TOTAL.
pivot_csv() {
  awk -F',' '
  {
    policies[$1]++
    methods[$2]++
    # Use classic SUBSEP indexing (cells[p,m]) so the `(p,m) in cells`
    # membership test works. The previous gawk-only arrays-of-arrays form
    # data[$1][$2] never matched `(p,m) in data`, so every cell printed
    # "-". SUBSEP arrays are also portable to non-GNU awk.
    cells[$1,$2] = $3
    total[$1] += $3
  }
  END {
    # Header row
    printf "%-30s", "Policy Set"
    for (m in methods) printf "%12s", m
    printf "%12s\n", "TOTAL"
    print "─────────────────────────────────────────────────────────────────"

    # Data rows. for-in order is unspecified but consistent for an
    # unmodified array within one run, so header and cells line up.
    for (p in policies) {
      printf "%-30s", substr(p, 1, 29)
      for (m in methods) {
        if ((p, m) in cells)
          printf "%12d", cells[p, m]
        else
          printf "%12s", "-"
      }
      printf "%12d\n", total[p]
    }
  }'
}

netapi ise dc --format json query "
  SELECT
    POLICY_SET_NAME,
    AUTHENTICATION_METHOD,
    COUNT(*) as count
  FROM RADIUS_AUTHENTICATIONS
  WHERE TIMESTAMP_TIMEZONE > SYSDATE - 1
  GROUP BY POLICY_SET_NAME, AUTHENTICATION_METHOD
  ORDER BY POLICY_SET_NAME, count DESC
" | jq -r '.[] | "\(.POLICY_SET_NAME),\(.AUTHENTICATION_METHOD),\(.count)"' | pivot_csv

Time Series: Hourly Auth Trend with Sparkline

# Hourly auth trend with sparkline
# Scales each hour's auth count to a 0-20 block bar relative to today's
# busiest hour ($max), colored by bar height (red = quiet ... green = busy).
# Note: a zero-height bar makes "▇" * 0 yield null, which concatenates
# away harmlessly in jq.
netapi ise dc --format json query "
  SELECT
    TO_CHAR(TRUNC(TIMESTAMP_TIMEZONE, 'HH24'), 'HH24') as hour,
    COUNT(*) as count
  FROM RADIUS_AUTHENTICATIONS
  WHERE TIMESTAMP_TIMEZONE > TRUNC(SYSDATE)
  GROUP BY TRUNC(TIMESTAMP_TIMEZONE, 'HH24')
  ORDER BY hour
" | jq -r '
  . as $data |
  ($data | map(.count) | max) as $max |
  $data[] |
  .hour + " " +
  ((.count / $max * 20) | floor | . as $h |
    if $h > 15 then "\u001b[32m"
    elif $h > 10 then "\u001b[33m"
    elif $h > 5 then "\u001b[34m"
    else "\u001b[31m" end +
    ("▇" * $h) + "\u001b[0m") +
  " \(.count)"
'

Endpoint Analysis

Endpoint Profile Distribution with jq Grouping

# Endpoint profile distribution with jq grouping
# Groups the top-20 endpoint profiles by the prefix before the first "-"
# (e.g. "Cisco-IP-Phone" -> "Cisco") and totals counts per category.
# NOTE(review): assumes ENDPOINT_POLICY is never null — split() on a null
# profile would abort the jq program; verify against the ENDPOINTS schema.
netapi ise dc --format json query "
  SELECT
    ENDPOINT_POLICY as profile,
    COUNT(*) as count
  FROM ENDPOINTS
  GROUP BY ENDPOINT_POLICY
  ORDER BY count DESC
  FETCH FIRST 20 ROWS ONLY
" | jq -C '
  group_by(.profile | split("-")[0]) |
  map({
    category: .[0].profile | split("-")[0],
    profiles: map({name: .profile, count: .count}),
    total: map(.count) | add
  }) |
  sort_by(-.total)
'

MAC Vendor Analysis with awk

# MAC vendor analysis with awk OUI lookup
# Note: netapi dc doesn't have --format csv, use JSON + jq conversion.
#
# oui_chart: read "oui,count" CSV on stdin; print the vendor name from a
# small built-in OUI table plus a bar chart (one block per 10 endpoints).
oui_chart() {
  awk -F',' '
  BEGIN {
    # Common OUI prefixes (uppercase keys; input is normalized below)
    vendor["C8:5B:76"] = "Apple"
    vendor["98:BB:1E"] = "Intel"
    vendor["14:F6:D8"] = "Intel"
    vendor["DC:A6:32"] = "Raspberry Pi"
    vendor["00:50:56"] = "VMware"
    vendor["52:54:00"] = "QEMU/KVM"
    vendor["00:0C:29"] = "VMware"
    vendor["3C:06:30"] = "Apple"
  }
  {
    # Normalize to uppercase so the lookup works no matter how the DB
    # stores MAC addresses (lowercase OUIs previously always fell through
    # to "Unknown").
    oui = toupper($1)
    count = $2
    v = (oui in vendor) ? vendor[oui] : "Unknown"
    printf "\033[36m%-12s\033[0m %-20s %6d ", oui, v, count
    for (i = 0; i < count / 10; i++) printf "█"
    print ""
  }'
}

netapi ise dc --format json query "
  SELECT
    SUBSTR(MAC_ADDRESS, 1, 8) as oui,
    COUNT(*) as count
  FROM ENDPOINTS
  GROUP BY SUBSTR(MAC_ADDRESS, 1, 8)
  ORDER BY count DESC
  FETCH FIRST 20 ROWS ONLY
" | jq -r '.[] | "\(.oui),\(.count)"' | oui_chart

Network Device Analytics

NAS Performance Heatmap

# NAS performance heatmap
# Prints one bold header per NAS, then a strip of hour cells whose
# background color maps average auth latency: blue <= 100ms, green
# <= 200ms, yellow <= 500ms, red > 500ms.
# NOTE(review): if AVG(RESPONSE_TIME) is NULL for an hour, avg_ms renders
# as the literal "null" and sorts into the blue bucket (jq orders null
# below numbers) — confirm that is acceptable.
netapi ise dc --format json query "
  SELECT
    NAS_IP_ADDRESS,
    TO_CHAR(TRUNC(TIMESTAMP_TIMEZONE, 'HH24'), 'HH24') as hour,
    COUNT(*) as auths,
    ROUND(AVG(RESPONSE_TIME), 0) as avg_ms
  FROM RADIUS_AUTHENTICATIONS
  WHERE TIMESTAMP_TIMEZONE > TRUNC(SYSDATE)
  GROUP BY NAS_IP_ADDRESS, TRUNC(TIMESTAMP_TIMEZONE, 'HH24')
  ORDER BY NAS_IP_ADDRESS, hour
" | jq -r '
  group_by(.NAS_IP_ADDRESS) | .[] |
  "\u001b[1m" + .[0].NAS_IP_ADDRESS + "\u001b[0m",
  (. | map(
    (if .avg_ms > 500 then "\u001b[41m"      # Red bg (slow)
     elif .avg_ms > 200 then "\u001b[43m"    # Yellow bg
     elif .avg_ms > 100 then "\u001b[42m"    # Green bg
     else "\u001b[44m" end) +                # Blue bg (fast)
    " " + .hour + ":" + (.avg_ms | tostring) + "ms " +
    "\u001b[0m"
  ) | join("")),
  ""
'

Switch Port Utilization

# Switch port utilization - find shared ports
# Flags switch ports that authenticated more than one distinct MAC in the
# last 24h — typically hubs, daisy-chained phones, or misconfigured ports.
netapi ise dc --format json query "
  SELECT
    NAS_IP_ADDRESS,
    NAS_PORT_ID,
    COUNT(DISTINCT CALLING_STATION_ID) as unique_macs,
    COUNT(*) as auths
  FROM RADIUS_AUTHENTICATIONS
  WHERE TIMESTAMP_TIMEZONE > SYSDATE - 1
  AND NAS_PORT_ID IS NOT NULL
  GROUP BY NAS_IP_ADDRESS, NAS_PORT_ID
  HAVING COUNT(DISTINCT CALLING_STATION_ID) > 1
  ORDER BY unique_macs DESC
  FETCH FIRST 20 ROWS ONLY
" | jq -r '
  .[] |
  "\u001b[33m⚠\u001b[0m " +
  "\u001b[36m\(.NAS_IP_ADDRESS)\u001b[0m " +
  "\u001b[35m\(.NAS_PORT_ID)\u001b[0m " +
  "→ \(.unique_macs) MACs (\(.auths) auths)"
'

Advanced jq Database Operations

Join Simulation: Auth + Profiling

# Join simulation: Auth + Profiling with jq
# (the INDEX builtin requires jq 1.6+)
# Fixed: both queries previously passed --format json twice — once before
# `query` and again after the SQL string; the duplicate is removed.
AUTH=$(netapi ise dc --format json query "
  SELECT CALLING_STATION_ID as mac, COUNT(*) as auths, MAX(PASSED) as last_pass
  FROM RADIUS_AUTHENTICATIONS
  WHERE TIMESTAMP_TIMEZONE > SYSDATE - 1
  GROUP BY CALLING_STATION_ID
")

PROFILES=$(netapi ise dc --format json query "
  SELECT MAC_ADDRESS as mac, ENDPOINT_POLICY as profile
  FROM ENDPOINTS
")

# Build a mac -> row lookup from the profile rows, then left-join it onto
# the auth rows; MACs with no profile record fall back to "Unknown".
echo "$AUTH" | jq --argjson profiles "$PROFILES" '
  . as $auth |
  ($profiles | INDEX(.mac)) as $prof_map |
  $auth | map(. + {profile: ($prof_map[.mac].profile // "Unknown")})
' | jq -C '
  group_by(.profile) |
  map({
    profile: .[0].profile,
    endpoints: length,
    total_auths: map(.auths) | add,
    all_passed: all(.last_pass == 1)
  }) |
  sort_by(-.total_auths)
'

Window Functions with jq (Running Total)

# Running total with jq reduce
# Emulates SUM(daily_auths) OVER (ORDER BY day): reduce threads an
# accumulator object through the day rows, appending each row annotated
# with its running_total.
netapi ise dc --format json query "
  SELECT
    TO_CHAR(TRUNC(TIMESTAMP_TIMEZONE), 'YYYY-MM-DD') as day,
    COUNT(*) as daily_auths
  FROM RADIUS_AUTHENTICATIONS
  WHERE TIMESTAMP_TIMEZONE > SYSDATE - 7
  GROUP BY TRUNC(TIMESTAMP_TIMEZONE)
  ORDER BY day
" | jq -C '
  reduce .[] as $item (
    {running_total: 0, result: []};
    .running_total += $item.daily_auths |
    .result += [$item + {running_total: .running_total}]
  ) | .result
'

Percentile Calculation

# Response time percentiles with jq
# Nearest-rank percentiles over the last 24h of response times. The rows
# already arrive sorted by the SQL ORDER BY; the extra jq `sort` is a
# cheap safety net in case the transport reorders them.
netapi ise dc --format json query "
  SELECT RESPONSE_TIME as ms
  FROM RADIUS_AUTHENTICATIONS
  WHERE TIMESTAMP_TIMEZONE > SYSDATE - 1
  AND RESPONSE_TIME IS NOT NULL
  ORDER BY RESPONSE_TIME
" | jq '
  map(.ms) | sort |
  {
    count: length,
    min: first,
    max: last,
    p50: .[length * 0.5 | floor],
    p90: .[length * 0.9 | floor],
    p95: .[length * 0.95 | floor],
    p99: .[length * 0.99 | floor]
  }
'

yq for YAML Output

Convert to YAML Report

# Convert to YAML report with yq
# Fixed: the command previously passed --format json twice (before `query`
# and again after the SQL string); the duplicate is removed.
# NOTE(review): the jq-style expression below (group_by/map/round) assumes
# a jq-compatible yq — confirm which yq implementation is deployed and
# that it provides `round`.
netapi ise dc --format json query "
  SELECT
    POLICY_SET_NAME as policy,
    AUTHENTICATION_METHOD as method,
    COUNT(*) as count,
    SUM(CASE WHEN PASSED = 1 THEN 1 ELSE 0 END) as passed
  FROM RADIUS_AUTHENTICATIONS
  WHERE TIMESTAMP_TIMEZONE > SYSDATE - 1
  GROUP BY POLICY_SET_NAME, AUTHENTICATION_METHOD
  ORDER BY policy, count DESC
" | yq -C -P '
  group_by(.policy) |
  map({
    "policy_set": .[0].policy,
    "methods": map({
      "name": .method,
      "total": .count,
      "passed": .passed,
      "success_rate": ((.passed / .count * 100) | . * 10 | round / 10 | tostring + "%")
    })
  })
'

Export Formats

Export to Prometheus Metrics Format

# Export to Prometheus metrics format
# Emits one counter sample per (method, status) pair in Prometheus text
# exposition format, suitable for a node_exporter textfile collector.
netapi ise dc --format json query "
  SELECT
    AUTHENTICATION_METHOD as method,
    PASSED as status,
    COUNT(*) as count
  FROM RADIUS_AUTHENTICATIONS
  WHERE TIMESTAMP_TIMEZONE > SYSDATE - 1
  GROUP BY AUTHENTICATION_METHOD, PASSED
" | jq -r '
  "# HELP ise_auth_total Total ISE authentications",
  "# TYPE ise_auth_total counter",
  (.[] | "ise_auth_total{method=\"\(.method)\",status=\"\(if .status == 1 then "passed" else "failed" end)\"} \(.count)")
'

Export to CSV with Headers

# Export to CSV with headers
# Note: netapi dc doesn't have --format csv, use JSON + jq conversion
OUT_FILE="/tmp/ise-auths-$(date +%Y%m%d).csv"

netapi ise dc --format json query "
  SELECT
    TO_CHAR(TIMESTAMP_TIMEZONE, 'YYYY-MM-DD HH24:MI:SS') as timestamp,
    CALLING_STATION_ID as mac,
    USERNAME as identity,
    NAS_IP_ADDRESS as nas,
    AUTHENTICATION_METHOD as method,
    PASSED as success,
    FAILURE_REASON as failure
  FROM RADIUS_AUTHENTICATIONS
  WHERE TIMESTAMP_TIMEZONE > SYSDATE - 1
  ORDER BY TIMESTAMP_TIMEZONE DESC
" | jq -r '
  # keys_unsorted preserves the SELECT column order (plain `keys` sorts
  # alphabetically and scrambled the columns); `first // {}` keeps an
  # empty result set from aborting with "null has no keys".
  ((first // {}) | keys_unsorted) as $k |
  ($k | @csv),
  (.[] | [.[$k[]]] | @csv)
' > "$OUT_FILE"

echo "Exported $(wc -l < "$OUT_FILE") records"

Complex Reporting

Daily Report Generation

#!/bin/bash
# generate-daily-report.sh
#
# Collects today's ISE authentication summary plus the top failure
# reasons and writes a combined JSON report to /tmp.

dsource d000 dev/network
DATE=$(date +%Y-%m-%d)
REPORT_FILE="/tmp/ise-report-${DATE}.json"

echo "Generating ISE Daily Report for ${DATE}..."

# Collect today's summary metrics (single aggregate row -> .[0])
STATS=$(netapi ise dc --format json query "
  SELECT
    COUNT(*) as total_auths,
    SUM(CASE WHEN PASSED = 1 THEN 1 ELSE 0 END) as passed,
    SUM(CASE WHEN PASSED = 0 THEN 1 ELSE 0 END) as failed,
    COUNT(DISTINCT CALLING_STATION_ID) as unique_endpoints,
    COUNT(DISTINCT NAS_IP_ADDRESS) as active_nas,
    ROUND(AVG(RESPONSE_TIME), 2) as avg_response_ms
  FROM RADIUS_AUTHENTICATIONS
  WHERE TIMESTAMP_TIMEZONE > TRUNC(SYSDATE)
" | jq '.[0]')

# Top 5 failure reasons for today.
# Fixed: this query previously passed --format json twice.
TOP_FAILURES=$(netapi ise dc --format json query "
  SELECT FAILURE_REASON, COUNT(*) as count
  FROM RADIUS_AUTHENTICATIONS
  WHERE PASSED = 0 AND TIMESTAMP_TIMEZONE > TRUNC(SYSDATE)
  GROUP BY FAILURE_REASON
  ORDER BY count DESC
  FETCH FIRST 5 ROWS ONLY
")

# Combine into the report. The health calculation is guarded so a
# zero-auth day reports NO_DATA instead of dividing by zero; SUM() over
# zero rows is NULL, so passed is defaulted with // as well.
jq -n \
  --arg date "$DATE" \
  --argjson stats "$STATS" \
  --argjson failures "$TOP_FAILURES" \
  '{
    report_date: $date,
    summary: $stats,
    top_failures: $failures,
    health: (
      if ($stats.total_auths // 0) == 0 then "NO_DATA"
      elif (($stats.passed // 0) / $stats.total_auths) > 0.95 then "HEALTHY"
      elif (($stats.passed // 0) / $stats.total_auths) > 0.80 then "DEGRADED"
      else "CRITICAL"
      end
    )
  }' | tee "$REPORT_FILE" | jq -C '.'

echo -e "\nReport saved to: $REPORT_FILE"