Wed, Jan 7, 2026

Propagation anomalies - 2026-01-07

Detection of blocks that propagated slower than expected given their blob count.

Show code
display_sql("block_production_timeline", target_date)
View query
WITH
-- Base slots using proposer duty as the source of truth
slots AS (
    SELECT DISTINCT
        slot,
        slot_start_date_time,
        proposer_validator_index
    FROM canonical_beacon_proposer_duty
    WHERE meta_network_name = 'mainnet'
      AND slot_start_date_time >= '2026-01-07' AND slot_start_date_time < '2026-01-07'::date + INTERVAL 1 DAY
),

-- Proposer entity mapping
proposer_entity AS (
    SELECT
        index,
        entity
    FROM ethseer_validator_entity
    WHERE meta_network_name = 'mainnet'
),

-- Blob count per slot
-- uniqExact gives an exact distinct count; uniq() is approximate and could
-- miscount at the margin, which would skew the regression on blob_count
blob_count AS (
    SELECT
        slot,
        uniqExact(blob_index) AS blob_count
    FROM canonical_beacon_blob_sidecar
    WHERE meta_network_name = 'mainnet'
      AND slot_start_date_time >= '2026-01-07' AND slot_start_date_time < '2026-01-07'::date + INTERVAL 1 DAY
    GROUP BY slot
),

-- Canonical block hash (to verify MEV payload was actually used)
canonical_block AS (
    SELECT
        slot,
        execution_payload_block_hash
    FROM canonical_beacon_block
    WHERE meta_network_name = 'mainnet'
      AND slot_start_date_time >= '2026-01-07' AND slot_start_date_time < '2026-01-07'::date + INTERVAL 1 DAY
),

-- MEV bid timing using timestamp_ms
mev_bids AS (
    SELECT
        slot,
        slot_start_date_time,
        min(timestamp_ms) AS first_bid_timestamp_ms,
        max(timestamp_ms) AS last_bid_timestamp_ms
    FROM mev_relay_bid_trace
    WHERE meta_network_name = 'mainnet'
      AND slot_start_date_time >= '2026-01-07' AND slot_start_date_time < '2026-01-07'::date + INTERVAL 1 DAY
    GROUP BY slot, slot_start_date_time
),

-- MEV payload delivery - join canonical block with delivered payloads
-- Note: Use is_mev flag because ClickHouse LEFT JOIN returns 0 (not NULL) for non-matching rows
-- Get value from proposer_payload_delivered (not bid_trace, which may not have the winning block)
-- Columns are qualified with pd. so the filter stays unambiguous even if
-- canonical_block later exposes a slot_start_date_time column
mev_payload AS (
    SELECT
        cb.slot,
        cb.execution_payload_block_hash AS winning_block_hash,
        1 AS is_mev,
        max(pd.value) AS winning_bid_value,
        groupArray(DISTINCT pd.relay_name) AS relay_names,
        any(pd.builder_pubkey) AS winning_builder
    FROM canonical_block cb
    GLOBAL INNER JOIN mev_relay_proposer_payload_delivered pd
        ON cb.slot = pd.slot AND cb.execution_payload_block_hash = pd.block_hash
    WHERE pd.meta_network_name = 'mainnet'
      AND pd.slot_start_date_time >= '2026-01-07' AND pd.slot_start_date_time < '2026-01-07'::date + INTERVAL 1 DAY
    GROUP BY cb.slot, cb.execution_payload_block_hash
),

-- Winning bid timing from bid_trace (may not exist for all MEV blocks)
-- Columns are qualified with bt. for the same ambiguity-proofing reason
winning_bid AS (
    SELECT
        bt.slot,
        bt.slot_start_date_time,
        argMin(bt.timestamp_ms, bt.event_date_time) AS winning_bid_timestamp_ms
    FROM mev_relay_bid_trace bt
    GLOBAL INNER JOIN mev_payload mp ON bt.slot = mp.slot AND bt.block_hash = mp.winning_block_hash
    WHERE bt.meta_network_name = 'mainnet'
      AND bt.slot_start_date_time >= '2026-01-07' AND bt.slot_start_date_time < '2026-01-07'::date + INTERVAL 1 DAY
    GROUP BY bt.slot, bt.slot_start_date_time
),

-- Block gossip timing with spread
block_gossip AS (
    SELECT
        slot,
        min(event_date_time) AS block_first_seen,
        max(event_date_time) AS block_last_seen
    FROM libp2p_gossipsub_beacon_block
    WHERE meta_network_name = 'mainnet'
      AND slot_start_date_time >= '2026-01-07' AND slot_start_date_time < '2026-01-07'::date + INTERVAL 1 DAY
    GROUP BY slot
),

-- Column arrival timing: first arrival per column, then min/max of those
column_gossip AS (
    SELECT
        slot,
        min(first_seen) AS first_column_first_seen,
        max(first_seen) AS last_column_first_seen
    FROM (
        SELECT
            slot,
            column_index,
            min(event_date_time) AS first_seen
        FROM libp2p_gossipsub_data_column_sidecar
        WHERE meta_network_name = 'mainnet'
          AND slot_start_date_time >= '2026-01-07' AND slot_start_date_time < '2026-01-07'::date + INTERVAL 1 DAY
          AND event_date_time > '1970-01-01 00:00:01'
        GROUP BY slot, column_index
    )
    GROUP BY slot
)

SELECT
    s.slot AS slot,
    s.slot_start_date_time AS slot_start_date_time,
    pe.entity AS proposer_entity,

    -- Blob count
    coalesce(bc.blob_count, 0) AS blob_count,

    -- MEV bid timing (absolute and relative to slot start)
    fromUnixTimestamp64Milli(mb.first_bid_timestamp_ms) AS first_bid_at,
    mb.first_bid_timestamp_ms - toInt64(toUnixTimestamp(mb.slot_start_date_time)) * 1000 AS first_bid_ms,
    fromUnixTimestamp64Milli(mb.last_bid_timestamp_ms) AS last_bid_at,
    mb.last_bid_timestamp_ms - toInt64(toUnixTimestamp(mb.slot_start_date_time)) * 1000 AS last_bid_ms,

    -- Winning bid timing (from bid_trace, may be NULL if block hash not in bid_trace)
    if(wb.slot != 0, fromUnixTimestamp64Milli(wb.winning_bid_timestamp_ms), NULL) AS winning_bid_at,
    if(wb.slot != 0, wb.winning_bid_timestamp_ms - toInt64(toUnixTimestamp(s.slot_start_date_time)) * 1000, NULL) AS winning_bid_ms,

    -- MEV payload info (from proposer_payload_delivered, always present for MEV blocks)
    if(mp.is_mev = 1, mp.winning_bid_value, NULL) AS winning_bid_value,
    if(mp.is_mev = 1, mp.relay_names, []) AS winning_relays,
    if(mp.is_mev = 1, mp.winning_builder, NULL) AS winning_builder,

    -- Block gossip timing with spread
    bg.block_first_seen,
    dateDiff('millisecond', s.slot_start_date_time, bg.block_first_seen) AS block_first_seen_ms,
    bg.block_last_seen,
    dateDiff('millisecond', s.slot_start_date_time, bg.block_last_seen) AS block_last_seen_ms,
    dateDiff('millisecond', bg.block_first_seen, bg.block_last_seen) AS block_spread_ms,

    -- Column arrival timing (NULL when no blobs)
    if(coalesce(bc.blob_count, 0) = 0, NULL, cg.first_column_first_seen) AS first_column_first_seen,
    if(coalesce(bc.blob_count, 0) = 0, NULL, dateDiff('millisecond', s.slot_start_date_time, cg.first_column_first_seen)) AS first_column_first_seen_ms,
    if(coalesce(bc.blob_count, 0) = 0, NULL, cg.last_column_first_seen) AS last_column_first_seen,
    if(coalesce(bc.blob_count, 0) = 0, NULL, dateDiff('millisecond', s.slot_start_date_time, cg.last_column_first_seen)) AS last_column_first_seen_ms,
    if(coalesce(bc.blob_count, 0) = 0, NULL, dateDiff('millisecond', cg.first_column_first_seen, cg.last_column_first_seen)) AS column_spread_ms

FROM slots s
GLOBAL LEFT JOIN proposer_entity pe ON s.proposer_validator_index = pe.index
GLOBAL LEFT JOIN blob_count bc ON s.slot = bc.slot
GLOBAL LEFT JOIN mev_bids mb ON s.slot = mb.slot
GLOBAL LEFT JOIN mev_payload mp ON s.slot = mp.slot
GLOBAL LEFT JOIN winning_bid wb ON s.slot = wb.slot
GLOBAL LEFT JOIN block_gossip bg ON s.slot = bg.slot
GLOBAL LEFT JOIN column_gossip cg ON s.slot = cg.slot

ORDER BY s.slot DESC
Show code
df = load_parquet("block_production_timeline", target_date)

# Keep only slots where a block was actually observed: missed slots have
# NaN first-seen; negative or >= 60s values are treated as bad timing data.
# (NaN compares False in both range checks, so one combined mask suffices.)
first_seen = df["block_first_seen_ms"]
df = df[first_seen.notna() & (first_seen >= 0) & (first_seen < 60000)]

# A block counts as MEV-built iff a winning bid value was delivered for it.
df["has_mev"] = df["winning_bid_value"].notna()
df["block_type"] = df["has_mev"].map({True: "MEV", False: "Local"})

# Upper bound of the blob-count axis, reused by the charts below.
max_blobs = df["blob_count"].max()

local_mask = ~df["has_mev"]
print(f"Total valid blocks: {len(df):,}")
print(f"MEV blocks: {df['has_mev'].sum():,} ({df['has_mev'].mean()*100:.1f}%)")
print(f"Local blocks: {local_mask.sum():,} ({local_mask.mean()*100:.1f}%)")
Total valid blocks: 7,179
MEV blocks: 6,717 (93.6%)
Local blocks: 462 (6.4%)

Anomaly detection method

Blocks that are slow relative to their blob count are more interesting than blocks that are simply slow. A given propagation delay may be perfectly normal for a block carrying 15 blobs, yet clearly anomalous for a block with none.

The method:

  1. Fit linear regression: block_first_seen_ms ~ blob_count
  2. Calculate residuals (actual - expected)
  3. Flag blocks with residuals > 2σ as anomalies

Points above the upper edge of the ±2σ band propagated slower than expected given their blob count; only these (residual > +2σ) are flagged as anomalies.

Show code
# Conditional outliers: blocks slow relative to their blob count.
df_anomaly = df.copy()

blobs = df_anomaly["blob_count"].astype(float)
actual_ms = df_anomaly["block_first_seen_ms"]

# Model propagation time as a linear function of blob count; the unpacked
# names are reused by later cells, so keep them all.
slope, intercept, r_value, p_value, std_err = stats.linregress(blobs, actual_ms)

# Residual = how much slower (positive) or faster (negative) a block was
# than the fit predicts for its blob count.
df_anomaly["expected_ms"] = intercept + slope * blobs
df_anomaly["residual_ms"] = actual_ms - df_anomaly["expected_ms"]

residual_std = df_anomaly["residual_ms"].std()

# One-sided flag: only unexpectedly SLOW blocks (> +2σ) are anomalies.
df_anomaly["is_anomaly"] = df_anomaly["residual_ms"] > 2 * residual_std

n_anomalies = df_anomaly["is_anomaly"].sum()
pct_anomalies = n_anomalies / len(df_anomaly) * 100

# Outliers table; relay attribution falls back to "Local" when no relay
# delivered the payload (winning_relays is an empty list in that case).
df_outliers = df_anomaly.loc[df_anomaly["is_anomaly"]].copy()
df_outliers["relay"] = df_outliers["winning_relays"].apply(
    lambda relays: relays[0] if len(relays) > 0 else "Local"
)

print(f"Regression: block_ms = {intercept:.1f} + {slope:.2f} × blob_count (R² = {r_value**2:.3f})")
print(f"Residual σ = {residual_std:.1f}ms")
print(f"Anomalies (>2σ slow): {n_anomalies:,} ({pct_anomalies:.1f}%)")
Regression: block_ms = 1785.5 + 16.45 × blob_count (R² = 0.010)
Residual σ = 659.6ms
Anomalies (>2σ slow): 169 (2.4%)
Show code
# Scatter of actual propagation time vs blob count, with the fitted
# regression line and a shaded ±2σ residual band.
xs = np.array([0, int(max_blobs)])
fit_line = intercept + slope * xs
band_hi = fit_line + 2 * residual_std
band_lo = fit_line - 2 * residual_std

fig = go.Figure()

# Shaded ±2σ band, drawn first so points render on top of it.
fig.add_trace(go.Scatter(
    x=np.concatenate([xs, xs[::-1]]),
    y=np.concatenate([band_hi, band_lo[::-1]]),
    fill="toself",
    fillcolor="rgba(100,100,100,0.2)",
    line=dict(width=0),
    name="±2σ band",
    hoverinfo="skip",
))

# Dashed line: expected first-seen time at each blob count.
fig.add_trace(go.Scatter(
    x=xs,
    y=fit_line,
    mode="lines",
    line=dict(color="white", width=2, dash="dash"),
    name="Expected",
))

# Non-anomalous blocks, capped at 2000 points to avoid overplotting;
# fixed seed keeps the sample (and chart) reproducible.
normal_pts = df_anomaly[~df_anomaly["is_anomaly"]]
if len(normal_pts) > 2000:
    normal_pts = normal_pts.sample(2000, random_state=42)

fig.add_trace(go.Scatter(
    x=normal_pts["blob_count"],
    y=normal_pts["block_first_seen_ms"],
    mode="markers",
    marker=dict(size=4, color="rgba(100,150,200,0.4)"),
    name=f"Normal ({len(df_anomaly) - n_anomalies:,})",
    hoverinfo="skip",
))

# Anomalous blocks, with slot / residual / relay shown on hover.
fig.add_trace(go.Scatter(
    x=df_outliers["blob_count"],
    y=df_outliers["block_first_seen_ms"],
    mode="markers",
    marker=dict(
        size=7,
        color="#e74c3c",
        line=dict(width=1, color="white"),
    ),
    name=f"Anomalies ({n_anomalies:,})",
    customdata=np.column_stack([
        df_outliers["slot"],
        df_outliers["residual_ms"].round(0),
        df_outliers["relay"],
    ]),
    hovertemplate="<b>Slot %{customdata[0]}</b><br>Blobs: %{x}<br>Actual: %{y:.0f}ms<br>+%{customdata[1]}ms vs expected<br>Relay: %{customdata[2]}<extra></extra>",
))

fig.update_layout(
    margin=dict(l=60, r=30, t=30, b=60),
    xaxis=dict(title="Blob count", range=[-0.5, int(max_blobs) + 0.5]),
    yaxis=dict(title="Block first seen (ms from slot start)"),
    legend=dict(orientation="h", yanchor="bottom", y=1.02, xanchor="right", x=1),
    height=500,
)
fig.show(config={"responsive": True})

All propagation anomalies

Blocks that propagated much slower than expected given their blob count, sorted by residual (worst first).

Show code
# All anomalies table with selectable text and Lab links
# Renders the outliers as a static HTML table (sorted worst-first by residual)
# rather than a plotly table, so the text is selectable and rows can link out.
if n_anomalies > 0:
    # Worst offenders first; keep only the display columns.
    df_table = df_outliers.sort_values("residual_ms", ascending=False)[
        ["slot", "blob_count", "block_first_seen_ms", "expected_ms", "residual_ms", "relay"]
    ].copy()
    # Round timings to whole milliseconds for display.
    df_table["block_first_seen_ms"] = df_table["block_first_seen_ms"].round(0).astype(int)
    df_table["expected_ms"] = df_table["expected_ms"].round(0).astype(int)
    df_table["residual_ms"] = df_table["residual_ms"].round(0).astype(int)
    
    # Create Lab links
    # NOTE(review): slot values come straight from the query, so no HTML
    # escaping is needed here; revisit if the link text ever becomes user data.
    df_table["lab_link"] = df_table["slot"].apply(
        lambda s: f'<a href="https://lab.ethpandaops.io/ethereum/slots/{s}" target="_blank">View</a>'
    )
    
    # Build HTML table
    # Inline CSS: sticky header + scrollable container caps the table at 600px.
    html = '''
    <style>
    .anomaly-table { border-collapse: collapse; width: 100%; font-family: monospace; font-size: 13px; }
    .anomaly-table th { background: #2c3e50; color: white; padding: 8px 12px; text-align: left; position: sticky; top: 0; }
    .anomaly-table td { padding: 6px 12px; border-bottom: 1px solid #eee; }
    .anomaly-table tr:hover { background: #f5f5f5; }
    .anomaly-table .num { text-align: right; }
    .anomaly-table .delta { background: #ffebee; color: #c62828; font-weight: bold; }
    .anomaly-table a { color: #1976d2; text-decoration: none; }
    .anomaly-table a:hover { text-decoration: underline; }
    .table-container { max-height: 600px; overflow-y: auto; }
    </style>
    <div class="table-container">
    <table class="anomaly-table">
    <thead>
    <tr><th>Slot</th><th class="num">Blobs</th><th class="num">Actual (ms)</th><th class="num">Expected (ms)</th><th class="num">Δ (ms)</th><th>Relay</th><th>Lab</th></tr>
    </thead>
    <tbody>
    '''
    
    # One <tr> per anomaly; residual is always positive here (only >2σ-slow
    # blocks are flagged), hence the unconditional "+" prefix.
    for _, row in df_table.iterrows():
        html += f'''<tr>
            <td>{row["slot"]}</td>
            <td class="num">{row["blob_count"]}</td>
            <td class="num">{row["block_first_seen_ms"]}</td>
            <td class="num">{row["expected_ms"]}</td>
            <td class="num delta">+{row["residual_ms"]}</td>
            <td>{row["relay"]}</td>
            <td>{row["lab_link"]}</td>
        </tr>'''
    
    html += '</tbody></table></div>'
    display(HTML(html))
    print(f"\nTotal anomalies: {len(df_table):,}")
else:
    print("No anomalies detected.")
SlotBlobsActual (ms)Expected (ms)Δ (ms)RelayLab
13413662 0 12526 1785 +10741 Local View
13411920 0 10665 1785 +8880 Local View
13414220 12 10599 1983 +8616 Ultra Sound View
13413698 0 9150 1785 +7365 Local View
13415905 0 8177 1785 +6392 Local View
13416207 0 6814 1785 +5029 Local View
13416464 0 6729 1785 +4944 Local View
13416351 5 6360 1868 +4492 Local View
13413199 6 6189 1884 +4305 Ultra Sound View
13413525 0 5466 1785 +3681 Local View
13414625 5 5533 1868 +3665 Local View
13412448 0 5114 1785 +3329 Local View
13413593 1 4355 1802 +2553 Local View
13414579 4 4392 1851 +2541 Ultra Sound View
13416382 0 4187 1785 +2402 Local View
13411744 0 4115 1785 +2330 Local View
13412359 0 4112 1785 +2327 Local View
13414272 6 4164 1884 +2280 Local View
13412544 0 3937 1785 +2152 Local View
13411652 0 3902 1785 +2117 Local View
13416256 5 3876 1868 +2008 Local View
13411358 6 3882 1884 +1998 Ultra Sound View
13410325 18 4038 2082 +1956 Titan Relay View
13412290 10 3871 1950 +1921 BloXroute Regulated View
13412384 1 3672 1802 +1870 BloXroute Regulated View
13415736 2 3655 1818 +1837 Local View
13412211 2 3649 1818 +1831 Titan Relay View
13415280 1 3583 1802 +1781 Ultra Sound View
13411534 5 3645 1868 +1777 Ultra Sound View
13411754 2 3588 1818 +1770 Ultra Sound View
13414349 3 3601 1835 +1766 Ultra Sound View
13416968 5 3621 1868 +1753 Ultra Sound View
13410587 1 3547 1802 +1745 Ultra Sound View
13410560 1 3541 1802 +1739 Flashbots View
13410326 0 3497 1785 +1712 Local View
13417049 3 3524 1835 +1689 BloXroute Regulated View
13413478 10 3614 1950 +1664 BloXroute Regulated View
13410126 9 3592 1933 +1659 Ultra Sound View
13410000 2 3475 1818 +1657 Titan Relay View
13415820 8 3562 1917 +1645 Ultra Sound View
13415253 1 3446 1802 +1644 BloXroute Regulated View
13410525 8 3548 1917 +1631 BloXroute Regulated View
13413123 2 3449 1818 +1631 BloXroute Regulated View
13415984 2 3449 1818 +1631 BloXroute Regulated View
13413725 10 3579 1950 +1629 Ultra Sound View
13416741 2 3425 1818 +1607 BloXroute Regulated View
13411851 5 3470 1868 +1602 Ultra Sound View
13411289 2 3410 1818 +1592 BloXroute Regulated View
13411533 0 3376 1785 +1591 BloXroute Regulated View
13416006 2 3401 1818 +1583 BloXroute Regulated View
13415849 5 3444 1868 +1576 BloXroute Max Profit View
13412823 8 3483 1917 +1566 BloXroute Regulated View
13410652 3 3393 1835 +1558 BloXroute Regulated View
13414276 5 3425 1868 +1557 BloXroute Regulated View
13412229 8 3472 1917 +1555 Titan Relay View
13416609 8 3469 1917 +1552 BloXroute Regulated View
13412979 1 3353 1802 +1551 BloXroute Regulated View
13410565 9 3483 1933 +1550 Ultra Sound View
13412899 10 3499 1950 +1549 BloXroute Regulated View
13410786 4 3396 1851 +1545 BloXroute Regulated View
13414547 8 3446 1917 +1529 BloXroute Regulated View
13413632 5 3386 1868 +1518 Titan Relay View
13410667 1 3317 1802 +1515 Titan Relay View
13410632 6 3394 1884 +1510 Ultra Sound View
13414209 3 3339 1835 +1504 BloXroute Regulated View
13416703 8 3420 1917 +1503 Agnostic Gnosis View
13412021 0 3288 1785 +1503 BloXroute Regulated View
13416635 1 3303 1802 +1501 BloXroute Regulated View
13417031 7 3400 1901 +1499 BloXroute Regulated View
13412169 3 3330 1835 +1495 BloXroute Regulated View
13411419 8 3411 1917 +1494 BloXroute Regulated View
13416035 4 3343 1851 +1492 Titan Relay View
13414611 0 3276 1785 +1491 Ultra Sound View
13414500 6 3367 1884 +1483 BloXroute Max Profit View
13411350 3 3312 1835 +1477 BloXroute Regulated View
13413454 4 3324 1851 +1473 Ultra Sound View
13416460 6 3354 1884 +1470 BloXroute Regulated View
13416908 7 3369 1901 +1468 Titan Relay View
13412032 7 3368 1901 +1467 Titan Relay View
13414619 1 3268 1802 +1466 Ultra Sound View
13413615 8 3377 1917 +1460 BloXroute Regulated View
13416736 0 3244 1785 +1459 BloXroute Max Profit View
13415040 5 3317 1868 +1449 Ultra Sound View
13412264 8 3365 1917 +1448 BloXroute Regulated View
13416845 6 3330 1884 +1446 BloXroute Max Profit View
13416888 1 3244 1802 +1442 Ultra Sound View
13413481 0 3225 1785 +1440 Ultra Sound View
13412431 7 3339 1901 +1438 Ultra Sound View
13410656 10 3386 1950 +1436 Agnostic Gnosis View
13415127 0 3221 1785 +1436 BloXroute Regulated View
13413046 5 3302 1868 +1434 Titan Relay View
13412381 0 3219 1785 +1434 Ultra Sound View
13416009 3 3268 1835 +1433 Flashbots View
13416754 0 3218 1785 +1433 Aestus View
13412880 1 3232 1802 +1430 BloXroute Regulated View
13416034 10 3380 1950 +1430 Flashbots View
13411172 1 3230 1802 +1428 Local View
13416662 6 3311 1884 +1427 Flashbots View
13411592 6 3308 1884 +1424 Ultra Sound View
13412270 6 3307 1884 +1423 BloXroute Regulated View
13413561 6 3305 1884 +1421 Ultra Sound View
13410797 1 3219 1802 +1417 BloXroute Regulated View
13411846 9 3347 1933 +1414 BloXroute Regulated View
13411732 1 3212 1802 +1410 Ultra Sound View
13417030 7 3310 1901 +1409 EthGas View
13416084 1 3208 1802 +1406 BloXroute Regulated View
13414281 7 3305 1901 +1404 Ultra Sound View
13416758 0 3189 1785 +1404 Agnostic Gnosis View
13416262 7 3303 1901 +1402 BloXroute Regulated View
13413930 7 3303 1901 +1402 Ultra Sound View
13416873 8 3319 1917 +1402 Ultra Sound View
13412246 6 3282 1884 +1398 BloXroute Max Profit View
13412839 4 3247 1851 +1396 Ultra Sound View
13412833 3 3228 1835 +1393 BloXroute Max Profit View
13412275 7 3293 1901 +1392 Flashbots View
13416626 5 3257 1868 +1389 Aestus View
13412821 3 3223 1835 +1388 Ultra Sound View
13416037 11 3353 1966 +1387 BloXroute Regulated View
13413190 5 3248 1868 +1380 BloXroute Regulated View
13412273 7 3280 1901 +1379 Agnostic Gnosis View
13412548 7 3280 1901 +1379 Local View
13413897 12 3360 1983 +1377 Titan Relay View
13413906 5 3244 1868 +1376 Titan Relay View
13415840 14 3392 2016 +1376 BloXroute Max Profit View
13415410 9 3309 1933 +1376 Titan Relay View
13415564 7 3276 1901 +1375 BloXroute Regulated View
13414238 5 3243 1868 +1375 Agnostic Gnosis View
13411619 4 3225 1851 +1374 Titan Relay View
13413557 8 3288 1917 +1371 Titan Relay View
13414640 8 3282 1917 +1365 BloXroute Regulated View
13416471 14 3379 2016 +1363 BloXroute Max Profit View
13412785 6 3245 1884 +1361 BloXroute Regulated View
13415618 2 3177 1818 +1359 BloXroute Max Profit View
13410195 7 3259 1901 +1358 BloXroute Max Profit View
13414073 16 3407 2049 +1358 BloXroute Regulated View
13414759 5 3225 1868 +1357 Ultra Sound View
13415749 1 3158 1802 +1356 Ultra Sound View
13411181 7 3256 1901 +1355 BloXroute Max Profit View
13414913 10 3305 1950 +1355 BloXroute Max Profit View
13415164 4 3204 1851 +1353 BloXroute Regulated View
13412759 9 3286 1933 +1353 Titan Relay View
13416503 5 3220 1868 +1352 Ultra Sound View
13415045 6 3234 1884 +1350 BloXroute Max Profit View
13415678 6 3233 1884 +1349 BloXroute Max Profit View
13416917 4 3200 1851 +1349 BloXroute Max Profit View
13411936 10 3298 1950 +1348 Ultra Sound View
13411725 4 3198 1851 +1347 Agnostic Gnosis View
13410374 2 3162 1818 +1344 EthGas View
13415231 7 3244 1901 +1343 BloXroute Regulated View
13415716 6 3227 1884 +1343 Ultra Sound View
13414763 7 3240 1901 +1339 Titan Relay View
13413237 7 3239 1901 +1338 BloXroute Max Profit View
13417137 11 3304 1966 +1338 Ultra Sound View
13412426 4 3187 1851 +1336 BloXroute Regulated View
13414019 2 3153 1818 +1335 Local View
13412447 6 3216 1884 +1332 Ultra Sound View
13414550 5 3199 1868 +1331 BloXroute Max Profit View
13412410 1 3132 1802 +1330 BloXroute Max Profit View
13414366 0 3114 1785 +1329 Agnostic Gnosis View
13415387 7 3228 1901 +1327 Ultra Sound View
13414503 0 3112 1785 +1327 Titan Relay View
13415535 3 3161 1835 +1326 Ultra Sound View
13413810 3 3156 1835 +1321 Ultra Sound View
13415062 12 3304 1983 +1321 Ultra Sound View
13411451 6 3205 1884 +1321 BloXroute Max Profit View
13413499 4 3172 1851 +1321 BloXroute Max Profit View
13414014 11 3286 1966 +1320 Ultra Sound View
13412434 0 3105 1785 +1320 Aestus View
13415941 5 3187 1868 +1319 Agnostic Gnosis View
Total anomalies: 169

Anomalies by relay

Which relays have the most propagation anomalies?

Show code
# Horizontal bar chart: anomaly counts per relay, annotated with the
# per-relay anomaly rate so busy relays aren't unfairly penalized.
if n_anomalies > 0:
    per_relay = df_outliers["relay"].value_counts().reset_index()
    per_relay.columns = ["relay", "anomaly_count"]

    # Attribute every valid block (not just anomalies) to a relay so the
    # rate has a proper denominator. This intentionally adds a "relay"
    # column to df_anomaly for reuse downstream.
    df_anomaly["relay"] = df_anomaly["winning_relays"].apply(
        lambda relays: relays[0] if len(relays) > 0 else "Local"
    )
    totals = df_anomaly.groupby("relay").size().reset_index(name="total_blocks")

    per_relay = per_relay.merge(totals, on="relay")
    per_relay["anomaly_rate"] = per_relay["anomaly_count"] / per_relay["total_blocks"] * 100
    # Ascending sort puts the worst relay at the top of the horizontal chart.
    per_relay = per_relay.sort_values("anomaly_count", ascending=True)

    fig = go.Figure()

    fig.add_trace(go.Bar(
        y=per_relay["relay"],
        x=per_relay["anomaly_count"],
        orientation="h",
        marker_color="#e74c3c",
        text=per_relay.apply(lambda r: f"{r['anomaly_count']} ({r['anomaly_rate']:.1f}%)", axis=1),
        textposition="outside",
        hovertemplate="<b>%{y}</b><br>Anomalies: %{x}<br>Total blocks: %{customdata[0]:,}<br>Rate: %{customdata[1]:.1f}%<extra></extra>",
        customdata=np.column_stack([per_relay["total_blocks"], per_relay["anomaly_rate"]]),
    ))

    fig.update_layout(
        margin=dict(l=150, r=80, t=30, b=60),
        xaxis=dict(title="Number of anomalies"),
        yaxis=dict(title=""),
        height=350,
    )
    fig.show(config={"responsive": True})

Anomalies by blob count

Are anomalies more common at certain blob counts?

Show code
# Bar chart: how anomalies distribute across blob counts, with the
# per-count anomaly rate available on hover.
if n_anomalies > 0:
    per_blob_anom = df_outliers.groupby("blob_count").size().reset_index(name="anomaly_count")
    per_blob_all = df_anomaly.groupby("blob_count").size().reset_index(name="total_blocks")

    # Left-merge on the totals so blob counts with zero anomalies still
    # appear (their NaN anomaly_count becomes 0).
    per_blob = per_blob_all.merge(per_blob_anom, on="blob_count", how="left").fillna(0)
    per_blob["anomaly_count"] = per_blob["anomaly_count"].astype(int)
    per_blob["anomaly_rate"] = per_blob["anomaly_count"] / per_blob["total_blocks"] * 100

    fig = go.Figure()

    fig.add_trace(go.Bar(
        x=per_blob["blob_count"],
        y=per_blob["anomaly_count"],
        marker_color="#e74c3c",
        hovertemplate="<b>%{x} blobs</b><br>Anomalies: %{y}<br>Total: %{customdata[0]:,}<br>Rate: %{customdata[1]:.1f}%<extra></extra>",
        customdata=np.column_stack([per_blob["total_blocks"], per_blob["anomaly_rate"]]),
    ))

    fig.update_layout(
        margin=dict(l=60, r=30, t=30, b=60),
        xaxis=dict(title="Blob count", dtick=1),
        yaxis=dict(title="Number of anomalies"),
        height=350,
    )
    fig.show(config={"responsive": True})