Wed, Jan 21, 2026

Propagation anomalies - 2026-01-21

Detection of blocks that propagated slower than expected, and a check for whether those slowdowns correlate with blob count.

Show code
display_sql("block_production_timeline", target_date)
View query
WITH
-- Base slots using proposer duty as the source of truth
slots AS (
    SELECT DISTINCT
        slot,
        slot_start_date_time,
        proposer_validator_index
    FROM canonical_beacon_proposer_duty
    WHERE meta_network_name = 'mainnet'
      AND slot_start_date_time >= '2026-01-21' AND slot_start_date_time < '2026-01-21'::date + INTERVAL 1 DAY
),

-- Proposer entity mapping
proposer_entity AS (
    SELECT
        index,
        entity
    FROM ethseer_validator_entity
    WHERE meta_network_name = 'mainnet'
),

-- Blob count per slot
blob_count AS (
    SELECT
        slot,
        uniq(blob_index) AS blob_count
    FROM canonical_beacon_blob_sidecar
    WHERE meta_network_name = 'mainnet'
      AND slot_start_date_time >= '2026-01-21' AND slot_start_date_time < '2026-01-21'::date + INTERVAL 1 DAY
    GROUP BY slot
),

-- Canonical block hash (to verify MEV payload was actually used)
canonical_block AS (
    SELECT DISTINCT
        slot,
        execution_payload_block_hash
    FROM canonical_beacon_block
    WHERE meta_network_name = 'mainnet'
      AND slot_start_date_time >= '2026-01-21' AND slot_start_date_time < '2026-01-21'::date + INTERVAL 1 DAY
),

-- MEV bid timing using timestamp_ms
mev_bids AS (
    SELECT
        slot,
        slot_start_date_time,
        min(timestamp_ms) AS first_bid_timestamp_ms,
        max(timestamp_ms) AS last_bid_timestamp_ms
    FROM mev_relay_bid_trace
    WHERE meta_network_name = 'mainnet'
      AND slot_start_date_time >= '2026-01-21' AND slot_start_date_time < '2026-01-21'::date + INTERVAL 1 DAY
    GROUP BY slot, slot_start_date_time
),

-- MEV payload delivery - join canonical block with delivered payloads
-- Note: Use is_mev flag because ClickHouse LEFT JOIN returns 0 (not NULL) for non-matching rows
-- Get value from proposer_payload_delivered (not bid_trace, which may not have the winning block)
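-- e.g. in the final query, "GLOBAL LEFT JOIN mev_payload mp" fills mp.winning_bid_value,
-- mp.relay_names and mp.is_mev with type defaults (0, [], 0) for locally built blocks,
-- so the outer SELECT keys off mp.is_mev = 1 instead of a NULL check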
mev_payload AS (
    SELECT
        cb.slot,
        cb.execution_payload_block_hash AS winning_block_hash,
        1 AS is_mev,
        max(pd.value) AS winning_bid_value,
        groupArray(DISTINCT pd.relay_name) AS relay_names,
        any(pd.builder_pubkey) AS winning_builder
    FROM canonical_block cb
    GLOBAL INNER JOIN mev_relay_proposer_payload_delivered pd
        ON cb.slot = pd.slot AND cb.execution_payload_block_hash = pd.block_hash
    WHERE pd.meta_network_name = 'mainnet'
      AND pd.slot_start_date_time >= '2026-01-21' AND pd.slot_start_date_time < '2026-01-21'::date + INTERVAL 1 DAY
    GROUP BY cb.slot, cb.execution_payload_block_hash
),

-- Winning bid timing from bid_trace (may not exist for all MEV blocks)
winning_bid AS (
    SELECT
        bt.slot,
        bt.slot_start_date_time,
        argMin(bt.timestamp_ms, bt.event_date_time) AS winning_bid_timestamp_ms
    FROM mev_relay_bid_trace bt
    GLOBAL INNER JOIN mev_payload mp ON bt.slot = mp.slot AND bt.block_hash = mp.winning_block_hash
    WHERE bt.meta_network_name = 'mainnet'
      AND bt.slot_start_date_time >= '2026-01-21' AND bt.slot_start_date_time < '2026-01-21'::date + INTERVAL 1 DAY
    GROUP BY bt.slot, bt.slot_start_date_time
),

-- Block gossip timing with spread
block_gossip AS (
    SELECT
        slot,
        min(event_date_time) AS block_first_seen,
        max(event_date_time) AS block_last_seen
    FROM libp2p_gossipsub_beacon_block
    WHERE meta_network_name = 'mainnet'
      AND slot_start_date_time >= '2026-01-21' AND slot_start_date_time < '2026-01-21'::date + INTERVAL 1 DAY
    GROUP BY slot
),

-- Column arrival timing: first arrival per column, then min/max of those
column_gossip AS (
    SELECT
        slot,
        min(first_seen) AS first_column_first_seen,
        max(first_seen) AS last_column_first_seen
    FROM (
        SELECT
            slot,
            column_index,
            min(event_date_time) AS first_seen
        FROM libp2p_gossipsub_data_column_sidecar
        WHERE meta_network_name = 'mainnet'
          AND slot_start_date_time >= '2026-01-21' AND slot_start_date_time < '2026-01-21'::date + INTERVAL 1 DAY
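          -- drop rows whose event_date_time is the epoch default (likely unset values)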
          AND event_date_time > '1970-01-01 00:00:01'
        GROUP BY slot, column_index
    )
    GROUP BY slot
)

SELECT
    s.slot AS slot,
    s.slot_start_date_time AS slot_start_date_time,
    pe.entity AS proposer_entity,

    -- Blob count
    coalesce(bc.blob_count, 0) AS blob_count,

    -- MEV bid timing (absolute and relative to slot start; NULL when no bids were observed)
    if(mb.slot != 0, fromUnixTimestamp64Milli(mb.first_bid_timestamp_ms), NULL) AS first_bid_at,
    if(mb.slot != 0, mb.first_bid_timestamp_ms - toInt64(toUnixTimestamp(mb.slot_start_date_time)) * 1000, NULL) AS first_bid_ms,
    if(mb.slot != 0, fromUnixTimestamp64Milli(mb.last_bid_timestamp_ms), NULL) AS last_bid_at,
    if(mb.slot != 0, mb.last_bid_timestamp_ms - toInt64(toUnixTimestamp(mb.slot_start_date_time)) * 1000, NULL) AS last_bid_ms,

    -- Winning bid timing (from bid_trace, may be NULL if block hash not in bid_trace)
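    -- (wb.slot = 0 indicates the LEFT JOIN to winning_bid found no match and filled defaults)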
    if(wb.slot != 0, fromUnixTimestamp64Milli(wb.winning_bid_timestamp_ms), NULL) AS winning_bid_at,
    if(wb.slot != 0, wb.winning_bid_timestamp_ms - toInt64(toUnixTimestamp(s.slot_start_date_time)) * 1000, NULL) AS winning_bid_ms,

    -- MEV payload info (from proposer_payload_delivered, always present for MEV blocks)
    if(mp.is_mev = 1, mp.winning_bid_value, NULL) AS winning_bid_value,
    if(mp.is_mev = 1, mp.relay_names, []) AS winning_relays,
    if(mp.is_mev = 1, mp.winning_builder, NULL) AS winning_builder,

    -- Block gossip timing with spread
    bg.block_first_seen,
    dateDiff('millisecond', s.slot_start_date_time, bg.block_first_seen) AS block_first_seen_ms,
    bg.block_last_seen,
    dateDiff('millisecond', s.slot_start_date_time, bg.block_last_seen) AS block_last_seen_ms,
    dateDiff('millisecond', bg.block_first_seen, bg.block_last_seen) AS block_spread_ms,

    -- Column arrival timing (NULL when no blobs)
    if(coalesce(bc.blob_count, 0) = 0, NULL, cg.first_column_first_seen) AS first_column_first_seen,
    if(coalesce(bc.blob_count, 0) = 0, NULL, dateDiff('millisecond', s.slot_start_date_time, cg.first_column_first_seen)) AS first_column_first_seen_ms,
    if(coalesce(bc.blob_count, 0) = 0, NULL, cg.last_column_first_seen) AS last_column_first_seen,
    if(coalesce(bc.blob_count, 0) = 0, NULL, dateDiff('millisecond', s.slot_start_date_time, cg.last_column_first_seen)) AS last_column_first_seen_ms,
    if(coalesce(bc.blob_count, 0) = 0, NULL, dateDiff('millisecond', cg.first_column_first_seen, cg.last_column_first_seen)) AS column_spread_ms

FROM slots s
GLOBAL LEFT JOIN proposer_entity pe ON s.proposer_validator_index = pe.index
GLOBAL LEFT JOIN blob_count bc ON s.slot = bc.slot
GLOBAL LEFT JOIN mev_bids mb ON s.slot = mb.slot
GLOBAL LEFT JOIN mev_payload mp ON s.slot = mp.slot
GLOBAL LEFT JOIN winning_bid wb ON s.slot = wb.slot
GLOBAL LEFT JOIN block_gossip bg ON s.slot = bg.slot
GLOBAL LEFT JOIN column_gossip cg ON s.slot = cg.slot

ORDER BY s.slot DESC
Show code
df = load_parquet("block_production_timeline", target_date)

# Filter to valid blocks (exclude missed slots)
df = df[df["block_first_seen_ms"].notna()]
df = df[(df["block_first_seen_ms"] >= 0) & (df["block_first_seen_ms"] < 60000)]

# Flag MEV vs local blocks
df["has_mev"] = df["winning_bid_value"].notna()
df["block_type"] = df["has_mev"].map({True: "MEV", False: "Local"})

# Get max blob count for charts
max_blobs = df["blob_count"].max()

print(f"Total valid blocks: {len(df):,}")
print(f"MEV blocks: {df['has_mev'].sum():,} ({df['has_mev'].mean()*100:.1f}%)")
print(f"Local blocks: {(~df['has_mev']).sum():,} ({(~df['has_mev']).mean()*100:.1f}%)")
Total valid blocks: 7,180
MEV blocks: 6,742 (93.9%)
Local blocks: 438 (6.1%)

Anomaly detection method

The method:

  1. Fit linear regression: block_first_seen_ms ~ blob_count
  2. Calculate residuals (actual - expected)
  3. Flag blocks with residuals > 2σ as anomalies, where σ is the standard deviation of the residuals

Points above the ±2σ band propagated slower than expected given their blob count.
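As a concrete illustration, the rule can be applied by hand using the fit reported below (intercept ≈ 1793.0 ms, slope ≈ 18.85 ms per blob, residual σ ≈ 721.1 ms). The snippet is a minimal standalone sketch with those values hard-coded and a throwaway is_anomaly helper; it is not part of the pipeline.

# Minimal sketch of the 2σ rule, using the fitted values reported below
intercept_ms = 1793.0        # baseline propagation time at 0 blobs
slope_ms_per_blob = 18.85    # marginal delay per blob
residual_sigma_ms = 721.1    # standard deviation of the regression residuals

def is_anomaly(blob_count: int, block_first_seen_ms: float) -> bool:
    """Flag a block as anomalous if it arrived more than 2σ later than expected."""
    expected_ms = intercept_ms + slope_ms_per_blob * blob_count
    return block_first_seen_ms - expected_ms > 2 * residual_sigma_ms

# Slot 13514860 from the table below: 6 blobs, first seen at 3707 ms.
# Expected ≈ 1906 ms, threshold ≈ 1906 + 1442 ≈ 3348 ms, so it is flagged.
print(is_anomaly(6, 3707))   # True
print(is_anomaly(6, 2500))   # False: only ~594 ms above expected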

Show code
# Conditional outliers: blocks slow relative to their blob count
df_anomaly = df.copy()

# Fit regression: block_first_seen_ms ~ blob_count
slope, intercept, r_value, p_value, std_err = stats.linregress(
    df_anomaly["blob_count"].astype(float), df_anomaly["block_first_seen_ms"]
)

# Calculate expected value and residual
df_anomaly["expected_ms"] = intercept + slope * df_anomaly["blob_count"].astype(float)
df_anomaly["residual_ms"] = df_anomaly["block_first_seen_ms"] - df_anomaly["expected_ms"]

# Calculate residual standard deviation
residual_std = df_anomaly["residual_ms"].std()

# Flag anomalies: residual > 2σ (unexpectedly slow)
df_anomaly["is_anomaly"] = df_anomaly["residual_ms"] > 2 * residual_std

n_anomalies = df_anomaly["is_anomaly"].sum()
pct_anomalies = n_anomalies / len(df_anomaly) * 100

# Prepare outliers dataframe
df_outliers = df_anomaly[df_anomaly["is_anomaly"]].copy()
df_outliers["relay"] = df_outliers["winning_relays"].apply(lambda x: x[0] if len(x) > 0 else "Local")
df_outliers["proposer"] = df_outliers["proposer_entity"].fillna("Unknown")
df_outliers["builder"] = df_outliers["winning_builder"].apply(
    lambda x: f"{x[:10]}..." if pd.notna(x) and x else "Local"
)

print(f"Regression: block_ms = {intercept:.1f} + {slope:.2f} × blob_count (R² = {r_value**2:.3f})")
print(f"Residual σ = {residual_std:.1f}ms")
print(f"Anomalies (>2σ slow): {n_anomalies:,} ({pct_anomalies:.1f}%)")
Regression: block_ms = 1793.0 + 18.85 × blob_count (R² = 0.013)
Residual σ = 721.1ms
Anomalies (>2σ slow): 131 (1.8%)
Show code
# Create scatter plot with regression band
x_range = np.array([0, int(max_blobs)])
y_pred = intercept + slope * x_range
y_upper = y_pred + 2 * residual_std
y_lower = y_pred - 2 * residual_std

fig = go.Figure()

# Add ±2σ band
fig.add_trace(go.Scatter(
    x=np.concatenate([x_range, x_range[::-1]]),
    y=np.concatenate([y_upper, y_lower[::-1]]),
    fill="toself",
    fillcolor="rgba(100,100,100,0.2)",
    line=dict(width=0),
    name="±2σ band",
    hoverinfo="skip",
))

# Add regression line
fig.add_trace(go.Scatter(
    x=x_range,
    y=y_pred,
    mode="lines",
    line=dict(color="white", width=2, dash="dash"),
    name="Expected",
))

# Normal points (sample to avoid overplotting)
df_normal = df_anomaly[~df_anomaly["is_anomaly"]]
if len(df_normal) > 2000:
    df_normal = df_normal.sample(2000, random_state=42)

fig.add_trace(go.Scatter(
    x=df_normal["blob_count"],
    y=df_normal["block_first_seen_ms"],
    mode="markers",
    marker=dict(size=4, color="rgba(100,150,200,0.4)"),
    name=f"Normal ({len(df_anomaly) - n_anomalies:,})",
    hoverinfo="skip",
))

# Anomaly points
fig.add_trace(go.Scatter(
    x=df_outliers["blob_count"],
    y=df_outliers["block_first_seen_ms"],
    mode="markers",
    marker=dict(
        size=7,
        color="#e74c3c",
        line=dict(width=1, color="white"),
    ),
    name=f"Anomalies ({n_anomalies:,})",
    customdata=np.column_stack([
        df_outliers["slot"],
        df_outliers["residual_ms"].round(0),
        df_outliers["relay"],
    ]),
    hovertemplate="<b>Slot %{customdata[0]}</b><br>Blobs: %{x}<br>Actual: %{y:.0f}ms<br>+%{customdata[1]}ms vs expected<br>Relay: %{customdata[2]}<extra></extra>",
))

fig.update_layout(
    margin=dict(l=60, r=30, t=30, b=60),
    xaxis=dict(title="Blob count", range=[-0.5, int(max_blobs) + 0.5]),
    yaxis=dict(title="Block first seen (ms from slot start)"),
    legend=dict(orientation="h", yanchor="bottom", y=1.02, xanchor="right", x=1),
    height=500,
)
fig.show(config={"responsive": True})

All propagation anomalies

Blocks that propagated much slower than expected given their blob count, sorted by residual (worst first).

Show code
# All anomalies table with selectable text and Lab links
if n_anomalies > 0:
    df_table = df_outliers.sort_values("residual_ms", ascending=False)[
        ["slot", "blob_count", "block_first_seen_ms", "expected_ms", "residual_ms", "proposer", "builder", "relay"]
    ].copy()
    df_table["block_first_seen_ms"] = df_table["block_first_seen_ms"].round(0).astype(int)
    df_table["expected_ms"] = df_table["expected_ms"].round(0).astype(int)
    df_table["residual_ms"] = df_table["residual_ms"].round(0).astype(int)
    
    # Build HTML table
    html = '''
    <style>
    .anomaly-table { border-collapse: collapse; width: 100%; font-family: monospace; font-size: 13px; }
    .anomaly-table th { background: #2c3e50; color: white; padding: 8px 12px; text-align: left; position: sticky; top: 0; }
    .anomaly-table td { padding: 6px 12px; border-bottom: 1px solid #eee; }
    .anomaly-table tr:hover { background: #f5f5f5; }
    .anomaly-table .num { text-align: right; }
    .anomaly-table .delta { background: #ffebee; color: #c62828; font-weight: bold; }
    .anomaly-table a { color: #1976d2; text-decoration: none; }
    .anomaly-table a:hover { text-decoration: underline; }
    .table-container { max-height: 600px; overflow-y: auto; }
    </style>
    <div class="table-container">
    <table class="anomaly-table">
    <thead>
    <tr><th>Slot</th><th class="num">Blobs</th><th class="num">Actual (ms)</th><th class="num">Expected (ms)</th><th class="num">Δ (ms)</th><th>Proposer</th><th>Builder</th><th>Relay</th></tr>
    </thead>
    <tbody>
    '''
    
    for _, row in df_table.iterrows():
        slot_link = f'<a href="https://lab.ethpandaops.io/ethereum/slots/{row["slot"]}" target="_blank">{row["slot"]}</a>'
        html += f'''<tr>
            <td>{slot_link}</td>
            <td class="num">{row["blob_count"]}</td>
            <td class="num">{row["block_first_seen_ms"]}</td>
            <td class="num">{row["expected_ms"]}</td>
            <td class="num delta">+{row["residual_ms"]}</td>
            <td>{row["proposer"]}</td>
            <td>{row["builder"]}</td>
            <td>{row["relay"]}</td>
        </tr>'''
    
    html += '</tbody></table></div>'
    display(HTML(html))
    print(f"\nTotal anomalies: {len(df_table):,}")
else:
    print("No anomalies detected.")
Slot      Blobs  Actual (ms)  Expected (ms)  Δ (ms)  Proposer  Builder  Relay
13516987 0 29181 1793 +27388 Local Local
13515382 16 10258 2095 +8163 figment_lido Local Local
13514304 6 4975 1906 +3069 upbit Local Local
13517377 0 4767 1793 +2974 solo_stakers Local Local
13515713 5 4343 1887 +2456 launchnodes_lido 0xb26f9666... Aestus
13511599 0 4109 1793 +2316 everstake Local Local
13511549 0 4025 1793 +2232 Local Local
13513536 14 4275 2057 +2218 upbit Local Local
13515430 8 4146 1944 +2202 ether.fi 0x8db2a99d... BloXroute Max Profit
13516192 0 3935 1793 +2142 upbit Local Local
13516542 0 3828 1793 +2035 blockdaemon Local Local
13511437 4 3818 1868 +1950 0xb26f9666... BloXroute Max Profit
13516672 0 3679 1793 +1886 senseinode_lido 0x8527d16c... Ultra Sound
13512158 1 3652 1812 +1840 everstake 0xb67eaa5e... BloXroute Max Profit
13514860 6 3707 1906 +1801 0xb67eaa5e... Titan Relay
13512160 4 3667 1868 +1799 figment 0x850b00e0... BloXroute Regulated
13511074 2 3624 1831 +1793 0x8527d16c... Ultra Sound
13513744 1 3594 1812 +1782 everstake 0xb26f9666... Aestus
13515949 0 3569 1793 +1776 0x850b00e0... BloXroute Regulated
13517639 4 3642 1868 +1774 blockdaemon_lido 0xb67eaa5e... BloXroute Regulated
13515528 0 3564 1793 +1771 0x8527d16c... Ultra Sound
13513300 6 3674 1906 +1768 0x853b0078... Ultra Sound
13517629 0 3559 1793 +1766 binance 0x852b0070... Aestus
13515673 1 3568 1812 +1756 0x850b00e0... BloXroute Regulated
13512812 5 3629 1887 +1742 0xb26f9666... Titan Relay
13513784 4 3608 1868 +1740 0xb26f9666... Titan Relay
13514624 5 3617 1887 +1730 blockdaemon_lido 0xb26f9666... Titan Relay
13513796 6 3629 1906 +1723 0xb67eaa5e... BloXroute Regulated
13515891 0 3506 1793 +1713 0x8527d16c... Ultra Sound
13516799 3 3560 1850 +1710 0xb26f9666... Titan Relay
13517798 6 3616 1906 +1710 ether.fi 0x853b0078... Agnostic Gnosis
13513024 7 3633 1925 +1708 ether.fi 0xb26f9666... Titan Relay
13514195 1 3517 1812 +1705 blockdaemon_lido 0xb67eaa5e... Titan Relay
13512875 0 3495 1793 +1702 0x805e28e6... BloXroute Regulated
13511777 0 3484 1793 +1691 0x8a850621... Ultra Sound
13516682 6 3594 1906 +1688 0x8527d16c... Ultra Sound
13515203 2 3504 1831 +1673 everstake 0xb67eaa5e... BloXroute Max Profit
13513282 1 3482 1812 +1670 piertwo 0xaceaea9f... Flashbots
13514502 0 3463 1793 +1670 ether.fi 0x82c466b9... EthGas
13517470 5 3555 1887 +1668 revolut 0x8527d16c... Ultra Sound
13513954 9 3625 1963 +1662 blockdaemon 0xb26f9666... Titan Relay
13517533 0 3455 1793 +1662 lido 0x91a8729e... BloXroute Max Profit
13517898 10 3640 1982 +1658 revolut 0x856b0004... Ultra Sound
13515054 20 3824 2170 +1654 lido 0x8527d16c... Ultra Sound
13512408 9 3616 1963 +1653 figment 0xb67eaa5e... BloXroute Regulated
13516448 0 3446 1793 +1653 stakefish 0x853b0078... Agnostic Gnosis
13514965 5 3539 1887 +1652 blockdaemon 0xb26f9666... Titan Relay
13514969 7 3572 1925 +1647 ether.fi Local Local
13511457 10 3628 1982 +1646 blockdaemon 0x853b0078... Ultra Sound
13511925 6 3540 1906 +1634 blockdaemon 0xb26f9666... Titan Relay
13516497 10 3611 1982 +1629 0x8527d16c... Ultra Sound
13513520 0 3411 1793 +1618 blockdaemon 0x851b00b1... Ultra Sound
13515477 2 3445 1831 +1614 whale_0x7c1b 0x853b0078... BloXroute Max Profit
13512965 1 3421 1812 +1609 binance 0x823e0146... Flashbots
13512153 2 3435 1831 +1604 kelp 0x8527d16c... Ultra Sound
13513184 1 3407 1812 +1595 blockscape_lido 0x8db2a99d... Ultra Sound
13515948 9 3555 1963 +1592 blockdaemon 0x853b0078... Ultra Sound
13516273 0 3384 1793 +1591 everstake 0xb26f9666... Titan Relay
13512848 7 3514 1925 +1589 blockdaemon_lido 0xb67eaa5e... BloXroute Regulated
13516431 1 3400 1812 +1588 blockdaemon_lido 0xb67eaa5e... Titan Relay
13515367 3 3437 1850 +1587 everstake 0xb67eaa5e... BloXroute Regulated
13516332 11 3586 2000 +1586 0x8527d16c... Ultra Sound
13514611 3 3435 1850 +1585 blockdaemon_lido 0xb67eaa5e... Titan Relay
13516887 0 3372 1793 +1579 blockdaemon 0x99dbe3e8... Ultra Sound
13516378 6 3469 1906 +1563 blockdaemon_lido 0x850b00e0... BloXroute Regulated
13512725 9 3511 1963 +1548 everstake 0x88857150... Ultra Sound
13517158 2 3378 1831 +1547 blockdaemon_lido 0x88a53ec4... BloXroute Regulated
13517984 1 3359 1812 +1547 0xb26f9666... BloXroute Regulated
13517973 2 3373 1831 +1542 binance 0x8a850621... Titan Relay
13514672 1 3353 1812 +1541 everstake 0x853b0078... BloXroute Max Profit
13517445 1 3352 1812 +1540 blockdaemon_lido 0xb67eaa5e... Titan Relay
13513632 10 3521 1982 +1539 everstake 0x853b0078... Aestus
13512932 8 3482 1944 +1538 ether.fi Local Local
13517791 2 3368 1831 +1537 everstake 0x88a53ec4... BloXroute Max Profit
13514016 1 3348 1812 +1536 bitstamp 0xac23f8cc... Flashbots
13511648 5 3421 1887 +1534 bitstamp 0xb7c5e609... BloXroute Max Profit
13517823 3 3381 1850 +1531 blockdaemon 0x88857150... Ultra Sound
13516857 5 3417 1887 +1530 blockdaemon 0x8a850621... Ultra Sound
13516535 3 3379 1850 +1529 0x856b0004... BloXroute Max Profit
13512961 3 3379 1850 +1529 everstake 0xb26f9666... Titan Relay
13516167 3 3377 1850 +1527 blockdaemon_lido 0x88857150... Ultra Sound
13517704 11 3525 2000 +1525 blockdaemon_lido 0x88857150... Ultra Sound
13517426 4 3393 1868 +1525 blockdaemon 0x853b0078... Ultra Sound
13517465 7 3447 1925 +1522 0x88a53ec4... BloXroute Max Profit
13516683 8 3465 1944 +1521 blockdaemon 0x88857150... Ultra Sound
13513566 9 3483 1963 +1520 0x8527d16c... Ultra Sound
13514080 4 3388 1868 +1520 gateway.fmas_lido 0x8db2a99d... Flashbots
13517576 0 3312 1793 +1519 everstake 0x8527d16c... Ultra Sound
13517184 8 3462 1944 +1518 everstake 0x853b0078... BloXroute Max Profit
13514912 1 3330 1812 +1518 figment 0x8527d16c... Ultra Sound
13516137 4 3384 1868 +1516 binance 0x8a850621... Titan Relay
13513497 7 3437 1925 +1512 blockdaemon_lido 0xb67eaa5e... BloXroute Regulated
13514555 10 3492 1982 +1510 everstake 0x88a53ec4... BloXroute Max Profit
13511647 2 3341 1831 +1510 0xb26f9666... Titan Relay
13514205 14 3565 2057 +1508 0xb67eaa5e... BloXroute Max Profit
13517708 3 3357 1850 +1507 luno 0xb67eaa5e... BloXroute Regulated
13516203 5 3393 1887 +1506 everstake 0xb26f9666... Titan Relay
13515084 9 3468 1963 +1505 0xb67eaa5e... BloXroute Max Profit
13512891 8 3444 1944 +1500 0xb26f9666... Titan Relay
13516858 11 3497 2000 +1497 blockdaemon 0x855b00e6... Ultra Sound
13511477 4 3362 1868 +1494 blockdaemon_lido 0xb67eaa5e... BloXroute Regulated
13517308 2 3324 1831 +1493 blockdaemon_lido 0x853b0078... Ultra Sound
13514353 2 3322 1831 +1491 0xb67eaa5e... BloXroute Regulated
13517067 0 3283 1793 +1490 everstake 0x91a8729e... Aestus
13516662 0 3283 1793 +1490 blockscape_lido 0x99dbe3e8... Aestus
13513062 7 3414 1925 +1489 origin_protocol 0xb67eaa5e... BloXroute Regulated
13514183 2 3319 1831 +1488 everstake 0xb67eaa5e... BloXroute Max Profit
13515354 5 3370 1887 +1483 blockdaemon_lido 0xb26f9666... Titan Relay
13511873 0 3274 1793 +1481 blockdaemon_lido 0xb26f9666... Titan Relay
13515547 5 3368 1887 +1481 blockdaemon 0xb26f9666... Titan Relay
13512199 9 3442 1963 +1479 everstake 0xb67eaa5e... BloXroute Max Profit
13514638 0 3271 1793 +1478 blockdaemon 0x83bee517... BloXroute Regulated
13513963 1 3289 1812 +1477 blockdaemon_lido 0xb67eaa5e... BloXroute Regulated
13515082 13 3514 2038 +1476 everstake 0x88a53ec4... BloXroute Regulated
13514819 7 3400 1925 +1475 blockdaemon_lido 0xb67eaa5e... Titan Relay
13517418 1 3286 1812 +1474 blockdaemon_lido 0x850b00e0... BloXroute Regulated
13512324 7 3391 1925 +1466 blockdaemon 0x88a53ec4... BloXroute Regulated
13514795 6 3370 1906 +1464 everstake 0xb67eaa5e... BloXroute Max Profit
13517711 11 3464 2000 +1464 0x857b0038... Ultra Sound
13516800 5 3348 1887 +1461 bitstamp 0x8527d16c... Ultra Sound
13512596 6 3366 1906 +1460 everstake 0x88a53ec4... BloXroute Regulated
13515200 10 3441 1982 +1459 p2porg 0xb26f9666... Titan Relay
13511359 9 3422 1963 +1459 ether.fi 0xb26f9666... EthGas
13511797 2 3289 1831 +1458 blockdaemon_lido 0x91b123d8... BloXroute Regulated
13514332 6 3363 1906 +1457 blockdaemon 0xb26f9666... Titan Relay
13516178 8 3400 1944 +1456 blockdaemon_lido 0x850b00e0... BloXroute Regulated
13512809 12 3475 2019 +1456 everstake 0x8527d16c... Ultra Sound
13512563 6 3359 1906 +1453 everstake 0x8527d16c... Ultra Sound
13511045 12 3472 2019 +1453 abyss_finance 0x856b0004... Agnostic Gnosis
13516184 9 3412 1963 +1449 blockdaemon_lido 0xb67eaa5e... Titan Relay
13512346 0 3242 1793 +1449 blockdaemon_lido 0x91a8729e... BloXroute Regulated
Total anomalies: 131

Anomalies by relay

Which relays produce the most propagation anomalies?

Show code
if n_anomalies > 0:
    # Count anomalies by relay
    relay_counts = df_outliers["relay"].value_counts().reset_index()
    relay_counts.columns = ["relay", "anomaly_count"]
    
    # Get total blocks per relay for context
    df_anomaly["relay"] = df_anomaly["winning_relays"].apply(lambda x: x[0] if len(x) > 0 else "Local")
    total_by_relay = df_anomaly.groupby("relay").size().reset_index(name="total_blocks")
    
    relay_counts = relay_counts.merge(total_by_relay, on="relay")
    relay_counts["anomaly_rate"] = relay_counts["anomaly_count"] / relay_counts["total_blocks"] * 100
    relay_counts = relay_counts.sort_values("anomaly_rate", ascending=True)
    
    fig = go.Figure()
    
    fig.add_trace(go.Bar(
        y=relay_counts["relay"],
        x=relay_counts["anomaly_count"],
        orientation="h",
        marker_color="#e74c3c",
        text=relay_counts.apply(lambda r: f"{r['anomaly_count']}/{r['total_blocks']} ({r['anomaly_rate']:.1f}%)", axis=1),
        textposition="outside",
        hovertemplate="<b>%{y}</b><br>Anomalies: %{x}<br>Total blocks: %{customdata[0]:,}<br>Rate: %{customdata[1]:.1f}%<extra></extra>",
        customdata=np.column_stack([relay_counts["total_blocks"], relay_counts["anomaly_rate"]]),
    ))
    
    fig.update_layout(
        margin=dict(l=150, r=80, t=30, b=60),
        xaxis=dict(title="Number of anomalies"),
        yaxis=dict(title=""),
        height=350,
    )
    fig.show(config={"responsive": True})

Anomalies by proposer entity

Which proposer entities produce the most propagation anomalies? (Top 15 shown, ranked by anomaly rate.)

Show code
if n_anomalies > 0:
    # Count anomalies by proposer entity
    proposer_counts = df_outliers["proposer"].value_counts().reset_index()
    proposer_counts.columns = ["proposer", "anomaly_count"]
    
    # Get total blocks per proposer for context
    df_anomaly["proposer"] = df_anomaly["proposer_entity"].fillna("Unknown")
    total_by_proposer = df_anomaly.groupby("proposer").size().reset_index(name="total_blocks")
    
    proposer_counts = proposer_counts.merge(total_by_proposer, on="proposer")
    proposer_counts["anomaly_rate"] = proposer_counts["anomaly_count"] / proposer_counts["total_blocks"] * 100
    
    # Show top 15 by anomaly rate
    proposer_counts = proposer_counts.nlargest(15, "anomaly_rate").sort_values("anomaly_rate", ascending=True)
    
    fig = go.Figure()
    
    fig.add_trace(go.Bar(
        y=proposer_counts["proposer"],
        x=proposer_counts["anomaly_count"],
        orientation="h",
        marker_color="#e74c3c",
        text=proposer_counts.apply(lambda r: f"{r['anomaly_count']}/{r['total_blocks']} ({r['anomaly_rate']:.1f}%)", axis=1),
        textposition="outside",
        hovertemplate="<b>%{y}</b><br>Anomalies: %{x}<br>Total blocks: %{customdata[0]:,}<br>Rate: %{customdata[1]:.1f}%<extra></extra>",
        customdata=np.column_stack([proposer_counts["total_blocks"], proposer_counts["anomaly_rate"]]),
    ))
    
    fig.update_layout(
        margin=dict(l=150, r=80, t=30, b=60),
        xaxis=dict(title="Number of anomalies"),
        yaxis=dict(title=""),
        height=450,
    )
    fig.show(config={"responsive": True})

Anomalies by builder

Which builders produce the most propagation anomalies? (Top 15 shown, ranked by anomaly rate; truncated pubkeys shown for MEV blocks.)

Show code
if n_anomalies > 0:
    # Count anomalies by builder
    builder_counts = df_outliers["builder"].value_counts().reset_index()
    builder_counts.columns = ["builder", "anomaly_count"]
    
    # Get total blocks per builder for context
    df_anomaly["builder"] = df_anomaly["winning_builder"].apply(
        lambda x: f"{x[:10]}..." if pd.notna(x) and x else "Local"
    )
    total_by_builder = df_anomaly.groupby("builder").size().reset_index(name="total_blocks")
    
    builder_counts = builder_counts.merge(total_by_builder, on="builder")
    builder_counts["anomaly_rate"] = builder_counts["anomaly_count"] / builder_counts["total_blocks"] * 100
    
    # Show top 15 by anomaly rate
    builder_counts = builder_counts.nlargest(15, "anomaly_rate").sort_values("anomaly_rate", ascending=True)
    
    fig = go.Figure()
    
    fig.add_trace(go.Bar(
        y=builder_counts["builder"],
        x=builder_counts["anomaly_count"],
        orientation="h",
        marker_color="#e74c3c",
        text=builder_counts.apply(lambda r: f"{r['anomaly_count']}/{r['total_blocks']} ({r['anomaly_rate']:.1f}%)", axis=1),
        textposition="outside",
        hovertemplate="<b>%{y}</b><br>Anomalies: %{x}<br>Total blocks: %{customdata[0]:,}<br>Rate: %{customdata[1]:.1f}%<extra></extra>",
        customdata=np.column_stack([builder_counts["total_blocks"], builder_counts["anomaly_rate"]]),
    ))
    
    fig.update_layout(
        margin=dict(l=150, r=80, t=30, b=60),
        xaxis=dict(title="Number of anomalies"),
        yaxis=dict(title=""),
        height=450,
    )
    fig.show(config={"responsive": True})

Anomalies by blob count

Are anomalies more common at certain blob counts?

Show code
if n_anomalies > 0:
    # Count anomalies by blob count
    blob_anomalies = df_outliers.groupby("blob_count").size().reset_index(name="anomaly_count")
    blob_total = df_anomaly.groupby("blob_count").size().reset_index(name="total_blocks")
    
    blob_stats = blob_total.merge(blob_anomalies, on="blob_count", how="left").fillna(0)
    blob_stats["anomaly_count"] = blob_stats["anomaly_count"].astype(int)
    blob_stats["anomaly_rate"] = blob_stats["anomaly_count"] / blob_stats["total_blocks"] * 100
    
    fig = go.Figure()
    
    fig.add_trace(go.Bar(
        x=blob_stats["blob_count"],
        y=blob_stats["anomaly_count"],
        marker_color="#e74c3c",
        hovertemplate="<b>%{x} blobs</b><br>Anomalies: %{y}<br>Total: %{customdata[0]:,}<br>Rate: %{customdata[1]:.1f}%<extra></extra>",
        customdata=np.column_stack([blob_stats["total_blocks"], blob_stats["anomaly_rate"]]),
    ))
    
    fig.update_layout(
        margin=dict(l=60, r=30, t=30, b=60),
        xaxis=dict(title="Blob count", dtick=1),
        yaxis=dict(title="Number of anomalies"),
        height=350,
    )
    fig.show(config={"responsive": True})