import polars as pl
import glob
from typing import List
import warnings
warnings.filterwarnings("ignore")
Issue 9
9. Vendor Downtime or API Rate-Limits
This notebook documents the analysis for Issue #9: Vendor Downtime or API Rate-Limits in the Autonity Oracle data. It covers:
- What is this issue about?
- Why conduct this issue analysis?
- How to conduct this issue analysis?
- What are the results?
9.1 What Is This Issue About?
Validators occasionally experience data interruptions due to:
- Vendor downtime (temporary outages)
- API rate-limits (causing zero or placeholder values)
The analysis examines abrupt submission stoppages and zero-value (placeholder) submissions, paying particular attention to occurrences that hit multiple validators at the same time, since those point to a shared vendor or API issue.
9.2 Why Conduct This Issue Analysis?
- To identify vendor-related disruptions that could impact reliability.
- To differentiate individual validator errors from broader vendor problems.
- To inform strategies for improved vendor redundancy or rate-limit management.
9.3 How to Conduct the Analysis?
Use Python with the Polars library (v1.24.0) to:
- Load and preprocess Oracle submission CSV files.
- Detect validator submission stoppages exceeding a 120-minute gap.
- Identify submissions with zero or near-zero price placeholders.
- Examine concurrency, i.e. multiple validators experiencing issues simultaneously.
Here’s the Python code used:
def load_and_preprocess_submissions(submission_glob: str) -> pl.DataFrame:
"""
Loads Oracle Submission CSVs and returns a Polars DataFrame.
"""
files = sorted(glob.glob(submission_glob))
if not files:
raise ValueError(f"No CSV files found matching pattern {submission_glob}")
lf_list = []
for f in files:
lf_temp = pl.scan_csv(
f,
dtypes={"Timestamp": pl.Utf8},
null_values=[""],
ignore_errors=True,
)
lf_list.append(lf_temp)
lf = pl.concat(lf_list)
lf = lf.with_columns(
pl.col("Timestamp")
.str.strptime(pl.Datetime, strict=False)
.alias("Timestamp_dt")
)
lf = lf.with_columns(
[
pl.col("Timestamp_dt").cast(pl.Date).alias("date_only"),
pl.col("Timestamp_dt").dt.weekday().alias("weekday_num"),
]
)
df = lf.collect()
return df
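As a quick, optional sanity check, the loader can be called on its own with the same glob pattern used by the driver call later in this section:
df_check = load_and_preprocess_submissions("../submission-data/Oracle_Submission_*.csv")
print(df_check.shape)
print(df_check.select(["Validator Address", "Timestamp_dt", "date_only", "weekday_num"]).head())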
def detect_abrupt_stoppages(
df: pl.DataFrame, max_gap_minutes: float = 120.0
) -> pl.DataFrame:
"""
Looks at the interval between consecutive submissions and flags:
- If there's a gap > `max_gap_minutes` (e.g. 120 minutes).
- If the validator has no submissions after a certain date/time.
"""
df_sorted = df.sort(["Validator Address", "Timestamp_dt"]).with_columns(
(pl.col("Timestamp_dt").cast(pl.Int64) // 1_000_000_000).alias("epoch_seconds")
)
df_with_diff = df_sorted.with_columns(
[
(pl.col("epoch_seconds") - pl.col("epoch_seconds").shift(1))
.over("Validator Address")
.alias("diff_seconds")
]
)
df_with_diff = df_with_diff.with_columns(
(pl.col("diff_seconds") / 60.0).alias("diff_minutes")
)
df_with_diff = df_with_diff.with_columns(
pl.col("Timestamp_dt")
.shift(-1)
.over("Validator Address")
.alias("next_submission_ts")
)
large_gaps = df_with_diff.filter(pl.col("diff_minutes") > max_gap_minutes)
final_stops = df_with_diff.filter(pl.col("next_submission_ts").is_null())
large_gaps_df = large_gaps.select(
[
pl.col("Validator Address"),
pl.col("Timestamp_dt"),
pl.col("next_submission_ts"),
pl.col("diff_minutes").alias("gap_minutes"),
pl.lit(False).alias("is_final_stop"),
]
)
final_stops_df = final_stops.select(
[
pl.col("Validator Address"),
pl.col("Timestamp_dt"),
pl.col("next_submission_ts"),
pl.lit(None).cast(pl.Float64).alias("gap_minutes"),
pl.lit(True).alias("is_final_stop"),
]
)
return pl.concat([large_gaps_df, final_stops_df]).sort(
["Validator Address", "Timestamp_dt"]
)
def detect_zero_placeholder_values(
df: pl.DataFrame,
price_columns: List[str],
zero_threshold: float = 1e-5,
) -> pl.DataFrame:
"""
Detects submissions where all relevant price columns are effectively zero
(below `zero_threshold` once converted from Wei).
"""
bool_exprs = []
for pc in price_columns:
col_expr = ((pl.col(pc).cast(pl.Float64) / 1e18) < zero_threshold).alias(
f"is_{pc}_zero"
)
bool_exprs.append(col_expr)
df_local = df.with_columns(bool_exprs)
newly_created_bools = [f"is_{pc}_zero" for pc in price_columns]
count_zero_expr = pl.fold(
acc=pl.lit(0),
function=lambda acc, x: acc + x,
exprs=[pl.col(b).cast(pl.Int64) for b in newly_created_bools],
).alias("count_zeroed_prices")
df_zero_check = df_local.with_columns(
[
count_zero_expr,
pl.lit(len(price_columns)).alias("total_price_cols"),
]
).with_columns(
(
pl.col("count_zeroed_prices").cast(pl.Float64)
/ pl.col("total_price_cols").cast(pl.Float64)
).alias("fraction_zeroed")
)
df_zero_filtered = (
df_zero_check.filter(pl.col("fraction_zeroed") == 1.0)
.select(
[
"Timestamp_dt",
"Validator Address",
"count_zeroed_prices",
"total_price_cols",
"fraction_zeroed",
]
)
.sort(["Validator Address", "Timestamp_dt"])
)
return df_zero_filtered
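Similarly, a small synthetic check with two price columns (Wei values chosen purely for illustration):
from datetime import datetime

_demo_prices = pl.DataFrame({
    "Validator Address": ["0xAAA", "0xBBB"],
    "Timestamp_dt": [datetime(2025, 1, 1, 0, 0), datetime(2025, 1, 1, 0, 0)],
    "ATN-USD Price": [0, 650_000_000_000_000_000],    # 0 vs. 0.65 after dividing by 1e18
    "NTN-USD Price": [0, 3_100_000_000_000_000_000],  # 0 vs. 3.10 after dividing by 1e18
})
# Expect only the 0xAAA row, since all of its price columns fall below the zero threshold.
print(detect_zero_placeholder_values(_demo_prices, ["ATN-USD Price", "NTN-USD Price"]))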
def detect_concurrent_issues(
df_events: pl.DataFrame, time_col: str = "Timestamp_dt", group_window: str = "1h"
) -> pl.DataFrame:
"""
Detects how many validators exhibit the same event within a certain time window.
"""
if df_events.is_empty():
return pl.DataFrame(
{
"time_bucket": [],
"num_validators": [],
"validator_addresses": [],
}
)
df_local = df_events.with_columns(
pl.col(time_col).dt.truncate(group_window).alias("time_bucket")
)
grouped = (
df_local.lazy()
.group_by("time_bucket")
.agg(
[
pl.n_unique("Validator Address").alias("num_validators"),
pl.col("Validator Address").unique().alias("validator_addresses"),
]
)
)
return grouped.collect().sort("time_bucket")
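And a minimal synthetic check of the hourly bucketing, again with illustrative addresses and times:
from datetime import datetime

_demo_events = pl.DataFrame({
    "Validator Address": ["0xAAA", "0xBBB", "0xCCC"],
    "Timestamp_dt": [
        datetime(2025, 1, 1, 10, 5),
        datetime(2025, 1, 1, 10, 40),
        datetime(2025, 1, 1, 12, 15),
    ],
})
# Expect two buckets: 10:00 with two validators and 12:00 with one.
print(detect_concurrent_issues(_demo_events))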
def analyze_vendor_downtime_api_ratelimits(
submission_glob: str,
price_cols: List[str],
max_gap_minutes: float = 120.0,
zero_threshold: float = 1e-5,
concurrency_window: str = "1h",
):
"""
Main analysis function.
"""
df_all = load_and_preprocess_submissions(submission_glob)
df_stoppages = detect_abrupt_stoppages(df_all, max_gap_minutes=max_gap_minutes)
df_zeros = detect_zero_placeholder_values(
df_all, price_cols, zero_threshold=zero_threshold
)
df_stoppage_concurrency = detect_concurrent_issues(
df_stoppages, time_col="Timestamp_dt", group_window=concurrency_window
)
df_zero_concurrency = detect_concurrent_issues(
df_zeros, time_col="Timestamp_dt", group_window=concurrency_window
)
return {
"df_all": df_all,
"df_stoppages": df_stoppages,
"df_zero_placeholders": df_zeros,
"df_stoppage_concurrency": df_stoppage_concurrency,
"df_zero_concurrency": df_zero_concurrency,
    }
fx_price_cols = [
"AUD-USD Price",
"CAD-USD Price",
"EUR-USD Price",
"GBP-USD Price",
"JPY-USD Price",
"SEK-USD Price",
]
autonity_price_cols = [
"ATN-USD Price",
"NTN-USD Price",
"NTN-ATN Price",
]
all_price_cols = fx_price_cols + autonity_price_cols
results = analyze_vendor_downtime_api_ratelimits(
submission_glob="../submission-data/Oracle_Submission_*.csv",
price_cols=all_price_cols,
max_gap_minutes=120.0, # e.g. 2 hours
zero_threshold=1e-5, # treat sub-1e-5 as "effectively zero"
concurrency_window="1h", # aggregate concurrency by the hour
)
9.4 What Are the Results?
The following sections present the analysis findings, generated dynamically from the results dictionary produced above.
9.4.1 Abrupt Stoppages
df_stoppages = results["df_stoppages"]
print(f"Total stoppage events detected: {df_stoppages.height}")
if not df_stoppages.is_empty():
print("Sample abrupt stoppage records:")
display(df_stoppages)
else:
print("No stoppages found above the given threshold.")Total stoppage events detected: 60
Sample abrupt stoppage records:
| Validator Address | Timestamp_dt | next_submission_ts | gap_minutes | is_final_stop |
|---|---|---|---|---|
| str | datetime[μs, UTC] | datetime[μs, UTC] | f64 | bool |
| "0x00a96aaED75015Bb44cED878D927… | 2025-01-01 23:59:44 UTC | null | null | true |
| "0x01F788E4371a70D579C178Ea7F48… | 2025-01-01 23:59:44 UTC | null | null | true |
| "0x100E38f7BCEc53937BDd79ADE46F… | 2025-01-01 23:59:44 UTC | null | null | true |
| "0x1476A65D7B5739dE1805d5130441… | 2025-01-01 23:59:44 UTC | null | null | true |
| "0x197B2c44b887c4aC01243BDE7E4b… | 2025-01-01 23:59:44 UTC | null | null | true |
| … | … | … | … | … |
| "0xd625d50B0d087861c286d726eC51… | 2025-01-01 23:59:44 UTC | null | null | true |
| "0xdF239e0D5b4E6e820B0cFEF6972A… | 2025-01-01 23:59:44 UTC | null | null | true |
| "0xf10f56Bf0A28E0737c7e6bB0aF92… | 2025-01-01 23:59:44 UTC | null | null | true |
| "0xf34CD6c09a59d7D3d1a6C3dC231a… | 2025-01-01 23:59:44 UTC | null | null | true |
| "0xfD97FB8835d25740A2Da27c69762… | 2025-01-01 23:59:44 UTC | null | null | true |
Note: You may observe that next_submission_ts and gap_minutes are null and is_final_stop = True for every record in df_stoppages. This usually means no gaps larger than the threshold were found, so the only “stoppage” events are each validator's final submission in the dataset.
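To separate any genuine large gaps from these end-of-dataset records, the final-stop rows can simply be filtered out:
df_large_gaps = df_stoppages.filter(~pl.col("is_final_stop"))
print(f"Large-gap events (excluding final stops): {df_large_gaps.height}")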
Interpretation:
- Frequent stoppages indicate potential downtime.
- Sparse stoppages suggest isolated, validator-specific issues rather than a systemic problem.
9.4.2 Zero or Placeholder Values
df_zero = results["df_zero_placeholders"]
print(f"Total zero-placeholder events detected: {df_zero.height}")
if not df_zero.is_empty():
print("Sample zero-placeholder records:")
display(df_zero)
else:
print("No zero-placeholder rows detected.")Total zero-placeholder events detected: 0
No zero-placeholder rows detected.
Interpretation:
- Zero-value submissions strongly suggest rate-limit hits or vendor API fallbacks.
- A large number of zero events would warrant a vendor review.
9.4.3 Concurrency of Issues
df_stop_conc = results["df_stoppage_concurrency"]
df_zero_conc = results["df_zero_concurrency"]
print("Stoppage Concurrency Events:")
if df_stop_conc.is_empty():
print("No concurrency found among stoppages.")
else:
display(df_stop_conc)
print("\nZero-Placeholder Concurrency Events:")
if df_zero_conc.is_empty():
print("No concurrency found among zero placeholders.")
else:
    display(df_zero_conc)
Stoppage Concurrency Events:
| time_bucket | num_validators | validator_addresses |
|---|---|---|
| datetime[μs, UTC] | u32 | list[str] |
| 2025-01-01 23:00:00 UTC | 60 | ["0xfD97FB8835d25740A2Da27c69762D74F6A931858", "0xDF2D0052ea56A860443039619f6DAe4434bc0Ac4", … "0x718361fc3637199F24a2437331677D6B89a40519"] |
Zero-Placeholder Concurrency Events:
No concurrency found among zero placeholders.
- High concurrency strongly implies a vendor or API outage affecting multiple validators simultaneously.
- Low or no concurrency indicates validator-specific configuration or connectivity issues.
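As a rough follow-up, hourly buckets in which a large share of validators stopped at once can be flagged; the threshold of 30 below is illustrative, roughly half of the ~60 active validators:
df_widespread = df_stop_conc.filter(pl.col("num_validators") >= 30)
print(f"Hours in which 30 or more validators show stoppage events: {df_widespread.height}")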
9.4.4 List of All Validators and Their Zero-Placeholder Ratios
df_all = results["df_all"] # all submissions
df_zero = results["df_zero_placeholders"] # all-zero submissions
df_sub_count = (
df_all.lazy()
.group_by("Validator Address")
.agg(
pl.count().alias("num_submissions")
)
)
df_zero_count = (
df_zero.lazy()
.group_by("Validator Address")
.agg(
pl.count().alias("num_zero_submissions")
)
)
df_ratio = (
df_sub_count.join(df_zero_count, on="Validator Address", how="left")
.with_columns(
(pl.col("num_zero_submissions") / pl.col("num_submissions"))
.fill_null(0)
.alias("zero_placeholder_ratio")
)
.select(["Validator Address", "num_submissions", "num_zero_submissions", "zero_placeholder_ratio"])
.sort("num_submissions", descending=True)
.collect()
)
for row in df_ratio.to_dicts():
print(
f"Validator {row['Validator Address']}: "
f"total_submissions={row['num_submissions']}, "
f"zero_submissions={row['num_zero_submissions']}, "
f"zero_placeholder_ratio={row['zero_placeholder_ratio']:.2f}"
    )
Validator 0x99E2B4B27BDe92b42D04B6CF302cF564D2C13b74: total_submissions=2880, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0x3597d2D42f8Fbbc82E8b1046048773aD6DDB717E: total_submissions=2880, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0x36142A4f36974e2935192A1111C39330aA296D3C: total_submissions=2880, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0xB5d8be2AB4b6d7E6be7Ea28E91b370223a06289f: total_submissions=2880, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0xfD97FB8835d25740A2Da27c69762D74F6A931858: total_submissions=2880, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0x718361fc3637199F24a2437331677D6B89a40519: total_submissions=2880, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0x3fe573552E14a0FC11Da25E43Fef11e16a785068: total_submissions=2880, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0xF9B38D02959379d43C764064dE201324d5e12931: total_submissions=2880, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0x527192F3D2408C84087607b7feE1d0f907821E17: total_submissions=2880, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0x791A7F840ac11841cCB0FaA968B2e3a0Db930fCe: total_submissions=2880, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0x551f3300FCFE0e392178b3542c009948008B2a9F: total_submissions=2880, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0x59031767f20EA8F4a3d90d33aB0DAA2ca469Fd9a: total_submissions=2880, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0x383A3c437d3F12f60E5fC990119468D3561EfBfc: total_submissions=2880, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0xC1F9acAF1824F6C906b35A0D2584D6E25077C7f5: total_submissions=2880, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0x5603caFE3313D0cf56Fd4bE4A2f606dD6E43F8Eb: total_submissions=2880, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0x1Be7f70BCf8393a7e4A5BcC66F6f15d6e35cfBBC: total_submissions=2880, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0xBBf36374eb23968F25aecAEbb97BF3118f3c2fEC: total_submissions=2880, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0xf10f56Bf0A28E0737c7e6bB0aF92f3DDad34aE6a: total_submissions=2880, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0x94d28f08Ff81A80f4716C0a8EfC6CAC2Ec74d09E: total_submissions=2880, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0x24915749B793375a8C93090AF19928aFF1CAEcb6: total_submissions=2880, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0xdF239e0D5b4E6e820B0cFEF6972A90893c2073AB: total_submissions=2880, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0x9d28e40E9Ec4789f9A0D17e421F76D8D0868EA44: total_submissions=2880, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0x22A76e194A49c9e5508Cd4A3E1cD555D088ECB08: total_submissions=2880, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0x358488a4EdCA493FCD87610dcd50c62c8A3Dd658: total_submissions=2880, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0x26E2724dBD14Fbd52be430B97043AA4c83F05852: total_submissions=2880, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0x197B2c44b887c4aC01243BDE7E4bBa8bd95BC3a8: total_submissions=2880, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0x9C7dAABb5101623340C925CFD6fF74088ff5672e: total_submissions=2880, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0xE4686A4C6E63A8ab51B458c52EB779AEcf0B74f7: total_submissions=2880, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0xDCA5DFF3D42f2db3C18dBE823380A0A81db49A7E: total_submissions=2880, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0x64F83c2538A646A550Ad9bEEb63427a377359DEE: total_submissions=2880, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0xcf716b3930d7cf6f2ADAD90A27c39fDc9D643BBd: total_submissions=2880, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0x23b4Be9536F93b8D550214912fD0e38417Ff7209: total_submissions=2880, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0xE9FFF86CAdC3136b3D94948B8Fd23631EDaa2dE3: total_submissions=2880, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0xD9fDab408dF7Ae751691BeC2efE3b713ba3f9C36: total_submissions=2880, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0x94470A842Ea4f44e668EB9C2AB81367b6Ce01772: total_submissions=2880, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0xcdEed21b471b0Dc54faF74480A0E700fCc42a7b6: total_submissions=2880, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0x8584A78A9b94f332A34BBf24D2AF83367Da31894: total_submissions=2879, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0xEf0Ba5e345C2C3937df5667A870Aae5105CAa3a5: total_submissions=2879, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0xc5B9d978715F081E226cb28bADB7Ba4cde5f9775: total_submissions=2879, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0x8f91e0ADF8065C3fFF92297267E02DF32C2978FF: total_submissions=2879, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0xDF2D0052ea56A860443039619f6DAe4434bc0Ac4: total_submissions=2879, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0xBE287C82A786218E008FF97320b08244BE4A282c: total_submissions=2879, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0x984A46Ec685Bb41A7BBb2bc39f80C78410ff4057: total_submissions=2877, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0x7232e75a8bFd8c9ab002BB3A00eAa885BC72A6dd: total_submissions=2877, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0x5E17e837DcBa2728C94f95c38fA8a47CB9C8818F: total_submissions=2876, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0x100E38f7BCEc53937BDd79ADE46F34362470577B: total_submissions=2876, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0x3AaF7817618728ffEF81898E11A3171C33faAE41: total_submissions=2874, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0x6a395dE946c0493157404E2b1947493c633f569E: total_submissions=2874, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0xd61a48b0e11B0Dc6b7Bd713B1012563c52591BAA: total_submissions=2873, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0x00a96aaED75015Bb44cED878D927dcb15ec1FF54: total_submissions=2866, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0x831B837C3DA1B6c2AB68a690206bDfF368877E19: total_submissions=2863, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0xd625d50B0d087861c286d726eC51Cf4Bd9c54357: total_submissions=2856, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0x6747c02DE7eb2099265e55715Ba2E03e8563D051: total_submissions=2840, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0x01F788E4371a70D579C178Ea7F48E04e8B2CD743: total_submissions=2837, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0xf34CD6c09a59d7D3d1a6C3dC231a7834E5615D6A: total_submissions=2834, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0x19E356ebC20283fc74AF0BA4C179502A1F62fA7B: total_submissions=2833, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0xbfDcAF35f52F9ef423ac8F2621F9eef8be6dEd17: total_submissions=2833, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0x2928FE5b911BCAf837cAd93eB9626E86a189f1dd: total_submissions=2829, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0x4cD134001EEF0843B9c69Ba9569d11fDcF4bd495: total_submissions=2823, zero_submissions=None, zero_placeholder_ratio=0.00
Validator 0x1476A65D7B5739dE1805d5130441A94022Ee49fe: total_submissions=2462, zero_submissions=None, zero_placeholder_ratio=0.00
Please note: total_submissions is the count of all submission rows for a validator. zero_submissions counts the rows in which every tracked price column is effectively zero (below 1e-5 after converting from Wei); if a validator never produced such an all-zero row, this value appears as null. zero_placeholder_ratio is zero_submissions / total_submissions, a value between 0 and 1 giving the fraction of a validator's submissions that were complete zero placeholders. For example, a validator with 2,880 submissions and no all-zero rows has a ratio of 0 / 2880 = 0.00.
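Should zero placeholders appear in a future dataset, the most affected validators can be surfaced by sorting on the ratio, for example:
print(
    df_ratio.sort("zero_placeholder_ratio", descending=True)
    .head(5)
    .select(["Validator Address", "num_zero_submissions", "zero_placeholder_ratio"])
)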