Skip to content

Commit 21fd959

Browse files
committed
fix tests bigquery provider + ruff
1 parent 3b2a474 commit 21fd959

File tree

8 files changed

+149
-68
lines changed

8 files changed

+149
-68
lines changed

pyproject.toml

Lines changed: 4 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -58,10 +58,7 @@ explicit_package_bases = true
5858

5959
[tool.pytest.ini_options]
6060
minversion = "6.0"
61-
addopts = "-ra -q"
62-
testpaths = [
63-
"tests",
64-
]
65-
pythonpath = [
66-
"."
67-
]
61+
addopts = "--cov=src --cov-report=term-missing -v"
62+
testpaths = ["tests"]
63+
python_files = "test_*.py"
64+
pythonpath = ["."]

requirements.txt

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -34,6 +34,7 @@ aiohttp>=3.9.0 # For async HTTP requests (used by web3)
3434
pytest>=8.0.0
3535
pytest-cov>=6.0.0
3636
pytest-mock>=3.0.0
37+
pytest-snapshot>=0.9.0
3738
mypy>=1.0.0
3839
types-pytz # Type stubs for pytz
3940
types-requests # Type stubs for requests

src/models/bigquery_provider.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -185,7 +185,7 @@ def fetch_indexer_issuance_eligibility_data(self, start_date: date, end_date: da
185185
- eligible_for_indexing_rewards: Whether the indexer is eligible for indexing rewards.
186186
"""
187187
# Construct the query
188-
query = self._get_indexer_eligibility_query(start_date, end_date)
188+
query = self._get_indexer_eligibility_query(start_date=start_date, end_date=end_date)
189189

190190
# Return the results df
191191
return self._read_gbq_dataframe(query)

src/models/blockchain_client.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -324,7 +324,7 @@ def _get_gas_prices(self, replace: bool) -> Tuple[int, int]:
324324
latest_block = cast(BlockData, latest_block_data)
325325
base_fee_hex = latest_block["baseFeePerGas"]
326326
base_fee = int(base_fee_hex) if isinstance(base_fee_hex, int) else int(str(base_fee_hex), 16)
327-
logger.info(f"Latest block base fee: {base_fee/1e9:.2f} gwei")
327+
logger.info(f"Latest block base fee: {base_fee / 1e9:.2f} gwei")
328328

329329
# If the base fee cannot be retrieved, use a fallback value
330330
except Exception as e:
@@ -334,7 +334,7 @@ def _get_gas_prices(self, replace: bool) -> Tuple[int, int]:
334334
# Try to get the max priority fee
335335
try:
336336
max_priority_fee = self._execute_rpc_call(lambda: self.w3.eth.max_priority_fee)
337-
logger.info(f"Max priority fee: {max_priority_fee/1e9:.2f} gwei")
337+
logger.info(f"Max priority fee: {max_priority_fee / 1e9:.2f} gwei")
338338

339339
# If the max priority fee cannot be retrieved, use a fallback value
340340
except Exception as e:
@@ -364,15 +364,15 @@ def _build_transaction_params(
364364
max_priority_fee_per_gas = max_priority_fee * 2
365365
tx_params["maxFeePerGas"] = max_fee_per_gas
366366
tx_params["maxPriorityFeePerGas"] = max_priority_fee_per_gas
367-
logger.info(f"High gas for replacement: {max_fee_per_gas/1e9:.2f} gwei")
367+
logger.info(f"High gas for replacement: {max_fee_per_gas / 1e9:.2f} gwei")
368368

369369
# If we are not replacing a pending transaction, use a lower gas price
370370
else:
371371
max_fee_per_gas = base_fee * 2 + max_priority_fee
372372
max_priority_fee_per_gas = max_priority_fee
373373
tx_params["maxFeePerGas"] = max_fee_per_gas
374374
tx_params["maxPriorityFeePerGas"] = max_priority_fee_per_gas
375-
logger.info(f"Standard gas: {max_fee_per_gas/1e9:.2f} gwei")
375+
logger.info(f"Standard gas: {max_fee_per_gas / 1e9:.2f} gwei")
376376

377377
logger.info(f"Transaction parameters: nonce={nonce}, gas={gas_limit}, chain_id={chain_id}")
378378
return tx_params

src/models/eligibility_pipeline.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -175,7 +175,7 @@ def validate_dataframe_structure(self, df: pd.DataFrame, required_columns: List[
175175
# If any required columns are missing, raise an error
176176
if missing_columns:
177177
raise ValueError(
178-
f"DataFrame missing required columns: {missing_columns}. " f"Found columns: {list(df.columns)}"
178+
f"DataFrame missing required columns: {missing_columns}. Found columns: {list(df.columns)}"
179179
)
180180

181181
# If all required columns are present, return True

src/utils/slack_notifier.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -129,7 +129,7 @@ def send_success_notification(
129129

130130
# Add transaction links if provided
131131
if transaction_links:
132-
tx_links = "\n".join([f"Batch {i+1}: {link}" for i, link in enumerate(transaction_links)])
132+
tx_links = "\n".join([f"Batch {i + 1}: {link}" for i, link in enumerate(transaction_links)])
133133
fields.append({"title": "Transactions", "value": tx_links, "short": False})
134134

135135
# Create message payload
@@ -179,7 +179,7 @@ def send_failure_notification(
179179

180180
# Add partial transaction links if any succeeded before failure
181181
if partial_transaction_links:
182-
tx_links = "\n".join([f"Batch {i+1}: {link}" for i, link in enumerate(partial_transaction_links)])
182+
tx_links = "\n".join([f"Batch {i + 1}: {link}" for i, link in enumerate(partial_transaction_links)])
183183
fields.append({"title": "Partial Transactions", "value": tx_links, "short": False})
184184

185185
# Truncate error message if too long
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,83 @@
1+
2+
WITH
3+
-- Get daily query metrics per indexer
4+
DailyMetrics AS (
5+
SELECT
6+
day_partition AS day,
7+
indexer,
8+
COUNT(*) AS query_attempts,
9+
SUM(CASE
10+
WHEN status = '200 OK'
11+
AND response_time_ms < 5000
12+
AND blocks_behind < 50000
13+
THEN 1
14+
ELSE 0
15+
END) AS good_responses,
16+
COUNT(DISTINCT deployment) AS unique_subgraphs_served
17+
FROM
18+
test.dataset.table
19+
WHERE
20+
day_partition BETWEEN '2025-01-01' AND '2025-01-28'
21+
GROUP BY
22+
day_partition, indexer
23+
),
24+
-- Flag a day as 'online': the indexer had >= 1 good response AND served >= 10 distinct subgraphs that day
25+
DaysOnline AS (
26+
SELECT
27+
indexer,
28+
day,
29+
unique_subgraphs_served,
30+
CASE WHEN good_responses >= 1 AND unique_subgraphs_served >= 10
31+
THEN 1 ELSE 0
32+
END AS is_online_day
33+
FROM
34+
DailyMetrics
35+
),
36+
-- Calculate unique subgraphs served with at least one good query
37+
UniqueSubgraphs AS (
38+
SELECT
39+
indexer,
40+
COUNT(DISTINCT deployment) AS unique_good_response_subgraphs
41+
FROM
42+
test.dataset.table
43+
WHERE
44+
day_partition BETWEEN '2025-01-01' AND '2025-01-28'
45+
AND status = '200 OK'
46+
AND response_time_ms < 5000
47+
AND blocks_behind < 50000
48+
GROUP BY
49+
indexer
50+
),
51+
-- Calculate overall metrics per indexer
52+
IndexerMetrics AS (
53+
SELECT
54+
d.indexer,
55+
SUM(m.query_attempts) AS total_query_attempts,
56+
SUM(m.good_responses) AS total_good_responses,
57+
SUM(d.is_online_day) AS total_good_days_online,
58+
ds.unique_good_response_subgraphs
59+
FROM
60+
DailyMetrics m
61+
JOIN
62+
DaysOnline d USING (indexer, day)
63+
LEFT JOIN
64+
UniqueSubgraphs ds ON m.indexer = ds.indexer
65+
GROUP BY
66+
d.indexer, ds.unique_good_response_subgraphs
67+
)
68+
-- Final result with eligibility determination
69+
SELECT
70+
indexer,
71+
total_query_attempts AS query_attempts,
72+
total_good_responses AS good_responses,
73+
total_good_days_online,
74+
unique_good_response_subgraphs,
75+
CASE
76+
WHEN total_good_days_online >= 5 THEN 1
77+
ELSE 0
78+
END AS eligible_for_indexing_rewards
79+
FROM
80+
IndexerMetrics
81+
ORDER BY
82+
total_good_days_online DESC, good_responses DESC
83+

0 commit comments

Comments
 (0)