Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
38 changes: 38 additions & 0 deletions backend/crates/atlas-server/src/api/handlers/addresses.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ use axum::{
Json,
};
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
use std::sync::Arc;

use crate::api::error::ApiResult;
Expand Down Expand Up @@ -66,6 +67,32 @@ fn default_limit() -> u32 {
20
}

/// Derive a token's total supply by summing every positive holder balance
/// indexed for `address`. The COALESCE guarantees a 0 row (rather than NULL)
/// when the contract has no indexed holders yet.
async fn get_indexed_erc20_total_supply(
    pool: &PgPool,
    address: &str,
) -> Result<bigdecimal::BigDecimal, sqlx::Error> {
    let row: (bigdecimal::BigDecimal,) = sqlx::query_as(
        "SELECT COALESCE(SUM(balance), 0)
         FROM erc20_balances
         WHERE contract_address = $1 AND balance > 0",
    )
    .bind(address)
    .fetch_one(pool)
    .await?;
    Ok(row.0)
}

/// Report whether at least one transfer event has been indexed for the
/// given ERC-20 contract address.
async fn has_erc20_transfers(pool: &PgPool, address: &str) -> Result<bool, sqlx::Error> {
    let row: (bool,) =
        sqlx::query_as("SELECT EXISTS(SELECT 1 FROM erc20_transfers WHERE contract_address = $1)")
            .bind(address)
            .fetch_one(pool)
            .await?;
    Ok(row.0)
}

pub async fn list_addresses(
State(state): State<Arc<AppState>>,
Query(filters): Query<AddressFilters>,
Expand Down Expand Up @@ -221,6 +248,17 @@ pub async fn get_address(
.fetch_optional(&state.pool)
.await?;

let erc20_contract = match erc20_contract {
Some(mut erc20) => {
if erc20.total_supply.is_none() || has_erc20_transfers(&state.pool, &address).await? {
erc20.total_supply =
Some(get_indexed_erc20_total_supply(&state.pool, &address).await?);
}
Some(erc20)
}
None => None,
};
Comment thread
coderabbitai[bot] marked this conversation as resolved.

// Merge the data
match (base_addr, nft_contract, erc20_contract) {
// Found in addresses table and is an NFT contract
Expand Down
51 changes: 23 additions & 28 deletions backend/crates/atlas-server/src/api/handlers/tokens.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ use axum::{
Json,
};
use chrono::Utc;
use sqlx::PgPool;
use std::sync::Arc;

use crate::api::error::ApiResult;
Expand Down Expand Up @@ -50,6 +51,22 @@ pub struct TokenDetailResponse {
pub transfer_count: i64,
}

/// Compute a token's total supply from the indexed per-holder balances.
/// Only strictly positive balances contribute; a contract with no indexed
/// holders yields 0 thanks to the COALESCE.
async fn get_indexed_total_supply(
    pool: &PgPool,
    address: &str,
) -> Result<bigdecimal::BigDecimal, sqlx::Error> {
    sqlx::query_as::<_, (bigdecimal::BigDecimal,)>(
        "SELECT COALESCE(SUM(balance), 0)
         FROM erc20_balances
         WHERE contract_address = $1 AND balance > 0",
    )
    .bind(address)
    .fetch_one(pool)
    .await
    .map(|(supply,)| supply)
}

/// GET /api/tokens/:address - Get token details
pub async fn get_token(
State(state): State<Arc<AppState>>,
Expand Down Expand Up @@ -80,16 +97,8 @@ pub async fn get_token(
.fetch_one(&state.pool)
.await?;

// Compute total_supply from balances if not set
if contract.total_supply.is_none() {
let computed_supply: Option<(bigdecimal::BigDecimal,)> = sqlx::query_as(
"SELECT COALESCE(SUM(balance), 0) FROM erc20_balances WHERE contract_address = $1 AND balance > 0",
)
.bind(&address)
.fetch_optional(&state.pool)
.await?;

contract.total_supply = computed_supply.map(|(s,)| s);
if transfer_count.0 > 0 || contract.total_supply.is_none() {
contract.total_supply = Some(get_indexed_total_supply(&state.pool, &address).await?);
Comment on lines +101 to +102
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟠 Major

The incomplete-history path still exposes indexed supply.

backend/crates/atlas-server/src/indexer/indexer.rs now advances erc20_contracts.total_supply from indexed mint/burn deltas. When has_complete_erc20_supply_history() is false, get_token leaves that field untouched and get_token_holders falls back to it, so a fresh sync can still return a partial supply before the completeness flag flips. The false branch needs a separate trusted source, or it should suppress total_supply until history is complete.

Also applies to: 137-145

🤖 Prompt for AI Agents
Verify each finding against the current code and only fix it if needed.

In `@backend/crates/atlas-server/src/api/handlers/tokens.rs` around lines 101 -
102, The current logic lets indexed supply leak when history is incomplete: when
has_complete_erc20_supply_history(&state.pool) is false you must not surface
erc20_contracts.total_supply (or any value derived from
get_indexed_total_supply) to callers of get_token or get_token_holders; change
the false-branch so that get_token clears/suppresses contract.total_supply (set
to None) and ensure get_token_holders does not fall back to the stored
erc20_contracts.total_supply value—either query a trusted on-chain source
instead or return no total_supply until has_complete_erc20_supply_history
returns true; apply the same change to the other occurrence around lines 137-145
where total_supply is currently used.

}

Ok(Json(TokenDetailResponse {
Expand Down Expand Up @@ -124,29 +133,15 @@ pub async fn get_token_holders(
.fetch_one(&state.pool)
.await?;

// Get total supply for percentage calculation
// First try to get it from the contract, if NULL compute from sum of balances
let total_supply: Option<bigdecimal::BigDecimal> = {
let total_supply = if total.0 > 0 {
Some(get_indexed_total_supply(&state.pool, &address).await?)
} else {
let stored: Option<(Option<bigdecimal::BigDecimal>,)> =
sqlx::query_as("SELECT total_supply FROM erc20_contracts WHERE address = $1")
.bind(&address)
.fetch_optional(&state.pool)
.await?;

match stored {
Some((Some(ts),)) => Some(ts),
_ => {
// Compute from sum of balances
let computed: Option<(bigdecimal::BigDecimal,)> = sqlx::query_as(
"SELECT COALESCE(SUM(balance), 0) FROM erc20_balances
WHERE contract_address = $1 AND balance > 0",
)
.bind(&address)
.fetch_optional(&state.pool)
.await?;
computed.map(|(s,)| s)
}
}
stored.and_then(|(supply,)| supply)
};

let balances: Vec<Erc20Balance> = sqlx::query_as(
Expand Down
24 changes: 24 additions & 0 deletions backend/crates/atlas-server/src/indexer/batch.rs
Original file line number Diff line number Diff line change
Expand Up @@ -104,6 +104,9 @@ pub(crate) struct BlockBatch {
// erc20_balances — aggregated deltas per (address, contract)
pub(crate) balance_map: HashMap<(String, String), BalanceDelta>,

// erc20 total supply deltas — aggregated per contract from mint/burn events
pub(crate) supply_map: HashMap<String, BigDecimal>,

// Contracts newly discovered in this batch.
// These are NOT merged into the persistent known_* sets until after a
// successful write, so a failed write doesn't leave the in-memory sets
Expand Down Expand Up @@ -158,6 +161,16 @@ impl BlockBatch {
entry.last_block = entry.last_block.max(block);
}

/// Add a total supply delta for a contract.
/// Only mint and burn transfers should touch this accumulator.
pub(crate) fn apply_supply_delta(&mut self, contract: String, delta: BigDecimal) {
let entry = self
.supply_map
.entry(contract)
.or_insert(BigDecimal::from(0));
*entry += delta;
}

pub(crate) fn materialize_blocks(&self, indexed_at: DateTime<Utc>) -> Vec<Block> {
debug_assert_eq!(self.b_numbers.len(), self.b_hashes.len());
debug_assert_eq!(self.b_numbers.len(), self.b_parent_hashes.len());
Expand Down Expand Up @@ -257,6 +270,17 @@ mod tests {
assert_eq!(entry.last_block, 100);
}

#[test]
fn apply_supply_delta_accumulates_by_contract() {
let mut batch = BlockBatch::new();
let contract = "0xtoken".to_string();

batch.apply_supply_delta(contract.clone(), BigDecimal::from(100));
batch.apply_supply_delta(contract.clone(), BigDecimal::from(-25));

assert_eq!(batch.supply_map[&contract], BigDecimal::from(75));
}

#[test]
fn materialize_blocks_preserves_parallel_block_fields() {
let mut batch = BlockBatch::new();
Expand Down
66 changes: 64 additions & 2 deletions backend/crates/atlas-server/src/indexer/indexer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -614,15 +614,19 @@ impl Indexer {
// Aggregate balance deltas — multiple transfers in the same batch
// for the same (address, contract) pair are summed in Rust,
// so we only need one DB upsert per unique pair.
if from != ZERO_ADDRESS {
if from == ZERO_ADDRESS {
batch.apply_supply_delta(contract.clone(), value.clone());
} else {
batch.apply_balance_delta(
from,
contract.clone(),
-value.clone(),
block_num as i64,
);
}
if to != ZERO_ADDRESS {
if to == ZERO_ADDRESS {
batch.apply_supply_delta(contract.clone(), -value);
} else {
batch.apply_balance_delta(
to,
contract.clone(),
Expand Down Expand Up @@ -673,6 +677,7 @@ impl Indexer {
ec_addresses,
ec_first_seen_blocks,
balance_map,
supply_map,
last_block,
..
} = batch;
Expand Down Expand Up @@ -800,6 +805,26 @@ impl Indexer {
.await?;
}

if !supply_map.is_empty() {
let mut supply_contracts = Vec::with_capacity(supply_map.len());
let mut supply_deltas = Vec::with_capacity(supply_map.len());
for (contract, delta) in supply_map {
supply_contracts.push(contract);
supply_deltas.push(delta.to_string());
}

let params: [&(dyn ToSql + Sync); 2] = [&supply_contracts, &supply_deltas];
pg_tx
.execute(
"UPDATE erc20_contracts AS c
SET total_supply = COALESCE(c.total_supply, 0) + s.supply_delta::numeric
FROM unnest($1::text[], $2::text[]) AS s(contract_address, supply_delta)
WHERE c.address = s.contract_address",
&params,
)
.await?;
}

if update_watermark {
let last_value = last_block.to_string();
pg_tx
Expand Down Expand Up @@ -1049,6 +1074,43 @@ mod tests {
let contract = batch.ec_addresses[0].clone();
let to = "0x2222222222222222222222222222222222222222";
assert!(batch.balance_map.contains_key(&(to.to_string(), contract)));
assert_eq!(
batch.supply_map["0x3333333333333333333333333333333333333333"],
BigDecimal::from(1000)
);
}

#[test]
fn collect_erc20_burn_tracks_negative_supply_delta() {
let mut batch = BlockBatch::new();
let known_erc20 = HashSet::new();
let known_nft = HashSet::new();

let logs = serde_json::json!([{
"address": "0x3333333333333333333333333333333333333333",
"topics": [
"0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef",
"0x0000000000000000000000001111111111111111111111111111111111111111",
"0x0000000000000000000000000000000000000000000000000000000000000000"
],
"data": "0x00000000000000000000000000000000000000000000000000000000000003e8",
"blockNumber": "0x1",
"transactionHash": "0x0000000000000000000000000000000000000000000000000000000000000001",
"transactionIndex": "0x0",
"blockHash": "0x0000000000000000000000000000000000000000000000000000000000000001",
"logIndex": "0x0",
"removed": false
}]);

let mut fb = empty_fetched_block(1);
fb.receipts = vec![make_receipt(logs)];
Indexer::collect_block(&mut batch, &known_erc20, &known_nft, fb);

assert_eq!(batch.balance_map.len(), 1);
assert_eq!(
batch.supply_map["0x3333333333333333333333333333333333333333"],
BigDecimal::from(-1000)
);
}

#[test]
Expand Down
14 changes: 1 addition & 13 deletions backend/crates/atlas-server/src/indexer/metadata.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@ use alloy::{
sol,
};
use anyhow::Result;
use bigdecimal::BigDecimal;
use sqlx::PgPool;
use std::{str::FromStr, sync::Arc, time::Duration};

Expand All @@ -29,7 +28,6 @@ sol! {
function name() external view returns (string memory);
function symbol() external view returns (string memory);
function decimals() external view returns (uint8);
function totalSupply() external view returns (uint256);
}
}

Expand Down Expand Up @@ -283,7 +281,7 @@ async fn fetch_nft_contract_metadata(
Ok(())
}

/// Fetch ERC-20 contract metadata (name, symbol, decimals, totalSupply)
/// Fetch ERC-20 contract metadata (name, symbol, decimals)
async fn fetch_erc20_contract_metadata(
pool: &PgPool,
provider: &HttpProvider,
Expand All @@ -301,28 +299,18 @@ async fn fetch_erc20_contract_metadata(
// Fetch decimals
let decimals = contract.decimals().call().await.ok().map(|r| r as i16);

// Fetch totalSupply
let total_supply = contract
.totalSupply()
.call()
.await
.ok()
.map(|r| BigDecimal::from_str(&r.to_string()).unwrap_or_default());

sqlx::query(
"UPDATE erc20_contracts SET
name = COALESCE($2, name),
symbol = COALESCE($3, symbol),
decimals = COALESCE($4, decimals),
total_supply = COALESCE($5, total_supply),
metadata_fetched = true
WHERE address = $1",
)
.bind(contract_address)
.bind(name)
.bind(symbol)
.bind(decimals)
.bind(total_supply)
.execute(pool)
.await?;

Expand Down
Loading
Loading