This commit is contained in:
2026-04-30 12:37:01 +02:00
parent 37d54887d6
commit b3b0e882b2
14 changed files with 1324 additions and 9 deletions

View File

@@ -31,6 +31,7 @@ pub use dtos::KbLaunchSurfaceKeyDto;
pub use dtos::KbLiquidityEventDto;
pub use dtos::KbObservedTokenDto;
pub use dtos::KbOnchainObservationDto;
pub use dtos::KbPairAnalyticSignalDto;
pub use dtos::KbPairCandleDto;
pub use dtos::KbPairDto;
pub use dtos::KbPairMetricDto;
@@ -62,6 +63,7 @@ pub use entities::KbLaunchSurfaceKeyEntity;
pub use entities::KbLiquidityEventEntity;
pub use entities::KbObservedTokenEntity;
pub use entities::KbOnchainObservationEntity;
pub use entities::KbPairAnalyticSignalEntity;
pub use entities::KbPairCandleEntity;
pub use entities::KbPairEntity;
pub use entities::KbPairMetricEntity;
@@ -89,6 +91,7 @@ pub use queries::get_launch_attribution_by_decoded_event_id;
pub use queries::get_launch_surface_by_code;
pub use queries::get_launch_surface_key_by_match;
pub use queries::get_observed_token_by_mint;
pub use queries::get_pair_analytic_signal_by_key;
pub use queries::get_pair_by_pool_id;
pub use queries::get_pair_candle_by_key;
pub use queries::get_pair_metric_by_pair_id;
@@ -114,6 +117,7 @@ pub use queries::list_launch_attributions_by_pool_id;
pub use queries::list_launch_surface_keys_by_surface_id;
pub use queries::list_launch_surfaces;
pub use queries::list_observed_tokens;
pub use queries::list_pair_analytic_signals_by_pair_id;
pub use queries::list_pair_candles_by_pair_and_timeframe;
pub use queries::list_pair_metrics;
pub use queries::list_pairs;
@@ -149,6 +153,7 @@ pub use queries::upsert_launch_surface_key;
pub use queries::upsert_liquidity_event;
pub use queries::upsert_observed_token;
pub use queries::upsert_pair;
pub use queries::upsert_pair_analytic_signal;
pub use queries::upsert_pair_candle;
pub use queries::upsert_pair_metric;
pub use queries::upsert_pool;

View File

@@ -19,6 +19,7 @@ mod liquidity_event;
mod observed_token;
mod onchain_observation;
mod pair;
mod pair_analytic_signal;
mod pair_candle;
mod pair_metric;
mod pool;
@@ -51,6 +52,7 @@ pub use liquidity_event::KbLiquidityEventDto;
pub use observed_token::KbObservedTokenDto;
pub use onchain_observation::KbOnchainObservationDto;
pub use pair::KbPairDto;
pub use pair_analytic_signal::KbPairAnalyticSignalDto;
pub use pair_candle::KbPairCandleDto;
pub use pair_metric::KbPairMetricDto;
pub use pool::KbPoolDto;

View File

@@ -0,0 +1,120 @@
// file: kb_lib/src/db/dtos/pair_analytic_signal.rs
//! Pair-analytic-signal DTO.
/// Application-facing pair-analytic-signal DTO.
///
/// The logical key of a signal is `(pair_id, signal_kind, timeframe_seconds,
/// bucket_start_unix)`. A `timeframe_seconds` of zero together with a
/// `bucket_start_unix` of zero denotes a non-bucketed, pair-lifetime signal.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct KbPairAnalyticSignalDto {
    /// Optional numeric primary key. `None` until the row has been persisted.
    pub id: std::option::Option<i64>,
    /// Related pair id.
    pub pair_id: i64,
    /// Stable signal kind (e.g. `trade_burst_60s`).
    pub signal_kind: std::string::String,
    /// Signal severity.
    pub severity: crate::KbAnalysisSignalSeverity,
    /// Timeframe in seconds. Zero means non-bucketed signal.
    pub timeframe_seconds: i64,
    /// Inclusive bucket start in unix seconds. Zero means non-bucketed signal.
    pub bucket_start_unix: i64,
    /// Optional numeric score; meaning depends on `signal_kind`.
    pub score: std::option::Option<f64>,
    /// Signal value payload (arbitrary JSON).
    pub signal_value: serde_json::Value,
    /// Optional first transaction id that produced this signal key.
    pub first_transaction_id: std::option::Option<i64>,
    /// Optional last transaction id that produced this signal key.
    pub last_transaction_id: std::option::Option<i64>,
    /// Creation timestamp (UTC).
    pub created_at: chrono::DateTime<chrono::Utc>,
    /// Update timestamp (UTC).
    pub updated_at: chrono::DateTime<chrono::Utc>,
}
impl KbPairAnalyticSignalDto {
    /// Creates a new pair-analytic-signal DTO with no id and both timestamps
    /// initialized to the same current UTC instant.
    pub fn new(
        pair_id: i64,
        signal_kind: std::string::String,
        severity: crate::KbAnalysisSignalSeverity,
        timeframe_seconds: i64,
        bucket_start_unix: i64,
        score: std::option::Option<f64>,
        signal_value: serde_json::Value,
        first_transaction_id: std::option::Option<i64>,
        last_transaction_id: std::option::Option<i64>,
    ) -> Self {
        // One shared timestamp so created_at and updated_at start equal.
        let creation_instant = chrono::Utc::now();
        Self {
            id: std::option::Option::None,
            created_at: creation_instant,
            updated_at: creation_instant,
            pair_id,
            signal_kind,
            severity,
            timeframe_seconds,
            bucket_start_unix,
            score,
            signal_value,
            first_transaction_id,
            last_transaction_id,
        }
    }
}
impl TryFrom<crate::KbPairAnalyticSignalEntity> for KbPairAnalyticSignalDto {
    type Error = crate::KbError;

    /// Decodes a persisted row into the application-facing DTO: the severity
    /// discriminant, the JSON payload, and both RFC3339 timestamps.
    fn try_from(entity: crate::KbPairAnalyticSignalEntity) -> Result<Self, Self::Error> {
        let severity = crate::KbAnalysisSignalSeverity::from_i16(entity.severity)?;
        let signal_value =
            serde_json::from_str::<serde_json::Value>(entity.signal_value_json.as_str())
                .map_err(|error| {
                    crate::KbError::Db(format!(
                        "cannot parse pair_analytic_signal signal_value_json '{}': {}",
                        entity.signal_value_json, error
                    ))
                })?;
        let created_at = chrono::DateTime::parse_from_rfc3339(entity.created_at.as_str())
            .map_err(|error| {
                crate::KbError::Db(format!(
                    "cannot parse pair_analytic_signal created_at '{}': {}",
                    entity.created_at, error
                ))
            })?
            .with_timezone(&chrono::Utc);
        let updated_at = chrono::DateTime::parse_from_rfc3339(entity.updated_at.as_str())
            .map_err(|error| {
                crate::KbError::Db(format!(
                    "cannot parse pair_analytic_signal updated_at '{}': {}",
                    entity.updated_at, error
                ))
            })?
            .with_timezone(&chrono::Utc);
        Ok(Self {
            id: Some(entity.id),
            pair_id: entity.pair_id,
            signal_kind: entity.signal_kind,
            severity,
            timeframe_seconds: entity.timeframe_seconds,
            bucket_start_unix: entity.bucket_start_unix,
            score: entity.score,
            signal_value,
            first_transaction_id: entity.first_transaction_id,
            last_transaction_id: entity.last_transaction_id,
            created_at,
            updated_at,
        })
    }
}

View File

@@ -21,6 +21,7 @@ mod liquidity_event;
mod observed_token;
mod onchain_observation;
mod pair;
mod pair_analytic_signal;
mod pair_candle;
mod pair_metric;
mod pool;
@@ -53,6 +54,7 @@ pub use liquidity_event::KbLiquidityEventEntity;
pub use observed_token::KbObservedTokenEntity;
pub use onchain_observation::KbOnchainObservationEntity;
pub use pair::KbPairEntity;
pub use pair_analytic_signal::KbPairAnalyticSignalEntity;
pub use pair_candle::KbPairCandleEntity;
pub use pair_metric::KbPairMetricEntity;
pub use pool::KbPoolEntity;

View File

@@ -0,0 +1,32 @@
// file: kb_lib/src/db/entities/pair_analytic_signal.rs
//! Pair-analytic-signal entity.
/// Persisted pair-analytic-signal row.
///
/// Mirrors one row of the `kb_pair_analytic_signals` table; converted into
/// the application-facing DTO via `TryFrom`.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, sqlx::FromRow)]
pub struct KbPairAnalyticSignalEntity {
    /// Numeric primary key.
    pub id: i64,
    /// Related pair id (references `kb_pairs.id`).
    pub pair_id: i64,
    /// Stable signal kind.
    pub signal_kind: std::string::String,
    /// Signal severity stored as the i16 discriminant of `KbAnalysisSignalSeverity`.
    pub severity: i16,
    /// Timeframe in seconds. Zero means non-bucketed signal.
    pub timeframe_seconds: i64,
    /// Inclusive bucket start in unix seconds. Zero means non-bucketed signal.
    pub bucket_start_unix: i64,
    /// Optional numeric score.
    pub score: std::option::Option<f64>,
    /// Signal value JSON encoded as text.
    pub signal_value_json: std::string::String,
    /// Optional first transaction id that produced this signal key.
    pub first_transaction_id: std::option::Option<i64>,
    /// Optional last transaction id that produced this signal key.
    pub last_transaction_id: std::option::Option<i64>,
    /// Creation timestamp encoded as RFC3339 UTC text.
    pub created_at: std::string::String,
    /// Update timestamp encoded as RFC3339 UTC text.
    pub updated_at: std::string::String,
}

View File

@@ -23,6 +23,7 @@ mod liquidity_event;
mod observed_token;
mod onchain_observation;
mod pair;
mod pair_analytic_signal;
mod pair_candle;
mod pair_metric;
mod pool;
@@ -85,6 +86,9 @@ pub use onchain_observation::list_recent_onchain_observations;
pub use pair::get_pair_by_pool_id;
pub use pair::list_pairs;
pub use pair::upsert_pair;
pub use pair_analytic_signal::get_pair_analytic_signal_by_key;
pub use pair_analytic_signal::list_pair_analytic_signals_by_pair_id;
pub use pair_analytic_signal::upsert_pair_analytic_signal;
pub use pair_candle::get_pair_candle_by_key;
pub use pair_candle::list_pair_candles_by_pair_and_timeframe;
pub use pair_candle::upsert_pair_candle;

View File

@@ -0,0 +1,196 @@
// file: kb_lib/src/db/queries/pair_analytic_signal.rs
//! Queries for `kb_pair_analytic_signals`.
/// Inserts or updates one pair-analytic-signal row and returns its stable internal id.
pub async fn upsert_pair_analytic_signal(
database: &crate::KbDatabase,
dto: &crate::KbPairAnalyticSignalDto,
) -> Result<i64, crate::KbError> {
let signal_value_json_result = serde_json::to_string(&dto.signal_value);
let signal_value_json = match signal_value_json_result {
Ok(signal_value_json) => signal_value_json,
Err(error) => {
return Err(crate::KbError::Db(format!(
"cannot serialize pair analytic signal payload: {}",
error
)));
}
};
match database.connection() {
crate::KbDatabaseConnection::Sqlite(pool) => {
let query_result = sqlx::query(
r#"
INSERT INTO kb_pair_analytic_signals (
pair_id,
signal_kind,
severity,
timeframe_seconds,
bucket_start_unix,
score,
signal_value_json,
first_transaction_id,
last_transaction_id,
created_at,
updated_at
)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
ON CONFLICT(pair_id, signal_kind, timeframe_seconds, bucket_start_unix) DO UPDATE SET
severity = excluded.severity,
score = excluded.score,
signal_value_json = excluded.signal_value_json,
last_transaction_id = excluded.last_transaction_id,
updated_at = excluded.updated_at
"#,
)
.bind(dto.pair_id)
.bind(dto.signal_kind.clone())
.bind(dto.severity.to_i16())
.bind(dto.timeframe_seconds)
.bind(dto.bucket_start_unix)
.bind(dto.score)
.bind(signal_value_json)
.bind(dto.first_transaction_id)
.bind(dto.last_transaction_id)
.bind(dto.created_at.to_rfc3339())
.bind(dto.updated_at.to_rfc3339())
.execute(pool)
.await;
if let Err(error) = query_result {
return Err(crate::KbError::Db(format!(
"cannot upsert kb_pair_analytic_signals on sqlite: {}",
error
)));
}
let id_result = sqlx::query_scalar::<sqlx::Sqlite, i64>(
r#"
SELECT id
FROM kb_pair_analytic_signals
WHERE pair_id = ? AND signal_kind = ? AND timeframe_seconds = ? AND bucket_start_unix = ?
LIMIT 1
"#,
)
.bind(dto.pair_id)
.bind(dto.signal_kind.clone())
.bind(dto.timeframe_seconds)
.bind(dto.bucket_start_unix)
.fetch_one(pool)
.await;
match id_result {
Ok(id) => Ok(id),
Err(error) => Err(crate::KbError::Db(format!(
"cannot fetch kb_pair_analytic_signals id on sqlite: {}",
error
))),
}
}
}
}
/// Returns one pair-analytic-signal row identified by its key, if it exists.
///
/// The key is `(pair_id, signal_kind, timeframe_seconds, bucket_start_unix)`,
/// matching the table's UNIQUE constraint.
pub async fn get_pair_analytic_signal_by_key(
    database: &crate::KbDatabase,
    pair_id: i64,
    signal_kind: &str,
    timeframe_seconds: i64,
    bucket_start_unix: i64,
) -> Result<std::option::Option<crate::KbPairAnalyticSignalDto>, crate::KbError> {
    match database.connection() {
        crate::KbDatabaseConnection::Sqlite(pool) => {
            let entity_option = sqlx::query_as::<sqlx::Sqlite, crate::KbPairAnalyticSignalEntity>(
                r#"
SELECT
    id,
    pair_id,
    signal_kind,
    severity,
    timeframe_seconds,
    bucket_start_unix,
    score,
    signal_value_json,
    first_transaction_id,
    last_transaction_id,
    created_at,
    updated_at
FROM kb_pair_analytic_signals
WHERE pair_id = ? AND signal_kind = ? AND timeframe_seconds = ? AND bucket_start_unix = ?
LIMIT 1
"#,
            )
            .bind(pair_id)
            .bind(signal_kind)
            .bind(timeframe_seconds)
            .bind(bucket_start_unix)
            .fetch_optional(pool)
            .await
            .map_err(|error| {
                crate::KbError::Db(format!(
                    "cannot read kb_pair_analytic_signals by key on sqlite: {}",
                    error
                ))
            })?;
            // Decode the row if present; a missing row is simply Ok(None).
            entity_option
                .map(crate::KbPairAnalyticSignalDto::try_from)
                .transpose()
        }
    }
}
/// Lists all pair-analytic signals for one pair ordered by key.
///
/// Order: timeframe, then bucket start, then signal kind, then id — all
/// ascending, so the output is deterministic.
pub async fn list_pair_analytic_signals_by_pair_id(
    database: &crate::KbDatabase,
    pair_id: i64,
) -> Result<std::vec::Vec<crate::KbPairAnalyticSignalDto>, crate::KbError> {
    match database.connection() {
        crate::KbDatabaseConnection::Sqlite(pool) => {
            let entities = sqlx::query_as::<sqlx::Sqlite, crate::KbPairAnalyticSignalEntity>(
                r#"
SELECT
    id,
    pair_id,
    signal_kind,
    severity,
    timeframe_seconds,
    bucket_start_unix,
    score,
    signal_value_json,
    first_transaction_id,
    last_transaction_id,
    created_at,
    updated_at
FROM kb_pair_analytic_signals
WHERE pair_id = ?
ORDER BY timeframe_seconds ASC, bucket_start_unix ASC, signal_kind ASC, id ASC
"#,
            )
            .bind(pair_id)
            .fetch_all(pool)
            .await
            .map_err(|error| {
                crate::KbError::Db(format!(
                    "cannot list kb_pair_analytic_signals by pair_id '{}' on sqlite: {}",
                    pair_id, error
                ))
            })?;
            // Decode every row; the first decode failure aborts the listing.
            entities
                .into_iter()
                .map(crate::KbPairAnalyticSignalDto::try_from)
                .collect()
        }
    }
}

View File

@@ -334,6 +334,14 @@ pub(crate) async fn ensure_schema(database: &crate::KbDatabase) -> Result<(), cr
if let Err(error) = result {
return Err(error);
}
let result = create_kb_pair_analytic_signals_table(pool).await;
if let Err(error) = result {
return Err(error);
}
let result = create_kb_idx_pair_analytic_signals_pair_id(pool).await;
if let Err(error) = result {
return Err(error);
}
Ok(())
}
}
@@ -1858,3 +1866,47 @@ ON kb_pair_candles(bucket_start_unix)
)
.await
}
/// Creates the `kb_pair_analytic_signals` table if it does not exist yet.
///
/// One row per `(pair_id, signal_kind, timeframe_seconds, bucket_start_unix)`
/// key, enforced by the UNIQUE constraint the upsert relies on. Rows are
/// deleted with their pair; transaction references are nulled when the
/// referenced transactions disappear.
async fn create_kb_pair_analytic_signals_table(
    pool: &sqlx::SqlitePool,
) -> Result<(), crate::KbError> {
    execute_sqlite_schema_statement(
        pool,
        "create_kb_pair_analytic_signals_table",
        r#"
CREATE TABLE IF NOT EXISTS kb_pair_analytic_signals (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    pair_id INTEGER NOT NULL,
    signal_kind TEXT NOT NULL,
    severity INTEGER NOT NULL,
    timeframe_seconds INTEGER NOT NULL,
    bucket_start_unix INTEGER NOT NULL,
    score REAL NULL,
    signal_value_json TEXT NOT NULL,
    first_transaction_id INTEGER NULL,
    last_transaction_id INTEGER NULL,
    created_at TEXT NOT NULL,
    updated_at TEXT NOT NULL,
    UNIQUE(pair_id, signal_kind, timeframe_seconds, bucket_start_unix),
    FOREIGN KEY(pair_id) REFERENCES kb_pairs(id) ON DELETE CASCADE,
    FOREIGN KEY(first_transaction_id) REFERENCES kb_chain_transactions(id) ON DELETE SET NULL,
    FOREIGN KEY(last_transaction_id) REFERENCES kb_chain_transactions(id) ON DELETE SET NULL
)
"#,
    )
    .await
}
/// Creates the per-pair index on `kb_pair_analytic_signals` if missing.
///
/// Backs the `WHERE pair_id = ?` lookups used by the list and key queries.
async fn create_kb_idx_pair_analytic_signals_pair_id(
    pool: &sqlx::SqlitePool,
) -> Result<(), crate::KbError> {
    execute_sqlite_schema_statement(
        pool,
        "create_kb_idx_pair_analytic_signals_pair_id",
        r#"
CREATE INDEX IF NOT EXISTS kb_idx_pair_analytic_signals_pair_id
ON kb_pair_analytic_signals(pair_id)
"#,
    )
    .await
}

View File

@@ -20,6 +20,7 @@ mod http_client;
mod http_pool;
mod json_rpc_ws;
mod launch_origin;
mod pair_analytic_signal;
mod pair_candle_aggregation;
mod pair_candle_query;
mod pool_origin;
@@ -88,6 +89,8 @@ pub use db::KbObservedTokenEntity;
pub use db::KbObservedTokenStatus;
pub use db::KbOnchainObservationDto;
pub use db::KbOnchainObservationEntity;
pub use db::KbPairAnalyticSignalDto;
pub use db::KbPairAnalyticSignalEntity;
pub use db::KbPairCandleDto;
pub use db::KbPairCandleEntity;
pub use db::KbPairDto;
@@ -134,6 +137,7 @@ pub use db::get_launch_attribution_by_decoded_event_id;
pub use db::get_launch_surface_by_code;
pub use db::get_launch_surface_key_by_match;
pub use db::get_observed_token_by_mint;
pub use db::get_pair_analytic_signal_by_key;
pub use db::get_pair_by_pool_id;
pub use db::get_pair_candle_by_key;
pub use db::get_pair_metric_by_pair_id;
@@ -159,6 +163,7 @@ pub use db::list_launch_attributions_by_pool_id;
pub use db::list_launch_surface_keys_by_surface_id;
pub use db::list_launch_surfaces;
pub use db::list_observed_tokens;
pub use db::list_pair_analytic_signals_by_pair_id;
pub use db::list_pair_candles_by_pair_and_timeframe;
pub use db::list_pair_metrics;
pub use db::list_pairs;
@@ -194,6 +199,7 @@ pub use db::upsert_launch_surface_key;
pub use db::upsert_liquidity_event;
pub use db::upsert_observed_token;
pub use db::upsert_pair;
pub use db::upsert_pair_analytic_signal;
pub use db::upsert_pair_candle;
pub use db::upsert_pair_metric;
pub use db::upsert_pool;
@@ -290,6 +296,8 @@ pub use json_rpc_ws::parse_kb_json_rpc_ws_incoming_text;
pub use json_rpc_ws::parse_kb_json_rpc_ws_incoming_value;
pub use launch_origin::KbLaunchAttributionResult;
pub use launch_origin::KbLaunchOriginService;
pub use pair_analytic_signal::KbPairAnalyticSignalResult;
pub use pair_analytic_signal::KbPairAnalyticSignalService;
pub use pair_candle_aggregation::KbPairCandleAggregationResult;
pub use pair_candle_aggregation::KbPairCandleAggregationService;
pub use pair_candle_query::KbPairCandleQueryService;

View File

@@ -0,0 +1,880 @@
// file: kb_lib/src/pair_analytic_signal.rs
//! Pair analytic signal service.
/// One pair-analytic-signal recording result.
///
/// Summarizes a single persisted signal so callers can report what a
/// transaction produced without re-reading the rows.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct KbPairAnalyticSignalResult {
    /// Related pair id.
    pub pair_id: i64,
    /// Signal kind.
    pub signal_kind: std::string::String,
    /// Timeframe in seconds. Zero means non-bucketed signal.
    pub timeframe_seconds: i64,
    /// Bucket start in unix seconds. Zero means non-bucketed signal.
    pub bucket_start_unix: i64,
    /// Persisted signal id (row id in `kb_pair_analytic_signals`).
    pub pair_analytic_signal_id: i64,
}
/// Pair analytic signal service.
///
/// Derives analytic signals (first trade, trade burst, buy/sell imbalance,
/// price jump, volume spike) from persisted trades and candles, and records
/// them per pair.
#[derive(Debug, Clone)]
pub struct KbPairAnalyticSignalService {
    /// Shared database handle used for all reads and writes.
    database: std::sync::Arc<crate::KbDatabase>,
    /// Generic detection persistence used to record observations and signals.
    persistence: crate::KbDetectionPersistenceService,
}
impl KbPairAnalyticSignalService {
/// Creates a new pair analytic signal service.
pub fn new(database: std::sync::Arc<crate::KbDatabase>) -> Self {
let persistence = crate::KbDetectionPersistenceService::new(database.clone());
Self {
database,
persistence,
}
}
/// Records richer analytic signals for one resolved transaction signature.
pub async fn record_transaction_by_signature(
&self,
signature: &str,
) -> Result<std::vec::Vec<crate::KbPairAnalyticSignalResult>, crate::KbError> {
let transaction_result =
crate::get_chain_transaction_by_signature(self.database.as_ref(), signature).await;
let transaction_option = match transaction_result {
Ok(transaction_option) => transaction_option,
Err(error) => return Err(error),
};
let transaction = match transaction_option {
Some(transaction) => transaction,
None => {
return Err(crate::KbError::InvalidState(format!(
"cannot record pair analytic signals for unknown transaction '{}'",
signature
)));
}
};
let transaction_id = match transaction.id {
Some(transaction_id) => transaction_id,
None => {
return Err(crate::KbError::InvalidState(format!(
"transaction '{}' has no internal id",
signature
)));
}
};
let trade_events_result =
crate::list_trade_events_by_transaction_id(self.database.as_ref(), transaction_id)
.await;
let trade_events = match trade_events_result {
Ok(trade_events) => trade_events,
Err(error) => return Err(error),
};
let mut impacted_pairs =
std::collections::BTreeMap::<i64, std::collections::BTreeSet<i64>>::new();
for trade_event in &trade_events {
let event_time_option = self.trade_event_unix_time(trade_event).await;
let event_time_unix = match event_time_option {
Ok(event_time_option) => event_time_option,
Err(error) => return Err(error),
};
let event_time_unix = match event_time_unix {
Some(event_time_unix) => event_time_unix,
None => continue,
};
let bucket_start_result = kb_bucket_start_unix(event_time_unix, 60);
let bucket_start_unix = match bucket_start_result {
Ok(bucket_start_unix) => bucket_start_unix,
Err(error) => return Err(error),
};
let entry = impacted_pairs
.entry(trade_event.pair_id)
.or_insert_with(std::collections::BTreeSet::<i64>::new);
entry.insert(bucket_start_unix);
}
let mut results = std::vec::Vec::new();
for (pair_id, bucket_starts) in impacted_pairs {
let pair_metric_result =
crate::get_pair_metric_by_pair_id(self.database.as_ref(), pair_id).await;
let pair_metric_option = match pair_metric_result {
Ok(pair_metric_option) => pair_metric_option,
Err(error) => return Err(error),
};
if let Some(pair_metric) = pair_metric_option {
if pair_metric.trade_count == 1 {
let signal_result = self
.upsert_signal(
pair_id,
"first_trade_seen".to_string(),
crate::KbAnalysisSignalSeverity::Low,
0,
0,
Some(1.0),
serde_json::json!({
"pairId": pair_id,
"tradeCount": pair_metric.trade_count,
"lastSignature": pair_metric.last_signature
}),
Some(transaction_id),
)
.await;
let signal_id = match signal_result {
Ok(signal_id) => signal_id,
Err(error) => return Err(error),
};
results.push(crate::KbPairAnalyticSignalResult {
pair_id,
signal_kind: "first_trade_seen".to_string(),
timeframe_seconds: 0,
bucket_start_unix: 0,
pair_analytic_signal_id: signal_id,
});
}
}
for bucket_start_unix in bucket_starts {
let candle_result = crate::get_pair_candle_by_key(
self.database.as_ref(),
pair_id,
60,
bucket_start_unix,
)
.await;
let candle_option = match candle_result {
Ok(candle_option) => candle_option,
Err(error) => return Err(error),
};
let candle = match candle_option {
Some(candle) => candle,
None => continue,
};
let previous_candle_result = crate::get_pair_candle_by_key(
self.database.as_ref(),
pair_id,
60,
bucket_start_unix - 60,
)
.await;
let previous_candle_option = match previous_candle_result {
Ok(previous_candle_option) => previous_candle_option,
Err(error) => return Err(error),
};
let trade_burst_result =
kb_maybe_build_trade_burst_signal(pair_id, &candle, transaction_id);
if let Some(signal) = trade_burst_result {
let signal_id_result = self.persist_signal(signal.clone()).await;
let signal_id = match signal_id_result {
Ok(signal_id) => signal_id,
Err(error) => return Err(error),
};
results.push(crate::KbPairAnalyticSignalResult {
pair_id,
signal_kind: signal.signal_kind,
timeframe_seconds: signal.timeframe_seconds,
bucket_start_unix: signal.bucket_start_unix,
pair_analytic_signal_id: signal_id,
});
}
let imbalance_result =
kb_maybe_build_buy_sell_imbalance_signal(pair_id, &candle, transaction_id);
if let Some(signal) = imbalance_result {
let signal_id_result = self.persist_signal(signal.clone()).await;
let signal_id = match signal_id_result {
Ok(signal_id) => signal_id,
Err(error) => return Err(error),
};
results.push(crate::KbPairAnalyticSignalResult {
pair_id,
signal_kind: signal.signal_kind,
timeframe_seconds: signal.timeframe_seconds,
bucket_start_unix: signal.bucket_start_unix,
pair_analytic_signal_id: signal_id,
});
}
if let Some(previous_candle) = previous_candle_option.clone() {
let jump_result = kb_maybe_build_price_jump_signal(
pair_id,
&previous_candle,
&candle,
transaction_id,
);
if let Some(signal) = jump_result {
let signal_id_result = self.persist_signal(signal.clone()).await;
let signal_id = match signal_id_result {
Ok(signal_id) => signal_id,
Err(error) => return Err(error),
};
results.push(crate::KbPairAnalyticSignalResult {
pair_id,
signal_kind: signal.signal_kind,
timeframe_seconds: signal.timeframe_seconds,
bucket_start_unix: signal.bucket_start_unix,
pair_analytic_signal_id: signal_id,
});
}
let spike_result = kb_maybe_build_volume_spike_signal(
pair_id,
&previous_candle,
&candle,
transaction_id,
);
if let Some(signal) = spike_result {
let signal_id_result = self.persist_signal(signal.clone()).await;
let signal_id = match signal_id_result {
Ok(signal_id) => signal_id,
Err(error) => return Err(error),
};
results.push(crate::KbPairAnalyticSignalResult {
pair_id,
signal_kind: signal.signal_kind,
timeframe_seconds: signal.timeframe_seconds,
bucket_start_unix: signal.bucket_start_unix,
pair_analytic_signal_id: signal_id,
});
}
}
}
}
if !results.is_empty() {
let payload = serde_json::json!({
"transactionSignature": signature,
"pairAnalyticSignalCount": results.len()
});
let observation_result = self
.persistence
.record_observation(&crate::KbDetectionObservationInput::new(
"pair.analytic_signal".to_string(),
crate::KbObservationSourceKind::Dex,
transaction.source_endpoint_name.clone(),
transaction.signature.clone(),
transaction.slot,
payload.clone(),
))
.await;
let observation_id = match observation_result {
Ok(observation_id) => observation_id,
Err(error) => return Err(error),
};
let generic_signal_result = self
.persistence
.record_signal(&crate::KbDetectionSignalInput::new(
"signal.pair.analytic_signal.recorded".to_string(),
crate::KbAnalysisSignalSeverity::Low,
transaction.signature.clone(),
Some(observation_id),
None,
payload,
))
.await;
if let Err(error) = generic_signal_result {
return Err(error);
}
}
Ok(results)
}
async fn trade_event_unix_time(
&self,
trade_event: &crate::KbTradeEventDto,
) -> Result<std::option::Option<i64>, crate::KbError> {
let transaction_result = crate::get_chain_transaction_by_signature(
self.database.as_ref(),
trade_event.signature.as_str(),
)
.await;
let transaction_option = match transaction_result {
Ok(transaction_option) => transaction_option,
Err(error) => return Err(error),
};
let transaction = match transaction_option {
Some(transaction) => transaction,
None => return Ok(Some(trade_event.created_at.timestamp())),
};
match transaction.block_time_unix {
Some(block_time_unix) => Ok(Some(block_time_unix)),
None => Ok(Some(trade_event.created_at.timestamp())),
}
}
async fn upsert_signal(
&self,
pair_id: i64,
signal_kind: std::string::String,
severity: crate::KbAnalysisSignalSeverity,
timeframe_seconds: i64,
bucket_start_unix: i64,
score: std::option::Option<f64>,
signal_value: serde_json::Value,
transaction_id: std::option::Option<i64>,
) -> Result<i64, crate::KbError> {
let existing_result = crate::get_pair_analytic_signal_by_key(
self.database.as_ref(),
pair_id,
signal_kind.as_str(),
timeframe_seconds,
bucket_start_unix,
)
.await;
let existing_option = match existing_result {
Ok(existing_option) => existing_option,
Err(error) => return Err(error),
};
let first_transaction_id = match existing_option {
Some(existing) => existing.first_transaction_id,
None => transaction_id,
};
let dto = crate::KbPairAnalyticSignalDto::new(
pair_id,
signal_kind,
severity,
timeframe_seconds,
bucket_start_unix,
score,
signal_value,
first_transaction_id,
transaction_id,
);
crate::upsert_pair_analytic_signal(self.database.as_ref(), &dto).await
}
async fn persist_signal(
&self,
signal: KbPendingPairAnalyticSignal,
) -> Result<i64, crate::KbError> {
self.upsert_signal(
signal.pair_id,
signal.signal_kind,
signal.severity,
signal.timeframe_seconds,
signal.bucket_start_unix,
signal.score,
signal.signal_value,
signal.transaction_id,
)
.await
}
}
/// One rule-produced signal waiting to be persisted.
#[derive(Debug, Clone)]
struct KbPendingPairAnalyticSignal {
    /// Related pair id.
    pair_id: i64,
    /// Stable signal kind.
    signal_kind: std::string::String,
    /// Signal severity.
    severity: crate::KbAnalysisSignalSeverity,
    /// Timeframe in seconds. Zero means non-bucketed signal.
    timeframe_seconds: i64,
    /// Inclusive bucket start in unix seconds. Zero means non-bucketed signal.
    bucket_start_unix: i64,
    /// Optional numeric score.
    score: std::option::Option<f64>,
    /// Signal value payload.
    signal_value: serde_json::Value,
    /// Transaction id that triggered the rule, if known.
    transaction_id: std::option::Option<i64>,
}
/// Computes the inclusive bucket start for one unix timestamp and timeframe.
///
/// Uses floor (Euclidean) division so that timestamps before the unix epoch
/// still map to the bucket that actually contains them; plain `/` truncates
/// toward zero, which would shift a negative timestamp into the following
/// bucket. For non-negative timestamps the result is unchanged.
///
/// # Errors
/// Returns `KbError::InvalidState` when `timeframe_seconds` is not positive.
fn kb_bucket_start_unix(
    event_time_unix: i64,
    timeframe_seconds: i64,
) -> Result<i64, crate::KbError> {
    if timeframe_seconds <= 0 {
        return Err(crate::KbError::InvalidState(format!(
            "invalid timeframe_seconds '{}'",
            timeframe_seconds
        )));
    }
    Ok(event_time_unix.div_euclid(timeframe_seconds) * timeframe_seconds)
}
/// Builds a `trade_burst_60s` signal when a candle holds at least three trades.
fn kb_maybe_build_trade_burst_signal(
    pair_id: i64,
    candle: &crate::KbPairCandleDto,
    transaction_id: i64,
) -> std::option::Option<KbPendingPairAnalyticSignal> {
    let trade_count = candle.trade_count;
    if trade_count < 3 {
        return None;
    }
    // Five or more trades within one minute is rated a strong burst.
    let severity = match trade_count {
        count if count >= 5 => crate::KbAnalysisSignalSeverity::High,
        _ => crate::KbAnalysisSignalSeverity::Medium,
    };
    Some(KbPendingPairAnalyticSignal {
        pair_id,
        signal_kind: "trade_burst_60s".to_string(),
        severity,
        timeframe_seconds: 60,
        bucket_start_unix: candle.bucket_start_unix,
        score: Some(trade_count as f64),
        signal_value: serde_json::json!({
            "pairId": pair_id,
            "tradeCount": trade_count,
            "buyCount": candle.buy_count,
            "sellCount": candle.sell_count,
            "bucketStartUnix": candle.bucket_start_unix
        }),
        transaction_id: Some(transaction_id),
    })
}
/// Builds a `buy_sell_imbalance_60s` signal when one side dominates a candle.
fn kb_maybe_build_buy_sell_imbalance_signal(
    pair_id: i64,
    candle: &crate::KbPairCandleDto,
    transaction_id: i64,
) -> std::option::Option<KbPendingPairAnalyticSignal> {
    let trade_count = candle.trade_count;
    if trade_count < 3 {
        return None;
    }
    // Share of trades by which one side outnumbers the other.
    let imbalance = (candle.buy_count - candle.sell_count).abs() as f64 / trade_count as f64;
    if imbalance < 0.75 {
        return None;
    }
    let severity = if imbalance >= 0.9 {
        crate::KbAnalysisSignalSeverity::High
    } else {
        crate::KbAnalysisSignalSeverity::Medium
    };
    Some(KbPendingPairAnalyticSignal {
        pair_id,
        signal_kind: "buy_sell_imbalance_60s".to_string(),
        severity,
        timeframe_seconds: 60,
        bucket_start_unix: candle.bucket_start_unix,
        score: Some(imbalance),
        signal_value: serde_json::json!({
            "pairId": pair_id,
            "tradeCount": trade_count,
            "buyCount": candle.buy_count,
            "sellCount": candle.sell_count,
            "imbalanceRatio": imbalance,
            "bucketStartUnix": candle.bucket_start_unix
        }),
        transaction_id: Some(transaction_id),
    })
}
/// Builds a `price_jump_up_60s` or `price_jump_down_60s` signal when the close
/// price moved at least 20% between two consecutive 60-second candles.
fn kb_maybe_build_price_jump_signal(
    pair_id: i64,
    previous_candle: &crate::KbPairCandleDto,
    candle: &crate::KbPairCandleDto,
    transaction_id: i64,
) -> std::option::Option<KbPendingPairAnalyticSignal> {
    let previous_close = previous_candle.close_price_quote_per_base;
    // A non-positive previous close makes the ratio meaningless.
    if previous_close <= 0.0 {
        return None;
    }
    let delta_ratio = candle.close_price_quote_per_base / previous_close - 1.0;
    let magnitude = delta_ratio.abs();
    // Negated >= form so a NaN delta is rejected, matching the original pair
    // of one-sided comparisons.
    if !(magnitude >= 0.2) {
        return None;
    }
    let signal_kind = if delta_ratio > 0.0 {
        "price_jump_up_60s"
    } else {
        "price_jump_down_60s"
    };
    // A 50% move within one minute is rated high in either direction.
    let severity = if magnitude >= 0.5 {
        crate::KbAnalysisSignalSeverity::High
    } else {
        crate::KbAnalysisSignalSeverity::Medium
    };
    Some(KbPendingPairAnalyticSignal {
        pair_id,
        signal_kind: signal_kind.to_string(),
        severity,
        timeframe_seconds: 60,
        bucket_start_unix: candle.bucket_start_unix,
        score: Some(magnitude),
        signal_value: serde_json::json!({
            "pairId": pair_id,
            "previousClose": previous_close,
            "close": candle.close_price_quote_per_base,
            "deltaRatio": delta_ratio,
            "bucketStartUnix": candle.bucket_start_unix
        }),
        transaction_id: Some(transaction_id),
    })
}
/// Builds a `volume_spike_60s` signal when the quote volume at least doubled
/// between two consecutive 60-second candles.
fn kb_maybe_build_volume_spike_signal(
    pair_id: i64,
    previous_candle: &crate::KbPairCandleDto,
    candle: &crate::KbPairCandleDto,
    transaction_id: i64,
) -> std::option::Option<KbPendingPairAnalyticSignal> {
    // Both candles must carry a raw quote volume; `?` bails out on None.
    let previous_quote_text = previous_candle.quote_volume_raw.clone()?;
    let current_quote_text = candle.quote_volume_raw.clone()?;
    // Unparsable volumes silently produce no signal, as before.
    let previous_quote = previous_quote_text.parse::<f64>().ok()?;
    if previous_quote <= 0.0 {
        return None;
    }
    let current_quote = current_quote_text.parse::<f64>().ok()?;
    let spike_ratio = current_quote / previous_quote;
    if spike_ratio < 2.0 {
        return None;
    }
    // Tripling the volume within one minute is rated high.
    let severity = if spike_ratio >= 3.0 {
        crate::KbAnalysisSignalSeverity::High
    } else {
        crate::KbAnalysisSignalSeverity::Medium
    };
    Some(KbPendingPairAnalyticSignal {
        pair_id,
        signal_kind: "volume_spike_60s".to_string(),
        severity,
        timeframe_seconds: 60,
        bucket_start_unix: candle.bucket_start_unix,
        score: Some(spike_ratio),
        signal_value: serde_json::json!({
            "pairId": pair_id,
            "previousQuoteVolumeRaw": previous_quote_text,
            "quoteVolumeRaw": current_quote_text,
            "spikeRatio": spike_ratio,
            "bucketStartUnix": candle.bucket_start_unix
        }),
        transaction_id: Some(transaction_id),
    })
}
#[cfg(test)]
mod tests {
    // Integration-style tests for the pair analytic signal service. Each test
    // seeds a fresh SQLite database with synthetic Fluxbeam swaps, pushes them
    // through the projection -> decode -> detect -> trade aggregation ->
    // candle aggregation pipeline, and then asserts on the analytic signals
    // derived from the resulting 60s pair candles.

    /// Creates an isolated, schema-initialized SQLite database for one test.
    ///
    /// NOTE(review): the `TempDir` guard is dropped when this function
    /// returns, which deletes the directory. The already-open connection
    /// presumably keeps working via its open file handle on Unix, but any
    /// later reopen of the path would fail — confirm this is intended.
    async fn make_database() -> std::sync::Arc<crate::KbDatabase> {
        let tempdir_result = tempfile::tempdir();
        let tempdir = match tempdir_result {
            Ok(tempdir) => tempdir,
            Err(error) => panic!("tempdir must succeed: {}", error),
        };
        let database_path = tempdir.path().join("pair_analytic_signal.sqlite3");
        let config = crate::KbDatabaseConfig {
            enabled: true,
            backend: crate::KbDatabaseBackend::Sqlite,
            sqlite: crate::KbSqliteDatabaseConfig {
                path: database_path.to_string_lossy().to_string(),
                create_if_missing: true,
                busy_timeout_ms: 5000,
                // A single connection keeps test database access serialized.
                max_connections: 1,
                auto_initialize_schema: true,
                use_wal: true,
            },
        };
        let database_result = crate::KbDatabase::connect_and_initialize(&config).await;
        let database = match database_result {
            Ok(database) => database,
            Err(error) => panic!("database init must succeed: {}", error),
        };
        std::sync::Arc::new(database)
    }

    /// Persists one synthetic Fluxbeam swap on pool `AnalyticPool111` (quoted
    /// in wrapped SOL) and runs every upstream stage the analytic signal
    /// service depends on: transaction projection, DEX decode, DEX detect,
    /// trade aggregation, and pair candle aggregation.
    ///
    /// `buy_side` only controls whether "buy" or "sell" appears in the
    /// transaction's log messages; `quote_amount_raw` feeds the candle's
    /// quote volume. Panics on any stage failure so tests fail loudly.
    async fn seed_fluxbeam_swap_transaction(
        database: std::sync::Arc<crate::KbDatabase>,
        signature: &str,
        block_time_unix: i64,
        quote_amount_raw: &str,
        buy_side: bool,
    ) {
        let transaction_model = crate::KbTransactionModelService::new(database.clone());
        let dex_decode = crate::KbDexDecodeService::new(database.clone());
        let dex_detect = crate::KbDexDetectService::new(database.clone());
        let trade_aggregation = crate::KbTradeAggregationService::new(database.clone());
        let pair_candle_aggregation = crate::KbPairCandleAggregationService::new(database.clone());
        let log_side = if buy_side { "buy" } else { "sell" };
        // Minimal resolved-transaction JSON shaped like an RPC response with a
        // single parsed Fluxbeam swap instruction.
        let resolved_transaction = serde_json::json!({
            "slot": 970001,
            "blockTime": block_time_unix,
            "version": 0,
            "transaction": {
                "message": {
                    "instructions": [
                        {
                            "programId": crate::KB_FLUXBEAM_PROGRAM_ID,
                            "program": "fluxbeam",
                            "stackHeight": 1,
                            "accounts": [
                                "AnalyticPool111",
                                "AnalyticLpMint111",
                                "AnalyticTokenA111",
                                "So11111111111111111111111111111111111111112"
                            ],
                            "parsed": {
                                "info": {
                                    "instruction": "swap",
                                    "pool": "AnalyticPool111",
                                    "tokenA": "AnalyticTokenA111",
                                    "tokenB": "So11111111111111111111111111111111111111112",
                                    "baseAmountRaw": "1000",
                                    "quoteAmountRaw": quote_amount_raw
                                }
                            },
                            "data": "opaque"
                        }
                    ]
                }
            },
            "meta": {
                "err": null,
                "logMessages": [
                    "Program log: Instruction: Swap",
                    format!("Program log: {}", log_side)
                ]
            }
        });
        let project_result = transaction_model
            .persist_resolved_transaction(
                signature,
                Some("helius_primary_http".to_string()),
                &resolved_transaction,
            )
            .await;
        if let Err(error) = project_result {
            panic!("projection must succeed: {}", error);
        }
        let decode_result = dex_decode.decode_transaction_by_signature(signature).await;
        if let Err(error) = decode_result {
            panic!("dex decode must succeed: {}", error);
        }
        let detect_result = dex_detect.detect_transaction_by_signature(signature).await;
        if let Err(error) = detect_result {
            panic!("dex detect must succeed: {}", error);
        }
        let trade_result = trade_aggregation
            .record_transaction_by_signature(signature)
            .await;
        if let Err(error) = trade_result {
            panic!("trade aggregation must succeed: {}", error);
        }
        let candle_result = pair_candle_aggregation
            .record_transaction_by_signature(signature)
            .await;
        if let Err(error) = candle_result {
            panic!("pair candle aggregation must succeed: {}", error);
        }
    }

    /// A single seeded swap should yield exactly one signal for the pair:
    /// `first_trade_seen` at Low severity.
    #[tokio::test]
    async fn record_transaction_by_signature_creates_first_trade_signal() {
        let database = make_database().await;
        seed_fluxbeam_swap_transaction(
            database.clone(),
            "sig-analytic-first-trade",
            1_700_020_000,
            "2000",
            true,
        )
        .await;
        let service = crate::KbPairAnalyticSignalService::new(database.clone());
        let record_result = service
            .record_transaction_by_signature("sig-analytic-first-trade")
            .await;
        let results = match record_result {
            Ok(results) => results,
            Err(error) => panic!("analytic signal service must succeed: {}", error),
        };
        // Resolve the single pool/pair the seed created so we can list its
        // signals by pair id.
        let pools_result = crate::list_pools(database.as_ref()).await;
        let pools = match pools_result {
            Ok(pools) => pools,
            Err(error) => panic!("pool list must succeed: {}", error),
        };
        let pool_id = pools[0].id.unwrap_or_default();
        let pair_result = crate::get_pair_by_pool_id(database.as_ref(), pool_id).await;
        let pair_option = match pair_result {
            Ok(pair_option) => pair_option,
            Err(error) => panic!("pair fetch must succeed: {}", error),
        };
        let pair = match pair_option {
            Some(pair) => pair,
            None => panic!("pair must exist"),
        };
        let pair_id = pair.id.unwrap_or_default();
        let signals_result =
            crate::list_pair_analytic_signals_by_pair_id(database.as_ref(), pair_id).await;
        let signals = match signals_result {
            Ok(signals) => signals,
            Err(error) => panic!("signal list must succeed: {}", error),
        };
        assert_eq!(signals.len(), 1);
        assert_eq!(signals[0].signal_kind, "first_trade_seen".to_string());
        assert_eq!(signals[0].severity, crate::KbAnalysisSignalSeverity::Low);
        assert!(!results.is_empty());
    }

    /// Seeds four swaps inside one 60s bucket plus a fifth, much larger swap
    /// in the following bucket, then verifies the bucketed signal kinds
    /// (trade burst, buy/sell imbalance, price jump up, volume spike) exist
    /// and that re-processing the same signature is idempotent (no error,
    /// still returns results).
    #[tokio::test]
    async fn record_transaction_by_signature_creates_bucketed_signals_and_is_idempotent() {
        let database = make_database().await;
        // Four buys within 1_700_021_000..=1_700_021_030 — one 60s bucket.
        seed_fluxbeam_swap_transaction(
            database.clone(),
            "sig-analytic-bucket-1",
            1_700_021_000,
            "1000",
            true,
        )
        .await;
        seed_fluxbeam_swap_transaction(
            database.clone(),
            "sig-analytic-bucket-2",
            1_700_021_010,
            "1100",
            true,
        )
        .await;
        seed_fluxbeam_swap_transaction(
            database.clone(),
            "sig-analytic-bucket-3",
            1_700_021_020,
            "1200",
            true,
        )
        .await;
        seed_fluxbeam_swap_transaction(
            database.clone(),
            "sig-analytic-bucket-4",
            1_700_021_030,
            "1300",
            true,
        )
        .await;
        // Fifth swap lands in the next bucket with ~11.5x the prior quote
        // volume, which should trip the spike/jump detectors.
        seed_fluxbeam_swap_transaction(
            database.clone(),
            "sig-analytic-bucket-5",
            1_700_021_070,
            "15000",
            true,
        )
        .await;
        let service = crate::KbPairAnalyticSignalService::new(database.clone());
        let record_1 = service
            .record_transaction_by_signature("sig-analytic-bucket-1")
            .await;
        if let Err(error) = record_1 {
            panic!("analytic signal service call 1 must succeed: {}", error);
        }
        let record_2 = service
            .record_transaction_by_signature("sig-analytic-bucket-2")
            .await;
        if let Err(error) = record_2 {
            panic!("analytic signal service call 2 must succeed: {}", error);
        }
        let record_3 = service
            .record_transaction_by_signature("sig-analytic-bucket-3")
            .await;
        if let Err(error) = record_3 {
            panic!("analytic signal service call 3 must succeed: {}", error);
        }
        let record_4 = service
            .record_transaction_by_signature("sig-analytic-bucket-4")
            .await;
        if let Err(error) = record_4 {
            panic!("analytic signal service call 4 must succeed: {}", error);
        }
        let record_5 = service
            .record_transaction_by_signature("sig-analytic-bucket-5")
            .await;
        let results = match record_5 {
            Ok(results) => results,
            Err(error) => panic!("analytic signal service call 5 must succeed: {}", error),
        };
        assert!(!results.is_empty());
        // Idempotency: processing the same signature again must succeed and
        // still report results rather than erroring or going empty.
        let second_result = service
            .record_transaction_by_signature("sig-analytic-bucket-5")
            .await;
        let second_results = match second_result {
            Ok(second_results) => second_results,
            Err(error) => panic!(
                "second analytic signal service call must succeed: {}",
                error
            ),
        };
        assert!(!second_results.is_empty());
        let pools_result = crate::list_pools(database.as_ref()).await;
        let pools = match pools_result {
            Ok(pools) => pools,
            Err(error) => panic!("pool list must succeed: {}", error),
        };
        let pool_id = pools[0].id.unwrap_or_default();
        let pair_result = crate::get_pair_by_pool_id(database.as_ref(), pool_id).await;
        let pair_option = match pair_result {
            Ok(pair_option) => pair_option,
            Err(error) => panic!("pair fetch must succeed: {}", error),
        };
        let pair = match pair_option {
            Some(pair) => pair,
            None => panic!("pair must exist"),
        };
        let pair_id = pair.id.unwrap_or_default();
        let signals_result =
            crate::list_pair_analytic_signals_by_pair_id(database.as_ref(), pair_id).await;
        let signals = match signals_result {
            Ok(signals) => signals,
            Err(error) => panic!("signal list must succeed: {}", error),
        };
        // Collect distinct signal kinds; exact counts are not asserted here.
        let mut signal_kinds = std::collections::BTreeSet::<std::string::String>::new();
        for signal in &signals {
            signal_kinds.insert(signal.signal_kind.clone());
        }
        assert!(signal_kinds.contains("trade_burst_60s"));
        assert!(signal_kinds.contains("buy_sell_imbalance_60s"));
        assert!(signal_kinds.contains("price_jump_up_60s"));
        assert!(signal_kinds.contains("volume_spike_60s"));
        // 1_700_020_980 is presumably the epoch-aligned 60s bucket start
        // covering the first four swaps — it matches floor(t / 60) * 60.
        let trade_burst_result = crate::get_pair_analytic_signal_by_key(
            database.as_ref(),
            pair_id,
            "trade_burst_60s",
            60,
            1_700_020_980,
        )
        .await;
        let trade_burst_option = match trade_burst_result {
            Ok(trade_burst_option) => trade_burst_option,
            Err(error) => panic!("trade burst fetch must succeed: {}", error),
        };
        let trade_burst = match trade_burst_option {
            Some(trade_burst) => trade_burst,
            None => panic!("trade burst signal must exist"),
        };
        assert_eq!(
            trade_burst.severity,
            crate::KbAnalysisSignalSeverity::Medium
        );
        // 1_700_021_040 is the following bucket, containing the fifth swap.
        let jump_result = crate::get_pair_analytic_signal_by_key(
            database.as_ref(),
            pair_id,
            "price_jump_up_60s",
            60,
            1_700_021_040,
        )
        .await;
        let jump_option = match jump_result {
            Ok(jump_option) => jump_option,
            Err(error) => panic!("jump fetch must succeed: {}", error),
        };
        let jump = match jump_option {
            Some(jump) => jump,
            None => panic!("jump signal must exist"),
        };
        // NOTE(review): `> 1.0 || > 0.2` is logically equivalent to `> 0.2`;
        // the first disjunct is redundant — possibly a leftover from a
        // stricter expectation. Confirm which threshold was intended.
        assert!(jump.score.unwrap_or_default() > 1.0 || jump.score.unwrap_or_default() > 0.2);
    }
}

View File

@@ -108,6 +108,7 @@ pub struct KbTransactionResolutionService {
trade_aggregation_service: crate::KbTradeAggregationService,
wallet_holding_observation_service: crate::KbWalletHoldingObservationService,
pair_candle_aggregation_service: crate::KbPairCandleAggregationService,
pair_analytic_signal_service: crate::KbPairAnalyticSignalService,
resolved_signatures:
std::sync::Arc<tokio::sync::Mutex<std::collections::HashSet<std::string::String>>>,
}
@@ -131,6 +132,8 @@ impl KbTransactionResolutionService {
crate::KbWalletHoldingObservationService::new(database.clone());
let pair_candle_aggregation_service =
crate::KbPairCandleAggregationService::new(database.clone());
let pair_analytic_signal_service =
crate::KbPairAnalyticSignalService::new(database.clone());
Self {
http_pool,
persistence,
@@ -144,6 +147,7 @@ impl KbTransactionResolutionService {
trade_aggregation_service,
wallet_holding_observation_service,
pair_candle_aggregation_service,
pair_analytic_signal_service,
resolved_signatures: std::sync::Arc::new(tokio::sync::Mutex::new(
std::collections::HashSet::new(),
)),
@@ -388,11 +392,21 @@ impl KbTransactionResolutionService {
Err(error) => return Err(error),
};
let pair_candle_count = pair_candle_aggregations.len();
let pair_analytic_signals_result = self
.pair_analytic_signal_service
.record_transaction_by_signature(request.signature.as_str())
.await;
let pair_analytic_signals = match pair_analytic_signals_result {
Ok(pair_analytic_signals) => pair_analytic_signals,
Err(error) => return Err(error),
};
let pair_analytic_signal_count = pair_analytic_signals.len();
let payload = serde_json::json!({
"status": "resolved",
"signature": request.signature.clone(),
"triggerMethod": request.trigger_method.clone(),
"sourceEndpointName": request.source_endpoint_name.clone(),
"triggerPayload": request.trigger_payload.clone(),
"slotHint": request.slot_hint,
"projectedTransactionId": projected_transaction_id,
"decodedEventCount": decoded_event_count,
@@ -403,7 +417,7 @@ impl KbTransactionResolutionService {
"walletHoldingCount": wallet_holding_count,
"tradeEventCount": trade_event_count,
"pairCandleCount": pair_candle_count,
"triggerPayload": request.trigger_payload.clone(),
"pairAnalyticSignalCount": pair_analytic_signal_count,
"transaction": transaction_value
});
let observation_id_result = self