4 changes: 4 additions & 0 deletions CHANGELOG.md
@@ -1,5 +1,9 @@
# Release Notes

## [2.0.3] - 2025-12-03
- **Scoring**: Fill missing predictions with 0.5 for miners without predictions
- **Architecture**: Sandbox retry mechanism and error handling with comprehensive log exports

## [2.0.2] - 2025-11-30
- **Validator**: Implement burn mechanism - UID 239 receives 80% of emissions, remaining miners share 20%

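Note on the two scoring-related entries above: the diff only touches the changelog, so the sketch below is purely illustrative and not the subnet's actual scoring or emission code. The function names and the assumption that the remaining 20% is shared equally (rather than by miner weight) are mine.

# Illustrative sketch only; not the validator's real implementation.
BURN_UID = 239       # per the 2.0.2 entry, UID 239 receives 80% of emissions
BURN_SHARE = 0.80
MINER_SHARE = 0.20
FILL_VALUE = 0.5     # per the 2.0.3 entry, miners without predictions get 0.5


def fill_missing_predictions(predictions: dict[int, float], miner_uids: list[int]) -> dict[int, float]:
    """Every miner gets a prediction; missing ones are filled with 0.5."""
    return {uid: predictions.get(uid, FILL_VALUE) for uid in miner_uids}


def split_emissions(total: float, miner_uids: list[int]) -> dict[int, float]:
    """80% of emissions to the burn UID; the rest shared (here: equally)."""
    others = [uid for uid in miner_uids if uid != BURN_UID]
    shares = {BURN_UID: total * BURN_SHARE}
    for uid in others:
        shares[uid] = total * MINER_SHARE / len(others)
    return shares


if __name__ == "__main__":
    print(fill_missing_predictions({1: 0.9}, [1, 2, 239]))  # {1: 0.9, 2: 0.5, 239: 0.5}
    print(split_emissions(1.0, [1, 2, 239]))                 # {239: 0.8, 1: 0.1, 2: 0.1}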
28 changes: 20 additions & 8 deletions neurons/miner/scripts/fetch_agent_logs.py
@@ -211,22 +211,34 @@ def fetch_logs(

    # Add all metadata fields
    meta_table.add_row("Event ID", str(metadata.get("event_id", "N/A")))
    meta_table.add_row("Market Type", str(metadata.get("market_type", "N/A")))
    meta_table.add_row("Validator UID", str(metadata.get("vali_uid", "N/A")))

    validator_hotkey = str(metadata.get("vali_hotkey", "N/A"))
    if len(validator_hotkey) > 50:
        validator_hotkey = validator_hotkey[:16] + "..." + validator_hotkey[-8:]
    meta_table.add_row("Validator Hotkey", validator_hotkey)

    meta_table.add_row(
        "Interval Prediction", str(metadata.get("interval_agg_prediction", "N/A"))
    )
    meta_table.add_row(
        "Interval DateTime", str(metadata.get("interval_datetime", "N/A"))
    )
    meta_table.add_row("Submitted At", str(metadata.get("submitted_at", "N/A")))
    meta_table.add_row("Version ID", str(metadata.get("version_id", "N/A")))
    meta_table.add_row("Status", str(metadata.get("status", "N/A")))
    meta_table.add_row("Is Final", str(metadata.get("is_final", "N/A")))

    # Add prediction fields if available
    prediction = metadata.get("prediction")
    if prediction:
        meta_table.add_row(
            "Interval Prediction",
            str(prediction.get("interval_agg_prediction", "N/A")),
        )
        meta_table.add_row(
            "Interval DateTime", str(prediction.get("interval_datetime", "N/A"))
        )
        meta_table.add_row(
            "Submitted At", str(prediction.get("submitted_at", "N/A"))
        )
    else:
        meta_table.add_row("Interval Prediction", "N/A")
        meta_table.add_row("Interval DateTime", "N/A")
        meta_table.add_row("Submitted At", "N/A")

    console.print(
        Panel.fit(meta_table, title="📊 Run Metadata", border_style="cyan")
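The fetch_agent_logs.py hunk above now reads the prediction fields from a nested "prediction" object and falls back to "N/A" when it is absent. A hypothetical metadata payload (every value below is invented for illustration) showing the two shapes the display code handles:

# Hypothetical payloads, not taken from the PR.
metadata_with_prediction = {
    "event_id": "evt-123",
    "market_type": "BINARY",
    "vali_uid": 12,
    "vali_hotkey": "5Fexamplehotkeyvalue",
    "version_id": "v42",
    "status": "SUCCESS",
    "is_final": True,
    "prediction": {
        "interval_agg_prediction": 0.73,
        "interval_datetime": "2025-12-03T00:00:00Z",
        "submitted_at": "2025-12-02T23:58:11Z",
    },
}

# Same run before any prediction was stored: the Interval Prediction,
# Interval DateTime, and Submitted At rows all render as "N/A".
metadata_without_prediction = {**metadata_with_prediction, "prediction": None}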
@@ -0,0 +1,48 @@
"""Add agent_run_logs table

Revision ID: 0fc1f13544dc
Revises: 40606aaa49f9
Create Date: 2025-12-02 15:12:42.000000

"""

from typing import Sequence, Union

from alembic import op

# revision identifiers, used by Alembic.
revision: str = "0fc1f13544dc"
down_revision: Union[str, None] = "40606aaa49f9"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    op.execute(
        """
        CREATE TABLE IF NOT EXISTS agent_run_logs (
            run_id TEXT PRIMARY KEY,
            log_content TEXT NOT NULL,
            exported INTEGER DEFAULT 0,
            created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
            updated_at DATETIME DEFAULT CURRENT_TIMESTAMP,
            FOREIGN KEY (run_id) REFERENCES agent_runs(run_id)
        )
        """
    )

    op.execute(
        """
        CREATE INDEX idx_agent_run_logs_exported ON agent_run_logs(exported) WHERE exported = 0
        """
    )

    op.execute(
        """
        CREATE INDEX idx_agent_run_logs_created_at ON agent_run_logs(created_at)
        """
    )


def downgrade() -> None:
    pass
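The exported flag plus the partial index (WHERE exported = 0) suggests a poll-then-mark export loop. A sketch of such a consumer using plain sqlite3; the database path and the export destination are assumptions, not part of this migration:

# Sketch of a consumer for the partial index above; "validator.db" and the
# export step are assumed, not defined by this PR.
import sqlite3


def export_pending_logs(db_path: str = "validator.db", batch_size: int = 100) -> int:
    conn = sqlite3.connect(db_path)
    try:
        # The WHERE exported = 0 predicate lets SQLite answer this query from
        # the partial index idx_agent_run_logs_exported.
        rows = conn.execute(
            "SELECT run_id, log_content FROM agent_run_logs "
            "WHERE exported = 0 LIMIT ?",
            (batch_size,),
        ).fetchall()
        for run_id, log_content in rows:
            # ... ship log_content to wherever logs are exported ...
            conn.execute(
                "UPDATE agent_run_logs "
                "SET exported = 1, updated_at = CURRENT_TIMESTAMP "
                "WHERE run_id = ?",
                (run_id,),
            )
        conn.commit()
        return len(rows)
    finally:
        conn.close()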
@@ -0,0 +1,54 @@
"""Backfill agent_runs from predictions

Revision ID: e330661ab24a
Revises: 0fc1f13544dc
Create Date: 2025-11-28 15:32:05.000000

"""

from typing import Sequence, Union

from alembic import op

# revision identifiers, used by Alembic.
revision: str = "e330661ab24a"
down_revision: Union[str, None] = "0fc1f13544dc"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    op.execute(
        """
        INSERT OR IGNORE INTO agent_runs (
            run_id,
            unique_event_id,
            agent_version_id,
            miner_uid,
            miner_hotkey,
            status,
            exported,
            is_final,
            created_at,
            updated_at
        )
        SELECT
            run_id,
            unique_event_id,
            version_id,
            miner_uid,
            miner_hotkey,
            'SUCCESS',
            0,
            1,
            submitted,
            submitted
        FROM predictions
        WHERE run_id IS NOT NULL
            AND version_id IS NOT NULL
        """
    )


def downgrade() -> None:
    pass
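Because the backfill uses INSERT OR IGNORE keyed on run_id, re-running it is a no-op for rows that already exist. A small sanity check one could run after the migration; the database path is an assumption:

# Hedged sanity check for the backfill above; "validator.db" is an assumed path.
import sqlite3


def backfill_counts(db_path: str = "validator.db") -> tuple[int, int]:
    conn = sqlite3.connect(db_path)
    try:
        distinct_runs_in_predictions = conn.execute(
            "SELECT COUNT(DISTINCT run_id) FROM predictions "
            "WHERE run_id IS NOT NULL AND version_id IS NOT NULL"
        ).fetchone()[0]
        rows_in_agent_runs = conn.execute(
            "SELECT COUNT(*) FROM agent_runs"
        ).fetchone()[0]
        return distinct_runs_in_predictions, rows_in_agent_runs
    finally:
        conn.close()


# Every distinct run_id among eligible predictions should now have a row in
# agent_runs (agent_runs may also contain runs created outside the backfill).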