Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
19 changes: 15 additions & 4 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -137,6 +137,9 @@ export ANTHROPIC_API_KEY="your_anthropic_api_key_here"
# For Qwen (DashScope, based in Singapore — delays may occur)
export DASHSCOPE_API_KEY="your_dashscope_api_key_here"

# For MiniMax (204K context, OpenAI-compatible API)
export MINIMAX_API_KEY="your_minimax_api_key_here"

```


Expand Down Expand Up @@ -216,14 +219,21 @@ elif provider == "qwen":
analyzer.config["agent_llm_model"] = "qwen3-max"
if not analyzer.config["graph_llm_model"].startswith("qwen"):
analyzer.config["graph_llm_model"] = "qwen3-vl-plus"


elif provider == "minimax":
# Set default MiniMax models (204K context window)
if not analyzer.config["agent_llm_model"].startswith("MiniMax"):
analyzer.config["agent_llm_model"] = "MiniMax-M2.7"
if not analyzer.config["graph_llm_model"].startswith("MiniMax"):
analyzer.config["graph_llm_model"] = "MiniMax-M2.7"

else:
# Set default OpenAI models if not already set to OpenAI models
if analyzer.config["agent_llm_model"].startswith(("claude", "qwen")):
if analyzer.config["agent_llm_model"].startswith(("claude", "qwen", "MiniMax")):
analyzer.config["agent_llm_model"] = "gpt-4o-mini"
if analyzer.config["graph_llm_model"].startswith(("claude", "qwen")):
if analyzer.config["graph_llm_model"].startswith(("claude", "qwen", "MiniMax")):
analyzer.config["graph_llm_model"] = "gpt-4o"

```

For live data, we recommend using the web interface as it provides access to real-time market data through yfinance. The system automatically fetches the most recent 30 candlesticks for optimal LLM analysis accuracy.
Expand Down Expand Up @@ -272,6 +282,7 @@ This repository was built with the help of the following libraries and framework
- [**OpenAI**](https://github.com/openai/openai-python)
- [**Anthropic (Claude)**](https://github.com/anthropics/anthropic-sdk-python)
- [**Qwen**](https://github.com/QwenLM/Qwen)
- [**MiniMax**](https://platform.minimaxi.com/) — 204K context, OpenAI-compatible API
- [**yfinance**](https://github.com/ranaroussi/yfinance)
- [**Flask**](https://github.com/pallets/flask)
- [**TechnicalAnalysisAutomation**](https://github.com/neurotrader888/TechnicalAnalysisAutomation/tree/main)
Expand Down
17 changes: 14 additions & 3 deletions README_CN.md
Original file line number Diff line number Diff line change
Expand Up @@ -133,6 +133,9 @@ export ANTHROPIC_API_KEY="your_anthropic_api_key_here"
# For Qwen (DashScope, based in Singapore — delays may occur)
export DASHSCOPE_API_KEY="your_dashscope_api_key_here"

# For MiniMax (204K context, OpenAI-compatible API)
export MINIMAX_API_KEY="your_minimax_api_key_here"

```

## 🔧 实现细节
Expand Down Expand Up @@ -184,12 +187,19 @@ elif provider == "qwen":
analyzer.config["agent_llm_model"] = "qwen3-max"
if not analyzer.config["graph_llm_model"].startswith("qwen"):
analyzer.config["graph_llm_model"] = "qwen3-vl-plus"


elif provider == "minimax":
# Set default MiniMax models (204K context window)
if not analyzer.config["agent_llm_model"].startswith("MiniMax"):
analyzer.config["agent_llm_model"] = "MiniMax-M2.7"
if not analyzer.config["graph_llm_model"].startswith("MiniMax"):
analyzer.config["graph_llm_model"] = "MiniMax-M2.7"

else:
# Set default OpenAI models if not already set to OpenAI models
if analyzer.config["agent_llm_model"].startswith(("claude", "qwen")):
if analyzer.config["agent_llm_model"].startswith(("claude", "qwen", "MiniMax")):
analyzer.config["agent_llm_model"] = "gpt-4o-mini"
if analyzer.config["graph_llm_model"].startswith(("claude", "qwen")):
if analyzer.config["graph_llm_model"].startswith(("claude", "qwen", "MiniMax")):
analyzer.config["graph_llm_model"] = "gpt-4o"

```
Expand Down Expand Up @@ -263,6 +273,7 @@ python web_interface.py
- [**OpenAI**](https://github.com/openai/openai-python)
- [**Anthropic (Claude)**](https://github.com/anthropics/anthropic-sdk-python)
- [**Qwen**](https://github.com/QwenLM/Qwen)
- [**MiniMax**](https://platform.minimaxi.com/) — 204K context, OpenAI-compatible API
- [**yfinance**](https://github.com/ranaroussi/yfinance)
- [**Flask**](https://github.com/pallets/flask)
- [**TechnicalAnalysisAutomation**](https://github.com/neurotrader888/TechnicalAnalysisAutomation/tree/main)
Expand Down
5 changes: 3 additions & 2 deletions default_config.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,12 @@
DEFAULT_CONFIG = {
"agent_llm_model": "gpt-4o-mini",
"graph_llm_model": "gpt-4o",
"agent_llm_provider": "openai", # "openai", "anthropic", or "qwen"
"graph_llm_provider": "openai", # "openai", "anthropic", or "qwen"
"agent_llm_provider": "openai", # "openai", "anthropic", "qwen", or "minimax"
"graph_llm_provider": "openai", # "openai", "anthropic", "qwen", or "minimax"
"agent_llm_temperature": 0.1,
"graph_llm_temperature": 0.1,
"api_key": "sk-", # OpenAI API key
"anthropic_api_key": "sk-", # Anthropic API key (optional, can also use ANTHROPIC_API_KEY env var)
"qwen_api_key": "sk-", # Qwen API key (optional, can also use DASHSCOPE_API_KEY env var)
"minimax_api_key": "", # MiniMax API key (optional, can also use MINIMAX_API_KEY env var)
}
30 changes: 26 additions & 4 deletions templates/demo_new.html
Original file line number Diff line number Diff line change
Expand Up @@ -1091,6 +1091,7 @@ <h4 class="panel-title">
<option value="openai" selected>OpenAI</option>
<option value="anthropic">Claude (Anthropic)</option>
<option value="qwen">Qwen</option>
<option value="minimax">MiniMax</option>
</select>
</div>

Expand Down Expand Up @@ -1126,6 +1127,17 @@ <h4 class="panel-title">
</button>
</div>
</div>

<!-- MiniMax API Key Input -->
<div class="form-group" id="minimaxApiKeyGroup" style="display: none;">
<label class="form-label">MiniMax API Key</label>
<div class="input-group">
<input type="password" class="form-control" id="minimaxApiKeyInput" placeholder="Enter your MiniMax API key">
<button class="btn btn-outline-primary" type="button" onclick="updateApiKey('minimax')">
<i class="fas fa-save"></i> Update
</button>
</div>
</div>

<!-- API Key Status Messages -->
<div id="apiKeyStatus" class="alert alert-info mt-2" style="display: none;">
Expand Down Expand Up @@ -1495,7 +1507,8 @@ <h4 class="panel-title">
const providerNames = {
'openai': 'OpenAI',
'anthropic': 'Anthropic',
'qwen': 'Qwen'
'qwen': 'Qwen',
'minimax': 'MiniMax'
};
const providerName = providerNames[provider] || provider;

Expand Down Expand Up @@ -1782,19 +1795,23 @@ <h4 class="panel-title">
const openaiGroup = document.getElementById('openaiApiKeyGroup');
const anthropicGroup = document.getElementById('anthropicApiKeyGroup');
const qwenGroup = document.getElementById('qwenApiKeyGroup');

const minimaxGroup = document.getElementById('minimaxApiKeyGroup');

// Hide all groups first
openaiGroup.style.display = 'none';
anthropicGroup.style.display = 'none';
qwenGroup.style.display = 'none';

minimaxGroup.style.display = 'none';

// Show the selected provider's group
if (provider === 'openai') {
openaiGroup.style.display = 'block';
} else if (provider === 'anthropic') {
anthropicGroup.style.display = 'block';
} else if (provider === 'qwen') {
qwenGroup.style.display = 'block';
} else if (provider === 'minimax') {
minimaxGroup.style.display = 'block';
}

// Update provider on backend
Expand Down Expand Up @@ -1839,6 +1856,8 @@ <h4 class="panel-title">
apiKey = document.getElementById('anthropicApiKeyInput').value.trim();
} else if (actualProvider === 'qwen') {
apiKey = document.getElementById('qwenApiKeyInput').value.trim();
} else if (actualProvider === 'minimax') {
apiKey = document.getElementById('minimaxApiKeyInput').value.trim();
}

if (!apiKey) {
Expand Down Expand Up @@ -1872,7 +1891,8 @@ <h4 class="panel-title">
const providerNames = {
'openai': 'OpenAI',
'anthropic': 'Anthropic',
'qwen': 'Qwen'
'qwen': 'Qwen',
'minimax': 'MiniMax'
};
showApiKeySuccess(`${providerNames[actualProvider] || actualProvider} API key updated successfully!`);
if (actualProvider === 'openai') {
Expand All @@ -1881,6 +1901,8 @@ <h4 class="panel-title">
document.getElementById('anthropicApiKeyInput').value = '';
} else if (actualProvider === 'qwen') {
document.getElementById('qwenApiKeyInput').value = '';
} else if (actualProvider === 'minimax') {
document.getElementById('minimaxApiKeyInput').value = '';
}
checkApiKeyStatus();
} else {
Expand Down
Empty file added tests/__init__.py
Empty file.
89 changes: 89 additions & 0 deletions tests/test_minimax_integration.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,89 @@
"""Integration tests for MiniMax provider in QuantAgent.

These tests verify end-to-end behavior with the actual MiniMax API.
They require the MINIMAX_API_KEY environment variable to be set.
Skip with: pytest -k "not integration"
"""

import os
import sys
import unittest
from unittest.mock import MagicMock

# Make the project root importable regardless of where pytest is launched.
sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))

# Stub out heavy native dependencies so importing the project modules
# does not require compiled extensions to be installed locally.
for _heavy_dep in ("talib", "langchain_qwq"):
    sys.modules.setdefault(_heavy_dep, MagicMock())

# Live-API credentials; an empty string causes every test here to be skipped.
MINIMAX_API_KEY = os.environ.get("MINIMAX_API_KEY", "")
SKIP_REASON = "MINIMAX_API_KEY not set"


@unittest.skipUnless(MINIMAX_API_KEY, SKIP_REASON)
class TestMiniMaxIntegration(unittest.TestCase):
    """Integration tests that hit the real MiniMax API.

    Each test builds a TradingGraph configured for the MiniMax provider.
    The config construction is shared via _minimax_config() so the three
    tests cannot drift apart (previously the same 7 lines were duplicated
    in every test).
    """

    @staticmethod
    def _minimax_config(model):
        """Return a copy of DEFAULT_CONFIG pointed at MiniMax *model*.

        Both the agent and graph providers/models are set, and the live
        API key from the environment is injected.
        """
        from default_config import DEFAULT_CONFIG

        config = DEFAULT_CONFIG.copy()
        config["agent_llm_provider"] = "minimax"
        config["graph_llm_provider"] = "minimax"
        config["agent_llm_model"] = model
        config["graph_llm_model"] = model
        config["minimax_api_key"] = MINIMAX_API_KEY
        return config

    def test_create_llm_minimax_m27(self):
        """Should create a working MiniMax M2.7 LLM via ChatOpenAI."""
        from trading_graph import TradingGraph
        from langchain_openai import ChatOpenAI

        tg = TradingGraph(config=self._minimax_config("MiniMax-M2.7"))

        # MiniMax exposes an OpenAI-compatible API, so the agent LLM
        # should be backed by a ChatOpenAI instance.
        self.assertIsInstance(tg.agent_llm, ChatOpenAI)

    def test_minimax_simple_invoke(self):
        """Should successfully invoke MiniMax M2.7-highspeed for a simple query."""
        from trading_graph import TradingGraph

        tg = TradingGraph(config=self._minimax_config("MiniMax-M2.7-highspeed"))

        response = tg.agent_llm.invoke("Say 'hello' and nothing else.")
        self.assertIsNotNone(response)
        # assertGreater gives a clearer failure message than assertTrue(len > 0).
        self.assertGreater(len(response.content), 0)

    def test_minimax_provider_full_lifecycle(self):
        """Test full lifecycle: create -> update key -> refresh."""
        from trading_graph import TradingGraph

        tg = TradingGraph(config=self._minimax_config("MiniMax-M2.7-highspeed"))

        # Update API key (same key, just exercising the refresh mechanism).
        tg.update_api_key(MINIMAX_API_KEY, provider="minimax")

        # The agent LLM must still be usable after the key refresh.
        response = tg.agent_llm.invoke("Reply with just the word 'ok'.")
        self.assertIsNotNone(response)
        self.assertGreater(len(response.content), 0)


# Allow running this file directly: python tests/test_minimax_integration.py
if __name__ == "__main__":
    unittest.main()
Loading
Loading