package org.k11techlab.framework_unittests.aiTests;

import org.k11techlab.framework.selenium.webuitestbase.BaseSeleniumTest;
import org.k11techlab.framework.ai.manager.AIProviderManager;
import org.k11techlab.framework.ai.llm.LLMInterface;
import org.k11techlab.framework.ai.ollama.OllamaClient;
import org.k11techlab.framework.selenium.webuitestengine.logger.Log;
import org.testng.annotations.Test;
import org.testng.annotations.BeforeClass;
/**
 * AI Provider Diagnostics Test.
 *
 * <p>Comprehensive testing and debugging for AI provider connectivity. Each
 * test prints a banner-delimited diagnostics section to stdout; failures are
 * reported in the output rather than failing the suite.
 *
 * @author K11 TechLab
 * @version 1.0
 */
18+ public class AIProviderDiagnosticsTest extends BaseSeleniumTest {
19+
20+ private AIProviderManager aiManager ;
21+
22+ @ BeforeClass
23+ public void setupDiagnostics () {
24+ System .out .println ("\n 🔧 AI Provider Diagnostics Starting..." );
25+ Log .info ("Starting comprehensive AI provider diagnostics" );
26+ }
27+
28+ @ Test (priority = 1 , description = "Test System Properties and Environment" )
29+ public void testSystemConfiguration () {
30+ System .out .println ("\n " + "=" .repeat (60 ));
31+ System .out .println ("🔧 SYSTEM CONFIGURATION DIAGNOSTICS" );
32+ System .out .println ("=" .repeat (60 ));
33+
34+ // Environment variables
35+ System .out .println ("📋 Environment Variables:" );
36+ System .out .println (" CI: " + System .getenv ("CI" ));
37+ System .out .println (" OLLAMA_HOST: " + System .getenv ("OLLAMA_HOST" ));
38+ System .out .println (" OLLAMA_MODEL: " + System .getenv ("OLLAMA_MODEL" ));
39+ System .out .println (" OLLAMA_AVAILABLE: " + System .getenv ("OLLAMA_AVAILABLE" ));
40+
41+ // System properties
42+ System .out .println ("\n ⚙️ Java System Properties:" );
43+ System .out .println (" ai.test.mode: " + System .getProperty ("ai.test.mode" , "not set" ));
44+ System .out .println (" ai.provider: " + System .getProperty ("ai.provider" , "not set" ));
45+ System .out .println (" ai.model: " + System .getProperty ("ai.model" , "not set" ));
46+ System .out .println (" ai.ollama.url: " + System .getProperty ("ai.ollama.url" , "not set" ));
47+
48+ System .out .println ("✅ System configuration check completed" );
49+ }
50+
51+ @ Test (priority = 2 , description = "Test Ollama Direct Connection" )
52+ public void testOllamaDirectConnection () {
53+ System .out .println ("\n " + "=" .repeat (60 ));
54+ System .out .println ("🦙 OLLAMA DIRECT CONNECTION TEST" );
55+ System .out .println ("=" .repeat (60 ));
56+
57+ try {
58+ // Test with system properties if available
59+ String ollamaUrl = System .getProperty ("ai.ollama.url" , "http://localhost:11434" );
60+ String model = System .getProperty ("ai.model" , "tinyllama:1.1b" );
61+
62+ System .out .println ("🔗 Testing Ollama connection:" );
63+ System .out .println (" URL: " + ollamaUrl );
64+ System .out .println (" Model: " + model );
65+
66+ OllamaClient ollama = new OllamaClient (ollamaUrl , model );
67+
68+ // Test availability
69+ System .out .println ("🔍 Testing Ollama availability..." );
70+ boolean available = ollama .isAvailable ();
71+ System .out .println (" Available: " + (available ? "✅ YES" : "❌ NO" ));
72+
73+ if (available ) {
74+ // Test model info
75+ System .out .println ("📦 Testing model info..." );
76+ String modelInfo = ollama .getModelInfo ();
77+ System .out .println (" Model Info: " + modelInfo );
78+
79+ // Test simple generation
80+ System .out .println ("🤖 Testing response generation..." );
81+ String response = ollama .generateResponse ("Say 'Hello from Ollama'" );
82+ System .out .println (" Response: " + (response != null && !response .trim ().isEmpty () ? "✅ SUCCESS" : "❌ FAILED" ));
83+ System .out .println (" Response Preview: " + (response != null ? response .substring (0 , Math .min (100 , response .length ())) : "null" ));
84+
85+ } else {
86+ System .out .println ("❌ Ollama not available - cannot perform further tests" );
87+
88+ // Try to diagnose the issue
89+ System .out .println ("\n 🔍 Diagnosing connection issues..." );
90+ try {
91+ java .net .URL url = new java .net .URL (ollamaUrl + "/api/tags" );
92+ java .net .HttpURLConnection conn = (java .net .HttpURLConnection ) url .openConnection ();
93+ conn .setConnectTimeout (5000 );
94+ conn .setRequestMethod ("GET" );
95+ int responseCode = conn .getResponseCode ();
96+ System .out .println (" HTTP Response Code: " + responseCode );
97+ } catch (Exception e ) {
98+ System .out .println (" Connection Error: " + e .getMessage ());
99+ }
100+ }
101+
102+ } catch (Exception e ) {
103+ System .out .println ("❌ Ollama connection test failed: " + e .getMessage ());
104+ e .printStackTrace ();
105+ }
106+ }
107+
108+ @ Test (priority = 3 , description = "Test AI Provider Manager" )
109+ public void testAIProviderManager () {
110+ System .out .println ("\n " + "=" .repeat (60 ));
111+ System .out .println ("🤖 AI PROVIDER MANAGER TEST" );
112+ System .out .println ("=" .repeat (60 ));
113+
114+ try {
115+ System .out .println ("🔧 Initializing AI Provider Manager..." );
116+ aiManager = new AIProviderManager (true ); // Enable fallback for CI
117+
118+ // Test best provider selection
119+ System .out .println ("🎯 Testing best provider selection..." );
120+ LLMInterface bestProvider = aiManager .getBestProvider ();
121+
122+ if (bestProvider != null ) {
123+ System .out .println ("✅ Best provider found: " + bestProvider .getModelInfo ());
124+
125+ // Test availability
126+ boolean available = bestProvider .isAvailable ();
127+ System .out .println (" Available: " + (available ? "✅ YES" : "❌ NO" ));
128+
129+ if (available ) {
130+ // Test response generation
131+ System .out .println ("🧪 Testing response generation..." );
132+ String response = bestProvider .generateResponse ("Generate 3 Selenium locators for a submit button" );
133+ System .out .println (" Response received: " + (response != null && !response .trim ().isEmpty () ? "✅ SUCCESS" : "❌ FAILED" ));
134+
135+ if (response != null && response .length () > 0 ) {
136+ System .out .println (" Response preview (first 200 chars):" );
137+ System .out .println (" " + response .substring (0 , Math .min (200 , response .length ())));
138+ }
139+ }
140+ } else {
141+ System .out .println ("❌ No AI provider available" );
142+ }
143+
144+ // Test specific Ollama provider
145+ System .out .println ("\n 🦙 Testing specific Ollama provider..." );
146+ LLMInterface ollamaProvider = aiManager .getProvider (AIProviderManager .Provider .OLLAMA );
147+ if (ollamaProvider != null ) {
148+ System .out .println ("✅ Ollama provider obtained" );
149+ System .out .println (" Available: " + (ollamaProvider .isAvailable () ? "✅ YES" : "❌ NO" ));
150+ } else {
151+ System .out .println ("❌ Ollama provider not available" );
152+ }
153+
154+ } catch (Exception e ) {
155+ System .out .println ("❌ AI Provider Manager test failed: " + e .getMessage ());
156+ e .printStackTrace ();
157+ }
158+ }
159+
160+ @ Test (priority = 4 , description = "Test Fallback Behavior" )
161+ public void testFallbackBehavior () {
162+ System .out .println ("\n " + "=" .repeat (60 ));
163+ System .out .println ("🛡️ FALLBACK BEHAVIOR TEST" );
164+ System .out .println ("=" .repeat (60 ));
165+
166+ try {
167+ System .out .println ("🔧 Testing fallback behavior with all providers..." );
168+
169+ // Create manager with fallback enabled
170+ AIProviderManager fallbackManager = new AIProviderManager (true );
171+
172+ // Get any available provider
173+ LLMInterface provider = fallbackManager .getBestProvider ();
174+
175+ if (provider != null ) {
176+ System .out .println ("✅ Fallback provider available: " + provider .getModelInfo ());
177+
178+ // Test response generation with fallback
179+ String response = provider .generateResponse ("Test fallback response" );
180+ System .out .println (" Fallback response: " + (response != null ? "✅ SUCCESS" : "❌ FAILED" ));
181+
182+ } else {
183+ System .out .println ("❌ Even fallback provider failed - this should not happen" );
184+ }
185+
186+ } catch (Exception e ) {
187+ System .out .println ("❌ Fallback test failed: " + e .getMessage ());
188+ e .printStackTrace ();
189+ }
190+ }
191+
192+ @ Test (priority = 5 , description = "Generate Diagnostics Summary" )
193+ public void generateDiagnosticsSummary () {
194+ System .out .println ("\n " + "=" .repeat (60 ));
195+ System .out .println ("📊 DIAGNOSTICS SUMMARY" );
196+ System .out .println ("=" .repeat (60 ));
197+
198+ System .out .println ("🎯 Recommendations based on test results:" );
199+
200+ String testMode = System .getProperty ("ai.test.mode" , "not set" );
201+ String ciEnv = System .getenv ("CI" );
202+
203+ if ("true" .equals (ciEnv )) {
204+ System .out .println ("🏗️ CI Environment detected:" );
205+ System .out .println (" ✅ Use fallback mode for reliable testing" );
206+ System .out .println (" ✅ Mock LM Studio for API compatibility testing" );
207+ System .out .println (" ⚠️ Ollama may timeout due to model download constraints" );
208+ } else {
209+ System .out .println ("💻 Local Development Environment:" );
210+ System .out .println (" ✅ Install and run Ollama locally for best experience" );
211+ System .out .println (" ✅ Consider LM Studio for advanced AI features" );
212+ System .out .println (" ✅ Simple AI fallback always available" );
213+ }
214+
215+ System .out .println ("\n 📋 Next Steps:" );
216+ System .out .println (" 1. Review diagnostics output above" );
217+ System .out .println (" 2. Fix any connection issues identified" );
218+ System .out .println (" 3. Run SelfHealingDemoTest to validate functionality" );
219+ System .out .println (" 4. Check troubleshooting guide if issues persist" );
220+
221+ System .out .println ("\n ✅ AI Provider Diagnostics completed!" );
222+ }
223+ }