feat(nordagpt): integrate smart router — selective context loading + adaptive model selection
Some checks are pending
NordaBiz Tests / Unit & Integration Tests (push) Waiting to run
NordaBiz Tests / E2E Tests (Playwright) (push) Blocked by required conditions
NordaBiz Tests / Smoke Tests (Production) (push) Blocked by required conditions
NordaBiz Tests / Send Failure Notification (push) Blocked by required conditions

Wire Smart Router and Context Builder into send_message(): queries are now classified,
only needed data is loaded via build_selective_context(), and model/thinking level
are determined by the router. Falls back to full context if router is unavailable.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
Maciej Pienczyn 2026-03-28 05:37:00 +01:00
parent 5b8f82d35b
commit 0b2e210da6
3 changed files with 61 additions and 9 deletions

View File

@@ -1,4 +1,9 @@
{
"permissions": {
"allow": [
"Bash(git commit:*)"
]
},
"enabledPlugins": {
"firebase@claude-plugins-official": false,
"greptile@claude-plugins-official": false,
@@ -14,7 +19,6 @@
"vercel@claude-plugins-official": false,
"supabase@claude-plugins-official": false,
"figma@claude-plugins-official": false,
"ralph-wiggum@claude-plugins-official": true,
"serena@claude-plugins-official": false,
"agent-sdk-dev@claude-plugins-official": false,
"plugin-dev@claude-plugins-official": false,
@@ -47,10 +51,5 @@
"commit-commands@claude-plugins-official": true,
"context7@claude-plugins-official": true,
"code-review@claude-plugins-official": true
},
"permissions": {
"allow": [
"Bash(git commit:*)"
]
}
}

View File

@@ -78,6 +78,15 @@ except ImportError:
SENSITIVE_DATA_SERVICE_AVAILABLE = False
logger.warning("Sensitive data service not available - messages will not be sanitized")
# Import Smart Router and Context Builder
try:
from smart_router import route_query
from context_builder import build_selective_context
SMART_ROUTER_AVAILABLE = True
except ImportError:
SMART_ROUTER_AVAILABLE = False
logger.warning("Smart Router or Context Builder not available - using full context fallback")
class NordaBizChatEngine:
"""
@@ -234,14 +243,46 @@ class NordaBizChatEngine:
# Build context from conversation history and relevant companies
# Use ORIGINAL message for AI (so it can understand the question)
# but the sanitized version is what gets stored in DB
# Smart Router — classify query and select data + model
if SMART_ROUTER_AVAILABLE:
route_decision = route_query(
message=user_message,
user_context=user_context,
gemini_service=self.gemini_service
)
logger.info(
f"NordaGPT Router: user={user_context.get('user_name') if user_context else '?'}, "
f"complexity={route_decision['complexity']}, model={route_decision.get('model')}, "
f"thinking={route_decision.get('thinking')}, data={route_decision['data_needed']}, "
f"routed_by={route_decision.get('routed_by')}"
)
# Build context — selective (via router) or full (fallback)
if route_decision.get('routed_by') != 'fallback':
context = build_selective_context(
data_needed=route_decision.get('data_needed', []),
conversation_id=conversation.id,
current_message=user_message,
user_context=user_context
)
else:
context = self._build_conversation_context(db, conversation, user_message)
context['_route_decision'] = route_decision
# Use router-determined thinking level
effective_thinking = route_decision.get('thinking', thinking_level)
else:
context = self._build_conversation_context(db, conversation, user_message)
effective_thinking = thinking_level
# Get AI response with cost tracking
response = self._query_ai(
context,
user_message,
user_id=user_id,
thinking_level=thinking_level,
thinking_level=effective_thinking,
user_context=user_context
)
@@ -1372,12 +1413,21 @@ W dyskusji [Artur Wiertel](link) pytał o moderację. Pełna treść: [moje uwag
# Get response with automatic cost tracking to ai_api_costs table
if self.use_global_service and self.gemini_service:
# Read router decision from context to select model
route = context.get('_route_decision', {})
effective_model_id = None
model_alias = route.get('model')
if model_alias:
from gemini_service import GEMINI_MODELS
effective_model_id = GEMINI_MODELS.get(model_alias)
response_text = self.gemini_service.generate_text(
prompt=full_prompt,
feature='ai_chat',
user_id=user_id,
temperature=0.7,
thinking_level=thinking_level
thinking_level=thinking_level,
model=effective_model_id
)
# Post-process to ensure links are added even if AI didn't format them
return self._postprocess_links(response_text, context)

View File

@@ -75,6 +75,9 @@ MANUAL_NAME_MAP = {
'U WITKA': 77,
'WIENIAWA': 36,
'PZU': 100,
'EKOZUK': 31,
'PGK': 75,
'SKLEPY LORD': 76,
}