Some checks are pending
NordaBiz Tests / Unit & Integration Tests (push) Waiting to run
NordaBiz Tests / E2E Tests (Playwright) (push) Blocked by required conditions
NordaBiz Tests / Smoke Tests (Production) (push) Blocked by required conditions
NordaBiz Tests / Send Failure Notification (push) Blocked by required conditions
- Move percentage text from inside bars to separate column (always visible) - Add cost (USD) column to "Wykorzystanie wg typu" section - Add tokens+cost to type query in backend - Fix same issues in company detail template Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
1078 lines
42 KiB
Python
1078 lines
42 KiB
Python
"""
|
|
Admin Analytics Routes
|
|
=======================
|
|
|
|
User analytics dashboard - sessions, page views, clicks, conversions.
|
|
"""
|
|
|
|
import csv
|
|
import io
|
|
import logging
|
|
from datetime import date, timedelta
|
|
|
|
from flask import render_template, request, redirect, url_for, flash, Response
|
|
from flask_login import login_required, current_user
|
|
from sqlalchemy import func, desc
|
|
from sqlalchemy.orm import joinedload
|
|
|
|
from . import bp
|
|
from database import (
|
|
SessionLocal, User, UserSession, PageView, SearchQuery,
|
|
ConversionEvent, JSError, Company, AIUsageLog, AIUsageDaily,
|
|
SystemRole
|
|
)
|
|
from utils.decorators import role_required
|
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
|
# ============================================================
|
|
# ANALYTICS DASHBOARD
|
|
# ============================================================
|
|
|
|
@bp.route('/analytics')
@login_required
@role_required(SystemRole.OFFICE_MANAGER)
def admin_analytics():
    """Admin dashboard for user analytics - sessions, page views, clicks.

    Query params:
        period:  'day' | 'week' | 'month'; any other value disables the
                 date filter (all time).
        user_id: optional; when given, a per-user drill-down section
                 (last 20 sessions, last 50 page views) is also rendered.

    Renders admin/analytics_dashboard.html. On any error, logs it,
    flashes a message, and redirects to the admin user list.
    """
    period = request.args.get('period', 'week')
    user_id = request.args.get('user_id', type=int)

    # Period calculation: resolve the period keyword to an inclusive
    # lower bound on the session/page-view/search dates.
    today = date.today()
    if period == 'day':
        start_date = today
    elif period == 'week':
        start_date = today - timedelta(days=7)
    elif period == 'month':
        start_date = today - timedelta(days=30)
    else:
        start_date = None  # no date filter: all-time view

    db = SessionLocal()
    try:
        # Base query for sessions in period; reused for totals and bounce rate.
        sessions_query = db.query(UserSession)
        if start_date:
            sessions_query = sessions_query.filter(
                func.date(UserSession.started_at) >= start_date
            )

        # Overall stats
        total_sessions = sessions_query.count()
        # NOTE(review): .distinct(<column>) emits DISTINCT ON, which is
        # PostgreSQL-specific — verify the configured backend supports it.
        unique_users = sessions_query.filter(
            UserSession.user_id.isnot(None)
        ).distinct(UserSession.user_id).count()

        # NOTE(review): the `... if start_date else True` pattern passes a
        # bare Python True into filter() for the all-time case — works via
        # SQLAlchemy's boolean coercion, but a conditional filter would be
        # clearer and matches the style used elsewhere in this function.
        total_page_views = db.query(func.sum(UserSession.page_views_count)).filter(
            func.date(UserSession.started_at) >= start_date if start_date else True
        ).scalar() or 0

        total_clicks = db.query(func.sum(UserSession.clicks_count)).filter(
            func.date(UserSession.started_at) >= start_date if start_date else True
        ).scalar() or 0

        # Average only over sessions that recorded a duration.
        avg_duration = db.query(func.avg(UserSession.duration_seconds)).filter(
            func.date(UserSession.started_at) >= start_date if start_date else True,
            UserSession.duration_seconds.isnot(None)
        ).scalar() or 0

        stats = {
            'total_sessions': total_sessions,
            'unique_users': unique_users,
            'total_page_views': int(total_page_views),
            'total_clicks': int(total_clicks),
            'avg_duration': float(avg_duration)
        }

        # Device breakdown: {device_type: session count}
        device_query = db.query(
            UserSession.device_type,
            func.count(UserSession.id)
        )
        if start_date:
            device_query = device_query.filter(
                func.date(UserSession.started_at) >= start_date
            )
        device_stats = dict(device_query.group_by(UserSession.device_type).all())

        # Top users by engagement (ranked by page views, top 20)
        user_query = db.query(
            User.id,
            User.name,
            User.email,
            func.count(UserSession.id).label('sessions'),
            func.sum(UserSession.page_views_count).label('page_views'),
            func.sum(UserSession.clicks_count).label('clicks'),
            func.sum(UserSession.duration_seconds).label('total_time')
        ).join(UserSession, User.id == UserSession.user_id)

        if start_date:
            user_query = user_query.filter(
                func.date(UserSession.started_at) >= start_date
            )

        user_rankings = user_query.group_by(User.id).order_by(
            desc('page_views')
        ).limit(20).all()

        # Popular pages: views, distinct viewers, and mean time on page.
        page_query = db.query(
            PageView.path,
            func.count(PageView.id).label('views'),
            func.count(func.distinct(PageView.user_id)).label('unique_users'),
            func.avg(PageView.time_on_page_seconds).label('avg_time')
        )
        if start_date:
            page_query = page_query.filter(
                func.date(PageView.viewed_at) >= start_date
            )
        popular_pages = page_query.group_by(PageView.path).order_by(
            desc('views')
        ).limit(20).all()

        # Recent sessions (last 50) — deliberately NOT filtered by period;
        # eager-loads the user relation for the template.
        recent_sessions = db.query(UserSession).options(
            joinedload(UserSession.user)
        ).order_by(UserSession.started_at.desc()).limit(50).all()

        # Single user detail (if requested via ?user_id=...)
        user_detail = None
        if user_id:
            user_obj = db.query(User).filter_by(id=user_id).first()
            user_sessions = db.query(UserSession).filter_by(user_id=user_id).order_by(
                UserSession.started_at.desc()
            ).limit(20).all()
            user_pages = db.query(PageView).filter_by(user_id=user_id).order_by(
                PageView.viewed_at.desc()
            ).limit(50).all()

            user_detail = {
                'user': user_obj,
                'sessions': user_sessions,
                'pages': user_pages
            }

        # Bounce rate: sessions with at most 1 page view OR a recorded
        # duration under 10 seconds, as a percentage of all sessions.
        bounced_sessions = sessions_query.filter(
            (UserSession.page_views_count <= 1) |
            ((UserSession.duration_seconds.isnot(None)) & (UserSession.duration_seconds < 10))
        ).count()
        bounce_rate = round((bounced_sessions / total_sessions * 100), 1) if total_sessions > 0 else 0

        # Geolocation - top 10 countries by session count
        country_query = db.query(
            UserSession.country,
            func.count(UserSession.id).label('count')
        ).filter(UserSession.country.isnot(None))
        if start_date:
            country_query = country_query.filter(func.date(UserSession.started_at) >= start_date)
        country_stats = dict(country_query.group_by(UserSession.country).order_by(desc('count')).limit(10).all())

        # UTM sources - top 10 by session count
        utm_query = db.query(
            UserSession.utm_source,
            func.count(UserSession.id).label('count')
        ).filter(UserSession.utm_source.isnot(None))
        if start_date:
            utm_query = utm_query.filter(func.date(UserSession.started_at) >= start_date)
        utm_stats = dict(utm_query.group_by(UserSession.utm_source).order_by(desc('count')).limit(10).all())

        # Top search queries (normalized), with mean result count
        search_query = db.query(
            SearchQuery.query_normalized,
            func.count(SearchQuery.id).label('count'),
            func.avg(SearchQuery.results_count).label('avg_results')
        )
        if start_date:
            search_query = search_query.filter(func.date(SearchQuery.searched_at) >= start_date)
        top_searches = search_query.group_by(SearchQuery.query_normalized).order_by(desc('count')).limit(15).all()

        # Searches that returned no results (content-gap signal)
        no_results_query = db.query(
            SearchQuery.query_normalized,
            func.count(SearchQuery.id).label('count')
        ).filter(SearchQuery.has_results == False)
        if start_date:
            no_results_query = no_results_query.filter(func.date(SearchQuery.searched_at) >= start_date)
        searches_no_results = no_results_query.group_by(SearchQuery.query_normalized).order_by(desc('count')).limit(10).all()

        # Conversion events, counted per event type
        conversion_query = db.query(
            ConversionEvent.event_type,
            func.count(ConversionEvent.id).label('count')
        )
        if start_date:
            conversion_query = conversion_query.filter(func.date(ConversionEvent.converted_at) >= start_date)
        conversion_stats = dict(conversion_query.group_by(ConversionEvent.event_type).all())

        # JS errors, aggregated — grouped by error_hash (plus message/source
        # so they can be selected), top 10 most frequent
        error_query = db.query(
            JSError.message,
            JSError.source,
            func.count(JSError.id).label('count')
        )
        if start_date:
            error_query = error_query.filter(func.date(JSError.occurred_at) >= start_date)
        js_errors = error_query.group_by(JSError.error_hash, JSError.message, JSError.source).order_by(desc('count')).limit(10).all()

        # Average scroll depth over page views that recorded one
        avg_scroll = db.query(func.avg(PageView.scroll_depth_percent)).filter(
            PageView.scroll_depth_percent.isnot(None)
        )
        if start_date:
            avg_scroll = avg_scroll.filter(func.date(PageView.viewed_at) >= start_date)
        avg_scroll_depth = round(avg_scroll.scalar() or 0, 1)

        # Time patterns - session counts bucketed by hour of day
        hourly_query = db.query(
            func.extract('hour', UserSession.started_at).label('hour'),
            func.count(UserSession.id).label('count')
        )
        if start_date:
            hourly_query = hourly_query.filter(func.date(UserSession.started_at) >= start_date)
        hourly_activity = dict(hourly_query.group_by('hour').all())

        # Fold the derived metrics into the stats dict for the template
        stats['bounce_rate'] = bounce_rate
        stats['avg_scroll_depth'] = avg_scroll_depth

        return render_template(
            'admin/analytics_dashboard.html',
            stats=stats,
            device_stats=device_stats,
            user_rankings=user_rankings,
            popular_pages=popular_pages,
            recent_sessions=recent_sessions,
            user_detail=user_detail,
            current_period=period,
            # Additional datasets
            country_stats=country_stats,
            utm_stats=utm_stats,
            top_searches=top_searches,
            searches_no_results=searches_no_results,
            conversion_stats=conversion_stats,
            js_errors=js_errors,
            hourly_activity=hourly_activity
        )
    except Exception as e:
        logger.error(f"Admin analytics error: {e}")
        flash('Blad podczas ladowania analityki.', 'error')
        return redirect(url_for('admin.admin_users'))
    finally:
        db.close()
|
|
|
|
|
|
def _export_sessions_rows(writer, db, start_date):
    """Write header + one CSV row per UserSession started on/after start_date."""
    writer.writerow(['ID', 'User ID', 'Started At', 'Duration (s)', 'Page Views', 'Clicks',
                     'Device', 'Browser', 'OS', 'Country', 'UTM Source', 'UTM Campaign'])
    sessions = db.query(UserSession).filter(
        func.date(UserSession.started_at) >= start_date
    ).order_by(UserSession.started_at.desc()).all()
    for s in sessions:
        writer.writerow([
            s.id, s.user_id, s.started_at.isoformat() if s.started_at else '',
            s.duration_seconds or 0, s.page_views_count or 0, s.clicks_count or 0,
            s.device_type or '', s.browser or '', s.os or '',
            s.country or '', s.utm_source or '', s.utm_campaign or ''
        ])


def _export_pageviews_rows(writer, db, start_date):
    """Write header + page-view rows (capped at 10000 to bound memory/response size)."""
    writer.writerow(['ID', 'Session ID', 'User ID', 'Path', 'Viewed At', 'Time on Page (s)',
                     'Scroll Depth (%)', 'Company ID'])
    views = db.query(PageView).filter(
        func.date(PageView.viewed_at) >= start_date
    ).order_by(PageView.viewed_at.desc()).limit(10000).all()
    for v in views:
        writer.writerow([
            v.id, v.session_id, v.user_id, v.path,
            v.viewed_at.isoformat() if v.viewed_at else '',
            v.time_on_page_seconds or 0, v.scroll_depth_percent or 0, v.company_id or ''
        ])


def _export_searches_rows(writer, db, start_date):
    """Write header + search-query rows (capped at 10000)."""
    writer.writerow(['ID', 'User ID', 'Query', 'Results Count', 'Has Results', 'Clicked Company',
                     'Search Type', 'Searched At'])
    searches = db.query(SearchQuery).filter(
        func.date(SearchQuery.searched_at) >= start_date
    ).order_by(SearchQuery.searched_at.desc()).limit(10000).all()
    for s in searches:
        writer.writerow([
            s.id, s.user_id, s.query, s.results_count, s.has_results,
            s.clicked_company_id or '', s.search_type,
            s.searched_at.isoformat() if s.searched_at else ''
        ])


def _export_conversions_rows(writer, db, start_date):
    """Write header + one CSV row per ConversionEvent on/after start_date."""
    writer.writerow(['ID', 'User ID', 'Event Type', 'Event Category', 'Company ID',
                     'Target Type', 'Converted At'])
    conversions = db.query(ConversionEvent).filter(
        func.date(ConversionEvent.converted_at) >= start_date
    ).order_by(ConversionEvent.converted_at.desc()).all()
    for c in conversions:
        writer.writerow([
            c.id, c.user_id, c.event_type, c.event_category or '',
            c.company_id or '', c.target_type or '',
            c.converted_at.isoformat() if c.converted_at else ''
        ])


@bp.route('/analytics/export')
@login_required
@role_required(SystemRole.OFFICE_MANAGER)
def admin_analytics_export():
    """Export analytics data as CSV.

    Query params:
        type:   one of 'sessions', 'pageviews', 'searches', 'conversions';
                unknown values are rejected with a flash + redirect.
        period: 'day' | 'week' | 'month'; any other value exports the
                last 365 days.

    Returns a text/csv attachment named analytics_<type>_<period>.csv.
    """
    export_type = request.args.get('type', 'sessions')
    period = request.args.get('period', 'month')

    # Dispatch table doubles as a whitelist. export_type is user input and
    # is interpolated into the Content-Disposition filename, so anything
    # unrecognized is rejected here instead of (as before) emitting an
    # empty CSV with no header and echoing the raw value into the header.
    row_writers = {
        'sessions': _export_sessions_rows,
        'pageviews': _export_pageviews_rows,
        'searches': _export_searches_rows,
        'conversions': _export_conversions_rows,
    }
    if export_type not in row_writers:
        flash('Blad podczas eksportu.', 'error')
        return redirect(url_for('admin.admin_analytics'))

    # Resolve the period keyword to an inclusive lower date bound.
    today = date.today()
    if period == 'day':
        start_date = today
    elif period == 'week':
        start_date = today - timedelta(days=7)
    elif period == 'month':
        start_date = today - timedelta(days=30)
    else:
        start_date = today - timedelta(days=365)  # year

    db = SessionLocal()
    try:
        # CSV is assembled fully in memory; the pageview/search exports
        # are capped at 10000 rows in their helpers to bound this.
        output = io.StringIO()
        writer = csv.writer(output)
        row_writers[export_type](writer, db, start_date)

        output.seek(0)
        return Response(
            output.getvalue(),
            mimetype='text/csv',
            headers={'Content-Disposition': f'attachment; filename=analytics_{export_type}_{period}.csv'}
        )

    except Exception as e:
        logger.error(f"Export error: {e}")
        flash('Blad podczas eksportu.', 'error')
        return redirect(url_for('admin.admin_analytics'))
    finally:
        db.close()
|
|
|
|
|
|
# ============================================================
|
|
# AI USAGE DASHBOARD
|
|
# ============================================================
|
|
|
|
@bp.route('/ai-usage')
@login_required
@role_required(SystemRole.OFFICE_MANAGER)
def admin_ai_usage():
    """Admin dashboard for AI (Gemini) API usage monitoring.

    Query params:
        period:     'day' | 'week' | 'month' | 'all' | 'custom'
                    (unknown values fall back to 'month').
        user_id:    optional user filter.
        company_id: optional company filter (resolved to that company's users).
        model:      optional exact model-name filter.
        date_from / date_to: ISO dates, only used when period == 'custom';
                    a malformed date_from silently reverts to 'month'.

    Headline cards (today/week/month/all-time, 24h error rate and response
    time) are always unfiltered; the breakdown tables respect the filters.
    Renders admin/ai_usage_dashboard.html.
    """
    # Local import shadows the module-level `date`-only import with
    # datetime/timedelta needed here.
    from datetime import datetime, timedelta

    # Get filter params
    period = request.args.get('period', 'month')  # day, week, month, all, custom
    filter_user_id = request.args.get('user_id', type=int)
    filter_company_id = request.args.get('company_id', type=int)
    filter_model = request.args.get('model', '')
    date_from = request.args.get('date_from', '')
    date_to = request.args.get('date_to', '')

    db = SessionLocal()
    try:
        # Reference points for the fixed headline windows.
        # NOTE(review): datetime.now() is naive local time — confirm
        # AIUsageLog.created_at is stored in the same timezone.
        now = datetime.now()
        today = now.date()
        week_ago = today - timedelta(days=7)
        month_ago = today - timedelta(days=30)
        day_ago = now - timedelta(hours=24)

        # Determine date filter based on period. period_end is only
        # meaningful for 'custom'; the fixed periods are open-ended.
        if period == 'custom' and date_from:
            try:
                period_start = date.fromisoformat(date_from)
                period_end = date.fromisoformat(date_to) if date_to else today
                period_label = f'{period_start.strftime("%d.%m")} - {period_end.strftime("%d.%m.%Y")}'
            except ValueError:
                # Malformed custom dates: fall back to the monthly view.
                period_start = month_ago
                period_end = today
                period_label = 'Ten miesiąc'
                period = 'month'
        else:
            period_end = None
            period_labels = {
                'day': ('Dzisiaj', today),
                'week': ('Ten tydzień', week_ago),
                'month': ('Ten miesiąc', month_ago),
                'all': ('Od początku', None)
            }
            period_label, period_start = period_labels.get(period, period_labels['month'])

        # Build base filter to apply to all "filtered" queries below.
        # Closure over period/user/company/model parsed above.
        def apply_filters(query):
            if period_start:
                query = query.filter(func.date(AIUsageLog.created_at) >= period_start)
            if period == 'custom' and period_end:
                query = query.filter(func.date(AIUsageLog.created_at) <= period_end)
            if filter_user_id:
                query = query.filter(AIUsageLog.user_id == filter_user_id)
            if filter_company_id:
                # Filter by company: get users from this company
                company_user_ids = [u.id for u in db.query(User.id).filter(User.company_id == filter_company_id).all()]
                if company_user_ids:
                    query = query.filter(AIUsageLog.user_id.in_(company_user_ids))
                else:
                    query = query.filter(AIUsageLog.id == -1)  # No results
            if filter_model:
                query = query.filter(AIUsageLog.model == filter_model)
            return query

        # Today's stats (always show, unfiltered)
        today_stats = db.query(
            func.count(AIUsageLog.id).label('requests'),
            func.coalesce(func.sum(AIUsageLog.tokens_input), 0).label('tokens_input'),
            func.coalesce(func.sum(AIUsageLog.tokens_output), 0).label('tokens_output'),
            func.coalesce(func.sum(AIUsageLog.cost_cents), 0).label('cost_cents')
        ).filter(
            func.date(AIUsageLog.created_at) == today
        ).first()

        # Week stats (unfiltered)
        week_requests = db.query(func.count(AIUsageLog.id)).filter(
            func.date(AIUsageLog.created_at) >= week_ago
        ).scalar() or 0

        # Month stats (unfiltered)
        month_stats = db.query(
            func.count(AIUsageLog.id).label('requests'),
            func.coalesce(func.sum(AIUsageLog.cost_cents), 0).label('cost_cents')
        ).filter(
            func.date(AIUsageLog.created_at) >= month_ago
        ).first()

        # All-time stats (unfiltered)
        all_time_stats = db.query(
            func.count(AIUsageLog.id).label('requests'),
            func.coalesce(func.sum(AIUsageLog.cost_cents), 0).label('cost_cents')
        ).first()

        # Error rate (last 24h, by exact timestamp rather than calendar day)
        last_24h_total = db.query(func.count(AIUsageLog.id)).filter(
            AIUsageLog.created_at >= day_ago
        ).scalar() or 0

        last_24h_errors = db.query(func.count(AIUsageLog.id)).filter(
            AIUsageLog.created_at >= day_ago,
            AIUsageLog.success == False
        ).scalar() or 0

        error_rate = (last_24h_errors / last_24h_total * 100) if last_24h_total > 0 else 0

        # Average response time (last 24h, successful calls only)
        avg_response_time = db.query(func.avg(AIUsageLog.response_time_ms)).filter(
            AIUsageLog.created_at >= day_ago,
            AIUsageLog.success == True
        ).scalar() or 0

        # Usage by type (filtered)
        type_query = db.query(
            AIUsageLog.request_type,
            func.count(AIUsageLog.id).label('count'),
            func.coalesce(func.sum(AIUsageLog.cost_cents), 0).label('cost_cents'),
            func.coalesce(func.sum(AIUsageLog.tokens_input), 0).label('tokens_input'),
            func.coalesce(func.sum(AIUsageLog.tokens_output), 0).label('tokens_output')
        )
        type_query = apply_filters(type_query)
        type_stats = type_query.group_by(AIUsageLog.request_type).order_by(desc('count')).all()

        # Calculate percentages for type breakdown.
        # type_labels maps request_type -> (display label, CSS class);
        # the label values are user-facing (Polish) and rendered as-is.
        total_type_count = sum(t.count for t in type_stats) if type_stats else 0
        type_labels = {
            'ai_chat': ('Chat AI', 'chat'),
            'zopk_news_evaluation': ('Ocena newsów ZOP Kaszubia', 'news'),
            'ai_user_parse': ('Tworzenie user', 'user'),
            'gbp_audit_ai': ('Audyt GBP', 'image'),
            'general': ('Ogólne', 'other')
        }
        usage_by_type = []
        for t in type_stats:
            label, css_class = type_labels.get(t.request_type, (t.request_type, 'other'))
            percentage = (t.count / total_type_count * 100) if total_type_count > 0 else 0
            usage_by_type.append({
                'type': t.request_type,
                'type_label': label,
                'type_class': css_class,
                'count': t.count,
                'percentage': round(percentage, 1),
                'cost_usd': float(t.cost_cents or 0) / 100,  # cents stored; USD shown
                'tokens': int(t.tokens_input or 0) + int(t.tokens_output or 0)
            })

        # Model breakdown (filtered)
        model_query = db.query(
            AIUsageLog.model,
            func.count(AIUsageLog.id).label('count'),
            func.coalesce(func.sum(AIUsageLog.cost_cents), 0).label('cost_cents'),
            func.coalesce(func.sum(AIUsageLog.tokens_input), 0).label('tokens_input'),
            func.coalesce(func.sum(AIUsageLog.tokens_output), 0).label('tokens_output')
        )
        model_query = apply_filters(model_query)
        model_stats_raw = model_query.group_by(AIUsageLog.model).order_by(desc('count')).all()

        total_model_count = sum(m.count for m in model_stats_raw) if model_stats_raw else 0
        model_breakdown = []
        for m in model_stats_raw:
            percentage = (m.count / total_model_count * 100) if total_model_count > 0 else 0
            model_breakdown.append({
                'model': m.model or 'unknown',
                'count': m.count,
                'cost_usd': float(m.cost_cents or 0) / 100,
                'tokens': int(m.tokens_input or 0) + int(m.tokens_output or 0),
                'percentage': round(percentage, 1)
            })

        # User statistics (filtered); outer join so users without a
        # company still appear.
        user_query = db.query(
            User.id,
            User.name.label('user_name'),
            User.email,
            Company.name.label('company_name'),
            func.count(AIUsageLog.id).label('requests'),
            func.coalesce(func.sum(AIUsageLog.tokens_input), 0).label('tokens_input'),
            func.coalesce(func.sum(AIUsageLog.tokens_output), 0).label('tokens_output'),
            func.coalesce(func.sum(AIUsageLog.cost_cents), 0).label('cost_cents')
        ).join(
            AIUsageLog, AIUsageLog.user_id == User.id
        ).outerjoin(
            Company, User.company_id == Company.id
        )
        user_query = apply_filters(user_query)
        user_stats = user_query.group_by(
            User.id, User.name, User.email, Company.name
        ).order_by(desc('cost_cents')).all()

        # Format user stats for the template
        user_rankings = []
        for u in user_stats:
            user_rankings.append({
                'id': u.id,
                'name': u.user_name or u.email,
                'email': u.email,
                'company': u.company_name or '-',
                'requests': u.requests,
                'tokens': int(u.tokens_input) + int(u.tokens_output),
                'cost_cents': float(u.cost_cents or 0),
                'cost_usd': float(u.cost_cents or 0) / 100
            })

        # Company statistics (filtered), attributed via each company's users
        company_query = db.query(
            Company.id,
            Company.name,
            func.count(AIUsageLog.id).label('requests'),
            func.count(func.distinct(AIUsageLog.user_id)).label('unique_users'),
            func.coalesce(func.sum(AIUsageLog.tokens_input), 0).label('tokens_input'),
            func.coalesce(func.sum(AIUsageLog.tokens_output), 0).label('tokens_output'),
            func.coalesce(func.sum(AIUsageLog.cost_cents), 0).label('cost_cents')
        ).join(
            User, User.company_id == Company.id
        ).join(
            AIUsageLog, AIUsageLog.user_id == User.id
        )
        company_query = apply_filters(company_query)
        company_stats = company_query.group_by(
            Company.id, Company.name
        ).order_by(desc('cost_cents')).all()

        # Format company stats for the template
        company_rankings = []
        for c in company_stats:
            company_rankings.append({
                'id': c.id,
                'name': c.name,
                'requests': c.requests,
                'unique_users': c.unique_users,
                'tokens': int(c.tokens_input) + int(c.tokens_output),
                'cost_cents': float(c.cost_cents or 0),
                'cost_usd': float(c.cost_cents or 0) / 100
            })

        # Recent logs with user info (filtered)
        recent_query = db.query(AIUsageLog)
        recent_query = apply_filters(recent_query)
        recent_logs = recent_query.order_by(desc(AIUsageLog.created_at)).limit(50).all()

        # Enrich recent logs with user names. Sets transient (non-column)
        # attributes on the ORM objects for template use only.
        # NOTE(review): one User query per log — N+1 for up to 50 logs;
        # a joinedload or single IN query would avoid it.
        for log in recent_logs:
            label, _ = type_labels.get(log.request_type, (log.request_type, 'other'))
            log.type_label = label
            if log.user_id:
                user = db.query(User).filter_by(id=log.user_id).first()
                if user:
                    log.user_name = user.name or user.email
                else:
                    log.user_name = None
            else:
                log.user_name = None

        # Daily history (last 14 days of pre-aggregated rows)
        daily_history = db.query(AIUsageDaily).filter(
            AIUsageDaily.date >= today - timedelta(days=14)
        ).order_by(desc(AIUsageDaily.date)).all()

        stats = {
            'today_requests': today_stats.requests or 0,
            # NOTE(review): `int(x) or 0` is redundant — coalesce already
            # guarantees a non-NULL sum (kept as-is, behavior unchanged).
            'today_tokens_input': int(today_stats.tokens_input) or 0,
            'today_tokens_output': int(today_stats.tokens_output) or 0,
            'today_cost': float(today_stats.cost_cents or 0) / 100,
            'week_requests': week_requests,
            'month_requests': month_stats.requests or 0,
            'month_cost': float(month_stats.cost_cents or 0) / 100,
            'all_requests': all_time_stats.requests or 0,
            'all_cost': float(all_time_stats.cost_cents or 0) / 100,
            'error_rate': error_rate,
            'avg_response_time': int(avg_response_time)
        }

        # Filtered stats summary (period + filters)
        filtered_query = db.query(
            func.count(AIUsageLog.id).label('requests'),
            func.coalesce(func.sum(AIUsageLog.cost_cents), 0).label('cost_cents'),
            func.coalesce(func.sum(AIUsageLog.tokens_input), 0).label('tokens_input'),
            func.coalesce(func.sum(AIUsageLog.tokens_output), 0).label('tokens_output')
        )
        filtered_query = apply_filters(filtered_query)
        filtered_stats = filtered_query.first()

        stats['filtered_requests'] = filtered_stats.requests or 0
        stats['filtered_cost'] = float(filtered_stats.cost_cents or 0) / 100
        stats['filtered_tokens'] = int(filtered_stats.tokens_input or 0) + int(filtered_stats.tokens_output or 0)

        # Dropdown options for filters: only users/companies/models that
        # actually have usage logs.
        filter_users = db.query(User.id, User.name, User.email).join(
            AIUsageLog, AIUsageLog.user_id == User.id
        ).group_by(User.id, User.name, User.email).order_by(User.name).all()

        filter_companies = db.query(Company.id, Company.name).join(
            User, User.company_id == Company.id
        ).join(
            AIUsageLog, AIUsageLog.user_id == User.id
        ).group_by(Company.id, Company.name).order_by(Company.name).all()

        filter_models = db.query(AIUsageLog.model).filter(
            AIUsageLog.model.isnot(None)
        ).distinct().order_by(AIUsageLog.model).all()
        filter_models = [m[0] for m in filter_models]  # unwrap 1-tuples

        # Current filters for template (used to pre-select dropdowns)
        has_filters = bool(filter_user_id or filter_company_id or filter_model)
        current_filters = {
            'user_id': filter_user_id,
            'company_id': filter_company_id,
            'model': filter_model,
            'date_from': date_from,
            'date_to': date_to
        }

        return render_template(
            'admin/ai_usage_dashboard.html',
            stats=stats,
            usage_by_type=usage_by_type,
            model_breakdown=model_breakdown,
            recent_logs=recent_logs,
            daily_history=daily_history,
            user_rankings=user_rankings,
            company_rankings=company_rankings,
            current_period=period,
            period_label=period_label,
            # Filter options
            filter_users=filter_users,
            filter_companies=filter_companies,
            filter_models=filter_models,
            current_filters=current_filters,
            has_filters=has_filters
        )
    finally:
        db.close()
|
|
|
|
|
|
@bp.route('/ai-usage/user/<int:user_id>')
@login_required
@role_required(SystemRole.OFFICE_MANAGER)
def admin_ai_usage_user(user_id):
    """Detailed AI usage for a specific user.

    Shows all-time totals, a per-request-type breakdown with percentages,
    and a paginated (50/page via ?page=N) list of the user's AIUsageLog
    rows. Redirects to the AI usage dashboard if the user does not exist.
    """
    db = SessionLocal()
    try:
        # Get user info; bail out early for unknown ids.
        user = db.query(User).filter_by(id=user_id).first()
        if not user:
            flash('Użytkownik nie istnieje.', 'error')
            return redirect(url_for('admin.admin_ai_usage'))

        company = None
        if user.company_id:
            company = db.query(Company).filter_by(id=user.company_id).first()

        # Get overall stats for this user.
        # NULLIF(success, True) is NULL for successful rows, so the COUNT
        # tallies rows where success != True (rows with NULL success are
        # also skipped, since COUNT ignores NULLs).
        stats = db.query(
            func.count(AIUsageLog.id).label('total_requests'),
            func.coalesce(func.sum(AIUsageLog.tokens_input), 0).label('tokens_input'),
            func.coalesce(func.sum(AIUsageLog.tokens_output), 0).label('tokens_output'),
            func.coalesce(func.sum(AIUsageLog.cost_cents), 0).label('cost_cents'),
            func.count(func.nullif(AIUsageLog.success, True)).label('errors')
        ).filter(AIUsageLog.user_id == user_id).first()

        # Usage by type: request_type -> user-facing label (Polish,
        # rendered as-is in the template).
        type_labels = {
            'ai_chat': 'Chat AI',
            'zopk_news_evaluation': 'Ocena newsów ZOP Kaszubia',
            'ai_user_parse': 'Tworzenie user',
            'gbp_audit_ai': 'Audyt GBP',
            'general': 'Ogólne'
        }

        type_stats = db.query(
            AIUsageLog.request_type,
            func.count(AIUsageLog.id).label('count'),
            func.coalesce(func.sum(AIUsageLog.tokens_input + AIUsageLog.tokens_output), 0).label('tokens'),
            func.coalesce(func.sum(AIUsageLog.cost_cents), 0).label('cost_cents')
        ).filter(
            AIUsageLog.user_id == user_id
        ).group_by(AIUsageLog.request_type).order_by(desc('count')).all()

        # Calculate total for percentages (defaults to 1 when there are
        # no rows so the division below cannot blow up).
        total_type_count = sum(t.count for t in type_stats) if type_stats else 1

        # NOTE(review): these CSS class names differ from the ones used in
        # admin_ai_usage's type_labels (e.g. 'news_evaluation' vs 'news') —
        # confirm the user-detail template styles both sets.
        type_classes = {
            'ai_chat': 'chat',
            'zopk_news_evaluation': 'news_evaluation',
            'ai_user_parse': 'user_creation',
            'gbp_audit_ai': 'image_analysis',
            'general': 'other'
        }

        usage_by_type = []
        for t in type_stats:
            usage_by_type.append({
                'type': t.request_type,
                'type_label': type_labels.get(t.request_type, t.request_type),
                'type_class': type_classes.get(t.request_type, 'other'),
                'count': t.count,
                'tokens': int(t.tokens),
                'cost_usd': float(t.cost_cents) / 100,  # cents stored; USD shown
                'percentage': round(t.count / total_type_count * 100, 1) if total_type_count > 0 else 0
            })

        # Get all requests for this user (paginated, newest first)
        page = request.args.get('page', 1, type=int)
        per_page = 50

        requests_query = db.query(AIUsageLog).filter(
            AIUsageLog.user_id == user_id
        ).order_by(desc(AIUsageLog.created_at))

        total_requests = requests_query.count()
        # Ceiling division: number of pages needed for total_requests rows.
        total_pages = (total_requests + per_page - 1) // per_page

        logs = requests_query.offset((page - 1) * per_page).limit(per_page).all()

        # Enrich logs with type labels and cost — transient attributes
        # set on the ORM objects for template use only.
        for log in logs:
            log.type_label = type_labels.get(log.request_type, log.request_type)
            log.cost_usd = float(log.cost_cents or 0) / 100

        user_stats = {
            'total_requests': stats.total_requests or 0,
            'tokens_total': int(stats.tokens_input or 0) + int(stats.tokens_output or 0),
            'tokens_input': int(stats.tokens_input or 0),
            'tokens_output': int(stats.tokens_output or 0),
            'cost_usd': float(stats.cost_cents or 0) / 100,
            'errors': stats.errors or 0
        }

        return render_template(
            'admin/ai_usage_user.html',
            user=user,
            company=company,
            stats=user_stats,
            usage_by_type=usage_by_type,
            logs=logs,
            page=page,
            total_pages=total_pages,
            total_requests=total_requests
        )
    finally:
        db.close()
|
|
|
|
|
|
@bp.route('/ai-usage/company/<int:company_id>')
@login_required
@role_required(SystemRole.OFFICE_MANAGER)
def admin_ai_usage_company(company_id):
    """Detailed AI usage report for a single company.

    Renders aggregate token/cost statistics, a per-user cost ranking,
    usage broken down by request type and by model, and a paginated
    listing of raw usage logs for every user assigned to the company.

    Query params:
        page: 1-based page number for the raw log listing (50 rows/page).
    """
    db = SessionLocal()
    try:
        company = db.query(Company).filter_by(id=company_id).first()
        if not company:
            flash('Firma nie istnieje.', 'error')
            return redirect(url_for('admin.admin_ai_usage'))

        # All users assigned to this company; their ids scope every query below.
        company_users = db.query(User).filter(User.company_id == company_id).all()
        company_user_ids = [u.id for u in company_users]

        if not company_user_ids:
            flash('Firma nie ma przypisanych użytkowników.', 'error')
            return redirect(url_for('admin.admin_ai_usage'))

        # Overall stats for this company.
        # nullif(success, True) maps successful rows to NULL, so count()
        # yields the number of failed requests.
        stats = db.query(
            func.count(AIUsageLog.id).label('total_requests'),
            func.coalesce(func.sum(AIUsageLog.tokens_input), 0).label('tokens_input'),
            func.coalesce(func.sum(AIUsageLog.tokens_output), 0).label('tokens_output'),
            func.coalesce(func.sum(AIUsageLog.cost_cents), 0).label('cost_cents'),
            func.count(func.nullif(AIUsageLog.success, True)).label('errors')
        ).filter(AIUsageLog.user_id.in_(company_user_ids)).first()

        # Human-readable labels for request types (shown in the template).
        type_labels = {
            'ai_chat': 'Chat AI',
            'zopk_news_evaluation': 'Ocena newsów ZOP Kaszubia',
            'ai_user_parse': 'Tworzenie user',
            'gbp_audit_ai': 'Audyt GBP',
            'general': 'Ogólne'
        }

        # Per-user totals, most expensive first.
        user_stats_list = db.query(
            User.id,
            User.name,
            User.email,
            func.count(AIUsageLog.id).label('requests'),
            func.coalesce(func.sum(AIUsageLog.tokens_input + AIUsageLog.tokens_output), 0).label('tokens'),
            func.coalesce(func.sum(AIUsageLog.cost_cents), 0).label('cost_cents')
        ).join(
            AIUsageLog, AIUsageLog.user_id == User.id
        ).filter(
            User.company_id == company_id
        ).group_by(User.id, User.name, User.email).order_by(desc('cost_cents')).all()

        user_rankings = [{
            'id': u.id,
            'name': u.name or u.email,
            'email': u.email,
            'requests': u.requests,
            'tokens': int(u.tokens),
            'cost_usd': float(u.cost_cents or 0) / 100
        } for u in user_stats_list]

        # Usage by type.  type_classes maps request types to CSS classes
        # used for styling in the template.
        type_classes = {
            'ai_chat': 'chat',
            'zopk_news_evaluation': 'news_evaluation',
            'ai_user_parse': 'user_creation',
            'gbp_audit_ai': 'image_analysis',
            'general': 'other'
        }

        type_stats = db.query(
            AIUsageLog.request_type,
            func.count(AIUsageLog.id).label('count'),
            func.coalesce(func.sum(AIUsageLog.tokens_input + AIUsageLog.tokens_output), 0).label('tokens'),
            func.coalesce(func.sum(AIUsageLog.cost_cents), 0).label('cost_cents')
        ).filter(
            AIUsageLog.user_id.in_(company_user_ids)
        ).group_by(AIUsageLog.request_type).order_by(desc('count')).all()

        # Fall back to 1 so the percentage division below is always safe.
        total_type_count = sum(t.count for t in type_stats) if type_stats else 1
        usage_by_type = [{
            'type': t.request_type,
            'type_label': type_labels.get(t.request_type, t.request_type),
            'type_class': type_classes.get(t.request_type, 'other'),
            'count': t.count,
            'tokens': int(t.tokens),
            'cost_usd': float(t.cost_cents) / 100,
            'percentage': round(t.count / total_type_count * 100, 1)
        } for t in type_stats]

        # Model breakdown.
        model_stats = db.query(
            AIUsageLog.model,
            func.count(AIUsageLog.id).label('count'),
            func.coalesce(func.sum(AIUsageLog.cost_cents), 0).label('cost_cents')
        ).filter(
            AIUsageLog.user_id.in_(company_user_ids)
        ).group_by(AIUsageLog.model).order_by(desc('count')).all()

        total_model_count = sum(m.count for m in model_stats) if model_stats else 1
        model_breakdown = [{
            'model': m.model or 'unknown',
            'count': m.count,
            'cost_usd': float(m.cost_cents or 0) / 100,
            'percentage': round(m.count / total_model_count * 100, 1)
        } for m in model_stats]

        # Recent logs (paginated).
        page = request.args.get('page', 1, type=int)
        per_page = 50

        logs_query = db.query(AIUsageLog).filter(
            AIUsageLog.user_id.in_(company_user_ids)
        ).order_by(desc(AIUsageLog.created_at))

        total_requests = logs_query.count()
        total_pages = (total_requests + per_page - 1) // per_page

        logs = logs_query.offset((page - 1) * per_page).limit(per_page).all()

        # Resolve display names once from the already-loaded company users
        # (fixes an N+1 pattern that issued one User query per log row).
        # Logs are filtered to company_user_ids, so every user_id is covered.
        user_names = {u.id: (u.name or u.email) for u in company_users}
        for log in logs:
            log.type_label = type_labels.get(log.request_type, log.request_type)
            log.cost_usd = float(log.cost_cents or 0) / 100
            log.user_name = user_names.get(log.user_id) if log.user_id else None

        company_stats = {
            'total_requests': stats.total_requests or 0,
            'tokens_total': int(stats.tokens_input or 0) + int(stats.tokens_output or 0),
            'tokens_input': int(stats.tokens_input or 0),
            'tokens_output': int(stats.tokens_output or 0),
            'cost_usd': float(stats.cost_cents or 0) / 100,
            'errors': stats.errors or 0,
            'unique_users': len(user_rankings)
        }

        return render_template(
            'admin/ai_usage_company.html',
            company=company,
            stats=company_stats,
            user_rankings=user_rankings,
            usage_by_type=usage_by_type,
            model_breakdown=model_breakdown,
            logs=logs,
            page=page,
            total_pages=total_pages,
            total_requests=total_requests
        )
    finally:
        db.close()
|
|
|
|
|
|
@bp.route('/ai-usage/export')
@login_required
@role_required(SystemRole.OFFICE_MANAGER)
def admin_ai_usage_export():
    """Export AI usage logs as a CSV attachment, honouring dashboard filters.

    Query params:
        period: 'day' | 'week' | 'month' | 'all' | 'custom' (default 'month').
        date_from / date_to: ISO dates, only used when period == 'custom'.
        user_id, company_id, model: optional filters.

    Returns a text/csv response capped at the 10 000 most recent rows.
    On failure, flashes an error and redirects back to the dashboard.
    """
    period = request.args.get('period', 'month')
    filter_user_id = request.args.get('user_id', type=int)
    filter_company_id = request.args.get('company_id', type=int)
    filter_model = request.args.get('model', '')
    date_from = request.args.get('date_from', '')
    date_to = request.args.get('date_to', '')

    today = date.today()

    db = SessionLocal()
    try:
        # Determine the reporting window.
        if period == 'custom' and date_from:
            try:
                period_start = date.fromisoformat(date_from)
                period_end = date.fromisoformat(date_to) if date_to else today
            except ValueError:
                # Malformed dates fall back to the default 30-day window.
                period_start = today - timedelta(days=30)
                period_end = today
        else:
            period_end = None
            periods = {
                'day': today,
                'week': today - timedelta(days=7),
                'month': today - timedelta(days=30),
                'all': None
            }
            period_start = periods.get(period, today - timedelta(days=30))

        # Build the query; eager-load user/company to avoid per-row lookups.
        query = db.query(AIUsageLog).options(
            joinedload(AIUsageLog.user),
            joinedload(AIUsageLog.company)
        )

        if period_start:
            query = query.filter(func.date(AIUsageLog.created_at) >= period_start)
        if period == 'custom' and period_end:
            query = query.filter(func.date(AIUsageLog.created_at) <= period_end)
        if filter_user_id:
            query = query.filter(AIUsageLog.user_id == filter_user_id)
        if filter_company_id:
            company_user_ids = [u.id for u in db.query(User.id).filter(User.company_id == filter_company_id).all()]
            if company_user_ids:
                query = query.filter(AIUsageLog.user_id.in_(company_user_ids))
            else:
                # Company has no users: force an empty result set.
                query = query.filter(AIUsageLog.id == -1)
        if filter_model:
            query = query.filter(AIUsageLog.model == filter_model)

        logs = query.order_by(desc(AIUsageLog.created_at)).limit(10000).all()

        output = io.StringIO()
        writer = csv.writer(output)
        writer.writerow([
            'Data', 'Uzytkownik', 'Email', 'Firma', 'Model', 'Typ',
            'Tokeny_in', 'Tokeny_out', 'Koszt_USD', 'Czas_ms', 'Sukces', 'Blad'
        ])

        for log in logs:
            user_name = ''
            user_email = ''
            company_name = ''
            if log.user:
                user_name = log.user.name or ''
                user_email = log.user.email or ''
            if log.company:
                company_name = log.company.name or ''
            elif log.user and log.user.company_id:
                # No direct company link on the log; fall back to the
                # user's company.
                comp = db.query(Company).filter_by(id=log.user.company_id).first()
                company_name = comp.name if comp else ''

            writer.writerow([
                log.created_at.strftime('%Y-%m-%d %H:%M:%S') if log.created_at else '',
                user_name,
                user_email,
                company_name,
                log.model or '',
                log.request_type or '',
                log.tokens_input or 0,
                log.tokens_output or 0,
                f'{float(log.cost_cents or 0) / 100:.6f}',
                log.response_time_ms or 0,
                'TAK' if log.success else 'NIE',
                log.error_message or ''
            ])

        output.seek(0)
        filename = f'ai_usage_{period}_{today.isoformat()}.csv'
        return Response(
            output.getvalue(),
            mimetype='text/csv',
            # Fix: interpolate the computed filename into the header
            # (previously the variable was built but never used).
            headers={'Content-Disposition': f'attachment; filename={filename}'}
        )
    except Exception as e:
        logger.error(f"AI usage export error: {e}")
        flash('Blad podczas eksportu.', 'error')
        return redirect(url_for('admin.admin_ai_usage'))
    finally:
        db.close()
|