nordabiz/blueprints/admin/routes_user_insights.py
Maciej Pienczyn 42e42c5fa0
Some checks are pending
NordaBiz Tests / Unit & Integration Tests (push) Waiting to run
NordaBiz Tests / E2E Tests (Playwright) (push) Blocked by required conditions
NordaBiz Tests / Smoke Tests (Production) (push) Blocked by required conditions
NordaBiz Tests / Send Failure Notification (push) Blocked by required conditions
feat: simplify Chat tab - topic categories and cleaner feedback view
Replace anonymized metadata ("45 chars") with real topic categories
(O firmach, Szukanie kontaktu, O wydarzeniach, etc). Remove empty
conversion stats, UTM sources, avg message length. Keep feedback
with ratings in compact two-column layout.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-11 04:05:15 +01:00

2159 lines
83 KiB
Python
Raw Blame History

This file contains ambiguous Unicode characters

This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.

"""
Admin User Insights Routes
============================
User Insights Dashboard - problem detection, engagement scoring,
page popularity, user flows, and behavioral profiles.
"""
import csv
import io
import logging
import math
import os
import secrets
from datetime import date, timedelta, datetime
from flask import render_template, request, redirect, url_for, flash, Response, jsonify
from flask_login import login_required, current_user
from sqlalchemy import func, desc, text, or_
from sqlalchemy.orm import joinedload
from . import bp
from database import (
SessionLocal, User, UserSession, PageView, SearchQuery,
ConversionEvent, JSError, EmailLog, SecurityAlert,
AuditLog, AnalyticsDaily, SystemRole,
AIChatConversation, AIChatMessage, AIChatFeedback,
Company, NordaEvent, Announcement, AnnouncementRead,
ForumTopic, ForumTopicRead, Classified, ClassifiedRead
)
from utils.decorators import role_required
logger = logging.getLogger(__name__)
def _non_bot_sessions(db, start_dt=None):
    """Build a query of non-bot session IDs, optionally limited to sessions
    started at or after *start_dt*. Used as a subquery to filter page_views."""
    query = db.query(UserSession.id).filter(UserSession.is_bot == False)
    return query.filter(UserSession.started_at >= start_dt) if start_dt else query
def _log_engagement_score(raw):
"""Logarithmic engagement score: better distribution than linear capped at 100."""
if raw <= 0:
return 0
return min(100, int(math.log2(raw + 1) * 6))
def _get_period_dates(period):
"""Return (start_date, days) for given period string."""
today = date.today()
if period == 'day':
return today, 1
elif period == 'month':
return today - timedelta(days=30), 30
else: # week (default)
return today - timedelta(days=7), 7
# ============================================================
# MAIN DASHBOARD
# ============================================================
@bp.route('/analytics')
@login_required
@role_required(SystemRole.OFFICE_MANAGER)
def user_insights():
    """Consolidated Analytics Dashboard - 6 tabs.

    Dispatches on the ?tab= query param to a per-tab data builder and renders
    the shared template. The legacy 'paths' tab redirects to 'overview'.

    BUGFIX: the original had a function-local `from flask import redirect`
    inside the 'paths' branch. An import is an assignment, so `redirect`
    became a local name for the WHOLE function — and the `redirect(...)`
    call in the except handler raised UnboundLocalError whenever that branch
    had not executed. The module-level import (top of file) is used instead.
    """
    tab = request.args.get('tab', 'overview')
    period = request.args.get('period', 'week')
    start_date, days = _get_period_dates(period)
    db = SessionLocal()
    try:
        data = {}
        if tab == 'problems':
            data = _tab_problems(db, start_date, days)
        elif tab == 'engagement':
            data = _tab_engagement(db, start_date, days)
        elif tab == 'pages':
            data = _tab_pages(db, start_date, days)
        elif tab == 'paths':
            # Redirect old paths tab to overview (uses module-level redirect).
            return redirect(url_for('admin.user_insights', tab='overview', period=period))
        elif tab == 'overview':
            data = _tab_overview(db, start_date, days)
        elif tab == 'chat':
            data = _tab_chat(db, start_date, days)
        return render_template(
            'admin/user_insights.html',
            tab=tab,
            period=period,
            data=data
        )
    except Exception as e:
        logger.error(f"User insights error: {e}", exc_info=True)
        flash('Błąd ładowania danych insights.', 'error')
        return redirect(url_for('admin.admin_users'))
    finally:
        db.close()
@bp.route('/user-insights')
@login_required
def user_insights_redirect():
    """Redirect the legacy /user-insights URL to the consolidated analytics view.

    Forwards all query-string arguments (tab, period, ...) unchanged.
    NOTE(review): no role check here — the target route itself enforces
    OFFICE_MANAGER, so this endpoint only issues the redirect.
    """
    return redirect(url_for('admin.user_insights', **request.args))
# ============================================================
# TAB 1: PROBLEMS
# ============================================================
def _tab_problems(db, start_date, days):
    """Problem detection tab - identify users with issues.

    Builds stat-card totals (locked accounts, failed logins, password resets,
    JS errors, slow pages) and a priority-sorted alert list of users who
    appear stuck: never logged in, locked out, password reset without a
    subsequent login, or repeated resets.

    Args:
        db: SQLAlchemy session.
        start_date: date — start of the selected reporting period.
        days: int — period length in days (used in alert message text).

    Returns:
        dict consumed by the 'problems' tab of admin/user_insights.html.
    """
    now = datetime.now()
    start_dt = datetime.combine(start_date, datetime.min.time())
    # Fixed 30-day lookback for reset-related alerts, independent of the period.
    start_30d = datetime.combine(date.today() - timedelta(days=30), datetime.min.time())
    # --- Stat cards ------------------------------------------------------
    locked_accounts = db.query(func.count(User.id)).filter(
        User.locked_until > now, User.is_active == True
    ).scalar() or 0
    failed_logins_total = db.query(func.count(AuditLog.id)).filter(
        AuditLog.action == 'login_failed',
        AuditLog.created_at >= start_dt
    ).scalar() or 0
    password_resets_total = db.query(func.count(EmailLog.id)).filter(
        EmailLog.email_type == 'password_reset',
        EmailLog.created_at >= start_dt
    ).scalar() or 0
    js_errors_total = db.query(func.count(JSError.id)).filter(
        JSError.occurred_at >= start_dt
    ).scalar() or 0
    # "Slow" means page load over 3 seconds.
    # NOTE(review): slow_pages_total is computed but not included in the
    # returned dict (only the grouped slow_pages list is) — confirm intent.
    slow_pages_total = db.query(func.count(PageView.id)).filter(
        PageView.viewed_at >= start_dt,
        PageView.load_time_ms > 3000
    ).scalar() or 0
    # --- Alerts — users who need help ------------------------------------
    alerts = []
    # Alert: Never logged in (account > 7 days old)
    never_logged = db.query(User).filter(
        User.is_active == True,
        User.last_login.is_(None),
        User.created_at < now - timedelta(days=7)
    ).all()
    # Batch-fetch welcome emails for the never-logged users (avoids N+1).
    never_logged_emails = {u.email for u in never_logged}
    welcomed_emails = set()
    if never_logged_emails:
        welcomed_emails = set(e for (e,) in db.query(EmailLog.recipient_email).filter(
            EmailLog.recipient_email.in_(never_logged_emails),
            EmailLog.email_type == 'welcome'
        ).all())
    for u in never_logged:
        has_welcome = u.email in welcomed_emails
        # Escalate to critical when the user actually tried (and failed) to log in.
        priority = 'critical' if (u.failed_login_attempts or 0) >= 3 else 'high'
        alerts.append({
            'type': 'never_logged_in',
            'priority': priority,
            'user': u,
            'message': f'Nigdy nie zalogowany ({(now - u.created_at).days}d od rejestracji)',
            'detail': f'Prób logowania: {u.failed_login_attempts or 0}. Email powitalny: {"Tak" if has_welcome else "NIE WYSŁANO"}.',
        })
    # Alert: Account locked
    locked_users = db.query(User).filter(
        User.locked_until > now, User.is_active == True
    ).all()
    for u in locked_users:
        alerts.append({
            'type': 'locked',
            'priority': 'critical',
            'user': u,
            'message': f'Konto zablokowane (do {u.locked_until.strftime("%d.%m %H:%M")})',
            'detail': f'Nieudane próby: {u.failed_login_attempts or 0}.',
        })
    # Alert: Reset without effect (reset sent > 24h ago, no login after).
    # Only resets with status 'sent' within the last 30 days are considered.
    recent_resets = db.query(
        EmailLog.recipient_email,
        func.max(EmailLog.created_at).label('last_reset')
    ).filter(
        EmailLog.email_type == 'password_reset',
        EmailLog.created_at >= start_30d,
        EmailLog.status == 'sent'
    ).group_by(EmailLog.recipient_email).all()
    # Batch: user lookups + latest successful login per email (avoids N+1).
    reset_emails = [r.recipient_email for r in recent_resets]
    users_by_email = {}
    login_after_map = {}
    if reset_emails:
        users_by_email = {u.email: u for u in db.query(User).filter(
            User.email.in_(reset_emails), User.is_active == True
        ).all()}
        login_after_map = dict(db.query(
            AuditLog.user_email, func.max(AuditLog.created_at)
        ).filter(
            AuditLog.user_email.in_(reset_emails),
            AuditLog.action == 'login'
        ).group_by(AuditLog.user_email).all())
    for r in recent_resets:
        u = users_by_email.get(r.recipient_email)
        if not u:
            continue
        last_login_after = login_after_map.get(u.email)
        has_login_after = last_login_after is not None and last_login_after > r.last_reset
        # 24h grace period before flagging the reset as ineffective.
        if not has_login_after and r.last_reset < now - timedelta(hours=24):
            alerts.append({
                'type': 'reset_no_effect',
                'priority': 'high',
                'user': u,
                'message': f'Reset hasła bez efektu (wysłany {r.last_reset.strftime("%d.%m %H:%M")})',
                'detail': 'Użytkownik nie zalogował się po otrzymaniu emaila z resetem hasła.',
            })
    # Alert: Repeated resets (>= 3 within the selected period)
    repeat_resets = db.query(
        EmailLog.recipient_email,
        func.count(EmailLog.id).label('cnt')
    ).filter(
        EmailLog.email_type == 'password_reset',
        EmailLog.created_at >= start_dt
    ).group_by(EmailLog.recipient_email).having(func.count(EmailLog.id) >= 3).all()
    for r in repeat_resets:
        u = users_by_email.get(r.recipient_email)
        if not u:
            # Fallback lookup: users_by_email only covers the 30d 'sent' batch.
            u = db.query(User).filter(User.email == r.recipient_email, User.is_active == True).first()
        if u:
            # Suppress duplicate: skip users already flagged as reset_no_effect.
            if not any(a['user'].id == u.id and a['type'] == 'reset_no_effect' for a in alerts):
                alerts.append({
                    'type': 'repeat_resets',
                    'priority': 'high',
                    'user': u,
                    'message': f'{r.cnt} resetów hasła w {days}d',
                    'detail': 'Wielokrotne resety mogą wskazywać na problem z emailem lub hasłem.',
                })
    # Sort alerts: critical first, then high, then medium; unknown priorities last.
    priority_order = {'critical': 0, 'high': 1, 'medium': 2}
    alerts.sort(key=lambda a: priority_order.get(a['priority'], 3))
    never_logged_count = len(never_logged)
    # Top JS errors (grouped by message, for tech section)
    top_js_errors = db.query(
        JSError.message,
        func.count(JSError.id).label('cnt')
    ).filter(
        JSError.occurred_at >= start_dt
    ).group_by(JSError.message).order_by(desc('cnt')).limit(5).all()
    # Slow pages (grouped by path)
    slow_pages_top = db.query(
        PageView.path,
        func.count(PageView.id).label('cnt'),
        func.avg(PageView.load_time_ms).label('avg_ms')
    ).filter(
        PageView.viewed_at >= start_dt,
        PageView.load_time_ms > 3000
    ).group_by(PageView.path).order_by(desc('cnt')).limit(5).all()
    return {
        'locked_accounts': locked_accounts,
        'never_logged_in': never_logged_count,
        'failed_logins': failed_logins_total,
        'password_resets': password_resets_total,
        'alerts': alerts,
        'js_errors': js_errors_total,
        'top_js_errors': [{'message': r.message[:80], 'count': r.cnt} for r in top_js_errors],
        'slow_pages': [{'path': r.path, 'label': _humanize_path(r.path, db), 'count': r.cnt, 'avg_ms': int(r.avg_ms)} for r in slow_pages_top],
    }
# ============================================================
# TAB 2: ENGAGEMENT
# ============================================================
def _tab_engagement(db, start_date, days):
    """Engagement ranking tab.

    Computes stat cards (active in period, new this month, at-risk, dormant)
    plus a per-user engagement ranking. All per-user metrics come from
    batched GROUP BY queries keyed on user_id instead of per-user loops.

    Args:
        db: SQLAlchemy session.
        start_date: date — start of the selected period.
        days: int — period length (used to derive the previous period).

    Returns:
        dict for the 'engagement' tab template.
    """
    now = datetime.now()
    start_dt = datetime.combine(start_date, datetime.min.time())
    # 30-day window used for score components regardless of selected period.
    start_30d = datetime.combine(date.today() - timedelta(days=30), datetime.min.time())
    # Previous period of equal length (for trend comparison maps).
    prev_start = datetime.combine(start_date - timedelta(days=days), datetime.min.time())
    first_of_month_dt = datetime.combine(date.today().replace(day=1), datetime.min.time())
    # Stat cards — SQL queries instead of Python loops, exclude UNAFFILIATED
    base_filter = [User.is_active == True, User.role != 'UNAFFILIATED']
    active_7d = db.query(func.count(func.distinct(UserSession.user_id))).filter(
        UserSession.user_id.isnot(None),
        UserSession.started_at >= start_dt,
        UserSession.is_bot == False
    ).scalar() or 0
    new_this_month = db.query(func.count(User.id)).filter(
        *base_filter, User.created_at >= first_of_month_dt
    ).scalar() or 0
    # At risk = last login between 7 and 30 days ago.
    at_risk = db.query(func.count(User.id)).filter(
        *base_filter,
        User.last_login.isnot(None),
        User.last_login >= now - timedelta(days=30),
        User.last_login < now - timedelta(days=7)
    ).scalar() or 0
    # Dormant = last login > 30d ago (NOT including never-logged-in)
    dormant = db.query(func.count(User.id)).filter(
        *base_filter,
        User.last_login.isnot(None),
        User.last_login < now - timedelta(days=30)
    ).scalar() or 0
    # Batch queries — replace N+1 per-user loops.
    # Sessions per user, current period:
    sess_cur_map = dict(db.query(
        UserSession.user_id, func.count(UserSession.id)
    ).filter(
        UserSession.started_at >= start_dt,
        UserSession.user_id.isnot(None),
        UserSession.is_bot == False
    ).group_by(UserSession.user_id).all())
    # Page views per user, current period (bot sessions excluded via join):
    pv_cur_map = dict(db.query(
        PageView.user_id, func.count(PageView.id)
    ).join(UserSession, PageView.session_id == UserSession.id).filter(
        PageView.viewed_at >= start_dt,
        PageView.user_id.isnot(None),
        UserSession.is_bot == False
    ).group_by(PageView.user_id).all())
    # Same two maps for the previous period (for trend display).
    sess_prev_map = dict(db.query(
        UserSession.user_id, func.count(UserSession.id)
    ).filter(
        UserSession.started_at >= prev_start,
        UserSession.started_at < start_dt,
        UserSession.user_id.isnot(None),
        UserSession.is_bot == False
    ).group_by(UserSession.user_id).all())
    pv_prev_map = dict(db.query(
        PageView.user_id, func.count(PageView.id)
    ).join(UserSession, PageView.session_id == UserSession.id).filter(
        PageView.viewed_at >= prev_start,
        PageView.viewed_at < start_dt,
        PageView.user_id.isnot(None),
        UserSession.is_bot == False
    ).group_by(PageView.user_id).all())
    # 30d components: sessions, clicks, duration — single query.
    # Duration is capped at 3600s per session to limit idle-tab outliers.
    sess_30d_rows = db.query(
        UserSession.user_id,
        func.count(UserSession.id).label('cnt'),
        func.coalesce(func.sum(UserSession.clicks_count), 0).label('clicks'),
        func.coalesce(func.sum(func.least(UserSession.duration_seconds, 3600)), 0).label('dur')
    ).filter(
        UserSession.started_at >= start_30d,
        UserSession.user_id.isnot(None),
        UserSession.is_bot == False
    ).group_by(UserSession.user_id).all()
    s30_map = {r.user_id: r.cnt for r in sess_30d_rows}
    clicks30_map = {r.user_id: r.clicks for r in sess_30d_rows}
    dur30_map = {r.user_id: r.dur for r in sess_30d_rows}
    pv30_map = dict(db.query(
        PageView.user_id, func.count(PageView.id)
    ).join(UserSession, PageView.session_id == UserSession.id).filter(
        PageView.viewed_at >= start_30d,
        PageView.user_id.isnot(None),
        UserSession.is_bot == False
    ).group_by(PageView.user_id).all())
    conv30_map = dict(db.query(
        ConversionEvent.user_id, func.count(ConversionEvent.id)
    ).filter(
        ConversionEvent.converted_at >= start_30d
    ).group_by(ConversionEvent.user_id).all())
    search30_map = dict(db.query(
        SearchQuery.user_id, func.count(SearchQuery.id)
    ).filter(
        SearchQuery.searched_at >= start_30d
    ).group_by(SearchQuery.user_id).all())
    # Sparkline — 1 query instead of 7×N: page views per user per day (7 days).
    # NOTE(review): sparkline_map and spark_days are built but not attached to
    # engagement_list entries or the return value below — confirm whether the
    # template still uses them or this is leftover from a removed feature.
    spark_start = datetime.combine(date.today() - timedelta(days=6), datetime.min.time())
    sparkline_raw = db.query(
        PageView.user_id,
        func.date(PageView.viewed_at).label('day'),
        func.count(PageView.id).label('cnt')
    ).join(UserSession, PageView.session_id == UserSession.id).filter(
        PageView.viewed_at >= spark_start,
        PageView.user_id.isnot(None),
        UserSession.is_bot == False
    ).group_by(PageView.user_id, func.date(PageView.viewed_at)).all()
    sparkline_map = {}
    for row in sparkline_raw:
        sparkline_map.setdefault(row.user_id, {})[row.day] = row.cnt
    # Build engagement list from the batch maps.
    registered_users = db.query(User).filter(
        User.is_active == True, User.role != 'UNAFFILIATED'
    ).all()
    spark_days = [date.today() - timedelta(days=6 - i) for i in range(7)]
    engagement_list = []
    for user in registered_users:
        uid = user.id
        sessions_cur = sess_cur_map.get(uid, 0)
        pv_cur = pv_cur_map.get(uid, 0)
        pv_prev = pv_prev_map.get(uid, 0)
        s30 = s30_map.get(uid, 0)
        pv30 = pv30_map.get(uid, 0)
        clicks30 = clicks30_map.get(uid, 0)
        dur30 = dur30_map.get(uid, 0)
        conv30 = conv30_map.get(uid, 0)
        search30 = search30_map.get(uid, 0)
        minutes30 = round(int(dur30) / 60, 1)
        # Weighted raw score: conversions weigh most, then sessions/minutes.
        raw = (s30 * 3 + pv30 * 1 + int(clicks30) * 0.5 +
               minutes30 * 2 + conv30 * 10 + search30 * 2)
        score = _log_engagement_score(raw)
        # Human-readable "last login" label (Polish).
        days_since_login = None
        if user.last_login:
            days_since_login = (date.today() - user.last_login.date()).days
        if days_since_login is not None:
            if days_since_login == 0:
                last_login = 'Dziś'
            elif days_since_login == 1:
                last_login = 'Wczoraj'
            elif days_since_login <= 7:
                last_login = f'{days_since_login} dni temu'
            elif days_since_login <= 30:
                weeks = days_since_login // 7
                last_login = f'{weeks} tyg. temu'
            else:
                months = days_since_login // 30
                # months is always >= 1 here (days_since_login > 30); the
                # else branch is defensive.
                if months > 0:
                    last_login = f'{months} mies. temu'
                else:
                    last_login = f'{days_since_login} dni temu'
        else:
            last_login = 'Nigdy'
        engagement_list.append({
            'user': user,
            'score': score,
            'sessions': sessions_cur,
            'page_views': pv_cur,
            'last_login': last_login,
            'days_since': days_since_login,
            'detail': {
                'sessions_30d': s30,
                'pages_30d': pv30,
                'clicks_30d': int(clicks30),
                'minutes_30d': minutes30,
                'conversions_30d': conv30,
                'searches_30d': search30,
                'raw': round(raw, 1),
            },
        })
    engagement_list.sort(key=lambda x: x['score'], reverse=True)
    return {
        'active_7d': active_7d,
        'at_risk': at_risk,
        'dormant': dormant,
        'new_this_month': new_this_month,
        'engagement_list': engagement_list,
    }
# ============================================================
# TAB 3: PAGE MAP
# ============================================================
EXCLUDED_PATH_PREFIXES = (
'/sw.js', '/robots.txt', '/sitemap.xml', '/favicon',
'/apple-touch-icon', '/.well-known/', '/.git/', '/.env',
'/check-verification-status', '/manifest.json',
'/sito/', '/wordpress/', '/wp-', '/xmlrpc',
)
EXCLUDED_PATH_CONTAINS = (
'.env', '.php', '.xml', '.asp', '.jsp', '.cgi',
'wp-includes', 'wp-admin', 'wp-content', 'wp-login',
'wlwmanifest', 'aws-config', 'phpinfo', 'phpmyadmin',
'config.js', 'info.php',
)
def _is_technical_path(path):
"""Check if path is a technical/system/scanner endpoint (not a user page)."""
if not path:
return True
for excl in EXCLUDED_PATH_PREFIXES:
if path.startswith(excl) or path == excl:
return True
path_lower = path.lower()
for excl in EXCLUDED_PATH_CONTAINS:
if excl in path_lower:
return True
return False
def _mask_token_path(path):
"""Mask security tokens in paths like /reset-password/<token>."""
import re
path = re.sub(r'(/reset-password/)[A-Za-z0-9_-]{10,}', r'\1***', path)
path = re.sub(r'(/verify-email/)[A-Za-z0-9_-]{10,}', r'\1***', path)
return path
def _format_time(seconds):
"""Format seconds to human-readable string."""
if seconds <= 0:
return '0s'
if seconds < 60:
return f'{seconds}s'
minutes = seconds // 60
secs = seconds % 60
if minutes < 60:
return f'{minutes}m {secs}s' if secs else f'{minutes}m'
hours = minutes // 60
mins = minutes % 60
return f'{hours}h {mins}m'
# Static path → Polish display-label map used by _humanize_path.
# Keys ending with '/' also act as prefixes for the prefix-match fallback.
PATH_LABELS = {
    '/': 'Strona główna',
    '/login': 'Logowanie',
    '/register': 'Rejestracja',
    '/dashboard': 'Panel użytkownika',
    '/search': 'Wyszukiwarka',
    '/szukaj': 'Wyszukiwarka',
    '/chat': 'NordaGPT',
    '/forum': 'Forum',
    '/forum/': 'Forum',
    '/kalendarz/': 'Kalendarz',
    '/tablica/': 'Tablica B2B',
    '/ogloszenia': 'Aktualności',
    '/wiadomosci': 'Wiadomości',
    '/edukacja': 'Edukacja',
    '/rada': 'Rada Izby',
    '/korzysci': 'Korzyści członkostwa',
    '/debugbar/': 'Debug Bar',
}
def _humanize_path(path, db=None, _cache={}):
    """Convert a raw URL path to a human-readable Polish label.

    Resolution order: exact PATH_LABELS match → dynamic event/company lookup
    (when *db* is given) → PATH_LABELS prefix match → '/admin/' prettifier →
    the raw path unchanged.

    Args:
        path: URL path string (may be None/empty — returned as-is).
        db: optional SQLAlchemy session for event/company name lookups.
        _cache: deliberate mutable-default dict used as a process-lifetime
            memo for DB lookups. NOTE(review): it is shared across all calls
            and never invalidated, so renamed events/companies keep their old
            label until the process restarts — confirm that is acceptable.
    """
    import re
    if not path:
        return path
    # Exact match first
    if path in PATH_LABELS:
        return PATH_LABELS[path]
    # Dynamic: /kalendarz/<id> → Event title (checked BEFORE the prefix match,
    # which would otherwise swallow these paths via the '/kalendarz/' key).
    event_match = re.match(r'^/kalendarz/(\d+)(/.*)?$', path)
    if event_match and db:
        eid = int(event_match.group(1))
        suffix = event_match.group(2) or ''
        cache_key = f'event_{eid}'
        if cache_key not in _cache:
            ev = db.query(NordaEvent.title).filter_by(id=eid).first()
            _cache[cache_key] = ev.title if ev else f'Wydarzenie #{eid}'
        name = _cache[cache_key]
        if suffix == '/rsvp':
            return f'{name} (RSVP)'
        return name
    # Dynamic: /company/<id_or_slug> → Company name
    company_match = re.match(r'^/company/([a-z0-9-]+)(/.*)?$', path)
    if company_match and db:
        id_or_slug = company_match.group(1)
        suffix = company_match.group(2) or ''
        cache_key = f'company_{id_or_slug}'
        if cache_key not in _cache:
            # Try as numeric ID first, then as slug
            if id_or_slug.isdigit():
                co = db.query(Company.name).filter_by(id=int(id_or_slug)).first()
            else:
                co = db.query(Company.name).filter_by(slug=id_or_slug).first()
            _cache[cache_key] = co.name if co else id_or_slug
        name = _cache[cache_key]
        if suffix == '/edit':
            return f'{name} (edycja)'
        return name
    # Prefix match (e.g. /forum/topic/5 → "Forum: topic/5"); '/' itself is
    # excluded so it never matches everything. Sub-paths starting with
    # 'admin' fall back to the bare section label.
    for prefix, label in PATH_LABELS.items():
        if prefix.endswith('/') and path.startswith(prefix) and prefix != '/':
            rest = path[len(prefix):]
            if rest and not rest.startswith('admin'):
                return f'{label}: {rest}'
            return label
    # Admin paths: '/admin/some-page' → 'Admin: Some Page'
    if path.startswith('/admin/'):
        return path.replace('/admin/', 'Admin: ').replace('-', ' ').title()
    return path
def _tab_pages(db, start_date, days):
    """Page popularity map.

    Aggregates traffic by site section, lists the top-50 individual pages,
    low-traffic ("ignored") pages, search statistics (top queries, zero-result
    queries, CTR), and 30-day content-engagement open rates.

    Args:
        db: SQLAlchemy session.
        start_date: date — start of the selected period.
        days: int — period length (unused directly; kept for a uniform
            _tab_* signature).

    Returns:
        dict for the 'pages' tab template.
    """
    start_dt = datetime.combine(start_date, datetime.min.time())
    # Page sections with grouping (expanded to cover ~95% of traffic).
    # Values are path prefixes matched with LIKE 'prefix%'.
    section_map = {
        'Strona główna': ['/'],
        'Profile firm': ['/company/'],
        'Forum': ['/forum'],
        'Chat': ['/chat'],
        'Wyszukiwarka': ['/search', '/szukaj'],
        'Wydarzenia': ['/events', '/wydarzenia', '/kalendarz'],
        'Ogłoszenia': ['/classifieds', '/ogloszenia', '/tablica'],
        'Członkostwo': ['/membership', '/czlonkostwo', '/korzysci'],
        'Logowanie': ['/login', '/register', '/forgot-password', '/reset-password', '/verify-email'],
        'Panel użytkownika': ['/dashboard', '/konto'],
        'Wiadomości': ['/wiadomosci'],
        'Edukacja': ['/edukacja'],
        'Rada': ['/rada'],
        'ZOPK': ['/zopk'],
        'Kontakty': ['/kontakty'],
        'Raporty': ['/raporty'],
        'Admin': ['/admin'],
    }
    sections = []
    for name, prefixes in section_map.items():
        conditions = [PageView.path.like(p + '%') for p in prefixes]
        # Homepage must match exactly, otherwise '/%' matches every path.
        if prefixes == ['/']:
            conditions = [PageView.path == '/']
        # Cap time_on_page at 1800s (30 min) to exclude outliers (tabs left open)
        capped_time = func.least(PageView.time_on_page_seconds, 1800)
        q = db.query(
            func.count(PageView.id).label('views'),
            func.count(func.distinct(PageView.user_id)).label('unique_users'),
            func.avg(capped_time).label('avg_time')
        ).join(UserSession, PageView.session_id == UserSession.id).filter(
            or_(*conditions),
            PageView.viewed_at >= start_dt,
            UserSession.is_bot == False
        ).first()
        avg_time_val = int(q.avg_time or 0)
        sections.append({
            'name': name,
            'views': q.views or 0,
            'unique_users': q.unique_users or 0,
            'avg_time': avg_time_val,
            'avg_time_fmt': _format_time(avg_time_val),
        })
    # Heatmap intensity relative to the busiest section (guard against 0).
    max_views = max((s['views'] for s in sections), default=1) or 1
    for s in sections:
        s['intensity'] = min(100, int(s['views'] / max_views * 100))
    # Top 50 pages (exclude bots + technical paths). Fetch 80 so that
    # post-filtering technical paths still leaves up to 50 rows.
    capped_time = func.least(PageView.time_on_page_seconds, 1800)
    top_pages_raw = db.query(
        PageView.path,
        func.count(PageView.id).label('views'),
        func.count(func.distinct(PageView.user_id)).label('unique_users'),
        func.avg(capped_time).label('avg_time'),
        func.avg(PageView.scroll_depth_percent).label('avg_scroll'),
        func.avg(PageView.load_time_ms).label('avg_load'),
    ).join(UserSession, PageView.session_id == UserSession.id).filter(
        PageView.viewed_at >= start_dt,
        UserSession.is_bot == False
    ).group_by(PageView.path).order_by(desc('views')).limit(80).all()
    # Filter out technical paths after the query (more flexible than SQL LIKE).
    top_pages = [p for p in top_pages_raw if not _is_technical_path(p.path)][:50]
    max_page_views = top_pages[0].views if top_pages else 1
    pages_list = []
    for p in top_pages:
        avg_time_val = int(p.avg_time or 0)
        pages_list.append({
            'path': p.path,
            'label': _humanize_path(p.path, db),
            'views': p.views,
            'unique_users': p.unique_users,
            'avg_time': avg_time_val,
            'avg_time_fmt': _format_time(avg_time_val),
            'avg_scroll': int(p.avg_scroll or 0),
            'avg_load': int(p.avg_load or 0),
            'bar_pct': int(p.views / max_page_views * 100),
        })
    # Ignored pages (< 5 views in 30d, exclude bots + technical paths).
    start_30d = datetime.combine(date.today() - timedelta(days=30), datetime.min.time())
    ignored_raw = db.query(
        PageView.path,
        func.count(PageView.id).label('views'),
    ).join(UserSession, PageView.session_id == UserSession.id).filter(
        PageView.viewed_at >= start_30d,
        UserSession.is_bot == False
    ).group_by(PageView.path).having(
        func.count(PageView.id) < 5
    ).order_by('views').limit(60).all()
    ignored = [p for p in ignored_raw if not _is_technical_path(p.path)][:30]
    # Top searches
    search_query = db.query(
        SearchQuery.query_normalized,
        func.count(SearchQuery.id).label('count'),
        func.avg(SearchQuery.results_count).label('avg_results')
    ).filter(
        func.date(SearchQuery.searched_at) >= start_date
    ).group_by(SearchQuery.query_normalized).order_by(desc('count')).limit(15).all()
    # Searches without results
    no_results_query = db.query(
        SearchQuery.query_normalized,
        func.count(SearchQuery.id).label('count')
    ).filter(
        SearchQuery.has_results == False,
        func.date(SearchQuery.searched_at) >= start_date
    ).group_by(SearchQuery.query_normalized).order_by(desc('count')).limit(10).all()
    # Search effectiveness (CTR = searches with a clicked result / all searches)
    total_searches = db.query(func.count(SearchQuery.id)).filter(
        func.date(SearchQuery.searched_at) >= start_date
    ).scalar() or 0
    searches_with_click = db.query(func.count(SearchQuery.id)).filter(
        func.date(SearchQuery.searched_at) >= start_date,
        SearchQuery.clicked_result_position.isnot(None)
    ).scalar() or 0
    search_ctr = round(searches_with_click / total_searches * 100) if total_searches > 0 else 0
    avg_click_position = db.query(func.avg(SearchQuery.clicked_result_position)).filter(
        func.date(SearchQuery.searched_at) >= start_date,
        SearchQuery.clicked_result_position.isnot(None)
    ).scalar()
    avg_click_position = round(avg_click_position, 1) if avg_click_position else None
    # Most clicked companies from search
    top_clicked_companies = db.query(
        SearchQuery.clicked_company_id,
        Company.name,
        func.count(SearchQuery.id).label('clicks')
    ).join(Company, SearchQuery.clicked_company_id == Company.id).filter(
        func.date(SearchQuery.searched_at) >= start_date,
        SearchQuery.clicked_company_id.isnot(None)
    ).group_by(SearchQuery.clicked_company_id, Company.name).order_by(desc('clicks')).limit(10).all()
    search_effectiveness = {
        'total': total_searches,
        'with_click': searches_with_click,
        'ctr': search_ctr,
        'avg_position': avg_click_position,
        'top_clicked': [{'name': r.name, 'clicks': r.clicks} for r in top_clicked_companies],
    }
    # Content engagement (30 days).
    # NOTE(review): start_30d is recomputed here with the same value as above.
    start_30d = datetime.combine(date.today() - timedelta(days=30), datetime.min.time())
    # Total active members — denominator for the open-rate percentages.
    total_active = db.query(func.count(User.id)).filter(
        User.is_active == True, User.last_login.isnot(None)
    ).scalar() or 1
    # Announcements: published in 30d + unique readers of those announcements
    total_announcements = db.query(func.count(Announcement.id)).filter(
        Announcement.created_at >= start_30d
    ).scalar() or 0
    # How many unique users read ANY announcement (regardless of when published)
    total_announcement_readers = db.query(func.count(func.distinct(AnnouncementRead.user_id))).filter(
        AnnouncementRead.read_at >= start_30d
    ).scalar() or 0
    # Open rate = % of active members who read at least one announcement
    ann_open_rate = min(100, round(total_announcement_readers / total_active * 100)) if total_active > 0 else 0
    # Forum: topics published in 30d + unique readers
    total_forum_topics = db.query(func.count(ForumTopic.id)).filter(
        ForumTopic.created_at >= start_30d
    ).scalar() or 0
    total_forum_readers = db.query(func.count(func.distinct(ForumTopicRead.user_id))).filter(
        ForumTopicRead.read_at >= start_30d
    ).scalar() or 0
    forum_open_rate = min(100, round(total_forum_readers / total_active * 100)) if total_active > 0 else 0
    # Classifieds: published in 30d + unique readers
    total_classifieds = db.query(func.count(Classified.id)).filter(
        Classified.created_at >= start_30d
    ).scalar() or 0
    total_classified_readers = db.query(func.count(func.distinct(ClassifiedRead.user_id))).filter(
        ClassifiedRead.read_at >= start_30d
    ).scalar() or 0
    classified_open_rate = min(100, round(total_classified_readers / total_active * 100)) if total_active > 0 else 0
    content_engagement = {
        'announcements': {
            'published': total_announcements,
            'read_by': total_announcement_readers,
            'open_rate': ann_open_rate,
        },
        'forum': {
            'published': total_forum_topics,
            'read_by': total_forum_readers,
            'open_rate': forum_open_rate,
        },
        'classifieds': {
            'published': total_classifieds,
            'read_by': total_classified_readers,
            'open_rate': classified_open_rate,
        },
    }
    return {
        'sections': sections,
        'top_pages': pages_list,
        'ignored_pages': [{'path': _mask_token_path(p.path), 'views': p.views} for p in ignored],
        'top_searches': search_query,
        'searches_no_results': no_results_query,
        'search_effectiveness': search_effectiveness,
        'content_engagement': content_engagement,
    }
# ============================================================
# TAB 4: PATHS
# ============================================================
def _tab_paths(db, start_date, days):
    """User flow analysis: entry/exit pages, transitions, drop-offs, session lengths.

    Uses raw SQL with PostgreSQL-specific features (DISTINCT ON, ::numeric
    casts, window functions), so it requires a PostgreSQL backend.

    NOTE(review): the 'paths' tab is redirected to 'overview' in the
    user_insights route, so this builder appears unreferenced within this
    file — confirm before removing. The *days* parameter is unused here;
    kept for the uniform _tab_* signature.

    Returns:
        dict with entry_pages, exit_pages, transitions, dropoff,
        session_lengths lists for the template.
    """
    start_dt = datetime.combine(start_date, datetime.min.time())
    # Entry pages — first page of each session (DISTINCT ON + ASC order).
    entry_sql = text("""
        WITH first_pages AS (
            SELECT DISTINCT ON (pv.session_id) pv.path
            FROM page_views pv
            JOIN user_sessions us ON pv.session_id = us.id
            WHERE pv.viewed_at >= :start_dt AND pv.session_id IS NOT NULL AND us.is_bot = false
            ORDER BY pv.session_id, pv.viewed_at ASC
        )
        SELECT path, COUNT(*) as cnt
        FROM first_pages
        GROUP BY path ORDER BY cnt DESC LIMIT 25
    """)
    entry_pages = db.execute(entry_sql, {'start_dt': start_dt}).fetchall()
    # Exit pages — last page of each session (same query, DESC order).
    exit_sql = text("""
        WITH last_pages AS (
            SELECT DISTINCT ON (pv.session_id) pv.path
            FROM page_views pv
            JOIN user_sessions us ON pv.session_id = us.id
            WHERE pv.viewed_at >= :start_dt AND pv.session_id IS NOT NULL AND us.is_bot = false
            ORDER BY pv.session_id, pv.viewed_at DESC
        )
        SELECT path, COUNT(*) as cnt
        FROM last_pages
        GROUP BY path ORDER BY cnt DESC LIMIT 25
    """)
    exit_pages = db.execute(exit_sql, {'start_dt': start_dt}).fetchall()
    # NOTE(review): max_entry/max_exit are recomputed below on the filtered
    # lists (max_entry_f/max_exit_f); these two appear unused.
    max_entry = entry_pages[0].cnt if entry_pages else 1
    max_exit = exit_pages[0].cnt if exit_pages else 1
    # Top page-to-page transitions via LEAD() window function; self-loops excluded.
    transitions_sql = text("""
        WITH ordered AS (
            SELECT pv.session_id, pv.path,
                   LEAD(pv.path) OVER (PARTITION BY pv.session_id ORDER BY pv.viewed_at) AS next_path
            FROM page_views pv
            JOIN user_sessions us ON pv.session_id = us.id
            WHERE pv.viewed_at >= :start_dt AND pv.session_id IS NOT NULL AND us.is_bot = false
        )
        SELECT path, next_path, COUNT(*) as cnt
        FROM ordered
        WHERE next_path IS NOT NULL AND path != next_path
        GROUP BY path, next_path ORDER BY cnt DESC LIMIT 60
    """)
    transitions = db.execute(transitions_sql, {'start_dt': start_dt}).fetchall()
    # Drop-off pages: exit count / total views per path (min 5 views).
    dropoff_sql = text("""
        WITH page_stats AS (
            SELECT pv.path, COUNT(*) as total_views
            FROM page_views pv
            JOIN user_sessions us ON pv.session_id = us.id
            WHERE pv.viewed_at >= :start_dt AND pv.session_id IS NOT NULL AND us.is_bot = false
            GROUP BY pv.path HAVING COUNT(*) >= 5
        ),
        exit_stats AS (
            SELECT path, COUNT(*) as exit_count
            FROM (
                SELECT DISTINCT ON (pv.session_id) pv.path
                FROM page_views pv
                JOIN user_sessions us ON pv.session_id = us.id
                WHERE pv.viewed_at >= :start_dt AND pv.session_id IS NOT NULL AND us.is_bot = false
                ORDER BY pv.session_id, pv.viewed_at DESC
            ) lp
            GROUP BY path
        )
        SELECT ps.path, ps.total_views as views,
               COALESCE(es.exit_count, 0) as exits,
               ROUND(COALESCE(es.exit_count, 0)::numeric / ps.total_views * 100, 1) as exit_rate
        FROM page_stats ps
        LEFT JOIN exit_stats es ON ps.path = es.path
        ORDER BY exit_rate DESC LIMIT 20
    """)
    dropoff = db.execute(dropoff_sql, {'start_dt': start_dt}).fetchall()
    # Session length distribution — page views per session, bucketed.
    session_length_sql = text("""
        SELECT
            CASE
                WHEN pv_count = 1 THEN '1 strona'
                WHEN pv_count = 2 THEN '2 strony'
                WHEN pv_count BETWEEN 3 AND 5 THEN '3-5 stron'
                WHEN pv_count BETWEEN 6 AND 10 THEN '6-10 stron'
                ELSE '10+ stron'
            END as bucket,
            COUNT(*) as cnt
        FROM (
            SELECT pv.session_id, COUNT(*) as pv_count
            FROM page_views pv
            JOIN user_sessions us ON pv.session_id = us.id
            WHERE pv.viewed_at >= :start_dt AND pv.session_id IS NOT NULL AND us.is_bot = false
            GROUP BY pv.session_id
        ) session_counts
        GROUP BY bucket
        ORDER BY MIN(pv_count)
    """)
    session_lengths = db.execute(session_length_sql, {'start_dt': start_dt}).fetchall()
    max_sl = max((r.cnt for r in session_lengths), default=1) or 1
    # Filter out technical/scanner paths post-query, then trim to display size.
    entry_filtered = [r for r in entry_pages if not _is_technical_path(r.path)][:10]
    exit_filtered = [r for r in exit_pages if not _is_technical_path(r.path)][:10]
    transitions_filtered = [r for r in transitions if not _is_technical_path(r.path) and not _is_technical_path(r.next_path)][:30]
    dropoff_filtered = [r for r in dropoff if not _is_technical_path(r.path)][:20]
    # Bar percentages are relative to the top FILTERED row.
    max_entry_f = entry_filtered[0].cnt if entry_filtered else 1
    max_exit_f = exit_filtered[0].cnt if exit_filtered else 1
    return {
        'entry_pages': [{'path': r.path, 'label': _humanize_path(r.path, db), 'count': r.cnt, 'bar_pct': int(r.cnt / max_entry_f * 100)} for r in entry_filtered],
        'exit_pages': [{'path': r.path, 'label': _humanize_path(r.path, db), 'count': r.cnt, 'bar_pct': int(r.cnt / max_exit_f * 100)} for r in exit_filtered],
        'transitions': [{'from': r.path, 'from_label': _humanize_path(r.path, db), 'to': r.next_path, 'to_label': _humanize_path(r.next_path, db), 'count': r.cnt} for r in transitions_filtered],
        'dropoff': [{'path': r.path, 'label': _humanize_path(r.path, db), 'views': r.views, 'exits': r.exits, 'exit_rate': float(r.exit_rate)} for r in dropoff_filtered],
        'session_lengths': [{'bucket': r.bucket, 'count': r.cnt, 'bar_pct': int(r.cnt / max_sl * 100)} for r in session_lengths],
    }
# ============================================================
# TAB 5: OVERVIEW
# ============================================================
def _kpi_for_period(db, start_dt, end_dt=None):
    """Compute core KPI metrics for the half-open interval [start_dt, end_dt).

    When ``end_dt`` is None the window is open-ended. Bot sessions are always
    excluded.

    Returns a dict with raw values: ``active_users`` (distinct logged-in
    users), ``sessions``, ``pageviews`` and ``bounce_rate`` (percentage of
    sessions with exactly one page view).
    """
    session_criteria = [UserSession.started_at >= start_dt, UserSession.is_bot == False]
    pageview_criteria = [PageView.viewed_at >= start_dt, UserSession.is_bot == False]
    if end_dt is not None:
        session_criteria.append(UserSession.started_at < end_dt)
        pageview_criteria.append(PageView.viewed_at < end_dt)
    # Distinct logged-in users with at least one session in the window.
    active_users = db.query(func.count(func.distinct(UserSession.user_id))).filter(
        UserSession.user_id.isnot(None), *session_criteria
    ).scalar() or 0
    session_total = db.query(func.count(UserSession.id)).filter(
        *session_criteria
    ).scalar() or 0
    pageview_total = db.query(func.count(PageView.id)).join(
        UserSession, PageView.session_id == UserSession.id
    ).filter(*pageview_criteria).scalar() or 0
    # Sessions that recorded exactly one page view count as bounces.
    single_view_subq = db.query(PageView.session_id).join(
        UserSession, PageView.session_id == UserSession.id
    ).filter(*pageview_criteria).group_by(PageView.session_id).having(
        func.count(PageView.id) == 1
    ).subquery()
    single_view_sessions = db.query(func.count()).select_from(single_view_subq).scalar() or 0
    bounce_rate = round(single_view_sessions / session_total * 100) if session_total > 0 else 0
    return {
        'active_users': active_users,
        'sessions': session_total,
        'pageviews': pageview_total,
        'bounce_rate': bounce_rate,
    }
def _trend_pct(current, previous):
"""Calculate trend percentage. Returns int or None if no previous data."""
if not previous or previous == 0:
return None
return round((current - previous) / previous * 100)
def _tab_overview(db, start_date, days):
    """Overview tab: KPIs with trends plus a set of 30-day widgets.

    Builds: sessions/pageviews charts, hourly heatmap, logged-vs-anonymous
    split, weekly device mix, feature adoption, event comparison, referrer
    sources, company profile popularity, incomplete-profile list and the
    session-length distribution.

    Args:
        db: SQLAlchemy session.
        start_date: first day of the selected KPI period.
        days: KPI period length in days (defines the previous-period window
            used for trend arrows).

    Returns:
        dict consumed by the overview template.
    """
    filter_type = request.args.get('filter', 'all')  # all, logged, anonymous
    start_dt = datetime.combine(start_date, datetime.min.time())
    start_30d = datetime.combine(date.today() - timedelta(days=30), datetime.min.time())
    # KPI: current period vs previous period of the same length ending at start_dt.
    kpi_now = _kpi_for_period(db, start_dt)
    prev_start = datetime.combine(start_date - timedelta(days=days), datetime.min.time())
    kpi_prev = _kpi_for_period(db, prev_start, start_dt)
    # Attach trend percentages next to each raw KPI value.
    kpi = {}
    for key in ('active_users', 'sessions', 'pageviews', 'bounce_rate'):
        kpi[key] = kpi_now[key]
        kpi[f'{key}_trend'] = _trend_pct(kpi_now[key], kpi_prev[key])
    # Daily sessions from analytics_daily (already bot-filtered after migration)
    daily_data = db.query(AnalyticsDaily).filter(
        AnalyticsDaily.date >= date.today() - timedelta(days=30)
    ).order_by(AnalyticsDaily.date).all()
    chart_labels = []
    chart_sessions = []
    for d in daily_data:
        chart_labels.append(d.date.strftime('%d.%m'))
        if filter_type == 'logged':
            chart_sessions.append(d.total_sessions - (d.anonymous_sessions or 0))
        elif filter_type == 'anonymous':
            chart_sessions.append(d.anonymous_sessions or 0)
        else:
            chart_sessions.append(d.total_sessions or 0)
    # Daily page views from raw PageView + JOIN (bot-filtered, supports logged/anon filter)
    pv_filter = [
        PageView.viewed_at >= start_30d,
        UserSession.is_bot == False,
    ]
    if filter_type == 'logged':
        pv_filter.append(UserSession.user_id.isnot(None))
    elif filter_type == 'anonymous':
        pv_filter.append(UserSession.user_id.is_(None))
    pv_daily = db.query(
        func.date(PageView.viewed_at).label('day'),
        func.count(PageView.id).label('cnt')
    ).join(UserSession, PageView.session_id == UserSession.id).filter(
        *pv_filter
    ).group_by(func.date(PageView.viewed_at)).all()
    pv_by_date = {str(r.day): r.cnt for r in pv_daily}
    # Align page-view counts to the same date axis as the sessions chart.
    chart_pageviews = []
    for d in daily_data:
        chart_pageviews.append(pv_by_date.get(str(d.date), 0))
    # Hourly heatmap (7 days x 24 hours, exclude bots)
    heatmap_sql = text("""
        SELECT EXTRACT(DOW FROM started_at)::int as dow,
               EXTRACT(HOUR FROM started_at)::int as hour,
               COUNT(*) as cnt
        FROM user_sessions
        WHERE started_at >= :start_dt AND is_bot = false
        GROUP BY dow, hour
    """)
    heatmap_raw = db.execute(heatmap_sql, {'start_dt': start_30d}).fetchall()
    heatmap = {}
    max_heat = 1
    for r in heatmap_raw:
        key = (r.dow, r.hour)
        heatmap[key] = r.cnt
        if r.cnt > max_heat:
            max_heat = r.cnt
    heatmap_grid = []
    # PostgreSQL DOW: 0 = Sunday ... 6 = Saturday.
    dow_names = ['Nd', 'Pn', 'Wt', 'Śr', 'Cz', 'Pt', 'Sb']
    for dow in range(7):
        row = {'name': dow_names[dow], 'hours': []}
        for h in range(24):
            cnt = heatmap.get((dow, h), 0)
            intensity = int(cnt / max_heat * 100) if max_heat else 0
            row['hours'].append({'count': cnt, 'intensity': intensity})
        heatmap_grid.append(row)
    # Logged vs Anonymous (exclude bots)
    total_logged = db.query(func.count(UserSession.id)).filter(
        UserSession.started_at >= start_30d,
        UserSession.user_id.isnot(None),
        UserSession.is_bot == False
    ).scalar() or 0
    total_anon = db.query(func.count(UserSession.id)).filter(
        UserSession.started_at >= start_30d,
        UserSession.user_id.is_(None),
        UserSession.is_bot == False
    ).scalar() or 0
    # Devices over time (weekly, exclude bots)
    devices_sql = text("""
        SELECT DATE_TRUNC('week', started_at)::date as week,
               device_type,
               COUNT(*) as cnt
        FROM user_sessions
        WHERE started_at >= :start_dt AND is_bot = false
        GROUP BY week, device_type
        ORDER BY week
    """)
    devices_raw = db.execute(devices_sql, {'start_dt': start_30d}).fetchall()
    weeks_set = sorted(set(r.week for r in devices_raw))
    device_map = {}
    for r in devices_raw:
        if r.week not in device_map:
            device_map[r.week] = {}
        device_map[r.week][r.device_type or 'unknown'] = r.cnt
    device_labels = [w.strftime('%d.%m') for w in weeks_set]
    device_desktop = [device_map.get(w, {}).get('desktop', 0) for w in weeks_set]
    device_mobile = [device_map.get(w, {}).get('mobile', 0) for w in weeks_set]
    device_tablet = [device_map.get(w, {}).get('tablet', 0) for w in weeks_set]
    # Goal: active members / total members
    total_members = db.query(func.count(User.id)).filter(User.is_active == True).scalar() or 0
    kpi['total_members'] = total_members
    kpi['active_pct'] = round(kpi['active_users'] / total_members * 100) if total_members > 0 else 0
    # Feature adoption: unique logged users per section (30 days)
    feature_sections = {
        'NordaGPT': ['/chat'],
        'Forum': ['/forum'],
        'Wyszukiwarka': ['/search', '/szukaj'],
        'Kalendarz': ['/kalendarz', '/events'],
        'Tablica B2B': ['/tablica', '/classifieds'],
        'Aktualności': ['/ogloszenia', '/wiadomosci'],
        'Profile firm': ['/company/'],
    }
    adoption = []
    for feature_name, prefixes in feature_sections.items():
        conditions = [PageView.path.like(p + '%') for p in prefixes]
        users_count = db.query(func.count(func.distinct(PageView.user_id))).join(
            UserSession, PageView.session_id == UserSession.id
        ).filter(
            or_(*conditions),
            PageView.viewed_at >= start_30d,
            PageView.user_id.isnot(None),
            UserSession.is_bot == False
        ).scalar() or 0
        adoption.append({
            'name': feature_name,
            'users': users_count,
            'pct': round(users_count / total_members * 100) if total_members > 0 else 0,
        })
    adoption.sort(key=lambda x: x['users'], reverse=True)
    # Event comparison (last 30 days - views, RSVP count per event).
    # BUGFIX: previously the bot filter lived on a dangling
    # `LEFT JOIN user_sessions us ... AND us.is_bot = false`, which never
    # removed bot page views from COUNT(DISTINCT pv.id) — a LEFT JOIN
    # condition cannot filter rows of the left side. Filter bot sessions
    # inside the page_views join condition instead.
    events_sql = text("""
        SELECT ne.id, ne.title, ne.event_date,
               COUNT(DISTINCT pv.id) as views,
               COUNT(DISTINCT pv.user_id) as unique_viewers
        FROM norda_events ne
        LEFT JOIN page_views pv ON pv.path = '/kalendarz/' || ne.id::text
             AND pv.viewed_at >= :start_dt
             AND pv.session_id IN (SELECT id FROM user_sessions WHERE is_bot = false)
        WHERE ne.event_date >= :month_ago
        GROUP BY ne.id, ne.title, ne.event_date
        ORDER BY ne.event_date DESC
    """)
    events_raw = db.execute(events_sql, {
        'start_dt': start_30d,
        'month_ago': (date.today() - timedelta(days=60)).isoformat()
    }).fetchall()
    # Get RSVP counts
    from database import EventAttendee
    event_ids = [r.id for r in events_raw]
    rsvp_map = {}
    if event_ids:
        rsvps = db.query(
            EventAttendee.event_id,
            func.count(EventAttendee.id)
        ).filter(
            EventAttendee.event_id.in_(event_ids),
            EventAttendee.status == 'confirmed'
        ).group_by(EventAttendee.event_id).all()
        rsvp_map = {eid: cnt for eid, cnt in rsvps}
    events_comparison = []
    for r in events_raw:
        events_comparison.append({
            'title': r.title,
            'date': r.event_date.strftime('%d.%m.%Y') if r.event_date else '',
            'views': r.views or 0,
            'unique_viewers': r.unique_viewers or 0,
            'rsvp': rsvp_map.get(r.id, 0),
        })
    # Referrer sources (top 10, 30 days, exclude bots + internal)
    referrer_raw = db.query(
        PageView.referrer,
        func.count(PageView.id).label('cnt')
    ).join(UserSession, PageView.session_id == UserSession.id).filter(
        PageView.viewed_at >= start_30d,
        PageView.referrer.isnot(None),
        PageView.referrer != '',
        UserSession.is_bot == False
    ).group_by(PageView.referrer).order_by(desc('cnt')).limit(50).all()
    # Aggregate by domain
    from urllib.parse import urlparse
    domain_counts = {}
    for r in referrer_raw:
        try:
            domain = urlparse(r.referrer).netloc or r.referrer
            # Skip own domain
            if 'nordabiznes.pl' in domain or 'nordabiznes' in domain:
                continue
            # Simplify well-known domains
            if 'facebook' in domain or 'fbclid' in (r.referrer or ''):
                domain = 'Facebook'
            elif 'google' in domain:
                domain = 'Google'
            elif 'linkedin' in domain:
                domain = 'LinkedIn'
            elif 'instagram' in domain:
                domain = 'Instagram'
            elif 't.co' in domain or 'twitter' in domain:
                domain = 'X / Twitter'
            domain_counts[domain] = domain_counts.get(domain, 0) + r.cnt
        except Exception:
            # Best-effort: a malformed referrer URL is simply skipped.
            pass
    referrer_sources = sorted(domain_counts.items(), key=lambda x: x[1], reverse=True)[:10]
    max_ref = referrer_sources[0][1] if referrer_sources else 1
    # Company profile popularity (top 15, 30 days) - paths are /company/<id>
    company_views_raw = db.query(
        PageView.path,
        func.count(PageView.id).label('views'),
        func.count(func.distinct(PageView.user_id)).label('unique_viewers')
    ).join(UserSession, PageView.session_id == UserSession.id).filter(
        PageView.viewed_at >= start_30d,
        PageView.path.op('~')(r'^/company/\d+$'),
        UserSession.is_bot == False
    ).group_by(PageView.path).order_by(desc('views')).limit(15).all()
    company_popularity = [{
        'name': _humanize_path(r.path, db),
        'path': r.path,
        'views': r.views,
        'unique_viewers': r.unique_viewers,
    } for r in company_views_raw]
    max_company = company_popularity[0]['views'] if company_popularity else 1
    # Companies needing attention (active, with incomplete profiles).
    # NOTE: `os` is already imported at module level; the previous local
    # `import os` was redundant and has been removed.
    all_companies = db.query(Company).filter(Company.status == 'active').all()
    companies_attention = []
    for co in all_companies:
        issues = []
        if not co.description_short and not co.description_full:
            issues.append('Brak opisu')
        if not co.phone and not co.email:
            issues.append('Brak kontaktu')
        if not co.website:
            issues.append('Brak strony WWW')
        if not co.address_city:
            issues.append('Brak adresu')
        # Check logo file
        logo_exists = False
        for ext in ('webp', 'svg', 'png', 'jpg'):
            logo_path = os.path.join('static', 'img', 'companies', f'{co.slug}.{ext}')
            if os.path.exists(logo_path):
                logo_exists = True
                break
        if not logo_exists:
            issues.append('Brak logo')
        if issues:
            companies_attention.append({
                'id': co.id,
                'name': co.name,
                'slug': co.slug,
                'issues': issues,
                'issue_count': len(issues),
                'quality': co.data_quality or 'basic',
            })
    companies_attention.sort(key=lambda x: x['issue_count'], reverse=True)
    # Session length distribution (exclude bots)
    session_length_sql = text("""
        SELECT
            CASE
                WHEN pv_count = 1 THEN '1 strona'
                WHEN pv_count = 2 THEN '2 strony'
                WHEN pv_count BETWEEN 3 AND 5 THEN '3-5 stron'
                WHEN pv_count BETWEEN 6 AND 10 THEN '6-10 stron'
                ELSE '10+ stron'
            END as bucket,
            COUNT(*) as cnt
        FROM (
            SELECT pv.session_id, COUNT(*) as pv_count
            FROM page_views pv
            JOIN user_sessions us ON pv.session_id = us.id
            WHERE pv.viewed_at >= :start_dt AND pv.session_id IS NOT NULL AND us.is_bot = false
            GROUP BY pv.session_id
        ) session_counts
        GROUP BY bucket
        ORDER BY MIN(pv_count)
    """)
    session_lengths = db.execute(session_length_sql, {'start_dt': start_30d}).fetchall()
    max_sl = max((r.cnt for r in session_lengths), default=1) or 1
    return {
        'filter_type': filter_type,
        'kpi': kpi,
        'chart_data': {
            'labels': chart_labels,
            'sessions': chart_sessions,
            'pageviews': chart_pageviews,
        },
        'heatmap': heatmap_grid,
        'logged_vs_anon': {'logged': total_logged, 'anonymous': total_anon},
        'devices': {
            'labels': device_labels,
            'desktop': device_desktop,
            'mobile': device_mobile,
            'tablet': device_tablet,
        },
        'adoption': adoption,
        'events_comparison': events_comparison,
        'referrer_sources': [{'domain': d, 'count': c, 'bar_pct': int(c / max_ref * 100)} for d, c in referrer_sources],
        'company_popularity': company_popularity,
        'max_company_views': max_company,
        'companies_attention': companies_attention[:20],
        'companies_attention_total': len(companies_attention),
        'session_lengths': [{'bucket': r.bucket, 'count': r.cnt, 'bar_pct': int(r.cnt / max_sl * 100)} for r in session_lengths],
    }
# ============================================================
# TAB 6: CHAT ANALYTICS
# ============================================================
def _tab_chat(db, start_date, days):
    """Chat analytics tab: volume, feedback and topic breakdown for NordaGPT.

    Period metrics (conversations/messages) are scoped to *start_date*;
    feedback counters are all-time. User messages are bucketed into topic
    categories by simple keyword matching.
    """
    start_dt = datetime.combine(start_date, datetime.min.time())
    # --- Volume for the selected period ---
    conversations_in_period = db.query(AIChatConversation).filter(
        AIChatConversation.started_at >= start_dt
    ).count()
    user_messages_in_period = db.query(AIChatMessage).filter(
        AIChatMessage.role == 'user',
        AIChatMessage.created_at >= start_dt
    ).count()
    messages_today = db.query(AIChatMessage).filter(
        AIChatMessage.role == 'user',
        func.date(AIChatMessage.created_at) == date.today()
    ).count()
    # --- Feedback (all-time): rating 2 = thumbs up, 1 = thumbs down ---
    rated_total = db.query(AIChatMessage).filter(
        AIChatMessage.feedback_rating.isnot(None)
    ).count()
    thumbs_up = db.query(AIChatMessage).filter_by(feedback_rating=2).count()
    thumbs_down = db.query(AIChatMessage).filter_by(feedback_rating=1).count()
    satisfaction = round(thumbs_up / rated_total * 100, 1) if rated_total > 0 else 0
    # Most recent rated messages, shown with conversation context.
    latest_feedback = db.query(AIChatMessage).filter(
        AIChatMessage.feedback_rating.isnot(None)
    ).order_by(desc(AIChatMessage.feedback_at)).limit(20).all()
    # --- Topic categorisation: first matching category wins ---
    period_msgs = db.query(AIChatMessage).filter(
        AIChatMessage.role == 'user',
        AIChatMessage.created_at >= start_dt
    ).all()
    categories = {
        'O firmach': ['firma', 'spółka', 'przedsiębior', 'działalność'],
        'Szukanie kontaktu': ['kontakt', 'telefon', 'email', 'numer', 'adres', 'www'],
        'O wydarzeniach': ['wydarzen', 'spotkani', 'konferencj', 'szkoleni', 'networking'],
        'O usługach': ['usług', 'ofert', 'cennik', 'wycen', 'współprac'],
        'O stowarzyszeniu': ['norda', 'izba', 'stowarzysz', 'członk', 'składk'],
    }
    counts = dict.fromkeys(categories, 0)
    uncategorized = 0
    for msg in period_msgs:
        text_lc = (msg.content or '').lower()
        hit = next(
            (name for name, keywords in categories.items()
             if any(kw in text_lc for kw in keywords)),
            None,
        )
        if hit is None:
            uncategorized += 1
        else:
            counts[hit] += 1
    counts['Inne'] = uncategorized
    # Drop empty buckets, sort descending by volume (stable, so ties keep
    # category declaration order).
    topics = sorted(
        ({'name': name, 'count': total} for name, total in counts.items() if total > 0),
        key=lambda item: item['count'],
        reverse=True,
    )
    top_topic_count = topics[0]['count'] if topics else 1
    return {
        'period_conversations': conversations_in_period,
        'period_messages': user_messages_in_period,
        'today_messages': messages_today,
        'positive_feedback': thumbs_up,
        'negative_feedback': thumbs_down,
        'satisfaction_rate': satisfaction,
        'recent_feedback': latest_feedback,
        'topics': topics,
        'max_topic': top_topic_count,
    }
# ============================================================
# USER PROFILE DRILL-DOWN
# ============================================================
@bp.route('/analytics/user/<int:user_id>')
@login_required
@role_required(SystemRole.OFFICE_MANAGER)
def user_insights_profile(user_id):
    """Individual user behavioral profile.

    Aggregates, for one member: a 30-day engagement score, a 7/30-day
    problem score, a merged activity timeline (audit logs, emails, sessions,
    key page views, searches, conversions, security alerts), a
    password-reset resolution status, and usage breakdowns (favourite pages,
    devices, browsers, hourly pattern, 30-day engagement trend).
    Redirects back to the main insights page when the user does not exist
    or an error occurs.
    """
    db = SessionLocal()
    try:
        user = db.query(User).options(joinedload(User.company)).get(user_id)
        if not user:
            flash('Użytkownik nie znaleziony.', 'error')
            return redirect(url_for('admin.user_insights'))
        now = datetime.now()
        start_30d = datetime.combine(date.today() - timedelta(days=30), datetime.min.time())
        start_7d = datetime.combine(date.today() - timedelta(days=7), datetime.min.time())
        # Back link context (which tab/period to return to)
        ref_tab = request.args.get('ref_tab', 'engagement')
        ref_period = request.args.get('ref_period', 'week')
        # Engagement score (30d) — with bot filtering
        s30 = db.query(func.count(UserSession.id)).filter(
            UserSession.user_id == user_id, UserSession.started_at >= start_30d,
            UserSession.is_bot == False
        ).scalar() or 0
        pv30 = db.query(func.count(PageView.id)).join(
            UserSession, PageView.session_id == UserSession.id
        ).filter(
            PageView.user_id == user_id, PageView.viewed_at >= start_30d,
            UserSession.is_bot == False
        ).scalar() or 0
        clicks30 = db.query(func.sum(UserSession.clicks_count)).filter(
            UserSession.user_id == user_id, UserSession.started_at >= start_30d,
            UserSession.is_bot == False
        ).scalar() or 0
        dur30 = db.query(func.sum(UserSession.duration_seconds)).filter(
            UserSession.user_id == user_id, UserSession.started_at >= start_30d,
            UserSession.is_bot == False
        ).scalar() or 0
        conv30 = db.query(func.count(ConversionEvent.id)).filter(
            ConversionEvent.user_id == user_id, ConversionEvent.converted_at >= start_30d
        ).scalar() or 0
        search30 = db.query(func.count(SearchQuery.id)).filter(
            SearchQuery.user_id == user_id, SearchQuery.searched_at >= start_30d
        ).scalar() or 0
        # Weighted raw activity: sessions x3, page views x1, clicks x0.5,
        # minutes x2, conversions x10, searches x2 — then log-compressed.
        raw = (s30 * 3 + pv30 * 1 + int(clicks30) * 0.5 +
               int(dur30) / 60 * 2 + conv30 * 10 + search30 * 2)
        engagement_score = _log_engagement_score(raw)
        # Problem score (failed logins from audit_logs, time-based)
        fl = db.query(func.count(AuditLog.id)).filter(
            AuditLog.user_email == user.email,
            AuditLog.action == 'login_failed',
            AuditLog.created_at >= start_7d
        ).scalar() or 0
        sa_7d = db.query(func.count(SecurityAlert.id)).filter(
            SecurityAlert.user_email == user.email,
            SecurityAlert.created_at >= start_7d
        ).scalar() or 0
        pr_30d = db.query(func.count(EmailLog.id)).filter(
            EmailLog.recipient_email == user.email,
            EmailLog.email_type == 'password_reset',
            EmailLog.created_at >= start_30d
        ).scalar() or 0
        je_7d = db.query(func.count(JSError.id)).join(
            UserSession, JSError.session_id == UserSession.id
        ).filter(
            UserSession.user_id == user_id,
            JSError.occurred_at >= start_7d
        ).scalar() or 0
        # Page views slower than 3s are treated as a UX problem signal.
        sp_7d = db.query(func.count(PageView.id)).filter(
            PageView.user_id == user_id,
            PageView.viewed_at >= start_7d,
            PageView.load_time_ms > 3000
        ).scalar() or 0
        is_locked = 1 if user.locked_until and user.locked_until > now else 0
        # Weighted problem signals, capped at 100.
        problem_score = min(100,
            fl * 10 + pr_30d * 15 + je_7d * 3 + sp_7d * 2 + sa_7d * 20 + is_locked * 40
        )
        # ============================================================
        # FULL PROBLEM CHRONOLOGY (audit_logs + email_logs + sessions)
        # ============================================================
        # Events from several sources are appended with a shared shape
        # (type/icon/time/desc/detail/css), then sorted by time desc.
        timeline = []
        # Audit logs: login attempts (successful and failed)
        audit_entries = db.query(AuditLog).filter(
            AuditLog.user_email == user.email
        ).order_by(desc(AuditLog.created_at)).limit(50).all()
        for a in audit_entries:
            if a.action == 'login':
                timeline.append({
                    'type': 'login',
                    'icon': 'key',
                    'time': a.created_at,
                    'desc': f'Zalogowano pomyślnie',
                    'detail': f'IP: {a.ip_address or "?"}',
                    'css': 'success',
                })
            elif a.action == 'login_failed':
                timeline.append({
                    'type': 'problem',
                    'icon': 'x',
                    'time': a.created_at,
                    'desc': f'Nieudane logowanie',
                    'detail': f'IP: {a.ip_address or "?"}',
                    'css': 'danger',
                })
            elif a.action == 'email_verified':
                timeline.append({
                    'type': 'login',
                    'icon': 'check',
                    'time': a.created_at,
                    'desc': 'Email zweryfikowany',
                    'detail': '',
                    'css': 'success',
                })
            elif a.action == 'logout':
                timeline.append({
                    'type': 'info',
                    'icon': 'logout',
                    'time': a.created_at,
                    'desc': 'Wylogowanie',
                    'detail': '',
                    'css': 'muted',
                })
            else:
                # Any other audited action is shown generically.
                timeline.append({
                    'type': 'info',
                    'icon': 'info',
                    'time': a.created_at,
                    'desc': f'{a.action}',
                    'detail': f'{a.entity_type or ""} {a.entity_name or ""}',
                    'css': 'muted',
                })
        # All emails sent to this user
        all_emails = db.query(EmailLog).filter(
            EmailLog.recipient_email == user.email
        ).order_by(desc(EmailLog.created_at)).limit(30).all()
        for e in all_emails:
            email_labels = {
                'password_reset': 'Reset hasła',
                'welcome': 'Email powitalny',
                'notification': 'Powiadomienie',
                'forum_notification': 'Powiadomienie z forum',
                'role_notification': 'Zmiana roli',
                'registration_notification': 'Rejestracja',
            }
            label = email_labels.get(e.email_type, e.email_type)
            status_label = {'sent': 'wysłany', 'failed': 'BŁĄD', 'pending': 'oczekuje'}.get(e.status, e.status)
            # Password resets highlighted as warnings; failed sends as danger.
            css = 'warning' if e.email_type == 'password_reset' else 'info'
            if e.status == 'failed':
                css = 'danger'
            timeline.append({
                'type': 'email',
                'icon': 'mail',
                'time': e.created_at,
                'desc': f'Email: {label} ({status_label})',
                'detail': e.subject or '',
                'css': css,
            })
        # Sessions (browser/device context, exclude bots)
        sessions = db.query(UserSession).filter(
            UserSession.user_id == user_id,
            UserSession.is_bot == False
        ).order_by(desc(UserSession.started_at)).limit(20).all()
        for s in sessions:
            dur = f', {s.duration_seconds // 60}min' if s.duration_seconds else ''
            timeline.append({
                'type': 'login',
                'icon': 'monitor',
                'time': s.started_at,
                'desc': f'Sesja: {s.device_type or "?"} / {s.browser or "?"} / {s.os or "?"}',
                'detail': f'{s.page_views_count or 0} stron, {s.clicks_count or 0} kliknięć{dur}',
                'css': 'info',
            })
        # Key page views — only important sections and company profiles are
        # surfaced in the timeline; slow loads (>3s) are flagged.
        key_paths = ['/', '/forum', '/chat', '/search', '/admin', '/events', '/membership']
        recent_pvs = db.query(PageView).filter(
            PageView.user_id == user_id,
        ).order_by(desc(PageView.viewed_at)).limit(50).all()
        for pv in recent_pvs:
            is_key = any(pv.path == p or pv.path.startswith(p + '/') for p in key_paths)
            if is_key or '/company/' in pv.path:
                load_info = ''
                if pv.load_time_ms and pv.load_time_ms > 3000:
                    load_info = f' (WOLNE: {pv.load_time_ms}ms)'
                timeline.append({
                    'type': 'pageview',
                    'icon': 'eye',
                    'time': pv.viewed_at,
                    'desc': f'Odwiedzono: {pv.path}{load_info}',
                    'detail': '',
                    'css': 'danger' if load_info else 'muted',
                })
        # Searches (queries with no results get a warning style)
        searches = db.query(SearchQuery).filter(
            SearchQuery.user_id == user_id
        ).order_by(desc(SearchQuery.searched_at)).limit(10).all()
        for s in searches:
            timeline.append({
                'type': 'search',
                'icon': 'search',
                'time': s.searched_at,
                'desc': f'Szukano: "{s.query}"',
                'detail': f'{s.results_count} wyników' if s.results_count else 'Brak wyników',
                'css': 'muted' if s.has_results else 'warning',
            })
        # Conversions
        convs = db.query(ConversionEvent).filter(
            ConversionEvent.user_id == user_id
        ).order_by(desc(ConversionEvent.converted_at)).limit(10).all()
        for c in convs:
            timeline.append({
                'type': 'conversion',
                'icon': 'check',
                'time': c.converted_at,
                'desc': f'Konwersja: {c.event_type}',
                'detail': c.target_type or '',
                'css': 'success',
            })
        # Security alerts
        sec_alerts = db.query(SecurityAlert).filter(
            SecurityAlert.user_email == user.email
        ).order_by(desc(SecurityAlert.created_at)).limit(10).all()
        for a in sec_alerts:
            timeline.append({
                'type': 'problem',
                'icon': 'shield',
                'time': a.created_at,
                'desc': f'Alert: {a.alert_type} ({a.severity})',
                'detail': f'IP: {a.ip_address or "?"}',
                'css': 'danger' if a.severity in ('high', 'critical') else 'warning',
            })
        # Account creation event (flags a missing welcome email)
        if user.created_at:
            has_welcome = db.query(EmailLog.id).filter(
                EmailLog.recipient_email == user.email,
                EmailLog.email_type == 'welcome'
            ).first() is not None
            timeline.append({
                'type': 'info',
                'icon': 'user',
                'time': user.created_at,
                'desc': 'Konto utworzone',
                'detail': f'Email powitalny: {"Tak" if has_welcome else "NIE WYSŁANO"}',
                'css': 'info' if has_welcome else 'danger',
            })
        # Newest first, hard cap to keep the page light.
        timeline.sort(key=lambda x: x['time'], reverse=True)
        timeline = timeline[:150]
        # ============================================================
        # PROBLEM RESOLUTION STATUS
        # ============================================================
        # Only computed when there are current problem signals or the user
        # has never logged in.
        resolution = None
        has_problems = (fl > 0 or pr_30d > 0 or is_locked)
        if has_problems or user.last_login is None:
            # Find first symptom (earliest failed login or reset email)
            first_failed = db.query(func.min(AuditLog.created_at)).filter(
                AuditLog.user_email == user.email,
                AuditLog.action == 'login_failed'
            ).scalar()
            first_reset = db.query(func.min(EmailLog.created_at)).filter(
                EmailLog.recipient_email == user.email,
                EmailLog.email_type == 'password_reset'
            ).scalar()
            first_symptom = None
            if first_failed and first_reset:
                first_symptom = min(first_failed, first_reset)
            else:
                first_symptom = first_failed or first_reset
            # What was sent (all reset emails, oldest first)
            all_resets = db.query(EmailLog).filter(
                EmailLog.recipient_email == user.email,
                EmailLog.email_type == 'password_reset'
            ).order_by(EmailLog.created_at).all()
            # Did user login after last reset?
            last_reset_time = all_resets[-1].created_at if all_resets else None
            login_after_reset = None
            if last_reset_time:
                login_after_reset = db.query(AuditLog).filter(
                    AuditLog.user_email == user.email,
                    AuditLog.action == 'login',
                    AuditLog.created_at > last_reset_time
                ).first()
            # Has active (unexpired) reset token?
            has_active_token = (
                user.reset_token is not None and
                user.reset_token_expires is not None and
                user.reset_token_expires > now
            )
            # Determine status — checked in priority order: resolved,
            # pending token, locked account, expired-token/no-login, never
            # logged in, otherwise unknown.
            if user.last_login and (not last_reset_time or user.last_login > last_reset_time):
                status = 'resolved'
                status_label = 'Rozwiązany'
            elif login_after_reset:
                status = 'resolved'
                status_label = 'Rozwiązany'
            elif has_active_token:
                status = 'pending'
                status_label = f'Oczekuje (token ważny do {user.reset_token_expires.strftime("%d.%m %H:%M")})'
            elif is_locked:
                status = 'blocked'
                status_label = f'Zablokowany (do {user.locked_until.strftime("%d.%m %H:%M")})'
            elif all_resets and not login_after_reset:
                status = 'unresolved'
                status_label = 'Nierozwiązany (token wygasł, brak loginu)'
            elif user.last_login is None:
                status = 'unresolved'
                status_label = 'Nigdy nie zalogowany'
            else:
                status = 'unknown'
                status_label = 'Nieznany'
            # Time to resolution (human-readable: minutes / hours / days)
            duration = None
            if status == 'resolved' and first_symptom:
                resolved_at = login_after_reset.created_at if login_after_reset else user.last_login
                if resolved_at:
                    delta = resolved_at - first_symptom
                    hours = delta.total_seconds() / 3600
                    if hours < 1:
                        duration = f'{int(delta.total_seconds() / 60)} min'
                    elif hours < 24:
                        duration = f'{hours:.1f} godz.'
                    else:
                        duration = f'{delta.days} dni'
            resolution = {
                'status': status,
                'status_label': status_label,
                'first_symptom': first_symptom,
                'resets_sent': len(all_resets),
                'last_reset': last_reset_time,
                'login_after_reset': login_after_reset is not None,
                'has_active_token': has_active_token,
                'duration': duration,
                'has_welcome_email': db.query(EmailLog.id).filter(
                    EmailLog.recipient_email == user.email,
                    EmailLog.email_type == 'welcome'
                ).first() is not None,
            }
        # Favorite pages (top 10, exclude bots)
        fav_pages = db.query(
            PageView.path,
            func.count(PageView.id).label('cnt')
        ).join(UserSession, PageView.session_id == UserSession.id).filter(
            PageView.user_id == user_id,
            PageView.viewed_at >= start_30d,
            UserSession.is_bot == False
        ).group_by(PageView.path).order_by(desc('cnt')).limit(10).all()
        max_fav = fav_pages[0].cnt if fav_pages else 1
        # Device/browser breakdown (exclude bots)
        devices = db.query(
            UserSession.device_type,
            func.count(UserSession.id).label('cnt')
        ).filter(
            UserSession.user_id == user_id,
            UserSession.started_at >= start_30d,
            UserSession.is_bot == False
        ).group_by(UserSession.device_type).all()
        browsers = db.query(
            UserSession.browser,
            func.count(UserSession.id).label('cnt')
        ).filter(
            UserSession.user_id == user_id,
            UserSession.started_at >= start_30d,
            UserSession.is_bot == False
        ).group_by(UserSession.browser).order_by(desc('cnt')).limit(5).all()
        # Hourly activity pattern (24 bars, exclude bots)
        hourly_sql = text("""
            SELECT EXTRACT(HOUR FROM started_at)::int as hour, COUNT(*) as cnt
            FROM user_sessions
            WHERE user_id = :uid AND started_at >= :start_dt AND is_bot = false
            GROUP BY hour ORDER BY hour
        """)
        hourly_raw = db.execute(hourly_sql, {'uid': user_id, 'start_dt': start_30d}).fetchall()
        hourly = {r.hour: r.cnt for r in hourly_raw}
        max_hourly = max(hourly.values(), default=1) or 1
        hourly_bars = []
        for h in range(24):
            cnt = hourly.get(h, 0)
            hourly_bars.append({'hour': h, 'count': cnt, 'pct': int(cnt / max_hourly * 100)})
        # Daily engagement trend (30d for Chart.js) — 2 batch queries instead of 60
        trend_start = datetime.combine(date.today() - timedelta(days=29), datetime.min.time())
        trend_sessions = dict(db.query(
            func.date(UserSession.started_at).label('day'),
            func.count(UserSession.id)
        ).filter(
            UserSession.user_id == user_id,
            UserSession.started_at >= trend_start,
            UserSession.is_bot == False
        ).group_by(func.date(UserSession.started_at)).all())
        trend_pvs = dict(db.query(
            func.date(PageView.viewed_at).label('day'),
            func.count(PageView.id)
        ).join(UserSession, PageView.session_id == UserSession.id).filter(
            PageView.user_id == user_id,
            PageView.viewed_at >= trend_start,
            UserSession.is_bot == False
        ).group_by(func.date(PageView.viewed_at)).all())
        trend_labels = []
        trend_scores = []
        # One score per day: sessions weighted x3 plus raw page views.
        for i in range(30):
            d = date.today() - timedelta(days=29 - i)
            d_sessions = trend_sessions.get(d, 0)
            d_pv = trend_pvs.get(d, 0)
            daily_score = _log_engagement_score(d_sessions * 3 + d_pv)
            trend_labels.append(d.strftime('%d.%m'))
            trend_scores.append(daily_score)
        # Problem history (recent JS errors and slow page loads)
        js_errors_list = db.query(JSError).join(
            UserSession, JSError.session_id == UserSession.id
        ).filter(
            UserSession.user_id == user_id
        ).order_by(desc(JSError.occurred_at)).limit(10).all()
        slow_pages_list = db.query(PageView).filter(
            PageView.user_id == user_id,
            PageView.load_time_ms > 3000
        ).order_by(desc(PageView.viewed_at)).limit(10).all()
        # Avg sessions per week since account creation (exclude bots)
        weeks_active = max(1, (date.today() - (user.created_at.date() if user.created_at else date.today())).days / 7)
        total_sessions_all = db.query(func.count(UserSession.id)).filter(
            UserSession.user_id == user_id,
            UserSession.is_bot == False
        ).scalar() or 0
        avg_sessions_week = round(total_sessions_all / weeks_active, 1)
        avg_session_dur = db.query(func.avg(UserSession.duration_seconds)).filter(
            UserSession.user_id == user_id,
            UserSession.duration_seconds.isnot(None),
            UserSession.is_bot == False
        ).scalar() or 0
        return render_template(
            'admin/user_insights_profile.html',
            user=user,
            engagement_score=engagement_score,
            problem_score=problem_score,
            timeline=timeline,
            fav_pages=[{'path': p.path, 'count': p.cnt, 'bar_pct': int(p.cnt / max_fav * 100)} for p in fav_pages],
            devices=[{'type': d.device_type or 'unknown', 'count': d.cnt} for d in devices],
            browsers=[{'name': b.browser or 'unknown', 'count': b.cnt} for b in browsers],
            hourly_bars=hourly_bars,
            trend_data={'labels': trend_labels, 'scores': trend_scores},
            js_errors=js_errors_list,
            slow_pages=slow_pages_list,
            password_resets=pr_30d,
            security_alerts_count=sa_7d,
            avg_sessions_week=avg_sessions_week,
            avg_session_duration=int(avg_session_dur),
            search_queries=searches,
            resolution=resolution,
            ref_tab=ref_tab,
            ref_period=ref_period,
        )
    except Exception as e:
        logger.error(f"User insights profile error: {e}", exc_info=True)
        flash('Błąd ładowania profilu użytkownika.', 'error')
        return redirect(url_for('admin.user_insights'))
    finally:
        db.close()
# ============================================================
# CSV EXPORT
# ============================================================
@bp.route('/analytics/export')
@login_required
@role_required(SystemRole.OFFICE_MANAGER)
def user_insights_export():
    """Export user insights data as CSV.

    Query params:
        type: 'problems', 'engagement' (default) or 'pages' — selects which
            tab's dataset is exported; any other value yields an empty CSV.
        period: passed to _get_period_dates (default 'week').

    Returns a text/csv attachment, or redirects back on error.
    """
    export_type = request.args.get('type', 'engagement')
    period = request.args.get('period', 'week')
    start_date, days = _get_period_dates(period)
    db = SessionLocal()
    try:
        output = io.StringIO()
        writer = csv.writer(output)
        if export_type == 'problems':
            # Reuses the problems-tab aggregation; exports both active and
            # resolved problem users in one sheet.
            data = _tab_problems(db, start_date, days)
            writer.writerow(['Użytkownik', 'Email', 'Problem Score', 'Nieudane logowania',
                             'Resety hasła', 'Błędy JS', 'Wolne strony', 'Ostatni login', 'Status'])
            all_problems = data['active_problems'] + data['resolved_problems']
            for p in all_problems:
                writer.writerow([
                    p['user'].name, p['user'].email, p['score'],
                    p['failed_logins'], p['password_resets'], p['js_errors'],
                    p['slow_pages'], p['last_login'] or 'Nigdy',
                    'Rozwiązany' if p['resolved_at'] else 'Aktywny'
                ])
        elif export_type == 'engagement':
            data = _tab_engagement(db, start_date, days)
            writer.writerow(['Użytkownik', 'Email', 'Score', 'Sesje', 'Odsłony',
                             'Zmiana WoW %', 'Status'])
            for e in data['engagement_list']:
                writer.writerow([
                    e['user'].name, e['user'].email, e['score'],
                    e['sessions'], e['page_views'],
                    f"{e['wow']}%" if e['wow'] is not None else 'N/A',
                    e['status']
                ])
        elif export_type == 'pages':
            data = _tab_pages(db, start_date, days)
            writer.writerow(['Ścieżka', 'Odsłony', 'Unikalni', 'Śr. czas (s)',
                             'Śr. scroll %', 'Śr. ładowanie (ms)'])
            for p in data['top_pages']:
                writer.writerow([
                    p['path'], p['views'], p['unique_users'],
                    p['avg_time'], p['avg_scroll'], p['avg_load']
                ])
        output.seek(0)
        return Response(
            output.getvalue(),
            mimetype='text/csv',
            headers={'Content-Disposition': f'attachment; filename=user_insights_{export_type}_{period}.csv'}
        )
    except Exception as e:
        logger.error(f"User insights export error: {e}")
        flash('Błąd eksportu danych.', 'error')
        return redirect(url_for('admin.user_insights'))
    finally:
        db.close()
@bp.route('/analytics/send-reset/<int:user_id>', methods=['POST'])
@login_required
@role_required(SystemRole.OFFICE_MANAGER)
def user_insights_send_reset(user_id):
    """Send password reset email to user from User Insights.

    Rotates the user's reset token (24h validity), emails the reset link,
    and records both an EmailLog and an AuditLog entry.

    Returns:
        JSON ``{'success': True, 'message': ...}`` on success, or
        ``{'error': ...}`` with status 404 (user missing/inactive) or
        500 (mailer not configured / send or DB failure).
    """
    db = SessionLocal()
    try:
        user = db.query(User).get(user_id)
        if not user or not user.is_active:
            return jsonify({'error': 'Użytkownik nie znaleziony lub nieaktywny'}), 404
        # Check the mailer BEFORE rotating the token: previously a
        # misconfigured email service still overwrote (and thus
        # invalidated) the user's existing reset link without
        # delivering a new one. Guard-first also matches
        # user_insights_send_welcome.
        import email_service
        if not email_service.is_configured():
            return jsonify({'error': 'Email service nie skonfigurowany'}), 500
        # Generate reset token (24h validity). Committed before sending
        # so that any delivered email always points at a stored token.
        token = secrets.token_urlsafe(32)
        user.reset_token = token
        user.reset_token_expires = datetime.now() + timedelta(hours=24)
        db.commit()
        # Build reset URL
        base_url = os.getenv('APP_URL', 'https://nordabiznes.pl')
        reset_url = f"{base_url}/reset-password/{token}"
        if not email_service.send_password_reset_email(user.email, reset_url):
            return jsonify({'error': 'Błąd wysyłki emaila'}), 500
        # Log to email_logs
        email_log = EmailLog(
            recipient_email=user.email,
            recipient_name=user.name,
            email_type='password_reset',
            subject='Reset hasła - Norda Biznes Partner',
            status='sent',
            user_id=user.id,
            sender_email=current_user.email,
        )
        db.add(email_log)
        # Audit log
        audit = AuditLog(
            user_id=current_user.id,
            user_email=current_user.email,
            action='user.password_reset_sent',
            entity_type='user',
            entity_id=user.id,
            entity_name=user.email,
            ip_address=request.remote_addr,
            request_path=request.path,
        )
        db.add(audit)
        db.commit()
        logger.info(f"Admin {current_user.email} sent password reset to {user.email} (user {user.id})")
        return jsonify({'success': True, 'message': f'Reset wysłany do {user.email}'})
    except Exception as e:
        logger.error(f"Send reset error for user {user_id}: {e}")
        db.rollback()
        return jsonify({'error': 'Błąd wysyłki'}), 500
    finally:
        db.close()
@bp.route('/analytics/send-welcome/<int:user_id>', methods=['POST'])
@login_required
@role_required(SystemRole.OFFICE_MANAGER)
def user_insights_send_welcome(user_id):
    """Send welcome activation email to user who never logged in.

    Rotates the user's reset token (72h validity, reused as the
    activation token), emails the activation link, and records both an
    EmailLog and an AuditLog entry.

    Returns:
        JSON ``{'success': True, 'message': ...}`` on success, or
        ``{'error': ...}`` with status 404 (user missing/inactive) or
        500 (mailer not configured / send or DB failure).
    """
    db = SessionLocal()
    try:
        user = db.query(User).get(user_id)
        if not user or not user.is_active:
            return jsonify({'error': 'Użytkownik nie znaleziony lub nieaktywny'}), 404
        # Check the mailer BEFORE rotating the token: previously the new
        # token was committed first, so a misconfigured email service
        # invalidated the user's existing link without delivering one.
        import email_service
        if not email_service.is_configured():
            return jsonify({'error': 'Email service nie skonfigurowany'}), 500
        # Generate activation token (72h validity). Committed before
        # sending so any delivered email points at a stored token.
        token = secrets.token_urlsafe(32)
        user.reset_token = token
        user.reset_token_expires = datetime.now() + timedelta(hours=72)
        db.commit()
        base_url = os.getenv('APP_URL', 'https://nordabiznes.pl')
        reset_url = f"{base_url}/reset-password/{token}"
        success = email_service.send_welcome_activation_email(
            email=user.email, name=user.name or user.email, reset_url=reset_url
        )
        if not success:
            return jsonify({'error': 'Błąd wysyłki emaila'}), 500
        email_log = EmailLog(
            recipient_email=user.email,
            recipient_name=user.name,
            email_type='welcome',
            subject='Zaproszenie do portalu Norda Biznes Partner',
            status='sent',
            user_id=user.id,
            sender_email=current_user.email,
        )
        db.add(email_log)
        audit = AuditLog(
            user_id=current_user.id,
            user_email=current_user.email,
            action='user.welcome_email_sent',
            entity_type='user',
            entity_id=user.id,
            entity_name=user.email,
            ip_address=request.remote_addr,
            request_path=request.path,
        )
        db.add(audit)
        db.commit()
        logger.info(f"Admin {current_user.email} sent welcome email to {user.email} (user {user.id})")
        return jsonify({'success': True, 'message': f'Email powitalny wysłany do {user.email}'})
    except Exception as e:
        logger.error(f"Send welcome error for user {user_id}: {e}")
        db.rollback()
        return jsonify({'error': 'Błąd wysyłki'}), 500
    finally:
        db.close()
# ============================================================
# OLD URL REDIRECTS
# ============================================================
@bp.route('/user-insights/user/<int:user_id>')
@login_required
def user_insights_profile_redirect(user_id):
    """Legacy alias: forward old user-profile URLs to the current route, preserving query args."""
    target = url_for('admin.user_insights_profile', user_id=user_id, **request.args)
    return redirect(target)
@bp.route('/user-insights/export')
@login_required
def user_insights_export_redirect():
    """Legacy alias: forward old export URLs to the current route, preserving query args."""
    target = url_for('admin.user_insights_export', **request.args)
    return redirect(target)
@bp.route('/user-insights/send-reset/<int:user_id>', methods=['POST'])
@login_required
def user_insights_send_reset_redirect(user_id):
    """Legacy POST alias; 307 keeps the request method (and body) across the redirect."""
    target = url_for('admin.user_insights_send_reset', user_id=user_id)
    return redirect(target, code=307)