debug(cache): Log per-field hashes to identify unstable field
Some checks are pending
NordaBiz Tests / Unit & Integration Tests (push) Waiting to run
NordaBiz Tests / E2E Tests (Playwright) (push) Blocked by required conditions
NordaBiz Tests / Smoke Tests (Production) (push) Blocked by required conditions
NordaBiz Tests / Send Failure Notification (push) Blocked by required conditions

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Maciej Pienczyn 2026-02-07 13:54:03 +01:00
parent 1d6dfa0da8
commit 4ff386fa7d

View File

@@ -597,6 +597,11 @@ def generate_analysis(company_id: int, audit_type: str, user_id: int = None, for
# Fragment from the body of generate_analysis (def is outside this view).
# Decides whether a previously cached AI analysis can be reused by comparing
# the stored hash of the audit input data against the freshly computed one.
if cache:
# Log both hashes (truncated to 12 hex chars for readability) plus the
# match result and expiry, to diagnose unexpected cache misses.
logger.info(f"Cache check: stored_hash={cache.audit_data_hash[:12]}... current_hash={data_hash[:12]}... match={cache.audit_data_hash == data_hash} expires={cache.expires_at}")
if cache.audit_data_hash != data_hash:
# Debug: find which fields changed
# Hash each field of hash_data individually so the log reveals which
# specific field's serialization is unstable between runs.
# NOTE(review): default=str stringifies any non-JSON-serializable value;
# types with non-deterministic str() (e.g. objects printing their id)
# would themselves produce unstable per-field hashes — confirm.
for k, v in sorted(hash_data.items()):
field_hash = hashlib.sha256(json.dumps({k: v}, default=str).encode()).hexdigest()[:8]
# Truncate val to 80 chars to keep log lines bounded.
logger.info(f"  field_hash: {k}={field_hash} type={type(v).__name__} val={str(v)[:80]}")
# Cache hit requires all three: hash match, an expiry set, and not yet expired.
# NOTE(review): datetime.now() is naive — assumes expires_at is also naive
# local time; verify against where expires_at is written.
if cache and cache.audit_data_hash == data_hash and cache.expires_at and cache.expires_at > datetime.now():
logger.info(f"AI analysis cache hit for company {company_id} audit_type={audit_type}")