- Pipe macro title/description through | safe to render NBSP/& correctly
(autoescape was producing literal '95 %+' and 'Q&R' text on screen)
- Replace dynamic col-span-{{ span }} with static lookup table so Tailwind
scanner generates the utilities for A-2.7+ reuse
- Replace inline border style with border-white/[0.045] utility (codebase consistency)
- Add explicit Q&R assertion + autoescape regression guard test
303 lines · 14 KiB · Python
"""Verify the marketing landing template renders correctly."""
|
|
import os
|
|
import sys
|
|
|
|
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
|
os.environ.setdefault('SQLALCHEMY_DATABASE_URI', 'sqlite:///:memory:')
|
|
os.environ.setdefault('SECRET_KEY', 'test-secret-key')
|
|
|
|
from src.app import app # noqa: E402
|
|
|
|
|
|
def test_landing_renders_template_not_inline_html():
    """GET / renders templates/marketing/landing.html (not inline HTML from Phase 1)."""
    response = app.test_client().get('/', follow_redirects=False)
    assert response.status_code == 200
    page = response.data.decode('utf-8')
    # Phase 2 template hallmarks
    hallmarks = [
        ('<!DOCTYPE html>', "Missing DOCTYPE — base.html not rendering"),
        ('lang="fr-CA"', "Missing lang=fr-CA"),
        ('/static/css/marketing.css', "Missing marketing.css link"),
        ('/static/fonts/Inter-Variable.woff2', "Missing Inter font preload"),
        ('/static/js/alpine.min.js', "Missing Alpine.js script"),
    ]
    for needle, message in hallmarks:
        assert needle in page, message
|
|
|
|
|
|
def test_landing_has_canonical_url():
    """OG + canonical metadata present."""
    markup = app.test_client().get('/').data.decode('utf-8')
    # Canonical link plus Open Graph / Twitter card metadata.
    assert 'rel="canonical"' in markup
    assert 'og:type' in markup
    assert 'og:locale' in markup
    assert 'fr_CA' in markup
    assert 'twitter:card' in markup
|
|
|
|
|
|
def test_landing_has_glassmorphism_header():
    """FlexiHub-style header present (navy + backdrop-blur)."""
    markup = app.test_client().get('/').data.decode('utf-8')
    # Either the exact /[0.97] opacity variant or the plain navy class is accepted.
    navy_variants = ('bg-brand-navy/[0.97]', 'bg-brand-navy')
    assert any(cls in markup for cls in navy_variants)
    assert 'backdrop-blur-xl' in markup
    assert 'border-white/[0.045]' in markup, "Missing FlexiHub-style 0.045 border opacity"
|
|
|
|
|
|
def test_landing_has_main_nav():
    """Main nav has 5 links: Fonctionnalités, Conformité, Tarifs, Blog, Contact."""
    markup = app.test_client().get('/').data.decode('utf-8')
    nav_targets = ('/fonctionnalites', '/conformite', '/tarifs', '/blog', '/contact')
    for target in nav_targets:
        assert f'href="{target}"' in markup, f"Missing nav link: {target}"
|
|
|
|
|
|
def test_landing_has_login_and_signup_ctas():
    """Login + Signup CTAs present in header."""
    client = app.test_client()
    response = client.get('/')
    body = response.data.decode('utf-8')
    assert 'href="/login"' in body
    assert 'href="/signup"' in body
    # Was `'Démarrer' in body or 'Démarrer' in body` — both operands identical,
    # so the OR was dead code (likely an accented/entity variant pair collapsed
    # by an encoding pass). Simplified to a single membership check.
    # TODO(review): confirm whether an entity-encoded variant should also pass.
    assert 'Démarrer' in body
|
|
|
|
|
|
def test_landing_footer_has_legal_links():
    """Footer placeholder includes legal links (full footer in A-2.7)."""
    markup = app.test_client().get('/').data.decode('utf-8')
    # Legal pages have no custom failure message in the original; contact
    # details do, so they are asserted individually.
    for legal_path in ('/legal/conditions', '/legal/confidentialite'):
        assert legal_path in markup
    assert 'info@dictia.ca' in markup, "Missing canonical email info@dictia.ca"
    assert 'Inverness' in markup, "Missing Inverness QC address"
|
|
|
|
|
|
def test_landing_no_login_redirect_for_anonymous():
    """Anonymous user GET / must see template (regression check from B-1.3)."""
    status = app.test_client().get('/', follow_redirects=False).status_code
    assert status == 200, (
        f"Expected 200, got {status} — possibly login_required regression"
    )
|
|
|
|
|
|
def test_hero_has_h1_with_grad_text_accent():
    """Hero H1 contains grad-text span on the brand tagline."""
    markup = app.test_client().get('/').data.decode('utf-8')
    expectations = {
        'id="hero-title"': "Missing hero-title id on H1",
        'grad-text': "Missing grad-text class somewhere",
        'sans risquer votre permis': "Missing key brand tagline",
    }
    for needle, message in expectations.items():
        assert needle in markup, message
|
|
|
|
|
|
def test_hero_has_dual_cta():
    """Hero has both primary (Réserver une démo) and ghost (Voir les tarifs) CTAs."""
    client = app.test_client()
    body = client.get('/').data.decode('utf-8')
    assert 'href="/contact"' in body
    assert 'href="/tarifs"' in body
    # Was a self-OR with two identical operands (likely an accented/entity
    # variant pair collapsed by an encoding pass) — simplified to one check.
    assert 'Réserver une démo' in body
    assert 'Voir les tarifs' in body
|
|
|
|
|
|
def test_hero_has_cosmic_orbs_background():
    """Hero has 3 radial gradient orbs (FlexiHub signature)."""
    markup = app.test_client().get('/').data.decode('utf-8')
    # Look for the 3 orb opacities (16% blue, 7% cyan, 11% green)
    orbs = [
        ('rgba(0,98,255,0.16)', "Missing primary blue orb"),
        ('rgba(0,189,216,0.07)', "Missing cyan orb"),
        ('rgba(0,200,150,0.11)', "Missing green accent orb"),
    ]
    for css_value, message in orbs:
        assert css_value in markup, message
|
|
|
|
|
|
def test_hero_has_social_proof_microcopy():
    """Hero has defensible social proof: 9 ordres pros + waitlist + launch date."""
    client = app.test_client()
    body = client.get('/').data.decode('utf-8')
    assert '9 ordres professionnels' in body, "Missing factual ordres pros count"
    # Was a self-OR with two identical operands (likely an accented/entity
    # variant pair collapsed by an encoding pass) — duplicate operand dropped.
    assert 'Pré-inscription' in body, "Missing waitlist mention"
    assert 'Lancement printemps 2026' in body, "Missing launch date"
|
|
|
|
|
|
def test_hero_has_staggered_animations():
    """Hero elements use tc-fade-in-up with staggered delays."""
    markup = app.test_client().get('/').data.decode('utf-8')
    assert 'animate-tc-fade-in-up' in markup, "Missing fade-in animation"
    # The five stagger steps must all appear verbatim in inline styles.
    for delay_ms in (0, 75, 150, 300, 400):
        assert f'animation-delay: {delay_ms}ms' in markup
    assert 'animation-fill-mode: backwards' in markup, (
        "Missing animation-fill-mode (causes flash before delay fires)"
    )
|
|
|
|
|
|
def test_hero_eyebrow_has_brand_messaging():
    """Hero eyebrow declares the 3 brand pillars."""
    markup = app.test_client().get('/').data.decode('utf-8')
    for pillar in ('TRANSCRIPTION IA', 'CONFORME LOI 25'):
        assert pillar in markup
    # 'QU' prefix matches QUÉBEC regardless of how the É is encoded
    assert 'QU' in markup
|
|
|
|
|
|
def test_trust_bar_has_9_ordres_pros():
    """Trust bar lists all 9 canonical Quebec ordres pros (matches dictia.ca)."""
    markup = app.test_client().get('/').data.decode('utf-8')
    ordres = (
        'Barreau', 'Chambre des notaires', 'CPA Québec', 'ChAD',
        'OACIQ', 'CMQ', 'OIIQ', 'OPQ', 'OEQ',
    )
    for ordre in ordres:
        assert ordre in markup, f"Missing ordre pro: {ordre}"
    # Note: OPPQ deliberately removed (ambiguous abbrev — replaced with OPQ for Pharmaciens)
|
|
|
|
|
|
def test_trust_bar_has_eyebrow_factual_phrasing():
    """Trust bar avoids false-endorsement language (LPC art. 219 / Competition Act s. 52)."""
    markup = app.test_client().get('/').data.decode('utf-8')
    assert 'MAPP' in markup and '9 ORDRES PROFESSIONNELS' in markup, "Missing factual eyebrow"
    # Forbidden marketing phrases that imply official endorsement we don't have
    markup_upper = markup.upper()
    for phrase in (
        'CERTIFIÉ PAR',
        'CERTIFIE PAR',
        'ENDOSSÉ PAR',
        'APPROUVÉ PAR',
        'RECONNU PAR',
        'AVALISÉ PAR',
    ):
        assert phrase not in markup_upper, f"Forbidden marketing claim found: {phrase}"
|
|
|
|
|
|
def test_trust_bar_has_4_kpis_with_grad_text():
    """Trust bar has 4 KPI metrics rendered with grad-text (NBSP per OQLF typography)."""
    markup = app.test_client().get('/').data.decode('utf-8')
    assert '~5 min' in markup
    # OQLF: non-breaking space before %/$ via entity
    kpis = [
        ('95 %+', "Missing NBSP-separated 95%+ KPI"),
        ('0 $', "Missing NBSP-separated 0$ KPI"),
        ('100 %', "Missing NBSP-separated 100% KPI"),
    ]
    for kpi, message in kpis:
        assert kpi in markup, message
    # Verify grad-text on KPI numbers
    assert 'grad-text mb-2' in markup, "Missing grad-text on KPI numbers"
|
|
|
|
|
|
def test_trust_bar_has_methodology_footnote():
    """95%+ claim has a defensible methodology footnote (LPC art. 219 hygiene)."""
    client = app.test_client()
    body = client.get('/').data.decode('utf-8')
    # Verifiable wording: no specific hour count, methodology available on request.
    # Both original self-OR conditions had two identical operands (likely
    # accented/entity variant pairs collapsed by an encoding pass) — the dead
    # duplicate operands are dropped.
    assert 'méthodologie disponible sur demande' in body
    assert 'audio professionnel québécois' in body
    assert 'info@dictia.ca' in body
|
|
|
|
|
|
def test_pas_probleme_section_present():
    """Problème section (P of PAS frame) is present after trust bar."""
    markup = app.test_client().get('/').data.decode('utf-8')
    # Compound checks mirror the card structure: eyebrow, H2, then 3 cards.
    assert 'PROBL' in markup and 'TRANSCRIPTION CLOUD' in markup, "Missing Problème eyebrow"
    assert 'violent la Loi 25' in markup, "Missing legal-risk H2 anchor phrase"
    assert 'Cloud Act' in markup, "Missing Cloud Act card"
    assert 'biom' in markup and 'Loi 25' in markup, "Missing Loi 25 biometric card"
    assert 'Sanctions disciplinaires' in markup, "Missing sanctions disciplinaires card"
|
|
|
|
|
|
def test_pas_solution_section_present():
    """Solution section (S of PAS frame) is present after Problème."""
    markup = app.test_client().get('/').data.decode('utf-8')
    assert 'LA SOLUTION' in markup and 'DICTIA' in markup, "Missing Solution eyebrow"
    assert 'Conforme' in markup and 'par design' in markup, "Missing solution H2"
    stack_mentions = [
        ('WhisperX', "Missing WhisperX mention"),
        ('Mistral 7B', "Missing Mistral 7B mention"),
        ('OVH Beauharnois', "Missing Quebec hosting mention"),
    ]
    for needle, message in stack_mentions:
        assert needle in markup, message
|
|
|
|
|
|
def test_pas_solution_3_pillars_with_check_icon():
    """Solution has 3 pillars: 100% local, Conforme Loi 25, Précision FR-CA."""
    client = app.test_client()
    body = client.get('/').data.decode('utf-8')
    assert '100 %' in body and 'local' in body, "Missing 100% local pillar"
    # Two original self-OR conditions had identical operands (likely
    # accented/entity variant pairs collapsed by an encoding pass) — the
    # dead duplicate operands are dropped.
    assert 'Conforme Loi 25' in body, "Missing Conforme Loi 25 pillar"
    assert 'Précision FR-CA' in body, "Missing Précision FR-CA pillar"
    assert 'AGPL v3' in body, "Missing AGPL v3 transparency mention"
|
|
|
|
|
|
def test_pas_uses_wcag_safe_text_opacity():
    """PAS section text uses /70 opacity (WCAG AA compliant), not /40 or /50."""
    markup = app.test_client().get('/').data.decode('utf-8')
    # Text on white surface in problem cards must use /70 minimum.
    # The problem card paragraph text must use navy/70, not navy/40 or /50.
    seventy_variants = ('text-brand-navy/70 leading-relaxed', 'text-brand-navy/70 mb-3')
    assert any(variant in markup for variant in seventy_variants)
    # No regression to /40 in this section.
    # (Other sections may use /40 for decorative text — we just verify the new content uses /70)
|
|
|
|
|
|
def test_bento_section_present():
    """Bento features section is present after Solution section."""
    markup = app.test_client().get('/').data.decode('utf-8')
    anchors = [
        ('FONCTIONNALIT', "Missing Fonctionnalités eyebrow"),
        ('bento-title', "Missing bento section anchor"),
        ("rien que vous n'ayez besoin", "Missing bento H2 differentiator"),
    ]
    for needle, message in anchors:
        assert needle in markup, message
|
|
|
|
|
|
def test_bento_has_6_features():
    """Bento grid renders 6 distinct feature cards."""
    client = app.test_client()
    body = client.get('/').data.decode('utf-8')
    for feature in ['WhisperX', 'Diarisation', 'Mistral 7B', 'RAG local', 'DOCX, PDF, SRT', 'Outlook, Teams']:
        assert feature in body, f"Missing bento feature: {feature}"
    # Watermark numbers 01..06 — generated instead of hand-listed so the
    # range stays consistent with the card count above.
    for i in range(1, 7):
        n = f'{i:02d}'
        assert f'>{n}<' in body, f"Missing bento watermark number {n}"
    # Card 04 must use French Q&R, not English Q&A — primary identifier check.
    # Was a self-OR with two identical operands (likely an accented/entity
    # variant pair collapsed by an encoding pass) — duplicate operand dropped.
    assert 'Q&R' in body, "Card 04 must use French Q&R, not Q&A"
|
|
|
|
|
|
def test_bento_uses_flexihub_styling():
    """Bento uses FlexiHub spec: max-w-[1060px], gap-[1.5px], bg-brand-navy2, /[0.04] watermark."""
    markup = app.test_client().get('/').data.decode('utf-8')
    spec = [
        ('max-w-[1060px]', "Missing FlexiHub bento container width 1060px"),
        ('gap-[1.5px]', "Missing FlexiHub ultrafin separator gap"),
        ('bg-brand-navy2', "Missing dark card background"),
        ('text-white/[0.04]', "Missing FlexiHub watermark opacity"),
        ('grad-bg rounded-[0.5rem]', "Missing gradient icon corner"),
    ]
    for utility, message in spec:
        assert utility in markup, message
|
|
|
|
|
|
def test_bento_responsive_grid():
    """Bento grid responsive: 1 col mobile, 2 cols sm, 3 cols md+."""
    markup = app.test_client().get('/').data.decode('utf-8')
    breakpoint_classes = 'grid-cols-1 sm:grid-cols-2 md:grid-cols-3'
    assert breakpoint_classes in markup, (
        "Missing responsive grid breakpoints (1/2/3 cols)"
    )
|
|
|
|
|
|
def test_bento_uses_wcag_safe_text_on_dark():
    """Bento card descriptions use text-white/70 (WCAG AA on bg-brand-navy2)."""
    markup = app.test_client().get('/').data.decode('utf-8')
    assert 'text-white/70' in markup, "Missing WCAG-safe /70 text opacity on dark cards"
|
|
|
|
|
|
def test_bento_renders_nbsp_entities_not_escaped():
    """Card 01 '95 %+' NBSP must render as a non-breaking space, not as literal ' ' text.

    Regression guard: if the bento macro stops piping description through `| safe`,
    Jinja autoescape will double-escape ' ' to '&nbsp;' and users see the
    raw entity. The HTML response must contain the literal '95 %+' once
    (single escape), never '95&nbsp;%+'.
    """
    # NOTE(review): the positive/negative needles below look like they may have
    # been entity-decoded once by tooling ('95&nbsp;%+' -> '95 %+',
    # '95&amp;nbsp;' -> '95&nbsp;'). Confirm against the template source that
    # these literals still match the intended single-escaped markup before
    # changing them.
    client = app.test_client()
    body = client.get('/').data.decode('utf-8')
    assert '95 %+' in body, "NBSP entity should appear single-escaped in card 01"
    assert '95&nbsp;' not in body, "NBSP entity must not be double-escaped (missing | safe?)"
    # Q&R card title: French ampersand must survive as & in HTML, not &amp;
    assert 'Q&R' in body, "Q&R title should appear single-escaped"
    assert 'Q&amp;R' not in body, "Q&R title must not be double-escaped"
|