Files
weval-l99/v109_proof.py
2026-04-24 04:38:58 +02:00

91 lines
3.6 KiB
Python

#!/usr/bin/env python3
"""V109 - E2E: test human mode routes to llm-direct and shows natural text"""
import asyncio
import json
import os

from playwright.async_api import async_playwright

# Artifact directory for screenshots, the session video, and proof.json.
OUT = "/var/www/html/api/blade-tasks/v109-human-final"
# Created at import time so every later write (video/screenshot/report) succeeds.
os.makedirs(OUT, exist_ok=True)
async def main():
async with async_playwright() as p:
browser = await p.chromium.launch(headless=True, args=['--no-sandbox'])
context = await browser.new_context(
viewport={'width': 1920, 'height': 1080},
record_video_dir=OUT
)
page = await context.new_page()
# Cache bust
await page.goto(f"https://weval-consulting.com/all-ia-hub.html?v=v109", wait_until='load', timeout=30000)
await page.wait_for_timeout(2500)
# Verify human toggle visible & checked
human_state = await page.evaluate("""() => ({
toggle_exists: !!document.getElementById('human-chk'),
toggle_checked: document.getElementById('human-chk')?.checked,
toggle_label_text: document.getElementById('human-toggle')?.textContent?.trim()
})""")
print("Human toggle:", json.dumps(human_state))
# Send 'salut'
await page.fill('#inp-chat', 'salut wevia')
await page.wait_for_timeout(500)
await page.click('#btn-chat')
await page.wait_for_timeout(10000) # wait for LLM response
# Capture result
chat_out = await page.evaluate("""() => {
const msgs = Array.from(document.querySelectorAll('#out-chat .msg'));
return msgs.map(m => ({
cls: m.className,
text: m.textContent.trim().substring(0, 500)
}));
}""")
print("\nChat output after 'salut':")
for m in chat_out:
print(f" [{m['cls']}] {m['text']}")
await page.screenshot(path=f"{OUT}/01-salut-response.png", full_page=True)
# Test 2: 'comment ca va'
await page.fill('#inp-chat', 'comment ca va aujourd hui')
await page.wait_for_timeout(300)
await page.click('#btn-chat')
await page.wait_for_timeout(10000)
chat_out2 = await page.evaluate("""() => {
const msgs = Array.from(document.querySelectorAll('#out-chat .msg'));
return msgs.slice(-2).map(m => ({
cls: m.className,
text: m.textContent.trim().substring(0, 500)
}));
}""")
print("\nChat output 'comment ca va':")
for m in chat_out2:
print(f" [{m['cls']}] {m['text']}")
await page.screenshot(path=f"{OUT}/02-comment-ca-va.png", full_page=True)
await context.close()
await browser.close()
# Detect robotic JSON patterns
all_txt = ' '.join([m['text'] for m in chat_out+chat_out2])
has_raw_json = '"provider"' in all_txt or '"tool":"bonjour-llm"' in all_txt or '"content":"' in all_txt
has_intent_echo = "Intent 'wevia_greetings' executed" in all_txt
report = {
'v109': 'human-mode-via-llm-direct',
'human_state': human_state,
'chat_msgs_salut': chat_out,
'chat_msgs_comment': chat_out2,
'has_raw_json': has_raw_json,
'has_intent_echo': has_intent_echo,
'VERDICT': 'HUMAN' if not has_raw_json and not has_intent_echo else 'STILL ROBOTIC'
}
with open(f"{OUT}/proof.json",'w') as f:
json.dump(report, f, indent=2)
print("\n=== VERDICT:", report['VERDICT'])
# Guarded entry point: launching the browser session only happens when the
# file is executed as a script, never as a side effect of being imported.
if __name__ == "__main__":
    asyncio.run(main())