73 lines
3.2 KiB
Python
73 lines
3.2 KiB
Python
#!/usr/bin/env python3
"""V109 - Switch human mode to llm-direct.php (JSON direct, pas SSE).

Idempotent patcher for the hub page: replaces the human-mode fetch block
(previously routed to the SSE endpoint wevia-stream-api.php) with a direct
call to llm-direct.php, and refreshes the "Mode Humain" toggle label.
Safe to re-run: a V109-LLM-DIRECT marker in the file means "already done".
"""

import sys

PATH = "/var/www/html/all-ia-hub.html"
MARKER = "V109-LLM-DIRECT"

# Switch endpoint from wevia-stream-api (SSE) to llm-direct (JSON)
# llm-direct expects: stdin body = message text
# But HTTP POST hits nginx -> php-fpm reads php://input which has the body
# Looking at llm-direct: $msg = $argv[1] ?? file_get_contents("php://stdin")
# via HTTP POST, stdin IS the request body - so sending raw text works
OLD = """ const endpoint = (humanMode && shortMsg) ? '/api/wevia-stream-api.php' : '/api/wevia-master-api.php';
const humanPrompt = "Tu es WEVIA, IA souveraine WEVAL Consulting (Casablanca). Tu parles comme une collegue chaleureuse et directe. Tutoie Yacine. Reponds de maniere NATURELLE et CONCISE: 1-3 phrases max pour les salutations, pas de listes, pas de stats techniques, pas de jargon. Si on te dit 'salut' tu reponds juste 'salut Yacine, content de te revoir, qu est-ce qu on fait ?' par exemple. Pas de self-intro exhaustive. Sois humaine.";
const body = (humanMode && shortMsg) ? {
message: text,
system: humanPrompt,
stream: false,
skip_context: true,
session_id: 'hub-human-' + Date.now()
} : {
message: text,
session_id: 'all-ia-hub-' + Date.now()
};
const res=await fetch(endpoint,{
method:'POST',
headers:{'Content-Type':'application/json'},
body:JSON.stringify(body),
signal:AbortSignal.timeout(300000)
});"""

NEW = """ /* V109-LLM-DIRECT: human mode hits llm-direct.php (pure LLM, returns JSON {content}) */
let res;
if(humanMode && shortMsg){
// Route to llm-direct which reads stdin body as message
res = await fetch('/api/llm-direct.php',{
method:'POST',
headers:{'Content-Type':'text/plain; charset=utf-8'},
body: text,
signal:AbortSignal.timeout(60000)
});
} else {
res = await fetch('/api/wevia-master-api.php',{
method:'POST',
headers:{'Content-Type':'application/json'},
body:JSON.stringify({message:text,session_id:'all-ia-hub-'+Date.now()}),
signal:AbortSignal.timeout(300000)
});
}"""

# Human toggle label refresh (best-effort: the fetch-block patch above is the
# critical part; the label is cosmetic).
OLD_TOGGLE = """<span style="cursor:pointer;user-select:none" id="human-toggle" onclick="toggleHumanMode()" title="Mode humain: bypass les reponses robotiques, route vers LLM souveraine avec prompt humain">
<input type="checkbox" id="human-chk" style="vertical-align:middle" checked> Mode Humain
</span>"""

NEW_TOGGLE = """<span style="cursor:pointer;user-select:none;color:var(--vl)" id="human-toggle" onclick="toggleHumanMode()" title="Mode humain: bypass stubs/intents, route vers LLM pure (llm-direct.php) avec prompt /etc/wevia/system-prompt.txt">
<input type="checkbox" id="human-chk" style="vertical-align:middle" checked> 💖 Mode Humain (LLM pure)
</span>"""


def patch_content(c):
    """Apply the V109 patch to HTML content `c`.

    Returns the patched string, or None when the marker shows the patch is
    already applied. Raises SystemExit when the expected fetch block is
    absent (page structure changed; blind patching would be unsafe).
    """
    if MARKER in c:
        return None
    # Explicit check instead of `assert`: asserts vanish under `python -O`,
    # and a missing anchor must always abort the patch.
    if OLD not in c:
        raise SystemExit("old block NOT FOUND - structure may have changed")
    c = c.replace(OLD, NEW, 1)
    if OLD_TOGGLE in c:
        c = c.replace(OLD_TOGGLE, NEW_TOGGLE, 1)
    else:
        # Previously this miss was silent; surface it without failing.
        print("WARN: human-toggle block not found, label left unchanged",
              file=sys.stderr)
    return c


def main():
    """Read the hub page, patch it in place, report the result."""
    with open(PATH, "r", encoding="utf-8") as f:
        content = f.read()
    patched = patch_content(content)
    if patched is None:
        print("ALREADY")
        sys.exit(0)
    with open(PATH, "w", encoding="utf-8") as f:
        f.write(patched)
    print(f"PATCHED size={len(patched)}")


if __name__ == "__main__":
    main()