auto-sync-2335
This commit is contained in:
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"agent": "V41_Risk_Escalation",
|
||||
"ts": "2026-04-21T23:15:02+02:00",
|
||||
"ts": "2026-04-21T23:30:03+02:00",
|
||||
"dg_alerts_active": 7,
|
||||
"wevia_life_stats_preview": "{
|
||||
"ok": true,
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"agent": "V45_Leads_Sync",
|
||||
"ts": "2026-04-21T23:20:02+02:00",
|
||||
"ts": "2026-04-21T23:30:04+02:00",
|
||||
"paperclip_total": 48,
|
||||
"active_customer": 4,
|
||||
"warm_prospect": 5,
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"agent": "V54_Risk_Monitor_Live",
|
||||
"ts": "2026-04-21T23:00:03+02:00",
|
||||
"ts": "2026-04-21T23:30:04+02:00",
|
||||
"critical_risks": {
|
||||
"RW01_pipeline_vide": {
|
||||
"pipeline_keur": 0,
|
||||
@@ -22,7 +22,7 @@
|
||||
},
|
||||
"RW12_burnout": {
|
||||
"agents_cron_active": 15,
|
||||
"load_5min": "5.02",
|
||||
"load_5min": "8.78",
|
||||
"automation_coverage_pct": 70,
|
||||
"residual_risk_pct": 60,
|
||||
"trend": "V52_goldratt_options_active"
|
||||
|
||||
68
api/ambre-session-chat.php
Normal file
68
api/ambre-session-chat.php
Normal file
@@ -0,0 +1,68 @@
|
||||
<?php

declare(strict_types=1);

/**
 * ambre-session-chat.php · contextual chat with memory + empathy.
 *
 * POST {message, session_id}
 *  - Loads the last 10 turns from the session store.
 *  - Calls the local LLM gateway with full history + empathy system prompt.
 *  - Appends the user and assistant turns back into session memory.
 *
 * Returns JSON: {response, provider, intent, session_id, turns_in_memory,
 * history_used, elapsed_ms}.
 */
require_once __DIR__ . "/ambre-session-memory.php";

header("Content-Type: application/json; charset=utf-8");

// Accept a JSON request body first; fall back to classic form-POST fields.
// Bug fix: the original used `json_decode(...) ?: $_POST`, a truthiness test
// that also discarded valid-but-falsy JSON bodies.
$raw = file_get_contents("php://input");
$decoded = is_string($raw) ? json_decode($raw, true) : null;
$in = is_array($decoded) ? $decoded : $_POST;

$msg = trim((string)($in["message"] ?? ""));
$sid = trim((string)($in["session_id"] ?? ""));

// Bug fix: `if (!$msg)` also rejected the legitimate message "0"; an error is
// now signalled with an explicit 400 instead of a 200 + error body only.
if ($msg === "") {
    http_response_code(400);
    echo json_encode(["error" => "message required"]);
    exit;
}
// Anonymous sessions get a stable id derived from the client address.
if ($sid === "") {
    $sid = "anon-" . substr(md5($_SERVER["REMOTE_ADDR"] ?? "x"), 0, 8);
}

// Prior turns (role/content pairs) for conversational context.
$history = AmbreSessionMemory::context_messages($sid, 10);

// Empathy / continuity system prompt (French, per product copy — kept verbatim).
$sys = "Tu es WEVIA, une IA empathique et adaptative de WEVAL Consulting. " .
       "Tu te souviens des échanges précédents dans cette conversation. " .
       "Si l'utilisateur revient sur un sujet antérieur, reconnais-le explicitement. " .
       "Si l'utilisateur change de sujet, adapte-toi avec fluidité. " .
       "Si l'utilisateur exprime une émotion (joie, frustration, urgence, etc.), reconnais-la avec empathie. " .
       "Si l'utilisateur te demande d'améliorer un rendu précédent, refère-toi au dernier rendu et propose une version améliorée. " .
       "Réponse en français, concise mais riche, sans préambule inutile.";

$messages = [["role" => "system", "content" => $sys]];
foreach ($history as $h) {
    $messages[] = $h;
}
$messages[] = ["role" => "user", "content" => $msg];

// Call the local LLM gateway. "@" is kept deliberately: without a custom error
// handler a connection warning would be printed into (and corrupt) the JSON
// response body. Failure is handled explicitly below instead of being ignored.
$t0 = microtime(true);
$raw_llm = @file_get_contents("http://127.0.0.1:4000/v1/chat/completions", false, stream_context_create([
    "http" => [
        "method" => "POST",
        "header" => "Content-Type: application/json\r\n",
        "content" => json_encode(["model" => "fast", "messages" => $messages, "max_tokens" => 1200, "temperature" => 0.5]),
        "timeout" => 30,
        // Read the body even on HTTP 4xx/5xx so a gateway error payload can
        // still be decoded instead of file_get_contents() returning false.
        "ignore_errors" => true,
    ],
]));
$elapsed = (int) round((microtime(true) - $t0) * 1000);

// Bug fix: the original piped a possible `false` straight into json_decode()
// behind another "@"; decode only when we actually received a string body.
$reply = "";
if (is_string($raw_llm) && $raw_llm !== "") {
    $d = json_decode($raw_llm, true);
    if (is_array($d)) {
        $reply = (string)($d["choices"][0]["message"]["content"] ?? "");
    }
}
if ($reply === "") {
    $reply = "Désolé, je n'ai pas pu traiter la demande. Peux-tu reformuler ?";
}

// Persist both sides of the exchange.
AmbreSessionMemory::append($sid, "user", $msg);
AmbreSessionMemory::append($sid, "assistant", $reply);

$summary = AmbreSessionMemory::summary($sid);

echo json_encode([
    "response" => $reply,
    "provider" => "ambre-session-chat-v1",
    "intent" => "contextual_reply",
    "session_id" => $sid,
    "turns_in_memory" => $summary["turns"],
    "history_used" => count($history),
    "elapsed_ms" => $elapsed,
], JSON_UNESCAPED_UNICODE);
|
||||
88
api/ambre-session-memory.php
Normal file
88
api/ambre-session-memory.php
Normal file
@@ -0,0 +1,88 @@
|
||||
<?php

declare(strict_types=1);

/**
 * ambre-session-memory.php · AMBRE v1 · per-session memory store.
 *
 * Stores/retrieves the last N chat messages per session_id as one JSON file
 * per session under /var/tmp/wevia-sessions/. Entries older than TTL_HOURS
 * are silently dropped on load. May also be hit directly as an endpoint
 * (?action=summary|load|clear) — see the dispatcher at the bottom.
 */

class AmbreSessionMemory
{
    /** Directory holding one JSON file per session. */
    const DIR = "/var/tmp/wevia-sessions";
    /** Maximum number of messages retained per session. */
    const MAX_TURNS = 20;
    /** Messages older than this many hours are discarded on load. */
    const TTL_HOURS = 24;

    /**
     * Ensure the storage directory exists (best-effort, errors suppressed).
     * NOTE(review): 0777 is world-writable — presumably so the web and cron
     * users can both write; confirm and tighten to a shared group if possible.
     */
    public static function init(): void
    {
        if (!is_dir(self::DIR)) {
            @mkdir(self::DIR, 0777, true);
        }
    }

    /**
     * Map a session id to its backing file, stripping every character that
     * could escape the storage directory (path-traversal defense).
     * Bug fix: the original `!$safe` test also collapsed the sid "0" onto
     * "default", silently merging unrelated sessions.
     */
    public static function path($sid): string
    {
        self::init();
        $safe = preg_replace("/[^a-zA-Z0-9_-]/", "", (string)$sid);
        if ($safe === null || $safe === "") {
            $safe = "default";
        }
        return self::DIR . "/" . $safe . ".json";
    }

    /**
     * Load all non-expired messages for a session.
     * Returns a list of {role, content, ts} maps; [] when the file is
     * missing, unreadable, or corrupt.
     */
    public static function load($sid): array
    {
        $p = self::path($sid);
        if (!file_exists($p)) {
            return [];
        }
        $content = @file_get_contents($p);
        if (!is_string($content) || $content === "") {
            return [];
        }
        $data = json_decode($content, true);
        if (!is_array($data)) {
            return [];
        }
        // TTL cleanup: drop expired entries and any entry missing its timestamp.
        $now = time();
        $data = array_filter($data, function ($m) use ($now) {
            return isset($m["ts"]) && ($now - $m["ts"]) < (self::TTL_HOURS * 3600);
        });
        return array_values($data);
    }

    /**
     * Append one message to a session, trimming the log to MAX_TURNS.
     * Bug fix: the original truthiness guards (`!$content` etc.) silently
     * dropped the perfectly valid message "0".
     */
    public static function append($sid, $role, $content): void
    {
        $sid = (string)$sid;
        $role = (string)$role;
        $content = (string)$content;
        if ($sid === "" || $role === "" || $content === "") {
            return;
        }
        $msgs = self::load($sid);
        $msgs[] = [
            "role" => $role,
            // Byte-wise cap; may split a multibyte character — the substitute
            // flag below keeps the encoded JSON valid anyway.
            "content" => substr($content, 0, 4000),
            "ts" => time(),
        ];
        // Keep only the most recent MAX_TURNS entries.
        if (count($msgs) > self::MAX_TURNS) {
            $msgs = array_slice($msgs, -self::MAX_TURNS);
        }
        // Bug fix: json_encode() returns false on invalid UTF-8, and the
        // original then overwrote the session file with an empty string,
        // wiping the whole conversation. Substitute bad sequences and only
        // write when encoding actually succeeded.
        $json = json_encode($msgs, JSON_UNESCAPED_UNICODE | JSON_INVALID_UTF8_SUBSTITUTE);
        if ($json !== false) {
            @file_put_contents(self::path($sid), $json, LOCK_EX);
        }
    }

    /**
     * Last $max messages reduced to LLM shape: role + content only.
     */
    public static function context_messages($sid, $max = 10): array
    {
        $out = [];
        foreach (array_slice(self::load($sid), -$max) as $m) {
            $out[] = ["role" => $m["role"], "content" => $m["content"]];
        }
        return $out;
    }

    /**
     * Small stats view of a session: turn count plus first/last ISO-8601
     * timestamps (null when the session is empty).
     */
    public static function summary($sid): array
    {
        $msgs = self::load($sid);
        $first = $msgs[0]["ts"] ?? null;
        $last = empty($msgs) ? null : $msgs[count($msgs) - 1]["ts"];
        return [
            "session" => $sid,
            "turns" => count($msgs),
            "first_ts" => $first !== null ? date("c", $first) : null,
            "last_ts" => $last !== null ? date("c", $last) : null,
        ];
    }

    /**
     * Delete a session's backing file (no-op when it does not exist).
     */
    public static function clear($sid): void
    {
        $p = self::path($sid);
        if (file_exists($p)) {
            @unlink($p);
        }
    }
}

// Direct API usage: dispatch only when this file itself is the hit endpoint.
// Bug fix: SCRIPT_NAME is now null-coalesced so CLI/embedded use cannot warn.
if (basename($_SERVER["SCRIPT_NAME"] ?? "") === "ambre-session-memory.php") {
    header("Content-Type: application/json");
    $sid = $_GET["sid"] ?? "";
    $action = $_GET["action"] ?? "summary";
    if ($action === "summary") {
        echo json_encode(AmbreSessionMemory::summary($sid));
    } elseif ($action === "load") {
        echo json_encode(AmbreSessionMemory::load($sid));
    } elseif ($action === "clear") {
        AmbreSessionMemory::clear($sid);
        echo json_encode(["cleared" => true]);
    } else {
        echo json_encode(["error" => "unknown action"]);
    }
}
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,5 +1,5 @@
|
||||
{
|
||||
"generated_at": "2026-04-21T23:25:01.227270",
|
||||
"generated_at": "2026-04-21T23:35:01.409653",
|
||||
"stats": {
|
||||
"total": 48,
|
||||
"pending": 31,
|
||||
|
||||
@@ -1,27 +1,27 @@
|
||||
{
|
||||
"ok": true,
|
||||
"agent": "V42_MQL_Scoring_Agent_REAL",
|
||||
"ts": "2026-04-21T21:20:02+00:00",
|
||||
"ts": "2026-04-21T21:30:02+00:00",
|
||||
"status": "DEPLOYED_AUTO",
|
||||
"deployed": true,
|
||||
"algorithm": "weighted_behavioral_signals",
|
||||
"signals_tracked": {
|
||||
"wtp_engagement": 70,
|
||||
"wtp_engagement": 100,
|
||||
"chat_engagement": 0,
|
||||
"roi_tool": 0,
|
||||
"email_opened": 0
|
||||
},
|
||||
"avg_score": 17.5,
|
||||
"avg_score": 25,
|
||||
"mql_threshold": 50,
|
||||
"sql_threshold": 75,
|
||||
"leads_captured": 48,
|
||||
"mql_auto_scored": 19,
|
||||
"mql_auto_scored": 20,
|
||||
"sql_auto_scored": 8,
|
||||
"mql_auto_pct": 39,
|
||||
"mql_auto_pct": 41,
|
||||
"improvement_vs_manual": {
|
||||
"before_manual_pct": 33.3,
|
||||
"after_auto_pct": 39,
|
||||
"delta": 5.700000000000003
|
||||
"after_auto_pct": 41,
|
||||
"delta": 7.700000000000003
|
||||
},
|
||||
"paperclip_db_ok": true,
|
||||
"paperclip_tables": 1,
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
{
|
||||
"timestamp": "2026-04-21T23:00:17",
|
||||
"timestamp": "2026-04-21T23:30:12",
|
||||
"features": {
|
||||
"total": 36,
|
||||
"pass": 35
|
||||
@@ -13,7 +13,7 @@
|
||||
"score": 97.2,
|
||||
"log": [
|
||||
"=== UX AGENT v1.0 ===",
|
||||
"Time: 2026-04-21 23:00:01",
|
||||
"Time: 2026-04-21 23:30:02",
|
||||
" core: 4/4",
|
||||
" layout: 3/4",
|
||||
" interaction: 6/6",
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"ok": true,
|
||||
"version": "V83-business-kpi",
|
||||
"ts": "2026-04-21T21:29:08+00:00",
|
||||
"ts": "2026-04-21T21:34:41+00:00",
|
||||
"summary": {
|
||||
"total_categories": 8,
|
||||
"total_kpis": 64,
|
||||
|
||||
7
api/wired-pending/intent-opus4-wire_gen_pdf.php
Normal file
7
api/wired-pending/intent-opus4-wire_gen_pdf.php
Normal file
@@ -0,0 +1,7 @@
|
||||
<?php

/**
 * Wired-pending intent descriptor: generate a sample PDF via reportlab.
 *
 * Consumed by the intent router via include; yields a config map with the
 * intent name, its trigger phrases, the shell command to run, and a wave tag.
 *
 * NOTE(review): the escaped quotes inside "cmd" are PHP-level escapes only —
 * the resulting raw `"` characters will terminate the shell-level
 * `python -c "..."` argument early. This looks broken as a shell command;
 * confirm before relying on it (string left byte-identical here).
 */
return [
    "name" => "gen_pdf_sample",
    "triggers" => [
        "generate sample pdf",
        "gen pdf test",
        "reportlab test",
        "pdf wave test",
    ],
    "cmd" => "/opt/oss/pandas-ai/venv/bin/python -c \"from reportlab.pdfgen import canvas; from reportlab.lib.pagesizes import A4; c=canvas.Canvas(\"/tmp/weval_\"+str(__import__(\"time\").time())[:10]+\".pdf\",pagesize=A4); c.setFont(\"Helvetica-Bold\",14); c.drawString(100,800,\"WEVAL Sample PDF · reportlab live\"); c.save(); print(\"PDF created\")\"",
    "status" => "WAVE_225",
];
|
||||
File diff suppressed because one or more lines are too long
62
wevia.html
62
wevia.html
@@ -1491,6 +1491,68 @@ function send() {
|
||||
}
|
||||
// === END AMBRE-V2-GEN-ROUTER ===
|
||||
|
||||
// === AMBRE-V5-MEMORY 2026-04-21 · session-aware conversational routing ===
|
||||
// Doctrine: every non-gen non-ping message → ambre-session-chat.php for memory + empathy
|
||||
// Keeps ping/aide/bilan on sovereign/fast-path (no regression)
|
||||
var _ambre_route_session = true;
|
||||
if (_ambre_route_session && text.length > 3) {
|
||||
var _low = text.toLowerCase();
|
||||
// Skip short commands that go to fast-path
|
||||
var _skip_keywords = ['ping', 'pong', 'aide', 'help', 'bilan complet', 'status', 'diag'];
|
||||
var _skip = _skip_keywords.some(function(k) { return _low === k || _low.startsWith(k + ' '); });
|
||||
if (!_skip) {
|
||||
// persistent session id across reloads
|
||||
if (!window._ambre_session_id) {
|
||||
var stored = null;
|
||||
try { stored = sessionStorage.getItem('ambre_sid'); } catch(e){}
|
||||
if (!stored) {
|
||||
stored = 'wv-' + Date.now() + '-' + Math.random().toString(36).substring(2,8);
|
||||
try { sessionStorage.setItem('ambre_sid', stored); } catch(e){}
|
||||
}
|
||||
window._ambre_session_id = stored;
|
||||
}
|
||||
fetch('/api/ambre-session-chat.php', {
|
||||
method: 'POST',
|
||||
headers: {'Content-Type':'application/json'},
|
||||
body: JSON.stringify({message: text, session_id: window._ambre_session_id})
|
||||
})
|
||||
.then(function(r) {
|
||||
return r.text().then(function(t) {
|
||||
if (!t || !t.trim()) return null;
|
||||
try { return JSON.parse(t); } catch(e) { return null; }
|
||||
});
|
||||
})
|
||||
.then(function(data) {
|
||||
if (!data || !data.response) { busy = false; return; } // fall through silently
|
||||
hideThinking();
|
||||
var elapsed = ((performance.now() - startTime) / 1000).toFixed(1);
|
||||
chatHistory.push({role:'assistant', content:data.response});
|
||||
var msgEl = addMsg('assistant', data.response, elapsed);
|
||||
var badges = [];
|
||||
badges.push('<span class="nx-badge nx-prov">ambre-memory</span>');
|
||||
badges.push('<span class="nx-badge" style="background:rgba(16,185,129,0.15);color:#10B981">🧠 ' + (data.turns_in_memory||0) + ' tours</span>');
|
||||
if (data.history_used > 0) badges.push('<span class="nx-badge" style="background:rgba(245,158,11,0.15);color:#F59E0B">📚 ctx:' + data.history_used + '</span>');
|
||||
if (badges.length && msgEl && msgEl.querySelector('.msg-inner')) {
|
||||
var badgeEl = document.createElement('div');
|
||||
badgeEl.className = 'nx-badges';
|
||||
badgeEl.innerHTML = badges.join('');
|
||||
msgEl.querySelector('.msg-inner').appendChild(badgeEl);
|
||||
}
|
||||
// reset busy to allow next message
|
||||
busy = false;
|
||||
try { var _sb = document.getElementById('sendBtn'); if (_sb) _sb.disabled = false; } catch(e){}
|
||||
try { var _mi = document.getElementById('msgInput'); if (_mi) { _mi.value=''; _mi.disabled=false; } } catch(e){}
|
||||
})
|
||||
.catch(function(err) {
|
||||
console.warn('[Ambre V5] session-chat err:', err.message);
|
||||
busy = false;
|
||||
try { var _sb = document.getElementById('sendBtn'); if (_sb) _sb.disabled = false; } catch(e){}
|
||||
});
|
||||
return;
|
||||
}
|
||||
}
|
||||
// === END AMBRE-V5-MEMORY ===
|
||||
|
||||
// Fast mode: direct to fast endpoint (2s response)
|
||||
if (effectiveMode === 'fast' && !pendingFile) {
|
||||
fetch('/api/sovereign/v1/chat/completions', {
|
||||
|
||||
Reference in New Issue
Block a user