762 lines
33 KiB
Plaintext
Executable File
762 lines
33 KiB
Plaintext
Executable File
<?php

// HAMID chat endpoint: receives a user message (raw JSON body or multipart
// form with attachments) and proxies it to one of many LLM providers,
// enriched with knowledge-base and user-memory context.
require_once __DIR__ . '/hamid-failover.php';

// Every code path in this script answers with JSON.
header('Content-Type: application/json');

// NOTE(review): warnings/notices are silenced so stray output never corrupts
// the JSON body; consider routing errors to a log file instead of discarding.
error_reporting(0);

try {

// Accept either multipart/form-data (used for file uploads) or a raw JSON body.
$contentType = $_SERVER["CONTENT_TYPE"] ?? "";
if (strpos($contentType, "multipart/form-data") !== false) {
    $input = $_POST;
} else {
    $input = json_decode(file_get_contents('php://input'), true);
}
|
|
|
|
// Reject requests that carry neither a message nor an action.
//
// FIX: this branch previously embedded an "auto-learning" section that
// referenced $message, $aiResponse, $pdo and $sessionId — none of which exist
// at this point in the script (they are only defined later, after a
// successful AI call). Under PHP 8, calling a method on the undefined $pdo
// throws Error, which neither the inner nor the outer catch (Exception)
// intercepts, so a request with a missing message produced an empty body
// instead of the error JSON below. The block also interpolated matched IPs
// directly into SQL. That misplaced code has been removed; learning logic
// belongs after the AI response has been obtained, with prepared statements.
if (!$input || (!isset($input['message']) && !isset($input['action']))) {
    echo json_encode(['error' => 'Message requis']);
    exit;
}
|
|
|
|
// Core request parameters.
$message  = $input['message'];
$history  = $input['history'] ?? [];
$provider = $input['provider'] ?? 'cerebras';

// Failover bookkeeping, echoed back to the client in the final payload.
$failoverUsed      = false;
$requestedProvider = $provider;
$attemptsLog       = [];

// Per-provider HTTP timeouts in seconds, tuned to each API's typical latency.
$timeouts = [
    "cerebras" => 15,    "groq" => 20,        "mistral" => 30,    "deepseek" => 45,
    "claude" => 90,      "gemini" => 60,      "ollama" => 120,    "ollama-mini" => 60,
    "openai" => 60,      "openai-mini" => 30, "perplexity" => 45, "together" => 45,
    "fireworks" => 30,   "openrouter" => 45,  "vllm" => 30,       "xai" => 45,
    "novita" => 45,      "lepton" => 60,      "ai21" => 45,       "cloudflare" => 30,
    "huggingface" => 60, "cohere" => 30,      "sambanova" => 30,  "hyperbolic" => 45,
];
$timeout = $timeouts[$provider] ?? 60;

// NOTE(review): database credentials are hard-coded here; they should be
// moved to environment variables or the deployment config.
$pdo = new PDO("pgsql:host=localhost;dbname=adx_system", "admin", "admin123");
$pdo->setAttribute(PDO::ATTR_ERRMODE, PDO::ERRMODE_EXCEPTION);
|
|
// Handler actions speciales
|
|
// Special actions that short-circuit the normal chat flow.
if (isset($input["action"])) {
    if ($input["action"] === "list_conversations") {
        $sql = "SELECT id, title, created_at, updated_at FROM admin.hamid_conversations ORDER BY updated_at DESC LIMIT 20";
        $stmt = $pdo->query($sql);
        echo json_encode(["conversations" => $stmt->fetchAll(PDO::FETCH_ASSOC)]);
        exit;
    }
}
|
|
|
|
|
|
// Récupérer config
|
|
// Load key/value configuration (API keys, endpoints) from the database.
// Failure here is non-fatal: providers without keys are rejected later on.
$config = [];
try {
    $rows = $pdo->query("SELECT config_key, config_value FROM admin.hamid_config");
    foreach ($rows as $row) {
        $config[$row['config_key']] = $row['config_value'];
    }
} catch (Exception $e) {
}
|
|
|
|
// =============================================
|
|
// UPLOAD FICHIERS + VISION
|
|
// =============================================
|
|
// =============================================
// FILE UPLOAD + VISION
// =============================================
$uploadedFiles = [];
$imageData     = [];
$uploadDir     = "/opt/wevads/public/hamid-files/";

// SECURITY FIX: uploads land in a publicly served directory and the original
// code accepted ANY extension, so uploading a .php file yielded remote code
// execution. Server-executable extensions are now rejected outright.
$blockedExtensions = [
    "php", "php3", "php4", "php5", "php7", "phtml", "phar",
    "cgi", "pl", "sh", "htaccess", "asp", "aspx", "jsp",
];

if (!empty($_FILES["files"])) {
    foreach ($_FILES["files"]["tmp_name"] as $i => $tmp) {
        if ($_FILES["files"]["error"][$i] !== UPLOAD_ERR_OK) {
            continue;
        }
        $origName = $_FILES["files"]["name"][$i];
        $ext = strtolower(pathinfo($origName, PATHINFO_EXTENSION));
        if (in_array($ext, $blockedExtensions, true)) {
            continue; // skip dangerous files silently; keep the request alive
        }
        // Timestamped, character-filtered name to avoid path tricks/collisions.
        $safeName = date("Ymd_His") . "_" . preg_replace("/[^a-zA-Z0-9._-]/", "_", $origName);
        $path = $uploadDir . $safeName;

        if (move_uploaded_file($tmp, $path)) {
            $uploadedFiles[] = [
                "name"  => $origName,
                "saved" => $safeName,
                "path"  => $path,
                "url"   => "/hamid-files/" . $safeName,
                "type"  => $ext,
                "size"  => filesize($path),
            ];

            // Images are additionally base64-encoded so vision-capable
            // providers (Claude content parts) can receive them inline.
            if (in_array($ext, ["png", "jpg", "jpeg", "gif", "webp"], true)) {
                $mimes = [
                    "png" => "image/png", "jpg" => "image/jpeg", "jpeg" => "image/jpeg",
                    "gif" => "image/gif", "webp" => "image/webp",
                ];
                $imageData[] = [
                    "base64" => base64_encode(file_get_contents($path)),
                    "mime"   => $mimes[$ext] ?? "image/png",
                    "name"   => $origName,
                ];
            }
        }
    }
}

// Make the model aware of attachments by appending a file list to the message.
if (!empty($uploadedFiles)) {
    $fileList = array_map(
        function ($f) { return $f["name"] . " (" . $f["type"] . ")"; },
        $uploadedFiles
    );
    $message .= "\n\n[Fichiers joints: " . implode(", ", $fileList) . "]";
}
|
|
|
|
|
|
// ============================================
|
|
// RECHERCHE KNOWLEDGE BASE - AMÉLIORÉE
|
|
// ============================================
|
|
// ============================================
// KNOWLEDGE-BASE LOOKUP
// ============================================
$kbContext = "";
$kbCount   = 0;
$kbTitles  = [];

try {
    // Keyword extraction: words longer than 3 characters, minus common
    // French stop-words that would match nearly every row.
    $stopWords = ['pour', 'dans', 'avec', 'cette', 'faire', 'comment', 'quoi',
                  'quel', 'quelle', 'sont', 'est-ce', 'peux', 'veux', 'dois'];
    $keywords = array_filter(
        preg_split('/\s+/', strtolower($message)),
        function ($w) use ($stopWords) {
            return strlen($w) > 3 && !in_array($w, $stopWords);
        }
    );

    if (!empty($keywords)) {
        // One (title, content) LIKE pair per keyword, OR-combined.
        $conditions = [];
        $params     = [];
        foreach ($keywords as $kw) {
            $conditions[] = "(LOWER(title) LIKE ? OR LOWER(content) LIKE ?)";
            $params[]     = '%' . $kw . '%';
            $params[]     = '%' . $kw . '%';
        }

        $stmt = $pdo->prepare(
            "SELECT id, question as title, answer as content FROM admin.commonia_knowledge WHERE "
            . implode(' OR ', $conditions) . " LIMIT 8"
        );
        $stmt->execute($params);

        while ($row = $stmt->fetch(PDO::FETCH_ASSOC)) {
            $kbContext .= "\n\n### " . $row['title'] . " ###\n" . mb_substr($row['content'], 0, 500);
            $kbTitles[] = $row['title'];
            $kbCount++;
        }
    }

    // Fallback: no keyword hit — try the raw message prefix as a single LIKE.
    if ($kbCount == 0) {
        $stmt = $pdo->prepare(
            "SELECT id, question as title, answer as content FROM admin.commonia_knowledge "
            . "WHERE LOWER(content) LIKE ? OR LOWER(question) LIKE ? LIMIT 8"
        );
        $searchTerm = '%' . strtolower(substr($message, 0, 50)) . '%';
        $stmt->execute([$searchTerm, $searchTerm]);

        while ($row = $stmt->fetch(PDO::FETCH_ASSOC)) {
            $kbContext .= "\n\n### " . $row['title'] . " ###\n" . mb_substr($row['content'], 0, 500);
            $kbTitles[] = $row['title'];
            $kbCount++;
        }
    }
} catch (Exception $e) {
    // Non-fatal: the chat still works without KB context.
    error_log("KB Search Error: " . $e->getMessage());
}
|
|
|
|
// ============================================
|
|
// RECHERCHE MÉMOIRE
|
|
// ============================================
|
|
// ============================================
// USER MEMORY LOOKUP
// ============================================
$memContext = "";
$memCount   = 0;

try {
    $stmt = $pdo->prepare(
        "SELECT key, value FROM admin.chatbot_memory WHERE LOWER(key) LIKE ? OR LOWER(value) LIKE ? LIMIT 8"
    );
    $needle = '%' . strtolower($message) . '%';
    $stmt->execute([$needle, $needle]);

    while ($row = $stmt->fetch(PDO::FETCH_ASSOC)) {
        $memContext .= "\n[" . $row['key'] . "]: " . $row['value'];
        $memCount++;
    }
} catch (Exception $e) {
}
|
|
|
|
// ============================================
|
|
// SYSTEM PROMPT AVEC CONTEXTE KB
|
|
// ============================================
|
|
// ============================================
// SYSTEM PROMPT (+ KB / memory context)
// ============================================
// The base prompt defines the mandatory <thinking> format, reasoning rules,
// Mermaid diagram constraints and the [GENERATE_DOC:...] document protocol.
$systemPrompt = "## FORMAT DE REPONSE OBLIGATOIRE
Pour CHAQUE reponse, tu DOIS commencer par un bloc de reflexion:
<thinking>
[Ton raisonnement detaille: analyse du probleme, hypotheses, verification, conclusion]
</thinking>

Puis ta reponse finale.

---

Tu es HAMID (Hyper-Advanced Multi-Intelligence Director), assistant IA expert du systeme WEVADS.\n\n## REGLES\n1. META-COGNITION: Evalue ta confiance (0-100%), identifie tes limites, choisis la strategie optimale\n2. TREE OF THOUGHTS: Genere 3+ pistes de raisonnement, evalue chacune, explore la meilleure, backtrack si necessaire\n3. CHAIN OF VERIFICATION: Verifie facts, logique, coherence apres chaque affirmation importante\n4. SELF-REFINE: Genere reponse, auto-critique, ameliore iterativement\n5. MEMOIRE CONTEXTUELLE: Utilise KB en priorite, connecte concepts, rappelle historique\n\n## TECHNIQUES AVANCEES DE RAISONNEMENT\n- Decomposition recursive (Least-to-Most): divise problemes complexes en sous-problemes simples\n- Raisonnement analogique++: trouve domaines similaires, transfere patterns, adapte solutions\n- First principles thinking: retour aux fondamentaux, reconstruit depuis zero\n- Contrafactuel: simule \"que se passerait-il si X etait different?\"\n- Bayesian updating: mise a jour probabiliste des croyances avec nouvelles evidences\n- ReAct (Reasoning+Acting): pense, observe, agit, evalue en boucle\n\n## DIAGRAMMES ET SCHEMAS
Pour les schemas, utilise Mermaid avec ces REGLES STRICTES:
1. PAS d'accents dans les labels (utilise e au lieu de é)
2. PAS de caracteres speciaux sauf - et _
3. Labels entre crochets [] pour les rectangles
4. Labels entre parentheses () pour les ovales
5. Labels entre {} pour les losanges

EXEMPLE VALIDE architecture:
```mermaid
graph TD
A[Serveur Principal] --> B[PostgreSQL]
A --> C[Apache]
C --> D[WEVADS App]
D --> E[API REST]
E --> F[PowerMTA]
F --> G[SMTP Sortant]
D --> H[Tracking]
```

EXEMPLE VALIDE flux:
```mermaid
graph LR
User --> Frontend
Frontend --> API
API --> Database
API --> Cache
```

N'utilise JAMAIS de schemas ASCII!

## QUALITE DE REPONSE\n- **Gras** pour concepts cles et termes importants\n- Listes numerotees pour etapes sequentielles\n- `code inline` et blocs pour elements techniques\n- Exemples concrets et analogies parlantes\n- TL;DR en debut pour questions complexes\n- Sources KB citees quand pertinent\n\n## GENERATION DOCUMENTS\nQuand on demande un document/rapport/PDF:\n1. Ecris contenu TRES DETAILLE (1500+ mots minimum)\n2. Structure: Titre, Sommaire, Introduction, Sections avec sous-titres, Exemples, Tableaux si utile, Conclusion\n3. A la fin ajoute: [GENERATE_DOC:pdf:NomDuFichier]\n\n## EXPERTISE TECHNIQUE WEVADS\nPostgreSQL, PowerMTA, VMTA, PHP, Java, Apache, Nginx, Email Marketing, Tracking pixels, DNS (SPF/DKIM/DMARC), Delivrabilite, Cloud (Hetzner, Huawei, OVH, AWS), Architecture multi-serveurs, API REST, WebSockets.\n\n## REGLES\n- Toujours repondre en francais sauf si demande contraire\n- Concis mais complet - pas de blabla inutile\n- Si incertain, dis-le clairement avec niveau de confiance\n- Utilise le contexte KB fourni en priorite absolue";

// Inject the retrieved context blocks; the prompt instructs the model to
// treat KB facts as the highest-priority source.
if ($kbContext !== "") {
    $systemPrompt .= "\n\n=== KNOWLEDGE BASE (utilise ces informations pour répondre) ===\n" . $kbContext . "\n=== FIN KNOWLEDGE BASE ===";
}

if ($memContext !== "") {
    $systemPrompt .= "\n\n=== MÉMOIRE UTILISATEUR ===\n" . $memContext . "\n=== FIN MÉMOIRE ===";
}
|
|
|
|
|
|
// ============================================
|
|
// SYSTÈME FAILOVER INTELLIGENT
|
|
// ============================================
|
|
// Priorité: Gratuit illimité > Gratuit limité > Payant
|
|
// ============================================
// INTELLIGENT FAILOVER
// ============================================
// Order of preference: free/unlimited tiers first, then rate-limited free
// tiers, then paid APIs, with the local Ollama instance as the last resort.
$providerPriority = [
    "cerebras",   // #1 free unlimited, ultra fast
    "groq",       // #2 free 100k/day, very fast
    "sambanova",  // #3 free, fast
    "mistral",    // #4 free, limited
    "cohere",     // #5 free, limited
    "together",   // #6 free, limited
    "fireworks",  // #7 free, limited
    "openrouter", // #8 free, limited
    "deepseek",   // #9 paid, cheap
    "claude",     // #10 paid, premium
    "openai",     // #11 paid, premium
    "ollama",     // #12 local (slow but always available)
];

// Provider health cache shared between requests through a tmp file.
// Entries older than 5 minutes are dropped so failed providers get retried.
$statusFile     = "/tmp/hamid_provider_status.json";
$providerStatus = [];
if (file_exists($statusFile)) {
    $providerStatus = json_decode(file_get_contents($statusFile), true) ?: [];
    foreach ($providerStatus as $p => $s) {
        if (time() - ($s["time"] ?? 0) > 300) {
            unset($providerStatus[$p]);
        }
    }
}
|
|
|
|
// Fonction pour marquer un provider comme down
|
|
/**
 * Record a provider as unavailable in the shared status cache.
 *
 * @param string $provider Provider identifier (e.g. "groq").
 * @param string $error    Failure description kept for diagnostics.
 * @param array  $status   Status map, updated in place (by reference).
 * @param string $file     Path of the JSON status cache file.
 */
function markProviderDown($provider, $error, &$status, $file) {
    $status[$provider] = ["down" => true, "error" => $error, "time" => time()];
    // FIX: LOCK_EX prevents concurrent requests from interleaving partial
    // writes into the shared cache file.
    file_put_contents($file, json_encode($status), LOCK_EX);
}
|
|
|
|
// Fonction pour marquer un provider comme up
|
|
/**
 * Clear a provider's "down" flag and persist the updated status cache.
 *
 * @param string $provider Provider identifier to mark healthy again.
 * @param array  $status   Status map, updated in place (by reference).
 * @param string $file     Path of the JSON status cache file.
 */
function markProviderUp($provider, &$status, $file) {
    if (isset($status[$provider])) {
        unset($status[$provider]);
    }
    file_put_contents($file, json_encode($status));
}
|
|
|
|
// Déterminer le provider à utiliser (failover)
|
|
// Pick the provider actually used for this request. If the requested one is
// cached as down, walk the priority list from its own position and take the
// first healthy fallback.
// (Removed: $failoverAttempts / $maxAttempts / $currentAttempt, which were
// declared here but never used anywhere in the script.)
$requestedProvider = $provider;
$useFailover = true; // master switch for automatic failover

if ($useFailover && isset($providerStatus[$provider]["down"])) {
    $idx = array_search($provider, $providerPriority, true);
    if ($idx !== false) {
        for ($i = $idx + 1; $i < count($providerPriority); $i++) {
            if (!isset($providerStatus[$providerPriority[$i]]["down"])) {
                $provider = $providerPriority[$i];
                // FIX: record that a switch happened so the response field
                // "failover_used" is accurate (it previously stayed false
                // even when a fallback provider was selected).
                $failoverUsed = true;
                break;
            }
        }
    }
}
|
|
|
|
// ============================================
|
|
// PROVIDERS
|
|
// ============================================
|
|
// ============================================
// PROVIDER CATALOGUE
// ============================================
// FIX: the original array was structurally corrupted — every provider from
// 'openai' onward had been pasted *inside* the unclosed 'ollama-mini' entry
// (its orphaned 'key'/'model' lines sat at the bottom of the array), so
// $providers['openai'], $providers['xai'], etc. did not exist and those
// providers silently fell back to the cerebras default. All entries are now
// top-level and 'ollama-mini' is properly closed with its own key/model.
$providers = [
    'cerebras' => [
        'url' => 'https://api.cerebras.ai/v1/chat/completions',
        'key' => $config['cerebras_api_key'] ?? '',
        'model' => 'llama3.1-8b'
    ],
    'groq' => [
        'url' => 'https://api.groq.com/openai/v1/chat/completions',
        'key' => $config['groq_api_key'] ?? '',
        'model' => 'llama-3.3-70b-versatile'
    ],
    'deepseek' => [
        'url' => 'https://api.deepseek.com/v1/chat/completions',
        'key' => $config['deepseek_api_key'] ?? '',
        'model' => 'deepseek-chat'
    ],
    'gemini' => [
        'url' => 'https://generativelanguage.googleapis.com/v1beta/models/gemini-2.0-flash:generateContent',
        'key' => $config['gemini_api_key'] ?? '',
        'model' => 'gemini-2.0-flash'
    ],
    // NOTE(review): 'chatgpt' duplicates 'openai-mini' (same URL/key/model).
    'chatgpt' => [
        'url' => 'https://api.openai.com/v1/chat/completions',
        'key' => $config['openai_api_key'] ?? '',
        'model' => 'gpt-4o-mini'
    ],
    'claude' => [
        'url' => 'https://api.anthropic.com/v1/messages',
        'key' => $config['claude_api_key'] ?? '',
        'model' => 'claude-sonnet-4-20250514'
    ],
    'hyperbolic' => [
        'url' => 'https://api.hyperbolic.xyz/v1/chat/completions',
        'key' => $config['hyperbolic_api_key'] ?? '',
        'model' => 'meta-llama/Llama-3.3-70B-Instruct'
    ],
    'mistral' => [
        'url' => 'https://api.mistral.ai/v1/chat/completions',
        'key' => $config['mistral_api_key'] ?? '',
        'model' => 'mistral-small-latest'
    ],
    'cohere' => [
        'url' => 'https://api.cohere.ai/v1/chat',
        'key' => $config['cohere_api_key'] ?? '',
        'model' => 'command-r7b-12-2024'
    ],
    'sambanova' => [
        'url' => 'https://api.sambanova.ai/v1/chat/completions',
        'key' => $config['sambanova_api_key'] ?? '',
        'model' => 'Meta-Llama-3.1-8B-Instruct'
    ],
    'ollama' => [
        'url' => ($config['ollama_url'] ?? 'http://localhost:11434') . '/api/chat',
        'key' => '',
        'model' => $config['ollama_model'] ?? 'mistral:7b-instruct'
    ],
    // Small local model for quick/cheap answers (key/model restored from the
    // orphaned lines at the bottom of the broken array).
    'ollama-mini' => [
        'url' => ($config['ollama_url'] ?? 'http://localhost:11434') . '/api/chat',
        'key' => '',
        'model' => 'tinyllama:latest'
    ],
    'openai' => [
        'url' => 'https://api.openai.com/v1/chat/completions',
        'key' => $config['openai_api_key'] ?? '',
        'model' => 'gpt-4o'
    ],
    'openai-mini' => [
        'url' => 'https://api.openai.com/v1/chat/completions',
        'key' => $config['openai_api_key'] ?? '',
        'model' => 'gpt-4o-mini'
    ],
    'perplexity' => [
        'url' => 'https://api.perplexity.ai/chat/completions',
        'key' => $config['perplexity_api_key'] ?? '',
        'model' => 'llama-3.1-sonar-large-128k-online'
    ],
    'together' => [
        'url' => 'https://api.together.xyz/v1/chat/completions',
        'key' => $config['together_api_key'] ?? '',
        'model' => 'meta-llama/Llama-3.3-70B-Instruct-Turbo'
    ],
    'fireworks' => [
        'url' => 'https://api.fireworks.ai/inference/v1/chat/completions',
        'key' => $config['fireworks_api_key'] ?? '',
        'model' => 'accounts/fireworks/models/llama-v3p1-70b-instruct'
    ],
    'openrouter' => [
        'url' => 'https://openrouter.ai/api/v1/chat/completions',
        'key' => $config['openrouter_api_key'] ?? '',
        'model' => 'meta-llama/llama-3.1-70b-instruct'
    ],
    'vllm' => [
        'url' => ($config['vllm_url'] ?? 'http://localhost:8000') . '/v1/chat/completions',
        'key' => 'none',
        'model' => $config['vllm_model'] ?? 'default'
    ],
    'n8n' => [
        'url' => $config['n8n_webhook_url'] ?? 'http://localhost:5678/webhook/hamid',
        'key' => $config['n8n_api_key'] ?? '',
        'model' => 'n8n-workflow'
    ],
    'azure' => [
        'url' => $config['azure_openai_endpoint'] ?? '',
        'key' => $config['azure_api_key'] ?? '',
        'model' => 'gpt-4o'
    ],
    'bedrock' => [
        'url' => $config['bedrock_url'] ?? '',
        'key' => $config['bedrock_api_key'] ?? '',
        'model' => 'claude-3-sonnet'
    ],
    'replicate' => [
        'url' => 'https://api.replicate.com/v1/predictions',
        'key' => $config['replicate_api_key'] ?? '',
        'model' => 'meta/llama-2-70b-chat'
    ],
    'lmstudio' => [
        'url' => ($config['lmstudio_url'] ?? 'http://localhost:1234') . '/v1/chat/completions',
        'key' => 'none',
        'model' => 'local'
    ],
    'localai' => [
        'url' => ($config['localai_url'] ?? 'http://localhost:8080') . '/v1/chat/completions',
        'key' => 'none',
        'model' => $config['localai_model'] ?? 'gpt-3.5-turbo'
    ],
    'xai' => [
        'url' => 'https://api.x.ai/v1/chat/completions',
        'key' => $config['xai_api_key'] ?? '',
        'model' => 'grok-beta'
    ],
    'novita' => [
        'url' => 'https://api.novita.ai/v3/openai/chat/completions',
        'key' => $config['novita_api_key'] ?? '',
        'model' => 'meta-llama/llama-3.1-70b-instruct'
    ],
    'lepton' => [
        'url' => 'https://llama3-1-405b.lepton.run/api/v1/chat/completions',
        'key' => $config['lepton_api_key'] ?? '',
        'model' => 'llama3.1-405b'
    ],
    'ai21' => [
        'url' => 'https://api.ai21.com/studio/v1/chat/completions',
        'key' => $config['ai21_api_key'] ?? '',
        'model' => 'jamba-1.5-large'
    ],
    'cloudflare' => [
        'url' => 'https://api.cloudflare.com/client/v4/accounts/{account_id}/ai/run/@cf/meta/llama-3.1-70b-instruct',
        'key' => $config['cloudflare_api_key'] ?? '',
        'model' => '@cf/meta/llama-3.1-70b-instruct'
    ],
    'huggingface' => [
        'url' => 'https://api-inference.huggingface.co/models/meta-llama/Meta-Llama-3.1-70B-Instruct/v1/chat/completions',
        'key' => $config['huggingface_api_key'] ?? '',
        'model' => 'meta-llama/Meta-Llama-3.1-70B-Instruct'
    ]
];
|
|
|
|
// Initialiser système failover
|
|
// Failover helper from hamid-failover.php.
// NOTE(review): $failover is instantiated but never referenced again in this
// script — confirm whether it has constructor side effects before removing.
$failover = new HamidFailover($providers, $config);

$providerConfig = $providers[$provider] ?? $providers['cerebras'];

// Local/self-hosted backends need no API key; every remote API does.
if (empty($providerConfig['key']) && !in_array($provider, ['ollama', 'ollama-mini', 'vllm'], true)) {
    echo json_encode([
        'error' => "Clé API non configurée pour $provider",
        'provider' => $provider,
        // FIX: 'memory_count' previously read an undefined $memoryCount
        // variable (always null); the real counter is $memCount.
        'kb_count' => $kbCount, 'memory_count' => $memCount,
        'mem_count' => $memCount
    ]);
    exit;
}
|
|
|
|
// Build messages
|
|
// Assemble the OpenAI-style message list: system prompt first, then prior
// turns (only well-formed entries with both role and content), then the
// current user message.
$messages = [['role' => 'system', 'content' => $systemPrompt]];
foreach ($history as $msg) {
    if (isset($msg['role'], $msg['content'])) {
        $messages[] = ['role' => $msg['role'], 'content' => $msg['content']];
    }
}
$messages[] = ['role' => 'user', 'content' => $message];
|
|
|
|
// API call
|
|
// ============================================
// BUILD THE PROVIDER HTTP REQUEST
// ============================================
// Each branch produces $url, $headers and $payload for its provider family;
// the cURL options are applied once at the end. (The original duplicated the
// full curl_setopt_array in every branch and never bounded connection time.)
$ch = curl_init();
$startTime = microtime(true);

if ($provider === 'gemini') {
    // Gemini: key in the query string, system prompt in systemInstruction,
    // roles mapped to user/model.
    $url = $providerConfig['url'] . '?key=' . $providerConfig['key'];
    $headers = ['Content-Type: application/json'];
    $geminiMessages = array_filter($messages, fn($m) => $m['role'] !== 'system');
    $payload = [
        'contents' => array_map(function ($m) {
            return [
                'role'  => $m['role'] === 'assistant' ? 'model' : 'user',
                'parts' => [['text' => $m['content']]]
            ];
        }, array_values($geminiMessages)),
        'systemInstruction' => ['parts' => [['text' => $systemPrompt]]]
    ];
} elseif ($provider === 'claude') {
    // Claude Messages API: system prompt is a top-level field; the message
    // list must not contain a "system" role.
    $url = $providerConfig['url'];
    $headers = [
        'Content-Type: application/json',
        'x-api-key: ' . $providerConfig['key'],
        'anthropic-version: 2023-06-01'
    ];
    $claudeMessages = [];
    foreach ($messages as $msg) {
        if ($msg['role'] === 'system') continue;
        $claudeMessages[] = ['role' => $msg['role'], 'content' => $msg['content']];
    }

    // Attach uploaded images to the last user message as base64 content parts.
    if (!empty($imageData)) {
        $lastIdx = count($claudeMessages) - 1;
        $contentParts = [['type' => 'text', 'text' => $claudeMessages[$lastIdx]['content']]];
        foreach ($imageData as $img) {
            $contentParts[] = [
                'type' => 'image',
                'source' => [
                    'type' => 'base64',
                    'media_type' => $img['mime'],
                    'data' => $img['base64']
                ]
            ];
        }
        $claudeMessages[$lastIdx]['content'] = $contentParts;
    }

    $payload = [
        'model' => $providerConfig['model'],
        'max_tokens' => 4096,
        'system' => $systemPrompt,
        'messages' => $claudeMessages
    ];
} elseif ($provider === 'cohere') {
    // Cohere v1 chat: single "message" plus CHATBOT/USER chat_history,
    // system prompt as "preamble".
    $url = $providerConfig['url'];
    $headers = [
        'Content-Type: application/json',
        'Authorization: Bearer ' . $providerConfig['key']
    ];
    $chatHistory = [];
    foreach (array_slice($messages, 1, -1) as $m) {
        $chatHistory[] = [
            'role' => $m['role'] === 'assistant' ? 'CHATBOT' : 'USER',
            'message' => $m['content']
        ];
    }
    $payload = [
        'model' => $providerConfig['model'],
        'message' => $message,
        'preamble' => $systemPrompt,
        'chat_history' => $chatHistory
    ];
} elseif (in_array($provider, ['ollama', 'ollama-mini'], true)) {
    // Local Ollama API: no auth header, explicit non-streaming mode.
    $url = $providerConfig['url'];
    $headers = ['Content-Type: application/json'];
    $payload = [
        'model' => $providerConfig['model'],
        'messages' => $messages,
        'stream' => false
    ];
} else {
    // Default: any OpenAI-compatible chat completions endpoint.
    $url = $providerConfig['url'];
    $headers = [
        'Content-Type: application/json',
        'Authorization: Bearer ' . $providerConfig['key']
    ];
    $payload = [
        'model' => $providerConfig['model'],
        'messages' => $messages,
        'max_tokens' => 4096,
        'temperature' => 0.7
    ];
}

curl_setopt_array($ch, [
    CURLOPT_URL            => $url,
    CURLOPT_RETURNTRANSFER => true,
    CURLOPT_POST           => true,
    CURLOPT_HTTPHEADER     => $headers,
    CURLOPT_POSTFIELDS     => json_encode($payload),
    CURLOPT_TIMEOUT        => $timeout,
    // FIX: also bound connection establishment so an unreachable host fails
    // fast instead of consuming the entire response timeout.
    CURLOPT_CONNECTTIMEOUT => 10,
]);
|
|
|
|
// Execute the request and normalise transport/API failures into JSON errors.
$response  = curl_exec($ch);
$httpCode  = curl_getinfo($ch, CURLINFO_HTTP_CODE);
$curlError = curl_error($ch);
curl_close($ch);

// Round-trip time in milliseconds, reported back to the client.
$duration = round((microtime(true) - $startTime) * 1000);

if ($curlError) {
    // FIX: 'memory_count' previously read an undefined $memoryCount variable
    // (always null under error_reporting(0)); the real counter is $memCount.
    echo json_encode(['error' => "Erreur connexion: $curlError", 'provider' => $provider, 'kb_count' => $kbCount, 'memory_count' => $memCount]);
    exit;
}

$data = json_decode($response, true);

if ($httpCode !== 200) {
    // Providers disagree on error envelope shape; fall back to the raw body.
    $errorMsg = $data['error']['message'] ?? $data['error'] ?? $response;
    echo json_encode(['error' => "Erreur API ($httpCode): $errorMsg", 'provider' => $provider, 'kb_count' => $kbCount, 'memory_count' => $memCount]);
    exit;
}
|
|
|
|
// Extract response
|
|
// Pull the assistant text out of the provider-specific response envelope.
$aiResponse = '';
switch (true) {
    case $provider === 'gemini':
        $aiResponse = $data['candidates'][0]['content']['parts'][0]['text'] ?? 'Pas de réponse';
        break;
    case $provider === 'claude':
        $aiResponse = $data['content'][0]['text'] ?? 'Pas de réponse';
        break;
    case $provider === 'cohere':
        $aiResponse = $data['text'] ?? 'Pas de réponse';
        break;
    case $provider === 'ollama':
    case $provider === 'ollama-mini':
        $aiResponse = $data['message']['content'] ?? 'Pas de réponse';
        break;
    default:
        // All OpenAI-compatible providers share the choices[] envelope.
        $aiResponse = $data['choices'][0]['message']['content'] ?? 'Pas de réponse';
}
|
|
|
|
// GENERATION PDF SI TAG PRESENT
|
|
// ============================================
// OPTIONAL PDF GENERATION
// ============================================
// When the model appends a [GENERATE_DOC:pdf:Title] tag, render the answer
// (minus the tag) as styled HTML and convert it with wkhtmltopdf.
$generatedDoc = null;
if (preg_match("/\[GENERATE_DOC:(pdf):([^\]]+)\]/i", $aiResponse, $dm)) {
    $docTitle = trim($dm[2]);
    $safeName = date("Ymd_His") . "_" . preg_replace("/[^a-zA-Z0-9_-]/", "_", substr($docTitle, 0, 40)) . ".pdf";
    $pdfPath  = "/opt/wevads/public/hamid-files/" . $safeName;
    // FIX: uniqid() instead of time() — concurrent requests within the same
    // second previously raced on the same temp file.
    $tmpHtml  = "/tmp/hamid_" . uniqid("", true) . ".html";

    $docContent = preg_replace("/\[GENERATE_DOC:[^\]]+\]/", "", $aiResponse);

    // Lightweight Markdown -> HTML conversion (headings, emphasis, inline
    // code, fenced code blocks, bullet/numbered lists).
    $md = $docContent;
    $md = preg_replace("/^#### (.+)$/m", "<h4>$1</h4>", $md);
    $md = preg_replace("/^### (.+)$/m", "<h3>$1</h3>", $md);
    $md = preg_replace("/^## (.+)$/m", "<h2>$1</h2>", $md);
    $md = preg_replace("/^# (.+)$/m", "<h1>$1</h1>", $md);
    $md = preg_replace("/\*\*(.+?)\*\*/", "<strong>$1</strong>", $md);
    $md = preg_replace("/\*(.+?)\*/", "<em>$1</em>", $md);
    $md = preg_replace("/`([^`]+)`/", "<code>$1</code>", $md);
    $md = preg_replace("/```(\w*)\n([\s\S]*?)```/", "<pre><code>$2</code></pre>", $md);
    $md = preg_replace("/^\- (.+)$/m", "<li>$1</li>", $md);
    $md = preg_replace("/(<li>.*<\/li>\n?)+/s", "<ul>$0</ul>", $md);
    $md = preg_replace("/^\d+\. (.+)$/m", "<li>$1</li>", $md);
    $md = nl2br($md);

    $html = "<html><head><meta charset=\"UTF-8\"><style>
@page { margin: 2cm; }
body { font-family: Arial, sans-serif; line-height: 1.8; color: #333; max-width: 800px; margin: auto; }
h1 { color: #1a1a2e; border-bottom: 3px solid #00d4ff; padding-bottom: 15px; margin-top: 40px; font-size: 28px; }
h2 { color: #2d3748; margin-top: 35px; font-size: 22px; border-left: 4px solid #00d4ff; padding-left: 15px; }
h3 { color: #4a5568; margin-top: 25px; font-size: 18px; }
h4 { color: #718096; margin-top: 20px; font-size: 16px; }
p { margin: 12px 0; text-align: justify; }
ul, ol { margin: 15px 0; padding-left: 30px; }
li { margin: 8px 0; }
code { background: #f4f4f4; padding: 2px 8px; border-radius: 4px; font-family: Consolas, monospace; color: #e53e3e; }
pre { background: #1a1a2e; color: #00d4ff; padding: 20px; border-radius: 8px; overflow-x: auto; margin: 20px 0; }
pre code { background: none; color: #00d4ff; }
strong { color: #2d3748; }
table { width: 100%; border-collapse: collapse; margin: 20px 0; }
th, td { border: 1px solid #e2e8f0; padding: 12px; text-align: left; }
th { background: #00d4ff; color: white; }
.cover { text-align: center; padding: 100px 0; page-break-after: always; }
.cover h1 { font-size: 36px; border: none; }
.cover .date { color: #718096; margin-top: 50px; }
.cover .author { color: #00d4ff; font-weight: bold; }
.toc { page-break-after: always; }
.toc h2 { border: none; }
</style></head><body>
<div class=\"cover\">
<h1>" . htmlspecialchars($docTitle) . "</h1>
<p class=\"date\">Genere le " . date("d/m/Y H:i") . "</p>
<p class=\"author\">Par HAMID IA - Superintelligence</p>
</div>
" . $md . "
<hr style=\"margin-top:50px;border:1px solid #00d4ff\">
<p style=\"text-align:center;color:#718096;font-size:12px\">Document genere automatiquement par HAMID IA - WEVADS Platform</p>
</body></html>";

    file_put_contents($tmpHtml, $html);
    // FIX: escapeshellarg() hardens the shell invocation; both paths are
    // generated internally today, but this removes a whole class of risk.
    exec("wkhtmltopdf --quiet " . escapeshellarg($tmpHtml) . " " . escapeshellarg($pdfPath) . " 2>&1");
    @unlink($tmpHtml);

    if (file_exists($pdfPath)) {
        $generatedDoc = ["type" => "pdf", "name" => $safeName, "url" => "/hamid-files/" . $safeName, "size" => filesize($pdfPath)];
    }

    // Strip the generation tag from the text shown to the user.
    $aiResponse = trim(preg_replace("/\[GENERATE_DOC:[^\]]+\]/", "", $aiResponse));
}
|
|
|
|
|
|
|
|
// Files
|
|
// Detect a [FILE:name] tag in the answer.
// NOTE(review): $files is computed but never included in the payload below —
// the 'files' field carries $uploadedFiles. Confirm whether $files should be
// merged in or the detection removed.
$files = [];
if (preg_match('/\[FILE:([^\]]+)\]/', $aiResponse, $matches)) {
    $fileName = trim($matches[1]);
    $files[] = [
        'name' => $fileName,
        'type' => pathinfo($fileName, PATHINFO_EXTENSION),
        'url'  => '/hamid-files/' . $fileName
    ];
}

// Final success payload.
echo json_encode([
    'response' => $aiResponse,
    'provider' => $provider,
    'model'    => $providerConfig['model'],
    'duration' => $duration,
    'failover_used'     => $failoverUsed ?? false,
    'original_provider' => $requestedProvider ?? $provider,
    // FIX: 'memory_count' previously referenced an undefined $memoryCount
    // variable (always null); the real counter is $memCount.
    'kb_count'     => $kbCount,
    'memory_count' => $memCount,
    'kb_titles'    => $kbTitles,
    'mem_count'    => $memCount,
    'files'        => $uploadedFiles,
    'generated_doc' => $generatedDoc ?? null
]);
|
|
|
|
// FIX: catch Throwable, not just Exception — a TypeError/Error raised inside
// the handler (e.g. a method call on null) previously escaped this block and
// produced an empty HTTP body instead of a JSON error.
} catch (Throwable $e) {
    echo json_encode(['error' => 'Erreur serveur: ' . $e->getMessage()]);
}
|