262 lines
11 KiB
PHP
262 lines
11 KiB
PHP
<?php
|
|
// Brain Nucleus Engine - Active Learning
|
|
require_once("/opt/wevads/public/api/wevia-brain-nucleus.php");
|
|
/**
|
|
* HAMID Providers Config — Auto-loads from DB
|
|
* Required by: hamid.php, hamid-api.php
|
|
*/
|
|
|
|
/**
 * Returns a shared PDO connection to the adx_system PostgreSQL database.
 *
 * The handle is created lazily on first call and cached in a static local,
 * so every helper in this file reuses one connection per request.
 *
 * NOTE(review): credentials are hard-coded; consider moving them to
 * environment variables or a config file outside the web root.
 *
 * @return PDO
 */
function getHamidDB() {
    static $pdo = null;
    if ($pdo === null) {
        $pdo = new PDO(
            "pgsql:host=localhost;port=5432;dbname=adx_system",
            "admin",
            "admin123",
            [
                // Throw on SQL errors instead of failing silently.
                PDO::ATTR_ERRMODE => PDO::ERRMODE_EXCEPTION,
                // Use real server-side prepared statements for parameter binding.
                PDO::ATTR_EMULATE_PREPARES => false,
            ]
        );
    }
    return $pdo;
}
|
|
|
|
/**
 * Fetches every active AI provider that has a non-empty API key,
 * ordered by configured priority (lowest value first).
 *
 * @return array Provider rows as associative arrays.
 */
function getProviders() {
    $sql = "SELECT * FROM admin.hamid_providers "
         . "WHERE is_active = true AND api_key != '' AND api_key IS NOT NULL "
         . "ORDER BY priority";
    $rows = getHamidDB()->query($sql)->fetchAll(PDO::FETCH_ASSOC);
    return $rows;
}
|
|
|
|
/**
 * Looks up a single active provider by name, case-insensitively.
 *
 * @param string $name Provider name to match.
 * @return array|false Provider row, or false when no active match exists.
 */
function getProvider($name) {
    $db = getHamidDB();
    $query = $db->prepare(
        "SELECT * FROM admin.hamid_providers WHERE LOWER(provider_name) = LOWER(?) AND is_active = true"
    );
    $query->execute([$name]);
    return $query->fetch(PDO::FETCH_ASSOC);
}
|
|
|
|
/**
 * Returns the current "winner" Brain configurations joined with their
 * full config rows.
 *
 * With an ISP filter: the top 5 active winners for that ISP
 * (case-insensitive), best inbox rate first. Without a filter: the top
 * 10 active winners overall.
 *
 * NOTE(review): `bw.*, bc.*` share column names (e.g. `id`, `config_id`?);
 * with FETCH_ASSOC the right-hand table's values overwrite duplicates —
 * confirm callers expect bc values for the shared keys.
 *
 * @param string|null $isp Optional ISP target to filter on.
 * @return array Winner rows as associative arrays.
 */
function getBrainWinners($isp = null) {
    $pdo  = getHamidDB();
    $base = "SELECT bw.*, bc.* FROM admin.brain_winners bw LEFT JOIN admin.brain_configs bc ON bw.config_id = bc.id WHERE bw.is_active = true";

    if ($isp) {
        $stmt = $pdo->prepare($base . " AND LOWER(bw.isp_target) = LOWER(?) ORDER BY bw.inbox_rate DESC LIMIT 5");
        $stmt->execute([$isp]);
        return $stmt->fetchAll(PDO::FETCH_ASSOC);
    }

    $stmt = $pdo->query($base . " ORDER BY bw.inbox_rate DESC LIMIT 10");
    return $stmt->fetchAll(PDO::FETCH_ASSOC);
}
|
|
|
|
/**
 * Fetches every Brain config row, ordered by id.
 *
 * @return array Config rows as associative arrays.
 */
function getBrainConfigs() {
    $db   = getHamidDB();
    $stmt = $db->query("SELECT * FROM admin.brain_configs ORDER BY id");
    return $stmt->fetchAll(PDO::FETCH_ASSOC);
}
|
|
|
|
/**
 * Sends one chat message to a single AI provider and returns its reply.
 *
 * The wire format is selected from the provider name:
 *   - gemini: Google Generative Language REST API (key in URL)
 *   - ollama / hamid engine / gpu-local: local Ollama chat endpoint
 *   - claude: Anthropic Messages API
 *   - anything else: OpenAI-compatible chat completions
 *
 * @param array  $provider     Provider row: provider_name, api_key, model, api_url.
 * @param string $message      User message to send.
 * @param string $systemPrompt Optional system prompt; when empty, one is built
 *                             from the current Brain winner configs.
 * @return array On success: response, provider, model, latency (ms), http_code.
 *               On failure: error (plus latency when the HTTP call was made).
 */
function callProvider($provider, $message, $systemPrompt = '') {
    $name   = strtolower($provider['provider_name']);
    $apiKey = $provider['api_key'];
    $model  = $provider['model'];
    $url    = $provider['api_url'];

    // Local engines need no key; every remote provider does.
    if (empty($apiKey) && !in_array($name, ['ollama', 'hamid engine'], true)) {
        return ['error' => 'No API key'];
    }

    $timeout = 15;

    // Default system prompt: inject current Brain winner knowledge.
    if (empty($systemPrompt)) {
        $brainWinners = getBrainWinners();
        $brainSummary = '';
        foreach ($brainWinners as $w) {
            $brainSummary .= "- ISP: {$w['isp_target']}, Inbox: {$w['inbox_rate']}%, Config #{$w['config_id']}, Confidence: {$w['confidence_level']}\n";
        }
        $systemPrompt = "Tu es HAMID, assistant IA spécialisé DELIVERADS (envoi email, delivrabilité, configurations Brain).
Tu connais les configurations winning actuelles du Brain Engine:
$brainSummary
Tu donnes des conseils concrets sur les configurations, ISPs, headers, et stratégies d'envoi.";
    }

    // Gemini has a different API format (no system role; key as query param).
    if ($name === 'gemini') {
        $url = "https://generativelanguage.googleapis.com/v1beta/models/{$model}:generateContent?key={$apiKey}";
        $payload = json_encode([
            'contents' => [
                ['parts' => [['text' => $systemPrompt . "\n\nUser: " . $message]]]
            ]
        ]);
        $headers = ['Content-Type: application/json'];
    }
    // Ollama / HAMID Engine (local) — big models, so a much longer timeout.
    elseif (in_array($name, ['ollama', 'hamid engine', 'gpu-local'], true)) {
        $url = 'http://88.198.4.195:11435/api/chat';
        $payload = json_encode([
            'model' => $model,
            'messages' => [
                ['role' => 'system', 'content' => $systemPrompt],
                ['role' => 'user', 'content' => $message]
            ],
            'stream' => false
        ]);
        $headers = ['Content-Type: application/json'];
        $timeout = 120;
    }
    // Claude has its own format (system prompt is a top-level field).
    elseif ($name === 'claude') {
        $payload = json_encode([
            'model' => $model,
            'max_tokens' => 1024,
            'system' => $systemPrompt,
            'messages' => [['role' => 'user', 'content' => $message]]
        ]);
        $headers = [
            'Content-Type: application/json',
            'x-api-key: ' . $apiKey,
            'anthropic-version: 2023-06-01'
        ];
    }
    // Standard OpenAI-compatible chat completions.
    else {
        $payload = json_encode([
            'model' => $model,
            'messages' => [
                ['role' => 'system', 'content' => $systemPrompt],
                ['role' => 'user', 'content' => $message]
            ],
            'max_tokens' => 1024,
            'temperature' => 0.7
        ]);
        $headers = [
            'Content-Type: application/json',
            'Authorization: Bearer ' . $apiKey
        ];
    }

    $ch = curl_init($url);
    curl_setopt_array($ch, [
        CURLOPT_RETURNTRANSFER => true,
        CURLOPT_POST => true,
        CURLOPT_POSTFIELDS => $payload,
        CURLOPT_HTTPHEADER => $headers,
        CURLOPT_TIMEOUT => $timeout,
        CURLOPT_CONNECTTIMEOUT => 5,
        // NOTE(review): TLS peer verification is disabled — presumably for
        // self-signed/local endpoints. Confirm this is intentional for the
        // cloud providers; it allows MITM of API traffic.
        CURLOPT_SSL_VERIFYPEER => false
    ]);

    $start    = microtime(true);
    $response = curl_exec($ch);
    $httpCode = curl_getinfo($ch, CURLINFO_HTTP_CODE);
    // Capture the transport error (timeout, DNS, TLS...) before closing the handle.
    $curlErr  = curl_error($ch);
    $latency  = round((microtime(true) - $start) * 1000);
    curl_close($ch);

    if ($response === false || $response === '' || $httpCode >= 400) {
        $detail = ($curlErr !== '') ? " ($curlErr)" : '';
        return ['error' => "HTTP $httpCode" . $detail, 'latency' => $latency];
    }

    $data = json_decode($response, true);

    // Extract the reply text according to each provider's response shape.
    if ($name === 'gemini') {
        $text = $data['candidates'][0]['content']['parts'][0]['text'] ?? '';
    } elseif (in_array($name, ['ollama', 'hamid engine', 'gpu-local'], true)) {
        $text = $data['message']['content'] ?? '';
    } elseif ($name === 'claude') {
        $text = $data['content'][0]['text'] ?? '';
    } else {
        $text = $data['choices'][0]['message']['content'] ?? '';
    }

    return [
        'response' => $text,
        'provider' => $provider['provider_name'],
        'model' => $model,
        'latency' => $latency,
        'http_code' => $httpCode
    ];
}
|
|
|
|
/**
 * Sends a message through the provider chain with automatic failover.
 *
 * Order of attempts:
 *   1. If $preferredProvider starts with "gpu-", route directly to the
 *      local Ollama server with the mapped model; on failure, fall through.
 *   2. Each active DB provider (preferred provider first, then by priority).
 *   3. Factory accounts from admin.ia_provider_accounts as a last resort.
 *
 * On the first success, token usage is logged and — when the Brain Nucleus
 * functions are loaded — the feedback/learning pipeline is run.
 *
 * @param string      $message           User message.
 * @param string|null $preferredProvider Provider to try first, or a "gpu-*" alias.
 * @param string      $systemPrompt      Optional explicit system prompt.
 * @return array Provider result, or ['error' => 'All providers failed',
 *               'details' => [...]] when every attempt fails.
 */
function callWithFailover($message, $preferredProvider = null, $systemPrompt = '') {
    // === GPU LOCAL ROUTING: gpu-* providers → Ollama S88 ===
    if ($preferredProvider && strpos($preferredProvider, 'gpu-') === 0) {
        // Alias → Ollama model tag; unknown aliases just drop the "gpu-" prefix.
        $gpuModels = [
            'gpu-deepseek-r1' => 'deepseek-r1:32b',
            'gpu-llama3.3' => 'llama3.3:70b',
            'gpu-qwen72b' => 'qwen2.5:72b',
            'gpu-deepseek-r1-70b' => 'deepseek-r1:70b',
            'gpu-nemotron' => 'nemotron:70b',
            'gpu-mixtral-8x22b' => 'mixtral:8x22b',
            'gpu-codellama' => 'codellama:70b',
            'gpu-command-r' => 'command-r-plus:104b',
            'gpu-llama405b' => 'llama3.1:405b-instruct-q2_K',
            'gpu-deepseek-coder' => 'deepseek-coder-v2:236b',
            'gpu-wizardlm' => 'wizardlm2:8x22b',
        ];
        $model = $gpuModels[$preferredProvider] ?? str_replace('gpu-', '', $preferredProvider);

        $gpuProvider = [
            'provider_name' => 'GPU-Local',
            'api_key' => '',
            'model' => $model,
            'api_url' => 'http://88.198.4.195:11435/api/chat',
            'is_active' => true,
            'priority' => 0
        ];

        if (empty($systemPrompt) && function_exists('buildEnhancedSystemPrompt')) {
            $systemPrompt = buildEnhancedSystemPrompt($message);
        }

        $result = callProvider($gpuProvider, $message, $systemPrompt);
        if (!isset($result['error']) && !empty($result['response'])) {
            return $result;
        }
        // GPU failed → fall through to cloud failover below.
    }

    // === BRAIN NUCLEUS v2: enhanced system prompt (deep thinking + lead detection) ===
    if (empty($systemPrompt) && function_exists('buildEnhancedSystemPrompt')) {
        $systemPrompt = buildEnhancedSystemPrompt($message);
    } elseif (empty($systemPrompt) && function_exists('getNucleusPrompt')) {
        $systemPrompt = getNucleusPrompt();
        if (function_exists('injectAdaptiveReasoning')) {
            $systemPrompt = injectAdaptiveReasoning($systemPrompt, $message);
        }
    }

    $providers = getProviders();

    // Move the preferred provider to the front; everything else keeps priority order.
    if ($preferredProvider) {
        usort($providers, function ($a, $b) use ($preferredProvider) {
            if (strtolower($a['provider_name']) === strtolower($preferredProvider)) return -1;
            if (strtolower($b['provider_name']) === strtolower($preferredProvider)) return 1;
            return $a['priority'] <=> $b['priority'];
        });
    }

    $errors = [];
    foreach ($providers as $p) {
        $result = callProvider($p, $message, $systemPrompt);
        if (isset($result['error']) || empty($result['response'])) {
            $errors[] = $p['provider_name'] . ': ' . ($result['error'] ?? 'empty response');
            continue;
        }

        logTokenUsage($p["provider_name"], $result);

        // === BRAIN NUCLEUS: Feedback + Learning ===
        if (function_exists('feedbackLoop')) {
            feedbackLoop($message, $result['response'] ?? '', $p['provider_name'], $result['latency'] ?? 0);
        }
        if (function_exists('continuousLearning')) {
            continuousLearning($message, $result['response'] ?? '', $p['provider_name']);
        }
        // === BRAIN NUCLEUS v2: Full pipeline (archive + leads + provider intel + gaps) ===
        if (function_exists('masterPostResponsePipeline')) {
            $pipeline = masterPostResponsePipeline($message, $result['response'] ?? '', $p['provider_name'], $result['latency'] ?? 0, null, [], 50);
            $result['quality_score'] = $pipeline['quality']['score'] ?? null;
            $result['lead_detected'] = isset($pipeline['lead']) ? $pipeline['lead']['level'] : null;
        }

        return $result;
    }

    // Last resort: factory accounts. NOTE(review): these successes skip the
    // Brain feedback pipeline above — confirm that is intentional.
    $extras = getFactoryAccounts();
    foreach ($extras as $ex) {
        $fallback = callProvider(
            [
                'provider_name' => $ex['provider_name'],
                'api_key' => $ex['api_key'],
                'model' => $ex['model'],
                'api_url' => $ex['api_url']
            ],
            $message,
            $systemPrompt
        );
        if (!isset($fallback['error']) && !empty($fallback['response'])) {
            logTokenUsage($ex['provider_name'], $fallback);
            return $fallback;
        }
    }

    return ['error' => 'All providers failed', 'details' => $errors];
}
|
|
|
|
/**
 * Best-effort insert of one usage row into admin.ia_token_usage_log.
 *
 * The token count is a rough estimate (response length / 4). Any database
 * failure is deliberately swallowed so logging can never break a response.
 *
 * @param string $provider Provider name to record.
 * @param array  $result   Result array produced by callProvider().
 * @return void
 */
function logTokenUsage($provider, $result) {
    try {
        $responseText    = $result['response'] ?? '';
        $estimatedTokens = (int) (strlen($responseText) / 4);
        $latencyMs       = (int) ($result['latency'] ?? 0);

        getHamidDB()
            ->prepare("INSERT INTO admin.ia_token_usage_log(provider_name,tokens_total,latency_ms,success) VALUES(?,?,?,true)")
            ->execute([$provider, $estimatedTokens, $latencyMs]);
    } catch (Exception $e) {
        // Intentionally ignored: logging must never interrupt the caller.
    }
}
|
|
|
|
/**
 * Returns up to five usable "factory" provider accounts for failover:
 * active, with a plausible API key (> 5 chars), out of cooldown, and
 * under their daily request limit — best priority/success rate first.
 *
 * @return array Account rows; empty array on any DB error.
 */
function getFactoryAccounts() {
    $sql = "SELECT id,provider_name,api_key,model,api_url FROM admin.ia_provider_accounts WHERE status='active' AND api_key IS NOT NULL AND LENGTH(api_key)>5 AND (cooldown_until IS NULL OR cooldown_until<NOW()) AND requests_today<requests_limit_daily ORDER BY priority,success_rate DESC LIMIT 5";
    try {
        return getHamidDB()->query($sql)->fetchAll(PDO::FETCH_ASSOC);
    } catch (Exception $e) {
        // No factory accounts available (or table missing) — failover simply skips this tier.
        return [];
    }
}
|
|
?>
|