<?php
// WEVAL Extra Providers — auto-activates when tokens are set in /etc/weval/secrets.env
// Loaded by weval-ia-fast.php before S151 fallback
// Load secrets once per process into $_ws (KEY => value map read from
// /etc/weval/secrets.env). `static` keeps the parse from repeating if this
// file is included more than once within the same scope.
static $_ws = null;
if ($_ws === null) {
    $_ws = [];
    // BUGFIX: file() returns false when the file is missing or unreadable,
    // and foreach over false is a fatal TypeError on PHP 8 — guard it so a
    // missing secrets file just means "no extra providers".
    $_lines = @file('/etc/weval/secrets.env', FILE_IGNORE_NEW_LINES | FILE_SKIP_EMPTY_LINES);
    if ($_lines !== false) {
        foreach ($_lines as $_l) {
            $_l = trim($_l);
            // Skip blank lines and '#' comments.
            if (!$_l || $_l[0] === '#') continue;
            // Split on the FIRST '=' only, so values may themselves contain '='.
            $_p = strpos($_l, '=');
            if ($_p !== false) {
                $_ws[substr($_l, 0, $_p)] = substr($_l, $_p + 1);
            }
        }
    }
}
|
|
|
|
// Skills context loader: optionally enrich the system prompt with skills
// context when the loader script is deployed. `@include_once` keeps a
// missing file silent (best-effort by design).
@include_once '/var/www/weval/wevia-ia/skills-context-loader.php';
if (function_exists('loadClawSkillsContext')) {
    $skillsContext = loadClawSkillsContext($msg);
    if ($skillsContext) {
        $sys .= $skillsContext;
    }
}
|
|
|
|
// === GEMINI 2.5 FLASH (Google FREE) ===
// First fallback: Google's generateContent endpoint, used only when no
// earlier provider produced $resp and a GEMINI_KEY secret is present.
if (!$resp) {
    $geminiKey = $_ws['GEMINI_KEY'] ?? '';
    if ($geminiKey) {
        $geminiPayload = [
            'contents'          => [['parts' => [['text' => $msg]]]],
            'systemInstruction' => ['parts' => [['text' => mb_substr($sys, 0, 2000)]]],
            'generationConfig'  => ['maxOutputTokens' => $maxTok, 'temperature' => 0.7],
        ];
        $curl = curl_init('https://generativelanguage.googleapis.com/v1beta/models/gemini-2.5-flash:generateContent?key=' . $geminiKey);
        curl_setopt_array($curl, [
            CURLOPT_POST           => true,
            CURLOPT_RETURNTRANSFER => true,
            CURLOPT_TIMEOUT        => 10,
            CURLOPT_CONNECTTIMEOUT => 3,
            CURLOPT_HTTPHEADER     => ['Content-Type: application/json'],
            CURLOPT_POSTFIELDS     => json_encode($geminiPayload),
        ]);
        $rawBody = curl_exec($curl);
        $httpStatus = curl_getinfo($curl, CURLINFO_HTTP_CODE);
        curl_close($curl);
        if ($httpStatus === 200 && $rawBody) {
            $decoded = json_decode($rawBody, true);
            $answer = trim($decoded['candidates'][0]['content']['parts'][0]['text'] ?? '');
            // Responses shorter than 20 bytes are treated as junk/refusals.
            if (strlen($answer) > 20) {
                $resp = $answer;
                $provider = 'Gemini 2.5 Flash';
            }
        }
    }
}
|
|
|
|
// === CLOUDFLARE LLAMA 3.1 8B (FREE GPU) ===
// Workers AI run endpoint, authenticated with the legacy email+key pair.
if (!$resp) {
    $cfToken = $_ws['CF_API_TOKEN'] ?? '';
    $cfMail  = $_ws['CF_EMAIL'] ?? '';
    if ($cfToken && $cfMail) {
        $chatBody = json_encode([
            'messages' => [
                ['role' => 'system', 'content' => mb_substr($sys, 0, 2000)],
                ['role' => 'user',   'content' => mb_substr($msg, 0, 2000)],
            ],
        ]);
        $curl = curl_init('https://api.cloudflare.com/client/v4/accounts/d3d50d5b6fb372afed8d7a8e4b16dd10/ai/run/@cf/meta/llama-3.1-8b-instruct');
        curl_setopt_array($curl, [
            CURLOPT_POST           => true,
            CURLOPT_RETURNTRANSFER => true,
            CURLOPT_TIMEOUT        => 12,
            CURLOPT_CONNECTTIMEOUT => 3,
            CURLOPT_HTTPHEADER     => [
                'Content-Type: application/json',
                'X-Auth-Email: ' . $cfMail,
                'X-Auth-Key: ' . $cfToken,
            ],
            CURLOPT_POSTFIELDS     => $chatBody,
        ]);
        $rawBody = curl_exec($curl);
        $httpStatus = curl_getinfo($curl, CURLINFO_HTTP_CODE);
        curl_close($curl);
        if ($httpStatus === 200 && $rawBody) {
            $decoded = json_decode($rawBody, true);
            $answer = trim($decoded['result']['response'] ?? '');
            if (strlen($answer) > 20) {
                $resp = $answer;
                $provider = 'Cloudflare Llama-3.1-8B (FREE GPU)';
            }
        }
    }
}
|
|
|
|
// === CLOUDFLARE DEEPSEEK R1 32B (FREE GPU) ===
// Same Workers AI account as the Llama block; R1 emits <think>…</think>
// reasoning traces which are stripped before use.
if (!$resp) {
    $cfToken = $_ws['CF_API_TOKEN'] ?? '';
    $cfMail  = $_ws['CF_EMAIL'] ?? '';
    if ($cfToken && $cfMail) {
        $chatBody = json_encode([
            'messages' => [
                ['role' => 'system', 'content' => mb_substr($sys, 0, 2000)],
                ['role' => 'user',   'content' => mb_substr($msg, 0, 2000)],
            ],
        ]);
        $curl = curl_init('https://api.cloudflare.com/client/v4/accounts/d3d50d5b6fb372afed8d7a8e4b16dd10/ai/run/@cf/deepseek-ai/deepseek-r1-distill-qwen-32b');
        curl_setopt_array($curl, [
            CURLOPT_POST           => true,
            CURLOPT_RETURNTRANSFER => true,
            CURLOPT_TIMEOUT        => 15,
            CURLOPT_CONNECTTIMEOUT => 3,
            CURLOPT_HTTPHEADER     => [
                'Content-Type: application/json',
                'X-Auth-Email: ' . $cfMail,
                'X-Auth-Key: ' . $cfToken,
            ],
            CURLOPT_POSTFIELDS     => $chatBody,
        ]);
        $rawBody = curl_exec($curl);
        $httpStatus = curl_getinfo($curl, CURLINFO_HTTP_CODE);
        curl_close($curl);
        if ($httpStatus === 200 && $rawBody) {
            $decoded = json_decode($rawBody, true);
            $answer = trim($decoded['result']['response'] ?? '');
            // Drop the chain-of-thought block, then re-trim what remains.
            $answer = preg_replace('/<think>.*?<\/think>/s', '', $answer);
            $answer = trim($answer);
            if (strlen($answer) > 20) {
                $resp = $answer;
                $provider = 'Cloudflare DeepSeek-R1-32B (FREE GPU)';
            }
        }
    }
}
|
|
|
|
// === NVIDIA NIM NEMOTRON 49B (FREE GPU) ===
// OpenAI-compatible chat completions on NVIDIA's NIM gateway.
if (!$resp) {
    $nimKey = $_ws['NVIDIA_NIM_KEY'] ?? '';
    if ($nimKey) {
        $chatBody = json_encode([
            'model'       => 'nvidia/llama-3.3-nemotron-super-49b-v1',
            'messages'    => [
                ['role' => 'system', 'content' => mb_substr($sys, 0, 2000)],
                ['role' => 'user',   'content' => mb_substr($msg, 0, 2000)],
            ],
            'max_tokens'  => $maxTok,
            'temperature' => 0.7,
        ]);
        $curl = curl_init('https://integrate.api.nvidia.com/v1/chat/completions');
        curl_setopt_array($curl, [
            CURLOPT_POST           => true,
            CURLOPT_RETURNTRANSFER => true,
            CURLOPT_TIMEOUT        => 12,
            CURLOPT_CONNECTTIMEOUT => 3,
            CURLOPT_HTTPHEADER     => ['Content-Type: application/json', 'Authorization: Bearer ' . $nimKey],
            CURLOPT_POSTFIELDS     => $chatBody,
        ]);
        $rawBody = curl_exec($curl);
        $httpStatus = curl_getinfo($curl, CURLINFO_HTTP_CODE);
        curl_close($curl);
        if ($httpStatus === 200 && $rawBody) {
            $decoded = json_decode($rawBody, true);
            $answer = trim($decoded['choices'][0]['message']['content'] ?? '');
            if (strlen($answer) > 20) {
                $resp = $answer;
                $provider = 'NVIDIA NIM Nemotron-49B (FREE GPU)';
            }
        }
    }
}
|
|
|
|
// === OLLAMA S204 (sovereign CPU) ===
// Local Ollama daemon on loopback — no key required; shorter prompt caps
// (1500 chars) and a capped num_predict keep CPU latency bounded.
if (!$resp) {
    $chatBody = json_encode([
        'model'    => 'qwen2.5:7b',
        'messages' => [
            ['role' => 'system', 'content' => mb_substr($sys, 0, 1500)],
            ['role' => 'user',   'content' => mb_substr($msg, 0, 1500)],
        ],
        'stream'   => false,
        'options'  => ['num_predict' => 500, 'temperature' => 0.7],
    ]);
    $curl = curl_init('http://127.0.0.1:11434/api/chat');
    curl_setopt_array($curl, [
        CURLOPT_POST           => true,
        CURLOPT_RETURNTRANSFER => true,
        CURLOPT_TIMEOUT        => 15,
        CURLOPT_CONNECTTIMEOUT => 2,
        CURLOPT_HTTPHEADER     => ['Content-Type: application/json'],
        CURLOPT_POSTFIELDS     => $chatBody,
    ]);
    $rawBody = curl_exec($curl);
    $httpStatus = curl_getinfo($curl, CURLINFO_HTTP_CODE);
    curl_close($curl);
    if ($httpStatus === 200 && $rawBody) {
        $decoded = json_decode($rawBody, true);
        $answer = trim($decoded['message']['content'] ?? '');
        if (strlen($answer) > 20) {
            $resp = $answer;
            $provider = 'Ollama S204 qwen2.5:7b (sovereign)';
        }
    }
}
|
|
|
|
// === AUTO-ACTIVATE FUTURE PROVIDERS (fill key in secrets.env → provider activates) ===
// OpenRouter free-tier Nemotron; activates as soon as OPENROUTER_KEY is set.
if (!$resp && !empty($_ws['OPENROUTER_KEY'])) {
    $chatBody = json_encode([
        'model'      => 'nvidia/nemotron-3-super-120b-a12b:free',
        'messages'   => [
            ['role' => 'system', 'content' => mb_substr($sys, 0, 2000)],
            ['role' => 'user',   'content' => mb_substr($msg, 0, 2000)],
        ],
        'max_tokens' => $maxTok,
    ]);
    $curl = curl_init('https://openrouter.ai/api/v1/chat/completions');
    curl_setopt_array($curl, [
        CURLOPT_POST           => true,
        CURLOPT_RETURNTRANSFER => true,
        CURLOPT_TIMEOUT        => 12,
        CURLOPT_CONNECTTIMEOUT => 3,
        CURLOPT_HTTPHEADER     => ['Content-Type: application/json', 'Authorization: Bearer ' . $_ws['OPENROUTER_KEY']],
        CURLOPT_POSTFIELDS     => $chatBody,
    ]);
    $rawBody = curl_exec($curl);
    $httpStatus = curl_getinfo($curl, CURLINFO_HTTP_CODE);
    curl_close($curl);
    if ($httpStatus === 200 && $rawBody) {
        $decoded = json_decode($rawBody, true);
        $answer = trim($decoded['choices'][0]['message']['content'] ?? '');
        if (strlen($answer) > 20) {
            $resp = $answer;
            $provider = 'OpenRouter Nemotron-120B (FREE GPU)';
        }
    }
}
|
|
// Mistral free tier — activates when MISTRAL_KEY is present in secrets.env.
if (!$resp && !empty($_ws['MISTRAL_KEY'])) {
    $chatBody = json_encode([
        'model'      => 'mistral-small-latest',
        'messages'   => [
            ['role' => 'system', 'content' => mb_substr($sys, 0, 2000)],
            ['role' => 'user',   'content' => mb_substr($msg, 0, 2000)],
        ],
        'max_tokens' => $maxTok,
    ]);
    $curl = curl_init('https://api.mistral.ai/v1/chat/completions');
    curl_setopt_array($curl, [
        CURLOPT_POST           => true,
        CURLOPT_RETURNTRANSFER => true,
        CURLOPT_TIMEOUT        => 10,
        CURLOPT_CONNECTTIMEOUT => 3,
        CURLOPT_HTTPHEADER     => ['Content-Type: application/json', 'Authorization: Bearer ' . $_ws['MISTRAL_KEY']],
        CURLOPT_POSTFIELDS     => $chatBody,
    ]);
    $rawBody = curl_exec($curl);
    $httpStatus = curl_getinfo($curl, CURLINFO_HTTP_CODE);
    curl_close($curl);
    if ($httpStatus === 200 && $rawBody) {
        $decoded = json_decode($rawBody, true);
        $answer = trim($decoded['choices'][0]['message']['content'] ?? '');
        if (strlen($answer) > 20) {
            $resp = $answer;
            $provider = 'Mistral Small (FREE)';
        }
    }
}
|
|
// Together.ai (paid credits) — activates when TOGETHER_KEY is present.
if (!$resp && !empty($_ws['TOGETHER_KEY'])) {
    $chatBody = json_encode([
        'model'      => 'meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo',
        'messages'   => [
            ['role' => 'system', 'content' => mb_substr($sys, 0, 2000)],
            ['role' => 'user',   'content' => mb_substr($msg, 0, 2000)],
        ],
        'max_tokens' => $maxTok,
    ]);
    $curl = curl_init('https://api.together.xyz/v1/chat/completions');
    curl_setopt_array($curl, [
        CURLOPT_POST           => true,
        CURLOPT_RETURNTRANSFER => true,
        CURLOPT_TIMEOUT        => 10,
        CURLOPT_CONNECTTIMEOUT => 3,
        CURLOPT_HTTPHEADER     => ['Content-Type: application/json', 'Authorization: Bearer ' . $_ws['TOGETHER_KEY']],
        CURLOPT_POSTFIELDS     => $chatBody,
    ]);
    $rawBody = curl_exec($curl);
    $httpStatus = curl_getinfo($curl, CURLINFO_HTTP_CODE);
    curl_close($curl);
    if ($httpStatus === 200 && $rawBody) {
        $decoded = json_decode($rawBody, true);
        $answer = trim($decoded['choices'][0]['message']['content'] ?? '');
        if (strlen($answer) > 20) {
            $resp = $answer;
            $provider = 'Together.ai (credits)';
        }
    }
}
|
|
|
|
// === HUGGINGFACE INFERENCE (FREE) ===
// Text fallback through the HuggingFace router (OpenAI-compatible endpoint).
if (!$resp && !empty($_ws['HF_TOKEN'])) {
    $ch = curl_init('https://router.huggingface.co/v1/chat/completions');
    curl_setopt_array($ch, [CURLOPT_POST=>true, CURLOPT_RETURNTRANSFER=>true, CURLOPT_TIMEOUT=>15, CURLOPT_CONNECTTIMEOUT=>3, CURLOPT_HTTPHEADER=>['Content-Type: application/json', 'Authorization: Bearer '.$_ws['HF_TOKEN']], CURLOPT_POSTFIELDS=>json_encode(['model'=>'Qwen/Qwen2.5-72B-Instruct', 'messages'=>[['role'=>'system','content'=>mb_substr($sys,0,2000)], ['role'=>'user','content'=>mb_substr($msg,0,2000)]], 'max_tokens'=>$maxTok, 'temperature'=>0.7])]);
    $r = curl_exec($ch); $code = curl_getinfo($ch, CURLINFO_HTTP_CODE); curl_close($ch);
    if ($code == 200 && $r) { $d = json_decode($r, true); $t = trim($d['choices'][0]['message']['content'] ?? ''); if (strlen($t) > 20) { $resp = $t; $provider = 'HuggingFace Qwen-72B (FREE)'; } }
}

// === QWEN VL (VISION — OPENROUTER $0.20/M) ===
// BUGFIX: this section was previously nested INSIDE the HuggingFace `if`
// above (its closing brace was missing), so vision requests silently
// required HF_TOKEN to be set. It now runs independently whenever an
// OpenRouter key and an image URL are present.
// NOTE(review): $imageUrl is expected to be set by the including script
// (weval-ia-fast.php) — confirm. Also note that because this sits late in
// the chain and is gated on !$resp, an earlier text-only provider may have
// already answered without seeing the image — confirm intended ordering.
if (!$resp && !empty($_ws['OPENROUTER_KEY']) && !empty($imageUrl)) {
    // Build a multimodal user message: image part first, then the text part.
    $vl_msgs = [['role'=>'system','content'=>mb_substr($sys,0,1500)]];
    $vl_content = [];
    if ($imageUrl) $vl_content[] = ['type'=>'image_url','image_url'=>['url'=>$imageUrl]];
    $vl_content[] = ['type'=>'text','text'=>mb_substr($msg,0,2000)];
    $vl_msgs[] = ['role'=>'user','content'=>$vl_content];
    $ch = curl_init('https://openrouter.ai/api/v1/chat/completions');
    curl_setopt_array($ch, [CURLOPT_POST=>true, CURLOPT_RETURNTRANSFER=>true, CURLOPT_TIMEOUT=>25, CURLOPT_CONNECTTIMEOUT=>3, CURLOPT_HTTPHEADER=>['Content-Type: application/json', 'Authorization: Bearer '.$_ws['OPENROUTER_KEY']], CURLOPT_POSTFIELDS=>json_encode(['model'=>'qwen/qwen2.5-vl-32b-instruct', 'messages'=>$vl_msgs, 'max_tokens'=>$maxTok, 'temperature'=>0.7])]);
    $r = curl_exec($ch); $code = curl_getinfo($ch, CURLINFO_HTTP_CODE); curl_close($ch);
    if ($code == 200 && $r) { $d = json_decode($r, true); $t = trim($d['choices'][0]['message']['content'] ?? ''); if (strlen($t) > 20) { $resp = $t; $provider = 'Qwen VL-32B (Vision)'; } }
}
|
|
// === COHERE COMMAND R+ (FREE TRIAL) ===
// Cohere v2 chat — final provider in this chain. The response text lives at
// message.content[0].text in v2, with a fallback to a top-level "text" field.
if (!$resp && !empty($_ws['COHERE_KEY'])) {
    $chatBody = json_encode([
        'model'      => 'command-a-03-2025',
        'messages'   => [
            ['role' => 'system', 'content' => mb_substr($sys, 0, 2000)],
            ['role' => 'user',   'content' => mb_substr($msg, 0, 2000)],
        ],
        'max_tokens' => $maxTok,
    ]);
    $curl = curl_init('https://api.cohere.com/v2/chat');
    curl_setopt_array($curl, [
        CURLOPT_POST           => true,
        CURLOPT_RETURNTRANSFER => true,
        CURLOPT_TIMEOUT        => 12,
        CURLOPT_CONNECTTIMEOUT => 3,
        CURLOPT_HTTPHEADER     => ['Content-Type: application/json', 'Authorization: Bearer ' . $_ws['COHERE_KEY']],
        CURLOPT_POSTFIELDS     => $chatBody,
    ]);
    $rawBody = curl_exec($curl);
    $httpStatus = curl_getinfo($curl, CURLINFO_HTTP_CODE);
    curl_close($curl);
    if ($httpStatus === 200 && $rawBody) {
        $decoded = json_decode($rawBody, true);
        $answer = trim($decoded['message']['content'][0]['text'] ?? $decoded['text'] ?? '');
        if (strlen($answer) > 20) {
            $resp = $answer;
            $provider = 'Cohere Command R+';
        }
    }
}
|
|
|