205 lines
9.8 KiB
PHP
205 lines
9.8 KiB
PHP
<?php
|
|
// WEVAL ARCHITECTURE AUTONOMOUS ENGINE v1.0
|
|
// Auto-discovers infrastructure topology, executes optimizations via WEVIA Master
|
|
// Cron: */30 — generates /api/architecture-topology.json
|
|
|
|
// Wall-clock start of the scan (used for the scan_ms stat at the end).
$t0 = microtime(true);

// Topology accumulator filled by every discovery step below.
$T = [
    'generated'    => date('Y-m-d H:i:s'),
    'nodes'        => [],
    'edges'        => [],
    'auto_actions' => [],
];
|
|
|
|
/**
 * Run a shell command under `timeout` and return its stdout as one string.
 *
 * stderr is discarded; a command that fails or times out simply yields ''.
 *
 * @param string $c Shell command (trusted, built internally — not user input).
 * @param int    $t Maximum runtime in seconds (default 5).
 * @return string Output lines joined with "\n" (empty string on failure).
 */
function sh($c, $t = 5)
{
    $out = [];
    exec("timeout $t $c 2>/dev/null", $out);
    return implode("\n", $out);
}
|
|
/**
 * Run a SQL query against a local PostgreSQL database and return all rows.
 *
 * Best-effort: connection or query failures are swallowed and yield [].
 *
 * @param string $sql Query to execute (trusted, built internally).
 * @param string $db  Database name (default 'adx_system').
 * @return array List of associative rows (column => value).
 */
function pg($sql, $db = 'adx_system')
{
    $conn = @pg_connect("host=127.0.0.1 dbname=$db user=admin password=admin123");
    if (!$conn) {
        return [];
    }
    $res = @pg_query($conn, $sql);
    if (!$res) {
        pg_close($conn);
        return [];
    }
    $rows = [];
    while ($row = pg_fetch_assoc($res)) {
        $rows[] = $row;
    }
    pg_close($conn);
    return $rows;
}
|
|
|
|
// ═══ AUTO-DISCOVER NODES ═══
|
|
|
|
// ── 1. Docker containers → nodes ──
// One pipe-separated line per running container: name|status|ports.
$docker = sh("docker ps --format '{{.Names}}|{{.Status}}|{{.Ports}}' --no-trunc 2>/dev/null", 8);
foreach (explode("\n", $docker) as $row) {
    if (!$row) {
        continue;
    }
    $parts = explode('|', $row, 3);
    $cname = $parts[0];

    // Docker status strings look like "Up 3 days (healthy)".
    $isUp      = strpos($parts[1], 'Up') !== false;
    $isHealthy = strpos($parts[1], 'healthy') !== false;

    // Published port mappings "hostPort->containerPort"; the container-side
    // number (capture group 2) is what gets recorded on the node.
    preg_match_all('/(\d+)->(\d+)/', $parts[2] ?? '', $pm);
    $cports = array_map('intval', $pm[2] ?? []);

    $T['nodes'][] = [
        'id'     => 'docker_' . $cname,
        'label'  => $cname,
        'type'   => 'docker',
        'server' => 'S204',
        'status' => $isHealthy ? 'healthy' : ($isUp ? 'up' : 'down'),
        'ports'  => $cports,
        'group'  => classify_docker($cname),
    ];
}
|
|
|
|
// ── 2. Listening ports → discover services ──
// `ss -tlnp` column 4 is the local address:port, column 6 the owning process.
// NOTE(review): $port_map is built here but never read again in this file —
// kept for parity / possible external use; confirm before removing.
$ports_raw = sh("ss -tlnp 2>/dev/null | awk '{print \$4,\$6}' | tail -n+2");
$port_map = [];
foreach (explode("\n", $ports_raw) as $entry) {
    if (!$entry) {
        continue;
    }
    preg_match('/:(\d+)\s/', $entry, $pm);        // port out of "addr:port "
    preg_match('/\"([^\"]+)\"/', $entry, $proc);  // process name, e.g. users:(("nginx",...))
    if (!empty($pm[1])) {
        $port_map[(int)$pm[1]] = $proc[1] ?? 'unknown';
    }
}
|
|
|
|
// ── 3. Nginx domains → nodes + edges ──
// glob() can return false on error; normalise to an empty list.
$nginx_files = glob('/etc/nginx/sites-enabled/*') ?: [];
foreach ($nginx_files as $f) {
    $name = basename($f);
    // Cast: file_get_contents() returns false on unreadable files; treat as
    // empty config instead of feeding false into preg_match_all (deprecated
    // on PHP 8.1+).
    $content = (string)@file_get_contents($f);

    // Collect every server_name directive (one directive may list several domains).
    preg_match_all('/server_name\s+([^;]+);/', $content, $sn);
    $domains = [];
    foreach ($sn[1] as $s) {
        $domains = array_merge($domains, explode(' ', trim($s)));
    }
    $domains = array_unique(array_filter($domains));

    // Upstream proxy targets: host[:port] part of proxy_pass URLs.
    preg_match_all('/proxy_pass\s+https?:\/\/([^\/;\s]+)/', $content, $pp);
    $targets = array_unique($pp[1] ?? []);

    // Authentik forward-auth detection: 'complete' requires both the outpost
    // upstream and the /application/ path blocks.
    $has_auth  = strpos($content, 'outpost.goauthentik') !== false;
    $has_paths = strpos($content, '/application/') !== false;

    $node_id = 'nginx_' . preg_replace('/[^a-z0-9]/', '_', $name);
    $T['nodes'][] = [
        'id' => $node_id, 'label' => implode(', ', array_slice($domains, 0, 2)),
        'type' => 'domain', 'server' => 'S204',
        'status' => 'active', 'auth' => $has_auth ? ($has_paths ? 'complete' : 'incomplete') : 'none',
        'group' => 'nginx',
    ];

    // Edges: domain → backend services.
    foreach ($targets as $target) {
        // Match the target's port against each docker node's recorded ports.
        // NOTE(review): the docker scan records container-side ports; confirm
        // proxy targets reference the same side of the port mapping.
        foreach ($T['nodes'] as $n) {
            if ($n['type'] !== 'docker') {
                continue;
            }
            foreach ($n['ports'] as $p) {
                if (strpos($target, ":$p") !== false) {
                    $T['edges'][] = ['from' => $node_id, 'to' => $n['id'], 'label' => ":{$p}", 'type' => 'proxy'];
                }
            }
        }
        // Generic port edge derived from the target itself.
        // (Fix: removed dead local $target_id, which was computed but never used.)
        preg_match('/:(\d+)/', $target, $tp);
        if (!empty($tp[1])) {
            $port = (int)$tp[1];
            $T['edges'][] = ['from' => $node_id, 'to' => 'port_' . $port, 'label' => ":{$port}", 'type' => 'proxy'];
        }
    }
}
|
|
|
|
// ── 4. APIs → nodes (by directory scan) ──
// Count of top-level PHP endpoints on this host.
$api_count = (int)trim(sh("find /var/www/html/api -maxdepth 1 -name '*.php' 2>/dev/null | wc -l"));
$T['nodes'][] = [
    'id' => 'apis_s204', 'label' => "S204 APIs ({$api_count})", 'type' => 'api',
    'server' => 'S204', 'status' => 'active', 'group' => 'api', 'count' => $api_count,
];

// NOTE(review): S95 count is hard-coded — it is not scanned from this host.
$api_s95 = 377;
$T['nodes'][] = [
    'id' => 'apis_s95', 'label' => "S95 Arsenal APIs ({$api_s95})", 'type' => 'api',
    'server' => 'S95', 'status' => 'active', 'group' => 'api', 'count' => $api_s95,
];
|
|
|
|
// ── 5. Databases → nodes ──
// One node per non-template database on the local PostgreSQL cluster.
$dbs = pg("SELECT datname FROM pg_database WHERE datistemplate=false", 'postgres');
foreach ($dbs as $db) {
    $T['nodes'][] = [
        'id'     => 'db_' . $db['datname'],
        'label'  => $db['datname'],
        'type'   => 'database',
        'server' => 'S204',
        'status' => 'active',
        'group'  => 'data',
    ];
}
|
|
|
|
// ── 6. AI stack → nodes ──
// Ollama model inventory via its local HTTP API (0 models when unreachable).
$ollama_models = @json_decode(@file_get_contents('http://127.0.0.1:11434/api/tags'), true);
$model_count = count($ollama_models['models'] ?? []);
$T['nodes'][] = [
    'id' => 'ollama', 'label' => "Ollama ({$model_count} models)", 'type' => 'ai',
    'server' => 'S204', 'status' => 'active', 'group' => 'ai',
];

// Qdrant: sum points_count over every collection.
$qdrant = @json_decode(@file_get_contents('http://127.0.0.1:6333/collections'), true);
$coll_count = count($qdrant['result']['collections'] ?? []);
$total_vectors = 0;
foreach (($qdrant['result']['collections'] ?? []) as $coll) {
    $info = @json_decode(@file_get_contents("http://127.0.0.1:6333/collections/{$coll['name']}"), true);
    $total_vectors += $info['result']['points_count'] ?? 0;
}
$T['nodes'][] = [
    'id' => 'qdrant', 'label' => "Qdrant ({$total_vectors} vectors)", 'type' => 'vectordb',
    'server' => 'S204', 'status' => 'active', 'group' => 'ai',
];
|
|
|
|
// ── 7. Server nodes ──
$T['nodes'][] = [
    'id' => 's204', 'label' => 'S204 PRIMARY', 'type' => 'server', 'server' => 'S204',
    'status' => 'active', 'group' => 'server',
    'ip'   => '204.168.152.13',
    // Root filesystem usage in percent ("  85%" → 85).
    'disk' => (int)trim(sh("df / --output=pcent | tail -1")),
];
$T['nodes'][] = [
    'id' => 's95', 'label' => 'S95 Arsenal', 'type' => 'server', 'server' => 'S95',
    'status' => 'active', 'group' => 'server',
    'ip' => '95.216.167.89',
];
$T['nodes'][] = [
    'id' => 's151', 'label' => 'S151 DR/Tracking', 'type' => 'server', 'server' => 'S151',
    'status' => 'active', 'group' => 'server',
    'ip' => '151.80.235.110',
];
$T['nodes'][] = ['id' => 'cloudflare', 'label' => 'Cloudflare CDN', 'type' => 'cloud', 'server' => 'cloud', 'status' => 'active', 'group' => 'cloud'];
$T['nodes'][] = ['id' => 'authentik', 'label' => 'Authentik SSO', 'type' => 'auth', 'server' => 'S204', 'status' => 'active', 'group' => 'auth'];

// Core edges: CDN → primary, primary → satellites and local services.
$T['edges'][] = ['from' => 'cloudflare', 'to' => 's204', 'label' => 'HTTPS', 'type' => 'network'];
$T['edges'][] = ['from' => 's204', 'to' => 's95', 'label' => '10.1.0.x', 'type' => 'private'];
$T['edges'][] = ['from' => 's204', 'to' => 'authentik', 'label' => ':9090', 'type' => 'auth'];
$T['edges'][] = ['from' => 's204', 'to' => 'ollama', 'label' => ':11434', 'type' => 'ai'];
$T['edges'][] = ['from' => 's204', 'to' => 'qdrant', 'label' => ':6333', 'type' => 'ai'];
|
|
|
|
// ═══ AUTONOMOUS ACTIONS ═══
// Ask WEVIA Master for optimization suggestions based on the discovered
// topology; fall back to a static list when the AI is unreachable.

// Locate the S204 server node to report its disk usage.
// Fix: array_search() returns false when 's204' is absent, and the original
// $T['nodes'][array_search(...)] would then silently read node 0 instead.
$s204_disk = '?';
foreach ($T['nodes'] as $n) {
    if (($n['id'] ?? null) === 's204') {
        $s204_disk = $n['disk'] ?? '?';
        break;
    }
}

$master_prompt = "Analyse this infrastructure topology with " . count($T['nodes']) . " nodes. "
    . "Key metrics: disk S204 " . $s204_disk . "%, "
    . "{$model_count} Ollama models, {$total_vectors} Qdrant vectors, {$api_count} APIs. "
    . "Suggest 3 concrete optimizations in JSON format: [{\"action\":\"...\",\"priority\":\"high/medium/low\",\"category\":\"...\",\"detail\":\"...\"}]";

$master_resp = @file_get_contents('http://127.0.0.1/api/wevia-master-api.php', false, stream_context_create([
    'http' => [
        'method'  => 'POST',
        'header'  => 'Content-Type: application/json',
        'content' => json_encode(['message' => $master_prompt, 'language' => 'fr', 'system' => 'architecture-engine']),
        'timeout' => 15,
    ],
]));
$master_data = @json_decode($master_resp, true);
$ai_suggestions = $master_data['response'] ?? $master_data['result'] ?? '';

// Extract the JSON array embedded in the AI's free-text answer.
// (Greedy match: spans from the first '[' to the last ']' in the reply.)
if (preg_match('/\[.*\]/s', $ai_suggestions, $json_match)) {
    $suggestions = @json_decode($json_match[0], true);
    if (is_array($suggestions)) {
        $T['ai_optimizations'] = $suggestions;
    }
}

// Static fallback when the AI returned nothing usable.
if (empty($T['ai_optimizations'])) {
    $T['ai_optimizations'] = [
        ['action' => 'Disk cleanup S204', 'priority' => 'medium', 'category' => 'INFRA', 'detail' => 'Log rotation + Docker prune pour libérer espace'],
        ['action' => 'Consolider modèles Ollama', 'priority' => 'low', 'category' => 'AI', 'detail' => 'weval-brain-v3 peut remplacer mistral + qwen2.5'],
        ['action' => 'Index Ethica', 'priority' => 'medium', 'category' => 'DATA', 'detail' => 'VACUUM ANALYZE sur medecins_validated (135K+)'],
    ];
}
|
|
|
|
// ── Log scan summary to the knowledge base (best-effort) ──
$c = @pg_connect("host=127.0.0.1 dbname=adx_system user=admin password=admin123");
if ($c) {
    $fact = sprintf(
        'TOPOLOGY SCAN %s: %d nodes, %d edges discovered. %d AI optimizations proposed.',
        date('d-M H:i'),
        count($T['nodes']),
        count($T['edges']),
        count($T['ai_optimizations'])
    );
    // $fact is escaped for the SQL string literal below.
    @pg_query($c, "INSERT INTO kb_learnings (category,fact,source,confidence,created_at) VALUES ('TOPOLOGY','" . pg_escape_string($c, $fact) . "','arch-autonomous',0.9,NOW())");
    pg_close($c);
}
|
|
|
|
// BPMN/SOA discovery appends further nodes/edges to $T in place.
require_once __DIR__ . '/architecture-bpmn.php';
discover_bpmn_soa($T);

// Final scan statistics.
$T['stats'] = [
    'nodes'   => count($T['nodes']),
    'edges'   => count($T['edges']),
    'scan_ms' => round((microtime(true) - $t0) * 1000),
];

// Persist the topology for the API consumer and echo it for the cron log.
$json = json_encode($T, JSON_PRETTY_PRINT | JSON_UNESCAPED_UNICODE);
file_put_contents('/var/www/html/api/architecture-topology.json', $json);
echo $json;
|
|
|
|
// ═══ HELPERS ═══
|
|
/**
 * Map a Docker container name onto a topology group.
 *
 * First matching rule wins, so order matters (e.g. a name containing both
 * 'qdrant' and 'db' classifies as 'ai', not 'data'). Unknown names fall
 * through to the generic 'service' group.
 *
 * @param string $name Container name as reported by `docker ps`.
 * @return string One of: auth, analytics, chat, ai, automation, crm,
 *                monitoring, data, service.
 */
function classify_docker($name)
{
    // Fix (idiom/perf): the original used preg_match() for plain literal
    // substrings; strpos() does the same check without PCRE overhead.
    // Rule order is preserved exactly from the original chain.
    $rules = [
        'auth'       => ['authentik'],
        'analytics'  => ['plausible', 'analytics'],
        'chat'       => ['mattermost', 'mm'],
        'ai'         => ['qdrant', 'searxng', 'ollama'],
        'automation' => ['n8n', 'flowise'],
        'crm'        => ['twenty', 'crm'],
        'monitoring' => ['kuma', 'prometheus', 'node-exporter', 'loki', 'grafana'],
        'data'       => ['redis', 'db', 'postgres'],
    ];
    foreach ($rules as $group => $needles) {
        foreach ($needles as $needle) {
            if (strpos($name, $needle) !== false) {
                return $group;
            }
        }
    }
    return 'service';
}
|