CI/CD auto-commit 2026-04-16 12:55

This commit is contained in:
WEVIA
2026-04-16 14:55:04 +02:00
parent 7cec0712bc
commit 44b7506487
14 changed files with 19832 additions and 29 deletions

View File

@@ -1,5 +1,5 @@
{
"generated": "2026-04-16 12:50:02",
"generated": "2026-04-16 12:55:02",
"version": "1.0",
"servers": [
{
@@ -10,7 +10,7 @@
"ssh": 49222,
"disk_pct": 85,
"disk_avail": "23G",
"uptime": "up 2 days, 2 hours, 58 minutes",
"uptime": "up 2 days, 3 hours, 3 minutes",
"nginx": "active",
"php_fpm": "active",
"php_version": "8.5.5"
@@ -36,7 +36,7 @@
"docker": [
{
"name": "loki",
"status": "Up 48 minutes",
"status": "Up 53 minutes",
"ports": ""
},
{
@@ -76,7 +76,7 @@
},
{
"name": "twenty",
"status": "Up 2 hours",
"status": "Up 3 hours",
"ports": ""
},
{
@@ -91,42 +91,42 @@
},
{
"name": "redis-weval",
"status": "Up 46 hours",
"status": "Up 47 hours",
"ports": ""
},
{
"name": "gitea",
"status": "Up 46 hours",
"status": "Up 47 hours",
"ports": ""
},
{
"name": "node-exporter",
"status": "Up 46 hours",
"status": "Up 47 hours",
"ports": ""
},
{
"name": "prometheus",
"status": "Up 46 hours",
"status": "Up 47 hours",
"ports": ""
},
{
"name": "searxng",
"status": "Up 46 hours",
"status": "Up 47 hours",
"ports": ""
},
{
"name": "uptime-kuma",
"status": "Up 46 hours (healthy)",
"status": "Up 47 hours (healthy)",
"ports": ""
},
{
"name": "vaultwarden",
"status": "Up 46 hours (healthy)",
"status": "Up 47 hours (healthy)",
"ports": ""
},
{
"name": "qdrant",
"status": "Up 46 hours",
"status": "Up 47 hours",
"ports": ""
}
],
@@ -301,7 +301,7 @@
"langfuse"
],
"key_tables": {
"kb_learnings": 5025,
"kb_learnings": 5027,
"kb_documents": 0,
"ethica_medecins": 50004,
"enterprise_agents": 0
@@ -468,8 +468,8 @@
],
"crons": {
"s204_root": 0,
"s204_www": 2,
"s204_total": 2,
"s204_www": 3,
"s204_total": 3,
"key_crons": [
{
"name": "L99 Master",
@@ -539,15 +539,15 @@
]
},
"wiki": {
"total_entries": 5025,
"total_entries": 5027,
"categories": [
{
"category": "AUTO-FIX",
"cnt": "2789"
"cnt": "2790"
},
{
"category": "TOPOLOGY",
"cnt": "880"
"cnt": "881"
},
{
"category": "DISCOVERY",
@@ -1656,6 +1656,10 @@
"optimizations": {
"recent_commits": [],
"auto_fixes": [
{
"fact": "AUTONOMY 16Apr 12:50: 1 fixes. Disk light cleanup 85%",
"created_at": "2026-04-16 14:50:05.137377"
},
{
"fact": "AUTONOMY 16Apr 12:45: 1 fixes. Disk light cleanup 85%",
"created_at": "2026-04-16 14:45:05.471488"
@@ -1691,10 +1695,6 @@
{
"fact": "AUTONOMY 16Apr 11:40: 1 fixes. Docker restart litellm",
"created_at": "2026-04-16 13:40:14.16824"
},
{
"fact": "AUTONOMY 16Apr 11:25: 1 fixes. Docker restart litellm",
"created_at": "2026-04-16 13:25:13.598448"
}
],
"architecture_decisions": [
@@ -1874,7 +1874,7 @@
"fixes_log": [],
"recommendations": []
},
"scan_time_ms": 1038,
"scan_time_ms": 743,
"gaps": [],
"score": 100,
"automation": {

39
api/find-watchdog.py Executable file
View File

@@ -0,0 +1,39 @@
#!/usr/bin/env python3
"""Hunt down whatever watchdog keeps restarting nginx / PHP-FPM.

Collects, best-effort and read-only:
  * scripts under common locations that mention an nginx restart/reload,
  * active root crontab entries,
  * systemd timers,
  * recently-touched watchdog / FPM log files,
  * nginx journal restart events from the last hour,
  * the process currently holding /var/log/phpfpm-watchdog.log open.

Prints one JSON summary (truncated to 5000 chars) on stdout.
"""
import json
import os
import subprocess


def _sh(cmd, timeout):
    """Run a shell pipeline, returning its stdout ('' on any failure)."""
    try:
        return subprocess.run(cmd, shell=True, capture_output=True,
                              text=True, timeout=timeout).stdout
    except (subprocess.SubprocessError, OSError):
        return ""  # best-effort probe: a failed command must not abort the scan


def _lines(text, limit=None):
    """Non-blank lines of *text*, optionally capped at *limit*."""
    found = [l for l in text.split("\n") if l.strip()]
    return found if limit is None else found[:limit]


results = {"scripts": [], "crons": [], "systemd": [], "logs_active": []}

# Directories that commonly hold ad-hoc watchdog scripts or cron definitions.
paths = ["/opt", "/usr/local/bin", "/usr/local/sbin", "/etc/cron.d",
         "/etc/cron.daily", "/etc/cron.hourly", "/var/spool/cron",
         "/root", "/home"]
for p in paths:
    if not os.path.exists(p):
        continue
    # \| is grep BRE alternation; doubled so Python emits a literal backslash.
    out = _sh(f"grep -rln 'restart nginx\\|reload nginx\\|nginx.*restart"
              f"\\|systemctl.*nginx' {p} 2>/dev/null | head -20", timeout=15)
    results["scripts"].extend(_lines(out))

# Active (non-comment, non-blank) root crontab entries.
results["crons"] = _lines(_sh(
    "sudo crontab -u root -l 2>/dev/null | grep -v '^#' | grep -v '^$'",
    timeout=5))

# Head of the systemd timer listing.
results["systemd"] = _lines(_sh(
    "systemctl list-timers --no-pager 2>/dev/null | head -20", timeout=5),
    limit=15)

# Watchdog / FPM logs sorted by mtime: recent writes mean the watchdog is alive.
results["logs_active"] = _lines(_sh(
    "ls -lt /var/log/*watchdog* /var/log/*fpm* 2>/dev/null | head -10",
    timeout=5), limit=10)

# nginx reload/restart events in the last hour (keep only line tails to
# bound the output size).
journal = _sh("sudo journalctl -u nginx --since '1 hour ago' 2>/dev/null"
              " | grep -i 'reload\\|restart\\|stopped\\|started' | tail -10",
              timeout=10)
results["nginx_journal_1h"] = [l[-150:] for l in _lines(journal)][:10]

# Which process holds the watchdog log open right now.
results["log_writer"] = _sh(
    "sudo lsof /var/log/phpfpm-watchdog.log 2>/dev/null | head -5",
    timeout=5).strip()[:500]

print(json.dumps(results, indent=2)[:5000])

46
api/investigate-500.py Executable file
View File

@@ -0,0 +1,46 @@
#!/usr/bin/env python3
"""Diagnose endpoints the smart classifier marked TRULY_BROKEN.

Re-POSTs a small sample of broken URLs (recording HTTP code, latency and a
body excerpt), then gathers supporting evidence: recent PHP-FPM warnings and
errors, the PHP include files the sampled scripts share (a common include is
the prime suspect for a shared 500), and the current load average.  Prints
one JSON report, truncated to 5000 chars.
"""
import json
import os
import re
import subprocess
import sys
import time
from collections import Counter

import requests

SMART = "/var/www/html/api/screens-health-smart.json"
if not os.path.exists(SMART):
    print(json.dumps({"error": "no smart_classify yet, run smart_classify_safe first"}))
    sys.exit(0)

with open(SMART) as f:
    smart = json.load(f)

# The classifier stores its verdict under "smart_status"; accept a legacy
# "status" key too so older report files still match.
broken = [p for p in smart.get("real_problems", [])
          if (p.get("smart_status") or p.get("status")) == "TRULY_BROKEN"]
sample = broken[:8]

# Re-test each sampled URL carefully, one at a time.
results = []
for s in sample:
    url = s["url"]
    t0 = time.time()
    try:
        r = requests.post(url, json={}, timeout=8)
        elapsed_ms = int((time.time() - t0) * 1000)
        results.append({"url": url, "code": r.status_code, "ms": elapsed_ms,
                        "body": r.text[:400] if r.text else "(empty body)"})
    except Exception as e:  # record any network failure and keep probing
        results.append({"url": url, "error": str(e)[:200]})

# Tail of the FPM log for corroborating warnings/errors.
err_log = subprocess.run("sudo tail -100 /var/log/php8.5-fpm.log 2>/dev/null",
                         shell=True, capture_output=True, text=True,
                         timeout=5).stdout

# Count require/include targets in the first 20 lines of each sampled script.
inc_re = re.compile(r'(?:require|include)(?:_once)?[ (]+["\']([^"\']+)["\']')
includes = Counter()
for s in sample:
    path = "/var/www/html" + s["url"].replace("https://weval-consulting.com", "")
    try:
        with open(path) as f:
            for line in f.readlines()[:20]:
                m = inc_re.search(line)
                if m:
                    includes[m.group(1)] += 1
    except (OSError, UnicodeDecodeError):
        pass  # sampled URL may not map to a readable local file

print(json.dumps({
    "sample_results": results,
    "common_includes": dict(includes.most_common(10)),
    "fpm_log_tail": [l for l in err_log.split("\n")
                     if "WARNING" in l or "ERROR" in l][-10:],
    "load_now": subprocess.run("uptime", shell=True, capture_output=True,
                               text=True).stdout.strip(),
}, indent=2)[:5000])

View File

@@ -13,5 +13,5 @@
"Run simulation",
"CEO insights"
],
"timestamp": "2026-04-16 12:50:03"
"timestamp": "2026-04-16 12:55:03"
}

45
api/reclassify-health.py Executable file
View File

@@ -0,0 +1,45 @@
#!/usr/bin/env python3
"""Fold smart-classifier verdicts back into the main screens-health report.

Reads screens-health-smart.json, maps its per-URL verdicts onto the statuses
used by the main report, rewrites matching entries in screens-health.json
(keeping a timestamped backup first), recomputes the aggregate status counts
and prints a JSON summary of what changed.
"""
import json
import os
import shutil
import time

H = "/var/www/html/api/screens-health.json"        # main report (rewritten)
S = "/var/www/html/api/screens-health-smart.json"  # classifier output (read)

if not os.path.exists(S):
    print(json.dumps({"error": "no smart classification"}))
    raise SystemExit(0)

with open(H) as f:
    health = json.load(f)
with open(S) as f:
    smart = json.load(f)

# Build URL -> smart verdict from either layout the classifier has emitted.
url_to_smart = {}
for fp in smart.get("false_positives", []):
    url_to_smart[fp["url"]] = fp.get("status") or fp.get("smart_status")
for status, urls in smart.get("details_by_status", {}).items():
    for u in urls:
        if u != "...":  # "..." is a truncation marker, not a URL
            url_to_smart[u] = status

# Smart verdict -> status shown in the main report; verdicts not listed here
# (e.g. TRULY_BROKEN) are deliberately left untouched.
remap = {"POST_OK": "UP", "POST_BAD_REQUEST": "API_POST", "POST_302": "UP",
         "POST_NOT_ALLOWED": "API_POST", "AUTH_REQUIRED": "PROTECTED"}

by_url = health.get("by_url", {})
reclassified = 0
for u, verdict in url_to_smart.items():
    if verdict in remap and u in by_url:
        by_url[u]["status"] = remap[verdict]
        by_url[u]["smart_classified"] = verdict
        reclassified += 1

# Recompute the aggregate status counts from scratch.
counts = {}
for d in by_url.values():
    s = d.get("status", "UNKNOWN")
    counts[s] = counts.get(s, 0) + 1
health["counts"] = counts
health["reclassified_at"] = time.strftime("%Y-%m-%dT%H:%M:%S+0200")
health["reclassified_count"] = reclassified

# Back up the report before overwriting it, then write atomically-enough via
# a context manager so the handle is flushed and closed.
bk = H + ".pre-reclass-" + time.strftime("%Y%m%d_%H%M%S")
shutil.copy(H, bk)
with open(H, "w") as f:
    json.dump(health, f, indent=2)

print(json.dumps({"reclassified": reclassified, "new_counts": counts,
                  "backup": bk, "smart_urls_mapped": len(url_to_smart)}))

View File

@@ -0,0 +1,55 @@
#!/usr/bin/env python3
"""Gently re-check URLs flagged DOWN/BROKEN/NOT_FOUND by the health scan.

Many "down" screens are POST-only endpoints that fail a plain GET probe.
This pass re-probes each candidate with an empty-JSON POST using a small
worker pool (5, reduced from 20) plus periodic sleeps, so the re-check
itself cannot trigger rate limiting.  Writes screens-health-smart.json
separating real problems from false positives and prints the aggregate
counts on stdout.
"""
import json
import sys
import time
from concurrent.futures import ThreadPoolExecutor, as_completed

import requests

H = "/var/www/html/api/screens-health.json"
OUT = "/var/www/html/api/screens-health-smart.json"
SAFE_WORKERS = 5  # reduced from 20 to keep the probe gentle

with open(H) as f:
    report = json.load(f)
by_url = report.get("by_url", {})

# Skip entries already rate-limited (code 429): re-probing would extend the ban.
to_recheck = [(u, d) for u, d in by_url.items()
              if d.get("status") in ("DOWN", "BROKEN", "NOT_FOUND")
              and d.get("code", 0) != 429]
print(f"SAFE re-checking {len(to_recheck)} (skip 429)", file=sys.stderr)


def probe(url, prev):
    """POST an empty JSON body to *url* and classify the response.

    Returns a dict with the previous scan's status/code, the POST status
    code (0 on network failure) and a "smart_status" verdict string.
    """
    out = {"url": url, "prev_status": prev["status"],
           "prev_code": prev.get("code", 0)}
    try:
        r = requests.post(url, json={}, timeout=4, allow_redirects=False)
        code = r.status_code
        out["post_code"] = code
        if code == 200:
            out["smart_status"] = "POST_OK"
        elif code in (400, 422):
            out["smart_status"] = "POST_BAD_REQUEST"
        elif code in (401, 403):
            out["smart_status"] = "AUTH_REQUIRED"
        elif code == 405:
            out["smart_status"] = "POST_NOT_ALLOWED"
        elif code == 404:
            out["smart_status"] = "TRULY_404"
        elif code in (500, 502, 503):
            out["smart_status"] = "TRULY_BROKEN"
        elif code == 429:
            out["smart_status"] = "RATE_LIMITED"
        else:
            out["smart_status"] = f"POST_{code}"
    except requests.exceptions.Timeout:
        out["smart_status"] = "REAL_DOWN_TIMEOUT"
        out["post_code"] = 0
    except Exception:  # DNS failure, refused connection, TLS error, ...
        out["smart_status"] = "REAL_DOWN_ERROR"
        out["post_code"] = 0
    return out


results = []
with ThreadPoolExecutor(max_workers=SAFE_WORKERS) as ex:
    futs = {ex.submit(probe, u, d): u for u, d in to_recheck}
    for i, fut in enumerate(as_completed(futs)):
        results.append(fut.result())
        if i and i % 50 == 0:  # back-pressure every 50 results (not at i=0)
            time.sleep(0.5)

# Aggregate counts per smart status.
agg = {}
for r in results:
    s = r["smart_status"]
    agg[s] = agg.get(s, 0) + 1

out = {"generated_at": time.strftime("%Y-%m-%dT%H:%M:%S+0200"),
       "rechecked": len(results),
       "smart_counts": agg, "real_problems": [], "false_positives": []}

# Anything that answered a POST sensibly was a false positive of the GET scan.
FALSE_POSITIVE_STATUSES = ("POST_OK", "POST_BAD_REQUEST", "POST_302",
                           "POST_NOT_ALLOWED", "AUTH_REQUIRED")
for r in results:
    if r["smart_status"] in FALSE_POSITIVE_STATUSES:
        out["false_positives"].append(r)
    else:
        out["real_problems"].append(r)

with open(OUT, "w") as f:
    json.dump(out, f, indent=2)
print(json.dumps(agg))

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -1,5 +1,5 @@
{
"ts": "2026-04-16 12:45:07",
"ts": "2026-04-16 12:50:07",
"r": {
"reconcile": "OK",
"nonreg": "NONREG: 153\/153 (100%)",
@@ -7,7 +7,8 @@
"docker": 19,
"disk": 85,
"hubs": 29,
"dirty": 0,
"dirty": 8,
"pushed": true,
"alerts": [
"CLEAR"
]

View File

@@ -1 +1 @@
{"ts":"14:50","status":"offline"}
{"ts":"14:54","status":"offline"}

View File

@@ -818,6 +818,35 @@ switch($action) {
}
break;
// Diagnostic actions: each shells out to a helper script under /var/www/html/api
// and returns its JSON output (or the raw text with parse_error=true if the
// script printed something json_decode cannot parse).
case "find_watchdog":
// Read-only probe that locates scripts/crons/timers restarting nginx/PHP-FPM.
$r = shell_exec("python3 /var/www/html/api/find-watchdog.py 2>&1 | head -c 5000");
$results = json_decode(trim($r), true) ?: ["raw"=>$r, "parse_error"=>true];
break;
case "smart_classify_safe":
// Fire-and-forget: start the low-concurrency (5-worker) reclassifier in the
// background, then report the results of the PREVIOUS run if one exists.
$log = "/tmp/smart_safe.log";
exec("nohup python3 /var/www/html/api/screens-health-smart-safe.py > $log 2>&1 &");
$sf = "/var/www/html/api/screens-health-smart.json";
if (file_exists($sf)) {
$sj = json_decode(file_get_contents($sf), true);
$results = ["status"=>"safe_classifier_running_bg", "previous_run"=>$sj["generated_at"]??"none",
"smart_counts"=>$sj["smart_counts"]??[], "rechecked"=>$sj["rechecked"]??0,
"false_positives_count"=>count($sj["false_positives"]??[]),
"real_problems_count"=>count($sj["real_problems"]??[]),
"log"=>$log, "workers"=>"5 (safe)"];
} else {
// No previous output yet: caller should poll again after the first run.
$results = ["status"=>"safe_classifier_first_run","wait_seconds"=>120, "log"=>$log];
}
break;
case "investigate_500":
// Deep-dive the TRULY_BROKEN sample: re-POST with timing, FPM log tail,
// shared include files, current load.
$r = shell_exec("python3 /var/www/html/api/investigate-500.py 2>&1 | head -c 5000");
$results = json_decode(trim($r), true) ?: ["raw"=>$r, "parse_error"=>true];
break;
case "reclassify_health":
// Applies smart verdicts to screens-health.json.  NOTE(review): runs via sudo —
// presumably the report file is not writable by the web user; confirm.
$r = shell_exec("sudo python3 /var/www/html/api/reclassify-health.py 2>&1 | head -c 3000");
$results = json_decode(trim($r), true) ?: ["raw"=>$r, "parse_error"=>true];
break;
default:
$results=["actions"=>["test_providers","webchat","nonreg","reconcile","git_push","ethica","docker_list","git_log","disk","ports","crons","services","playwright_scan","paperclip","slack"]];
}

View File

@@ -1 +1 @@
{"ts": "14:50", "status": "ok"}
{"ts": "14:55", "status": "ok"}

View File

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 400 200"><rect width="400" height="200" rx="20" fill="#0f172a"/><text x="200" y="110" text-anchor="middle" font-family="sans-serif" font-size="48" font-weight="bold" fill="#4d84ff">WEVAL</text></svg>

After

Width:  |  Height:  |  Size: 252 B

Binary file not shown.