#!/usr/bin/env python3
|
|
"""L99 UX AGENT v3.0 — FULL INFRASTRUCTURE E2E
|
|
Tests: 3 servers, 24 docker, 8 domains, all APIs, blade, DBs, AI providers,
|
|
BPMN processes, SOA services, crons, NonReg, architecture pipeline.
|
|
Cron: 0 */6 on S204
|
|
"""
|
|
import json,time,sys,os,subprocess as sp
|
|
# Global result accumulator: pass/fail/warn counters, per-test records, and run metadata.
# Serialized to /var/www/html/api/l99-ux-results.json at the end of the run.
R={"pass":0,"fail":0,"warn":0,"tests":[],"timestamp":"","version":"3.0"}
|
|
def ok(n, d=""):
    """Record test *n* as PASS (optional detail *d*) and echo it."""
    R["pass"] += 1
    R["tests"].append({"name": n, "status": "P", "detail": d})
    suffix = f" ({d})" if d else ""
    print(f" P {n}{suffix}")
|
|
def fail(n, d=""):
    """Record test *n* as FAIL (optional detail *d*) and echo it."""
    R["fail"] += 1
    R["tests"].append({"name": n, "status": "F", "detail": d})
    line = f" F {n} — {d}" if d else f" F {n}"
    print(line)
|
|
def warn(n, d=""):
    """Record test *n* as WARN (optional detail *d*) and echo it."""
    R["warn"] += 1
    R["tests"].append({"name": n, "status": "W", "detail": d})
    line = f" W {n} — {d}" if d else f" W {n}"
    print(line)
|
|
def sh(c, t=8):
    """Run shell command *c* with a *t*-second timeout.

    Returns stripped stdout, or "" on timeout/non-decodable output/any
    other failure. Never raises — callers rely on the empty-string contract.
    """
    try:
        r = sp.run(c, shell=True, capture_output=True, text=True, timeout=t)
        return r.stdout.strip()
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate and the cron run can be aborted cleanly.
        return ""
|
|
def curl_code(u, t=8):
    """Fetch URL *u* with curl and return the HTTP status code as a string."""
    cmd = f'curl -sk -o /dev/null -w "%{{http_code}}" "{u}" --max-time {t}'
    return sh(cmd)
|
|
def curl_json(u, t=10):
    """GET URL *u* with curl and parse the body as JSON.

    Returns the decoded object, or None when the request fails or the
    body is not valid JSON (sh() returns "" on error, which json.loads
    rejects).
    """
    try:
        body = sh(f'curl -sk "{u}" --max-time {t}')
        return json.loads(body)
    except Exception:
        # Narrowed from a bare `except:`; typically json.JSONDecodeError.
        return None
|
|
def sentinel(c):
    """Execute shell command *c* on S95 via the sentinel-brain exec API.

    Returns the raw response body, or "" on any transport failure.
    Note: *c* is interpolated into the query string unencoded, so it must
    be URL-safe (callers pass simple tokens like "hostname").
    """
    # sh() already returns "" on every failure, so the old try/except
    # wrapper here was dead code and has been removed.
    return sh(f"curl -sk 'http://10.1.0.3:5890/api/sentinel-brain.php?action=exec&cmd={c}' --max-time 8")
|
|
|
|
# Base URL for all site-level checks.
SITE="https://weval-consulting.com"
print("=== L99 UX AGENT v3.0 — FULL INFRA E2E ===\n")
R["timestamp"]=time.strftime("%Y-%m-%d %H:%M:%S")
|
|
|
|
# ═══════════════════════════════════════════
# S1: SERVERS — S204 local health, S95 via sentinel, S151 via HTTP
# ═══════════════════════════════════════════
print("--- S1: Servers ---")
# S204: root filesystem usage (percentage from df)
disk=sh("df / --output=pcent | tail -1").strip().replace('%','')
try:
    d=int(disk)
    if d<90:ok("S204_DISK",f"{d}%")
    elif d<95:warn("S204_DISK",f"{d}%")
    else:fail("S204_DISK",f"{d}% CRITICAL")
except:fail("S204_DISK","Cannot read")  # NOTE(review): bare except — consider `except ValueError`

# Core local services via systemd
nginx=sh("systemctl is-active nginx")
if nginx=="active":ok("S204_NGINX")
else:fail("S204_NGINX",nginx)

php=sh("systemctl is-active php8.5-fpm")
if php=="active":ok("S204_PHP")
else:fail("S204_PHP",php)

# S95: remote hostname via sentinel exec; expected to contain 'snapshot'
s95=sentinel("hostname")
if s95 and 'snapshot' in s95:ok("S95_ALIVE",s95[:30])
else:fail("S95_ALIVE",s95[:30] if s95 else "No response")

# S151: reachability check of a site hosted there (redirects count as alive)
s151_code=curl_code("https://culturellemejean.charity/",5)
if s151_code in ["200","301","302"]:ok("S151_ALIVE",s151_code)
else:warn("S151_ALIVE",f"HTTP {s151_code}")
|
|
|
# ═══════════════════════════════════════════
# S2: DOCKER — parse `docker ps` into {name: status} and check expected set
# ═══════════════════════════════════════════
print("\n--- S2: Docker Containers ---")
docker_raw=sh("docker ps --format '{{.Names}}:{{.Status}}' --no-trunc",10)
containers={}
for line in docker_raw.split('\n'):
    if ':' not in line:continue
    # split once: container names have no ':', statuses may ("Up 3 hours (healthy)")
    name,status=line.split(':',1)
    containers[name.strip()]=status.strip()

# Containers that must be running (subset of the 24 deployed).
expected=['authentik-server','authentik-worker','authentik-db','authentik-redis',
    'mattermost','n8n','uptime-kuma','qdrant','searxng','plausible','twenty',
    'vaultwarden','open-webui','flowise','prometheus','node-exporter','loki']

for c in expected:
    if c in containers:
        st=containers[c]
        if 'Up' in st:ok(f"DOCKER_{c}","healthy" if "healthy" in st else "up")
        else:fail(f"DOCKER_{c}",st[:30])
    else:fail(f"DOCKER_{c}","NOT RUNNING")

# Aggregate: all discovered containers should be 'Up'.
total_up=sum(1 for s in containers.values() if 'Up' in s)
ok("DOCKER_TOTAL",f"{total_up}/{len(containers)} up") if total_up==len(containers) else warn("DOCKER_TOTAL",f"{total_up}/{len(containers)}")
|
|
|
|
# ═══════════════════════════════════════════
# S3: BLADE GPU — heartbeat JSON published by the blade, freshness in minutes
# ═══════════════════════════════════════════
print("\n--- S3: Blade GPU ---")
blade=curl_json(f"{SITE}/api/blade-tasks/heartbeat.json")
if blade:
    ts=blade.get('ts','')
    hostname=blade.get('hostname','')
    ok("BLADE_HEARTBEAT",f"{hostname} @ {ts[:19]}")
    # Freshness: parse ISO timestamp ('Z' normalized to +00:00) and compare
    # in the heartbeat's own timezone to avoid naive/aware mixing.
    try:
        from datetime import datetime
        bt=datetime.fromisoformat(ts.replace('Z','+00:00'))
        age=(datetime.now(bt.tzinfo)-bt).total_seconds()/60
        if age<15:ok("BLADE_FRESH",f"{age:.0f}min ago")
        elif age<60:warn("BLADE_FRESH",f"{age:.0f}min ago")
        else:warn("BLADE_STALE",f"{age:.0f}min ago")
    except:warn("BLADE_TIME","Cannot parse timestamp")  # NOTE(review): bare except
else:warn("BLADE_HEARTBEAT","No heartbeat file")
|
|
|
|
# ═══════════════════════════════════════════
# S4: DATABASES — PostgreSQL connectivity, expected DBs, key table counts
# ═══════════════════════════════════════════
print("\n--- S4: Databases ---")
import psycopg2
try:
    # NOTE(review): credentials are hard-coded here — consider env vars / .pgpass.
    c=psycopg2.connect("host=127.0.0.1 dbname=postgres user=admin password=admin123")
    cur=c.cursor()
    cur.execute("SELECT datname FROM pg_database WHERE datistemplate=false")
    dbs=[r[0] for r in cur.fetchall()]
    ok("PG_CONNECT",f"{len(dbs)} databases")
    c.close()

    # Databases that must exist on this cluster.
    expected_dbs=['adx_system','wevia_db','twenty_db','mattermost_db','paperclip','deerflow']
    for db in expected_dbs:
        if db in dbs:ok(f"DB_{db}")
        else:fail(f"DB_{db}","Missing")

    # Key table counts in adx_system (thresholds are sanity floors, not exact).
    c2=psycopg2.connect("host=127.0.0.1 dbname=adx_system user=admin password=admin123")
    cur2=c2.cursor()

    cur2.execute("SELECT count(*) FROM kb_learnings")
    kb=cur2.fetchone()[0]
    if kb>100:ok("KB_ENTRIES",f"{kb}")
    else:warn("KB_ENTRIES",f"Only {kb}")

    cur2.execute("SELECT count(*) FROM ethica.medecins_validated")
    ethica=cur2.fetchone()[0]
    if ethica>10000:ok("ETHICA_HCP",f"{ethica:,}")
    else:warn("ETHICA_HCP",f"{ethica}")

    c2.close()
except Exception as e:
    # Any DB error (connect or query) is reported as a single PG_CONNECT failure.
    fail("PG_CONNECT",str(e)[:60])
|
|
|
|
# ═══════════════════════════════════════════
# S5: AI STACK — Ollama, Qdrant, WEVIA master API, end-to-end AI call, MiroFish
# ═══════════════════════════════════════════
print("\n--- S5: AI Stack ---")
# Ollama: list models via local API
ollama=curl_json("http://127.0.0.1:11435/api/tags")
if ollama and 'models' in ollama:
    models=ollama['models']
    ok("OLLAMA_UP",f"{len(models)} models")
    for m in models:
        ok(f"OLLAMA_{m['name'].split(':')[0]}",m['details'].get('parameter_size','?'))
else:fail("OLLAMA_UP","Cannot reach :11435")

# Qdrant: enumerate collections and sum vector counts
qd=curl_json("http://127.0.0.1:6333/collections")
if qd and 'result' in qd:
    colls=qd['result'].get('collections',[])
    ok("QDRANT_UP",f"{len(colls)} collections")
    total_vec=0
    for c in colls:
        info=curl_json(f"http://127.0.0.1:6333/collections/{c['name']}")
        vecs=info.get('result',{}).get('points_count',0) if info else 0
        total_vec+=vecs
        ok(f"QDRANT_{c['name']}",f"{vecs:,} vectors")
    ok("QDRANT_TOTAL",f"{total_vec:,} vectors")
else:fail("QDRANT_UP","Cannot reach :6333")

# WEVIA Master health endpoint — must report its Ollama tier as UP
wm=curl_json(f"{SITE}/api/wevia-master-api.php?health")
if wm and wm.get('ollama')=='UP':
    ok("WEVIA_MASTER",f"T1={wm.get('tier1_providers',0)} providers")
else:fail("WEVIA_MASTER","DOWN")

# Full AI request through the public endpoint (list-form curl, no shell).
try:
    r=sp.run(['curl','-sk','-X','POST',f'{SITE}/api/weval-ia','-H','Content-Type: application/json',
        '-d','{"message":"test","widget":true}','--max-time','20'],capture_output=True,text=True,timeout=25)
    d=json.loads(r.stdout)
    # Accept either {provider,result} or {response} payload shapes.
    if (d.get('provider') or d.get('result')) and len(d.get('result','') or d.get('response',''))>5:
        ok("AI_PIPELINE",f"{d.get('provider','chatbot')}: {len(d.get('result','') or d.get('response',''))} chars")
    else:fail("AI_PIPELINE","No provider or empty response")
except Exception as e:fail("AI_PIPELINE",str(e)[:50])

# MiroFish Swarm Intelligence: local API, report listing, public bridge, CEO view
mf=curl_json("http://127.0.0.1:5001/health")
if mf and mf.get("status")=="ok":ok("MIROFISH_API","UP")
else:fail("MIROFISH_API","DOWN")
mfr=curl_json("http://127.0.0.1:5001/api/report/list")
if mfr and mfr.get("success"):ok("MIROFISH_REPORTS",str(mfr.get("count",0))+" reports")
else:warn("MIROFISH_REPORTS","Cannot list")
mfb=curl_json("https://weval-consulting.com/api/mirofish-bridge.php?action=health")
if mfb and mfb.get("status")=="active":ok("MIROFISH_BRIDGE",mfb.get("service","?"))
else:fail("MIROFISH_BRIDGE","DOWN")
mfc=curl_json("https://weval-consulting.com/api/mirofish-bridge.php?action=ceo")
if mfc and "mirofish" in mfc:ok("MIROFISH_CEO","score="+str(mfc.get("infrastructure",{}).get("score",0)))
else:fail("MIROFISH_CEO","No CEO data")
|
|
|
|
# ═══════════════════════════════════════════
# S95 MTA ports: fetch `ss -tln` output remotely via sentinel exec, then
# check each expected listener by substring match on ":<port> ".
# ═══════════════════════════════════════════
s95_ss = sh("curl -sf 'http://10.1.0.3:5890/api/sentinel-brain.php?action=exec&cmd=ss%20-tln' --max-time 5")
for svc, port in [("pmta",25),("kumomta",587),("postfix",2525),("sentinel",5890),("adx",5821)]:
    if f":{port} " in s95_ss:
        ok(f"MTA_{svc}",str(port))
    else:
        fail(f"MTA_{svc}",f":{port} not on S95")
|
|
|
|
# SSL CERT EXPIRY — parse `openssl x509 -enddate` and grade days remaining.
# Fixed: a missing openssl binary used to raise FileNotFoundError and abort
# the entire run before results were saved; an unreadable cert used to be
# skipped silently with no test recorded at all.
import subprocess as _sp
import datetime
try:
    r=_sp.run(['openssl','x509','-in','/var/www/weval/ssl/fullchain.pem','-noout','-enddate'],capture_output=True,text=True,timeout=5)
except Exception as e:
    r=None
    fail("SSL_EXPIRY",f"openssl failed: {str(e)[:45]}")
if r is not None:
    if r.stdout:
        # stdout looks like "notAfter=<Mon DD HH:MM:SS YYYY GMT>".
        exp_str=r.stdout.strip().split('=')[1]
        exp=datetime.datetime.strptime(exp_str,'%b %d %H:%M:%S %Y %Z')
        # strptime('%Z') yields a naive datetime, so compare against naive UTC.
        days=(exp-datetime.datetime.utcnow()).days
        if days>14: ok("SSL_EXPIRY",f"{days} days")
        elif days>0: warn("SSL_EXPIRY",f"{days} days!")
        else: fail("SSL_EXPIRY",f"EXPIRED {days}d ago!")
    else:
        # openssl ran but produced nothing (bad path/cert) — report instead of skipping.
        fail("SSL_EXPIRY",(r.stderr or "cannot read cert").strip()[:60])
|
|
|
|
# REGRESSION CHECK: Critical APIs must return 200 (list-form curl, no shell).
import subprocess as _sp2
for api in ['optimisation-engine.php','ads-api.php','wevia-manifest.php','wevia-action-engine.php?action=help','wevia-dashboard.php']:
    r=_sp2.run(['curl','-sk','-o','/dev/null','-w','%{http_code}',f'https://weval-consulting.com/api/{api}','--max-time','5'],capture_output=True,text=True,timeout=8)
    if r.stdout=='200': ok(f"REGRESSION_API:{api.split('?')[0][:20]}")
    else: fail(f"REGRESSION_API:{api.split('?')[0][:20]}",f"HTTP {r.stdout}")

# REGRESSION CHECK: Critical pages (302 tolerated — auth redirect is acceptable).
for page,name in [('/',':home'),('/enterprise-model.html',':enterprise'),('/wevia.html',':wevia')]:
    r=_sp2.run(['curl','-sk','-o','/dev/null','-w','%{http_code}',f'https://weval-consulting.com{page}','--max-time','5'],capture_output=True,text=True,timeout=8)
    if r.stdout in ['200','302']: ok(f"REGRESSION_PAGE{name}")
    else: fail(f"REGRESSION_PAGE{name}",f"HTTP {r.stdout}")
|
|
|
|
# S5b: ALL-SERVER PORT MONITORING — raw TCP connect checks per host.
print(chr(10)+"--- S5b: All Server Ports ---")  # chr(10) == "\n"
import socket
|
def ckp(h, p, t=3):
    """Return True if TCP host *h* accepts a connection on port *p* within *t* seconds."""
    try:
        # create_connection + with: resolves the address and guarantees the
        # socket is closed on every path (the old version leaked on close errors).
        with socket.create_connection((h, p), timeout=t):
            return True
    except OSError:
        # Narrowed from bare `except:`: refused, timed out, unreachable, DNS failure.
        return False
|
|
|
|
# S204 (local): every expected listener must accept a TCP connection.
for svc,port in [("nginx",80),("php-fpm",9000),("postgresql",5432),("deerflow",2024),("deerflow-gw",8001),("deerflow-fe",3000),("ollama",11435),("qdrant",6333),("authentik",9090),("mirofish-api",5001),("mirofish-fe",3050),("n8n",5678),("mattermost",8065),("searxng",8888),("loki",3100)]:
    if ckp("127.0.0.1",port): ok(f"S204_{svc}",str(port))
    else: fail(f"S204_{svc}",f":{port} DOWN")

# S95 (remote): re-fetch `ss -tln` via sentinel and match ":<port> " substrings.
s95_ss=sh("curl -sf 'http://10.1.0.3:5890/api/sentinel-brain.php?action=exec&cmd=ss%20-tln' --max-time 5")
for svc,port in [("pmta",25),("kumomta",587),("kumomta-api",8010),("postfix",2525),("postgresql",5432),("sentinel",5890),("adx",5821),("arsenal",5822)]:
    if f":{port} " in s95_ss: ok(f"S95_{svc}",str(port))
    else: fail(f"S95_{svc}",f":{port} DOWN")

# S151: HTTP-level check only (no direct TCP access from here).
for svc,url in [("nginx","https://culturellemejean.charity/")]:
    code=curl_code(url,5)
    if code in ["200","301","302"]: ok(f"S151_{svc}",code)
    else: fail(f"S151_{svc}",f"HTTP {code}")

# Blade: presence of a heartbeat timestamp is enough here (freshness checked in S3).
bl=curl_json("https://weval-consulting.com/api/blade-tasks/heartbeat.json")
if bl and bl.get("ts"): ok("BLADE_LIVE",bl.get("hostname","?"))
else: warn("BLADE_HB","no data")
|
|
|
|
# S6: AUTH DOMAINS (all 8) — each protected domain should 302-redirect
# unauthenticated requests to the Authentik outpost.
# ═══════════════════════════════════════════
print("\n--- S6: Auth Domains ---")
domains=[
    ("weval-consulting.com","/products/workspace.html"),
    ("wevads.weval-consulting.com","/auth/login.html"),
    ("analytics.weval-consulting.com","/"),
    ("crm.weval-consulting.com","/"),
    ("deerflow.weval-consulting.com","/"),
    ("mm.weval-consulting.com","/"),
    ("monitor.weval-consulting.com","/"),
    ("n8n.weval-consulting.com","/"),
]
for domain,path in domains:
    code=curl_code(f"https://{domain}{path}",5)
    if code=="302":
        # Verify the redirect target is the auth outpost, not some other bounce.
        redir=sh(f'curl -sk -o /dev/null -w "%{{redirect_url}}" "https://{domain}{path}" --max-time 5 --max-redirs 0')
        if 'outpost' in redir:ok(f"AUTH_{domain.split('.')[0]}","302→outpost")
        else:fail(f"AUTH_{domain.split('.')[0]}",f"302 but not outpost: {redir[:40]}")
    elif code=="200":warn(f"AUTH_{domain.split('.')[0]}","200 (public or session)")
    else:fail(f"AUTH_{domain.split('.')[0]}",f"HTTP {code}")
|
|
|
|
# ═══════════════════════════════════════════
# S7: CRONS HEALTH — active crontab entry count plus freshness of key outputs
# ═══════════════════════════════════════════
print("\n--- S7: Crons ---")
# Count non-comment, non-blank crontab lines.
cron_count=sh("crontab -l 2>/dev/null | grep -cv '^#\\|^$'")
try:
    cc=int(cron_count)
    if cc>=20:ok("CRONS_COUNT",f"{cc} active")
    else:warn("CRONS_COUNT",f"Only {cc}")
except:fail("CRONS_COUNT","Cannot read")  # NOTE(review): bare except — consider ValueError

# Each key cron must have produced a JSON file containing its timestamp key.
from datetime import datetime
cron_checks=[
    ("L99_MASTER","/var/www/html/api/l99-ux-results.json","timestamp"),
    ("NONREG","/var/www/html/api/nonreg-latest.json","ts"),
    ("ARCH_INDEX","/var/www/html/api/architecture-index.json","generated"),
    ("ARCH_TOPO","/var/www/html/api/architecture-topology.json","generated"),
]
for name,path,ts_key in cron_checks:
    try:
        # NOTE(review): open() without a `with` — file handle closed only by GC.
        d=json.load(open(path))
        ts=d.get(ts_key,'')
        if ts:
            ok(f"CRON_{name}",f"ts={ts[:16]}")
        else:
            warn(f"CRON_{name}","No timestamp")
    except:fail(f"CRON_{name}","File missing or invalid")
|
|
|
|
# ═══════════════════════════════════════════
# S8: NONREG STATUS — latest non-regression run must be complete and green
# ═══════════════════════════════════════════
print("\n--- S8: NonReg ---")
nr=curl_json(f"{SITE}/api/nonreg-latest.json")
if nr:
    total=nr.get('total',0);passed=nr.get('pass',0)
    if total>100 and passed==total:ok("NONREG_ALL",f"{passed}/{total}")
    elif total>100:warn("NONREG_PARTIAL",f"{passed}/{total}")
    else:fail("NONREG_COUNT",f"Only {total} tests")
else:fail("NONREG_FILE","Cannot load")
|
|
|
|
# ═══════════════════════════════════════════
# S9: ARCHITECTURE PIPELINE — index + topology JSON produced by the arch crons
# ═══════════════════════════════════════════
print("\n--- S9: Architecture Pipeline ---")
idx=curl_json(f"{SITE}/api/architecture-index.json")
topo=curl_json(f"{SITE}/api/architecture-topology.json")

if idx:
    # Overall recommendation score out of 100.
    score=idx.get('recommendations',{}).get('score',0)
    if score>=80:ok("ARCH_SCORE",f"{score}/100")
    elif score>=50:warn("ARCH_SCORE",f"{score}/100")
    else:fail("ARCH_SCORE",f"{score}/100")

    # Number of top-level sections in the index document.
    keys=len(idx)
    if keys>=20:ok("ARCH_SECTIONS",f"{keys}")
    else:warn("ARCH_SECTIONS",f"Only {keys}")

    # Recommendation count (informational only — always recorded as pass).
    reco_count=idx.get('recommendations',{}).get('total',0)
    ok("ARCH_RECO",f"{reco_count} items")

    # This agent's own results should be wired back into the index.
    ux=idx.get('ux_agent',{})
    if ux.get('pass',0)>0:ok("ARCH_UX_WIRED",f"{ux['pass']}/{ux.get('total',0)}")
    else:warn("ARCH_UX_WIRED","Not in index")
else:fail("ARCH_INDEX","Cannot load")

if topo:
    nodes=topo.get('stats',{}).get('nodes',0)
    edges=topo.get('stats',{}).get('edges',0)
    if nodes>=40:ok("TOPO_NODES",f"{nodes}")
    else:fail("TOPO_NODES",f"Only {nodes}")
    if edges>=20:ok("TOPO_EDGES",f"{edges}")
    else:fail("TOPO_EDGES",f"Only {edges}")
else:fail("TOPO_FILE","Cannot load")
|
|
|
|
# ═══════════════════════════════════════════
# S10: BPMN PROCESS VERIFICATION — per-process step counts + automation ratio
# ═══════════════════════════════════════════
print("\n--- S10: BPMN Processes ---")
if topo and topo.get('bpmn_processes'):
    procs=topo['bpmn_processes']
    ok("BPMN_COUNT",f"{len(procs)}")
    total_auto=0;total_steps=0
    for p in procs:
        steps=p.get('steps',[])
        auto=sum(1 for s in steps if s.get('status')=='automated')
        total_auto+=auto;total_steps+=len(steps)
        status=p.get('status','?')
        # A meaningful process has at least 3 modeled steps.
        if len(steps)>=3:ok(f"BPMN_{p['id']}",f"{p['name']}: {auto}/{len(steps)} auto ({status})")
        else:fail(f"BPMN_{p['id']}",f"Only {len(steps)} steps")

    # Overall automation percentage across all processes (guard against /0).
    pct=round(total_auto/total_steps*100) if total_steps else 0
    if pct>=60:ok("BPMN_AUTOMATION",f"{pct}% ({total_auto}/{total_steps})")
    else:warn("BPMN_AUTOMATION",f"{pct}%")
else:fail("BPMN_DATA","Not in topology")
|
|
|
|
# ═══════════════════════════════════════════
# S11: SOA SERVICES — group services by 'group' and check all active per group
# ═══════════════════════════════════════════
print("\n--- S11: SOA Services ---")
if topo and topo.get('soa_services'):
    svcs=topo['soa_services']
    groups={}
    for s in svcs:
        g=s.get('group','other')
        if g not in groups:groups[g]=[]
        groups[g].append(s)

    ok("SOA_TOTAL",f"{len(svcs)} services")
    for g,items in sorted(groups.items()):
        active=sum(1 for s in items if s.get('status') in ['active','up','healthy'])
        if active==len(items):ok(f"SOA_{g}",f"{active}/{len(items)}")
        else:warn(f"SOA_{g}",f"{active}/{len(items)} active")
else:fail("SOA_DATA","Not in topology")
|
|
|
|
# ═══════════════════════════════════════════
# S12: BROWSER E2E — Playwright/Chromium: SSO login, data load, tab rendering,
# gauge centering, drill-down modal, search navigation, manual scan trigger,
# JS console cleanliness and CSS design tokens. S13 reuses the same page.
# Any browser-level failure downgrades to a single E2E_BROWSER warning.
# ═══════════════════════════════════════════
print("\n--- S12: Browser E2E ---")
try:
    from playwright.sync_api import sync_playwright
    with sync_playwright() as pw:
        br=pw.chromium.launch(headless=True,args=['--no-sandbox','--disable-gpu','--disable-dev-shm-usage'],executable_path='/usr/bin/google-chrome-stable')
        ctx=br.new_context(ignore_https_errors=True,viewport={"width":1440,"height":900})
        pg=ctx.new_page()
        # Collect browser console errors for the E2E_JS_CLEAN check.
        js_err=[]
        pg.on("console",lambda m:js_err.append(m.text) if m.type=="error" else None)

        pg.goto(f"{SITE}/architecture.html",wait_until='networkidle',timeout=30000)
        time.sleep(2)
        # If the Authentik login form is present, drive the 2-step login flow.
        # NOTE(review): credentials are hard-coded in this script.
        uid=pg.query_selector('input[name="uidField"]')
        if uid:
            uid.fill('yacine');time.sleep(0.3)
            pg.query_selector('button[type="submit"]').click();time.sleep(3)
            ppw=pg.query_selector('input[type="password"]')
            if ppw:ppw.fill('YacineWeval2026');time.sleep(0.3);pg.query_selector('button[type="submit"]').click();time.sleep(6)
            ok("E2E_LOGIN")
            pg.goto(f"{SITE}/architecture.html",wait_until='networkidle',timeout=30000)
        else:
            ok("E2E_SESSION","Already authenticated")

        time.sleep(5)
        # Ensure page fully loaded before probing page state.
        pg.wait_for_load_state('networkidle')
        time.sleep(3)

        # Data loaded: page globals D (data) and T (topology) populated by its JS.
        try:
            check=pg.evaluate("()=>{if(typeof D==='undefined')return null;return{docker:D.docker?.length,score:D.recommendations?.score,nodes:typeof T!='undefined'&&T?T.stats?.nodes:0}}")
        except: check=None
        if check:
            ok("E2E_DATA",f"Docker={check.get('docker',0)} Score={check.get('score',0)} Nodes={check.get('nodes',0)}")
        else:warn("E2E_DATA","D not loaded (auth required)")

        # All 12 tabs render: click each tab button and verify its panel has content.
        tabs_ok=0
        for tid in ['overview','reco','cortex','pipes','apps','infra','ai','data','bpmn','soa','topo','log']:
            btn=pg.query_selector(f'button[data-id="{tid}"]')
            if btn:
                btn.click();time.sleep(0.4)
                pnl=pg.query_selector(f'#p-{tid}')
                if pnl and pg.evaluate("el=>el.offsetHeight>30&&el.innerHTML.length>50",pnl):tabs_ok+=1
        if tabs_ok==12:ok("E2E_TABS",f"{tabs_ok}/12")
        else:warn("E2E_TABS",f"{tabs_ok}/12")

        # Gauges centered: value element (.rv) must sit within 2px of ring center.
        pg.query_selector('button[data-id="overview"]').click();time.sleep(1)
        rings=pg.query_selector_all('.ring')
        for i,ring in enumerate(rings[:2]):
            c=pg.evaluate("""el=>{const r=el.getBoundingClientRect();const v=el.querySelector('.rv');if(!v)return null;const vr=v.getBoundingClientRect();return{x:Math.round(Math.abs((vr.left+vr.width/2)-(r.left+r.width/2))),y:Math.round(Math.abs((vr.top+vr.height/2)-(r.top+r.height/2)))}}""",ring)
            label=['Health','Automation'][i]
            if c and c['x']<=2 and c['y']<=2:ok(f"E2E_CENTER_{label}",f"X={c['x']}px Y={c['y']}px")
            elif c:fail(f"E2E_CENTER_{label}",f"X={c['x']}px Y={c['y']}px")

        # Drill-down: clicking a card should open a modal.
        cards=pg.query_selector_all('.cd.click')
        if cards:
            cards[0].click();time.sleep(0.8)
            if pg.query_selector('.modal-bg.open'):
                ok("E2E_DRILL");pg.query_selector('.modal-x').click()
            else:warn("E2E_DRILL")

        # Search → navigate: typing in #gs should produce results that switch tabs.
        gs=pg.query_selector('#gs')
        if gs:
            gs.fill('qdrant');time.sleep(1)
            sr=pg.query_selector_all('.sr-item')
            if sr:
                pg.evaluate("()=>document.querySelector('.sr-item')?.click()");time.sleep(0.5)
                tab=pg.evaluate("()=>document.querySelector('.tab.on')?.dataset?.id||'none'")
                ok("E2E_SEARCH",f"→{tab}")
            else:warn("E2E_SEARCH","No results")
            gs.fill('')

        # Trigger scan: clicking #wmb should refresh D.generated.
        pg.query_selector('button[data-id="overview"]').click();time.sleep(0.5)
        trig=pg.query_selector('#wmb')
        if trig and pg.evaluate("el=>el.offsetHeight>0",trig):
            try: old_ts=pg.evaluate('()=>window.D?window.D.generated:null')
            except: old_ts='N/A'
            trig.scroll_into_view_if_needed();trig.click();time.sleep(5)
            try: new_ts=pg.evaluate('()=>window.D?window.D.generated:null')
            except: new_ts='N/A'
            if new_ts!=old_ts:ok("E2E_TRIGGER",f"{old_ts[:16]}→{new_ts[:16]}")
            else:warn("E2E_TRIGGER","Same timestamp")

        # JS errors: ignore network noise (404 / failed loads), flag the rest.
        real=[e for e in js_err if "404" not in e and "Failed to load" not in e]
        if not real:ok("E2E_JS_CLEAN",f"0 errors ({len(js_err)} network filtered)")
        else:warn("E2E_JS_ERRORS",f"{len(real)} errors")

        # Design tokens: --bg CSS custom property must match the expected theme.
        tk=pg.evaluate("()=>{const s=getComputedStyle(document.documentElement);return{bg:s.getPropertyValue('--bg').trim(),card:s.getPropertyValue('--card').trim()}}")
        if tk.get('bg')=='#09090b':ok("E2E_TOKENS",f"bg={tk['bg']}")
        else:fail("E2E_TOKENS",f"bg={tk.get('bg')}")

        # ═══ S13: WEVIA MASTER PAGE — same browser session ═══
        print("\n--- S13: WEVIA Master Page ---")
        try:
            pg.goto(f"{SITE}/wevia-master.html",wait_until='networkidle',timeout=30000)
            time.sleep(4)
            if 'WEVIA' in pg.title():
                ok("WM_PAGE",pg.title()[:30])
            else:
                fail("WM_PAGE",pg.title()[:30])
            # All .wcard elements should be text-centered.
            wc=pg.evaluate("()=>{const c=document.querySelectorAll('.wcard');const n=Array.from(c).filter(x=>getComputedStyle(x).textAlign==='center').length;return{t:c.length,c:n}}")
            if wc and wc.get('t',0)>0 and wc.get('c',0)==wc.get('t',0):
                ok("WM_CARDS_CENTER",str(wc['c'])+"/"+str(wc['t']))
            elif wc and wc.get('t',0)>0:
                fail("WM_CARDS_CENTER",str(wc.get('c',0))+"/"+str(wc.get('t',0)))
            # Scroll button must be wired to scrollChat().
            sb=pg.evaluate("()=>{const b=document.getElementById('scrollBtn');return b?b.getAttribute('onclick'):'NONE'}")
            if sb and 'scrollChat' in sb:
                ok("WM_SCROLL_BTN",sb)
            else:
                fail("WM_SCROLL_BTN",str(sb))
            ci=pg.query_selector("textarea")
            if ci:ok("WM_CHAT_INPUT")
            else:fail("WM_CHAT_INPUT")
            sl=pg.evaluate("()=>document.querySelectorAll('[class*=sidebar] [onclick]').length")
            if sl>=5:ok("WM_SIDEBAR",str(sl)+" items")
            else:warn("WM_SIDEBAR",str(sl))
        except Exception as e:
            fail("WM_E2E",str(e)[:60])

        # ═══ S14: PUBLIC PAGES (header only — checks run after browser close) ═══
        print("\n--- S14: Public Pages ---")

        br.close()
except Exception as e:
    # Browser missing/crashed: degrade gracefully, never abort the run.
    warn("E2E_BROWSER",str(e)[:80])
|
|
|
|
|
|
# S14 continued (no browser needed): public pages must answer 200.
pub_pages=[("/","Home"),("/wevia.html","WEVIA"),("/wevia-widget.html","Widget"),("/enterprise-model.html","Enterprise")]
for path,name in pub_pages:
    code=curl_code(f"{SITE}{path}",5)
    if code=="200":ok(f"PUB_{name}")
    else:fail(f"PUB_{name}",f"HTTP {code}")

# ═══ SAVE: print summary and persist full results for the crons dashboard ═══
total=R["pass"]+R["fail"]+R["warn"]
pct=round(R["pass"]/total*100) if total else 0
print(f"\n{'='*60}")
print(f"L99 UX AGENT v3.0: {R['pass']}/{total} pass ({pct}%) | {R['fail']} fail | {R['warn']} warn")
print(f"{'='*60}")
with open("/var/www/html/api/l99-ux-results.json","w") as f:json.dump(R,f,indent=2)
print("Saved")
|