Files
wevads-platform/scripts/api_youtube-factory-api.php
2026-02-26 04:53:11 +01:00

275 lines
13 KiB
PHP
Executable File

<?php
/**
 * YOUTUBE FACTORY API — Trend → Script → Render → Upload pipeline
 * Endpoint: :5821/api/youtube-factory-api.php
 *
 * Dispatches on ?action= / POST action:
 *   scrape_trends, generate_script, render_video, upload_youtube,
 *   process_queue, auto_pipeline, status (default).
 */
require_once("/opt/wevads/config/credentials.php");
header('Content-Type: application/json');

// Connect to Postgres. FIX: the connection result was never checked — on
// failure every subsequent pg_query() would warn against a false handle and
// the client would get malformed output. Fail fast with a JSON error instead.
$db = pg_connect("host=localhost dbname=adx_system user=admin password=".WEVADS_DB_PASS);
if (!$db) {
    http_response_code(500);
    echo json_encode(['status' => 'error', 'message' => 'Database connection failed']);
    exit;
}
pg_query($db, "SET search_path TO admin");

// Action can arrive via GET or POST; unknown actions fall through the switch
// and produce no output (original behavior, preserved).
$action = $_GET['action'] ?? $_POST['action'] ?? 'status';
switch($action) {
case 'scrape_trends':
    // Harvest trending topics into yt_trends from two sources:
    //   1) YouTube Data API "mostPopular" chart (needs an active key in api_keys_pool)
    //   2) Google Trends RSS feed (keyless, scraped)
    // FIX: $country came straight from $_GET and was interpolated raw into SQL
    // and URLs — whitelist it to a 2-letter region code.
    $country = strtoupper($_GET['country'] ?? 'FR');
    if (!preg_match('/^[A-Z]{2}$/', $country)) { $country = 'FR'; }
    // YouTube rejects maxResults outside 1..50 — clamp instead of failing.
    $max = max(1, min(50, (int)($_GET['max'] ?? 20)));
    $trends = [];

    // --- Source 1: YouTube Data API trending chart ---
    // FIX: pg_fetch_result() on an empty result set raises an error when the
    // key pool is empty — check row count first.
    $key_res = pg_query($db, "SELECT api_key FROM api_keys_pool WHERE provider='youtube' AND status='active' LIMIT 1");
    $yt_key = ($key_res && pg_num_rows($key_res) > 0) ? pg_fetch_result($key_res, 0) : null;
    if ($yt_key) {
        $url = "https://www.googleapis.com/youtube/v3/videos?part=snippet,statistics&chart=mostPopular"
             . "&regionCode={$country}&maxResults={$max}&key=" . urlencode($yt_key);
        $resp = @file_get_contents($url);
        if ($resp) {
            $data = json_decode($resp, true);
            foreach ($data['items'] ?? [] as $item) {
                $kw = pg_escape_string($db, $item['snippet']['title']);
                $views = (int)($item['statistics']['viewCount'] ?? 0);
                $likes = (int)($item['statistics']['likeCount'] ?? 0);
                $comments = (int)($item['statistics']['commentCount'] ?? 0);
                // Weighted engagement score: comments > likes > raw views.
                $score = round(($views * 0.3 + $likes * 2 + $comments * 5) / 1000, 2);
                $cat = pg_escape_string($db, $item['snippet']['categoryId'] ?? 'general');
                pg_query($db, "INSERT INTO yt_trends(keyword, trend_score, volume_estimate, country_code, category, source)
                    VALUES('$kw', $score, $views, '$country', '$cat', 'youtube') ON CONFLICT DO NOTHING");
                $trends[] = ['keyword' => $item['snippet']['title'], 'score' => $score, 'views' => $views];
            }
        }
    }

    // --- Source 2: Google Trends RSS (scraped) ---
    $gt_resp = @file_get_contents("https://trends.google.com/trending/rss?geo={$country}");
    if ($gt_resp && strpos($gt_resp, '<item>') !== false) {
        preg_match_all('/<title>([^<]+)<\/title>/', $gt_resp, $matches);
        // Index 0 is the feed's own <title>; keep up to 10 item titles after it.
        foreach (array_slice($matches[1] ?? [], 1, 10) as $kw) {
            $kw_safe = pg_escape_string($db, trim($kw));
            pg_query($db, "INSERT INTO yt_trends(keyword, trend_score, country_code, source)
                VALUES('$kw_safe', 50, '$country', 'google_trends') ON CONFLICT DO NOTHING");
            $trends[] = ['keyword' => trim($kw), 'score' => 50, 'source' => 'google'];
        }
    }
    echo json_encode(['status' => 'success', 'trends_scraped' => count($trends), 'trends' => array_slice($trends, 0, 10)]);
    break;
case 'generate_script':
    // Turn a scraped trend into a video script via the local HAMID IA service
    // and persist it as a new 'scripted' yt_jobs row keyed by a fresh UUID.
    $trend_id = (int)($_POST['trend_id'] ?? 0);
    $style = $_POST['style'] ?? 'educational';
    $duration = (int)($_POST['duration'] ?? 180);
    if (!$trend_id) { echo json_encode(['error' => 'Need trend_id']); break; }
    $trend = pg_fetch_assoc(pg_query($db, "SELECT * FROM yt_trends WHERE id=$trend_id"));
    if (!$trend) { echo json_encode(['error' => 'Trend not found']); break; }
    // Call HAMID IA for script generation
    $prompt = "Generate a {$duration}s YouTube script about: {$trend['keyword']}. Style: {$style}. Language: French.
Structure: Hook(15s) → Intro(30s) → 3 Key Points(90s) → CTA(15s). Make it engaging with rhetorical questions.";
    $hamid_resp = @file_get_contents('http://127.0.0.1:5821/api/hamid-brain.php?action=generate&prompt=' . urlencode($prompt));
    // Guard json_decode against a false/empty HTTP response.
    $hamid_data = $hamid_resp ? json_decode($hamid_resp, true) : null;
    $script = $hamid_data['response'] ?? $hamid_data['text'] ?? "Script generation failed - manual input needed";
    // Create job. FIX: $style came from $_POST and was interpolated unescaped
    // into the INSERT — escape it like the other string columns.
    $style_sql = pg_escape_string($db, $style);
    $job_id = pg_fetch_result(pg_query($db, "SELECT gen_random_uuid()"), 0);
    pg_query($db, "INSERT INTO yt_jobs(id, trend_id, script_text, script_style, status, title)
        VALUES('$job_id', $trend_id, '".pg_escape_string($db, $script)."', '$style_sql', 'scripted', '".pg_escape_string($db, $trend['keyword'])."')");
    pg_query($db, "UPDATE yt_trends SET processed=true WHERE id=$trend_id");
    echo json_encode([
        'status' => 'success',
        'job_id' => $job_id,
        'script_length' => strlen($script),
        'word_count' => str_word_count($script),
        'estimated_duration' => $duration
    ]);
    break;
case 'render_video':
    // Render a scripted job into an MP4: TTS audio (piper, espeak-ng fallback)
    // over a solid-color 1080p background with the job title drawn on top.
    $job_id = $_POST['job_id'] ?? '';
    if (!$job_id) { echo json_encode(['error' => 'Need job_id']); break; }
    // FIX: $job_id is used to build filesystem paths and shell commands.
    // Jobs are keyed by gen_random_uuid(), so require a canonical UUID shape —
    // this blocks path traversal and shell metacharacters at the door.
    if (!preg_match('/^[0-9a-fA-F-]{36}$/', $job_id)) { echo json_encode(['error' => 'Invalid job_id']); break; }
    $job = pg_fetch_assoc(pg_query($db, "SELECT * FROM yt_jobs WHERE id='".pg_escape_string($db, $job_id)."'"));
    if (!$job) { echo json_encode(['error' => 'Job not found']); break; }
    pg_query($db, "UPDATE yt_jobs SET status='rendering', rendering_start=NOW() WHERE id='".pg_escape_string($db, $job_id)."'");
    $output_dir = '/opt/youtube-factory/output';
    $audio_file = "$output_dir/{$job_id}_audio.mp3";
    $video_file = "$output_dir/{$job_id}_final.mp4";
    // Step 1: TTS via piper. FIX: addslashes() is NOT shell escaping — a single
    // quote in the script text broke out of the quoted echo argument and allowed
    // command injection. Use escapeshellarg() for every dynamic shell token.
    $script_arg = escapeshellarg($job['script_text']);
    $tts_out = shell_exec("printf '%s' $script_arg | timeout 120 piper --model fr_FR-siwis-medium --output_file " . escapeshellarg($audio_file) . " 2>&1");
    // Fallback: espeak-ng, capped at 2000 chars like the original fallback.
    if (!file_exists($audio_file)) {
        shell_exec("espeak-ng -v fr -w " . escapeshellarg($audio_file) . " " . escapeshellarg(substr($job['script_text'], 0, 2000)) . " 2>&1");
    }
    // Step 2: the audio duration drives the video length; default 180s if ffprobe fails.
    $duration = (int)trim((string)shell_exec("ffprobe -v error -show_entries format=duration -of default=noprint_wrappers=1:nokey=1 " . escapeshellarg($audio_file) . " 2>/dev/null"));
    if (!$duration) $duration = 180;
    // Step 3: solid background + centered title, muxed with the TTS audio.
    $bg_color = '#1a1a2e'; // Dark background
    // drawtext treats \ ' : % as special in the filter graph — escape them,
    // then escapeshellarg() the whole -vf value so the title (user-derived via
    // trend keywords) can never escape into the shell. The original passed it
    // through addslashes() inside a double-quoted shell string, where `$` and
    // backticks still executed.
    $title_txt = str_replace(['\\', "'", ':', '%'], ['\\\\', "\\'", '\\:', '\\%'], substr($job['title'], 0, 80));
    $vf = escapeshellarg("drawtext=text='{$title_txt}':fontsize=48:fontcolor=white:x=(w-text_w)/2:y=h/3:font=Arial");
    $ffmpeg_cmd = "ffmpeg -y -f lavfi -i color=c=$bg_color:s=1920x1080:d=$duration -i " . escapeshellarg($audio_file) . " " .
        "-vf $vf " .
        "-c:v libx264 -preset fast -crf 23 -c:a aac -b:a 192k -shortest " . escapeshellarg($video_file) . " 2>&1";
    $ffmpeg_out = (string)shell_exec($ffmpeg_cmd);
    if (file_exists($video_file)) {
        $size_mb = round(filesize($video_file) / 1048576, 2);
        pg_query($db, "UPDATE yt_jobs SET status='rendered', audio_file='$audio_file', video_file='$video_file',
            duration_seconds=$duration, file_size_mb=$size_mb, rendering_end=NOW()
            WHERE id='".pg_escape_string($db, $job_id)."'");
        echo json_encode(['status' => 'success', 'video_file' => $video_file, 'duration' => $duration, 'size_mb' => $size_mb]);
    } else {
        pg_query($db, "UPDATE yt_jobs SET status='failed', error_log='".pg_escape_string($db, $ffmpeg_out)."' WHERE id='".pg_escape_string($db, $job_id)."'");
        echo json_encode(['status' => 'error', 'message' => 'Rendering failed', 'log' => substr($ffmpeg_out, 0, 500)]);
    }
    break;
case 'upload_youtube':
    // Publish a rendered job to YouTube (Data API v3 resumable upload):
    // POST the metadata to obtain a session URL from the Location header,
    // then PUT the video bytes to that URL.
    $job_id = $_POST['job_id'] ?? '';
    $channel_id = (int)($_POST['channel_id'] ?? 0);
    $job = pg_fetch_assoc(pg_query($db, "SELECT * FROM yt_jobs WHERE id='".pg_escape_string($db, $job_id)."' AND status='rendered'"));
    if (!$job) { echo json_encode(['error' => 'No rendered job found']); break; }
    $channel = pg_fetch_assoc(pg_query($db, "SELECT * FROM yt_channels WHERE id=$channel_id AND is_active=true"));
    if (!$channel) { echo json_encode(['error' => 'No active channel']); break; }
    // FIX: the rendered file was never verified before filesize()/fopen().
    if (empty($job['video_file']) || !is_file($job['video_file'])) {
        echo json_encode(['status' => 'error', 'message' => 'Video file missing']);
        break;
    }
    $token = $channel['oauth_token_encrypted']; // In prod: decrypt from vault
    $metadata = json_encode([
        'snippet' => [
            'title' => $job['title'],
            'description' => $job['description'] ?? $job['script_text'],
            'tags' => ['trending', 'viral', $job['title']],
            'categoryId' => '22'
        ],
        'status' => ['privacyStatus' => 'public']
    ]);
    // Step 1: initiate the resumable upload session.
    $ch = curl_init('https://www.googleapis.com/upload/youtube/v3/videos?uploadType=resumable&part=snippet,status');
    curl_setopt_array($ch, [
        CURLOPT_POST => true,
        CURLOPT_HTTPHEADER => [
            "Authorization: Bearer $token",
            'Content-Type: application/json',
            'X-Upload-Content-Type: video/mp4',
            'X-Upload-Content-Length: ' . filesize($job['video_file'])
        ],
        CURLOPT_POSTFIELDS => $metadata,
        CURLOPT_RETURNTRANSFER => true,
        CURLOPT_HEADER => true
    ]);
    $resp = curl_exec($ch);
    curl_close($ch); // FIX: handle was leaked
    // Step 2: the session URL comes back in the Location header.
    if ($resp !== false && preg_match('/location:\s*(https:\/\/[^\s]+)/i', $resp, $m)) {
        $upload_url = $m[1];
        $fh = fopen($job['video_file'], 'r');
        $ch2 = curl_init($upload_url);
        curl_setopt_array($ch2, [
            CURLOPT_PUT => true,
            CURLOPT_INFILE => $fh,
            CURLOPT_INFILESIZE => filesize($job['video_file']),
            CURLOPT_HTTPHEADER => ['Content-Type: video/mp4'],
            CURLOPT_RETURNTRANSFER => true
        ]);
        $raw = curl_exec($ch2);
        curl_close($ch2); // FIX: handle was leaked
        if ($fh) { fclose($fh); } // FIX: file handle was leaked
        $upload_resp = $raw !== false ? json_decode($raw, true) : null;
        if (isset($upload_resp['id'])) {
            // FIX: the video id comes from an external API response — escape
            // it before interpolating into SQL, like every other string value.
            $yt_id = pg_escape_string($db, $upload_resp['id']);
            $yt_url = "https://www.youtube.com/watch?v=$yt_id";
            pg_query($db, "UPDATE yt_jobs SET status='published', youtube_video_id='$yt_id', youtube_url='$yt_url', published_at=NOW() WHERE id='".pg_escape_string($db, $job_id)."'");
            pg_query($db, "UPDATE yt_channels SET uploads_today=uploads_today+1, total_uploads=total_uploads+1, last_upload_at=NOW() WHERE id=$channel_id");
            echo json_encode(['status' => 'success', 'youtube_url' => $yt_url]);
        } else {
            echo json_encode(['status' => 'error', 'message' => 'Upload failed', 'response' => $upload_resp]);
        }
    } else {
        echo json_encode(['status' => 'error', 'message' => 'Could not initiate upload']);
    }
    break;
case 'process_queue':
    // Cron entry point: promote up to 3 oldest 'pending' jobs to 'queued' so
    // the renderer picks them up, oldest first.
    // FIX: the original "auto-generate script if missing" branch was dead code
    // — it re-fetched the job and its trend and assigned $_POST['trend_id'],
    // but never acted on any of it. Removed; jobs lacking script_text are
    // still queued as-is (same observable behavior as before).
    $pending = pg_fetch_all(pg_query($db, "SELECT id FROM yt_jobs WHERE status='pending' ORDER BY created_at ASC LIMIT 3"));
    $results = [];
    foreach ($pending ?: [] as $p) {
        // Ids come from our own DB (UUIDs), but escape anyway — defense in depth.
        $id_sql = pg_escape_string($db, $p['id']);
        pg_query($db, "UPDATE yt_jobs SET status='queued' WHERE id='$id_sql'");
        $results[] = $p['id'];
    }
    echo json_encode(['status' => 'success', 'queued' => count($results)]);
    break;
case 'auto_pipeline':
    // Full autonomous pipeline: scrape fresh trends → pick the best unprocessed
    // one → generate a script via HAMID → create a 'scripted' job. Rendering is
    // left to a follow-up render_video call (see 'next_step' in the response).
    // FIX: $country is forwarded to scrape_trends, which interpolates it into
    // SQL and URLs — whitelist it to a 2-letter region code here as well.
    $country = strtoupper($_GET['country'] ?? 'FR');
    if (!preg_match('/^[A-Z]{2}$/', $country)) { $country = 'FR'; }
    // 1. Scrape fresh trends (self-call; only the table refresh matters, the
    //    JSON body was decoded into an unused variable before — dropped).
    @file_get_contents("http://127.0.0.1:5821/api/youtube-factory-api.php?action=scrape_trends&country=$country");
    // 2. Pick top unprocessed trend
    $top = pg_fetch_assoc(pg_query($db, "SELECT id, keyword, trend_score FROM yt_trends WHERE processed=false ORDER BY trend_score DESC LIMIT 1"));
    if (!$top) { echo json_encode(['status' => 'no_trends']); break; }
    // 3. Generate script via HAMID (guard json_decode against a false response)
    $prompt = urlencode("YouTube script about: {$top['keyword']}. Style: educational. 180 seconds. French.");
    $hamid_raw = @file_get_contents("http://127.0.0.1:5821/api/hamid-brain.php?action=generate&prompt=$prompt");
    $hamid = $hamid_raw ? json_decode($hamid_raw, true) : null;
    $script = $hamid['response'] ?? $hamid['text'] ?? "Script pending manual creation for: {$top['keyword']}";
    // 4. Create job
    $job_id = pg_fetch_result(pg_query($db, "SELECT gen_random_uuid()"), 0);
    pg_query($db, "INSERT INTO yt_jobs(id, trend_id, script_text, title, status)
        VALUES('$job_id', {$top['id']}, '".pg_escape_string($db, $script)."', '".pg_escape_string($db, $top['keyword'])."', 'scripted')");
    pg_query($db, "UPDATE yt_trends SET processed=true WHERE id={$top['id']}");
    echo json_encode([
        'status' => 'success',
        'pipeline' => 'trend_to_script',
        'trend' => $top['keyword'],
        'job_id' => $job_id,
        'next_step' => 'render_video'
    ]);
    break;
case 'status':
    // Dashboard snapshot: aggregate counters across the factory tables,
    // gathered in a single round-trip via scalar subqueries.
    $counts_sql = <<<SQL
        SELECT
            (SELECT COUNT(*) FROM yt_trends)                                as total_trends,
            (SELECT COUNT(*) FROM yt_trends WHERE processed=false)          as unprocessed_trends,
            (SELECT COUNT(*) FROM yt_jobs)                                  as total_jobs,
            (SELECT COUNT(*) FROM yt_jobs WHERE status='pending')           as pending_jobs,
            (SELECT COUNT(*) FROM yt_jobs WHERE status='rendering')         as rendering_jobs,
            (SELECT COUNT(*) FROM yt_jobs WHERE status='published')         as published_videos,
            (SELECT COUNT(*) FROM yt_channels WHERE is_active=true)         as active_channels,
            (SELECT COUNT(*) FROM yt_assets)                                as total_assets
    SQL;
    $snapshot = pg_fetch_assoc(pg_query($db, $counts_sql));
    echo json_encode(['status' => 'success', 'youtube_factory' => $snapshot]);
    break;
}
?>