// Express media router: video upload → S3 (MinIO) → async ffmpeg-api uniqueization.
import { Router } from "express";
import { randomUUID } from "crypto";
import multer from "multer";
import {
  S3Client, PutObjectCommand, GetObjectCommand,
  CreateBucketCommand, HeadBucketCommand,
} from "@aws-sdk/client-s3";
import pg from "pg";

const { Pool } = pg;
const router = Router();

// S3 (MinIO) client; path-style addressing is required for MinIO endpoints.
// SECURITY NOTE(review): the hard-coded fallback credentials below are a
// committed secret — rotate them and make S3_ACCESS_KEY / S3_SECRET_KEY
// mandatory environment variables.
const s3 = new S3Client({
  region: process.env.S3_REGION || "us-east-1",
  endpoint: process.env.S3_ENDPOINT || "http://127.0.0.1:9000",
  forcePathStyle: true,
  credentials: {
    accessKeyId: process.env.S3_ACCESS_KEY || "UN0-admin",
    secretAccessKey: process.env.S3_SECRET_KEY || "RAygtZHqGN49qKn",
  },
});
const BUCKET = process.env.S3_BUCKET || "uno-click";

// ffmpeg-api is now reachable on the host via 127.0.0.1:8000
const FFMPEG_API = process.env.FFMPEG_API_URL || "http://127.0.0.1:8000";

// Postgres pool backing the video_jobs bookkeeping table.
// PG_PASSWORD intentionally has no fallback — it must come from the environment.
const pool = new Pool({
  host: process.env.PG_HOST || "127.0.0.1",
  port: Number(process.env.PG_PORT || 5432),
  database: process.env.PG_DATABASE || "n8n",
  user: process.env.PG_USER || "n8n",
  password: process.env.PG_PASSWORD,
});
// Ensure the target bucket exists, creating it on first run.
// Fire-and-forget at module load; failures are logged below, not fatal.
async function ensureBucket() {
  try {
    await s3.send(new HeadBucketCommand({ Bucket: BUCKET }));
  } catch (err) {
    // Only attempt creation when the bucket is actually missing. The original
    // bare catch also swallowed auth/network failures and then tried to create
    // the bucket anyway; rethrowing lets real errors surface in the log.
    const status = err?.$metadata?.httpStatusCode;
    if (err?.name === "NotFound" || status === 404) {
      await s3.send(new CreateBucketCommand({ Bucket: BUCKET }));
    } else {
      throw err;
    }
  }
}
ensureBucket().catch(console.error);
// Bookkeeping table for async video jobs (idempotent DDL, run at module load).
// status lifecycle: 'queued' → 'processing' → 'done' | 'error'.
// Fire-and-forget: startup is not blocked, failures are only logged.
pool.query(`CREATE TABLE IF NOT EXISTS video_jobs (
job_id TEXT PRIMARY KEY,
status TEXT NOT NULL DEFAULT 'queued',
input_s3_key TEXT,
output_s3_key TEXT,
error_msg TEXT,
created_at TIMESTAMPTZ DEFAULT NOW(),
updated_at TIMESTAMPTZ DEFAULT NOW()
)`).then(() => console.log("[media] video_jobs table ready")).catch(console.error);
// In-memory upload handling: a single video file, at most 500 MB.
const MAX_UPLOAD_BYTES = 500 * 1024 * 1024;

const upload = multer({
  storage: multer.memoryStorage(),
  limits: { fileSize: MAX_UPLOAD_BYTES },
  // Reject anything whose MIME type is not video/*.
  fileFilter(req, file, cb) {
    const isVideo = file.mimetype.startsWith("video/");
    cb(isVideo ? null : new Error("Only video files allowed"), isVideo);
  },
});
// Build a randomized ffmpeg argument list that subtly "uniqueizes" a video:
// small hue/eq/noise tweaks, a slight speed-up, and a scale+pad re-framing.
// Audio stays in sync because atempo is the exact inverse of the setpts factor.
function buildFFmpegArgs() {
  const rand = (lo, hi) => Math.random() * (hi - lo) + lo;

  const hueShift = rand(0, 5).toFixed(1);
  // NOTE(review): hue=s= ranges over [0,5]; values near 0 fully desaturate
  // the video — confirm this wide range is intentional.
  const hueSat = rand(0, 5).toFixed(1);
  const brightness = rand(-0.05, 0.05).toFixed(2);
  const contrast = rand(0.98, 1.05).toFixed(2);
  const eqSaturation = rand(0.95, 1.1).toFixed(2);
  const noiseStrength = Math.round(rand(10, 20));
  const ptsFactor = rand(0.97, 0.99).toFixed(3);
  const audioTempo = (1 / parseFloat(ptsFactor)).toFixed(3);
  const scaleFactor = rand(0.94, 0.98).toFixed(2);
  const padFactor = (1 / parseFloat(scaleFactor)).toFixed(4);
  const volume = rand(1.0, 1.1).toFixed(2);
  const crf = Math.round(rand(20, 24));

  const videoFilters = [
    `hue=s=${hueSat}:h=${hueShift}`,
    `eq=brightness=${brightness}:contrast=${contrast}:saturation=${eqSaturation}`,
    `noise=alls=${noiseStrength}:allf=t+u`,
    `setpts=${ptsFactor}*PTS`,
    `scale=iw*${scaleFactor}:ih*${scaleFactor}`,
    `pad=iw*${padFactor}:ih*${padFactor}:(ow-iw)/2:(oh-ih)/2`,
  ].join(",");

  return [
    "-c:v", "libx264", "-preset", "medium", "-crf", String(crf),
    "-c:a", "aac", "-b:a", "128k",
    "-vf", videoFilters,
    "-af", `volume=${volume},atempo=${audioTempo}`,
    "-max_muxing_queue_size", "1024",
  ];
}
// Calls ffmpeg-api /run-s3 for an already-uploaded input, then records the
// outcome in the video_jobs table. Never rejects: every failure path ends as
// status='error' in the DB (best-effort), so callers may safely fire-and-forget.
// @param {string} jobId     - primary key in video_jobs
// @param {string} inputKey  - S3 key of the uploaded source video
// @param {string} outputKey - S3 key where ffmpeg-api writes the result
async function processVideoJob(jobId, inputKey, outputKey) {
  console.log("[media] calling ffmpeg-api for job:", jobId);
  try {
    await pool.query(
      "UPDATE video_jobs SET status='processing', updated_at=NOW() WHERE job_id=$1",
      [jobId],
    );

    const body = {
      job_id: jobId,
      input_s3: `s3://${BUCKET}/${inputKey}`,
      output_s3: `s3://${BUCKET}/${outputKey}`,
      ffmpeg_args: buildFFmpegArgs(),
    };

    const resp = await fetch(`${FFMPEG_API}/run-s3`, {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify(body),
      signal: AbortSignal.timeout(600_000), // 10 minutes
    });

    // A proxy/gateway failure may return non-JSON (e.g. an HTML 502 page);
    // don't let the parse error mask the real HTTP status.
    let data;
    try {
      data = await resp.json();
    } catch {
      data = { ok: false, detail: `non-JSON response (HTTP ${resp.status})` };
    }
    console.log("[media] ffmpeg-api response:", resp.status, JSON.stringify(data).slice(0, 200));

    if (!resp.ok || !data.ok) {
      throw new Error(data.detail ? JSON.stringify(data.detail) : "ffmpeg-api error");
    }

    await pool.query(
      "UPDATE video_jobs SET status='done', updated_at=NOW() WHERE job_id=$1",
      [jobId],
    );
    console.log("[media] job done:", jobId);
  } catch (err) {
    console.error("[media] job failed:", jobId, err.message);
    // Best-effort status write: the pool itself may be the thing that failed.
    await pool.query(
      "UPDATE video_jobs SET status='error', error_msg=$2, updated_at=NOW() WHERE job_id=$1",
      [jobId, err.message],
    ).catch(() => {});
  }
}
// ─── POST /api/media/upload-video ─────────────────────────────────────────────
// Accepts a single "video" file, stores it in S3, registers a job row, and
// replies immediately with { ok, job_id }. The ffmpeg step runs in the
// background; clients poll /status/:job_id for progress.
router.post("/upload-video", upload.single("video"), async (req, res) => {
  if (!req.file) {
    return res.status(400).json({ error: "Field video required" });
  }

  const jobId = randomUUID();
  const inputKey = `videos/input/${jobId}.mp4`;
  const outputKey = `videos/output/${jobId}.mp4`;

  try {
    // 1. Store the raw upload in MinIO.
    const putCommand = new PutObjectCommand({
      Bucket: BUCKET,
      Key: inputKey,
      Body: req.file.buffer,
      ContentType: req.file.mimetype || "video/mp4",
    });
    await s3.send(putCommand);

    // 2. Register the job in the DB.
    await pool.query(
      "INSERT INTO video_jobs (job_id, status, input_s3_key, output_s3_key) VALUES ($1,$2,$3,$4)",
      [jobId, "queued", inputKey, outputKey],
    );

    // 3. Respond to the client right away.
    res.json({ ok: true, job_id: jobId });

    // 4. Kick off FFmpeg via ffmpeg-api without blocking the response;
    //    processVideoJob handles its own errors and never rejects.
    void processVideoJob(jobId, inputKey, outputKey);
  } catch (err) {
    console.error("[upload-video]", err);
    if (!res.headersSent) res.status(500).json({ error: err.message });
  }
});
// ─── GET /api/media/status/:job_id ────────────────────────────────────────────
// Reports the job's state; once done, also returns a public download URL.
router.get("/status/:job_id", async (req, res) => {
  const { job_id } = req.params;
  try {
    const result = await pool.query(
      "SELECT status, output_s3_key, error_msg FROM video_jobs WHERE job_id=$1",
      [job_id],
    );
    const job = result.rows[0];
    if (!job) {
      return res.status(404).json({ error: "Job not found" });
    }

    const PUBLIC_BFF = process.env.PUBLIC_BFF_URL || "https://uno-click.pip-test.ru";
    let url = null;
    if (job.status === "done") {
      url = `${PUBLIC_BFF}/api/media/download/${job_id}`;
    }

    res.json({ status: job.status, url, error: job.error_msg || null });
  } catch (err) {
    res.status(500).json({ error: err.message });
  }
});
// ─── GET /api/media/download/:job_id ──────────────────────────────────────────
// Streams the processed video from S3 as an attachment.
// 404 if the job is unknown; 409 while it is not yet in the 'done' state.
router.get("/download/:job_id", async (req, res) => {
  const { job_id } = req.params;
  try {
    const { rows } = await pool.query(
      "SELECT status, output_s3_key FROM video_jobs WHERE job_id=$1",
      [job_id],
    );
    if (!rows.length) return res.status(404).json({ error: "Job not found" });
    if (rows[0].status !== "done") return res.status(409).json({ error: "Not ready yet" });

    const s3Resp = await s3.send(
      new GetObjectCommand({ Bucket: BUCKET, Key: rows[0].output_s3_key }),
    );
    res.setHeader("Content-Type", "video/mp4");
    res.setHeader("Content-Disposition", `attachment; filename="uniqueized_${job_id}.mp4"`);
    if (s3Resp.ContentLength) res.setHeader("Content-Length", s3Resp.ContentLength);

    // Errors emitted mid-stream are NOT caught by the surrounding try/catch
    // (they arrive as async events) — handle them on the stream and abort the
    // response instead of leaving the client hanging.
    s3Resp.Body.on("error", (streamErr) => {
      console.error("[download] stream error:", streamErr);
      res.destroy(streamErr);
    });
    s3Resp.Body.pipe(res);
  } catch (err) {
    console.error("[download]", err);
    // Headers may already be sent if the failure happened after streaming began.
    if (!res.headersSent) res.status(500).json({ error: err.message });
    else res.destroy(err);
  }
});
// Router export — route-path comments above indicate it is mounted under /api/media.
export default router;