update _debugs

SrGooglo 2025-02-11 16:14:27 +00:00
parent 10759e0cef
commit 14234ffa6c
4 changed files with 208 additions and 115 deletions

View File

@@ -0,0 +1,92 @@
import React, { useState, useEffect, useRef } from "react";
import Hls from "hls.js"
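// Example Tidal media URLs (an HLS video manifest and a FLAC track); the signed
// tokens are time-limited and have most likely expired.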
const exampleData = {
video: "https://im-fa.manifest.tidal.com/1/manifests/CAESCTE5Njg2MTQ0NCIWd05QUkh1YTIyOGRXTUVUdmFxbThQdyIWZE05ZHNYTFNkTEhaODdmTUxQMDhGQSIWS0dfYTZubHUtcTUydVZMenRyOTJwQSIWLWU1NHRpanJlNzZhSjdMcXVoQ05idyIWenRCWnZEYmpia1hvNS14UUowWFl1USIWdFRHY20ycFNpVTktaHBtVDlzUlNvdyIWdVJDMlNqMFJQYWVMSnN6NWRhRXZtdyIWZnNYUWZpNk01LUdpeUV3dE9JNTZ2dygBMAJQAQ.m3u8?token=1738270941~MjEyMTc0MTk0NTlmNjdiY2RkNjljYzc0NzU1NGRmZDcxMGJhNDI2Mg==",
audio: "https://sp-pr-fa.audio.tidal.com/mediatracks/CAEaKwgDEidmMmE5YjEyYTQ5ZTQ4YWFkZDdhOTY0YzBmZTdhZTY1ZV82MS5tcDQ/0.flac?token=1738270937~Y2ViYjZiNmYyZmVjN2JhNmYzN2ViMWEzOTcwNzQ3NDdkNzA5YzhhZg=="
}
function AudioSyncApp() {
const videoRef = useRef(null);
const audioRef = useRef(null);
const [worker, setWorker] = useState(null);
const [startTime, setStartTime] = useState(null);
const audioCtxRef = useRef(null);
const hlsRef = useRef(null);
// Set up HLS playback for the video element
useEffect(() => {
if (Hls.isSupported()) {
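// enableWorker: false keeps hls.js transmuxing on the main thread;
// xhrSetup drops credentials so segment requests go out as plain CORS.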
const hls = new Hls({ enableWorker: false, xhrSetup: (xhr) => xhr.withCredentials = false });
hlsRef.current = hls;
hls.loadSource(exampleData.video);
hls.attachMedia(videoRef.current);
} else if (videoRef.current.canPlayType("application/vnd.apple.mpegurl")) {
videoRef.current.src = exampleData.video;
}
return () => {
if (hlsRef.current) hlsRef.current.destroy();
};
}, []);
// Initialize Web Audio and the sync worker
useEffect(() => {
audioCtxRef.current = new (window.AudioContext || window.webkitAudioContext)();
const newWorker = new Worker(new URL("./worker.js", import.meta.url));
newWorker.onmessage = (event) => {
// the worker replies with either { offset } or { error }
if (event.data.error) return console.error(event.data.error);
setStartTime(event.data.offset);
};
setWorker(newWorker);
return () => newWorker.terminate();
}, []);
// Handle synchronization
const handleSync = async () => {
try {
// 1. Fetch both sources as ArrayBuffers
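// NOTE: exampleData.video is an HLS manifest (.m3u8), which decodeAudioData
// cannot decode; in practice the audio would have to come from a fetched segment.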
const [videoBuffer, audioBuffer] = await Promise.all([
fetch(exampleData.video, { mode: "cors" }).then(r => r.arrayBuffer()),
fetch(exampleData.audio, { mode: "cors" }).then(r => r.arrayBuffer())
]);
// 2. Decode both into AudioBuffers
const [videoAudio, songAudio] = await Promise.all([
audioCtxRef.current.decodeAudioData(videoBuffer),
audioCtxRef.current.decodeAudioData(audioBuffer)
]);
// 3. Hand the decoded audio to the worker
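// NOTE: AudioBuffer is neither structured-cloneable nor transferable, so this
// call will throw; a working version would post the raw channel data
// (Float32Array) together with the sample rate instead.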
worker.postMessage(
{ videoBuffer: videoAudio, audioBuffer: songAudio },
[videoAudio, songAudio]
);
} catch (error) {
console.error("Error de decodificación:", error);
}
};
return (
<div>
<video
ref={videoRef}
controls
crossOrigin="anonymous"
width="600"
/>
<audio
ref={audioRef}
controls
crossOrigin="anonymous"
src={exampleData.audio}
/>
<button onClick={handleSync}>Synchronize</button>
{startTime !== null && (
<p>Sync offset: {startTime.toFixed(2)} seconds</p>
)}
</div>
);
}
export default AudioSyncApp;

View File

@@ -0,0 +1,70 @@
self.onmessage = async (event) => {
const { videoBuffer, audioBuffer } = event.data;
const SAMPLE_RATE = 44100;
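// NOTE: OfflineAudioContext is generally not exposed inside dedicated workers,
// and decodeAudioData resamples to the AudioContext's own rate, so the fixed
// 44.1 kHz here is an assumption rather than a guarantee.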
// Extract spectral energy within a frequency range
const getEnergy = (buffer, freqRange) => {
const offlineCtx = new OfflineAudioContext(1, buffer.length, SAMPLE_RATE);
const source = offlineCtx.createBufferSource();
source.buffer = buffer;
const analyser = offlineCtx.createAnalyser();
analyser.fftSize = 4096;
source.connect(analyser);
analyser.connect(offlineCtx.destination);
source.start();
return offlineCtx.startRendering().then(() => {
const data = new Float32Array(analyser.frequencyBinCount);
analyser.getFloatFrequencyData(data);
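// This is a single spectrum snapshot (in dB) of the analyser's final state,
// not an energy envelope over time.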
const startBin = Math.floor(freqRange[0] * analyser.fftSize / SAMPLE_RATE);
const endBin = Math.floor(freqRange[1] * analyser.fftSize / SAMPLE_RATE);
return data.slice(startBin, endBin);
});
};
// Cross-correlation: slide audioFeatures across videoFeatures and keep the lag with the highest dot product
const crossCorrelate = (videoFeatures, audioFeatures) => {
let maxCorr = -Infinity;
let bestOffset = 0;
for (let i = 0; i <= videoFeatures.length - audioFeatures.length; i++) {
let corr = 0;
for (let j = 0; j < audioFeatures.length; j++) {
corr += videoFeatures[i + j] * audioFeatures[j];
}
if (corr > maxCorr) {
maxCorr = corr;
bestOffset = i;
}
}
return bestOffset;
};
// Process the features
try {
const [videoBass, audioBass] = await Promise.all([
getEnergy(videoBuffer, [60, 250]), // bass
getEnergy(audioBuffer, [60, 250])
]);
const [videoVoice, audioVoice] = await Promise.all([
getEnergy(videoBuffer, [300, 3400]), // vocals
getEnergy(audioBuffer, [300, 3400])
]);
// Combine the features (dynamic weighting: favor bass when the track looks bass-heavy)
const isElectronic = audioVoice.reduce((a, b) => a + b, 0) < audioBass.reduce((a, b) => a + b, 0);
const weight = isElectronic ? 0.8 : 0.4;
const videoFeatures = videoBass.map((v, i) => weight * v + (1 - weight) * videoVoice[i]);
const audioFeatures = audioBass.map((v, i) => weight * v + (1 - weight) * audioVoice[i]);
// Compute the offset
const offset = crossCorrelate(videoFeatures, audioFeatures);
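// The offset is an index into the feature arrays (frequency bins here), so
// dividing by the sample rate only maps to seconds for per-sample features.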
self.postMessage({ offset: offset / SAMPLE_RATE });
} catch (error) {
self.postMessage({ error: "Processing failed: " + error.message });
}
};

View File

@@ -1,115 +0,0 @@
import React, { useState } from "react"
import { Button, Card, List, Typography, Space, Divider, notification } from "antd"
import QueueManager from "@cores/player/classes/QueueManager"
const { Title, Text } = Typography
const QueueDebugger = () => {
const queueManager = React.useRef(new QueueManager())
const [current, setCurrent] = useState(queueManager.current.currentItem)
const [prevItems, setPrevItems] = useState([...queueManager.current.prevItems])
const [nextItems, setNextItems] = useState([...queueManager.current.nextItems])
const updateQueueState = () => {
setCurrent(queueManager.current.currentItem)
setPrevItems([...queueManager.current.prevItems])
setNextItems([...queueManager.current.nextItems])
}
const handleNext = (random = false) => {
queueManager.current.next(random)
updateQueueState()
}
const handlePrevious = () => {
queueManager.current.previous()
updateQueueState()
}
const handleSet = (item) => {
try {
queueManager.current.set(item)
updateQueueState()
} catch (error) {
notification.error({
message: "Error",
description: error.message,
placement: "bottomRight",
})
}
}
const handleAdd = () => {
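// the +2 presumably accounts for the current item plus 1-based numbering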
const newItem = {
id: (nextItems.length + prevItems.length + 2).toString(),
name: `Item ${nextItems.length + prevItems.length + 2}`
}
queueManager.current.add(newItem)
updateQueueState()
}
const handleRemove = (item) => {
queueManager.current.remove(item)
updateQueueState()
}
React.useEffect(() => {
queueManager.current.add({ id: "1", name: "Item 1" })
queueManager.current.add({ id: "2", name: "Item 2" })
queueManager.current.add({ id: "3", name: "Item 3" })
queueManager.current.add({ id: "4", name: "Item 4" })
updateQueueState()
}, [])
return (
<Space direction="vertical" size="large" style={{ width: "100%", padding: "20px" }}>
<Title level={2}>Queue Debugger</Title>
<Card title="Current Item">
<Text>{current ? current.name : "None"}</Text>
</Card>
<Divider />
<Card title="Previous Items">
<List
bordered
dataSource={prevItems}
renderItem={(item) => (
<List.Item
actions={[
<Button type="link" onClick={() => handleSet(item)}>Set</Button>,
]}
>
{item.name}
</List.Item>
)}
/>
</Card>
<Card title="Next Items">
<List
bordered
dataSource={nextItems}
renderItem={(item) => (
<List.Item
actions={[
<Button type="link" onClick={() => handleSet(item)}>Set</Button>,
<Button type="link" danger onClick={() => handleRemove(item)}>Remove</Button>,
]}
>
{item.name}
</List.Item>
)}
/>
</Card>
<Divider />
<Space>
<Button onClick={handlePrevious}>Previous</Button>
<Button onClick={() => handleNext(false)}>Next</Button>
<Button onClick={() => handleNext(true)}>Next (Random)</Button>
<Button type="primary" onClick={handleAdd}>Add Item</Button>
</Space>
</Space>
)
}
export default QueueDebugger

View File

@@ -0,0 +1,46 @@
import React from "react"
import { Progress } from "antd"
import UploadButton from "@components/UploadButton"
const VideoSegmentedUpload = () => {
const [result, setResult] = React.useState(null)
const [progress, setProgress] = React.useState(null)
return (
<div>
<UploadButton
onSuccess={(id, response) => {
setResult(response)
}}
onProgress={(id, progress) => {
setProgress({
id,
progress,
})
}}
accept={["video/*"]}
headers={{
transmux: "mq-hls",
}}
>
Upload video
</UploadButton>
{progress && (
<div>
<h2>Progress</h2>
<Progress
percent={progress.progress}
status={
progress.progress === 100 ? "success" : "active"
}
/>
</div>
)}
{result && <code>{JSON.stringify(result, null, 2)}</code>}
</div>
)
}
export default VideoSegmentedUpload