mirror of
https://github.com/ragestudio/comty.git
synced 2025-06-18 06:54:15 +00:00
remove _debug
This commit is contained in:
parent
9f29043e47
commit
08e3b32866
@ -1,92 +0,0 @@
|
||||
import React, { useState, useEffect, useRef } from "react";
|
||||
import Hls from "hls.js"
|
||||
|
||||
// Hard-coded demo sources used to exercise the sync pipeline below:
// a TIDAL HLS video manifest and a FLAC audio track. Both URLs appear to
// carry short-lived access tokens and will presumably expire — TODO confirm.
const exampleData = {
	video: "https://im-fa.manifest.tidal.com/1/manifests/CAESCTE5Njg2MTQ0NCIWd05QUkh1YTIyOGRXTUVUdmFxbThQdyIWZE05ZHNYTFNkTEhaODdmTUxQMDhGQSIWS0dfYTZubHUtcTUydVZMenRyOTJwQSIWLWU1NHRpanJlNzZhSjdMcXVoQ05idyIWenRCWnZEYmpia1hvNS14UUowWFl1USIWdFRHY20ycFNpVTktaHBtVDlzUlNvdyIWdVJDMlNqMFJQYWVMSnN6NWRhRXZtdyIWZnNYUWZpNk01LUdpeUV3dE9JNTZ2dygBMAJQAQ.m3u8?token=1738270941~MjEyMTc0MTk0NTlmNjdiY2RkNjljYzc0NzU1NGRmZDcxMGJhNDI2Mg==",
	audio: "https://sp-pr-fa.audio.tidal.com/mediatracks/CAEaKwgDEidmMmE5YjEyYTQ5ZTQ4YWFkZDdhOTY0YzBmZTdhZTY1ZV82MS5tcDQ/0.flac?token=1738270937~Y2ViYjZiNmYyZmVjN2JhNmYzN2ViMWEzOTcwNzQ3NDdkNzA5YzhhZg=="
}
||||
function AudioSyncApp() {
|
||||
const videoRef = useRef(null);
|
||||
const audioRef = useRef(null);
|
||||
const [worker, setWorker] = useState(null);
|
||||
const [startTime, setStartTime] = useState(null);
|
||||
const audioCtxRef = useRef(null);
|
||||
const hlsRef = useRef(null);
|
||||
|
||||
// Configurar HLS para el video
|
||||
useEffect(() => {
|
||||
if (Hls.isSupported()) {
|
||||
const hls = new Hls({ enableWorker: false, xhrSetup: (xhr) => xhr.withCredentials = false });
|
||||
hlsRef.current = hls;
|
||||
hls.loadSource(exampleData.video);
|
||||
hls.attachMedia(videoRef.current);
|
||||
} else if (videoRef.current.canPlayType("application/vnd.apple.mpegurl")) {
|
||||
videoRef.current.src = exampleData.video;
|
||||
}
|
||||
|
||||
return () => {
|
||||
if (hlsRef.current) hlsRef.current.destroy();
|
||||
};
|
||||
}, []);
|
||||
|
||||
// Inicializar Web Audio y Worker
|
||||
useEffect(() => {
|
||||
audioCtxRef.current = new (window.AudioContext || window.webkitAudioContext)();
|
||||
const newWorker = new Worker(new URL("./worker.js", import.meta.url));
|
||||
newWorker.onmessage = (event) => {
|
||||
setStartTime(event.data.offset);
|
||||
};
|
||||
setWorker(newWorker);
|
||||
|
||||
return () => newWorker.terminate();
|
||||
}, []);
|
||||
|
||||
// Manejar la sincronización
|
||||
const handleSync = async () => {
|
||||
try {
|
||||
// 1. Obtener buffers de audio
|
||||
const [videoBuffer, audioBuffer] = await Promise.all([
|
||||
fetch(exampleData.video, { mode: "cors" }).then(r => r.arrayBuffer()),
|
||||
fetch(exampleData.audio, { mode: "cors" }).then(r => r.arrayBuffer())
|
||||
]);
|
||||
|
||||
// 2. Decodificar
|
||||
const [videoAudio, songAudio] = await Promise.all([
|
||||
audioCtxRef.current.decodeAudioData(videoBuffer),
|
||||
audioCtxRef.current.decodeAudioData(audioBuffer)
|
||||
]);
|
||||
|
||||
// 3. Enviar al Worker
|
||||
worker.postMessage(
|
||||
{ videoBuffer: videoAudio, audioBuffer: songAudio },
|
||||
[videoAudio, songAudio]
|
||||
);
|
||||
} catch (error) {
|
||||
console.error("Error de decodificación:", error);
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<div>
|
||||
<video
|
||||
ref={videoRef}
|
||||
controls
|
||||
crossOrigin="anonymous"
|
||||
width="600"
|
||||
/>
|
||||
<audio
|
||||
ref={audioRef}
|
||||
controls
|
||||
crossOrigin="anonymous"
|
||||
src={exampleData.audio}
|
||||
/>
|
||||
<button onClick={handleSync}>Sincronizar</button>
|
||||
{startTime !== null && (
|
||||
<p>Offset de sincronización: {startTime.toFixed(2)} segundos</p>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
export default AudioSyncApp;
|
@ -1,70 +0,0 @@
|
||||
/**
 * Sync worker: receives two audio signals, extracts band-limited spectral
 * features from each, cross-correlates them, and posts back { offset }.
 * On failure it posts { error } instead.
 */
self.onmessage = async (event) => {
	const { videoBuffer, audioBuffer } = event.data;
	const SAMPLE_RATE = 44100;

	// Accept either a decoded AudioBuffer or raw mono Float32Array samples.
	// (An AudioBuffer cannot be structured-cloned into a worker, so callers
	// may send the channel data instead.)
	const toAudioBuffer = (input) => {
		if (input instanceof Float32Array) {
			const buf = new AudioBuffer({
				length: input.length,
				numberOfChannels: 1,
				sampleRate: SAMPLE_RATE,
			});
			buf.copyToChannel(input, 0);
			return buf;
		}
		return input;
	};

	// Render `buffer` offline through an AnalyserNode and return the dB
	// magnitudes of the FFT bins covering freqRange = [lowHz, highHz].
	// NOTE(review): this captures a single FFT snapshot of the rendered
	// signal, not a per-frame energy envelope — verify this is the intended
	// feature for correlation.
	const getEnergy = (buffer, freqRange) => {
		const offlineCtx = new OfflineAudioContext(1, buffer.length, SAMPLE_RATE);
		const source = offlineCtx.createBufferSource();
		source.buffer = buffer;

		const analyser = offlineCtx.createAnalyser();
		analyser.fftSize = 4096;
		source.connect(analyser);
		analyser.connect(offlineCtx.destination);
		source.start();

		return offlineCtx.startRendering().then(() => {
			const data = new Float32Array(analyser.frequencyBinCount);
			analyser.getFloatFrequencyData(data);

			const startBin = Math.floor(freqRange[0] * analyser.fftSize / SAMPLE_RATE);
			const endBin = Math.floor(freqRange[1] * analyser.fftSize / SAMPLE_RATE);
			return data.slice(startBin, endBin);
		});
	};

	// Return the lag at which the dot product of the two feature vectors
	// is maximal (0 when videoFeatures is not longer than audioFeatures).
	const crossCorrelate = (videoFeatures, audioFeatures) => {
		let maxCorr = -Infinity;
		let bestOffset = 0;

		for (let i = 0; i < videoFeatures.length - audioFeatures.length; i++) {
			let corr = 0;
			for (let j = 0; j < audioFeatures.length; j++) {
				corr += videoFeatures[i + j] * audioFeatures[j];
			}
			if (corr > maxCorr) {
				maxCorr = corr;
				bestOffset = i;
			}
		}
		return bestOffset;
	};

	try {
		const videoPcm = toAudioBuffer(videoBuffer);
		const audioPcm = toAudioBuffer(audioBuffer);

		// Bass band (60-250 Hz).
		const [videoBass, audioBass] = await Promise.all([
			getEnergy(videoPcm, [60, 250]),
			getEnergy(audioPcm, [60, 250]),
		]);

		// Voice band (300-3400 Hz).
		const [videoVoice, audioVoice] = await Promise.all([
			getEnergy(videoPcm, [300, 3400]),
			getEnergy(audioPcm, [300, 3400]),
		]);

		// Weight the bands dynamically: bass-dominant material leans on the
		// bass band, otherwise the voice band dominates. The explicit initial
		// value 0 keeps reduce() from throwing on an empty bin slice.
		const sum = (arr) => arr.reduce((a, b) => a + b, 0);
		const isElectronic = sum(audioVoice) < sum(audioBass);
		const weight = isElectronic ? 0.8 : 0.4;

		const videoFeatures = videoBass.map((v, i) => weight * v + (1 - weight) * videoVoice[i]);
		const audioFeatures = audioBass.map((v, i) => weight * v + (1 - weight) * audioVoice[i]);

		// FIXME: crossCorrelate returns an index into the frequency-bin
		// arrays, not a sample offset — dividing by SAMPLE_RATE mislabels it
		// as seconds. TODO confirm the intended unit conversion.
		const offset = crossCorrelate(videoFeatures, audioFeatures);
		self.postMessage({ offset: offset / SAMPLE_RATE });
	} catch (error) {
		self.postMessage({ error: "Error en el procesamiento" });
	}
};
|
@ -1,47 +0,0 @@
|
||||
import TrackManifest from "@cores/player/classes/TrackManifest"
|
||||
|
||||
const D_Manifest = () => {
|
||||
const [manifest, setManifest] = React.useState(null)
|
||||
|
||||
function selectLocalFile() {
|
||||
const input = document.createElement("input")
|
||||
input.type = "file"
|
||||
input.accept = "audio/*"
|
||||
input.onchange = (e) => {
|
||||
loadManifest(e.target.files[0])
|
||||
}
|
||||
input.click()
|
||||
}
|
||||
|
||||
async function loadManifest(file) {
|
||||
let track = new TrackManifest({ file: file })
|
||||
|
||||
await track.initialize()
|
||||
|
||||
console.log(track)
|
||||
|
||||
setManifest(track)
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="flex-column gap-10">
|
||||
<p>Select a local file to view & create a track manifest</p>
|
||||
|
||||
<button onClick={selectLocalFile}>Select</button>
|
||||
|
||||
{manifest?.cover && (
|
||||
<img
|
||||
src={manifest.cover}
|
||||
alt="Cover"
|
||||
style={{ width: "100px", height: "100px" }}
|
||||
/>
|
||||
)}
|
||||
|
||||
<code style={{ whiteSpace: "break-spaces", width: "300px" }}>
|
||||
{JSON.stringify(manifest)}
|
||||
</code>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
export default D_Manifest
|
@ -1,49 +0,0 @@
|
||||
import React from "react"
|
||||
|
||||
const defaultURL = "ws://localhost:19236"
|
||||
|
||||
function useLoquiWs() {
|
||||
const [socket, setSocket] = React.useState(null)
|
||||
|
||||
function create() {
|
||||
const s = new WebSocket(defaultURL)
|
||||
|
||||
s.addEventListener("open", (event) => {
|
||||
console.log("WebSocket connection opened")
|
||||
})
|
||||
|
||||
s.addEventListener("close", (event) => {
|
||||
console.log("WebSocket connection closed")
|
||||
})
|
||||
|
||||
s.addEventListener("error", (event) => {
|
||||
console.log("WebSocket error", event)
|
||||
})
|
||||
|
||||
s.addEventListener("message", (event) => {
|
||||
console.log("Message from server ", event.data)
|
||||
})
|
||||
|
||||
setSocket(s)
|
||||
}
|
||||
|
||||
React.useEffect(() => {
|
||||
create()
|
||||
|
||||
return () => {
|
||||
if (socket) {
|
||||
socket.close()
|
||||
}
|
||||
}
|
||||
}, [])
|
||||
|
||||
return [socket]
|
||||
}
|
||||
|
||||
const Loqui = () => {
|
||||
const [socket] = useLoquiWs()
|
||||
|
||||
return <div>{defaultURL}</div>
|
||||
}
|
||||
|
||||
export default Loqui
|
@ -1,46 +0,0 @@
|
||||
import React from "react"
|
||||
import { Progress } from "antd"
|
||||
import UploadButton from "@components/UploadButton"
|
||||
|
||||
const VideoSegmentedUpload = () => {
|
||||
const [result, setResult] = React.useState(null)
|
||||
const [progress, setProgress] = React.useState(null)
|
||||
|
||||
return (
|
||||
<div>
|
||||
<UploadButton
|
||||
onSuccess={(id, response) => {
|
||||
setResult(response)
|
||||
}}
|
||||
onProgress={(id, progress) => {
|
||||
setProgress({
|
||||
id,
|
||||
progress,
|
||||
})
|
||||
}}
|
||||
accept={["video/*"]}
|
||||
headers={{
|
||||
transmux: "mq-hls",
|
||||
}}
|
||||
>
|
||||
Upload video
|
||||
</UploadButton>
|
||||
|
||||
{progress && (
|
||||
<div>
|
||||
<h2>Progress</h2>
|
||||
<Progress
|
||||
percent={progress.progress}
|
||||
status={
|
||||
progress.progress === 100 ? "success" : "active"
|
||||
}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{result && <code>{JSON.stringify(result, null, 2)}</code>}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
export default VideoSegmentedUpload
|
Loading…
x
Reference in New Issue
Block a user