Commit f3e5c915 authored by martin hou's avatar martin hou

feat: 录音试听

parent 178c5496
......@@ -9,6 +9,7 @@
"version": "1.0.0",
"license": "ISC",
"dependencies": {
"@ffmpeg/core": "^0.12.10",
"@ffmpeg/ffmpeg": "^0.12.15",
"@ffmpeg/util": "^0.12.2",
"react": "^19.0.0",
......@@ -431,6 +432,14 @@
"node": ">=18"
}
},
"node_modules/@ffmpeg/core": {
"version": "0.12.10",
"resolved": "https://registry.npmjs.org/@ffmpeg/core/-/core-0.12.10.tgz",
"integrity": "sha512-dzNplnn2Nxle2c2i2rrDhqcB19q9cglCkWnoMTDN9Q9l3PvdjZWd1HfSPjCNWc/p8Q3CT+Es9fWOR0UhAeYQZA==",
"engines": {
"node": ">=16.x"
}
},
"node_modules/@ffmpeg/ffmpeg": {
"version": "0.12.15",
"resolved": "https://registry.npmjs.org/@ffmpeg/ffmpeg/-/ffmpeg-0.12.15.tgz",
......
......@@ -24,6 +24,7 @@
"vite": "^6.0.3"
},
"dependencies": {
"@ffmpeg/core": "^0.12.10",
"@ffmpeg/ffmpeg": "^0.12.15",
"@ffmpeg/util": "^0.12.2",
"react": "^19.0.0",
......
This source diff could not be displayed because it is too large. You can view the blob instead.
import React, { useEffect, useRef, useState } from 'react';
/**
 * Renders a waveform canvas and owns the audio decoding worker's lifecycle.
 * The worker is created once on mount and terminated on unmount.
 */
export default function AudioPreview() {
  const canvasRef = useRef<HTMLCanvasElement>(null);
  const [worker, setWorker] = useState<Worker|null>(null);
  useEffect(() => {
    // audio-worker.js uses ES-module imports, so the worker must be created
    // with { type: 'module' } (matching how Home creates the same worker).
    const newWorker = new Worker(new URL('./audio-worker.js', import.meta.url), { type: 'module' });
    setWorker(newWorker);
    // This effect runs exactly once ([] deps), so the previous
    // `if (worker) worker.terminate()` guard was dead code: the closed-over
    // `worker` state is always its initial null here. Cleanup on unmount
    // is the only termination needed.
    return () => {
      newWorker.terminate();
    };
  }, []);
  return (
    <div>
      <canvas ref={canvasRef} width="2000" height="200" style={{ border: '1px solid #ccc' }}></canvas>
    </div>
  );
}
\ No newline at end of file
// 此脚本运行于WebWorker之下
import { FFmpeg } from '@ffmpeg/ffmpeg';
import { fetchFile, toBlobURL } from '@ffmpeg/util';
// Cached FFmpeg instance; populated only after a successful load.
let ffmpeg = null;
// Lazily create and load the shared ffmpeg.wasm instance.
// Returns the cached instance on subsequent calls; throws on load failure.
async function initFFmpeg() {
  if (ffmpeg) return ffmpeg;
  try {
    console.log('Initializing FFmpeg...');
    const instance = new FFmpeg();
    // Load FFmpeg core files served from our own origin (see /ffmpeg).
    console.log('Loading FFmpeg core');
    const baseURL = '/ffmpeg';
    await instance.load({
      coreURL: await toBlobURL(`${baseURL}/ffmpeg-core.js`, 'text/javascript'),
      wasmURL: await toBlobURL(`${baseURL}/ffmpeg-core.wasm`, 'application/wasm'),
      // NOTE(review): `log` is not a documented option of FFmpeg.load() in
      // @ffmpeg/ffmpeg 0.12 — logging is via ffmpeg.on('log', ...); confirm.
      log: true
    });
    console.log('FFmpeg loaded successfully');
    // Cache only after load succeeds. The previous code assigned the module
    // variable before load(), so a failed load left a permanently-broken
    // cached instance and every retry short-circuited on the cache check.
    ffmpeg = instance;
    return ffmpeg;
  } catch (error) {
    console.error('FFmpeg initialization error:', error);
    throw error;
  }
}
// Decode MP3 bytes to raw PCM (s16le, 16 kHz, mono) using ffmpeg.wasm.
// Returns the PCM bytes as a Uint8Array (little-endian 16-bit samples);
// returns an empty Uint8Array on failure so callers always get the same type.
async function decode_mp3(data) {
  try {
    const ffmpeg = await initFFmpeg();
    // Guard against a partially-initialized instance.
    if (!ffmpeg || !ffmpeg.loaded) {
      throw new Error('FFmpeg not properly loaded');
    }
    console.log('FFmpeg is ready, starting MP3 decode...');
    // Write the MP3 into FFmpeg's virtual filesystem.
    await ffmpeg.writeFile('input.mp3', data);
    // Decode to headerless PCM: signed 16-bit LE, 16 kHz, mono — the format
    // getRecordAnalyseData expects.
    await ffmpeg.exec([
      '-i', 'input.mp3',
      '-f', 's16le',
      '-acodec', 'pcm_s16le',
      '-ar', '16000',
      '-ac', '1',
      'output.pcm'
    ]);
    const pcmData = await ffmpeg.readFile('output.pcm');
    // Remove the temp files from the virtual FS.
    await ffmpeg.deleteFile('input.mp3');
    await ffmpeg.deleteFile('output.pcm');
    // Copy into a fresh Uint8Array so downstream code owns the buffer.
    // (The previous code also built an unused Uint16Array view here and its
    // error path returned a Uint16Array, mismatching the success path.)
    return new Uint8Array(pcmData);
  } catch (error) {
    console.error('MP3 decode error:', error);
    // Empty result as fallback — same type as the success path.
    return new Uint8Array(0);
  }
}
/**
 * Mimics Recorder.getRecordAnalyseData: reduces a raw s16le PCM byte stream
 * to a fixed-length amplitude envelope for waveform rendering.
 * @param {Uint8Array} pcmData raw PCM bytes, s16le (little-endian int16)
 * @returns {Uint8Array} fixed length 800; each entry is the bucket's mean
 *   absolute amplitude mapped to [0, 127] (0 = silence). An empty input
 *   yields a buffer filled with 128 (legacy midline sentinel, kept for
 *   backward compatibility with existing callers).
 */
function getRecordAnalyseData(pcmData) {
  const BUF_LEN = 800;
  const out = new Uint8Array(BUF_LEN);
  // Reassemble signed little-endian 16-bit samples from the byte stream.
  const pcm = [];
  for (let i = 0; i < pcmData.length; i += 2) {
    const lo = pcmData[i] & 0xff;
    const hi = pcmData[i + 1] & 0xff;
    let sample = (hi << 8) | lo;
    // Sign-extend: values with bit 15 set are negative.
    if (sample & 0x8000) sample = (sample & 0x7fff) - 0x8000;
    pcm.push(sample);
  }
  const len = pcm.length;
  if (len === 0) return out.fill(128); // empty input: midline sentinel
  const step = len / BUF_LEN;
  for (let i = 0; i < BUF_LEN; i++) {
    const start = Math.floor(i * step);
    const end = Math.min(Math.floor((i + 1) * step), len);
    // Mean absolute amplitude over this bucket (rectified envelope).
    let sum = 0;
    for (let j = start; j < end; j++) {
      sum += Math.abs(pcm[j]);
    }
    // When len < BUF_LEN some buckets are empty (end === start); the old
    // code divided by zero here, producing NaN that was silently coerced
    // to 0 on store. Make the silence value explicit instead.
    const avg = end > start ? Math.floor(sum / (end - start)) : 0;
    // Map [0, 32767] -> [0, 127]; clamp defensively.
    out[i] = Math.max(0, Math.min(127, Math.round((avg / 32767) * 127)));
  }
  return out;
}
// Worker entry point: each incoming message carries MP3 bytes from the main
// thread. Decode them, build the waveform envelope, and reply with both.
self.addEventListener('message', async (e) => {
  const mp3Data = e.data;
  try {
    // MP3 -> raw PCM (s16le) via ffmpeg.wasm.
    const pcmData = await decode_mp3(mp3Data);
    // PCM -> fixed-length amplitude envelope for drawing.
    const waveformData = getRecordAnalyseData(pcmData);
    // Reply to the main thread with the full result.
    self.postMessage({ success: true, waveformData, pcmData });
  } catch (error) {
    console.error('Worker error:', error);
    // Surface the failure so the main thread can react.
    self.postMessage({ success: false, error: error.message });
  }
});
\ No newline at end of file
import { useEffect, useState } from 'react';
import { useEffect, useRef, useState } from 'react';
import Jensen, { BluetoothDevice } from '..';
import './index.css';
import { Logger } from './Logger'
const jensen = new Jensen();
export function Home() {
const [dsn, setDsn] = useState<string|null>(null);
const [files, setFiles] = useState<Jensen.FileInfo[]>([]);
const [devices, setDevices] = useState<Jensen.BluetoothDevice[]>([]);
const [greeting, setGreeting] = useState<string|null>(null);
useEffect(() => {
jensen.connect();
jensen.onconnect = () => {
console.log('connect successfully');
jensen.getDeviceInfo().then((info) => {
alert(info.sn + ' connected');
});
};
}, []);
const [waveformData, setWaveformData] = useState<number[]>([]);
const [jensen, setJensen] = useState<Jensen|null>(null);
const [sourceBuffer, setSourceBuffer] = useState<SourceBuffer|null>(null);
const getFilePart = () => {
const file = files[4];
......@@ -42,8 +34,33 @@ export function Home() {
}
// Enumerates already-authorized WebUSB devices, picks the first one with the
// expected vendor id, initializes a Jensen instance over it and stores the
// device serial + instance in component state.
// NOTE(review): this span is diff residue — the first two statements (the
// module-level `jensen.connect()` + alert) look like they belong to the
// removed version of this function; confirm against the real file.
const connect = async () => {
await jensen.connect();
alert(jensen.getModel() + ' connected')
// WebUSB is not in the standard Navigator typings, hence the `any` cast.
const usb = (navigator as any).usb;
let devices = await usb.getDevices();
for (let i = 0; i < devices.length; i++)
{
let dev = devices[i];
console.log(dev);
// 0x10d6 — vendor id this app targets; skip everything else.
if (dev.vendorId != 0x10d6) continue;
await dev.open();
let inst = new Jensen(Logger, dev);
await inst.initialize();
let info = await inst.getDeviceInfo();
// Only accept the device once it reports a serial number.
if (info && info.sn)
{
setDsn(info.sn);
setJensen(inst);
return;
}
}
// Earlier approach: prompt the user to pick a device via requestDevice.
// let dev = await usb.requestDevice({
// filters: [{ vendorId: 0x10d6 }]
// });
// if (!dev) return alert('没有找到设备');
// await dev.open();
// jensen.setUSBDevice(dev);
// await jensen.initialize();
// let info = await jensen.getDeviceInfo();
// setDsn(info.sn);
}
const disconnectBTDevice = async () => {
......@@ -203,21 +220,135 @@ export function Home() {
});
}
// Create the audio worker up front; it decodes MP3 chunks off the main
// thread and replies with waveform analysis data.
const audioWorker = new Worker(new URL('./audio-worker.js', import.meta.url), { type: 'module' });
audioWorker.onmessage = (e) => {
  if (e.data.waveformData)
  {
    console.log(e.data.waveformData);
    const waveform = e.data.waveformData;
    // Redraw the waveform onto the shared canvas.
    const canvas = document.getElementById('waveformCanvas') as HTMLCanvasElement;
    const ctx = canvas.getContext('2d');
    if (ctx)
    {
      ctx.clearRect(0, 0, canvas.width, canvas.height);
      ctx.fillStyle = 'black';
    }
    else
    {
      console.log('no ctx');
      return;
    }
    const height = canvas.height;
    const half = height / 2;
    ctx.lineWidth = 1;
    ctx.lineCap = 'round';
    ctx.strokeStyle = '#0099ff';
    // One vertical bar per envelope sample, mirrored around the horizontal
    // midline. Values arrive in [0, 127]; /128 normalizes to [0, 1).
    for (let i = 0, x = 0; i < waveform.length; i++)
    {
      const v = waveform[i] / 128.0;
      const y = (v * height) / 2;
      ctx.beginPath();
      ctx.moveTo(x, half - Math.min(y, half));
      ctx.lineTo(x, half + Math.min(y, half));
      ctx.stroke();
      ctx.closePath();
      // NOTE(review): bar pitch is hard-coded to 3px; with an 800-sample
      // envelope and a 2000px-wide canvas only ~667 samples fit on screen.
      // Consider `canvas.width / waveform.length` — left as-is to preserve
      // the current rendering. (Unused `barWidth`/`width` and the dead outer
      // x/y locals from the original were removed.)
      x += 3;
    }
  }
}
// Accumulates incoming MP3 chunks until the whole file has arrived.
const mp3: any[] = [];
// Running byte count of the chunks collected so far.
let totalBytes = 0;
// Sets up a MediaSource-backed <audio> element for playback of the
// transferred MP3; the SourceBuffer is stored in state so the transfer
// callback can append data to it later.
const prepareAudio = () => {
const mediaSource = new MediaSource();
const audioElement = document.createElement('audio');
audioElement.src = URL.createObjectURL(mediaSource);
document.body.appendChild(audioElement);
mediaSource.addEventListener('sourceopen', () => {
// Create source buffer for MP3
let sourceBuffer = mediaSource.addSourceBuffer('audio/mpeg');
setSourceBuffer(sourceBuffer);
alert('sourceopen');
// Feed MP3 data to source buffer
sourceBuffer.addEventListener('updateend', () => {
// NOTE(review): endOfStream() after the first successful append finalizes
// the stream — no further appendBuffer calls are possible on this
// SourceBuffer. Confirm each file is appended in a single buffer.
if (!sourceBuffer.updating && mediaSource.readyState === 'open') {
mediaSource.endOfStream();
audioElement.play();
}
});
});
}
// Reconnects to the device, prompts for a file index, then streams that
// file's MP3 data. Chunks are buffered until the full file has arrived,
// then handed to the audio worker (waveform) and the MSE SourceBuffer
// (playback).
const test = async () => {
await jensen.reconnect();
let rst = await jensen.getDeviceInfo();
if (rst) alert(rst.sn + ' reconnected...');
else alert('what the fuck????');
let idx = prompt('请输入文件序号', '0');
if (idx === null || idx === undefined) return;
let file = files[parseInt(idx)];
if (file === null || file === undefined) return alert('文件不存在');
jensen.transferFile(file.name, file.length, (data : Uint8Array | 'fail') => {
if (data instanceof Uint8Array)
{
// Buffer the chunk; decoding starts once the whole file is present.
mp3.push(data);
totalBytes += data.length;
if (totalBytes == file.length)
{
// Reassemble the chunks at their cumulative offsets. The previous code
// used `mp3Data.set(mp3[i], i * mp3[i].length)`, which assumes every
// chunk has the same length — the final (typically shorter) chunk then
// lands at the wrong offset and corrupts the reassembled file.
let mp3Data = new Uint8Array(totalBytes);
let offset = 0;
for (let i = 0; i < mp3.length; i++)
{
mp3Data.set(mp3[i], offset);
offset += mp3[i].length;
}
// Decode + waveform analysis happen off the main thread.
audioWorker.postMessage(mp3Data);
// Hand the MP3 to MSE for playback. `sourceBuffer` is null until
// prepareAudio has run — NOTE(review): this is state captured at render
// time; confirm it is current when the transfer completes.
sourceBuffer?.appendBuffer(mp3Data);
}
}
});
}
return (
<>
<div style={{ display: 'flex', flexDirection: 'row', gap: '16px', padding: '16px', alignItems: 'center', flexWrap: 'wrap' }}>
<button onClick={connectx}>连接</button>
<button onClick={connect}>{dsn || '连接'}</button>
<button onClick={listFiles}>文件列表</button>
<button onClick={prepareAudio}>准备音频</button>
<button onClick={test}>传输与播放</button>
<button onClick={getFilePart}>获取文件</button>
<button onClick={writeSN}>SN写号</button>
<button onClick={getTime}>获取时间</button>
<button onClick={listFiles}>文件列表</button>
<button onClick={transferFile}>传输文件</button>
<button onClick={getBluetoothStatus}>蓝牙连接状态</button>
<button onClick={bluetoothScan}>蓝牙扫描</button>
......@@ -225,16 +356,18 @@ export function Home() {
<button onClick={readFilePartial}>Read File Partial</button>
<button onClick={updateDeviceTone}>更新提示音</button>
<button onClick={updateUAC}>更新UAC</button>
<button onClick={test}>测试重连</button>
</div>
<div id="files" style={{ padding: '0px 0px 0px 30px', marginBottom: '20px' }}>
<h3>Files: </h3>
<ol style={{ padding: '0px 0px 0px 30px', 'listStyle': 'none' }}>
{ files.map((item, index) => {
return <li key={item.name}>{index} - {item?.name}, {item?.length} @ {item.duration}</li>
return <li key={item.name}>{index} - {item?.name}, {item?.length} @ {item.duration} - {item.signature}</li>
})}
</ol>
</div>
<div id="waveform">
<canvas id="waveformCanvas" width="2000" height="200" style={{ border: '1px solid #ccc' }}></canvas>
</div>
<div style={{ padding: '0px 0px 0px 30px', width: '500px' }}>
<h3>Bluetooth Device List: </h3>
{
......
......@@ -12,6 +12,13 @@ export default defineConfig({
headers: {
'Cross-Origin-Embedder-Policy': 'require-corp',
'Cross-Origin-Opener-Policy': 'same-origin',
},
proxy: {
'^/ffmpeg-core/.*': {
target: 'https://unpkg.com/@ffmpeg/core@0.12.15/dist/umd',
changeOrigin: true,
rewrite: (path) => path.replace(/^\/ffmpeg-core/, '')
}
}
},
optimizeDeps: {
......@@ -25,5 +32,8 @@ export default defineConfig({
}
}
}
},
worker: {
format: 'es'
}
});
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment