Skip to content

Commit

Permalink
Merge pull request #30 from Yukiiro-Nite/feature/audio-waveform
Browse files Browse the repository at this point in the history
Feature/audio waveform
  • Loading branch information
Connor authored Jun 23, 2022
2 parents 8445ef5 + 42d426d commit 418a180
Show file tree
Hide file tree
Showing 5 changed files with 207 additions and 7 deletions.
22 changes: 17 additions & 5 deletions src/renderer/components/PlayerControls.vue
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
<script setup lang="ts">
import { onMounted, ref, computed } from 'vue';
import { WaveformRenderer } from '../waveformRenderer';
import { onMounted, ref, computed, onUnmounted } from 'vue';
import { usePlayer } from "../amethyst";
import DbMeter from "./DbMeter.vue";
Expand All @@ -8,6 +9,7 @@ const currentTime = ref("0");
const timer = ref();
const metadata = computed(() => player.state.currentlyPlayingMetadata );
const duration = computed(() => metadata.value?.format.duration || 0);
let waveformRenderer: WaveformRenderer
const handleVolumeMouseScroll = (e: WheelEvent) => {
const delta = Math.sign(e.deltaY);
Expand All @@ -27,15 +29,25 @@ onMounted(() => {
timer.value = setInterval(() => {
currentTime.value = `${player.currentTimeFormatted()} / ${player.currentDurationFormatted()}`;
}, 500);
waveformRenderer = new WaveformRenderer(player, '#waveformCanvas');
});
onUnmounted(() => {
waveformRenderer.clean();
});
</script>

<template>
<div class="flex p-1 gap-2 items-center">
<input
v-model="player.state.sound.currentTime" class="w-full " min="0" :max="duration" step="0.01"
type="range" @wheel="handleSeekMouseScroll"
>
<div class="w-full h-full relative flex">
<canvas id="waveformCanvas" class="w-full h-full absolute"></canvas>
<input
v-model="player.state.sound.currentTime" class="w-full z-10 opacity-50" min="0" :max="duration" step="0.01"
type="range" @wheel="handleSeekMouseScroll"
>
</div>
<h1 class=" whitespace-nowrap text-sm">
{{ currentTime }}
</h1>
Expand Down
7 changes: 5 additions & 2 deletions src/renderer/components/Spectrum.vue
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ const FFT_SIZE = 8192;
const VERTICAL_ZOOM_FACTOR = 1.5;
const DOWNSCALED_WIDTH = SPECTRUM_WIDTH / DOWNSCALE_FACTOR;
const DOWNSCALED_HEIGHT = SPECTRUM_HEIGHT / DOWNSCALE_FACTOR;
const defaultSpectrumColor = "#868aff";
let shouldFuckOff = false;
Expand Down Expand Up @@ -54,8 +55,10 @@ onMounted(() => {
if (canvas) {
const gradient = canvas.createLinearGradient(0, SPECTRUM_HEIGHT, 0, 0);
gradient.addColorStop(0, getComputedStyle(document.documentElement).getPropertyValue("--color-primary"));
gradient.addColorStop(1, getComputedStyle(document.documentElement).getPropertyValue("--color-secondary"));
const primaryColor = getComputedStyle(document.documentElement).getPropertyValue("--color-primary") || defaultSpectrumColor;
const secondaryColor = getComputedStyle(document.documentElement).getPropertyValue("--color-secondary") || defaultSpectrumColor;
gradient.addColorStop(0, primaryColor);
gradient.addColorStop(1, secondaryColor);
canvas.fillStyle = gradient;
canvas.imageSmoothingEnabled = false;
return canvas;
Expand Down
1 change: 1 addition & 0 deletions src/renderer/player.ts
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,7 @@ export default class Player {
private events = mitt<typeof Events>();
private emit = this.events.emit;
public on = this.events.on;
public off = this.events.off;

public state = reactive({
sound: new Audio(),
Expand Down
51 changes: 51 additions & 0 deletions src/renderer/waveformRenderWorker.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,51 @@
// Set by the message handler to request that an in-progress render abort.
let stopRender = false;

/**
 * Draws the waveform for `audioData` onto the given (offscreen) canvas and
 * returns the rendered frame as an ImageBitmap so it can be posted back to
 * the main thread.
 *
 * @param canvas    The OffscreenCanvas transferred from the main thread
 *                  (typed HTMLCanvasElement because this toolchain's lib
 *                  typings for OffscreenCanvas are incomplete — see @ts-ignore).
 * @param audioData Single-channel PCM samples, assumed in [-1, 1] (AudioBuffer
 *                  channel data).
 * @returns The rendered ImageBitmap, or undefined when no 2d context is
 *          available or a stop was requested.
 */
function render(canvas: HTMLCanvasElement, audioData: Float32Array): ImageBitmap | undefined {
  const ctx = canvas.getContext('2d');

  if (!ctx) return;

  ctx.clearRect(0, 0, canvas.width, canvas.height);

  const gradient = ctx.createLinearGradient(0, 0, 0, canvas.height);
  gradient.addColorStop(0, '#e08eff');
  gradient.addColorStop(0.5, '#868aff');
  gradient.addColorStop(1, '#e08eff');

  ctx.strokeStyle = gradient;
  ctx.lineWidth = 1;
  ctx.beginPath();
  ctx.moveTo(0, canvas.height / 2);

  // BUG FIX: the original condition was `i < audioData.length || stopRender`,
  // which loops forever once `stopRender` becomes true. The intent is to walk
  // the samples only while a stop has NOT been requested.
  for (let i = 0; i < audioData.length && !stopRender; i++) {
    // Map sample value [-1, 1] -> y pixel, sample index -> x pixel.
    const y = (audioData[i] * (canvas.height / 2)) + (canvas.height / 2);
    const x = (i / audioData.length) * canvas.width;
    ctx.lineTo(x, y);
  }

  if (stopRender) return;

  ctx.stroke();

  // Need to create an image from the OffscreenCanvas to send it back to the main process.
  // https://developer.mozilla.org/en-US/docs/Web/API/OffScreenCanvas/transferToImageBitmap
  // @ts-ignore
  const img: ImageBitmap = canvas.transferToImageBitmap();
  return img;
}

// Worker entry point: receives the transferred offscreen canvas plus one
// channel of samples, renders the waveform, and posts the resulting
// ImageBitmap back to the main thread. A message with `stop: true` only
// flags an abort and renders nothing.
// NOTE: kept as a `function` (not an arrow) so `this` is the worker's
// global scope when calling postMessage.
onmessage = function (e: MessageEvent<{ canvas: HTMLCanvasElement, audioData: Float32Array, stop: boolean }>) {
  const { canvas, audioData, stop } = e.data;

  stopRender = stop;
  if (stopRender) return;

  const bitmap = render(canvas, audioData);

  if (bitmap) {
    this.postMessage(bitmap);
  }
};
133 changes: 133 additions & 0 deletions src/renderer/waveformRenderer.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,133 @@
import Player from "./player";

/**
 * Renders the waveform of the currently playing track into a canvas.
 *
 * Subscribes to the player's 'play' event, decodes the track's audio into an
 * AudioBuffer through an OfflineAudioContext, then hands the samples to a
 * web worker that draws the waveform off the main thread and posts back an
 * ImageBitmap.
 */
export class WaveformRenderer {
  public canvas: HTMLCanvasElement;

  private player: Player;
  private audioBuffer: AudioBuffer | null;
  private audioCtx: AudioContext;
  private currentWorker: Worker | null;

  constructor (player: Player, canvasSelector: string) {
    this.player = player;
    // Fall back to a detached canvas so the renderer never crashes when the
    // selector matches nothing (e.g. the component has not mounted yet).
    this.canvas = document.querySelector(canvasSelector) || document.createElement("canvas");
    this.audioCtx = new AudioContext();
    this.audioBuffer = null;
    this.currentWorker = null;

    this.player.on('play', this.handlePlayAudio);
  }

  // 'play' handler: decode the newly playing track and kick off a render.
  private handlePlayAudio = async () => {
    const currentSound = this.player.state.sound;
    await this.waitForLoadedData(currentSound);

    // Another track started while we were waiting; abandon this render.
    if (currentSound != this.player.state.sound) return;

    const channels = this.player.state.source?.channelCount ?? 1;
    const duration = this.player.state.currentlyPlayingMetadata?.format.duration ?? 1;
    const sampleRate = this.player.state.currentlyPlayingMetadata?.format.sampleRate ?? 1;

    const offlineAudioCtx = new OfflineAudioContext({
      numberOfChannels: channels,
      length: duration * sampleRate,
      sampleRate
    });

    const tempBuffer = await this.fetchAudioBuffer(this.player.state.sound.src, offlineAudioCtx);

    if (currentSound != this.player.state.sound) return;

    this.audioBuffer = tempBuffer;
    this.renderWaveform();
  };

  // Sizes the canvas bitmap to its parent's layout box so the drawing is not
  // stretched by CSS scaling.
  private setCanvasSize = () => {
    const parent = this.canvas.parentElement;
    if (parent) {
      const { width, height } = parent.getBoundingClientRect();
      this.canvas.width = width;
      this.canvas.height = height;
    }
  };

  // Resolves once the audio element has data for its current source.
  private waitForLoadedData = async (audio: HTMLAudioElement) => {
    // BUG FIX: if 'loadeddata' already fired before we subscribed, the
    // original promise never resolved and the waveform never rendered.
    // readyState >= HAVE_CURRENT_DATA means the event has already passed.
    if (audio.readyState >= HTMLMediaElement.HAVE_CURRENT_DATA) return;

    await new Promise<void>((resolve) => {
      const resolver = () => {
        audio.removeEventListener('loadeddata', resolver);
        resolve();
      };
      audio.addEventListener('loadeddata', resolver);
    });
  };

  // Downloads `src`, decodes it, and renders it through `offlineAudioCtx`,
  // resolving with the fully decoded AudioBuffer. Rejects on network,
  // decode, or rendering failure.
  private fetchAudioBuffer = (src: string, offlineAudioCtx: OfflineAudioContext): Promise<AudioBuffer> => {
    const source = offlineAudioCtx.createBufferSource();
    const request = new XMLHttpRequest();

    request.open('GET', src, true);
    request.responseType = 'arraybuffer';

    return new Promise<AudioBuffer>((resolve, reject) => {
      request.onload = () => {
        const audioData = request.response;

        this.audioCtx.decodeAudioData(audioData)
          .then((buffer: AudioBuffer) => {
            source.buffer = buffer;
            source.connect(offlineAudioCtx.destination);
            source.start();

            offlineAudioCtx.startRendering()
              .then(resolve)
              .catch(reject);
          })
          // BUG FIX: a failed decode used to leave this promise pending forever.
          .catch(reject);
      };

      // BUG FIX: a network failure used to leave this promise pending forever.
      request.onerror = () => reject(new Error(`Failed to fetch audio from ${src}`));

      request.send();
    });
  };

  // Draws the current audioBuffer's waveform by delegating to a worker.
  private renderWaveform = () => {
    // Moved ahead of canvas/offscreen creation: no point transferring an
    // offscreen canvas when there is nothing to draw.
    if (this.audioBuffer === null) return;

    this.setCanvasSize();
    const ctx = this.canvas.getContext('2d');
    ctx?.clearRect(0, 0, this.canvas.width, this.canvas.height);

    const backCanvas = document.createElement('canvas');
    backCanvas.width = this.canvas.width;
    backCanvas.height = this.canvas.height;

    // Electron 18.0.3 is using Chrome 100.0.4896.75
    // https://www.electronjs.org/releases/stable?version=18&page=2#18.0.3
    //
    // transferControlToOffscreen has been available since Chrome 69
    // https://developer.mozilla.org/en-US/docs/Web/API/HTMLCanvasElement/transferControlToOffscreen#browser_compatibility
    // @ts-ignore
    const offscreen: OffscreenCanvas = backCanvas.transferControlToOffscreen();

    const audioData = this.audioBuffer.getChannelData(0);

    // Abort any render still in flight before starting a new one.
    if (this.currentWorker !== null) {
      this.currentWorker.postMessage({ stop: true });
      this.currentWorker.terminate();
    }

    this.currentWorker = new Worker("waveformRenderWorker.ts");
    this.currentWorker.onmessage = (msg) => {
      ctx?.clearRect(0, 0, this.canvas.width, this.canvas.height);
      ctx?.drawImage(msg.data, 0, 0);
      this.currentWorker = null;
    };
    this.currentWorker.postMessage({ canvas: offscreen, audioData }, [offscreen]);
  };

  // Detaches from the player and stops any in-flight render; call on unmount.
  public clean = () => {
    this.player.off('play', this.handlePlayAudio);

    // BUG FIX: a worker still rendering at teardown was previously leaked.
    if (this.currentWorker !== null) {
      this.currentWorker.terminate();
      this.currentWorker = null;
    }
  };
}

0 comments on commit 418a180

Please sign in to comment.