Render audio and save fixture configs

newcat committed Mar 28, 2024
1 parent 6539ea9 commit 41181e0
Showing 5 changed files with 239 additions and 88 deletions.
1 change: 1 addition & 0 deletions package.json
@@ -34,6 +34,7 @@
"fflate": "^0.8.2",
"fft.js": "^4.0.4",
"jszip": "^3.10.1",
"lamejs": "^1.2.1",
"msgpackr": "^1.10.1",
"open-simplex-noise": "^2.5.0",
"pinia": "^2.1.7",
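lamejs is a pure-JavaScript port of the LAME MP3 encoder; the new src/renderer.ts below uses it to encode the offline-rendered audio track.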
85 changes: 28 additions & 57 deletions src/components/RenderDialog.vue
@@ -1,6 +1,7 @@
<template>
<Dialog :visible="showDialog" :style="{ width: '50vw' }" header="Rendering" modal>
<ProgressBar class="render-bar" :value="progress" />
<p>{{ step }}</p>
<ProgressBar class="render-bar" :mode="progress < 0 ? 'indeterminate' : 'determinate'" :value="progress" />

<template #footer>
<Button @click="cancel">Cancel</Button>
@@ -10,88 +11,59 @@

<script setup lang="ts">
import { ref } from "vue";
import { pack } from "msgpackr";
import { gzipSync } from "fflate";
import Dialog from "primevue/dialog";
import ProgressBar from "primevue/progressbar";
import Button from "primevue/button";
import { useToast } from "primevue/usetoast";
import { BaseTimelineProcessor } from "@/timeline";
import { useGlobalState } from "@/globalState";
import { TICKS_PER_BEAT } from "@/constants";
import { useStage } from "@/stage";
import { unitToSeconds } from "@/utils";
import { getNativeAdapter } from "@/native";
import { Renderer } from "@/renderer";
const globalState = useGlobalState();
const toast = useToast();
const stage = useStage();
const nativeAdapter = getNativeAdapter();
const showDialog = ref(false);
const cancelRequest = ref(false);
const step = ref("");
const progress = ref(0);
interface RenderResult {
timestamps: number[];
fixtureValues: Record<string, unknown[]>;
}
let renderer: Renderer | null = null;
async function startRender() {
showDialog.value = true;
const maxUnit = globalState.timeline.items.reduce((max, item) => Math.max(max, item.end), 0);
const processor = new BaseTimelineProcessor();
const token = Symbol();
progress.value = 0;
let nextFrameTime = 0;
const result: RenderResult = { timestamps: [], fixtureValues: {} };
showDialog.value = true;
stage.visualization.pause();
for (let unit = 0; unit <= maxUnit; unit++) {
if (cancelRequest.value) {
break;
}
try {
await processor.process(unit);
} catch (err) {
console.error(err);
toast.add({ severity: "error", summary: "Rendering Error", detail: err instanceof Error ? err.message : String(err) });
cancelRequest.value = true;
break;
}
const secondsPerFrame = 1 / globalState.fps;
const nextTimestamp = unitToSeconds(unit + 1, globalState.bpm);
if (nextTimestamp > nextFrameTime) {
result.timestamps.push(nextFrameTime);
nextFrameTime += secondsPerFrame;
for (const [fixtureId, fixture] of stage.fixtures.entries()) {
if (!result.fixtureValues[fixtureId]) {
result.fixtureValues[fixtureId] = [];
}
result.fixtureValues[fixtureId].push(fixture.value);
}
}
progress.value = Math.floor((unit / maxUnit) * 100);
if (unit % TICKS_PER_BEAT === 0) {
await new Promise((res) => setTimeout(res, 0));
}
renderer = new Renderer();
renderer.events.stepChanged.subscribe(token, (value) => {
step.value = value;
});
renderer.events.progress.subscribe(token, (value) => {
progress.value = value;
});
let result: Uint8Array | null = null;
try {
result = await renderer.startRender();
} catch (err) {
console.error(err);
toast.add({ severity: "error", summary: "Rendering Error", detail: err instanceof Error ? err.message : String(err) });
}
progress.value = 100;
showDialog.value = false;
stage.visualization.resume();
renderer.events.stepChanged.unsubscribe(token);
renderer.events.progress.unsubscribe(token);
renderer = null;
if (cancelRequest.value) {
cancelRequest.value = false;
showDialog.value = false;
if (result === null) {
return;
}
const data = gzipSync(pack(result));
await nativeAdapter.chooseAndWriteFile(data, {
await nativeAdapter.chooseAndWriteFile(result, {
suggestedName: "render.lmr",
accept: [
{
@@ -100,11 +72,10 @@ async function startRender() {
},
],
});
showDialog.value = false;
}
function cancel() {
cancelRequest.value = true;
renderer?.cancelRender();
}
defineExpose({ startRender });
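The dialog now delegates the actual work to the new Renderer class and only mirrors its events into local refs. A minimal sketch of that subscribe/render/unsubscribe pattern, using the event API exactly as it appears in this commit (renderWithProgress is a hypothetical helper, not part of the change):

import { Renderer } from "@/renderer";

async function renderWithProgress(
    onStep: (step: string) => void,
    onProgress: (percent: number) => void,
): Promise<Uint8Array | null> {
    const renderer = new Renderer();
    // an arbitrary token identifies the subscriptions so they can be removed again
    const token = Symbol();
    renderer.events.stepChanged.subscribe(token, onStep);
    renderer.events.progress.subscribe(token, onProgress);
    try {
        // resolves to gzipped msgpack bytes, or null when the render was cancelled
        return await renderer.startRender();
    } finally {
        renderer.events.stepChanged.unsubscribe(token);
        renderer.events.progress.unsubscribe(token);
    }
}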
159 changes: 159 additions & 0 deletions src/renderer.ts
@@ -0,0 +1,159 @@
import { pack } from "msgpackr";
import { gzipSync } from "fflate";
// @ts-expect-error lamejs package does not have types
import { Mp3Encoder } from "lamejs";
import { BaklavaEvent } from "@baklavajs/events";
import { useGlobalState } from "./globalState";
import { FixtureState, useStage } from "./stage";
import { BaseTimelineProcessor } from "./timeline";
import { unitToSeconds } from "./utils";
import { LibraryItemType } from "./library";
import { AudioLibraryItem } from "./audio";
import { TICKS_PER_BEAT } from "./constants";

// https://github.com/zhuker/lamejs/issues/86
// @ts-expect-error lamejs package does not have types
import MPEGMode from "lamejs/src/js/MPEGMode";
// @ts-expect-error lamejs package does not have types
import Lame from "lamejs/src/js/Lame";
// @ts-expect-error lamejs package does not have types
import BitStream from "lamejs/src/js/BitStream";
(window as any).MPEGMode = MPEGMode;
(window as any).Lame = Lame;
(window as any).BitStream = BitStream;
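// (these assignments work around lamejs 1.2.1 resolving MPEGMode, Lame and
// BitStream as implicit globals at encode time; see the issue linked above)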

export interface RenderResult {
audio: Uint8Array;
fixtures: FixtureState[];
timestamps: number[];
fixtureValues: Record<string, unknown[]>;
}

export class Renderer {
public readonly events = {
stepChanged: new BaklavaEvent<string, undefined>(undefined),
progress: new BaklavaEvent<number, undefined>(undefined),
};

private readonly globalState = useGlobalState();
private readonly stage = useStage();

private cancelRequest = false;

public async startRender(): Promise<Uint8Array | null> {
const maxUnit = this.globalState.timeline.items.reduce((max, item) => Math.max(max, item.end), 0);
const processor = new BaseTimelineProcessor();
let nextFrameTime = 0;
const timestamps: number[] = [];
const fixtureValues: Record<string, unknown[]> = {};

this.stage.visualization.pause();

this.events.progress.emit(0);
this.events.stepChanged.emit("Rendering project");
for (let unit = 0; unit <= maxUnit; unit++) {
if (this.cancelRequest) {
break;
}
await processor.process(unit);

const secondsPerFrame = 1 / this.globalState.fps;
const nextTimestamp = unitToSeconds(unit + 1, this.globalState.bpm);
if (nextTimestamp > nextFrameTime) {
timestamps.push(nextFrameTime);
nextFrameTime += secondsPerFrame;
for (const [fixtureId, fixture] of this.stage.fixtures.entries()) {
if (!fixtureValues[fixtureId]) {
fixtureValues[fixtureId] = [];
}
fixtureValues[fixtureId].push(fixture.value);
}
}

this.events.progress.emit(Math.floor((unit / maxUnit) * 100));
if (unit % TICKS_PER_BEAT === 0) {
await new Promise((res) => setTimeout(res, 0));
}
}

this.events.progress.emit(100);

if (this.cancelRequest) {
this.cancelRequest = false;
return null;
}

const audio = await this.renderAudio(maxUnit);
const result: RenderResult = {
audio,
fixtures: this.stage.save().fixtures,
timestamps,
fixtureValues,
};

return gzipSync(pack(result));
}

public cancelRender() {
this.cancelRequest = true;
}

private async renderAudio(maxUnit: number) {
this.events.stepChanged.emit("Rendering audio");
this.events.progress.emit(-1);

const SAMPLE_RATE = 44100;
const length = unitToSeconds(maxUnit, this.globalState.bpm) * SAMPLE_RATE;
const ctx = new OfflineAudioContext({
length,
sampleRate: SAMPLE_RATE,
numberOfChannels: 2,
});
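        // the offline context renders as fast as possible into an in-memory buffer
        // instead of playing through the audio hardware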
for (const item of this.globalState.timeline.items) {
if (item.libraryItem.type !== LibraryItemType.AUDIO) {
continue;
}

const source = ctx.createBufferSource();
source.buffer = (item.libraryItem as AudioLibraryItem).audioBuffer;
source.connect(ctx.destination);
const startTime = unitToSeconds(item.start, this.globalState.bpm);
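// a non-positive startTime means the item begins before the render origin,
// so start playback immediately, offset into the buffer by -startTime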
if (startTime > 0) {
source.start(startTime);
} else {
source.start(0, -startTime);
}
}

const buffer = await ctx.startRendering();

this.events.stepChanged.emit("Encoding audio");
this.events.progress.emit(0);

// stereo, 44.1 kHz, encode at 256 kbps
const encoder = new Mp3Encoder(2, 44100, 256);
// can be anything, but a multiple of 576 makes the encoder's life easier
const sampleBlockSize = 10 * 576;

const mp3Data: Int8Array[] = [];
const left = buffer.getChannelData(0);
const right = buffer.getChannelData(1);
for (let i = 0; i < left.length; i += sampleBlockSize) {
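// scale [-1, 1] float samples to 16-bit signed PCM for the encoder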
const leftChunk = new Int16Array(left.subarray(i, i + sampleBlockSize).map((v) => v * 32767));
const rightChunk = new Int16Array(right.subarray(i, i + sampleBlockSize).map((v) => v * 32767));
const mp3buf = encoder.encodeBuffer(leftChunk, rightChunk);
if (mp3buf.length > 0) {
mp3Data.push(mp3buf);
}

this.events.progress.emit(Math.floor((i / left.length) * 100));
await new Promise((res) => setTimeout(res, 0));
}
const mp3buf = encoder.flush();
if (mp3buf.length > 0) {
mp3Data.push(mp3buf);
}

// concatenate the encoded chunks into a single byte buffer
const totalLength = mp3Data.reduce((sum, chunk) => sum + chunk.length, 0);
const mp3 = new Uint8Array(totalLength);
let offset = 0;
for (const chunk of mp3Data) {
mp3.set(chunk, offset);
offset += chunk.length;
}
return mp3;
}
}
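Since startRender() returns gzipSync(pack(result)), reading an .lmr file back is just the inverse transform. A sketch of a decoder, assuming the bytes come straight from the file (decodeLmr is hypothetical, not part of this commit):

import { unpack } from "msgpackr";
import { gunzipSync } from "fflate";
import type { RenderResult } from "./renderer";

function decodeLmr(data: Uint8Array): RenderResult {
    // inverse of gzipSync(pack(result)) in Renderer.startRender()
    return unpack(gunzipSync(data)) as RenderResult;
}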
65 changes: 34 additions & 31 deletions src/timeline/baseTimelineProcessor.ts
@@ -39,40 +39,43 @@ export class BaseTimelineProcessor {
/** maps trackId -> value */
const trackValues = this.getTrackValues(currentActiveItems, unit);

let timeDomainData = new Float32Array(FFT_SIZE);
let frequencyData = new Float32Array(FFT_SIZE);
// TODO: handle multiple audio items?
const audioItem = currentActiveItems.find((i) => this.isType(i, LibraryItemType.AUDIO));
if (audioItem) {
const audioData = this.getAudioData(audioItem.libraryItem as AudioLibraryItem, audioItem.start, unit);
if (audioData) {
timeDomainData = audioData.timeDomainData;
frequencyData = audioData.frequencyData;
}
}

const uncontrolledFixtures = new Set(this.stage.fixtures.values()) as Set<BaseFixture>;
const calculationData: Omit<ICalculationData, "relativeTrackItemProgress"> = {
resolution: this.globalState.resolution,
fps: this.globalState.fps,
position: unit,
sampleRate: AudioLibraryItem.sampleRate,
timeDomainData: timeDomainData,
frequencyData: frequencyData,
trackValues,
};
const graphs = currentActiveItems.filter((i) => this.isType(i, LibraryItemType.GRAPH));
for (const g of graphs) {
try {
const relativeTrackItemProgress = (unit - g.start) / (g.end - g.start);
const results = await this.processGraph(g, unit, { ...calculationData, relativeTrackItemProgress });
if (g.libraryItem.error) {
g.libraryItem.error = "";
if (graphs.length > 0) {
let timeDomainData = new Float32Array(FFT_SIZE);
let frequencyData = new Float32Array(FFT_SIZE);
// TODO: handle multiple audio items?
const audioItem = currentActiveItems.find((i) => this.isType(i, LibraryItemType.AUDIO));
if (audioItem) {
const audioData = this.getAudioData(audioItem.libraryItem as AudioLibraryItem, audioItem.start, unit);
if (audioData) {
timeDomainData = audioData.timeDomainData;
frequencyData = audioData.frequencyData;
}
}

const calculationData: Omit<ICalculationData, "relativeTrackItemProgress"> = {
resolution: this.globalState.resolution,
fps: this.globalState.fps,
position: unit,
sampleRate: AudioLibraryItem.sampleRate,
timeDomainData: timeDomainData,
frequencyData: frequencyData,
trackValues,
};

for (const g of graphs) {
try {
const relativeTrackItemProgress = (unit - g.start) / (g.end - g.start);
const results = await this.processGraph(g, unit, { ...calculationData, relativeTrackItemProgress });
if (g.libraryItem.error) {
g.libraryItem.error = "";
}
this.applyGraphResults(results, uncontrolledFixtures);
} catch (err) {
console.error(err);
g.libraryItem.error = String(err);
}
this.applyGraphResults(results, uncontrolledFixtures);
} catch (err) {
console.error(err);
g.libraryItem.error = String(err);
}
}
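Net effect of this hunk: the audio analysis buffers (timeDomainData, frequencyData) and the per-tick calculation data are now only computed when at least one graph item is active, so ticks without graphs skip the audio lookup entirely.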

[diff for the fifth changed file not loaded]
