From 35a90ec979867f368b73b0435b2f0393fed6a2f7 Mon Sep 17 00:00:00 2001
From: Hayk Martiros
Date: Fri, 25 Nov 2022 21:17:29 -0800
Subject: [PATCH] Minor improvements to viz and params

---
 components/HeightMapImage.tsx    |  2 +-
 components/SpectrogramViewer.tsx | 14 +++++++------
 pages/index.tsx                  | 35 ++++++++++++++++++++++----------
 types.ts                         |  5 +++--
 4 files changed, 36 insertions(+), 20 deletions(-)

diff --git a/components/HeightMapImage.tsx b/components/HeightMapImage.tsx
index 983effc..8f9338c 100644
--- a/components/HeightMapImage.tsx
+++ b/components/HeightMapImage.tsx
@@ -38,7 +38,7 @@ export default function HeightMapImage(props: HeightMapImageProps) {
         // Feed the heightmap
         bumpTexture: { value: heightMap },
         // Feed the scaling constant for the heightmap
-        bumpScale: { value: -0.2 },
+        bumpScale: { value: -0.5 },
         // Feed the texture map
         terrainTexture: { value: textureMap },
       }}
diff --git a/components/SpectrogramViewer.tsx b/components/SpectrogramViewer.tsx
index a357a39..49fc17c 100644
--- a/components/SpectrogramViewer.tsx
+++ b/components/SpectrogramViewer.tsx
@@ -39,15 +39,17 @@ export default function SpectrogramViewer({
     playheadRef.current.position.y = camera.position.y;
   });
 
+  const playbarShift = 3.6; // [m]
+
   return (
       {inferenceResults.map((value: InferenceResult, index: number) => {
-        const height = audioLength * (-0.48 - value.counter);
+        const position = audioLength * (-0.53 - value.counter) + playbarShift;
 
         if (use_height_map) {
           return (
@@ -69,10 +71,10 @@
-
+
 
-
+
diff --git a/pages/index.tsx b/pages/index.tsx
index a134bfa..8c435d3 100644
--- a/pages/index.tsx
+++ b/pages/index.tsx
@@ -16,7 +16,7 @@ const SERVER_URL = "http://129.146.52.68:3013/run_inference/";
 const defaultPromptInputs = [
   { prompt: "A jazz pianist playing a classical concerto" },
   { prompt: "Country singer and a techno DJ" },
-  { prompt: "A typewriter in they style of K-Pop" },
+  { prompt: "A typewriter in the style of K-Pop" },
   { prompt: "lo-fi beat for the holidays" },
   { prompt: "" },
   { prompt: "" },
@@ -264,15 +264,28 @@ export default function Home() {
 
   const transitioning = appState == AppState.Transition;
 
+  const denoising = 0.85;
+  const guidance = 7.0;
+  const numInferenceSteps = 50;
+  const seedImageId = 0;
+  const maskImageId = null;
+
   const inferenceInput = {
     alpha: alpha,
+    num_inference_steps: numInferenceSteps,
+    seed_image_id: seedImageId,
+    mask_image_id: maskImageId,
     start: {
       prompt: startPrompt,
       seed: seed,
+      denoising: denoising,
+      guidance: guidance,
     },
     end: {
       prompt: transitioning ? endPrompt : startPrompt,
       seed: transitioning ? seed : seed + 1,
+      denoising: denoising,
+      guidance: guidance,
     },
   };
 
@@ -324,6 +337,8 @@ export default function Home() {
     });
   };
 
+  // Run inference on a timer.
+  // TODO(hayk): Improve the strategy here.
   useInterval(() => {
     console.log(inferenceResults);
     if (inferenceResults.length < maxNumInferenceResults) {
     }
   }, timeout);
 
-  // Run inference on a timer.
-  // TODO(hayk): Improve the timing here.
   // TODO(hayk): Fix warning about effects.
   useEffect(() => {
     runInference(alpha, seed, appState, promptInputs);
   }, [
@@ -351,14 +364,14 @@
         <div
-          {tonePlayer && (
-              Tone.Transport.seconds}
-              audioLength={tonePlayer.sampleTime * tonePlayer.buffer.length}
-              inferenceResults={inferenceResults}
-            />
-          )}
+            Tone.Transport.seconds}
+            audioLength={
+              tonePlayer ? tonePlayer.sampleTime * tonePlayer.buffer.length : 0
+            }
+            inferenceResults={inferenceResults}
+          />
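Note: the types.ts hunk is not shown in this excerpt. As a rough sketch only, the
request body assembled in pages/index.tsx after this patch (and presumably posted
to SERVER_URL) implies a shape along these lines. The field names come from the
keys in the diff above; the interface names and exact typings are assumptions, not
the actual contents of types.ts:

// Illustrative sketch, not the real types.ts: field names are taken from the diff,
// but interface names and typings are assumptions.
interface PromptSettings {
  prompt: string;
  seed: number;
  denoising: number;
  guidance: number;
}

interface InferenceInput {
  alpha: number;
  num_inference_steps: number;
  seed_image_id: number;
  mask_image_id: number | null;
  start: PromptSettings;
  end: PromptSettings;
}

// Example payload using the defaults introduced above
// (alpha, prompts, and seeds are arbitrary illustrative values).
const example: InferenceInput = {
  alpha: 0.75,
  num_inference_steps: 50,
  seed_image_id: 0,
  mask_image_id: null,
  start: {
    prompt: "A jazz pianist playing a classical concerto",
    seed: 3,
    denoising: 0.85,
    guidance: 7.0,
  },
  end: {
    prompt: "Country singer and a techno DJ",
    seed: 4,
    denoising: 0.85,
    guidance: 7.0,
  },
};

The practical effect of the pages/index.tsx change is that denoising, guidance,
num_inference_steps, seed_image_id, and mask_image_id are now set explicitly on the
client and included in every inference request rather than omitted from the payload.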