From 37bdf4fe61f6ba30d8f836309c3f8aa7515d822b Mon Sep 17 00:00:00 2001
From: Hayk Martiros
Date: Tue, 13 Dec 2022 21:10:43 -0800
Subject: [PATCH] Fix alpha > 1 issue
---
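This change replaces the old "let alpha exceed 1, then subtract 1 in a
follow-up effect" pattern with an explicit alphaRollover flag. A condensed
sketch of the resulting flow, written as a hypothetical standalone hook for
illustration only (the hook name and signature are not in this patch; the real
logic lives inline in pages/index.tsx and components/ModelInference.tsx):

import { useCallback, useEffect, useState } from "react";

// Hypothetical helper mirroring the patched logic: alpha advances by
// alphaVelocity on every new inference result, and when it would pass 1 it
// wraps back into [0, 1] and raises a rollover flag instead of growing past 1.
function useAlphaRollover(alphaVelocity: number, onRollover: () => void) {
  const [alpha, setAlpha] = useState(0.0);
  const [alphaRollover, setAlphaRollover] = useState(false);

  // Called whenever a new inference result arrives.
  const advanceAlpha = useCallback(() => {
    let newAlpha = alpha + alphaVelocity;
    if (newAlpha > 1 + 1e-3) {
      newAlpha = newAlpha - 1;
      setAlphaRollover(true);
    }
    setAlpha(newAlpha);
  }, [alpha, alphaVelocity]);

  // A separate effect consumes the flag exactly once (in the app it rotates
  // the prompt pair), then clears it.
  useEffect(() => {
    if (!alphaRollover) {
      return;
    }
    setAlphaRollover(false);
    onRollover();
  }, [alphaRollover, onRollover]);

  return { alpha, alphaRollover, advanceAlpha };
}

Gating ModelInference on alphaRollover means no request is issued against the
wrapped alpha until the prompt pair has been rotated to match it.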
 components/ModelInference.tsx |  8 +++++
 components/Share.tsx          |  2 +-
 pages/about.tsx               | 12 +++----
 pages/index.tsx               | 59 ++++++++++++++++++++---------------
 4 files changed, 48 insertions(+), 33 deletions(-)
diff --git a/components/ModelInference.tsx b/components/ModelInference.tsx
index 297d79b..feb72f0 100644
--- a/components/ModelInference.tsx
+++ b/components/ModelInference.tsx
@@ -10,6 +10,7 @@ import {
 interface ModelInferenceProps {
   alpha: number;
+  alphaRollover: boolean;
   seed: number;
   appState: AppState;
   promptInputs: PromptInput[];
@@ -27,6 +28,7 @@ interface ModelInferenceProps {
  */
 export default function ModelInference({
   alpha,
+  alphaRollover,
   seed,
   appState,
   promptInputs,
@@ -176,6 +178,11 @@ export default function ModelInference({
       return;
     }
+    // Wait for alpha rollover to resolve.
+    if (alphaRollover) {
+      return;
+    }
+
     if (numRequestsMade == 0) {
       // Kick off the first request
       runInference(alpha, seed, appState, promptInputs);
@@ -193,6 +200,7 @@ export default function ModelInference({
   }, [
     initializedUrlParams,
     alpha,
+    alphaRollover,
     seed,
     appState,
     promptInputs,
diff --git a/components/Share.tsx b/components/Share.tsx
index 1e051f9..e8ff9bf 100644
--- a/components/Share.tsx
+++ b/components/Share.tsx
@@ -266,4 +266,4 @@ export default function Share({
>
);
-}
\ No newline at end of file
+}
diff --git a/pages/about.tsx b/pages/about.tsx
index a45d124..fba0654 100644
--- a/pages/about.tsx
+++ b/pages/about.tsx
@@ -395,15 +395,15 @@ export default function Home() {
The app communicates over an API to run the inference calls on a GPU
- server. We used{" "}
- Truss{" "}
- to package the model and test it locally before
- deploying it to Baseten which provided GPU-backed inference, auto-scaling,
- and observability. We used NVIDIA A10Gs in production.
+ server. We used Truss to
+ package the model and test it locally before deploying it to Baseten
+ which provided GPU-backed inference, auto-scaling, and
+ observability. We used NVIDIA A10Gs in production.
If you have a GPU powerful enough to generate stable diffusion
- results in under five seconds, you can run the experience locally.
+ results in under five seconds, you can run the experience locally
+ using our test flask server.
Code
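The API hop described above amounts to the web app posting interpolation
parameters to whichever inference server is configured, hosted or local. The
sketch below is illustrative only; the request fields, endpoint, and error
handling are assumptions rather than code from this repository:

// Illustrative sketch of a JSON POST to an inference server. The field names
// and serverUrl are assumptions for the example, not taken from this patch.
interface InferenceRequestSketch {
  alpha: number;
  seed: number;
  startPrompt: string;
  endPrompt: string;
}

async function requestInference(
  serverUrl: string,
  body: InferenceRequestSketch
): Promise<unknown> {
  const response = await fetch(serverUrl, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify(body),
  });
  if (!response.ok) {
    throw new Error(`Inference server returned ${response.status}`);
  }
  return response.json();
}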
diff --git a/pages/index.tsx b/pages/index.tsx
index 2eae40a..51f3577 100644
--- a/pages/index.tsx
+++ b/pages/index.tsx
@@ -1,5 +1,5 @@
 import { useRouter } from "next/router";
-import { useEffect, useState } from "react";
+import { useCallback, useEffect, useState } from "react";
 import * as Tone from "tone";
 import AudioPlayer from "../components/AudioPlayer";
@@ -41,6 +41,7 @@ export default function Home() {
   // Current interpolation parameters
   const [alpha, setAlpha] = useState(0.0);
+  const [alphaRollover, setAlphaRollover] = useState(false);
   const [alphaVelocity, setAlphaVelocity] = useState(0.25);
   const [seed, setSeed] = useState(getRandomInt(1000000));
@@ -95,9 +96,10 @@ export default function Home() {
   // Set the app state based on the prompt inputs array
   useEffect(() => {
-    if (alpha <= 1) {
+    if (!alphaRollover) {
       return;
     }
+    setAlphaRollover(false);
     const upNextPrompt = promptInputs[promptInputs.length - 1].prompt;
     const endPrompt = promptInputs[promptInputs.length - 2].prompt;
@@ -134,9 +136,7 @@ export default function Home() {
     if (newAppState != appState) {
       setAppState(newAppState);
     }
-
-    setAlpha(alpha - 1);
-  }, [promptInputs, alpha, appState, seed]);
+  }, [promptInputs, alpha, alphaRollover, appState, seed]);
   // On any app state change, reset alpha
   useEffect(() => {
@@ -150,26 +150,33 @@ export default function Home() {
   }, [appState]);
   // What to do when a new inference result is available
-  const newResultCallback = (
-    input: InferenceInput,
-    result: InferenceResult
-  ) => {
-    setInferenceResults((prevResults: InferenceResult[]) => {
-      const maxResultCounter = Math.max(...prevResults.map((r) => r.counter));
+  const newResultCallback = useCallback(
+    (input: InferenceInput, result: InferenceResult) => {
+      setInferenceResults((prevResults: InferenceResult[]) => {
+        const maxResultCounter = Math.max(...prevResults.map((r) => r.counter));
-      const lastResult = prevResults.find((r) => r.counter == maxResultCounter);
+        const lastResult = prevResults.find(
+          (r) => r.counter == maxResultCounter
+        );
-      const newCounter = lastResult ? lastResult.counter + 1 : 0;
+        const newCounter = lastResult ? lastResult.counter + 1 : 0;
-      result.counter = newCounter;
-      result.input = input;
-      result.played = false;
+        result.counter = newCounter;
+        result.input = input;
+        result.played = false;
-      setAlpha(alpha + alphaVelocity);
+        let newAlpha = alpha + alphaVelocity;
+        if (newAlpha > 1 + 1e-3) {
+          newAlpha = newAlpha - 1;
+          setAlphaRollover(true);
+        }
+        setAlpha(newAlpha);
-      return [...prevResults, result];
-    });
-  };
+        return [...prevResults, result];
+      });
+    },
+    [alpha, alphaVelocity]
+  );
   const nowPlayingCallback = (result: InferenceResult, playerTime: number) => {
     console.log(
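Concretely, with the default alphaVelocity of 0.25 set earlier in this file,
the callback above produces:

  alpha: 0.00 -> 0.25 -> 0.50 -> 0.75 -> 1.00
  next step: 1.00 + 0.25 = 1.25 > 1 + 1e-3, so alpha wraps to 0.25 and
  alphaRollover is set; the prompt-rotation effect above then runs once,
  clears the flag, and the ModelInference effect resumes issuing requests.

The 1e-3 slack keeps an alpha that lands on 1 only through floating-point
error from rolling over one step early.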
@@ -234,17 +241,16 @@ export default function Home() {
- {/* Note, top bg section is used to maintain color in background on ios notch devices */}
-
-
-
+ {/* Note, top bg section is used to maintain color in background on ios notch devices */}
+
+
+
-
window.open("/about", "_blank")}
>
[RIFFUSION]
@@ -262,6 +268,7 @@ export default function Home() {