Save settings

Hayk Martiros 2022-12-12 21:43:34 -08:00
parent 45bdd00312
commit 6ccb803968
7 changed files with 405 additions and 359 deletions

View File

@@ -9,7 +9,7 @@ interface DebugViewProps {
promptInputs: PromptInput[];
inferenceResults: InferenceResult[];
nowPlayingResult: InferenceResult;
open: boolean ;
open: boolean;
setOpen: (open: boolean) => void;
}
@@ -39,8 +39,9 @@ export default function DebugView({
onClose={() => setOpen(false)}
as="div"
className="fixed inset-0 z-30"
key="debug-dialog"
>
<ModalContainer>
<ModalContainer key="debug-modal">
<div className="px-4 text-center text-sm whitespace-nowrap h-[40rem] w-[70rem] overflow-x-scroll">
<div className="my-8 inline-block transform rounded-2xl bg-white p-6 text-left align-middle shadow-xl transition-all">
<Dialog.Panel>

View File

@@ -16,6 +16,8 @@ interface ModelInferenceProps {
nowPlayingResult: InferenceResult;
newResultCallback: (input: InferenceInput, result: InferenceResult) => void;
useBaseten: boolean;
denoising: number;
seedImageId: string;
}
/**
@@ -31,12 +33,12 @@ export default function ModelInference({
nowPlayingResult,
newResultCallback,
useBaseten,
denoising,
seedImageId,
}: ModelInferenceProps) {
// Create parameters for the inference request
const [denoising, setDenoising] = useState(0.75);
const [guidance, setGuidance] = useState(7.0);
const [numInferenceSteps, setNumInferenceSteps] = useState(50);
const [seedImageId, setSeedImageId] = useState("og_beat");
const [maskImageId, setMaskImageId] = useState(null);
const [initializedUrlParams, setInitializedUrlParams] = useState(false);
@@ -50,10 +52,6 @@ export default function ModelInference({
// Set initial params from URL query strings
const router = useRouter();
useEffect(() => {
if (router.query.denoising) {
setDenoising(parseFloat(router.query.denoising as string));
}
if (router.query.guidance) {
setGuidance(parseFloat(router.query.guidance as string));
}
@@ -62,10 +60,6 @@ export default function ModelInference({
setNumInferenceSteps(parseInt(router.query.numInferenceSteps as string));
}
if (router.query.seedImageId) {
setSeedImageId(router.query.seedImageId as string);
}
if (router.query.maskImageId) {
if (router.query.maskImageId === "none") {
setMaskImageId("");

View File

@@ -59,7 +59,7 @@ export default function PromptPanel({
displayPrompts = [...promptsToAdd, ...displayPrompts];
}
// Add in the upNext and staged prompts
// Add in the upNext and staged prompts
// select the last 2 prompts from prompts
const lastPrompts = prompts.slice(-2);
@@ -167,7 +167,7 @@ export default function PromptPanel({
}}
>
<input
className="flex w-full md:fixed md:w-1/2 h-12 pl-3 pr-3 text-xl text-sky-900 rounded-lg border-sky-700 border-4 hover:border-sky-600 focus:outline-none focus:border-sky-400"
className="flex w-full md:fixed md:w-1/2 h-12 pl-3 pr-3 text-xl text-sky-900 dark:text-sky-100 rounded-lg border-sky-700 border-4 hover:border-sky-600 focus:outline-none focus:border-sky-400"
ref={inputPrompt}
type="text"
id="prompt"
@@ -238,7 +238,7 @@ const promptEntryClassNames_5_0 = {
1: promptEntryClassNameDict[15],
2: promptEntryClassNameDict[23], // This is the start and end prompt
3: promptEntryClassNameDict[31], // This is the staged prompt
4: promptEntryClassNameDict[36], // This is the UP NEXT prompt
4: promptEntryClassNameDict[36], // This is the UP NEXT prompt
}
const promptEntryClassNames_5_25 = { // This is not reached unless user has poor connection or delayed server response
@@ -317,4 +317,4 @@ const promptEntryClassNames_6_1 = {
3: promptEntryClassNameDict[24],
4: promptEntryClassNameDict[32],
5: promptEntryClassNameDict[36],
}
}

View File

@@ -2,40 +2,50 @@ import { Dialog, Transition } from "@headlessui/react";
import { Fragment, useState } from "react";
import { FiSettings } from "react-icons/fi";
import { ImStatsBars } from "react-icons/im";
import styled, { css } from "styled-components";
import styled from "styled-components";
import { InferenceResult, PromptInput } from "../types";
import DebugView from "./DebugView";
const ModalContainer = styled.div`
position: absolute;
top: 0;
left: 0;
width: 100vw;
height: 100vh;
background: rgba(0, 0, 0, 0.5);
display: flex;
align-items: center;
justify-content: center;
position: absolute;
top: 0;
left: 0;
width: 100vw;
height: 100vh;
background: rgba(0, 0, 0, 0.5);
display: flex;
align-items: center;
justify-content: center;
`;
interface DebugViewProps {
promptInputs: PromptInput[];
inferenceResults: InferenceResult[];
nowPlayingResult: InferenceResult;
denoising: number;
setDenoising: (denoising: number) => void;
seedImage: string;
setSeedImage: (seedImage: string) => void;
}
export default function Settings({
promptInputs,
inferenceResults,
nowPlayingResult,
denoising,
setDenoising,
seedImage,
setSeedImage,
}: DebugViewProps) {
const [open, setOpen] = useState(false);
var classNameCondition = ""
var classNameCondition = "";
if (open) {
classNameCondition = "fixed z-20 top-44 right-4 md:top-48 md:right-8 bg-sky-400 w-14 h-14 rounded-full drop-shadow-lg flex justify-center items-center text-white text-2xl hover:bg-sky-500 hover:drop-shadow-2xl"
classNameCondition =
"fixed z-20 top-44 right-4 md:top-48 md:right-8 bg-sky-400 w-14 h-14 rounded-full drop-shadow-lg flex justify-center items-center text-white text-2xl hover:bg-sky-500 hover:drop-shadow-2xl";
} else {
classNameCondition = "fixed z-20 top-44 right-4 md:top-48 md:right-8 bg-slate-100 w-14 h-14 rounded-full drop-shadow-lg flex justify-center items-center text-sky-900 text-2xl hover:text-white hover:bg-sky-600 hover:drop-shadow-2xl"
classNameCondition =
"fixed z-20 top-44 right-4 md:top-48 md:right-8 bg-slate-100 w-14 h-14 rounded-full drop-shadow-lg flex justify-center items-center text-sky-900 text-2xl hover:text-white hover:bg-sky-600 hover:drop-shadow-2xl";
}
return (
@@ -82,47 +92,36 @@ export default function Settings({
leaveFrom="opacity-100 scale-100"
leaveTo="opacity-0 scale-95"
>
<ModalContainer>
<ModalContainer key="settings-modal-container">
<div className="my-8 inline-block w-full max-w-md transform overflow-hidden rounded-2xl bg-white p-6 text-left align-middle shadow-xl transition-all">
<Dialog.Title
as="h1"
className="text-3xl font-medium leading-6 text-gray-900 pb-2"
className="text-3xl font-medium leading-6 text-gray-900 pb-4"
>
Settings
</Dialog.Title>
<div className="mt-1">
<p className="label-text-alt">
<label className="label">
Riffusion generates music from text prompts using a diffusion model. Try typing in your favorite artist or genre, and playing with the settings below to explore the latent space of sound.
</label>
{/* <input type="range" min="0" max="100" value="40" className="range" /> */}
{SeedImageSelector()}
{DenoisingSelector()}
{DebugButton(
promptInputs,
inferenceResults,
nowPlayingResult
)}
<p className=" text-gray-700 text-sm">
Riffusion generates music from text prompts using a
diffusion model. Try typing in your favorite artist or
genre, and playing with the settings below to explore the
latent space of sound.
</p>
{/* <input type="range" min="0" max="100" value="40" className="range" /> */}
{SeedImageSelector(seedImage, setSeedImage)}
{DenoisingSelector(denoising, setDenoising)}
{DebugButton(
promptInputs,
inferenceResults,
nowPlayingResult
)}
</div>
<div className="mt-6">
<button
className="relative inline-flex items-center justify-center p-0.5 mb-2 mr-2 overflow-hidden text-sm font-medium text-gray-900 rounded-lg group bg-sky-500 group-hover:from-sky-600 group-hover:to-sky-500 hover:text-white"
onClick={() => {
setOpen(false);
}}
>
<span className="relative px-5 py-2 transition-all ease-in duration-75 bg-white rounded-md group-hover:bg-opacity-0">
Cancel
</span>
</button>
<button
className="relative inline-flex items-center justify-center p-0.5 mb-2 mr-2 overflow-hidden text-sm font-medium text-gray-900 rounded-lg group bg-sky-500 group-hover:from-sky-600 group-hover:to-sky-500 hover:text-white"
onClick={() => {
@@ -138,11 +137,12 @@ export default function Settings({
<button
type="button"
className="text-white bg-gradient-to-br from-purple-600 to-sky-500 hover:bg-gradient-to-bl font-medium rounded-lg text-sm px-5 py-2.5 text-center mr-2 mb-2"
onClick={() => setOpen(false)}
onClick={() => {
setOpen(false);
}}
>
Apply changes 🎧
Done 🎧
</button>
</div>
</div>
</ModalContainer>
@@ -152,57 +152,103 @@ export default function Settings({
</Transition>
</>
);
};
export function SeedImageSelector() {
return (
<div className="form-control w-full">
<label className="label">
<span className="label-text">Seed Image</span>
{/* <span className="label-text-alt">Chose your vibe</span> */}
</label>
<select className="select select-bordered select-sm">
<option disabled selected>Chose your vibe</option>
<option selected >Og Beat</option>
<option>Soul</option>
<option>High Energy</option>
<option>Spacy</option>
</select>
<label className="label">
<span className="label-text-alt">Used as the base for img2img diffusion. This keeps your riff on beat and impacts melodic patterns.</span>
{/* <span className="label-text-alt">Alt label</span> */}
</label>
</div>
)
}
export function DenoisingSelector() {
return (
<div className="form-control w-full">
<label className="label">
<span className="label-text">Denoising</span>
{/* <span className="label-text-alt">Chose your vibe</span> */}
</label>
<select className="select select-bordered select-sm">
<option disabled selected>How wild to get</option>
<option selected >Keep it on beat (0.75)</option>
<option>Get a little crazy (0.8)</option>
<option>I'm feeling lucky (0.85)</option>
<option>What is tempo? (0.95)</option>
</select>
<label className="label">
<span className="label-text-alt">The higher the denoising, the more creative the output, and the more likely you are to get off beat.</span>
{/* <span className="label-text-alt">Alt label</span> */}
</label>
</div>
)
}
export function DebugButton(
promptInputs,
inferenceResults,
nowPlayingResult
export function SeedImageSelector(
seedImage: string,
setSeedImage: (seedImage: string) => void
) {
let selectOptions = [
["OG Beat", "og_beat"],
["Soul", "chill_soul_1"],
// ["High Energy", 0.85],
// ["Spacy", 0.95],
];
let matchedOption = selectOptions.find((x) => x[1] === seedImage);
if (matchedOption === undefined) {
matchedOption = [`Custom (${seedImage})`, seedImage];
selectOptions.push(matchedOption);
}
return (
<div className="form-control w-full">
<label className="label">
<span className="label-text text-gray-700">Seed Image</span>
</label>
<select
className="select select-bordered select-sm"
onChange={(e) => {
const newValue = selectOptions.find(
(x) => x[0] === e.target.value
)[1];
console.log("Setting seed image: ", newValue);
setSeedImage(newValue);
}}
defaultValue={matchedOption[0]}
>
<option disabled>Chose your vibe</option>
{selectOptions.map((x, i) => (
<option key={i}>{x[0]}</option>
))}
</select>
<p className="label-text-alt text-gray-700 pt-2">
Used as the base for img2img diffusion. This keeps your riff on beat and
impacts melodic patterns.
</p>
</div>
);
}
export function DenoisingSelector(
denoising: number,
setDenoising: (d: number) => void
) {
let selectOptions = [
["Keep it on beat (0.75)", 0.75],
["Get a little crazy (0.8)", 0.8],
["I'm feeling lucky (0.85)", 0.85],
["What is tempo? (0.95)", 0.95],
];
let matchedOption = selectOptions.find((x) => x[1] === denoising);
if (matchedOption === undefined) {
matchedOption = [`Custom (${denoising})`, denoising];
selectOptions.push(matchedOption);
}
return (
<div className="form-control w-full">
<label className="label">
<span className="label-text text-gray-700">Denoising</span>
</label>
<select
className="select select-bordered select-sm"
onChange={(e) => {
const newValue = selectOptions.find(
(x) => x[0] === e.target.value
)[1] as number;
console.log("Setting denoising: ", newValue);
setDenoising(newValue);
}}
defaultValue={matchedOption[0]}
>
<option disabled>How wild to get</option>
{selectOptions.map((x, i) => (
<option key={i}>{x[0]}</option>
))}
</select>
<p className="label-text-alt text-gray-700 pt-2">
The higher the denoising, the more creative the output, and the more
likely you are to get off beat.
</p>
</div>
);
}
export function DebugButton(promptInputs, inferenceResults, nowPlayingResult) {
const [debugOpen, debugSetOpen] = useState(false);
let buttonClassName = "";
@@ -218,6 +264,7 @@ export function DebugButton(
<>
<button
title="Debug"
key="debug-button"
className={buttonClassName}
onClick={() => {
debugSetOpen(true);
@@ -232,7 +279,8 @@ export function DebugButton(
nowPlayingResult={nowPlayingResult}
open={debugOpen}
setOpen={debugSetOpen}
key="debug-view"
/>
</>
);
}
}

View File

@@ -1,277 +1,262 @@
import { Dialog, Transition } from "@headlessui/react";
import { Fragment, useState } from "react";
import { FiShare } from "react-icons/fi";
import styled, { css } from "styled-components";
import styled from "styled-components";
import { InferenceResult } from "../types";
interface ShareProps {
inferenceResults: InferenceResult[];
nowPlayingResult: InferenceResult;
inferenceResults: InferenceResult[];
nowPlayingResult: InferenceResult;
}
const ModalContainer = styled.div`
position: absolute;
top: 0;
left: 0;
width: 100vw;
height: 100vh;
background: rgba(0, 0, 0, 0.5);
display: flex;
align-items: center;
justify-content: center;
position: absolute;
top: 0;
left: 0;
width: 100vw;
height: 100vh;
background: rgba(0, 0, 0, 0.5);
display: flex;
align-items: center;
justify-content: center;
`;
export default function Share({
inferenceResults,
nowPlayingResult,
inferenceResults,
nowPlayingResult,
}: ShareProps) {
const [open, setOpen] = useState(false);
const [open, setOpen] = useState(false);
var classNameCondition = ""
if (open) {
classNameCondition = "fixed z-20 top-24 right-4 md:top-28 md:right-8 bg-sky-400 w-14 h-14 rounded-full drop-shadow-lg flex justify-center items-center text-white text-2xl hover:bg-sky-500 hover:drop-shadow-2xl"
var classNameCondition = "";
if (open) {
classNameCondition =
"fixed z-20 top-24 right-4 md:top-28 md:right-8 bg-sky-400 w-14 h-14 rounded-full drop-shadow-lg flex justify-center items-center text-white text-2xl hover:bg-sky-500 hover:drop-shadow-2xl";
} else {
classNameCondition =
"fixed z-20 top-24 right-4 md:top-28 md:right-8 bg-slate-100 w-14 h-14 rounded-full drop-shadow-lg flex justify-center items-center text-sky-900 text-2xl hover:text-white hover:bg-sky-600 hover:drop-shadow-2xl";
}
// function to copy link to moment in song to the clipboard
function copyLinkToClipboard(secondsAgo: number) {
// use generateLink to generate the link
const link = generateLink(secondsAgo);
navigator.clipboard.writeText(link);
}
function getActiveResult() {
if (!nowPlayingResult) {
if (inferenceResults.length == 0) {
return null;
}
return inferenceResults[0];
} else {
classNameCondition = "fixed z-20 top-24 right-4 md:top-28 md:right-8 bg-slate-100 w-14 h-14 rounded-full drop-shadow-lg flex justify-center items-center text-sky-900 text-2xl hover:text-white hover:bg-sky-600 hover:drop-shadow-2xl"
return nowPlayingResult;
}
}
// function to generate a link to a the moment in the song based on the played clips, input variable is how many seconds ago
function generateLink(secondsAgo: number) {
var prompt;
var seed;
var denoising;
var maskImageId;
var seedImageId;
var guidance;
var numInferenceSteps;
var alphaVelocity;
if (!nowPlayingResult) {
return window.location.href;
} else {
var selectedInput: InferenceResult["input"];
if (secondsAgo == 0) {
selectedInput = nowPlayingResult.input;
} else {
var selectedCounter = nowPlayingResult.counter - secondsAgo / 5;
selectedInput = inferenceResults.find(
(result) => result.counter == selectedCounter
)?.input;
if (!selectedInput) {
// TODO: ideally don't show the button in this case...
return window.location.href;
}
}
// TODO: Consider only including in the link the things that are different from the default values
prompt = selectedInput.start.prompt;
seed = selectedInput.start.seed;
denoising = selectedInput.start.denoising;
maskImageId = selectedInput.mask_image_id;
seedImageId = nowPlayingResult.input.seed_image_id;
// TODO, selectively add these based on whether we give user option to change them
// guidance = nowPlayingResult.input.guidance
// numInferenceSteps = nowPlayingResult.input.num_inference_steps
// alphaVelocity = nowPlayingResult.input.alpha_velocity
}
// function to copy link to moment in song to the clipboard
function copyLinkToClipboard(secondsAgo: number) {
var baseUrl = window.location.origin + "/?";
// use generateLink to generate the link
const link = generateLink(secondsAgo);
// Note that copying image and text to the clipboard simultaneously is not supported on all browsers, causes some funkiness
// This also has to be executed on a site secured with https on mobile, so will not work on localhost
navigator.clipboard
.writeText(link)
// }
if (prompt != null) {
var promptString = "&prompt=" + prompt;
} else {
promptString = "";
}
if (seed != null) {
var seedString = "&seed=" + seed;
} else {
seedString = "";
}
if (denoising != null) {
var denoisingString = "&denoising=" + denoising;
} else {
denoisingString = "";
}
if (maskImageId != null) {
var maskImageIdString = "&maskImageId=" + maskImageId;
} else {
maskImageIdString = "";
}
if (seedImageId != null) {
var seedImageIdString = "&seedImageId=" + seedImageId;
} else {
seedImageIdString = "";
}
if (guidance != null) {
var guidanceString = "&guidance=" + guidance;
} else {
guidanceString = "";
}
if (numInferenceSteps != null) {
var numInferenceStepsString = "&numInferenceSteps=" + numInferenceSteps;
} else {
numInferenceStepsString = "";
}
if (alphaVelocity != null) {
var alphaVelocityString = "&alphaVelocity=" + alphaVelocity;
} else {
alphaVelocityString = "";
}
function copyImageToClipboard(secondsAgo: number) {
// get image of the current moment in the song
var image: string
if (!nowPlayingResult) {
if (inferenceResults.length == 0) {
return
}
image = inferenceResults[0].image
}
else {
image = nowPlayingResult.image
}
// make pngImageBlob from the image
const imageBlob = dataURItoBlob(image)
// convert pngImageBlob to a PNG file
const pngImageFile = new File([imageBlob], "image.png", { type: "image/png" })
// Format strings to have + in place of spaces for ease of sharing, note this is only necessary for prompts currently
promptString = promptString.replace(/ /g, "+");
// use generateLink to generate the link, we'll add this to the clipboard as plain text as well
const link = generateLink(secondsAgo);
// create url string with the variables above combined
var shareUrl =
baseUrl +
promptString +
seedString +
denoisingString +
maskImageIdString +
seedImageIdString +
guidanceString +
numInferenceStepsString +
alphaVelocityString;
try {
navigator.clipboard.write([
new ClipboardItem({
'image/png': pngImageFile,
'text/plain': new Blob([link], { type: 'text/plain' }),
}),
]);
} catch (error) {
console.error(error);
}
}
return shareUrl;
}
function displayShareImage() {
var image: string
if (!nowPlayingResult) {
if (inferenceResults.length == 0) {
return
}
image = inferenceResults[0].image
}
else {
image = nowPlayingResult.image
return (
<>
<button
title="Info"
className={classNameCondition}
onClick={() => setOpen(true)}
>
<FiShare />
</button>
}
return (
<img src={image}
alt="share image"
className="w-64 h-64"
/>
)
}
// function to generate a link to a the moment in the song based on the played clips, input variable is how many seconds ago
function generateLink(secondsAgo: number) {
var prompt
var seed
var denoising
var maskImageId
var seedImageId
var guidance
var numInferenceSteps
var alphaVelocity
if (!nowPlayingResult) {
return window.location.href;
}
else {
var selectedInput: InferenceResult["input"]
if (secondsAgo == 0) {
selectedInput = nowPlayingResult.input
}
else {
var selectedCounter = nowPlayingResult.counter - (secondsAgo / 5)
selectedInput = inferenceResults.find((result) => result.counter == selectedCounter)?.input
if (!selectedInput) {
// TODO: ideally don't show the button in this case...
return window.location.href;
}
}
// TODO: Consider only including in the link the things that are different from the default values
prompt = selectedInput.start.prompt
seed = selectedInput.start.seed
denoising = selectedInput.start.denoising
maskImageId = selectedInput.mask_image_id
seedImageId = nowPlayingResult.input.seed_image_id
// TODO, selectively add these based on whether we give user option to change them
// guidance = nowPlayingResult.input.guidance
// numInferenceSteps = nowPlayingResult.input.num_inference_steps
// alphaVelocity = nowPlayingResult.input.alpha_velocity
}
var baseUrl = window.location.origin + "/?";
if (prompt != null) { var promptString = "&prompt=" + prompt } else { promptString = "" }
if (seed != null) { var seedString = "&seed=" + seed } else { seedString = "" }
if (denoising != null) { var denoisingString = "&denoising=" + denoising } else { denoisingString = "" }
if (maskImageId != null) { var maskImageIdString = "&maskImageId=" + maskImageId } else { maskImageIdString = "" }
if (seedImageId != null) { var seedImageIdString = "&seedImageId=" + seedImageId } else { seedImageIdString = "" }
if (guidance != null) { var guidanceString = "&guidance=" + guidance } else { guidanceString = "" }
if (numInferenceSteps != null) { var numInferenceStepsString = "&numInferenceSteps=" + numInferenceSteps } else { numInferenceStepsString = "" }
if (alphaVelocity != null) { var alphaVelocityString = "&alphaVelocity=" + alphaVelocity } else { alphaVelocityString = "" }
// Format strings to have + in place of spaces for ease of sharing, note this is only necessary for prompts currently
promptString = promptString.replace(/ /g, "+");
// create url string with the variables above combined
var shareUrl = baseUrl + promptString + seedString + denoisingString + maskImageIdString + seedImageIdString + guidanceString + numInferenceStepsString + alphaVelocityString
return shareUrl;
}
return (
<>
<button
title="Info"
className={classNameCondition}
onClick={() => setOpen(true)}
<Transition appear show={open} as={Fragment}>
<Dialog
as="div"
className="fixed inset-0 z-20 overflow-y-auto"
onClose={() => setOpen(false)}
>
<div className="min-h-screen px-4 text-center">
<Transition.Child
as={Fragment}
enter="ease-out duration-300"
enterFrom="opacity-0"
enterTo="opacity-100"
leave="ease-in duration-200"
leaveFrom="opacity-100"
leaveTo="opacity-0"
>
<FiShare />
</button>
<Dialog.Overlay className="fixed inset-0" />
</Transition.Child>
<Transition appear show={open} as={Fragment}>
<Dialog
as="div"
className="fixed inset-0 z-20 overflow-y-auto"
onClose={() => setOpen(false)}
>
<div className="min-h-screen px-4 text-center">
<Transition.Child
as={Fragment}
enter="ease-out duration-300"
enterFrom="opacity-0"
enterTo="opacity-100"
leave="ease-in duration-200"
leaveFrom="opacity-100"
leaveTo="opacity-0"
>
<Dialog.Overlay className="fixed inset-0" />
</Transition.Child>
<span
className="inline-block h-screen align-middle"
aria-hidden="true"
>
&#8203;
</span>
<Transition.Child
as={Fragment}
enter="ease-out duration-300"
enterFrom="opacity-0 scale-95"
enterTo="opacity-100 scale-100"
leave="ease-in duration-200"
leaveFrom="opacity-100 scale-100"
leaveTo="opacity-0 scale-95"
>
<ModalContainer>
<div className="my-8 p-8 inline-block w-full max-w-md transform overflow-hidden rounded-2xl bg-white text-left align-middle shadow-xl transition-all">
<Dialog.Title
as="h1"
className="text-3xl font-medium leading-6 text-gray-900 pb-2"
>
Share your riff
</Dialog.Title>
<div className="mt-4">
<img
src={getActiveResult()?.image}
alt="share image"
className="w-3/4"
/>
</div>
<span
className="inline-block h-screen align-middle"
aria-hidden="true"
>
&#8203;
</span>
<Transition.Child
as={Fragment}
enter="ease-out duration-300"
enterFrom="opacity-0 scale-95"
enterTo="opacity-100 scale-100"
leave="ease-in duration-200"
leaveFrom="opacity-100 scale-100"
leaveTo="opacity-0 scale-95"
>
<ModalContainer>
<div className="my-8 inline-block w-full max-w-md transform overflow-hidden rounded-2xl bg-white p-6 text-left align-middle shadow-xl transition-all">
<Dialog.Title
as="h1"
className="text-3xl font-medium leading-6 text-gray-900 pb-2"
>
Share your riff
</Dialog.Title>
<div className="mt-4">
{displayShareImage()}
</div>
<div className="mt-4">
<audio
controls
src={getActiveResult()?.audio}
className="w-3/4"
>
Your browser does not support audio.
</audio>
</div>
<div className="mt-6">
<button
className="relative inline-flex items-center justify-center p-0.5 mb-2 mr-2 overflow-hidden text-sm font-medium text-gray-900 rounded-lg group bg-sky-500 group-hover:from-sky-600 group-hover:to-sky-500 hover:text-white"
onClick={() => {
setOpen(false);
}}
>
<span className="relative px-5 py-2 transition-all ease-in duration-75 bg-white rounded-md group-hover:bg-opacity-0">
Cancel
</span>
</button>
<div className="mt-6">
<button
className="relative inline-flex items-center justify-center p-0.5 mb-2 mr-2 overflow-hidden text-sm font-medium text-gray-900 rounded-lg group bg-sky-500 group-hover:from-sky-600 group-hover:to-sky-500 hover:text-white"
onClick={() => {
setOpen(false);
}}
>
<span className="relative px-5 py-2 transition-all ease-in duration-75 bg-white rounded-md group-hover:bg-opacity-0">
Cancel
</span>
</button>
<button
type="button"
className="w-64 text-white bg-gradient-to-br from-purple-600 to-sky-500 hover:bg-gradient-to-bl font-medium rounded-lg text-sm px-5 py-2.5 text-center mr-2 mb-2"
onClick={() => {
copyLinkToClipboard(0)
// copyImageToClipboard(0)
setOpen(false)
}}
>
Copy link to current moment 🔗
</button>
</div>
</div>
</ModalContainer>
</Transition.Child>
</div>
</Dialog>
</Transition>
</>
);
};
function dataURItoBlob(image: string) {
// convert base64/URLEncoded data component to raw binary data held in a string
var byteString;
if (image.split(',')[0].indexOf('base64') >= 0)
byteString = atob(image.split(',')[1]);
else
byteString = unescape(image.split(',')[1]);
// separate out the mime component
var mimeString = image.split(',')[0].split(':')[1].split(';')[0];
// write the bytes of the string to a typed array
var ia = new Uint8Array(byteString.length);
for (var i = 0; i < byteString.length; i++) {
ia[i] = byteString.charCodeAt(i);
}
return new Blob([ia], { type: mimeString });
<button
type="button"
className="w-64 text-white bg-gradient-to-br from-purple-600 to-sky-500 hover:bg-gradient-to-bl font-medium rounded-lg text-sm px-5 py-2.5 text-center mr-2 mb-2"
onClick={() => {
copyLinkToClipboard(0);
setOpen(false);
}}
>
Copy link to current moment 🔗
</button>
</div>
</div>
</ModalContainer>
</Transition.Child>
</div>
</Dialog>
</Transition>
</>
);
}

View File

@@ -404,7 +404,7 @@ export default function Home() {
results in under five seconds, you can run the experience locally.
</p>
<br />
<b>Code</b>
<h2 className="pt-10 pb-5 text-3xl font-bold">Code</h2>
<ul className="mt-3 ml-10 list-disc">
<li>
Web app:{" "}

View File

@@ -43,6 +43,10 @@ export default function Home() {
const [alphaVelocity, setAlphaVelocity] = useState(0.25);
const [seed, setSeed] = useState(getRandomInt(1000000));
// Settings
const [denoising, setDenoising] = useState(0.75);
const [seedImageId, setSeedImageId] = useState("og_beat");
// Prompts shown on screen and maintained by the prompt panel
const [promptInputs, setPromptInputs] = useState<PromptInput[]>([]);
@@ -78,6 +82,14 @@ export default function Home() {
initPromptInputs[3].prompt = router.query.prompt as string;
}
setPromptInputs(defaultPromptInputs);
if (router.query.denoising) {
setDenoising(parseFloat(router.query.denoising as string));
}
if (router.query.seedImageId) {
setSeedImageId(router.query.seedImageId as string);
}
}, [router.isReady, router.query]);
// Set the app state based on the prompt inputs array
@@ -214,6 +226,8 @@ export default function Home() {
nowPlayingResult={nowPlayingResult}
newResultCallback={newResultCallback}
useBaseten={process.env.NEXT_PUBLIC_RIFFUSION_USE_BASETEN == "true"}
denoising={denoising}
seedImageId={seedImageId}
/>
<AudioPlayer
@@ -245,6 +259,10 @@ export default function Home() {
promptInputs={promptInputs}
inferenceResults={inferenceResults}
nowPlayingResult={nowPlayingResult}
denoising={denoising}
setDenoising={setDenoising}
seedImage={seedImageId}
setSeedImage={setSeedImageId}
/>
</div>
</>