diff --git a/speech-speech/backend/api.py b/speech-speech/backend/api.py
index a9e54f4..5b22316 100644
--- a/speech-speech/backend/api.py
+++ b/speech-speech/backend/api.py
@@ -1,10 +1,6 @@
 from openai import OpenAI
 from fastapi import FastAPI, File, Response, Request
-from fastapi.responses import JSONResponse
-from fastapi.encoders import jsonable_encoder
-from json import dumps
 from pydantic import BaseModel
-import filetype
 import whisper
@@ -27,7 +23,6 @@ def get_text(response: Response, audio: bytes = File()):
     response.headers["Access-Control-Allow-Origin"] = "*"
     with open("audio", "wb") as f:
         f.write(audio)
-    print(len(audio))
     # transcript = openAI_clinet.audio.transcriptions.create(
     #     model="whisper-1",
     #     file=audio,
@@ -42,6 +37,8 @@ def get_text(response: Response, audio: bytes = File()):
 @app.post("/conversation")
 async def get_next_response(request: Request, response: Response):
     response.headers["Access-Control-Allow-Origin"] = "*"
+    #role = "test"
+    #res_msg = "temp test response"
     messages = await request.json()
     res = openAI_clinet.chat.completions.create(
         model="gpt-3.5-turbo",
@@ -49,5 +46,6 @@ async def get_next_response(request: Request, response: Response):
     )
     res_msg = res.choices[0].message.content
     role = res.choices[0].message.role
+    print(messages)
     print(res_msg)
     return {"role": role, "content": res_msg}
diff --git a/speech-speech/backend/audio b/speech-speech/backend/audio
index 38793cf..dc5efd2 100644
Binary files a/speech-speech/backend/audio and b/speech-speech/backend/audio differ
diff --git a/speech-speech/frontend/src/App.tsx b/speech-speech/frontend/src/App.tsx
index 1ec4db4..34bb22c 100644
--- a/speech-speech/frontend/src/App.tsx
+++ b/speech-speech/frontend/src/App.tsx
@@ -1,4 +1,4 @@
-import { useState } from "react";
+import { useEffect, useRef, useState } from "react";
 import {
   TbBrandOpenai,
   TbMicrophone2,
@@ -7,10 +7,15 @@ import {
 } from "react-icons/tb";
 
 import "./App.css";
 
+type ChatMsg = {
+  role: string;
+  content: string;
+};
+
 function Header() {
   return (
-    <div ...>
+    <div ...>
       Speach to Speech AI example
     </div>
   );
 }
@@ -20,7 +25,7 @@ function Header() {
 let audioBlobs = [];
 let streamBeingCaptured: MediaStream | null = null;
 let mediaRecorder: MediaRecorder | null = null;
-let chat: Array<...> = [{
+let chat: Array<ChatMsg> = [{
   role: "system",
   content: "You are a helpful assistant.",
 }];
@@ -35,7 +40,7 @@ function get_mic() {
 }
 
 function startRecord() {
-  audioBlobs = []
+  audioBlobs = [];
   get_mic().then((stream) => {
     streamBeingCaptured = stream;
     mediaRecorder = new MediaRecorder(stream);
@@ -64,15 +69,45 @@ function playRecord() {
   audio.play();
 }
 
-function Feed() {
+function Feed(props: { chat: Array[ChatMsg]; setChatStateFn: any }) {
+  const bottomRef = useRef(null);
+
+  const scrollToBottom = () => {
+    bottomRef.current?.scrollIntoView({ behavior: "smooth" });
+  };
+
+  useEffect(() => {
+    scrollToBottom();
+    console.log("scroll?");
+  });
+
   return (
-    <div ...>
-      chat history goes here
-    </div>
+    <div ...>
+      <div ...>
+        {props.chat.filter((m: ChatMsg) => m.role != "system").map((
+          m: ChatMsg,
+        ) => <Msg msg={m} />)}
+      </div>
+      <div ref={bottomRef}></div>
+    </div>
   );
 }
 
-function Controls() {
+function Msg(props: { msg: ChatMsg }) {
+  return (
+    <div ...>
+      <span ...>
+        {props.msg.role.toUpperCase()}:
+      </span>
+      <div ...>
+        {props.msg.content}
+      </div>
+    </div>
+  );
+}
+
+function Controls(props: { setChatStateFn: any; chat: Array[ChatMsg] }) {
   const [recordState, setRecordState] = useState(false);
 
   function toggleRecord() {
@@ -94,20 +129,20 @@ function Controls() {
     }).then((res) => res.json())
      .then((res) => {
        console.log(res);
-        chat.push({ role: "user", content: res["user-transcript"] });
-        console.log(chat);
-        send_msg();
-      });
-  }
-
-  function send_msg() {
-    fetch("http://100.82.51.22:8001/conversation", {
-      "method": "POST",
-      "body": JSON.stringify(chat),
-    }).then((res) => res.json())
-      .then((res) => {
-        chat.push(res)
-        console.log(res);
+        props.setChatStateFn((curState) => [
+          ...curState,
+          { "role": "user", "content": res["user-transcript"] },
+        ]);
+        fetch("http://100.82.51.22:8001/conversation", {
+          "method": "POST",
+          "body": JSON.stringify([...props.chat, {
+            "role": "user",
+            "content": res["user-transcript"],
+          }]),
+        }).then((res) => res.json())
+          .then((res) => {
+            props.setChatStateFn((curState) => [...curState, res]);
+          });
      });
  }
@@ -141,6 +176,11 @@ function Controls() {
 }
 
 function App() {
+  const [chatState, setChatState] = useState([{
+    role: "system",
+    content: "You are a helpful assistant.",
+  }]);
+
   return (
     <>
       <Header />
-      <Feed />
-      <Controls />
+      <Feed chat={chatState} setChatStateFn={setChatState} />
+      <Controls chat={chatState} setChatStateFn={setChatState} />
     </>
   );
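
Taken together, the App.tsx changes lift the chat history into React state owned by App and have Controls chain the two backend calls: POST the recording for transcription, append the returned user message, then POST the updated history to /conversation and append the reply. The sketch below restates that round trip; it is not part of the commit above: transcribe and nextResponse are illustrative helper names, the /get-text path is assumed (its route decorator sits outside the hunks shown), and posting the audio as a multipart field named "audio" follows the FastAPI File() parameter in api.py.

// Sketch only, not part of the diff above.
type ChatMsg = { role: string; content: string };

const API = "http://100.82.51.22:8001";

// POST the recording; the backend replies with { "user-transcript": "..." }.
async function transcribe(recording: Blob): Promise<string> {
  const form = new FormData();
  form.append("audio", recording); // assumed field name, matching audio: bytes = File()
  const res = await fetch(`${API}/get-text`, { method: "POST", body: form });
  const data = await res.json();
  return data["user-transcript"];
}

// POST the full history; the backend replies with the assistant's next message.
async function nextResponse(history: ChatMsg[]): Promise<ChatMsg> {
  const res = await fetch(`${API}/conversation`, {
    method: "POST",
    body: JSON.stringify(history),
  });
  return res.json();
}

// Usage, mirroring what the new Controls code does through setChatStateFn:
//   const content = await transcribe(recordedBlob);
//   const withUser = [...chatState, { role: "user", content }];
//   setChatState(withUser);
//   setChatState([...withUser, await nextResponse(withUser)]);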