Whisper
Summary
Installation
npm install -S langchain @langchain/openai @chengsokdara/use-whisper

yarn add langchain @langchain/openai @chengsokdara/use-whisper

pnpm add langchain @langchain/openai @chengsokdara/use-whisper

Usage
import { ChatOpenAI } from '@langchain/openai';
import { createOpenAPIChain } from 'langchain/chains';
import { useWhisper } from '@chengsokdara/use-whisper';
import { useEffect, useState } from 'react';
/**
 * Whisper: records speech via the `useWhisper` hook, shows the live
 * transcription state, and sends the transcribed text through a LangChain
 * OpenAPI chain, accumulating each chain result for display.
 */
function Whisper() {
  // The initialized OpenAPI chain; null until the init effect completes.
  const [chain, setChain] = useState<any>(null);
  const [results, setResults] = useState<any[]>([]);
  const {
    recording,
    speaking,
    transcribing,
    transcript,
    pauseRecording,
    startRecording,
    stopRecording,
  } = useWhisper({
    apiKey: process.env.REACT_APP_OPENAI_API_KEY
  });
  useEffect(() => {
    // Guard against setting state after unmount (init is async).
    let cancelled = false;
    const initializeChat = async () => {
      const chatModel = new ChatOpenAI({
        modelName: 'gpt-4-1106-preview',
        temperature: 0,
        // FIX: use the REACT_APP_-prefixed variable so the bundler exposes
        // it in the browser (consistent with the useWhisper key above).
        openAIApiKey: process.env.REACT_APP_OPENAI_API_KEY,
      });
      const chain = await createOpenAPIChain('https://beta.usemoon.ai/.well-known/swagger.json', {
        llm: chatModel,
        headers: {
          // FIX: MOON_API_KEY was a bare, undefined identifier (runtime
          // ReferenceError); read it from the environment like the others.
          Authorization: `Bearer ${process.env.REACT_APP_MOON_API_KEY}`,
        },
      });
      if (!cancelled) {
        setChain(chain);
      }
    };
    // FIX: the async init promise was unhandled; surface failures.
    initializeChat().catch((err) => {
      console.error('Failed to initialize chain:', err);
    });
    return () => {
      cancelled = true;
    };
  }, []);
  const sendMessage = async () => {
    // Skip when the chain isn't ready or there is nothing transcribed yet.
    if (!chain || !transcript.text) {
      return;
    }
    try {
      const result = await chain.run(transcript.text);
      console.log(JSON.stringify(result, null, 2));
      setResults((prevResults) => [...prevResults, result]);
    } catch (err) {
      // FIX: a failed chain run previously rejected unhandled.
      console.error('Chain run failed:', err);
    }
  };
  return (
    <div>
      {/* FIX: raw booleans render as nothing in JSX; stringify for display. */}
      <p>Recording: {String(recording)}</p>
      <p>Speaking: {String(speaking)}</p>
      <p>Transcribing: {String(transcribing)}</p>
      <p>Transcribed Text: {transcript.text}</p>
      <button onClick={() => startRecording()}>Start</button>
      <button onClick={() => pauseRecording()}>Pause</button>
      <button onClick={() => stopRecording()}>Stop</button>
      <button onClick={sendMessage}>Send</button>
      {results.map((result, index) => (
        <p key={index}>{JSON.stringify(result)}</p>
      ))}
    </div>
  );
}
export default Whisper;
Last updated