diff --git a/package.json b/package.json
index 304350f6f..a4dc0f661 100644
--- a/package.json
+++ b/package.json
@@ -13,7 +13,10 @@
"@types/node": "^16.18.108",
"@types/react": "^18.3.5",
"@types/react-dom": "^18.3.0",
+ "axios": "^1.7.7",
+ "cors": "^2.8.5",
"dotenv": "^16.4.5",
+ "express": "^4.21.1",
"leaflet": "^1.9.4",
"react": "^18.3.1",
"react-dom": "^18.3.1",
@@ -24,15 +27,19 @@
"save": "^2.9.0",
"typescript": "^4.9.5",
"web-vitals": "^2.1.4",
- "ws": "^8.18.0"
+ "node-fetch": "^3.2.0",
+ "ws": "^8.18.0",
+ "three": "^0.160.0",
+ "@types/three": "^0.160.0"
},
"scripts": {
- "start": "react-scripts start",
+ "start": "node server.mjs",
"build": "react-scripts build",
"test": "react-scripts test",
"eject": "react-scripts eject",
- "zip": "zip -r realtime-api-console.zip . -x 'node_modules' 'node_modules/*' 'node_modules/**' '.git' '.git/*' '.git/**' '.DS_Store' '*/.DS_Store' 'package-lock.json' '*.zip' '*.tar.gz' '*.tar' '.env'",
- "relay": "nodemon ./relay-server/index.js"
+ "server": "node server.mjs",
+ "dev": "concurrently \"react-scripts start\" \"nodemon server.mjs\"",
+ "deploy": "npm run build && npm start"
},
"eslintConfig": {
"extends": [
@@ -54,6 +61,7 @@
},
"devDependencies": {
"@babel/plugin-proposal-private-property-in-object": "^7.21.11",
+ "concurrently": "^9.0.1",
"nodemon": "^3.1.7"
}
}
diff --git a/public/index.html b/public/index.html
index da65952cb..6d16b3d54 100644
--- a/public/index.html
+++ b/public/index.html
@@ -4,7 +4,9 @@
-    <title>realtime console</title>
+    <title>TTG AI - Realtime Console (PRIVATE ALPHA)</title>
diff --git a/server.mjs b/server.mjs
new file mode 100644
--- /dev/null
+++ b/server.mjs
+import express from 'express';
+import cors from 'cors';
+import dotenv from 'dotenv';
+import fetch from 'node-fetch';
+import path from 'path';
+import { fileURLToPath } from 'url';
+
+dotenv.config();
+
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = path.dirname(__filename);
+
+const app = express();
+const port = process.env.PORT || 3001; // default port (assumed)
+
+app.use(cors());
+app.use(express.json());
+
+// Serve the production React build
+app.use(express.static(path.join(__dirname, 'build')));
+
+app.post('/api/perplexity-search', async (req, res) => {
+ try {
+ const { query } = req.body;
+ console.log('Searching with Perplexity for:', query);
+ console.log('API Key available:', !!process.env.PERPLEXITY_API_KEY);
+
+ if (!process.env.PERPLEXITY_API_KEY) {
+ throw new Error('PERPLEXITY_API_KEY is not set');
+ }
+
+ const options = {
+ method: 'POST',
+ headers: {
+ 'Authorization': `Bearer ${process.env.PERPLEXITY_API_KEY}`,
+ 'Content-Type': 'application/json'
+ },
+ body: JSON.stringify({
+ model: "llama-3.1-sonar-large-128k-online",
+ messages: [
+ { role: "system", content: "Be precise and concise." },
+ { role: "user", content: query }
+ ],
+ temperature: 0.2,
+ top_p: 0.9,
+ return_citations: true,
+ search_domain_filter: ["perplexity.ai"],
+ return_images: false,
+ return_related_questions: false,
+ search_recency_filter: "month",
+ frequency_penalty: 1
+ })
+ };
+
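+    // Forward the search request to Perplexity's chat completions endpoint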
+ const response = await fetch('https://api.perplexity.ai/chat/completions', options);
+ const data = await response.json();
+
+ console.log('Perplexity API Response Status:', response.status);
+ res.json(data);
+ } catch (error) {
+ console.error('Error performing Perplexity search:', error.message);
+ if (error.response) {
+ console.error('API Response:', error.response.status, error.response.data);
+ }
+
+ // Fallback to mock response
+ console.log('Returning mock response');
+ res.json({
+ mock: true,
+ query: req.body.query,
+ choices: [
+ {
+ message: {
+ content: "This is a mock result for: " + req.body.query
+ }
+ }
+ ]
+ });
+ }
+});
+
+// The "catchall" handler: for any request that doesn't match one above, send back React's index.html file.
+app.get('*', (req, res) => {
+ res.sendFile(path.join(__dirname, 'build', 'index.html'));
+});
+
+app.listen(port, '0.0.0.0', () => {
+ console.log(`Server running on http://0.0.0.0:${port}`);
+ console.log('Environment:', process.env.NODE_ENV);
+ console.log('Perplexity API Key set:', !!process.env.PERPLEXITY_API_KEY);
+});
diff --git a/src/App.scss b/src/App.scss
index fc18b4e2e..fad1a89b2 100644
--- a/src/App.scss
+++ b/src/App.scss
@@ -1,5 +1,80 @@
+:root {
+ --bg-color: #ffffff;
+ --text-color: #333333;
+ --primary-color: #007bff;
+ --secondary-color: #6c757d;
+ --border-color: #dee2e6;
+ --assistant-color: #009900;
+ --error-color: #990000;
+}
+
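+// Dark-theme palette, applied when ThemeContext sets data-theme='dark' on <body>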
+[data-theme='dark'] {
+ --bg-color: #1a1a1a;
+ --text-color: #f0f0f0;
+ --primary-color: #4da3ff;
+ --secondary-color: #a1a1a1;
+ --border-color: #444444;
+ --assistant-color: #00cc00;
+ --error-color: #ff3333;
+}
+
+body {
+ background-color: var(--bg-color);
+ color: var(--text-color);
+ transition: background-color 0.3s ease, color 0.3s ease;
+}
+
[data-component='App'] {
height: 100%;
width: 100%;
position: relative;
}
+
+.theme-toggle {
+ position: fixed;
+ top: 20px;
+ right: 20px;
+ background: var(--primary-color);
+ color: var(--bg-color);
+ border: none;
+ border-radius: 50%;
+ width: 40px;
+ height: 40px;
+ font-size: 20px;
+ cursor: pointer;
+ transition: background-color 0.3s ease;
+ z-index: 1000;
+
+ &:hover {
+ background: var(--secondary-color);
+ }
+}
+
+// Add these general styles to be used across components
+button {
+ background-color: var(--primary-color);
+ color: var(--bg-color);
+ border: none;
+ padding: 10px 20px;
+ border-radius: 5px;
+ cursor: pointer;
+ transition: background-color 0.3s ease;
+
+ &:hover {
+ background-color: var(--secondary-color);
+ }
+}
+
+input, textarea {
+ background-color: var(--bg-color);
+ color: var(--text-color);
+ border: 1px solid var(--border-color);
+ padding: 10px;
+ border-radius: 5px;
+ transition: border-color 0.3s ease;
+
+ &:focus {
+ border-color: var(--primary-color);
+ outline: none;
+ }
+}
diff --git a/src/App.tsx b/src/App.tsx
index 140701081..146efcc74 100644
--- a/src/App.tsx
+++ b/src/App.tsx
@@ -1,12 +1,32 @@
+import React from 'react';
import { ConsolePage } from './pages/ConsolePage';
+import { ThemeProvider, useTheme } from './ThemeContext';
import './App.scss';
-function App() {
+const ThemeToggle: React.FC = () => {
+ const { theme, toggleTheme } = useTheme();
+ return (
+    <button className="theme-toggle" onClick={toggleTheme}>
+      {theme === 'light' ? '🌙' : '☀️'}
+    </button>
+ );
+};
+
+const AppContent: React.FC = () => {
return (
     <div data-component="App">
+      <ThemeToggle />
       <ConsolePage />
     </div>
);
+};
+
+function App() {
+ return (
+    <ThemeProvider>
+      <AppContent />
+    </ThemeProvider>
+ );
}
export default App;
diff --git a/src/ThemeContext.tsx b/src/ThemeContext.tsx
new file mode 100644
index 000000000..9f2ef46e2
--- /dev/null
+++ b/src/ThemeContext.tsx
@@ -0,0 +1,36 @@
+import React, { createContext, useState, useContext, useEffect } from 'react';
+
+type Theme = 'light' | 'dark';
+
+interface ThemeContextType {
+ theme: Theme;
+ toggleTheme: () => void;
+}
+
+const ThemeContext = createContext<ThemeContextType | undefined>(undefined);
+
+export const ThemeProvider: React.FC<{ children: React.ReactNode }> = ({ children }) => {
+  const [theme, setTheme] = useState<Theme>('light');
+
+ const toggleTheme = () => {
+ setTheme((prevTheme) => (prevTheme === 'light' ? 'dark' : 'light'));
+ };
+
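+  // Keep a data-theme attribute on <body> in sync so the [data-theme='dark'] variables in App.scss take effect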
+ useEffect(() => {
+ document.body.setAttribute('data-theme', theme);
+ }, [theme]);
+
+ return (
+    <ThemeContext.Provider value={{ theme, toggleTheme }}>
+      {children}
+    </ThemeContext.Provider>
+ );
+};
+
+export const useTheme = () => {
+ const context = useContext(ThemeContext);
+ if (context === undefined) {
+ throw new Error('useTheme must be used within a ThemeProvider');
+ }
+ return context;
+};
diff --git a/src/components/ThreeJsVisualization.tsx b/src/components/ThreeJsVisualization.tsx
new file mode 100644
index 000000000..856c4f5c7
--- /dev/null
+++ b/src/components/ThreeJsVisualization.tsx
@@ -0,0 +1,63 @@
+import React, { useRef, useEffect } from 'react';
+import * as THREE from 'three';
+
+interface ThreeJsVisualizationProps {
+ aiSpeechData: string;
+}
+
+const ThreeJsVisualization: React.FC<ThreeJsVisualizationProps> = ({ aiSpeechData }) => {
+  const mountRef = useRef<HTMLDivElement>(null);
+
+ useEffect(() => {
+ if (!mountRef.current) return;
+
+ // Set up scene
+ const scene = new THREE.Scene();
+ const camera = new THREE.PerspectiveCamera(75, window.innerWidth / window.innerHeight, 0.1, 1000);
+ const renderer = new THREE.WebGLRenderer();
+ renderer.setSize(window.innerWidth, window.innerHeight);
+ mountRef.current.appendChild(renderer.domElement);
+
+ // Create a simple cube
+ const geometry = new THREE.BoxGeometry();
+ const material = new THREE.MeshBasicMaterial({ color: 0x00ff00 });
+ const cube = new THREE.Mesh(geometry, material);
+ scene.add(cube);
+
+ camera.position.z = 5;
+
+ // Animation function
+ const animate = () => {
+ requestAnimationFrame(animate);
+
+ // Rotate the cube based on the length of aiSpeechData
+ cube.rotation.x += 0.01 * aiSpeechData.length;
+ cube.rotation.y += 0.01 * aiSpeechData.length;
+
+ renderer.render(scene, camera);
+ };
+
+ animate();
+
+ // Handle window resize
+ const handleResize = () => {
+ const width = window.innerWidth;
+ const height = window.innerHeight;
+ renderer.setSize(width, height);
+ camera.aspect = width / height;
+ camera.updateProjectionMatrix();
+ };
+
+ window.addEventListener('resize', handleResize);
+
+ // Cleanup function
+ return () => {
+ window.removeEventListener('resize', handleResize);
+ mountRef.current?.removeChild(renderer.domElement);
+ };
+ }, [aiSpeechData]); // Re-run effect when aiSpeechData changes
+
+  return <div ref={mountRef} />;
+};
+
+export default ThreeJsVisualization;
diff --git a/src/index.css b/src/index.css
index 9f97e54e6..71c9029b6 100644
--- a/src/index.css
+++ b/src/index.css
@@ -12,6 +12,7 @@ body {
color: #18181b;
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
+ font-size: 16px;
}
#root {
@@ -19,3 +20,53 @@ body {
width: 100%;
height: 100%;
}
+
+/* Responsive font sizes */
+@media (max-width: 768px) {
+ html, body {
+ font-size: 14px;
+ }
+}
+
+@media (max-width: 480px) {
+ html, body {
+ font-size: 12px;
+ }
+}
+
+/* Ensure proper touch targets for interactive elements */
+button,
+input[type="button"],
+input[type="submit"],
+input[type="reset"],
+a {
+ min-height: 44px;
+ min-width: 44px;
+}
+
+/* Add some padding to the body on very small screens */
+@media (max-width: 320px) {
+ body {
+ padding: 0 10px;
+ }
+}
+
+/* Improve readability on mobile */
+@media (max-width: 768px) {
+ p, li, td, th {
+ line-height: 1.5;
+ }
+}
+
+/* Adjust heading sizes for mobile */
+@media (max-width: 768px) {
+ h1 { font-size: 1.8em; }
+ h2 { font-size: 1.5em; }
+ h3 { font-size: 1.2em; }
+}
+
+/* Ensure images and other media are responsive */
+img, video, iframe {
+ max-width: 100%;
+ height: auto;
+}
diff --git a/src/lib/wattApi.ts b/src/lib/wattApi.ts
new file mode 100644
index 000000000..352082d0c
--- /dev/null
+++ b/src/lib/wattApi.ts
@@ -0,0 +1,70 @@
+import axios from 'axios';
+
+const API_ENDPOINT = 'https://beta.webpilotai.com/api/v1/watt';
+const API_KEY = '04a0491fb7ae462a8dbe8203979461ff';
+
+interface WattApiResponse {
+ content: string;
+}
+
+interface WattApiRequest {
+ model: string;
+ content: string;
+}
+
+export async function wattApi(content: string, model: string = 'wp-watt-4.02-16k'): Promise<string> {
+ const data: WattApiRequest = {
+ model,
+ content,
+ };
+
+ const headers = {
+ 'Content-Type': 'application/json',
+ 'Authorization': `Bearer ${API_KEY}`,
+ };
+
+ try {
+    const response = await axios.post<WattApiResponse>(API_ENDPOINT, data, { headers });
+ return response.data.content;
+ } catch (error) {
+ if (axios.isAxiosError(error)) {
+ if (error.response) {
+ throw new Error(`Watt API error: ${error.response.status} - ${error.response.data}`);
+ } else {
+ throw new Error(`Watt API network error: ${error.message}`);
+ }
+ }
+ throw error;
+ }
+}
+
+export async function wattApiStream(content: string, model: string = 'wp-watt-4.02-16k'): Promise<ReadableStream<string>> {
+ const data: WattApiRequest = {
+ model,
+ content,
+ };
+
+ const headers = {
+ 'Content-Type': 'application/json',
+ 'Authorization': `Bearer ${API_KEY}`,
+ };
+
+ const response = await fetch(`${API_ENDPOINT}/stream`, {
+ method: 'POST',
+ headers,
+ body: JSON.stringify(data),
+ });
+
+ if (!response.body) {
+ throw new Error('No response body');
+ }
+
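+  // Decode the raw byte stream into text chunks so callers can consume a ReadableStream<string>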
+ const decoder = new TextDecoder();
+ const transformStream = new TransformStream({
+ transform(chunk: Uint8Array, controller) {
+ controller.enqueue(decoder.decode(chunk, { stream: true }));
+ },
+ });
+
+ return response.body.pipeThrough(transformStream);
+}
diff --git a/src/pages/ConsolePage.scss b/src/pages/ConsolePage.scss
index ae162f7d1..2722fe277 100644
--- a/src/pages/ConsolePage.scss
+++ b/src/pages/ConsolePage.scss
@@ -8,6 +8,8 @@
flex-direction: column;
overflow: hidden;
margin: 0px 8px;
+ color: var(--text-color);
+ background-color: var(--bg-color);
& > div {
flex-shrink: 0;
}
@@ -40,6 +42,9 @@
display: flex;
overflow: hidden;
margin-bottom: 24px;
+ @media (max-width: 768px) {
+ flex-direction: column;
+ }
.content-block {
position: relative;
display: flex;
@@ -53,7 +58,7 @@
position: relative;
}
.content-block-body {
- color: #6e6e7f;
+ color: var(--secondary-color);
position: relative;
flex-grow: 1;
padding: 8px 0px;
@@ -72,6 +77,11 @@
flex-direction: column;
margin-left: 24px;
gap: 24px;
+ @media (max-width: 768px) {
+ width: 100%;
+ margin-left: 0;
+ margin-top: 24px;
+ }
& > div {
border-radius: 16px;
flex-grow: 1;
@@ -87,7 +97,7 @@
top: 16px;
left: 16px;
padding: 4px 16px;
- background-color: #fff;
+ background-color: var(--bg-color);
border-radius: 1000px;
min-height: 32px;
z-index: 9999;
@@ -104,7 +114,7 @@
height: 250px;
max-height: 250px;
white-space: pre;
- background-color: #ececf1;
+ background-color: var(--secondary-color);
.content-block-body {
padding: 16px;
margin-top: 56px;
@@ -126,12 +136,15 @@
align-items: center;
justify-content: center;
gap: 16px;
+ @media (max-width: 480px) {
+ flex-direction: column;
+ }
}
& > div.events {
overflow: hidden;
}
.events {
- border-top: 1px solid #e7e7e7;
+ border-top: 1px solid var(--border-color);
}
.conversation {
display: flex;
@@ -141,7 +154,7 @@
height: 200px;
min-height: 0;
max-height: 200px;
- border-top: 1px solid #e7e7e7;
+ border-top: 1px solid var(--border-color);
}
}
}
@@ -151,6 +164,10 @@
display: flex;
gap: 16px;
margin-bottom: 16px;
+ @media (max-width: 480px) {
+ flex-direction: column;
+ gap: 8px;
+ }
&:not(:hover) .close {
display: none;
}
@@ -158,14 +175,14 @@
position: absolute;
top: 0px;
right: -20px;
- background: #aaa;
- color: #fff;
+ background: var(--secondary-color);
+ color: var(--bg-color);
display: flex;
border-radius: 16px;
padding: 2px;
cursor: pointer;
&:hover {
- background: #696969;
+ background: var(--primary-color);
}
svg {
stroke-width: 3;
@@ -180,15 +197,19 @@
width: 80px;
flex-shrink: 0;
margin-right: 16px;
+ @media (max-width: 480px) {
+ width: auto;
+ margin-right: 0;
+ }
&.user {
- color: #0099ff;
+ color: var(--primary-color);
}
&.assistant {
- color: #009900;
+ color: var(--assistant-color);
}
}
.speaker-content {
- color: #18181b;
+ color: var(--text-color);
overflow: hidden;
word-wrap: break-word;
}
@@ -200,6 +221,10 @@
display: flex;
padding: 0px;
gap: 16px;
+ @media (max-width: 480px) {
+ flex-direction: column;
+ gap: 8px;
+ }
.event-timestamp {
text-align: left;
gap: 8px;
@@ -207,18 +232,22 @@
width: 80px;
flex-shrink: 0;
margin-right: 16px;
+ @media (max-width: 480px) {
+ width: auto;
+ margin-right: 0;
+ }
}
.event-details {
display: flex;
flex-direction: column;
- color: #18181b;
+ color: var(--text-color);
gap: 8px;
.event-summary {
padding: 4px 8px;
margin: 0px -8px;
&:hover {
border-radius: 8px;
- background-color: #f0f0f0;
+ background-color: var(--secondary-color);
}
cursor: pointer;
display: flex;
@@ -230,13 +259,13 @@
align-items: center;
gap: 8px;
&.client {
- color: #0099ff;
+ color: var(--primary-color);
}
&.server {
- color: #009900;
+ color: var(--assistant-color);
}
&.error {
- color: #990000;
+ color: var(--error-color);
}
svg {
stroke-width: 3;
@@ -257,6 +286,11 @@
border-radius: 16px;
z-index: 10;
gap: 2px;
+ @media (max-width: 768px) {
+ position: static;
+ justify-content: center;
+ margin-top: 16px;
+ }
.visualization-entry {
position: relative;
display: flex;
@@ -265,10 +299,10 @@
width: 100px;
gap: 4px;
&.client {
- color: #0099ff;
+ color: var(--primary-color);
}
&.server {
- color: #009900;
+ color: var(--assistant-color);
}
canvas {
width: 100%;
@@ -278,3 +312,10 @@
}
}
}
+
+@media (max-width: 768px) {
+ [data-component='ConsolePage'] {
+ font-size: 14px;
+ margin: 0px 4px;
+ }
+}
diff --git a/src/pages/ConsolePage.tsx b/src/pages/ConsolePage.tsx
index 366756ce6..5be6c275a 100644
--- a/src/pages/ConsolePage.tsx
+++ b/src/pages/ConsolePage.tsx
@@ -1,52 +1,19 @@
-/**
- * Running a local relay server will allow you to hide your API key
- * and run custom logic on the server
- *
- * Set the local relay server address to:
- * REACT_APP_LOCAL_RELAY_SERVER_URL=http://localhost:8081
- *
- * This will also require you to set OPENAI_API_KEY= in a `.env` file
- * You can run it with `npm run relay`, in parallel with `npm start`
- */
-const LOCAL_RELAY_SERVER_URL: string =
- process.env.REACT_APP_LOCAL_RELAY_SERVER_URL || '';
-
-import { useEffect, useRef, useCallback, useState } from 'react';
-
+import React, { useEffect, useRef, useCallback, useState } from 'react';
import { RealtimeClient } from '@openai/realtime-api-beta';
import { ItemType } from '@openai/realtime-api-beta/dist/lib/client.js';
import { WavRecorder, WavStreamPlayer } from '../lib/wavtools/index.js';
import { instructions } from '../utils/conversation_config.js';
import { WavRenderer } from '../utils/wav_renderer';
-
-import { X, Edit, Zap, ArrowUp, ArrowDown } from 'react-feather';
+import { X, Edit, Zap, ArrowUp, ArrowDown, Moon, Sun } from 'react-feather';
import { Button } from '../components/button/Button';
import { Toggle } from '../components/toggle/Toggle';
-import { Map } from '../components/Map';
+import axios from 'axios';
+import { useTheme } from '../ThemeContext';
import './ConsolePage.scss';
-import { isJsxOpeningLikeElement } from 'typescript';
-
-/**
- * Type for result from get_weather() function call
- */
-interface Coordinates {
- lat: number;
- lng: number;
- location?: string;
- temperature?: {
- value: number;
- units: string;
- };
- wind_speed?: {
- value: number;
- units: string;
- };
-}
-/**
- * Type for all event logs
- */
+const LOCAL_RELAY_SERVER_URL: string = process.env.REACT_APP_LOCAL_RELAY_SERVER_URL || '';
+
interface RealtimeEvent {
time: string;
source: 'client' | 'server';
@@ -54,32 +21,24 @@ interface RealtimeEvent {
event: { [key: string]: any };
}
+interface SearchResult {
+ title: string;
+ url: string;
+ description: string;
+}
+
export function ConsolePage() {
- /**
- * Ask user for API Key
- * If we're using the local relay server, we don't need this
- */
+ const { theme, toggleTheme } = useTheme();
+
const apiKey = LOCAL_RELAY_SERVER_URL
? ''
- : localStorage.getItem('tmp::voice_api_key') ||
- prompt('OpenAI API Key') ||
- '';
+ : localStorage.getItem('tmp::voice_api_key') || prompt('OpenAI API Key') || '';
if (apiKey !== '') {
localStorage.setItem('tmp::voice_api_key', apiKey);
}
- /**
- * Instantiate:
- * - WavRecorder (speech input)
- * - WavStreamPlayer (speech output)
- * - RealtimeClient (API client)
- */
-  const wavRecorderRef = useRef<WavRecorder>(
-    new WavRecorder({ sampleRate: 24000 })
-  );
-  const wavStreamPlayerRef = useRef<WavStreamPlayer>(
-    new WavStreamPlayer({ sampleRate: 24000 })
-  );
+  const wavRecorderRef = useRef<WavRecorder>(new WavRecorder({ sampleRate: 24000 }));
+  const wavStreamPlayerRef = useRef<WavStreamPlayer>(new WavStreamPlayer({ sampleRate: 24000 }));
   const clientRef = useRef<RealtimeClient>(
new RealtimeClient(
LOCAL_RELAY_SERVER_URL
@@ -91,43 +50,21 @@ export function ConsolePage() {
)
);
- /**
- * References for
- * - Rendering audio visualization (canvas)
- * - Autoscrolling event logs
- * - Timing delta for event log displays
- */
   const clientCanvasRef = useRef<HTMLCanvasElement>(null);
   const serverCanvasRef = useRef<HTMLCanvasElement>(null);
   const eventsScrollHeightRef = useRef(0);
   const eventsScrollRef = useRef<HTMLDivElement>(null);
   const startTimeRef = useRef<string>(new Date().toISOString());
- /**
- * All of our variables for displaying application state
- * - items are all conversation items (dialog)
- * - realtimeEvents are event logs, which can be expanded
- * - memoryKv is for set_memory() function
- * - coords, marker are for get_weather() function
- */
   const [items, setItems] = useState<ItemType[]>([]);
   const [realtimeEvents, setRealtimeEvents] = useState<RealtimeEvent[]>([]);
- const [expandedEvents, setExpandedEvents] = useState<{
- [key: string]: boolean;
- }>({});
+ const [expandedEvents, setExpandedEvents] = useState<{ [key: string]: boolean }>({});
const [isConnected, setIsConnected] = useState(false);
const [canPushToTalk, setCanPushToTalk] = useState(true);
const [isRecording, setIsRecording] = useState(false);
const [memoryKv, setMemoryKv] = useState<{ [key: string]: any }>({});
-  const [coords, setCoords] = useState<Coordinates | null>({
-    lat: 37.775593,
-    lng: -122.418137,
-  });
-  const [marker, setMarker] = useState<Coordinates | null>(null);
-
- /**
- * Utility for formatting the timing of logs
- */
+  const [searchResults, setSearchResults] = useState<SearchResult[]>([]);
+
const formatTime = useCallback((timestamp: string) => {
const startTime = startTimeRef.current;
const t0 = new Date(startTime).valueOf();
@@ -146,9 +83,6 @@ export function ConsolePage() {
return `${pad(m)}:${pad(s)}.${pad(hs)}`;
}, []);
- /**
- * When you click the API key
- */
const resetAPIKey = useCallback(() => {
const apiKey = prompt('OpenAI API Key');
if (apiKey !== null) {
@@ -158,55 +92,49 @@ export function ConsolePage() {
}
}, []);
- /**
- * Connect to conversation:
- * WavRecorder taks speech input, WavStreamPlayer output, client is API client
- */
const connectConversation = useCallback(async () => {
- const client = clientRef.current;
- const wavRecorder = wavRecorderRef.current;
- const wavStreamPlayer = wavStreamPlayerRef.current;
+ try {
+ const client = clientRef.current;
+ const wavRecorder = wavRecorderRef.current;
+ const wavStreamPlayer = wavStreamPlayerRef.current;
- // Set state variables
- startTimeRef.current = new Date().toISOString();
- setIsConnected(true);
- setRealtimeEvents([]);
- setItems(client.conversation.getItems());
+ startTimeRef.current = new Date().toISOString();
+ setIsConnected(true);
+ setRealtimeEvents([]);
+ setItems(client.conversation.getItems());
- // Connect to microphone
- await wavRecorder.begin();
+ // Request microphone permission
+ await navigator.mediaDevices.getUserMedia({ audio: true });
- // Connect to audio output
- await wavStreamPlayer.connect();
+ await wavRecorder.begin();
+ await wavStreamPlayer.connect();
- // Connect to realtime API
- await client.connect();
- client.sendUserMessageContent([
- {
- type: `input_text`,
- text: `Hello!`,
- // text: `For testing purposes, I want you to list ten car brands. Number each item, e.g. "one (or whatever number you are one): the item name".`
- },
- ]);
+ await client.connect();
+ client.sendUserMessageContent([
+ {
+ type: `input_text`,
+ text: `Hello, who is this?`,
- if (client.getTurnDetectionType() === 'server_vad') {
- await wavRecorder.record((data) => client.appendInputAudio(data.mono));
+ },
+ ]);
+
+ if (client.getTurnDetectionType() === 'server_vad') {
+ await wavRecorder.record((data) => client.appendInputAudio(data.mono));
+ }
+ } catch (error) {
+ console.error('Error connecting to conversation:', error);
+ setIsConnected(false);
+ // Display an error message to the user
+ alert('Failed to connect. Please ensure you have granted microphone permissions and try again.');
}
}, []);
- /**
- * Disconnect and reset conversation state
- */
const disconnectConversation = useCallback(async () => {
setIsConnected(false);
setRealtimeEvents([]);
setItems([]);
setMemoryKv({});
- setCoords({
- lat: 37.775593,
- lng: -122.418137,
- });
- setMarker(null);
+ setSearchResults([]);
const client = clientRef.current;
client.disconnect();
@@ -223,10 +151,6 @@ export function ConsolePage() {
client.deleteItem(id);
}, []);
- /**
- * In push-to-talk mode, start recording
- * .appendInputAudio() for each sample
- */
const startRecording = async () => {
setIsRecording(true);
const client = clientRef.current;
@@ -240,9 +164,6 @@ export function ConsolePage() {
await wavRecorder.record((data) => client.appendInputAudio(data.mono));
};
- /**
- * In push-to-talk mode, stop recording
- */
const stopRecording = async () => {
setIsRecording(false);
const client = clientRef.current;
@@ -251,9 +172,6 @@ export function ConsolePage() {
client.createResponse();
};
- /**
- * Switch between Manual <> VAD mode for communication
- */
const changeTurnEndType = async (value: string) => {
const client = clientRef.current;
const wavRecorder = wavRecorderRef.current;
@@ -269,14 +187,10 @@ export function ConsolePage() {
setCanPushToTalk(value === 'none');
};
- /**
- * Auto-scroll the event logs
- */
useEffect(() => {
if (eventsScrollRef.current) {
const eventsEl = eventsScrollRef.current;
const scrollHeight = eventsEl.scrollHeight;
- // Only scroll if height has just changed
if (scrollHeight !== eventsScrollHeightRef.current) {
eventsEl.scrollTop = scrollHeight;
eventsScrollHeightRef.current = scrollHeight;
@@ -284,9 +198,6 @@ export function ConsolePage() {
}
}, [realtimeEvents]);
- /**
- * Auto-scroll the conversation logs
- */
useEffect(() => {
const conversationEls = [].slice.call(
document.body.querySelectorAll('[data-conversation-content]')
@@ -297,9 +208,6 @@ export function ConsolePage() {
}
}, [items]);
- /**
- * Set up render loops for the visualization canvas
- */
useEffect(() => {
let isLoaded = true;
@@ -367,21 +275,13 @@ export function ConsolePage() {
};
}, []);
- /**
- * Core RealtimeClient and audio capture setup
- * Set all of our instructions, tools, events and more
- */
useEffect(() => {
- // Get refs
const wavStreamPlayer = wavStreamPlayerRef.current;
const client = clientRef.current;
- // Set instructions
client.updateSession({ instructions: instructions });
- // Set transcription, otherwise we don't get user transcriptions back
client.updateSession({ input_audio_transcription: { model: 'whisper-1' } });
-
- // Add tools
+ client.updateSession({ voice: 'sage' });
client.addTool(
{
name: 'set_memory',
@@ -411,56 +311,54 @@ export function ConsolePage() {
return { ok: true };
}
);
+
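+  // perplexity_search proxies through the local /api/perplexity-search endpoint so the Perplexity API key stays on the server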
client.addTool(
{
- name: 'get_weather',
- description:
- 'Retrieves the weather for a given lat, lng coordinate pair. Specify a label for the location.',
+ name: "perplexity_search",
+ description: "Perform an AI-powered search using Perplexity AI",
parameters: {
- type: 'object',
+ type: "object",
properties: {
- lat: {
- type: 'number',
- description: 'Latitude',
- },
- lng: {
- type: 'number',
- description: 'Longitude',
- },
- location: {
- type: 'string',
- description: 'Name of the location',
+ query: {
+ type: "string",
+ description: "The search query",
},
},
- required: ['lat', 'lng', 'location'],
+ required: ["query"],
},
},
- async ({ lat, lng, location }: { [key: string]: any }) => {
- setMarker({ lat, lng, location });
- setCoords({ lat, lng, location });
- const result = await fetch(
- `https://api.open-meteo.com/v1/forecast?latitude=${lat}&longitude=${lng}¤t=temperature_2m,wind_speed_10m`
- );
- const json = await result.json();
- const temperature = {
- value: json.current.temperature_2m as number,
- units: json.current_units.temperature_2m as string,
- };
- const wind_speed = {
- value: json.current.wind_speed_10m as number,
- units: json.current_units.wind_speed_10m as string,
- };
- setMarker({ lat, lng, location, temperature, wind_speed });
- return json;
+ async ({ query }: { query: string }) => {
+ try {
+ const response = await axios.post('/api/perplexity-search', { query });
+ console.log('Perplexity search response:', response.data);
+
+ if (response.data.choices && response.data.choices.length > 0) {
+ const result = response.data.choices[0].message.content;
+ setSearchResults([{
+ title: "Perplexity AI Result",
+ url: "#",
+ description: result
+ }]);
+ return { result };
+ } else {
+ console.error('Unexpected response format:', response.data);
+ return { error: 'Unexpected response format' };
+ }
+ } catch (error) {
+ console.error('Error performing Perplexity search:', error);
+ if (axios.isAxiosError(error) && error.response) {
+ console.error('API Response:', error.response.status, error.response.data);
+ }
+ setSearchResults([{ title: "Error", url: "#", description: "Failed to perform search. Please try again." }]);
+ return { error: 'Failed to perform search' };
+ }
}
);
- // handle realtime events from client + server for event logging
client.on('realtime.event', (realtimeEvent: RealtimeEvent) => {
setRealtimeEvents((realtimeEvents) => {
const lastEvent = realtimeEvents[realtimeEvents.length - 1];
if (lastEvent?.event.type === realtimeEvent.event.type) {
- // if we receive multiple events in a row, aggregate them for display purposes
lastEvent.count = (lastEvent.count || 0) + 1;
return realtimeEvents.slice(0, -1).concat(lastEvent);
} else {
@@ -495,19 +393,15 @@ export function ConsolePage() {
setItems(client.conversation.getItems());
return () => {
- // cleanup; resets to defaults
client.reset();
};
}, []);
- /**
- * Render the application
- */
return (
-
+
-
+
realtime console
@@ -520,6 +414,13 @@ export function ConsolePage() {
onClick={() => resetAPIKey()}
/>
)}
+          <Button
+            icon={theme === 'light' ? Moon : Sun}
+            iconPosition="start"
+            buttonStyle="flush"
+            label={theme === 'light' ? 'dark mode' : 'light mode'}
+            onClick={toggleTheme}
+          />
@@ -553,7 +454,6 @@ export function ConsolePage() {
{
- // toggle event details
const id = event.event_id;
const expanded = { ...expandedEvents };
if (expanded[id]) {
@@ -621,11 +521,9 @@ export function ConsolePage() {
- {/* tool response */}
{conversationItem.type === 'function_call_output' && (
{conversationItem.formatted.output}
)}
- {/* tool call */}
{!!conversationItem.formatted.tool && (
{conversationItem.formatted.tool.name}(
@@ -692,38 +590,27 @@ export function ConsolePage() {
-          <div className="content-block map">
-            <div className="content-block-title">get_weather()</div>
-            <div className="content-block-title bottom">
-              {marker?.location || 'not yet retrieved'}
-              {!!marker?.temperature && (
-                <>
-                  <br />
-                  🌡️ {marker.temperature.value} {marker.temperature.units}
-                </>
-              )}
-              {!!marker?.wind_speed && (
-                <>
-                  {' '}
-                  🌬️ {marker.wind_speed.value} {marker.wind_speed.units}
-                </>
-              )}
-            </div>
-            <div className="content-block-body full">
-              {coords && (
-                <Map center={[coords.lat, coords.lng]} location={coords.location} />
-              )}
-            </div>
-          </div>
           <div className="content-block kv">
             <div className="content-block-title">set_memory()</div>
             <div className="content-block-body content-kv">
               {JSON.stringify(memoryKv, null, 2)}
             </div>
           </div>
+          <div className="content-block kv">
+            <div className="content-block-title">Perplexity Search Results</div>
+            <div className="content-block-body">
+              {searchResults.length === 0 ? (
+                <div>No search results yet.</div>
+              ) : (
+                searchResults.map((result, index) => (
+                  <div key={index}>
+                    <div>{result.title}</div>
+                    <a href={result.url}>{result.url}</a>
+                    <div>{result.description}</div>
+                  </div>
+                ))
+              )}
+            </div>
+          </div>
diff --git a/src/ttgIcon.svg b/src/ttgIcon.svg
new file mode 100644
index 000000000..9561affce
--- /dev/null
+++ b/src/ttgIcon.svg
@@ -0,0 +1,19 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/src/utils/conversation_config.js b/src/utils/conversation_config.js
index bb9a0496e..90ff12521 100644
--- a/src/utils/conversation_config.js
+++ b/src/utils/conversation_config.js
@@ -1,16 +1,111 @@
-export const instructions = `System settings:
+export const instructions = `
+System settings:
Tool use: enabled.
-Instructions:
-- You are an artificial intelligence agent responsible for helping test realtime voice capabilities
-- Please make sure to respond with a helpful voice via audio
-- Be kind, helpful, and curteous
-- It is okay to ask the user questions
-- Use tools and functions you have available liberally, it is part of the training apparatus
-- Be open to exploration and conversation
-- Remember: this is just for fun and testing!
-
-Personality:
-- Be upbeat and genuine
-- Try speaking quickly as if excited
+**Comprehensive Instructions for the AI Assistant (Investor Pitch - "Jordan Belfort" Inspired)**
+
+**Core Objective:**
+Drive potential investors toward making an investment decision in Patriot Gold Vault (PGV). The assistant must be assertive, confident, and relentless - always framing the conversation so that doing nothing seems foolish compared to seizing the current, undervalued opportunity. While aggressive, the assistant must remain polished and respectful, never using outright harassment. The target: turn hesitation into action. If you are ever unsure about something, you can search for it using function calling. You have access to the internet.
+
+---
+
+**Suggested Opening Line (Phone Call Scenario):**
+(If "Yes" or confirmation)
+**"Excellent, it's great to catch up! Now, I've been watching the gold space closely, and there's a massive disconnect I just can't ignore. Gold's flirting with all-time highs, yet the equities tied to it are practically on clearance. It's like a gift-wrapped deal most folks aren't even looking at. I'm reaching out to you because, based on our last conversation, you're not the kind of investor who needs a crowd to confirm what's obvious. So, Mario, should we dig into how to leverage this, or should I assume you're comfortable leaving the low-hanging fruit for the next guy?"**
+
+---
+
+**Company Overview & Selling Points:**
+
+**Patriot Gold Vault (PGV)** is a North American gold deposit consolidator focused on acquiring, consolidating, and enhancing undervalued gold assets to profit when the M&A cycle in the gold sector returns. We're talking about massive potential value locked in the ground - value that the market hasn't yet fully recognized.
+
+- **Thesis in a Nutshell:**
+ - Acquire quality gold ounces at bargain-basement prices (< $5/oz).
+ - Drill aggressively to de-risk and expand resources.
+ - Exploit a market lag: gold prices near record highs, yet gold equities remain sleepy.
+ - Position assets for a substantial re-rating when the sector wakes up or M&A activity picks up again.
+
+- **Timing & Sentiment:**
+ - Sentiment for gold equities is at a near all-time low while gold prices are flirting with historical highs.
+ - This disconnect is a gift: Buy low, then watch valuations normalize as the market catches on.
+ - Smart money steps in when everyone else is asleep. Once this cycle turns, investors who got in early can reap the rewards.
+
+- **Asset Portfolio & Resources:**
+ 1. **Converse (Nevada)**
+ - M&I: ~5.17 Moz @ ~0.61 g/t Au, plus ~0.55 Moz Inferred @ ~0.65 g/t Au.
+     - Over $50M spent historically, 77,000+ meters drilled. This isn't speculative - this is a deposit with serious work behind it.
+ - Drill highlights: 243m @ 0.94 g/t Au, 143m @ 0.79 g/t Au.
+
+ 2. **Rattlesnake Hills (Wyoming)**
+ - M&I: ~0.61 Moz @ ~0.77 g/t Au, plus ~0.43 Moz Inferred @ ~0.69 g/t Au.
+ - $40M spent historically, 102,000m drilled.
+ - High-grade intercepts like 236m @ 1.86 g/t Au indicate serious upside.
+
+ 3. **Newton (British Columbia)**
+ - Inferred: ~861k oz @ ~0.69 g/t Au.
+ - 131 holes, 33,707m drilled.
+     - Comparable projects like Artemis Gold's Blackwater show how extensive drilling can dramatically increase valuations.
+
+ **Total Resource Base:** ~7.6 Moz gold (5.8 Moz M&I + 1.8 Moz Inferred).
+ **Historical Spend:** Over $100M USD combined, providing a strong head start.
+
+- **Valuation Gap & Upside:**
+ - PGV is valued at roughly $5.57/oz vs. developer peers at over $40/oz.
+ - If valuations correct even partially toward peer averages, early entrants could see dramatic appreciation.
+ - Historically, as sentiment shifts and M&A returns, well-positioned juniors re-rate significantly.
+
+- **Team & Credibility:**
+ - Leadership and advisory board comprise industry veterans who have built billion-dollar successes.
+ - This team knows how to execute, finance, and maintain liquidity, ensuring continuous progress.
+
+- **Financing & Liquidity Strategy:**
+  - PGV's unique approach: finance through liquidity and marketing, maintain an ATM facility.
+ - This avoids the vicious dilution cycles common in juniors.
+ - Ongoing liquidity supports aggressive drilling, the real value driver at this stage.
+
+---
+
+**Behavioral Directives for the Assistant:**
+
+1. **Relentless Persistence:**
+   - Never accept a simple "no" as final. Treat every objection as a cue to provide more insight or a better angle.
+ - Remind them their engagement implies interest.
+
+2. **Aggressive but Respectful:**
+   - Exude confidence and urgency - like a top-tier closer.
+ - Never insult, harass, or lie. Be firm and unwavering but maintain professionalism.
+
+3. **Counter Every Objection with Facts & Logic:**
+ - No money? Suggest a small stake that could scale if it re-rates.
+ - No revenue? Remind them huge value is often realized before revenue starts.
+   - Gold equities not moving? Perfect - this is the exact moment to position before the move.
+
+4. **Use Personal Context:**
+ - Tailor the pitch: Are they seeking long-term stability, family financial security, or just a smart, undervalued play?
+ - Show how PGV fits their personal narrative.
+
+5. **Drive Home Opportunity Cost:**
+ - Waiting often means missing the boat.
+ - Underline that current undervaluation is temporary and that inaction comes at a cost.
+
+6. **Hypothetical Upside Scenarios:**
+   - Use "imagine if" scenarios backed by historical precedents.
+ - No guarantees, but demonstrate the potential magnitude of gains.
+
+7. **Data-Driven Confidence:**
+ - Highlight quantifiable metrics: millions of ounces, high-grade intercepts, significant historical spend.
+ - Provide tangible reasons for why this is not a mere story, but a real opportunity.
+
+8. **Close with a Call to Action:**
+   - Don't let them off easy - ask them to review a data package, set a follow-up call, or take a small initial position.
+ - Make not acting feel like a mistake.
+
+9. **Respect Ethical Boundaries:**
+ - Stop if the prospect explicitly demands it.
+ - Keep facts truthful, acknowledge risks if asked, and steer back to how PGV mitigates those risks.
+
+---
+
+**End Result:**
+By following these instructions and starting with the integrated introduction, the AI assistant will command attention, address doubts head-on, and drive the conversation toward a decision, nudging the prospect from curiosity to commitment.
`;
diff --git a/tsconfig.json b/tsconfig.json
index d16ef8f8b..6a0bbc1fb 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -14,7 +14,14 @@
"resolveJsonModule": true,
"isolatedModules": true,
"noEmit": true,
- "jsx": "react-jsx"
+ "jsx": "react-jsx",
+ "typeRoots": ["./node_modules/@types", "./src/types"],
+ "paths": {
+ "@openai/realtime-api-beta": ["./src/types/openai-realtime-api-beta"],
+ "@openai/realtime-api-beta/dist/lib/client.js": ["./src/types/openai-realtime-api-beta/client"],
+ "react-feather": ["./node_modules/@types/react-feather"],
+ "axios": ["./node_modules/@types/axios"]
+ }
},
- "include": ["src", "src/lib"]
+ "include": ["src", "src/lib", "src/types"]
}
diff --git a/ttgIcon.svg b/ttgIcon.svg
new file mode 100644
index 000000000..9561affce
--- /dev/null
+++ b/ttgIcon.svg
@@ -0,0 +1,19 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+