Skip to content

Commit

Permalink
Merge pull request #59 from sauravpanda/58-add-console-logs-in-chat-demo-to-debug-incase-of-loading-issue
Browse files Browse the repository at this point in the history

Added logs for debugging
  • Loading branch information
sauravpanda authored Jan 26, 2025
2 parents 1ffd46e + 38e68f4 commit 4b4babd
Showing 1 changed file with 22 additions and 2 deletions.
24 changes: 22 additions & 2 deletions examples/chat-demo/src/components/ChatInterface.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -423,6 +423,7 @@ export default function ChatInterface({ children }: ChatInterfaceProps) {


const loadModel = async () => {
console.log(`[BrowserAI] Starting to load model: ${selectedModel}`);
setLoading(true);
setLoadError(null);
const startTime = performance.now();
Expand All @@ -437,10 +438,12 @@ export default function ChatInterface({ children }: ChatInterfaceProps) {
try {
await browserAI.loadModel(selectedModel, {
onProgress: (progress: any) => {
console.log(`[BrowserAI] Loading progress:`, progress);
const currentTime = performance.now();
const elapsedTime = (currentTime - startTime) / 1000; // in seconds
const progressPercent = progress.progress;
const text = progress.text;
console.log(`[BrowserAI] Loading progress:`, progressPercent);
// Calculate estimated time remaining
let estimatedTimeRemaining = 0;
if (progressPercent > 0) {
Expand Down Expand Up @@ -480,6 +483,7 @@ export default function ChatInterface({ children }: ChatInterfaceProps) {
});

const loadTime = performance.now() - startTime;
console.log(`[BrowserAI] Model loaded successfully in ${loadTime.toFixed(0)}ms`);
const memoryAfter = (performance as any).memory?.usedJSHeapSize;
const memoryIncrease = memoryAfter - memoryBefore;

Expand All @@ -490,10 +494,13 @@ export default function ChatInterface({ children }: ChatInterfaceProps) {
setModelLoaded(true);
} catch (err) {
const error = err as Error;
console.error('[BrowserAI] Error loading model:', {
model: selectedModel,
error: error.message,
stack: error.stack
});
setLoadError(error.message);
setModelLoaded(false);

console.error('Error loading model:', error);
}
setLoading(false);
setLoadingProgress(0);
Expand All @@ -512,6 +519,7 @@ export default function ChatInterface({ children }: ChatInterfaceProps) {
const handleSend = async () => {
if (!input.trim() || !modelLoaded) return;

console.log(`[BrowserAI] Starting text generation with input length: ${input.length}`);
const userMessage = { text: input, isUser: true };
setMessages(prev => [...prev, userMessage]);
setInput('');
Expand All @@ -527,10 +535,12 @@ export default function ChatInterface({ children }: ChatInterfaceProps) {
});

let response = '';
let chunkCount = 0;
for await (const chunk of chunks as AsyncIterable<{
choices: Array<{ delta: { content?: string } }>,
usage: any
}>) {
chunkCount++;
const newContent = chunk.choices[0]?.delta.content || '';
const newUsage = chunk.usage;
response += newContent;
Expand All @@ -545,6 +555,11 @@ export default function ChatInterface({ children }: ChatInterfaceProps) {
});
}
const responseTime = performance.now() - startTime;
console.log('[BrowserAI] Text generation completed:', {
responseTimeMs: responseTime.toFixed(0),
outputLength: response.length,
chunks: chunkCount
});

setStats(prev => {
const newResponseHistory = [...prev.responseHistory, responseTime].slice(-10);
Expand All @@ -562,6 +577,11 @@ export default function ChatInterface({ children }: ChatInterfaceProps) {

} catch (err) {
const error = err as Error;
console.error('[BrowserAI] Error generating text:', {
model: selectedModel,
error: error.message,
stack: error.stack
});
posthog.capture('message_error', {
model: selectedModel,
error: error.message
Expand Down

0 comments on commit 4b4babd

Please sign in to comment.