import React, { useState, useEffect, useRef, useCallback } from 'react';
import { useSession } from 'next-auth/react';
import { motion, AnimatePresence } from 'framer-motion';
import { format } from 'date-fns';
import { useWebSocket } from '../../context/StableWebSocketContext';
import DocumentViewer from '../DocumentViewer';
// import VideoCall from './VideoCall'; // DISABLED - Using global SimpleVideoCall
import Peer from 'simple-peer';
interface DirectMessageUser {
id: string;
name: string;
email?: string;
}
interface DirectMessage {
id: string;
content: string;
type: string;
fileUrl?: string;
fileName?: string;
fileSize?: number;
mimeType?: string;
createdAt: string;
senderId: string;
recipientId: string;
sender: DirectMessageUser;
recipient: DirectMessageUser;
}
interface DirectChat {
id: string;
participants: DirectMessageUser[];
messages: DirectMessage[];
}
interface DirectMessageProps {
chatId: string; // This will be the other user's ID
onClose: () => void;
}
// Global state for managing window z-index
let highestZIndex = 1000;
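/**
 * Floating, draggable direct-message window for a one-on-one chat.
 * Loads history over REST, streams new messages over the shared WebSocket,
 * and carries the (mostly delegated) WebRTC video-call scaffolding.
 */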
const DirectMessage: React.FC<DirectMessageProps> = ({ chatId, onClose }) => {
console.log('[DirectMessage] Component initializing with chatId:', chatId);
const { ws, markDirectMessagesAsRead, incomingVideoCall, acceptVideoCall } = useWebSocket();
const { data: session } = useSession();
const messagesEndRef = useRef<HTMLDivElement>(null);
const peerRef = useRef<Peer.Instance | null>(null);
const windowRef = useRef<HTMLDivElement>(null);
const [messages, setMessages] = useState<DirectMessage[]>([]);
const [newMessage, setNewMessage] = useState('');
const [recipient, setRecipient] = useState<DirectMessageUser | null>(null);
const [isLoading, setIsLoading] = useState(true);
const [error, setError] = useState<string | null>(null);
const [isRecipientOnline, setIsRecipientOnline] = useState<boolean | null>(null);
// Video call states
const [localStream, setLocalStream] = useState<MediaStream | null>(null);
const [remoteStream, setRemoteStream] = useState<MediaStream | null>(null);
const [callInProgress, setCallInProgress] = useState(false);
const [incomingCall, setIncomingCall] = useState<{ senderId: string; signal: any } | null>(null);
// Drag functionality
const [isDragging, setIsDragging] = useState(false);
const [dragOffset, setDragOffset] = useState({ x: 0, y: 0 });
const [position, setPosition] = useState(() => {
if (typeof window === 'undefined') return { x: 0, y: 0 };
// Add random offset so multiple windows don't stack exactly
const offsetX = Math.floor(Math.random() * 100) - 50; // -50 to 50px
const offsetY = Math.floor(Math.random() * 100) - 50; // -50 to 50px
return {
x: Math.max(0, Math.min(window.innerWidth - 400 - 16 + offsetX, window.innerWidth - 400)),
y: Math.max(0, Math.min(window.innerHeight - 520 - 16 + offsetY, window.innerHeight - 520))
};
});
const [zIndex, setZIndex] = useState(() => ++highestZIndex);
// Use chatId as recipientId
const recipientId = chatId;
console.log('[DirectMessage] Initial state - recipientId:', recipientId, 'session:', session?.user?.name);
// Drag event handlers
const bringToFront = () => {
highestZIndex += 1;
setZIndex(highestZIndex);
};
const handleMouseDown = (e: React.MouseEvent) => {
if ((e.target as Element).closest('button, input, .no-drag')) return;
bringToFront();
setIsDragging(true);
const rect = windowRef.current?.getBoundingClientRect();
if (rect) {
setDragOffset({
x: e.clientX - rect.left,
y: e.clientY - rect.top,
});
}
};
const handleMouseMove = useCallback((e: MouseEvent) => {
if (!isDragging) return;
// Calculate new position
let newX = e.clientX - dragOffset.x;
let newY = e.clientY - dragOffset.y;
// Constrain within viewport
const windowWidth = 384; // w-96 = 384px
const windowHeight = 500; // h-[500px]
newX = Math.max(0, Math.min(newX, window.innerWidth - windowWidth));
newY = Math.max(0, Math.min(newY, window.innerHeight - windowHeight));
setPosition({
x: newX,
y: newY,
});
}, [isDragging, dragOffset]);
const handleMouseUp = useCallback(() => {
setIsDragging(false);
}, []);
// Add/remove global mouse event listeners
useEffect(() => {
if (isDragging) {
document.addEventListener('mousemove', handleMouseMove);
document.addEventListener('mouseup', handleMouseUp);
document.body.style.cursor = 'grabbing';
document.body.style.userSelect = 'none';
} else {
document.removeEventListener('mousemove', handleMouseMove);
document.removeEventListener('mouseup', handleMouseUp);
document.body.style.cursor = '';
document.body.style.userSelect = '';
}
return () => {
document.removeEventListener('mousemove', handleMouseMove);
document.removeEventListener('mouseup', handleMouseUp);
document.body.style.cursor = '';
document.body.style.userSelect = '';
};
}, [isDragging, handleMouseMove, handleMouseUp]);
// Mark messages as read when component opens
useEffect(() => {
if (recipientId && markDirectMessagesAsRead) {
markDirectMessagesAsRead(recipientId);
}
}, [recipientId, markDirectMessagesAsRead]);
// Incoming-call offers and call UI are handled by the global SimpleVideoCall component now
useEffect(() => {
if (!ws) return;
const handleMessage = (event: MessageEvent) => {
try {
const message = JSON.parse(event.data);
console.log('Direct message WebSocket received:', message);
switch (message.type) {
case 'DIRECT_MESSAGE':
if ( (message.data.senderId === session?.user.id && message.data.recipientId === recipientId) ||
(message.data.senderId === recipientId && message.data.recipientId === session?.user.id) )
{
setMessages(prev => [...prev, message.data]);
// Mark messages as read if they're from the current conversation partner
if (message.data.senderId === recipientId && markDirectMessagesAsRead) {
markDirectMessagesAsRead(recipientId);
}
}
break;
// WebRTC Signaling
case 'webrtc-offer':
console.log('Received webrtc-offer from:', message.senderId, 'Expected from:', recipientId);
// The global WebSocket context will handle this and show the notification
// We don't need to handle it locally anymore
break;
case 'webrtc-answer':
if (peerRef.current && message.senderId === recipientId) {
peerRef.current.signal(message.data.signal);
}
break;
case 'webrtc-ice-candidate':
if (peerRef.current && message.senderId === recipientId) {
peerRef.current.signal(message.data.candidate);
}
break;
case 'webrtc-end-call':
if (message.senderId === recipientId) {
endCallCleanup();
}
break;
case 'webrtc-error':
if (message.recipientId === recipientId) {
console.log('WebRTC error:', message.error, message.message);
setCallInProgress(false);
endCallCleanup();
// Show user-friendly error message
if (message.error === 'RECIPIENT_OFFLINE') {
alert(`Cannot start video call: ${recipient?.name || 'User'} is not currently online. Please try again when they are available.`);
} else {
alert(`Video call failed: ${message.message}`);
}
}
break;
}
} catch (error) {
console.error('Error parsing direct message WebSocket:', error);
}
};
ws.addEventListener('message', handleMessage);
return () => ws.removeEventListener('message', handleMessage);
}, [ws, recipientId, session, markDirectMessagesAsRead]);
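// Load the existing conversation with this user over REST and resolve the recipient's profile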
const fetchChatHistory = async () => {
if (!recipientId) return;
setIsLoading(true);
try {
const res = await fetch(`/api/chat/direct/${recipientId}`, {
method: 'GET',
credentials: 'same-origin',
});
if (!res.ok) throw new Error('Failed to fetch chat history');
const data: DirectChat = await res.json();
setMessages(data.messages || []);
setRecipient(data.participants.find((p: DirectMessageUser) => p.id === recipientId) || null);
} catch (err) {
setError('Failed to load chat history');
console.error(err);
} finally {
setIsLoading(false);
}
};
useEffect(() => {
fetchChatHistory();
checkRecipientOnlineStatus();
}, [recipientId]);
const checkRecipientOnlineStatus = async () => {
if (!recipientId) return;
try {
const response = await fetch(`/api/user/online-status/${recipientId}`);
if (response.ok) {
const status = await response.json();
setIsRecipientOnline(status.isOnline);
}
} catch (error) {
console.warn('Error checking recipient online status:', error);
}
};
// Check online status periodically
useEffect(() => {
const interval = setInterval(checkRecipientOnlineStatus, 30000); // Check every 30 seconds
return () => clearInterval(interval);
}, [recipientId]);
// Listen for global video call acceptance and rejection
// REMOVED: complex event listeners - using a simple localStorage-based approach now
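// Optimistic send: append a temporary message locally, persist it via the REST API,
// swap in the saved message, then broadcast it over the WebSocket; roll back on failure.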
const sendMessage = async () => {
if (!newMessage.trim() || !ws || !session) return;
const optimisticMessage: DirectMessage = {
id: `temp-${Date.now()}`,
content: newMessage.trim(),
senderId: session.user.id,
recipientId: recipientId,
createdAt: new Date().toISOString(),
type: 'TEXT',
sender: { id: session.user.id, name: session.user.name || "Me" },
recipient: recipient || { id: recipientId, name: "Unknown" }
};
// Add optimistic message immediately
setMessages(prev => [...prev, optimisticMessage]);
setNewMessage('');
try {
// Send via API to save to database
const response = await fetch(`/api/chat/direct/${recipientId}`, {
method: 'POST',
credentials: 'same-origin',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ content: optimisticMessage.content, type: 'TEXT' })
});
if (!response.ok) {
throw new Error('Failed to send message');
}
const savedMessage = await response.json();
// Update the optimistic message with the real one
setMessages(prev => prev.map(msg =>
msg.id === optimisticMessage.id ? savedMessage : msg
));
// Send via WebSocket for real-time delivery
if (ws.readyState === WebSocket.OPEN) {
ws.send(JSON.stringify({
type: 'DIRECT_MESSAGE',
data: {
...savedMessage,
recipientId: recipientId
}
}));
}
} catch (error) {
console.error('Failed to send message:', error);
// Remove the optimistic message on error
setMessages(prev => prev.filter(msg => msg.id !== optimisticMessage.id));
}
};
const scrollToBottom = () => {
messagesEndRef.current?.scrollIntoView({ behavior: 'smooth' });
};
useEffect(scrollToBottom, [messages]);
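// Build a simple-peer connection for the call. With trickle enabled, offers, answers and
// ICE candidates all surface through the 'signal' event and are relayed over the WebSocket.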
const createPeer = (stream: MediaStream, initiator: boolean) => {
const peer = new Peer({
initiator,
trickle: true,
stream,
});
peer.on('signal', (signal) => {
const type = initiator ? 'webrtc-offer' : 'webrtc-answer';
console.log(`Peer signaling: ${type} for recipient:`, recipientId);
if (ws?.readyState === WebSocket.OPEN) {
const message = {
type,
data: {
recipientId,
signal,
senderName: session?.user?.name || 'Unknown User'
},
senderId: session?.user?.id,
senderName: session?.user?.name || 'Unknown User'
};
ws.send(JSON.stringify(message));
console.log(`Sent ${type} signal via WebSocket to:`, recipientId);
} else {
console.error('Cannot send WebRTC signal - WebSocket not ready:', ws?.readyState);
}
});
peer.on('stream', (remoteStream) => setRemoteStream(remoteStream));
// Note: with trickle enabled, simple-peer delivers ICE candidates through the 'signal'
// event above, so no separate candidate listener is needed here.
peer.on('close', () => endCallCleanup());
peer.on('error', (err) => {
console.error('Peer error:', err);
endCallCleanup();
});
return peer;
};
// Create a fake/mock media stream for development
const createMockMediaStream = (): MediaStream => {
const canvas = document.createElement('canvas');
canvas.width = 640;
canvas.height = 480;
const ctx = canvas.getContext('2d');
// Create a simple animated pattern
let frame = 0;
const animate = () => {
if (ctx) {
// Clear canvas
ctx.fillStyle = '#1f2937';
ctx.fillRect(0, 0, canvas.width, canvas.height);
// Draw animated circle
const x = canvas.width / 2 + Math.sin(frame * 0.05) * 100;
const y = canvas.height / 2 + Math.cos(frame * 0.03) * 50;
ctx.fillStyle = '#3b82f6';
ctx.beginPath();
ctx.arc(x, y, 30, 0, 2 * Math.PI);
ctx.fill();
// Draw text
ctx.fillStyle = '#ffffff';
ctx.font = '20px Arial';
ctx.textAlign = 'center';
ctx.fillText('DEV MODE - Mock Video', canvas.width / 2, canvas.height / 2 + 100);
ctx.fillText(`Frame: ${frame}`, canvas.width / 2, canvas.height / 2 + 130);
frame++;
}
requestAnimationFrame(animate);
};
animate();
// Get video stream from canvas
const videoStream = (canvas as any).captureStream(30); // 30 FPS
// Create audio context for mock audio
const audioContext = new (window.AudioContext || (window as any).webkitAudioContext)();
const oscillator = audioContext.createOscillator();
const gainNode = audioContext.createGain();
oscillator.connect(gainNode);
gainNode.connect(audioContext.destination);
oscillator.frequency.value = 440; // A4 note
gainNode.gain.value = 0.1; // Low volume
oscillator.start();
// Create audio stream
const audioDestination = audioContext.createMediaStreamDestination();
gainNode.connect(audioDestination);
// Combine video and audio streams
const combinedStream = new MediaStream([
...videoStream.getVideoTracks(),
...audioDestination.stream.getAudioTracks()
]);
return combinedStream;
};
// Incoming-call ringtone intentionally disabled
const playIncomingCallSound = () => {
// Intentionally a no-op - the ringtone was removed because it was disruptive
};
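// Start an outgoing call: verify the WebSocket and the recipient's presence, acquire media
// with progressive fallbacks (video+audio, audio-only, video-only, mock stream),
// then create the initiating peer.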
const startCall = async () => {
if (!ws) {
alert('WebSocket not connected. Please wait for connection.');
return;
}
// Check WebSocket connection state
if (ws.readyState !== WebSocket.OPEN) {
alert('WebSocket connection not ready. Please wait a moment and try again.');
return;
}
// Check if recipient is online before starting call
try {
console.log('Checking if recipient is online...');
const response = await fetch(`/api/user/online-status/${recipientId}`);
if (response.ok) {
const status = await response.json();
if (!status.isOnline) {
alert(`Cannot start video call: ${recipient?.name || 'User'} is not currently online. Please try again when they are available.`);
return;
}
console.log('Recipient is online, proceeding with call');
} else {
console.warn('Could not check online status, proceeding anyway');
}
} catch (error) {
console.warn('Error checking online status:', error);
// Continue anyway - the server will handle offline users
}
// Check if MediaDevices API is available
if (!navigator.mediaDevices || !navigator.mediaDevices.getUserMedia) {
alert(
'Video Calling Not Supported\n\n' +
'Your browser or connection doesn\'t support video calling.\n\n' +
'Requirements:\n' +
'• Use HTTPS (secure connection)\n' +
'• Use a modern browser (Chrome, Firefox, Safari, Edge)\n' +
'• Enable camera/microphone permissions\n\n' +
'Current URL: ' + window.location.protocol + '//' + window.location.host + '\n\n' +
'If you\'re using HTTP, try accessing via HTTPS instead.'
);
return;
}
console.log('Starting video call...');
console.log('Connection:', window.location.protocol, window.location.host);
console.log('WebSocket state:', ws.readyState, 'Recipient ID:', recipientId);
setCallInProgress(true);
try {
let stream: MediaStream | null = null;
let useMockStream = false;
try {
// First check if devices are available
const devices = await navigator.mediaDevices.enumerateDevices();
const hasVideoInput = devices.some(device => device.kind === 'videoinput' && device.deviceId);
const hasAudioInput = devices.some(device => device.kind === 'audioinput' && device.deviceId);
if (!hasVideoInput && !hasAudioInput) {
console.warn('No real media devices found, using mock stream for development');
useMockStream = true;
} else {
// Request camera and microphone permissions with fallbacks
console.log('Requesting camera and microphone access...');
try {
// Try with both video and audio first
stream = await navigator.mediaDevices.getUserMedia({
video: {
width: { ideal: 1280 },
height: { ideal: 720 }
},
audio: true
});
} catch (err) {
console.warn('Failed to get both video and audio, trying audio only...', err);
try {
// Fallback to audio only
stream = await navigator.mediaDevices.getUserMedia({
video: false,
audio: true
});
} catch (audioErr) {
console.warn('Failed to get audio, trying video only...', audioErr);
try {
// Fallback to video only
stream = await navigator.mediaDevices.getUserMedia({
video: {
width: { ideal: 640 },
height: { ideal: 480 }
},
audio: false
});
} catch (videoErr) {
console.warn('Failed to get any real media, using mock stream...', videoErr);
useMockStream = true;
}
}
}
}
} catch (err) {
console.warn('Device enumeration failed, using mock stream...', err);
useMockStream = true;
}
// Use mock stream if no real devices available or if stream is still null
if (useMockStream || !stream) {
console.log('Creating mock media stream for development...');
stream = createMockMediaStream();
// Show development mode notification
alert(
'DEVELOPMENT MODE\n\n' +
'No camera/microphone detected.\n' +
'Using mock video stream for testing.\n\n' +
'Video call will proceed with fake media.'
);
}
console.log('Got media stream:', stream);
setLocalStream(stream);
// Create peer and start signaling
console.log('Creating peer connection and initiating call...');
peerRef.current = createPeer(stream, true);
// Add a small delay to ensure peer is ready, then log the signaling attempt
setTimeout(() => {
console.log('WebRTC signaling should have started. Check server logs for delivery status.');
}, 1000);
} catch (err) {
console.error('Failed to start call:', err);
setCallInProgress(false);
// Send a cancellation signal so the recipient's incoming-call notification is dismissed
if (ws?.readyState === WebSocket.OPEN) {
console.log('Sending call cancellation signal to:', recipientId);
ws.send(JSON.stringify({
type: 'webrtc-call-cancelled',
data: {
recipientId,
reason: 'call_failed'
},
senderId: session?.user?.id,
senderName: session?.user?.name || 'Unknown User'
}));
}
// Show user-friendly error message
let errorMessage = 'Failed to start video call.\n\n';
const error = err as any; // Type assertion for DOM error objects
if (error.name === 'NotAllowedError') {
errorMessage += 'Permission Denied\nPlease allow camera and microphone access in your browser settings.\n\n';
} else if (error.name === 'NotFoundError') {
errorMessage += 'No Camera/Microphone Found\nPlease check:\n• Camera/microphone are connected\n• Other apps aren\'t using them\n• Try refreshing the page\n\n';
} else if (error.name === 'NotReadableError') {
errorMessage += 'Device In Use\nPlease:\n• Close other video calling apps\n• Restart your browser\n• Check device connections\n\n';
} else if (error.name === 'OverconstrainedError') {
errorMessage += 'Device Constraints Error\nYour camera doesn\'t support the requested settings.\n\n';
} else if (error.message && error.message.includes('mediaDevices')) {
errorMessage += 'Browser/Connection Issue\nVideo calls require HTTPS and a modern browser.\n\n';
} else {
errorMessage += `Error: ${error.message || 'Unknown error'}\n\n`;
}
alert(errorMessage);
}
};
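// Answer an incoming call: capture local media (falling back to a mock stream),
// create a non-initiating peer, and feed it the caller's offer signal.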
const answerCall = async () => {
if (!incomingCall || !ws) return;
setCallInProgress(true);
try {
let stream: MediaStream;
try {
// Try to get real media first
stream = await navigator.mediaDevices.getUserMedia({ video: true, audio: true });
} catch (err) {
console.warn('Failed to get real media for answering call, using mock stream...', err);
stream = createMockMediaStream();
alert(
'DEVELOPMENT MODE\n\n' +
'Answering call with mock video stream.\n' +
'No real camera/microphone detected.'
);
}
setLocalStream(stream);
peerRef.current = createPeer(stream, false);
peerRef.current.signal(incomingCall.signal);
setIncomingCall(null);
} catch (err) {
console.error('Failed to answer call', err);
setCallInProgress(false);
}
};
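// Tear down the peer connection, stop local media tracks, and reset all call state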
const endCallCleanup = () => {
if (peerRef.current) {
peerRef.current.destroy();
peerRef.current = null;
}
if (localStream) {
localStream.getTracks().forEach(track => track.stop());
setLocalStream(null);
}
setRemoteStream(null);
setCallInProgress(false);
setIncomingCall(null);
};
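// End the call: notify the other side over the WebSocket, then clean up locally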
const endCall = () => {
console.log('Ending call with:', recipientId);
if (ws?.readyState === WebSocket.OPEN) {
// Send both end call signal and cancellation signal
ws.send(JSON.stringify({
type: 'webrtc-end-call',
data: { recipientId },
senderId: session?.user?.id,
senderName: session?.user?.name || 'Unknown User'
}));
// Also send a cancellation to dismiss any lingering incoming-call notification
ws.send(JSON.stringify({
type: 'webrtc-call-cancelled',
data: {
recipientId,
reason: 'call_ended'
},
senderId: session?.user?.id,
senderName: session?.user?.name || 'Unknown User'
}));
console.log('Sent call end and cancellation signals to:', recipientId);
}
endCallCleanup();
};
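// Render a single chat bubble, aligned right for messages sent by the current user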
const renderMessage = (message: DirectMessage, index: number) => {
const isSender = message.senderId === session?.user?.id;
return (
<motion.div
key={message.id || index}
initial={{ opacity: 0, y: 20 }}
animate={{ opacity: 1, y: 0 }}
className={`flex ${isSender ? 'justify-end' : 'justify-start'} mb-2`}
>
<div className={`px-4 py-2 rounded-lg ${isSender ? 'bg-blue-500 text-white' : 'bg-gray-200 text-black'}`}>
<p>{message.content}</p>
<span className="text-xs opacity-75">{format(new Date(message.createdAt), "HH:mm")}</span>
</div>
</motion.div>
);
};
if (isLoading) return <div>Loading...</div>;
if (error) return <div>Error: {error}</div>;
// Old in-component video call UI removed - the global SimpleVideoCall handles call UI now
// Local incoming call UI removed - now handled by global IncomingCallNotification component
// Removed the complex event-based call handling - using a simple localStorage approach instead;
// the old event-based system was unreliable.
return (
<motion.div
ref={windowRef}
initial={{ opacity: 0, y: 50 }}
animate={{ opacity: 1, y: 0 }}
exit={{ opacity: 0, y: 50 }}
className="fixed w-96 h-[500px] bg-white dark:bg-gray-800 shadow-2xl rounded-lg flex flex-col"
style={{
left: position.x,
top: position.y,
zIndex: zIndex,
cursor: isDragging ? 'grabbing' : 'default',
}}
onMouseDown={bringToFront}
>
<header
className={`flex items-center justify-between p-4 border-b-2 border-blue-200 dark:border-blue-700 rounded-t-lg shadow-md cursor-grab active:cursor-grabbing select-none transition-colors ${
isDragging
? 'bg-blue-100 dark:bg-blue-800'
: 'bg-blue-50 dark:bg-blue-900 hover:bg-blue-100 dark:hover:bg-blue-800'
}`}
onMouseDown={handleMouseDown}
>
<h2 className="font-bold text-lg text-blue-800 dark:text-blue-200 flex items-center gap-2 pointer-events-none">
Chat with {recipient?.name || 'User'}
{isDragging && <span className="text-sm opacity-75">(moving)</span>}
</h2>
<div className="flex items-center space-x-2 no-drag">
{/* Video call removed for now */}
<button
onClick={onClose}
className="p-1 rounded-full text-gray-500 dark:text-gray-400 hover:bg-gray-200 dark:hover:bg-gray-700 transition-colors no-drag"
title="Close Chat"
>
<svg xmlns="http://www.w3.org/2000/svg" className="h-4 w-4" fill="none" viewBox="0 0 24 24" stroke="currentColor">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M6 18L18 6M6 6l12 12" />
</svg>
</button>
</div>
</header>
<div className="flex-1 overflow-y-auto p-4 space-y-4 no-drag">
{/* Welcome message for empty chat */}
{messages.length === 0 && (
<div className="text-center py-4 bg-blue-50 dark:bg-blue-900/20 rounded-lg border border-blue-200 dark:border-blue-700">
<div className="mb-3">
<svg xmlns="http://www.w3.org/2000/svg" className="h-8 w-8 mx-auto text-blue-500" fill="none" viewBox="0 0 24 24" stroke="currentColor">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M8 12h.01M12 12h.01M16 12h.01M21 12c0 4.418-4.03 8-9 8a9.863 9.863 0 01-4.255-.949L3 20l1.395-3.72C3.512 15.042 3 13.574 3 12c0-4.418 4.03-8 9-8s9 3.582 9 8z" />
</svg>
</div>
<h3 className="text-sm font-semibold text-blue-800 dark:text-blue-200 mb-2">
Start Chatting
</h3>
<p className="text-xs text-blue-600 dark:text-blue-300">
Send a message to {recipient?.name || 'this user'}
</p>
</div>
)}
{messages.map(renderMessage)}
<div ref={messagesEndRef} />
</div>
<div className="p-4 bg-white dark:bg-gray-900 border-t border-gray-200 dark:border-gray-700 no-drag">
<div className="flex items-center bg-gray-100 dark:bg-gray-800 rounded-lg px-2 py-1 no-drag">
<input
type="text"
value={newMessage}
onChange={(e) => setNewMessage(e.target.value)}
onKeyDown={(e) => e.key === 'Enter' && sendMessage()}
placeholder="Type a message..."
className="flex-1 bg-transparent border-none focus:ring-0 text-gray-900 dark:text-white placeholder-gray-500 dark:placeholder-gray-400 no-drag"
/>
<button
onClick={sendMessage}
className="px-3 py-2 bg-blue-500 hover:bg-blue-600 disabled:bg-gray-400 text-white rounded-lg transition-colors disabled:cursor-not-allowed no-drag"
disabled={!newMessage.trim() || !ws}
title="Send Message"
>
<svg xmlns="http://www.w3.org/2000/svg" className="h-4 w-4" fill="none" viewBox="0 0 24 24" stroke="currentColor">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M12 19l9 2-9-18-9 18 9-2zm0 0v-8" />
</svg>
</button>
</div>
</div>
</motion.div>
);
};
export default DirectMessage;