
Web Integration

Learn how to integrate Yachtsy Agent into web applications using popular JavaScript frameworks and vanilla JavaScript.

Quick Start with Vanilla JavaScript

The simplest way to get started is with the fetch API:

<!DOCTYPE html>
<html>
<head>
    <title>Yachtsy Agent Demo</title>
</head>
<body>
    <div id="chat-container">
        <div id="messages"></div>
        <input type="text" id="question" placeholder="Ask about sailing...">
        <button onclick="askYachtsy()">Ask</button>
    </div>

    <script>
        // Demo only: a key placed in client-side code is visible to every user.
        // In production, call a backend proxy instead (see Security Best Practices below).
        const API_KEY = 'your-api-key-here';
        const BASE_URL = 'http://localhost:8000/v1';

        async function askYachtsy() {
            const question = document.getElementById('question').value;
            const messagesDiv = document.getElementById('messages');
            
            // Add user message
            messagesDiv.innerHTML += `<div><strong>You:</strong> ${question}</div>`;
            
            try {
                const response = await fetch(`${BASE_URL}/chat/completions`, {
                    method: 'POST',
                    headers: {
                        'Authorization': `Bearer ${API_KEY}`,
                        'Content-Type': 'application/json'
                    },
                    body: JSON.stringify({
                        model: 'yachtsy-agent',
                        messages: [
                            { role: 'user', content: question }
                        ]
                    })
                });

                if (!response.ok) {
                    throw new Error(`Request failed with status ${response.status}`);
                }

                const data = await response.json();
                const answer = data.choices[0].message.content;
                
                // Add assistant response
                messagesDiv.innerHTML += `<div><strong>Yachtsy:</strong> ${answer}</div>`;
                
            } catch (error) {
                console.error('Error:', error);
                messagesDiv.innerHTML += `<div><strong>Error:</strong> ${error.message}</div>`;
            }
            
            document.getElementById('question').value = '';
        }
    </script>
</body>
</html>
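
The Quick Start example waits for the complete reply. If you want tokens to appear as they arrive, and assuming the endpoint honors an OpenAI-style stream: true request and returns text/event-stream chunks, a minimal streaming sketch (reusing API_KEY and BASE_URL from the example above) looks like this:

// Sketch: stream tokens with fetch + ReadableStream.
// Assumes the server emits OpenAI-style "data: {...}\n\n" events ending with "data: [DONE]".
async function askYachtsyStreaming(question, onToken) {
    const response = await fetch(`${BASE_URL}/chat/completions`, {
        method: 'POST',
        headers: {
            'Authorization': `Bearer ${API_KEY}`,
            'Content-Type': 'application/json'
        },
        body: JSON.stringify({
            model: 'yachtsy-agent',
            messages: [{ role: 'user', content: question }],
            stream: true
        })
    });

    const reader = response.body.getReader();
    const decoder = new TextDecoder();
    let buffer = '';

    while (true) {
        const { done, value } = await reader.read();
        if (done) break;
        buffer += decoder.decode(value, { stream: true });

        // SSE events are separated by blank lines; keep any partial event in the buffer
        const events = buffer.split('\n\n');
        buffer = events.pop();

        for (const event of events) {
            const line = event.trim();
            if (!line.startsWith('data:')) continue;
            const payload = line.slice(5).trim();
            if (payload === '[DONE]') return;
            const token = JSON.parse(payload).choices[0]?.delta?.content || '';
            if (token) onToken(token);
        }
    }
}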

React Integration

Installation

npm install openai
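
The component below reads the key from process.env.REACT_APP_YACHTSY_API_KEY, which assumes Create React App conventions; a Vite project would use import.meta.env.VITE_YACHTSY_API_KEY instead, as in the Vue example later. A minimal .env sketch:

# .env at the project root (Create React App naming assumed)
REACT_APP_YACHTSY_API_KEY=your-api-key-here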

Basic React Component

import React, { useState } from 'react';
import OpenAI from 'openai';

const client = new OpenAI({
  baseURL: 'http://localhost:8000/v1',
  apiKey: process.env.REACT_APP_YACHTSY_API_KEY,
  dangerouslyAllowBrowser: true // Only for demo - use backend proxy in production
});

function SailingChat() {
  const [messages, setMessages] = useState([]);
  const [input, setInput] = useState('');
  const [loading, setLoading] = useState(false);

  const sendMessage = async () => {
    if (!input.trim()) return;

    const userMessage = { role: 'user', content: input };
    setMessages(prev => [...prev, userMessage]);
    setLoading(true);
    setInput('');

    try {
      const response = await client.chat.completions.create({
        model: 'yachtsy-agent',
        messages: [...messages, userMessage],
        temperature: 0.7
      });

      const assistantMessage = {
        role: 'assistant',
        content: response.choices[0].message.content
      };

      setMessages(prev => [...prev, assistantMessage]);
    } catch (error) {
      console.error('Error:', error);
      setMessages(prev => [...prev, {
        role: 'assistant',
        content: 'Sorry, I encountered an error. Please try again.'
      }]);
    } finally {
      setLoading(false);
    }
  };

  return (
    <div className="sailing-chat">
      <div className="messages">
        {messages.map((msg, index) => (
          <div key={index} className={`message ${msg.role}`}>
            <strong>{msg.role === 'user' ? 'You' : 'Yachtsy'}:</strong>
            <p>{msg.content}</p>
          </div>
        ))}
        {loading && <div className="loading">Yachtsy is thinking...</div>}
      </div>
      
      <div className="input-area">
        <input
          type="text"
          value={input}
          onChange={(e) => setInput(e.target.value)}
          onKeyDown={(e) => e.key === 'Enter' && sendMessage()}
          placeholder="Ask about sailing, yachts, navigation..."
          disabled={loading}
        />
        <button onClick={sendMessage} disabled={loading}>
          Send
        </button>
      </div>
    </div>
  );
}

export default SailingChat;
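
To try the component, render it anywhere in your app (the './SailingChat' import path assumes the component lives in its own file):

// App.jsx -- minimal page that mounts the chat component
import SailingChat from './SailingChat';

export default function App() {
  return <SailingChat />;
}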

Streaming React Component

import React, { useState } from 'react';
import OpenAI from 'openai';

const client = new OpenAI({
  baseURL: 'http://localhost:8000/v1',
  apiKey: process.env.REACT_APP_YACHTSY_API_KEY,
  dangerouslyAllowBrowser: true
});

function StreamingSailingChat() {
  const [messages, setMessages] = useState([]);
  const [input, setInput] = useState('');
  const [streaming, setStreaming] = useState(false);

  const sendMessage = async () => {
    if (!input.trim()) return;

    const userMessage = { role: 'user', content: input };
    setMessages(prev => [...prev, userMessage]);
    setStreaming(true);
    setInput('');

    // Add an empty assistant message to stream into.
    // `messages` still holds the pre-send state, so the user message lands at
    // index messages.length and the assistant placeholder right after it.
    const assistantMessageIndex = messages.length + 1;
    setMessages(prev => [...prev, { role: 'assistant', content: '' }]);

    try {
      const stream = await client.chat.completions.create({
        model: 'yachtsy-agent',
        messages: [...messages, userMessage],
        stream: true,
        temperature: 0.7
      });

      for await (const chunk of stream) {
        const content = chunk.choices[0]?.delta?.content || '';
        if (content) {
          setMessages(prev => {
            const newMessages = [...prev];
            newMessages[assistantMessageIndex] = {
              ...newMessages[assistantMessageIndex],
              content: newMessages[assistantMessageIndex].content + content
            };
            return newMessages;
          });
        }
      }
    } catch (error) {
      console.error('Error:', error);
      setMessages(prev => {
        const newMessages = [...prev];
        newMessages[assistantMessageIndex] = {
          role: 'assistant',
          content: 'Sorry, I encountered an error. Please try again.'
        };
        return newMessages;
      });
    } finally {
      setStreaming(false);
    }
  };

  return (
    <div className="streaming-chat">
      <div className="messages">
        {messages.map((msg, index) => (
          <div key={index} className={`message ${msg.role}`}>
            <strong>{msg.role === 'user' ? 'You' : 'Yachtsy'}:</strong>
            <p>{msg.content}</p>
          </div>
        ))}
      </div>
      
      <div className="input-area">
        <input
          type="text"
          value={input}
          onChange={(e) => setInput(e.target.value)}
          onKeyDown={(e) => e.key === 'Enter' && sendMessage()}
          placeholder="Ask about sailing..."
          disabled={streaming}
        />
        <button onClick={sendMessage} disabled={streaming}>
          {streaming ? 'Streaming...' : 'Send'}
        </button>
      </div>
    </div>
  );
}

export default StreamingSailingChat;

Vue.js Integration

Vue 3 Composition API

<template>
  <div class="sailing-chat">
    <div class="messages">
      <div 
        v-for="(message, index) in messages" 
        :key="index"
        :class="`message ${message.role}`"
      >
        <strong>{{ message.role === 'user' ? 'You' : 'Yachtsy' }}:</strong>
        <p>{{ message.content }}</p>
      </div>
      <div v-if="loading" class="loading">Yachtsy is thinking...</div>
    </div>
    
    <div class="input-area">
      <input
        v-model="input"
        @keyup.enter="sendMessage"
        :disabled="loading"
        placeholder="Ask about sailing, yachts, navigation..."
      />
      <button @click="sendMessage" :disabled="loading">
        Send
      </button>
    </div>
  </div>
</template>

<script setup>
import { ref } from 'vue'
import OpenAI from 'openai'

const client = new OpenAI({
  baseURL: 'http://localhost:8000/v1',
  apiKey: import.meta.env.VITE_YACHTSY_API_KEY,
  dangerouslyAllowBrowser: true
})

const messages = ref([])
const input = ref('')
const loading = ref(false)

const sendMessage = async () => {
  if (!input.value.trim()) return

  const userMessage = { role: 'user', content: input.value }
  messages.value.push(userMessage)
  loading.value = true
  input.value = ''

  try {
    const response = await client.chat.completions.create({
      model: 'yachtsy-agent',
      messages: messages.value,
      temperature: 0.7
    })

    const assistantMessage = {
      role: 'assistant',
      content: response.choices[0].message.content
    }

    messages.value.push(assistantMessage)
  } catch (error) {
    console.error('Error:', error)
    messages.value.push({
      role: 'assistant',
      content: 'Sorry, I encountered an error. Please try again.'
    })
  } finally {
    loading.value = false
  }
}
</script>

Next.js Integration

Create a server-side API route that proxies requests to Yachtsy Agent so your API key never reaches the browser. With the Pages Router this lives at pages/api/chat.js (an App Router sketch for app/api/chat/route.js follows this example):

// pages/api/chat.js (Pages Router)
import OpenAI from 'openai';

const client = new OpenAI({
  baseURL: 'http://localhost:8000/v1',
  apiKey: process.env.YACHTSY_API_KEY
});

export default async function handler(req, res) {
  if (req.method !== 'POST') {
    return res.status(405).json({ error: 'Method not allowed' });
  }

  try {
    const { messages, stream = false } = req.body;

    if (stream) {
      // Handle streaming
      res.writeHead(200, {
        'Content-Type': 'text/event-stream',
        'Cache-Control': 'no-cache',
        'Connection': 'keep-alive'
      });

      const streamResponse = await client.chat.completions.create({
        model: 'yachtsy-agent',
        messages,
        stream: true
      });

      for await (const chunk of streamResponse) {
        const content = chunk.choices[0]?.delta?.content || '';
        if (content) {
          res.write(`data: ${JSON.stringify({ content })}\n\n`);
        }
      }

      res.write('data: [DONE]\n\n');
      res.end();
    } else {
      // Handle non-streaming
      const response = await client.chat.completions.create({
        model: 'yachtsy-agent',
        messages
      });

      res.status(200).json(response);
    }
  } catch (error) {
    console.error('Error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
}
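
If your project uses the App Router instead, a minimal non-streaming sketch of app/api/chat/route.js could look like this (streaming would follow the same SSE pattern as above):

// app/api/chat/route.js (App Router) -- sketch, non-streaming
import OpenAI from 'openai';

const client = new OpenAI({
  baseURL: 'http://localhost:8000/v1',
  apiKey: process.env.YACHTSY_API_KEY
});

export async function POST(request) {
  try {
    const { messages } = await request.json();

    const response = await client.chat.completions.create({
      model: 'yachtsy-agent',
      messages
    });

    return Response.json(response);
  } catch (error) {
    console.error('Error:', error);
    return Response.json({ error: 'Internal server error' }, { status: 500 });
  }
}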

Frontend Component

// components/SailingChat.jsx
import { useState } from 'react';

export default function SailingChat() {
  const [messages, setMessages] = useState([]);
  const [input, setInput] = useState('');
  const [loading, setLoading] = useState(false);

  const sendMessage = async () => {
    if (!input.trim()) return;

    const userMessage = { role: 'user', content: input };
    const newMessages = [...messages, userMessage];
    setMessages(newMessages);
    setLoading(true);
    setInput('');

    try {
      const response = await fetch('/api/chat', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ messages: newMessages })
      });

      const data = await response.json();

      if (!response.ok) {
        throw new Error(data.error || 'Request failed');
      }

      setMessages(prev => [...prev, {
        role: 'assistant',
        content: data.choices[0].message.content
      }]);
    } catch (error) {
      console.error('Error:', error);
      setMessages(prev => [...prev, {
        role: 'assistant',
        content: 'Sorry, I encountered an error. Please try again.'
      }]);
    } finally {
      setLoading(false);
    }
  };

  return (
    <div className="max-w-2xl mx-auto p-4">
      <div className="space-y-4 mb-4">
        {messages.map((msg, index) => (
          <div key={index} className={`p-3 rounded ${
            msg.role === 'user' ? 'bg-blue-100 ml-12' : 'bg-gray-100 mr-12'
          }`}>
            <div className="font-semibold">
              {msg.role === 'user' ? 'You' : 'Yachtsy'}
            </div>
            <div className="mt-1">{msg.content}</div>
          </div>
        ))}
        {loading && (
          <div className="bg-gray-100 mr-12 p-3 rounded">
            <div className="font-semibold">Yachtsy</div>
            <div className="mt-1">Thinking...</div>
          </div>
        )}
      </div>
      
      <div className="flex gap-2">
        <input
          type="text"
          value={input}
          onChange={(e) => setInput(e.target.value)}
          onKeyDown={(e) => e.key === 'Enter' && sendMessage()}
          placeholder="Ask about sailing..."
          disabled={loading}
          className="flex-1 p-2 border rounded"
        />
        <button
          onClick={sendMessage}
          disabled={loading}
          className="px-4 py-2 bg-blue-500 text-white rounded disabled:opacity-50"
        >
          Send
        </button>
      </div>
    </div>
  );
}
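
The component above uses the non-streaming path. To consume the streaming branch of the /api/chat handler, a fetch-based reader sketch (assuming the data: {"content": ...} / data: [DONE] format emitted by the handler above) might look like this:

// Sketch: read the proxy's SSE stream and hand each token to a callback
async function streamChat(messages, onToken) {
  const response = await fetch('/api/chat', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ messages, stream: true })
  });

  const reader = response.body.getReader();
  const decoder = new TextDecoder();
  let buffer = '';

  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    buffer += decoder.decode(value, { stream: true });

    const events = buffer.split('\n\n');
    buffer = events.pop(); // keep any partial event for the next read

    for (const event of events) {
      const line = event.trim();
      if (!line.startsWith('data:')) continue;
      const payload = line.slice(5).trim();
      if (payload === '[DONE]') return;
      onToken(JSON.parse(payload).content);
    }
  }
}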

Security Best Practices

1. Never Expose API Keys in the Frontend

Don't do this:

const client = new OpenAI({
  apiKey: 'your-api-key-here', // Exposed to users!
  dangerouslyAllowBrowser: true
});

Do this instead:

// Use a backend proxy
const response = await fetch('/api/chat', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ messages })
});

2. Implement Rate Limiting

// Example with express-rate-limit on an Express proxy server
import express from 'express';
import rateLimit from 'express-rate-limit';

const app = express();

const limiter = rateLimit({
  windowMs: 15 * 60 * 1000, // 15 minutes
  max: 100 // limit each IP to 100 requests per window
});

app.use('/api/chat', limiter);

3. Validate Input

function validateMessage(content) {
  if (!content || typeof content !== 'string') {
    throw new Error('Invalid message content');
  }
  
  if (content.length > 4000) {
    throw new Error('Message too long');
  }
  
  return content.trim();
}
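
For example, the API route could sanitize every incoming message before forwarding it (a sketch based on the Next.js handler above):

// Inside the /api/chat handler, before calling the model
const { messages } = req.body;
const safeMessages = messages.map((msg) => ({
  role: msg.role,
  content: validateMessage(msg.content)
}));
// ...then pass safeMessages to client.chat.completions.create()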

Error Handling

async function handleYachtsyRequest(messages) {
  try {
    const response = await client.chat.completions.create({
      model: 'yachtsy-agent',
      messages
    });
    
    return response.choices[0].message.content;
  } catch (error) {
    if (error.status === 401) {
      throw new Error('Invalid API key');
    } else if (error.status === 429) {
      throw new Error('Rate limit exceeded. Please try again later.');
    } else if (error.status >= 500) {
      throw new Error('Service temporarily unavailable');
    } else {
      throw new Error('An unexpected error occurred');
    }
  }
}
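
A caller can surface these messages directly to the user, for example (renderAnswer and renderError are hypothetical UI helpers):

try {
  const answer = await handleYachtsyRequest(messages);
  renderAnswer(answer);        // hypothetical: append the reply to the chat UI
} catch (error) {
  renderError(error.message);  // hypothetical: show the friendly error text
}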

Production Tip: Always route requests through a backend proxy so your API keys stay on the server, and enforce rate limiting and input validation there.