ai.service.ts

import OpenAI from 'openai';
import { ChatRequestDto } from './dto/chat-request.dto';
import { Response } from 'express';

export class AIService {
  private openai: OpenAI | null = null;

  async chat(chatRequest: ChatRequestDto, res: Response, apiKey: string) {
    try {
      if (!apiKey) {
        throw new Error('API key is required');
      }

      // Initialize the OpenAI client only when needed
      this.openai = new OpenAI({
        apiKey,
      });

      const stream = await this.openai.chat.completions.create({
        model: 'gpt-3.5-turbo',
        messages: chatRequest.messages,
        stream: true,
      });

      // Set headers for a server-sent events (SSE) streaming response
      res.setHeader('Content-Type', 'text/event-stream');
      res.setHeader('Cache-Control', 'no-cache');
      res.setHeader('Connection', 'keep-alive');

      // Forward each streamed chunk to the client as an SSE "data:" event
      for await (const chunk of stream) {
        const content = chunk.choices[0]?.delta?.content || '';
        if (content) {
          res.write(`data: ${JSON.stringify({ content })}\n\n`);
        }
      }

      res.write('data: [DONE]\n\n');
      res.end();
    } catch (error) {
      console.error('OpenAI API error:', error);
      if (!res.headersSent) {
        res.status(500).json({
          error: error instanceof Error ? error.message : 'Failed to get response from OpenAI',
        });
      }
    }
  }
}
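
The service emits a plain SSE-style stream: one `data: {"content": "..."}` event per token chunk, terminated by `data: [DONE]`. Below is a minimal sketch of how a browser-side caller might consume that stream. The endpoint path '/ai/chat' and the request body shape are assumptions for illustration; only the event framing and the `[DONE]` sentinel come from ai.service.ts itself.

// Hypothetical client-side consumer for the stream produced by AIService.chat.
// Assumes the service is exposed at POST /ai/chat and accepts { messages } in the body.
async function consumeChat(messages: { role: string; content: string }[]): Promise<string> {
  const response = await fetch('/ai/chat', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ messages }),
  });
  if (!response.ok || !response.body) {
    throw new Error(`Request failed: ${response.status}`);
  }

  const reader = response.body.getReader();
  const decoder = new TextDecoder();
  let buffer = '';
  let full = '';

  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    buffer += decoder.decode(value, { stream: true });

    // Events are separated by a blank line ("\n\n"); keep any trailing partial event in the buffer.
    const events = buffer.split('\n\n');
    buffer = events.pop() ?? '';

    for (const event of events) {
      const data = event.replace(/^data: /, '').trim();
      if (data === '[DONE]') return full;      // end-of-stream sentinel written by the service
      const { content } = JSON.parse(data) as { content: string };
      full += content;                          // accumulate the assistant reply chunk by chunk
    }
  }
  return full;
}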