| 1234567891011121314151617181920212223242526272829303132333435363738394041424344454647484950515253545556575859606162636465666768697071 |
import {
  BadRequestException,
  Body,
  Controller,
  Get,
  HttpCode,
  HttpException,
  HttpStatus,
  InternalServerErrorException,
  Post,
  Request,
  UseGuards,
} from '@nestjs/common';
import { ApiService } from './api.service';
import { CombinedAuthGuard } from '../auth/combined-auth.guard';
import { ModelConfigService } from '../model-config/model-config.service';
import { I18nService } from '../i18n/i18n.service';
- class ChatDto {
- prompt: string;
- }
- @Controller()
- export class ApiController {
- constructor(
- private readonly apiService: ApiService,
- private readonly modelConfigService: ModelConfigService,
- private readonly i18nService: I18nService,
- ) { }
- @Get('health')
- healthCheck() {
- return this.apiService.healthCheck();
- }
- @Post('chat')
- @UseGuards(CombinedAuthGuard)
- @HttpCode(HttpStatus.OK)
- async chat(@Request() req, @Body() chatDto: ChatDto) {
- const { prompt } = chatDto;
- if (!prompt) {
- throw new Error(this.i18nService.getMessage('promptRequired'));
- }
- try {
- // ユーザーの LLM モデル設定を取得
- const models = await this.modelConfigService.findAll(req.user.id, req.user.tenantId);
- const llmModel = models.find((m) => m.type === 'llm');
- if (!llmModel) {
- throw new Error(this.i18nService.getMessage('addLLMConfig'));
- }
- // API key is optional - allows local models
-
- const modelConfigForService = {
- id: llmModel.id,
- name: llmModel.name,
- modelId: llmModel.modelId,
- baseUrl: llmModel.baseUrl,
- apiKey: llmModel.apiKey,
- type: llmModel.type as any,
- };
- const response = await this.apiService.getChatCompletion(
- prompt,
- modelConfigForService,
- );
- return { response };
- } catch (error) {
- throw new Error(error.message || this.i18nService.getMessage('internalServerError'));
- }
- }
- }
|