|
@@ -1,13 +1,15 @@
|
|
|
# mypy: disable - error - code = "no-untyped-def,misc"
|
|
|
-import pathlib
|
|
|
-from fastapi import FastAPI, Response
|
|
|
-from fastapi.staticfiles import StaticFiles
|
|
|
+from fastapi import FastAPI
|
|
|
from pydantic import BaseModel
|
|
|
from typing import Optional, List, Dict, Any
|
|
|
import asyncio
|
|
|
|
|
|
# Import the graph from the agent
|
|
|
from agent.graph import graph
|
|
|
+# Import ChatVertexAI for translation
|
|
|
+from langchain_google_vertexai import ChatVertexAI
|
|
|
+import os
|
|
|
+
|
|
|
# Define the FastAPI app
|
|
|
app = FastAPI()
|
|
|
|
|
@@ -25,6 +27,17 @@ class ResearchResponse(BaseModel):
|
|
|
sources: List[Dict[str, Any]]
|
|
|
|
|
|
|
|
|
+class TranslationRequest(BaseModel):
|
|
|
+    """Request model for the translation endpoint."""
|
|
|
+    text: str  # source text to translate (the endpoint expects Chinese input)
|
|
|
+
|
|
|
+
|
|
|
+class TranslationResponse(BaseModel):
|
|
|
+    """Response model for the translation endpoint."""
|
|
|
+    original_text: str  # the text exactly as submitted by the client
|
|
|
+    translated_text: str  # English translation produced by the model
|
|
|
+
|
|
|
+
|
|
|
@app.post("/api/research", response_model=ResearchResponse)
|
|
|
async def research(request: ResearchRequest):
|
|
|
"""Endpoint to perform research using the LangGraph agent.
|
|
@@ -69,3 +82,40 @@ async def research(request: ResearchRequest):
|
|
|
|
|
|
return ResearchResponse(answer=answer, sources=sources)
|
|
|
|
|
|
+
|
|
|
+@app.post("/api/translate", response_model=TranslationResponse)
|
|
|
+async def translate(request: TranslationRequest):
|
|
|
+    """Endpoint to translate Chinese text to English using ChatVertexAI.
|
|
|
+
|
|
|
+    Args:
|
|
|
+        request: TranslationRequest containing the text to translate.
|
|
|
+
|
|
|
+    Returns:
|
|
|
+        TranslationResponse with the original and translated text.
|
|
|
+    """
|
|
|
+    # Initialize ChatVertexAI.  NOTE(review): the client is rebuilt on every
|
|
|
+    # request; consider caching it at module level under heavy traffic.
|
|
|
+    llm = ChatVertexAI(
|
|
|
+        model_name="gemini-2.0-flash",
|
|
|
+        temperature=0,
|
|
|
+        max_retries=2,
|
|
|
+        project=os.getenv("GOOGLE_CLOUD_PROJECT"),  # required
|
|
|
+        location=os.getenv("GOOGLE_CLOUD_LOCATION")  # required
|
|
|
+    )
|
|
|
+
|
|
|
+    # Create translation prompt
|
|
|
+    prompt = f"Translate the following Chinese text to English:\n\n{request.text}\n\nTranslation:"
|
|
|
+
|
|
|
+    # llm.invoke() blocks; run it in a worker thread so the event loop stays
|
|
|
+    # responsive.  asyncio.to_thread supersedes the deprecated
|
|
|
+    # get_event_loop()/run_in_executor(None, ...) pattern (Python 3.9+).
|
|
|
+    result = await asyncio.to_thread(llm.invoke, prompt)
|
|
|
+
|
|
|
+    # Extract the translated text
|
|
|
+    translated_text = result.content if hasattr(result, 'content') else str(result)
|
|
|
+
|
|
|
+    return TranslationResponse(
|
|
|
+        original_text=request.text,
|
|
|
+        translated_text=translated_text
|
|
|
+    )
|
|
|
+
|