feat: Add Expert Transformation Agent with multi-expert perspective system

- Backend: Add expert transformation router with 3-step SSE pipeline
  - Step 0: Generate diverse expert team (random domains)
  - Step 1: Each expert generates keywords for attributes
  - Step 2: Batch generate descriptions for expert keywords
- Backend: Add simplified prompts for reliable JSON output
- Frontend: Add TransformationPanel with React Flow visualization
- Frontend: Add TransformationInputPanel for expert configuration
  - Expert count (2-8), keywords per expert (1-3)
  - Custom expert domains support
- Frontend: Add expert keyword nodes with expert badges
- Frontend: Improve description card layout (wider cards, more spacing)
- Frontend: Add fallback for missing descriptions with visual indicators

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
Commit 534fdbbcc4 (parent 1ed1dab78f), 2025-12-03 16:26:17 +08:00
25 changed files with 3114 additions and 27 deletions
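For reference, the three pipeline steps listed in the commit message surface as named SSE events in the expert_transformation router added below; a run proceeds as

expert_start → expert_complete → keyword_start → keyword_progress (once per attribute) → keyword_complete → description_start → description_complete → done

and an error event ends the stream if expert or keyword generation fails outright. The plain transformation router follows the same pattern with only the keyword_* / description_* / done events.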

View File

@@ -3,7 +3,7 @@ from contextlib import asynccontextmanager
 from fastapi import FastAPI
 from fastapi.middleware.cors import CORSMiddleware
-from .routers import attributes
+from .routers import attributes, transformation, expert_transformation
 from .services.llm_service import ollama_provider
@@ -29,6 +29,8 @@ app.add_middleware(
 )
 app.include_router(attributes.router)
+app.include_router(transformation.router)
+app.include_router(expert_transformation.router)
 @app.get("/")

View File

@@ -131,3 +131,92 @@ class DAGRelationship(BaseModel):
source: str # source attribute name
target_category: str
target: str # target attribute name
# ===== Transformation Agent schemas =====
class TransformationRequest(BaseModel):
"""Transformation Agent 請求"""
query: str # 原始查詢 (e.g., "腳踏車")
category: str # 類別名稱 (e.g., "功能")
attributes: List[str] # 該類別的屬性列表
model: Optional[str] = None
temperature: Optional[float] = 0.7
keyword_count: int = 3 # 要生成的新關鍵字數量
class TransformationDescription(BaseModel):
"""單一轉換描述"""
keyword: str # 新關鍵字
description: str # 與 query 結合的描述
class TransformationCategoryResult(BaseModel):
"""單一類別的轉換結果"""
category: str
original_attributes: List[str] # 原始屬性
new_keywords: List[str] # 新生成的關鍵字
descriptions: List[TransformationDescription]
class TransformationDAGResult(BaseModel):
"""完整 Transformation 結果"""
query: str
results: List[TransformationCategoryResult]
# ===== Expert Transformation Agent schemas =====
class ExpertProfile(BaseModel):
"""專家檔案"""
id: str # e.g., "expert-0"
name: str # e.g., "藥師"
domain: str # e.g., "醫療與健康"
perspective: Optional[str] = None # e.g., "從藥物與健康管理角度思考"
class ExpertKeyword(BaseModel):
"""專家視角生成的關鍵字"""
keyword: str # 關鍵字本身
expert_id: str # 哪個專家生成的
expert_name: str # 專家名稱(冗餘,方便前端)
source_attribute: str # 來自哪個原始屬性
class ExpertTransformationDescription(BaseModel):
"""專家關鍵字的描述"""
keyword: str
expert_id: str
expert_name: str
description: str
class ExpertTransformationCategoryResult(BaseModel):
"""單一類別的轉換結果(專家版)"""
category: str
original_attributes: List[str]
expert_keywords: List[ExpertKeyword] # 所有專家生成的關鍵字
descriptions: List[ExpertTransformationDescription]
class ExpertTransformationDAGResult(BaseModel):
"""完整轉換結果(專家版)"""
query: str
experts: List[ExpertProfile] # 使用的專家列表
results: List[ExpertTransformationCategoryResult]
class ExpertTransformationRequest(BaseModel):
"""Expert Transformation Agent 請求"""
query: str
category: str
attributes: List[str]
# Expert parameters
expert_count: int = 3 # 專家數量 (2-8)
keywords_per_expert: int = 1 # 每個專家為每個屬性生成幾個關鍵字 (1-3)
custom_experts: Optional[List[str]] = None # 用戶指定專家 ["藥師", "工程師"]
# LLM parameters
model: Optional[str] = None
temperature: Optional[float] = 0.7
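As a usage sketch of the new request schema (all field values below are illustrative, and the import path is assumed rather than taken from the diff):

from app.models.schemas import ExpertTransformationRequest  # assumed import path

req = ExpertTransformationRequest(
    query="腳踏車",              # object to transform
    category="功能",             # category being transformed
    attributes=["載人"],         # attributes of that category (illustrative)
    expert_count=3,              # 2-8 experts
    keywords_per_expert=1,       # 1-3 keywords per expert per attribute
    custom_experts=["藥師", "工程師"],  # optional user-pinned expert roles
)
print(req.model_dump_json())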

View File

@@ -0,0 +1,78 @@
"""Expert Transformation Agent 提示詞模組"""
from typing import List, Optional
def get_expert_generation_prompt(
query: str,
categories: List[str],
expert_count: int,
custom_experts: Optional[List[str]] = None
) -> str:
"""Step 0: 生成專家團隊(不依賴主題,純隨機多元)"""
custom_text = ""
if custom_experts and len(custom_experts) > 0:
custom_text = f"(已指定:{', '.join(custom_experts[:expert_count])}"
return f"""/no_think
隨機組建 {expert_count} 個來自完全不同領域的專家團隊{custom_text}
回傳 JSON
{{"experts": [{{"id": "expert-0", "name": "職業", "domain": "領域", "perspective": "角度"}}, ...]}}
規則:
- id 為 expert-0 到 expert-{expert_count - 1}
- name 填寫職業名稱(非人名),2-5字
- 各專家的 domain 必須來自截然不同的領域,越多元越好"""
def get_expert_keyword_generation_prompt(
category: str,
attribute: str,
experts: List[dict], # List[ExpertProfile]
keywords_per_expert: int = 1
) -> str:
"""Step 1: 專家視角關鍵字生成"""
experts_info = ", ".join([f"{exp['id']}:{exp['name']}({exp['domain']})" for exp in experts])
return f"""/no_think
專家團隊:{experts_info}
屬性:「{attribute}」({category})
每位專家從自己的專業視角為此屬性生成 {keywords_per_expert} 個創新關鍵字(2-6字)。
關鍵字要反映該專家領域的獨特思考方式。
回傳 JSON
{{"keywords": [{{"keyword": "詞彙", "expert_id": "expert-X", "expert_name": "名稱"}}, ...]}}
共需 {len(experts) * keywords_per_expert} 個關鍵字。"""
def get_expert_batch_description_prompt(
query: str,
category: str,
expert_keywords: List[dict] # List[ExpertKeyword]
) -> str:
"""Step 2: 批次生成專家關鍵字的描述"""
keywords_info = ", ".join([
f"{kw['expert_name']}:{kw['keyword']}"
for kw in expert_keywords
])
# Build a keyword -> (expert_id, expert_name) mapping
keyword_expert_map = ", ".join([
f"{kw['keyword']}{kw['expert_id']}/{kw['expert_name']}"
for kw in expert_keywords
])
return f"""/no_think
物件:「{query}」
關鍵字(專家:詞彙):{keywords_info}
對照:{keyword_expert_map}
為每個關鍵字生成創新描述(15-30字),說明如何將該概念應用到「{query}」上。
回傳 JSON
{{"descriptions": [{{"keyword": "詞彙", "expert_id": "expert-X", "expert_name": "名稱", "description": "應用描述"}}, ...]}}
共需 {len(expert_keywords)} 個描述。"""
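A quick sketch of how the Step 0 prompt pairs with extract_json_from_response in the router; the expert entries shown are illustrative, not output from the commit:

# Illustrative only: build the Step 0 prompt and show the JSON shape the LLM is asked to return.
prompt = get_expert_generation_prompt(
    query="腳踏車",
    categories=["功能", "外觀"],
    expert_count=2,
)
# After ollama_provider.generate(prompt, ...) and extract_json_from_response(...),
# the expected shape is:
# {"experts": [
#     {"id": "expert-0", "name": "藥師", "domain": "醫療與健康", "perspective": "從藥物與健康管理角度思考"},
#     {"id": "expert-1", "name": "工程師", "domain": "機械設計", "perspective": "從結構與材料角度思考"}
# ]}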

View File

@@ -0,0 +1,97 @@
"""Transformation Agent 提示詞模組"""
from typing import List
def get_keyword_generation_prompt(
category: str,
attributes: List[str],
keyword_count: int = 3
) -> str:
"""
Step 1: 生成新關鍵字
給定類別和現有屬性,生成全新的、有創意的關鍵字。
不考慮原始查詢,只專注於類別本身可能的延伸。
"""
attrs_text = "".join(attributes)
return f"""/no_think
你是一個創意發想專家。給定一個類別和該類別下的現有屬性,請生成全新的、有創意的關鍵字或描述片段。
【類別】{category}
【現有屬性】{attrs_text}
【重要規則】
1. 生成 {keyword_count} 個全新的關鍵字
2. 關鍵字必須符合「{category}」這個類別的範疇
3. 關鍵字要有創意,不能與現有屬性重複或太相似
4. 不要考慮任何特定物件,只專注於這個類別本身可能的延伸
5. 每個關鍵字應該是 2-6 個字的詞彙或短語
只回傳 JSON
{{
"keywords": ["關鍵字1", "關鍵字2", "關鍵字3"]
}}"""
def get_description_generation_prompt(
query: str,
category: str,
keyword: str
) -> str:
"""
Step 2: 結合原始查詢生成描述
用新關鍵字創造一個與原始查詢相關的創新應用描述。
"""
return f"""/no_think
你是一個創新應用專家。請將一個新的關鍵字概念應用到特定物件上,創造出創新的應用描述。
【物件】{query}
【類別】{category}
【新關鍵字】{keyword}
【任務】
請用「{keyword}」這個概念,為「{query}」創造一個創新的應用描述。
描述應該是一個完整的句子或短語,說明如何將這個新概念應用到物件上。
【範例格式】
- 如果物件是「腳踏車」,關鍵字是「監視」,可以生成「腳踏車監視騎乘者的身體健康狀況」
- 如果物件是「雨傘」,關鍵字是「發電」,可以生成「雨傘利用雨滴撞擊發電」
只回傳 JSON
{{
"description": "創新應用描述"
}}"""
def get_batch_description_prompt(
query: str,
category: str,
keywords: List[str]
) -> str:
"""
批次生成描述(可選的優化版本,一次處理多個關鍵字)
"""
keywords_text = "".join(keywords)
keywords_json = ", ".join([f'"{k}"' for k in keywords])
return f"""/no_think
你是一個創新應用專家。請將多個新的關鍵字概念應用到特定物件上,為每個關鍵字創造創新的應用描述。
【物件】{query}
【類別】{category}
【新關鍵字】{keywords_text}
【任務】
為每個關鍵字創造一個與「{query}」相關的創新應用描述。
每個描述應該是一個完整的句子或短語。
只回傳 JSON
{{
"descriptions": [
{{"keyword": "關鍵字1", "description": "描述1"}},
{{"keyword": "關鍵字2", "description": "描述2"}}
]
}}"""

View File

@@ -0,0 +1,185 @@
"""Expert Transformation Agent 路由模組"""
import json
import logging
from typing import AsyncGenerator, List
from fastapi import APIRouter
from fastapi.responses import StreamingResponse
from ..models.schemas import (
ExpertTransformationRequest,
ExpertProfile,
ExpertKeyword,
ExpertTransformationCategoryResult,
ExpertTransformationDescription,
)
from ..prompts.expert_transformation_prompt import (
get_expert_generation_prompt,
get_expert_keyword_generation_prompt,
get_expert_batch_description_prompt,
)
from ..services.llm_service import ollama_provider, extract_json_from_response
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/api/expert-transformation", tags=["expert-transformation"])
async def generate_expert_transformation_events(
request: ExpertTransformationRequest,
all_categories: List[str] # For expert generation context
) -> AsyncGenerator[str, None]:
"""Generate SSE events for expert transformation process"""
try:
temperature = request.temperature if request.temperature is not None else 0.7
model = request.model
# ========== Step 0: Generate expert team ==========
yield f"event: expert_start\ndata: {json.dumps({'message': '正在組建專家團隊...'}, ensure_ascii=False)}\n\n"
experts: List[ExpertProfile] = []
try:
expert_prompt = get_expert_generation_prompt(
query=request.query,
categories=all_categories,
expert_count=request.expert_count,
custom_experts=request.custom_experts
)
logger.info(f"Expert prompt: {expert_prompt[:200]}")
expert_response = await ollama_provider.generate(
expert_prompt, model=model, temperature=temperature
)
logger.info(f"Expert response: {expert_response[:500]}")
expert_data = extract_json_from_response(expert_response)
experts_raw = expert_data.get("experts", [])
for exp in experts_raw:
if isinstance(exp, dict) and all(k in exp for k in ["id", "name", "domain"]):
experts.append(ExpertProfile(**exp))
except Exception as e:
logger.error(f"Failed to generate experts: {e}")
yield f"event: error\ndata: {json.dumps({'error': f'專家團隊生成失敗: {str(e)}'}, ensure_ascii=False)}\n\n"
return
yield f"event: expert_complete\ndata: {json.dumps({'experts': [e.model_dump() for e in experts]}, ensure_ascii=False)}\n\n"
if not experts:
yield f"event: error\ndata: {json.dumps({'error': '無法生成專家團隊'}, ensure_ascii=False)}\n\n"
return
# ========== Step 1: Generate keywords from expert perspectives ==========
yield f"event: keyword_start\ndata: {json.dumps({'message': f'專家團隊為「{request.category}」的屬性生成關鍵字...'}, ensure_ascii=False)}\n\n"
all_expert_keywords: List[ExpertKeyword] = []
# For each attribute, ask all experts to generate keywords
for attr_index, attribute in enumerate(request.attributes):
try:
kw_prompt = get_expert_keyword_generation_prompt(
category=request.category,
attribute=attribute,
experts=[e.model_dump() for e in experts],
keywords_per_expert=request.keywords_per_expert
)
logger.info(f"Keyword prompt for '{attribute}': {kw_prompt[:300]}")
kw_response = await ollama_provider.generate(
kw_prompt, model=model, temperature=temperature
)
logger.info(f"Keyword response for '{attribute}': {kw_response[:500]}")
kw_data = extract_json_from_response(kw_response)
keywords_raw = kw_data.get("keywords", [])
# Add source_attribute to each keyword
for kw in keywords_raw:
if isinstance(kw, dict) and all(k in kw for k in ["keyword", "expert_id", "expert_name"]):
all_expert_keywords.append(ExpertKeyword(
keyword=kw["keyword"],
expert_id=kw["expert_id"],
expert_name=kw["expert_name"],
source_attribute=attribute
))
# Emit progress
yield f"event: keyword_progress\ndata: {json.dumps({'attribute': attribute, 'count': len(keywords_raw)}, ensure_ascii=False)}\n\n"
except Exception as e:
logger.warning(f"Failed to generate keywords for '{attribute}': {e}")
yield f"event: keyword_progress\ndata: {json.dumps({'attribute': attribute, 'count': 0, 'error': str(e)}, ensure_ascii=False)}\n\n"
# Continue with next attribute instead of stopping
yield f"event: keyword_complete\ndata: {json.dumps({'total_keywords': len(all_expert_keywords)}, ensure_ascii=False)}\n\n"
if not all_expert_keywords:
yield f"event: error\ndata: {json.dumps({'error': '無法生成關鍵字'}, ensure_ascii=False)}\n\n"
return
# ========== Step 2: Generate descriptions for each expert keyword ==========
yield f"event: description_start\ndata: {json.dumps({'message': '為專家關鍵字生成創新應用描述...'}, ensure_ascii=False)}\n\n"
descriptions: List[ExpertTransformationDescription] = []
try:
desc_prompt = get_expert_batch_description_prompt(
query=request.query,
category=request.category,
expert_keywords=[kw.model_dump() for kw in all_expert_keywords]
)
logger.info(f"Description prompt: {desc_prompt[:300]}")
desc_response = await ollama_provider.generate(
desc_prompt, model=model, temperature=temperature
)
logger.info(f"Description response: {desc_response[:500]}")
desc_data = extract_json_from_response(desc_response)
descriptions_raw = desc_data.get("descriptions", [])
for desc in descriptions_raw:
if isinstance(desc, dict) and all(k in desc for k in ["keyword", "expert_id", "expert_name", "description"]):
descriptions.append(ExpertTransformationDescription(**desc))
except Exception as e:
logger.warning(f"Failed to generate descriptions: {e}")
# Continue without descriptions - at least we have keywords
yield f"event: description_complete\ndata: {json.dumps({'count': len(descriptions)}, ensure_ascii=False)}\n\n"
# ========== Build final result ==========
result = ExpertTransformationCategoryResult(
category=request.category,
original_attributes=request.attributes,
expert_keywords=all_expert_keywords,
descriptions=descriptions
)
final_data = {
"result": result.model_dump(),
"experts": [e.model_dump() for e in experts]
}
yield f"event: done\ndata: {json.dumps(final_data, ensure_ascii=False)}\n\n"
except Exception as e:
logger.error(f"Expert transformation error: {e}", exc_info=True)
yield f"event: error\ndata: {json.dumps({'error': str(e)}, ensure_ascii=False)}\n\n"
@router.post("/category")
async def expert_transform_category(request: ExpertTransformationRequest):
"""處理單一類別的專家視角轉換"""
# Extract all categories from request (should be passed separately in production)
# For now, use just the single category
return StreamingResponse(
generate_expert_transformation_events(request, [request.category]),
media_type="text/event-stream",
headers={
"Cache-Control": "no-cache",
"Connection": "keep-alive",
"X-Accel-Buffering": "no",
},
)
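A minimal client sketch for the new endpoint, assuming the backend runs at http://localhost:8000 (host, port and payload values are assumptions, not part of the commit):

import asyncio
import json

import httpx  # assumed available; any SSE-capable HTTP client works


async def consume_expert_transformation() -> None:
    payload = {
        "query": "腳踏車",
        "category": "功能",
        "attributes": ["載人"],
        "expert_count": 3,
        "keywords_per_expert": 1,
    }
    async with httpx.AsyncClient(timeout=None) as client:
        async with client.stream(
            "POST", "http://localhost:8000/api/expert-transformation/category", json=payload
        ) as resp:
            event = None
            async for line in resp.aiter_lines():
                if line.startswith("event: "):
                    event = line[len("event: "):]
                elif line.startswith("data: ") and event:
                    data = json.loads(line[len("data: "):])
                    print(event, data)
                    if event in ("done", "error"):
                        return


asyncio.run(consume_expert_transformation())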

View File

@@ -0,0 +1,116 @@
"""Transformation Agent 路由模組"""
import json
import logging
from typing import AsyncGenerator, List
from fastapi import APIRouter
from fastapi.responses import StreamingResponse
from ..models.schemas import (
TransformationRequest,
TransformationCategoryResult,
TransformationDescription,
)
from ..prompts.transformation_prompt import (
get_keyword_generation_prompt,
get_batch_description_prompt,
)
from ..services.llm_service import ollama_provider, extract_json_from_response
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/api/transformation", tags=["transformation"])
async def generate_transformation_events(
request: TransformationRequest
) -> AsyncGenerator[str, None]:
"""Generate SSE events for transformation process"""
try:
temperature = request.temperature if request.temperature is not None else 0.7
model = request.model
# ========== Step 1: Generate new keywords ==========
yield f"event: keyword_start\ndata: {json.dumps({'message': f'為「{request.category}」生成新關鍵字...'}, ensure_ascii=False)}\n\n"
keyword_prompt = get_keyword_generation_prompt(
category=request.category,
attributes=request.attributes,
keyword_count=request.keyword_count
)
logger.info(f"Keyword prompt: {keyword_prompt[:200]}")
keyword_response = await ollama_provider.generate(
keyword_prompt, model=model, temperature=temperature
)
logger.info(f"Keyword response: {keyword_response[:500]}")
keyword_data = extract_json_from_response(keyword_response)
new_keywords = keyword_data.get("keywords", [])
yield f"event: keyword_complete\ndata: {json.dumps({'keywords': new_keywords}, ensure_ascii=False)}\n\n"
if not new_keywords:
yield f"event: error\ndata: {json.dumps({'error': '無法生成新關鍵字'}, ensure_ascii=False)}\n\n"
return
# ========== Step 2: Generate descriptions for each keyword ==========
yield f"event: description_start\ndata: {json.dumps({'message': '生成創新應用描述...'}, ensure_ascii=False)}\n\n"
# Use batch description prompt for efficiency
desc_prompt = get_batch_description_prompt(
query=request.query,
category=request.category,
keywords=new_keywords
)
logger.info(f"Description prompt: {desc_prompt[:300]}")
desc_response = await ollama_provider.generate(
desc_prompt, model=model, temperature=temperature
)
logger.info(f"Description response: {desc_response[:500]}")
desc_data = extract_json_from_response(desc_response)
descriptions_raw = desc_data.get("descriptions", [])
# Convert to TransformationDescription objects
descriptions: List[TransformationDescription] = []
for desc in descriptions_raw:
if isinstance(desc, dict) and "keyword" in desc and "description" in desc:
descriptions.append(TransformationDescription(
keyword=desc["keyword"],
description=desc["description"]
))
yield f"event: description_complete\ndata: {json.dumps({'count': len(descriptions)}, ensure_ascii=False)}\n\n"
# ========== Build final result ==========
result = TransformationCategoryResult(
category=request.category,
original_attributes=request.attributes,
new_keywords=new_keywords,
descriptions=descriptions
)
final_data = {
"result": result.model_dump()
}
yield f"event: done\ndata: {json.dumps(final_data, ensure_ascii=False)}\n\n"
except Exception as e:
logger.error(f"Transformation error: {e}")
yield f"event: error\ndata: {json.dumps({'error': str(e)}, ensure_ascii=False)}\n\n"
@router.post("/category")
async def transform_category(request: TransformationRequest):
"""處理單一類別的轉換"""
return StreamingResponse(
generate_transformation_events(request),
media_type="text/event-stream",
headers={
"Cache-Control": "no-cache",
"Connection": "keep-alive",
"X-Accel-Buffering": "no",
},
)
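When the done event arrives, its payload can be validated back into the Pydantic schema on the client or in tests; a small sketch reusing the bicycle example from the prompts (the dict literal stands in for json.loads of the done event's data line, and the import path is assumed):

from app.models.schemas import TransformationCategoryResult  # assumed import path

# Stand-in for json.loads(...) of the `done` event's data line.
final_data = {
    "result": {
        "category": "功能",
        "original_attributes": ["載人"],
        "new_keywords": ["監視"],
        "descriptions": [
            {"keyword": "監視", "description": "腳踏車監視騎乘者的身體健康狀況"}
        ],
    }
}

result = TransformationCategoryResult.model_validate(final_data["result"])
print(result.new_keywords, len(result.descriptions))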