LangChain实战项目:智能客服系统
项目概述
本项目将构建一个基于LangChain的智能客服系统,具备以下功能:
多轮对话支持
知识库问答
情感分析
自动分类转接
对话历史记录
项目结构
smart_customer_service/
├── app/
│ ├── __init__.py
│ ├── main.py
│ ├── config.py
│ ├── models/
│ │ ├── __init__.py
│ │ ├── customer.py
│ │ └── conversation.py
│ ├── services/
│ │ ├── __init__.py
│ │ ├── llm_service.py
│ │ ├── knowledge_service.py
│ │ ├── sentiment_service.py
│ │ └── routing_service.py
│ ├── chains/
│ │ ├── __init__.py
│ │ ├── conversation_chain.py
│ │ ├── qa_chain.py
│ │ └── classification_chain.py
│ └── utils/
│ ├── __init__.py
│ ├── database.py
│ └── logger.py
├── data/
│ ├── knowledge_base/
│ └── training_data/
├── tests/
├── requirements.txt
├── Dockerfile
└── README.md
核心组件实现
1. 配置管理 (config.py)
import os
from typing import Dict, Any
class Config:
    """Application configuration, read once from environment variables."""

    # OpenAI settings
    OPENAI_API_KEY = os.getenv("OPENAI_API_KEY", "")
    OPENAI_MODEL = os.getenv("OPENAI_MODEL", "gpt-3.5-turbo")

    # Relational database settings
    DATABASE_URL = os.getenv("DATABASE_URL", "sqlite:///customer_service.db")

    # Vector store settings
    VECTOR_DB_PATH = os.getenv("VECTOR_DB_PATH", "./vector_db")

    # Service behavior settings
    MAX_CONVERSATION_LENGTH = int(os.getenv("MAX_CONVERSATION_LENGTH", "50"))
    TEMPERATURE = float(os.getenv("TEMPERATURE", "0.7"))

    # Logging settings
    LOG_LEVEL = os.getenv("LOG_LEVEL", "INFO")
    LOG_FILE = os.getenv("LOG_FILE", "customer_service.log")

    @classmethod
    def validate(cls) -> bool:
        """Check mandatory settings.

        Returns:
            True when the configuration is usable.

        Raises:
            ValueError: if OPENAI_API_KEY is empty or unset.
        """
        if not cls.OPENAI_API_KEY:
            raise ValueError("OPENAI_API_KEY is required")
        return True


# Module-level singleton used by the rest of the app.
config = Config()
2. 数据模型 (models/)
customer.py
from sqlalchemy import Column, Integer, String, DateTime, Text
from sqlalchemy.ext.declarative import declarative_base
from datetime import datetime
Base = declarative_base()


class Customer(Base):
    """ORM model for a customer record."""

    __tablename__ = "customers"

    id = Column(Integer, primary_key=True)
    # External customer identifier (distinct from the surrogate primary key).
    customer_id = Column(String(50), unique=True, nullable=False)
    name = Column(String(100))
    email = Column(String(100))
    phone = Column(String(20))
    created_at = Column(DateTime, default=datetime.utcnow)
    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)

    def __repr__(self):
        return f"<Customer(id={self.customer_id}, name={self.name})>"
conversation.py
from sqlalchemy import Column, Integer, String, DateTime, Text, ForeignKey
from sqlalchemy.ext.declarative import declarative_base
from datetime import datetime
Base = declarative_base()


class Conversation(Base):
    """ORM model for one message/response exchange within a support session."""

    __tablename__ = "conversations"

    id = Column(Integer, primary_key=True)
    # NOTE(review): ForeignKey is imported but customer_id is a plain String
    # column — presumably meant to reference customers.customer_id; confirm
    # before adding the constraint.
    customer_id = Column(String(50), nullable=False)
    session_id = Column(String(100), nullable=False)
    message = Column(Text, nullable=False)
    response = Column(Text, nullable=False)
    sentiment = Column(String(20))
    category = Column(String(50))
    created_at = Column(DateTime, default=datetime.utcnow)

    def __repr__(self):
        return f"<Conversation(session_id={self.session_id}, message={self.message[:50]}...)>"
3. LLM服务 (services/llm_service.py)
from langchain.chat_models import ChatOpenAI
from langchain.schema import HumanMessage, SystemMessage, AIMessage
from langchain.memory import ConversationBufferWindowMemory
from typing import List, Dict, Any
import logging
logger = logging.getLogger(__name__)
class LLMService:
"""LLM服务类"""
def __init__(self, model_name: str = "gpt-3.5-turbo", temperature: float = 0.7):
self.model = ChatOpenAI(
model_name=model_name,
temperature=temperature
)
self.memory = ConversationBufferWindowMemory(k=10)
def generate_response(self, message: str, context: str = "") -> str:
"""生成回复"""
try:
# 构建消息列表
messages = []
# 添加系统消息
if context:
messages.append(SystemMessage(content=context))
# 添加历史对话
history = self.memory.chat_memory.message
© 版权声明
文章版权归作者所有,未经允许请勿转载。如内容涉嫌侵权,请在本页底部进入<联系我们>进行举报投诉!
THE END



















暂无评论内容