Local LLM fine-tuning + knowledge base + API management (stable, still being refined)
Install Docker
curl -fsSL https://get.docker.com | bash -s docker --mirror Aliyun
systemctl enable --now docker
Install docker-compose
curl -L https://github.com/docker/compose/releases/download/v2.20.3/docker-compose-`uname -s`-`uname -m` -o /usr/local/bin/docker-compose
chmod +x /usr/local/bin/docker-compose
config.json
{
"systemEnv": {
"openapiPrefix": "fastgpt",
"vectorMaxProcess": 15,
"qaMaxProcess": 15,
"pgHNSWEfSearch": 100
},
"llmModels": [
{
"model": "chatglm3",
"name": "chatglm3",
"maxContext": 16000,
"maxResponse": 4000,
"quoteMaxToken": 13000,
"maxTemperature": 1.2,
"charsPointsPrice": 0,
"censor": false,
"vision": false,
"datasetProcess": false, # 报错的话需要改为true
"usedInClassify": true,
"usedInExtractFields": true,
"usedInToolCall": true,
"usedInQueryExtension": true,
"toolChoice": true,
"functionCall": false,
"customCQPrompt": "",
"customExtractPrompt": "",
"defaultSystemChatPrompt": "",
"defaultConfig": {}
},
{
"model": "gpt-3.5-turbo",
"name": "gpt-3.5-turbo",
"maxContext": 16000,
"maxResponse": 4000,
"quoteMaxToken": 13000,
"maxTemperature": 1.2,
"charsPointsPrice": 0,
"censor": false,
"vision": false,
"datasetProcess": false,
"usedInClassify": true,
"usedInExtractFields": true,
"usedInToolCall": true,
"usedInQueryExtension": true,
"toolChoice": true,
"functionCall": false,
"customCQPrompt": "",
"customExtractPrompt": "",
"defaultSystemChatPrompt": "",
"defaultConfig": {}
},
{
"model": "gpt-3.5-turbo-16k",
"name": "gpt-3.5-turbo-16k",
"maxContext": 16000,
"maxResponse": 16000,
"quoteMaxToken": 13000,
"maxTemperature": 1.2,
"charsPointsPrice": 0,
"censor": false,
"vision": false,
"datasetProcess": true,
"usedInClassify": true,
"usedInExtractFields": true,
"usedInToolCall": true,
"usedInQueryExtension": true,
"toolChoice": true,
"functionCall": false,
"customCQPrompt": "",
"customExtractPrompt": "",
"defaultSystemChatPrompt": "",
"defaultConfig": {}
},
{
"model": "gpt-4-0125-preview",
"name": "gpt-4-turbo",
"maxContext": 125000,
"maxResponse": 4000,
"quoteMaxToken": 100000,
"maxTemperature": 1.2,
"charsPointsPrice": 0,
"censor": false,
"vision": false,
"datasetProcess": false,
"usedInClassify": true,
"usedInExtractFields": true,
"usedInToolCall": true,
"usedInQueryExtension": true,
"toolChoice": true,
"functionCall": false,
"customCQPrompt": "",
"customExtractPrompt": "",
"defaultSystemChatPrompt": "",
"defaultConfig": {}
},
{
"model": "gpt-4-vision-preview",
"name": "gpt-4-vision",
"maxContext": 128000,
"maxResponse": 4000,
"quoteMaxToken": 100000,
"maxTemperature": 1.2,
"charsPointsPrice": 0,
"censor": false,
"vision": true,
"datasetProcess": false,
"usedInClassify": false,
"usedInExtractFields": false,
"usedInToolCall": false,
"usedInQueryExtension": false,
"toolChoice": true,
"functionCall": false,
"customCQPrompt": "",
"customExtractPrompt": "",
"defaultSystemChatPrompt": "",
"defaultConfig": {}
}
],
"vectorModels": [
// To use a custom embedding model (such as m3e), uncomment this entry:
// {
// "model": "m3e",
// "name": "m3e",
// "charsPointsPrice": 0,
// "defaultToken": 700,
// "maxToken": 3000,
// "weight": 100
// },
{
"model": "text-embedding-ada-002",
"name": "Embedding-2",
"charsPointsPrice": 0,
"defaultToken": 700,
"maxToken": 3000,
"weight": 100
}
],
"reRankModels": [],
"audioSpeechModels": [
{
"model": "tts-1",
"name": "OpenAI TTS1",
"charsPointsPrice": 0,
"voices": [
{
"label": "Alloy",
"value": "alloy",
"bufferId": "openai-Alloy"
},
{
"label": "Echo",
"value": "echo",
"bufferId": "openai-Echo"
},
{
"label": "Fable",
"value": "fable",
"bufferId": "openai-Fable"
},
{
"label": "Onyx",
"value": "onyx",
"bufferId": "openai-Onyx"
},
{
"label": "Nova",
"value": "nova",
"bufferId": "openai-Nova"
},
{
"label": "Shimmer",
"value": "shimmer",
"bufferId": "openai-Shimmer"
}
]
}
],
"whisperModel": {
"model": "whisper-1",
"name": "Whisper1",
"charsPointsPrice": 0
}
}
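Before mounting this file into the FastGPT container, it can save a restart loop to confirm it actually parses. A quick check, assuming python3 is available on the host (strip any annotation comments first, since they are not strict JSON):
# Fails with a parse error if config.json is malformed
python3 -m json.tool config.json > /dev/null && echo "config.json parses OK"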
Verify the installation
docker -v
docker-compose -v
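Optionally, a throwaway container run confirms that the Docker daemon itself works, not just that the CLI is installed:
# Pulls and runs the hello-world image, then removes the container
docker run --rm hello-world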
2. Create a directory and download docker-compose.yml
Run the commands below in order to create a fastgpt directory and pull docker-compose.yml and config.json; when they finish, the directory will contain these 2 files.
On non-Linux systems, or on machines without outbound internet access, you can create a directory manually and download the two files from these links: docker-compose.yml, config.json
Note: the Mongo image in docker-compose.yml is 5.x, which some servers cannot run (MongoDB 5.x requires a CPU with AVX support); in that case change its image tag to 4.4.24 by hand (a sketch of this edit follows the download commands below).
mkdir fastgpt
cd fastgpt
curl -O https://raw.githubusercontent.com/labring/FastGPT/main/files/deploy/fastgpt/docker-compose.yml
curl -O https://raw.githubusercontent.com/labring/FastGPT/main/projects/app/data/config.json
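If your server hits the Mongo 5.x limitation noted above, the only change needed is the image tag on the mongo service. A minimal sketch of the edit (the exact image prefix depends on which registry mirror your docker-compose.yml uses):
# In docker-compose.yml, under the mongo service, change the tag, e.g.
#   image: mongo:5.0.18   becomes   image: mongo:4.4.24
# then, once the stack is running, re-pull and restart just that service:
docker-compose pull mongo
docker-compose up -d mongo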
- Deploy one-api
docker run --name one-api -d --restart always -p 3080:3000 -e TZ=Asia/Shanghai -v /home/ubuntu/data/one-api:/data justsong/one-api
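A quick way to confirm the one-api container started and its web console answers on port 3080 (the admin UI path and login flow may vary by version):
# The container should show as Up; the HTTP check should return a 200/302 response
docker ps --filter name=one-api
curl -I http://localhost:3080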
#### 3. Edit the environment variables in docker-compose.yml
Change OPENAI_BASE_URL (the address of the API endpoint; it must end with /v1) and CHAT_API_KEY (the credential for that API) in docker-compose.yml.
If you use OneAPI, set OPENAI_BASE_URL to <OneAPI address>/v1 and CHAT_API_KEY to a OneAPI token.
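Once a channel and token are configured in OneAPI, you can test the relay with the standard OpenAI chat completions format before wiring it into FastGPT. In this sketch the host, token, and model name are placeholders for your own values:
# Replace <oneapi-host> and sk-xxx with your OneAPI address and token; the model must match a channel you configured
curl http://<oneapi-host>:3080/v1/chat/completions \
  -H "Content-Type: application/json" \
  -H "Authorization: Bearer sk-xxx" \
  -d '{"model": "chatglm3", "messages": [{"role": "user", "content": "hello"}]}'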
4. Start the containers
Run the following in the same directory as docker-compose.yml:
# Enter the project directory
cd <project directory>
# Start the containers
docker-compose pull
docker-compose up -d
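After the containers come up, it is worth checking their status and tailing the FastGPT log for startup errors (Ctrl+C stops following the log without stopping the container):
# List service status, then follow the fastgpt container log
docker-compose ps
docker-compose logs -f fastgpt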
5. Initialize the Mongo replica set (can be skipped for FastGPT versions before 4.6.8)
FastGPT 4.6.8 and later use MongoDB transactions, which require a replica set. Replica set initialization cannot be automated, so it must be done by hand.
# Check that the mongo container is running
docker ps
# Enter the container
docker exec -it mongo bash
# Connect to the database (fill in your Mongo username and password here)
mongo -u myusername -p mypassword --authenticationDatabase admin
# Initialize the replica set. If you need access from outside, mongo:27017 can be changed to ip:27017, but the FastGPT connection string must then be changed as well (MONGODB_URI=mongodb://myname:mypassword@mongo:27017/fastgpt?authSource=admin => MONGODB_URI=mongodb://myname:mypassword@ip:27017/fastgpt?authSource=admin)
rs.initiate({
_id: "rs0",
members: [
{ _id: 0, host: "mongo:27017" }
]
})
# Check the status. If the output reports the rs0 replica set status, initialization succeeded
rs.status()
About host: "mongo:27017"
mongo:27017 points to port 27017 of the mongo container on the shared docker network, so with this setting the database cannot be reached from outside.
ip:27017 (replace ip with your public IP) exposes access through your public IP. If you use this form, you must also change the Mongo connection parameters in docker-compose, since the default connects via mongo:27017.
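The same initialization can also be run in one shot from the host instead of an interactive shell. A sketch, assuming the container name and credentials from the docker-compose.yml shown in the next section:
# Non-interactive variant: assumes the container name "mongo" and the myusername/mypassword credentials
docker exec -it mongo mongo -u myusername -p mypassword --authenticationDatabase admin \
  --eval 'rs.initiate({_id: "rs0", members: [{_id: 0, host: "mongo:27017"}]}); rs.status()'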
Configure FastGPT
- docker-compose.yml
# Non-host version, does not use the local machine's proxy
# (If you are not familiar with Docker, you only need to care about OPENAI_BASE_URL and CHAT_API_KEY!)
version: '3.3'
services:
  pg:
    image: ankane/pgvector:v0.5.0 # git
    # image: registry.cn-hangzhou.aliyuncs.com/fastgpt/pgvector:v0.5.0 # Alibaba Cloud
    container_name: pg
    restart: always
    ports: # it is recommended not to expose this port in production
      - 5432:5432
    networks:
      - fastgpt
    environment:
      # These settings only take effect on the first run. Changing them and restarting the container
      # has no effect; you must delete the persisted data and restart for changes to apply.
      - POSTGRES_USER=username
      - POSTGRES_PASSWORD=password
      - POSTGRES_DB=postgres
    volumes:
      - ./pg/data:/var/lib/postgresql/data
  mongo:
    # image: mongo:5.0.18
    image: registry.cn-hangzhou.aliyuncs.com/fastgpt/mongo:5.0.18 # Alibaba Cloud
    container_name: mongo
    restart: always
    ports:
      - 27017:27017
    networks:
      - fastgpt
    command: mongod --keyFile /data/mongodb.key --replSet rs0
    environment:
      - MONGO_INITDB_ROOT_USERNAME=myusername
      - MONGO_INITDB_ROOT_PASSWORD=mypassword
    volumes:
      - ./mongo/data:/data/db
    entrypoint:
      - bash
      - -c
      - |
        openssl rand -base64 128 > /data/mongodb.key
        chmod 400 /data/mongodb.key
        chown 999:999 /data/mongodb.key
        exec docker-entrypoint.sh $$@
  fastgpt:
    container_name: fastgpt
    # image: ghcr.io/labring/fastgpt:latest # git
    image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:latest # Alibaba Cloud
    ports:
      - 3000:3000
    networks:
      - fastgpt
    depends_on:
      - mongo
      - pg
    restart: always
    environment:
      # root password; the username is: root
      - DEFAULT_ROOT_PSW=1234
      # API relay address; if you use the official OpenAI endpoint, this does not need changing. Be sure to append /v1
      - OPENAI_BASE_URL=http://49.235.134.82:3080/v1
      - CHAT_API_KEY=sk-KPG9q0FW5eqyqXFbA0A1Bf68485649E6961c0a243b3d024c
      - DB_MAX_LINK=5 # database max link
      - TOKEN_KEY=any
      - ROOT_KEY=root_key
      - FILE_TOKEN_KEY=filetoken
      # Mongo settings, no need to change. Username: myusername, password: mypassword.
      - MONGODB_URI=mongodb://myusername:mypassword@mongo:27017/fastgpt?authSource=admin
      # pg settings, no need to change
      - PG_URL=postgresql://username:password@pg:5432/postgres
    volumes:
      - ./config.json:/app/data/config.json
networks:
  fastgpt:
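After editing config.json or the environment variables above, the fastgpt container has to be recreated for the changes to take effect. One way to do that from the same directory:
# Recreate the fastgpt service so the new config.json and environment variables are picked up
docker-compose up -d --force-recreate fastgpt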
Deploy ChatGLM3-6B (requires a high-performance server)
- Connect to the server via terminal
- Update the package sources: apt update
- Install git and git-lfs: apt install git git-lfs
- Log in to the ModelScope (魔搭) community and download the models:
  1. chatglm3-6b
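One way to fetch the weights is a git-lfs clone from ModelScope into the /root/models directory used below; the repository path here is an assumption, so adjust it to wherever you actually obtain the model:
# Hypothetical ModelScope repo path; replace it if your model source differs
mkdir -p /root/models && cd /root/models
git lfs install
git clone https://www.modelscope.cn/ZhipuAI/chatglm3-6b.git
# The m3e embedding model referenced later is expected at /root/models/m3e-base and can be fetched the same way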
- Clone the ChatGLM3 code repository and install its dependencies
# Create a working directory and enter it
mkdir webcodes && cd webcodes
git clone https://github.com/THUDM/ChatGLM3.git
# Enter the project and install its dependencies
cd ChatGLM3
pip install -r requirements.txt
- Go to the chatglm3-6b model directory and copy its path, e.g. /root/models/chatglm3-6b
- Edit the basic_demo/web_demo_streamlit.py file in ChatGLM3
- Replace the model path in the file with the local model path:
MODEL_PATH = os.environ.get('MODEL_PATH', '/root/models/chatglm3-6b')
- Start the project
cd basic_demo/
streamlit run web_demo_streamlit.py
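Streamlit listens on port 8501 by default; if you need to reach the demo from another machine, bind it to all interfaces explicitly:
# Same as above, but reachable from outside the server (firewall permitting)
streamlit run web_demo_streamlit.py --server.address 0.0.0.0 --server.port 8501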
Configure ChatGLM3
Enter the ChatGLM3 project directory
cd openai_api_demo/
# Edit api_server.py: point MODEL_PATH at the local model
MODEL_PATH = os.environ.get('MODEL_PATH', '/root/models/chatglm3-6b')
# Change the embedding model to m3e
EMBEDDING_PATH = os.environ.get('EMBEDDING_PATH', '/root/models/m3e-base')
# Change device_map="auto" to "cuda"
model = AutoModel.from_pretrained(MODEL_PATH, trust_remote_code=True, device_map="cuda").eval()
# After saving, run
python api_server.py
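Once api_server.py is running, you can hit its OpenAI-compatible endpoint directly. This sketch assumes the demo's default port 8000 and the model name it registers (chatglm3-6b); both may differ in your setup:
# Assumes the default port 8000; adjust the port and model name if api_server.py is configured differently
curl http://localhost:8000/v1/chat/completions \
  -H "Content-Type: application/json" \
  -d '{"model": "chatglm3-6b", "messages": [{"role": "user", "content": "Hello"}]}'
In this guide's setup FastGPT does not call this endpoint directly; a OneAPI channel fronts it, and FastGPT's OPENAI_BASE_URL points at OneAPI.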
References: