Compare commits
33 Commits
| SHA1 |
|---|
| 101df2b470 |
| c055f40dff |
| 7da3f88e54 |
| 10b79effab |
| 7e41b4bc94 |
| ed6081845a |
| cda7b607cb |
| 962c66714e |
| 39f1feaccb |
| 1dada69daa |
| fe2f5205fc |
| ac574af60a |
| 0499a3f621 |
| 453c29170f |
| e8570da856 |
| dd7559a009 |
| 3719ff7299 |
| 800b5c7aaa |
| f12f30bb7b |
| 30846c83b2 |
| 2afe7a74b3 |
| d4e0bfc8a5 |
| 044daff668 |
| 03f8b01b3b |
| ad6f0a1ce5 |
| b3843138f4 |
| e0bdcbbeba |
| 582340a184 |
| 890561703b |
| a7be5d4e8b |
| c344486aa0 |
| 111501af5e |
| 9e75bd4d88 |
.gitattributes (vendored, new file, 1 addition)

@@ -0,0 +1 @@
+*.sh text eol=lf
.github/pull_request_template.md (vendored, 5 changes)

@@ -2,16 +2,11 @@

_Briefly describe what this PR aims to solve. Include background context that will help reviewers understand the purpose of the PR._

Issue link:#[Link the issue here]

### Type of change

- [ ] Bug Fix (non-breaking change which fixes an issue)
- [ ] New Feature (non-breaking change which adds functionality)
- [ ] Breaking Change (fix or feature that could cause existing functionality not to work as expected)
- [ ] Documentation Update
- [ ] Refactoring
- [ ] Performance Improvement
- [ ] Test cases
- [ ] Python SDK impacted, Need to update PyPI
- [ ] Other (please describe):
Dockerfile (40 changes)

@@ -1,20 +1,20 @@
FROM swr.cn-north-4.myhuaweicloud.com/infiniflow/ragflow-base:v1.0
USER root

WORKDIR /ragflow

ADD ./web ./web
RUN cd ./web && npm i && npm run build

ADD ./api ./api
ADD ./conf ./conf
ADD ./deepdoc ./deepdoc
ADD ./rag ./rag

ENV PYTHONPATH=/ragflow/
ENV HF_ENDPOINT=https://hf-mirror.com

ADD docker/entrypoint.sh ./entrypoint.sh
RUN chmod +x ./entrypoint.sh

ENTRYPOINT ["./entrypoint.sh"]
Dockerfile.scratch (new file, 54 lines)

@@ -0,0 +1,54 @@
+FROM ubuntu:22.04
+USER root
+
+WORKDIR /ragflow
+
+RUN apt-get update && apt-get install -y wget curl build-essential libopenmpi-dev
+
+RUN wget https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh -O ~/miniconda.sh && \
+    bash ~/miniconda.sh -b -p /root/miniconda3 && \
+    rm ~/miniconda.sh && ln -s /root/miniconda3/etc/profile.d/conda.sh /etc/profile.d/conda.sh && \
+    echo ". /root/miniconda3/etc/profile.d/conda.sh" >> ~/.bashrc && \
+    echo "conda activate base" >> ~/.bashrc
+
+ENV PATH /root/miniconda3/bin:$PATH
+
+RUN conda create -y --name py11 python=3.11
+
+ENV CONDA_DEFAULT_ENV py11
+ENV CONDA_PREFIX /root/miniconda3/envs/py11
+ENV PATH $CONDA_PREFIX/bin:$PATH
+
+RUN curl -sL https://deb.nodesource.com/setup_14.x | bash -
+RUN apt-get install -y nodejs
+
+RUN apt-get install -y nginx
+
+ADD ./web ./web
+ADD ./api ./api
+ADD ./conf ./conf
+ADD ./deepdoc ./deepdoc
+ADD ./rag ./rag
+ADD ./requirements.txt ./requirements.txt
+
+RUN apt install openmpi-bin openmpi-common libopenmpi-dev
+ENV LD_LIBRARY_PATH /usr/lib/x86_64-linux-gnu/openmpi/lib:$LD_LIBRARY_PATH
+RUN rm /root/miniconda3/envs/py11/compiler_compat/ld
+RUN cd ./web && npm i && npm run build
+RUN conda run -n py11 pip install -i https://mirrors.aliyun.com/pypi/simple/ -r ./requirements.txt
+
+RUN apt-get update && \
+    apt-get install -y libglib2.0-0 libgl1-mesa-glx && \
+    rm -rf /var/lib/apt/lists/*
+
+RUN conda run -n py11 pip install -i https://mirrors.aliyun.com/pypi/simple/ ollama
+RUN conda run -n py11 python -m nltk.downloader punkt
+RUN conda run -n py11 python -m nltk.downloader wordnet
+
+ENV PYTHONPATH=/ragflow/
+ENV HF_ENDPOINT=https://hf-mirror.com
+
+ADD docker/entrypoint.sh ./entrypoint.sh
+RUN chmod +x ./entrypoint.sh
+
+ENTRYPOINT ["./entrypoint.sh"]
README.md (13 changes)

@@ -15,7 +15,7 @@
    <img alt="Static Badge" src="https://img.shields.io/badge/RAGFLOW-LLM-white?&labelColor=dd0af7"></a>
    <a href="https://hub.docker.com/r/infiniflow/ragflow" target="_blank">
    <img src="https://img.shields.io/badge/docker_pull-ragflow:v1.0-brightgreen"
-        alt="docker pull ragflow:v1.0"></a>
+        alt="docker pull infiniflow/ragflow:v0.3.0"></a>
    <a href="https://github.com/infiniflow/ragflow/blob/main/LICENSE">
    <img height="21" src="https://img.shields.io/badge/License-Apache--2.0-ffffff?style=flat-square&labelColor=d4eaf7&color=7d09f1" alt="license">
    </a>

@@ -55,6 +55,9 @@

## 📌 Latest Features

+- 2024-04-19 Support conversation API([detail](./docs/conversation_api.md)).
+- 2024-04-16 Add an embedding model 'bce-embedding-base_v1' from [BCEmbedding](https://github.com/netease-youdao/BCEmbedding).
+- 2024-04-16 Add [FastEmbed](https://github.com/qdrant/fastembed), which is designed specifically for light and speedy embedding.
- 2024-04-11 Support [Xinference](./docs/xinference.md) for local LLM deployment.
- 2024-04-10 Add a new layout recognition model for analyzing Laws documentation.
- 2024-04-08 Support [Ollama](./docs/ollama.md) for local LLM deployment.

@@ -70,8 +73,8 @@

### 📝 Prerequisites

-- CPU >= 2 cores
-- RAM >= 8 GB
+- CPU >= 4 cores
+- RAM >= 12 GB
- Docker >= 24.0.0 & Docker Compose >= v2.26.1
> If you have not installed Docker on your local machine (Windows, Mac, or Linux), see [Install Docker Engine](https://docs.docker.com/engine/install/).

@@ -137,7 +140,7 @@
   ```

5. In your web browser, enter the IP address of your server and log in to RAGFlow.
-   > In the given scenario, you only need to enter `http://IP_OF_YOUR_MACHINE` (sans port number) as the default HTTP serving port `80` can be omitted when using the default configurations.
+   > In the given scenario, you only need to enter `http://IP_OF_YOUR_MACHINE` (**sans** port number) as the default HTTP serving port `80` can be omitted when using the default configurations.
6. In [service_conf.yaml](./docker/service_conf.yaml), select the desired LLM factory in `user_default_llm` and update the `API_KEY` field with the corresponding API key.

   > See [./docs/llm_api_key_setup.md](./docs/llm_api_key_setup.md) for more information.

@@ -171,7 +174,7 @@ To build the Docker images from source:

```bash
$ git clone https://github.com/infiniflow/ragflow.git
$ cd ragflow/
-$ docker build -t infiniflow/ragflow:v1.0 .
+$ docker build -t infiniflow/ragflow:v0.3.0 .
$ cd ragflow/docker
$ chmod +x ./entrypoint.sh
$ docker compose up -d
```
README_ja.md (11 changes; Japanese text translated to English below)

@@ -15,7 +15,7 @@
    <img alt="Static Badge" src="https://img.shields.io/badge/RAGFLOW-LLM-white?&labelColor=dd0af7"></a>
    <a href="https://hub.docker.com/r/infiniflow/ragflow" target="_blank">
    <img src="https://img.shields.io/badge/docker_pull-ragflow:v1.0-brightgreen"
-        alt="docker pull ragflow:v1.0"></a>
+        alt="docker pull infiniflow/ragflow:v0.3.0"></a>
    <a href="https://github.com/infiniflow/ragflow/blob/main/LICENSE">
    <img height="21" src="https://img.shields.io/badge/License-Apache--2.0-ffffff?style=flat-square&labelColor=d4eaf7&color=7d09f1" alt="license">
    </a>

@@ -55,6 +55,9 @@

## 📌 Latest Features

+- 2024-04-19 Support the conversation API ([details](./docs/conversation_api.md)).
+- 2024-04-16 Add the embedding model 'bce-embedding-base_v1' from [BCEmbedding](https://github.com/netease-youdao/BCEmbedding).
+- 2024-04-16 Add [FastEmbed](https://github.com/qdrant/fastembed), designed for light and fast embedding.
- 2024-04-11 Support [Xinference](./docs/xinference.md) for local LLM deployment.
- 2024-04-10 Add a new layout recognition model for the 'Laws' method.
- 2024-04-08 Support localized LLM deployment via [Ollama](./docs/ollama.md).

@@ -70,8 +73,8 @@

### 📝 Prerequisites

-- CPU >= 2 cores
-- RAM >= 8 GB
+- CPU >= 4 cores
+- RAM >= 12 GB
- Docker >= 24.0.0 & Docker Compose >= v2.26.1
> If you have not installed Docker on your local machine (Windows, Mac, or Linux), see [Install Docker Engine](https://docs.docker.com/engine/install/).

@@ -171,7 +174,7 @@

```bash
$ git clone https://github.com/infiniflow/ragflow.git
$ cd ragflow/
-$ docker build -t infiniflow/ragflow:v1.0 .
+$ docker build -t infiniflow/ragflow:v0.3.0 .
$ cd ragflow/docker
$ chmod +x ./entrypoint.sh
$ docker compose up -d
```
README_zh.md (11 changes; Chinese text translated to English below)

@@ -15,7 +15,7 @@
    <img alt="Static Badge" src="https://img.shields.io/badge/RAGFLOW-LLM-white?&labelColor=dd0af7"></a>
    <a href="https://hub.docker.com/r/infiniflow/ragflow" target="_blank">
    <img src="https://img.shields.io/badge/docker_pull-ragflow:v1.0-brightgreen"
-        alt="docker pull ragflow:v1.0"></a>
+        alt="docker pull infiniflow/ragflow:v0.3.0"></a>
    <a href="https://github.com/infiniflow/ragflow/blob/main/LICENSE">
    <img height="21" src="https://img.shields.io/badge/License-Apache--2.0-ffffff?style=flat-square&labelColor=d4eaf7&color=7d09f1" alt="license">
    </a>

@@ -55,6 +55,9 @@

## 📌 Latest Features

+- 2024-04-19 Support conversation API ([more](./docs/conversation_api.md)).
+- 2024-04-16 Add the embedding model from [BCEmbedding](https://github.com/netease-youdao/BCEmbedding).
+- 2024-04-16 Add [FastEmbed](https://github.com/qdrant/fastembed), designed for light and fast embedding.
- 2024-04-11 Support local LLM deployment via [Xinference](./docs/xinference.md).
- 2024-04-10 Add a new underlying model for 'Laws' layout analysis.
- 2024-04-08 Support local LLM deployment via [Ollama](./docs/ollama.md).

@@ -70,8 +73,8 @@

### 📝 Prerequisites

-- CPU >= 2 cores
-- RAM >= 8 GB
+- CPU >= 4 cores
+- RAM >= 12 GB
- Docker >= 24.0.0 & Docker Compose >= v2.26.1
> If Docker is not installed on your local machine (Windows, Mac, or Linux), see [Install Docker Engine](https://docs.docker.com/engine/install/) to install it yourself.

@@ -171,7 +174,7 @@

```bash
$ git clone https://github.com/infiniflow/ragflow.git
$ cd ragflow/
-$ docker build -t infiniflow/ragflow:v1.0 .
+$ docker build -t infiniflow/ragflow:v0.3.0 .
$ cd ragflow/docker
$ chmod +x ./entrypoint.sh
$ docker compose up -d
```
@@ -105,8 +105,8 @@ def stats():
        res = {
            "pv": [(o["dt"], o["pv"]) for o in objs],
            "uv": [(o["dt"], o["uv"]) for o in objs],
-           "speed": [(o["dt"], o["tokens"]/o["duration"]) for o in objs],
-           "tokens": [(o["dt"], o["tokens"]/1000.) for o in objs],
+           "speed": [(o["dt"], float(o["tokens"])/(float(o["duration"]+0.1))) for o in objs],
+           "tokens": [(o["dt"], float(o["tokens"])/1000.) for o in objs],
            "round": [(o["dt"], o["round"]) for o in objs],
            "thumb_up": [(o["dt"], o["thumb_up"]) for o in objs]
        }
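The `speed` rewrite above is a divide-by-zero guard: `o["duration"]` can legitimately be zero, and the old expression crashed the stats endpoint on such rows. A minimal sketch with an invented sample record:

```python
# Invented sample row; the field names match the diff above.
objs = [{"dt": "2024-04-18", "tokens": 1200, "duration": 0}]

# Old expression: raises ZeroDivisionError on a zero-duration row.
# speed = [o["tokens"] / o["duration"] for o in objs]

# New expression: the +0.1 offset keeps the denominator positive.
speed = [float(o["tokens"]) / (float(o["duration"] + 0.1)) for o in objs]
print(speed)  # [12000.0] instead of a crash
```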
@@ -115,8 +115,7 @@ def stats():
        return server_error_response(e)


-@manager.route('/new_conversation', methods=['POST'])
-@validate_request("user_id")
+@manager.route('/new_conversation', methods=['GET'])
def set_conversation():
    token = request.headers.get('Authorization').split()[1]
    objs = APIToken.query(token=token)

@@ -131,7 +130,7 @@ def set_conversation():
        conv = {
            "id": get_uuid(),
            "dialog_id": dia.id,
-           "user_id": req["user_id"],
+           "user_id": request.args.get("user_id", ""),
            "message": [{"role": "assistant", "content": dia.prompt_config["prologue"]}]
        }
        API4ConversationService.save(**conv)

@@ -177,7 +176,6 @@ def completion():
        conv.reference.append(ans["reference"])
        conv.message.append({"role": "assistant", "content": ans["answer"]})
        API4ConversationService.append_message(conv.id, conv.to_dict())
-       APITokenService.APITokenService(token)
        return get_json_result(data=ans)
    except Exception as e:
        return server_error_response(e)

@@ -252,7 +252,7 @@ def retrieval_test():
            return get_data_error_result(retmsg="Knowledgebase not found!")

        embd_mdl = TenantLLMService.model_instance(
-           kb.tenant_id, LLMType.EMBEDDING.value)
+           kb.tenant_id, LLMType.EMBEDDING.value, llm_name=kb.embd_id)
        ranks = retrievaler.retrieval(question, embd_mdl, kb.tenant_id, [kb_id], page, size, similarity_threshold,
                                      vector_similarity_weight, top, doc_ids)
        for c in ranks["chunks"]:
@@ -15,6 +15,7 @@
#

import base64
+import os
import pathlib
import re

@@ -57,7 +58,7 @@ def upload():
    if not e:
        return get_data_error_result(
            retmsg="Can't find this knowledgebase!")
-   if DocumentService.get_doc_count(kb.tenant_id) >= 128:
+   if DocumentService.get_doc_count(kb.tenant_id) >= int(os.environ.get('MAX_FILE_NUM_PER_USER', 8192)):
        return get_data_error_result(
            retmsg="Exceed the maximum file number of a free user!")
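The upload cap moves from a hard-coded 128 documents to an environment variable with an 8192 default. A sketch of the same pattern in isolation (the helper name here is invented for illustration):

```python
import os

# Env var wins when set; otherwise fall back to a generous default,
# exactly as in the upload() guard above.
MAX_DOCS = int(os.environ.get('MAX_FILE_NUM_PER_USER', 8192))

def under_quota(doc_count: int) -> bool:  # hypothetical helper
    # Mirrors the upload() check: reject once a tenant hits the cap.
    return doc_count < MAX_DOCS
```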
@@ -28,7 +28,7 @@ from rag.llm import EmbeddingModel, ChatModel
def factories():
    try:
        fac = LLMFactoriesService.get_all()
-       return get_json_result(data=[f.to_dict() for f in fac])
+       return get_json_result(data=[f.to_dict() for f in fac if f.name not in ["QAnything", "FastEmbed"]])
    except Exception as e:
        return server_error_response(e)

@@ -174,7 +174,7 @@ def list():
    llms = [m.to_dict()
            for m in llms if m.status == StatusEnum.VALID.value]
    for m in llms:
-       m["available"] = m["fid"] in facts or m["llm_name"].lower() == "flag-embedding"
+       m["available"] = m["fid"] in facts or m["llm_name"].lower() == "flag-embedding" or m["fid"] in ["QAnything","FastEmbed"]

    llm_set = set([m["llm_name"] for m in llms])
    for o in objs:
@@ -14,6 +14,7 @@
# limitations under the License.
#
import re
+from datetime import datetime

from flask import request, session, redirect
from werkzeug.security import generate_password_hash, check_password_hash

@@ -22,7 +23,7 @@ from flask_login import login_required, current_user, login_user, logout_user
from api.db.db_models import TenantLLM
from api.db.services.llm_service import TenantLLMService, LLMService
from api.utils.api_utils import server_error_response, validate_request
-from api.utils import get_uuid, get_format_time, decrypt, download_img
+from api.utils import get_uuid, get_format_time, decrypt, download_img, current_timestamp, datetime_format
from api.db import UserTenantRole, LLMType
from api.settings import RetCode, GITHUB_OAUTH, CHAT_MDL, EMBEDDING_MDL, ASR_MDL, IMAGE2TEXT_MDL, PARSERS, API_KEY, \
    LLM_FACTORY, LLM_BASE_URL

@@ -56,6 +57,8 @@ def login():
        response_data = user.to_json()
        user.access_token = get_uuid()
        login_user(user)
+       user.update_time = current_timestamp(),
+       user.update_date = datetime_format(datetime.now()),
        user.save()
        msg = "Welcome back!"
        return cors_reponse(data=response_data, auth=user.get_id(), retmsg=msg)
@@ -629,7 +629,7 @@ class Document(DataBaseModel):
        max_length=128,
        null=False,
        default="local",
-       help_text="where dose this document from")
+       help_text="where dose this document come from")
    type = CharField(max_length=32, null=False, help_text="file extension")
    created_by = CharField(
        max_length=32,
@@ -18,7 +18,7 @@ import time
import uuid

from api.db import LLMType, UserTenantRole
-from api.db.db_models import init_database_tables as init_web_db, LLMFactories, LLM
+from api.db.db_models import init_database_tables as init_web_db, LLMFactories, LLM, TenantLLM
from api.db.services import UserService
from api.db.services.llm_service import LLMFactoriesService, LLMService, TenantLLMService, LLMBundle
from api.db.services.user_service import TenantService, UserTenantService

@@ -114,12 +114,16 @@ factory_infos = [{
    "logo": "",
    "tags": "TEXT EMBEDDING",
    "status": "1",
-},
-{
+}, {
    "name": "Xinference",
    "logo": "",
    "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
    "status": "1",
+},{
+   "name": "QAnything",
+   "logo": "",
+   "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
+   "status": "1",
},
# {
#     "name": "文心一言",

@@ -254,12 +258,6 @@ def init_llm_factory():
        "tags": "LLM,CHAT,",
        "max_tokens": 7900,
        "model_type": LLMType.CHAT.value
    }, {
-       "fid": factory_infos[4]["name"],
-       "llm_name": "flag-embedding",
-       "tags": "TEXT EMBEDDING,",
-       "max_tokens": 128 * 1000,
-       "model_type": LLMType.EMBEDDING.value
-   }, {
        "fid": factory_infos[4]["name"],
        "llm_name": "moonshot-v1-32k",

@@ -325,6 +323,14 @@ def init_llm_factory():
        "max_tokens": 2147483648,
        "model_type": LLMType.EMBEDDING.value
    },
+   # ------------------------ QAnything -----------------------
+   {
+       "fid": factory_infos[7]["name"],
+       "llm_name": "maidalun1020/bce-embedding-base_v1",
+       "tags": "TEXT EMBEDDING,",
+       "max_tokens": 512,
+       "model_type": LLMType.EMBEDDING.value
+   },
    ]
    for info in factory_infos:
        try:

@@ -337,8 +343,10 @@ def init_llm_factory():
        except Exception as e:
            pass

-   LLMFactoriesService.filter_delete([LLMFactories.name=="Local"])
-   LLMService.filter_delete([LLM.fid=="Local"])
+   LLMFactoriesService.filter_delete([LLMFactories.name == "Local"])
+   LLMService.filter_delete([LLM.fid == "Local"])
+   LLMService.filter_delete([LLM.fid == "Moonshot", LLM.llm_name == "flag-embedding"])
+   TenantLLMService.filter_delete([TenantLLM.llm_factory == "Moonshot", TenantLLM.llm_name == "flag-embedding"])

    """
    drop table llm;
@@ -40,8 +40,8 @@ class API4ConversationService(CommonService):
    @classmethod
    @DB.connection_context()
    def append_message(cls, id, conversation):
-       cls.model.update_by_id(id, conversation)
-       return cls.model.update(round=cls.model.round + 1).where(id=id).execute()
+       cls.update_by_id(id, conversation)
+       return cls.model.update(round=cls.model.round + 1).where(cls.model.id==id).execute()

    @classmethod
    @DB.connection_context()
@@ -80,8 +80,12 @@ def chat(dialog, messages, **kwargs):
        raise LookupError("LLM(%s) not found" % dialog.llm_id)
        max_tokens = 1024
    else: max_tokens = llm[0].max_tokens
+   kbs = KnowledgebaseService.get_by_ids(dialog.kb_ids)
+   embd_nms = list(set([kb.embd_id for kb in kbs]))
+   assert len(embd_nms) == 1, "Knowledge bases use different embedding models."

    questions = [m["content"] for m in messages if m["role"] == "user"]
-   embd_mdl = LLMBundle(dialog.tenant_id, LLMType.EMBEDDING)
+   embd_mdl = LLMBundle(dialog.tenant_id, LLMType.EMBEDDING, embd_nms[0])
    chat_mdl = LLMBundle(dialog.tenant_id, LLMType.CHAT, dialog.llm_id)

    prompt_config = dialog.prompt_config
@@ -66,7 +66,7 @@ class TenantLLMService(CommonService):
            raise LookupError("Tenant not found")

        if llm_type == LLMType.EMBEDDING.value:
-           mdlnm = tenant.embd_id
+           mdlnm = tenant.embd_id if not llm_name else llm_name
        elif llm_type == LLMType.SPEECH2TEXT.value:
            mdlnm = tenant.asr_id
        elif llm_type == LLMType.IMAGE2TEXT.value:

@@ -77,9 +77,19 @@ class TenantLLMService(CommonService):
            assert False, "LLM type error"

        model_config = cls.get_api_key(tenant_id, mdlnm)
-       if not model_config:
-           raise LookupError("Model({}) not authorized".format(mdlnm))
-       model_config = model_config.to_dict()
+       if model_config: model_config = model_config.to_dict()
+       if llm_type == LLMType.EMBEDDING.value:
+           llm = LLMService.query(llm_name=llm_name)
+           if llm and llm[0].fid in ["QAnything", "FastEmbed"]:
+               model_config = {"llm_factory": llm[0].fid, "api_key":"", "llm_name": llm_name, "api_base": ""}
+       if not model_config:
+           if llm_name == "flag-embedding":
+               model_config = {"llm_factory": "Tongyi-Qianwen", "api_key": "",
+                               "llm_name": llm_name, "api_base": ""}
+           else:
+               raise LookupError("Model({}) not authorized".format(mdlnm))

        if llm_type == LLMType.EMBEDDING.value:
            if model_config["llm_factory"] not in EmbeddingModel:
                return
@@ -13,6 +13,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
+import random

from peewee import Expression
from api.db.db_models import DB
from api.db import StatusEnum, FileType, TaskStatus

@@ -26,7 +28,7 @@ class TaskService(CommonService):

    @classmethod
    @DB.connection_context()
-   def get_tasks(cls, tm, mod=0, comm=1, items_per_page=64):
+   def get_tasks(cls, tm, mod=0, comm=1, items_per_page=1, takeit=True):
        fields = [
            cls.model.id,
            cls.model.doc_id,

@@ -41,24 +43,32 @@ class TaskService(CommonService):
            Document.size,
            Knowledgebase.tenant_id,
            Knowledgebase.language,
-           Tenant.embd_id,
+           Knowledgebase.embd_id,
            Tenant.img2txt_id,
            Tenant.asr_id,
            cls.model.update_time]
-       docs = cls.model.select(*fields) \
-           .join(Document, on=(cls.model.doc_id == Document.id)) \
-           .join(Knowledgebase, on=(Document.kb_id == Knowledgebase.id)) \
-           .join(Tenant, on=(Knowledgebase.tenant_id == Tenant.id))\
-           .where(
-               Document.status == StatusEnum.VALID.value,
-               Document.run == TaskStatus.RUNNING.value,
-               ~(Document.type == FileType.VIRTUAL.value),
-               cls.model.progress == 0,
-               cls.model.update_time >= tm,
-               (Expression(cls.model.create_time, "%%", comm) == mod))\
-           .order_by(cls.model.update_time.asc())\
-           .paginate(1, items_per_page)
-       return list(docs.dicts())
+       with DB.lock("get_task", -1):
+           docs = cls.model.select(*fields) \
+               .join(Document, on=(cls.model.doc_id == Document.id)) \
+               .join(Knowledgebase, on=(Document.kb_id == Knowledgebase.id)) \
+               .join(Tenant, on=(Knowledgebase.tenant_id == Tenant.id))\
+               .where(
+                   Document.status == StatusEnum.VALID.value,
+                   Document.run == TaskStatus.RUNNING.value,
+                   ~(Document.type == FileType.VIRTUAL.value),
+                   cls.model.progress == 0,
+                   #cls.model.update_time >= tm,
+                   #(Expression(cls.model.create_time, "%%", comm) == mod)
+               )\
+               .order_by(cls.model.update_time.asc())\
+               .paginate(0, items_per_page)
+           docs = list(docs.dicts())
+           if not docs: return []
+           if not takeit: return docs
+
+           cls.model.update(progress_msg=cls.model.progress_msg + "\n" + "Task has been received.", progress=random.random()/10.).where(
+               cls.model.id == docs[0]["id"]).execute()
+           return docs

    @classmethod
    @DB.connection_context()

@@ -74,9 +84,10 @@ class TaskService(CommonService):
    @classmethod
    @DB.connection_context()
    def update_progress(cls, id, info):
-       if info["progress_msg"]:
-           cls.model.update(progress_msg=cls.model.progress_msg + "\n" + info["progress_msg"]).where(
-               cls.model.id == id).execute()
-       if "progress" in info:
-           cls.model.update(progress=info["progress"]).where(
-               cls.model.id == id).execute()
+       with DB.lock("update_progress", -1):
+           if info["progress_msg"]:
+               cls.model.update(progress_msg=cls.model.progress_msg + "\n" + info["progress_msg"]).where(
+                   cls.model.id == id).execute()
+           if "progress" in info:
+               cls.model.update(progress=info["progress"]).where(
+                   cls.model.id == id).execute()
@@ -3,6 +3,8 @@ from openpyxl import load_workbook
import sys
from io import BytesIO

+from rag.nlp import find_codec


class HuExcelParser:
    def html(self, fnm):

@@ -66,7 +68,8 @@ class HuExcelParser:
            return total

        if fnm.split(".")[-1].lower() in ["csv", "txt"]:
-           txt = binary.decode("utf-8")
+           encoding = find_codec(binary)
+           txt = binary.decode(encoding)
            return len(txt.split("\n"))
@@ -43,7 +43,9 @@ class HuParser:
                model_dir, "updown_concat_xgb.model"))
        except Exception as e:
            model_dir = snapshot_download(
-               repo_id="InfiniFlow/text_concat_xgb_v1.0")
+               repo_id="InfiniFlow/text_concat_xgb_v1.0",
+               local_dir=os.path.join(get_project_base_directory(), "rag/res/deepdoc"),
+               local_dir_use_symlinks=False)
            self.updown_cnt_mdl.load_model(os.path.join(
                model_dir, "updown_concat_xgb.model"))
@@ -62,7 +64,7 @@ class HuParser:
        """

    def __char_width(self, c):
-       return (c["x1"] - c["x0"]) // len(c["text"])
+       return (c["x1"] - c["x0"]) // max(len(c["text"]), 1)

    def __height(self, c):
        return c["bottom"] - c["top"]
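`__char_width` used to divide by the raw text length, so a character box with empty text raised `ZeroDivisionError` mid-parse; clamping the divisor with `max(..., 1)` is the fix. A standalone sketch with invented box dicts shaped like the ones in the surrounding parser code:

```python
def char_width(c: dict) -> int:
    # max(..., 1) keeps the divisor positive for empty-text boxes.
    return (c["x1"] - c["x0"]) // max(len(c["text"]), 1)

print(char_width({"x0": 10, "x1": 58, "text": "deep"}))  # 12
print(char_width({"x0": 10, "x1": 58, "text": ""}))      # 48, not a crash
```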
@@ -828,9 +830,13 @@ class HuParser:
        pn = [bx["page_number"]]
        top = bx["top"] - self.page_cum_height[pn[0] - 1]
        bott = bx["bottom"] - self.page_cum_height[pn[0] - 1]
+       page_images_cnt = len(self.page_images)
+       if pn[-1] - 1 >= page_images_cnt: return ""
        while bott * ZM > self.page_images[pn[-1] - 1].size[1]:
            bott -= self.page_images[pn[-1] - 1].size[1] / ZM
            pn.append(pn[-1] + 1)
+           if pn[-1] - 1 >= page_images_cnt:
+               return ""

        return "@@{}\t{:.1f}\t{:.1f}\t{:.1f}\t{:.1f}##" \
            .format("-".join([str(p) for p in pn]),
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-import re, copy, time, datetime, demjson, \
+import re, copy, time, datetime, demjson3, \
    traceback, signal
import numpy as np
from deepdoc.parser.resume.entities import degrees, schools, corporations

@@ -197,7 +197,7 @@ def forProj(cv):


def json_loads(line):
-   return demjson.decode(re.sub(r": *(True|False)", r": '\1'", line))
+   return demjson3.decode(re.sub(r": *(True|False)", r": '\1'", line))


def forWork(cv):
@@ -43,7 +43,9 @@ class LayoutRecognizer(Recognizer):
                "rag/res/deepdoc")
            super().__init__(self.labels, domain, model_dir)
        except Exception as e:
-           model_dir = snapshot_download(repo_id="InfiniFlow/deepdoc")
+           model_dir = snapshot_download(repo_id="InfiniFlow/deepdoc",
+                                         local_dir=os.path.join(get_project_base_directory(), "rag/res/deepdoc"),
+                                         local_dir_use_symlinks=False)
            super().__init__(self.labels, domain, model_dir)

        self.garbage_layouts = ["footer", "header", "reference"]

@@ -486,7 +486,9 @@ class OCR(object):
            self.text_detector = TextDetector(model_dir)
            self.text_recognizer = TextRecognizer(model_dir)
        except Exception as e:
-           model_dir = snapshot_download(repo_id="InfiniFlow/deepdoc")
+           model_dir = snapshot_download(repo_id="InfiniFlow/deepdoc",
+                                         local_dir=os.path.join(get_project_base_directory(), "rag/res/deepdoc"),
+                                         local_dir_use_symlinks=False)
            self.text_detector = TextDetector(model_dir)
            self.text_recognizer = TextRecognizer(model_dir)

@@ -41,7 +41,9 @@ class Recognizer(object):
                "rag/res/deepdoc")
            model_file_path = os.path.join(model_dir, task_name + ".onnx")
            if not os.path.exists(model_file_path):
-               model_dir = snapshot_download(repo_id="InfiniFlow/deepdoc")
+               model_dir = snapshot_download(repo_id="InfiniFlow/deepdoc",
+                                             local_dir=os.path.join(get_project_base_directory(), "rag/res/deepdoc"),
+                                             local_dir_use_symlinks=False)
                model_file_path = os.path.join(model_dir, task_name + ".onnx")
        else:
            model_file_path = os.path.join(model_dir, task_name + ".onnx")

@@ -39,7 +39,9 @@ class TableStructureRecognizer(Recognizer):
                get_project_base_directory(),
                "rag/res/deepdoc"))
        except Exception as e:
-           super().__init__(self.labels, "tsr", snapshot_download(repo_id="InfiniFlow/deepdoc"))
+           super().__init__(self.labels, "tsr", snapshot_download(repo_id="InfiniFlow/deepdoc",
+                                                                  local_dir=os.path.join(get_project_base_directory(), "rag/res/deepdoc"),
+                                                                  local_dir_use_symlinks=False))

    def __call__(self, images, thr=0.2):
        tbls = super().__call__(images, thr)
@@ -16,11 +16,17 @@ MEM_LIMIT=4073741824
MYSQL_PASSWORD=infini_rag_flow
MYSQL_PORT=5455

+# Port to expose minio to the host
+MINIO_CONSOLE_PORT=9001
+MINIO_PORT=9000
+
MINIO_USER=rag_flow
MINIO_PASSWORD=infini_rag_flow

SVR_HTTP_PORT=9380

+RAGFLOW_VERSION=v0.3.0
+
TIMEZONE='Asia/Shanghai'

######## OS setup for ES ###########
@@ -9,7 +9,7 @@ services:
        condition: service_healthy
      es01:
        condition: service_healthy
-   image: swr.cn-north-4.myhuaweicloud.com/infiniflow/ragflow:v1.0
+   image: swr.cn-north-4.myhuaweicloud.com/infiniflow/ragflow:${RAGFLOW_VERSION}
    container_name: ragflow-server
    ports:
      - ${SVR_HTTP_PORT}:9380

@@ -80,8 +80,8 @@ services:
    container_name: ragflow-minio
    command: server --console-address ":9001" /data
    ports:
-     - 9000:9000
-     - 9001:9001
+     - ${MINIO_PORT}:9000
+     - ${MINIO_CONSOLE_PORT}:9001
    environment:
      - MINIO_ROOT_USER=${MINIO_USER}
      - MINIO_ROOT_PASSWORD=${MINIO_PASSWORD}

@@ -9,9 +9,10 @@ services:
        condition: service_healthy
      es01:
        condition: service_healthy
-   image: infiniflow/ragflow:v1.0
+   image: infiniflow/ragflow:${RAGFLOW_VERSION}
    container_name: ragflow-server
    ports:
      - ${SVR_HTTP_PORT}:9380
      - 80:80
      - 443:443

@@ -23,7 +24,7 @@ services:
      - ./nginx/nginx.conf:/etc/nginx/nginx.conf
    environment:
      - TZ=${TIMEZONE}
-     - HF_ENDPOINT=https://huggingface.com
+     - HF_ENDPOINT=https://huggingface.co
    networks:
      - ragflow
    restart: always
@@ -1,5 +1,9 @@
# Conversation API Instruction

+<div align="center" style="margin-top:20px;margin-bottom:20px;">
+<img src="https://github.com/infiniflow/ragflow/assets/12318111/df0dcc3d-789a-44f7-89f1-7a5f044ab729" width="830"/>
+</div>
+
## Base URL
```buildoutcfg
https://demo.ragflow.io/v1/
```
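Tying this to the `set_conversation` change earlier in the diff: the route is now a GET and reads `user_id` from the query string, with the API token in the `Authorization` header. A hypothetical client sketch; the exact path under the base URL is an assumption pieced together from the route name and base URL above:

```python
import requests  # hypothetical client sketch, not an official example

resp = requests.get(
    "https://demo.ragflow.io/v1/api/new_conversation",       # assumed path
    headers={"Authorization": "Bearer YOUR_API_TOKEN"},      # token parsed server-side via split()[1]
    params={"user_id": "demo_user"},                         # user_id now travels as a query arg
)
print(resp.json())
```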
docs/faq.md (94 changes)

@@ -76,11 +76,15 @@ You can use Ollama to deploy local LLM. See [here](https://github.com/infiniflow

## Debugging

-### How to handle `WARNING: can't find /raglof/rag/res/borker.tm`?
+### `WARNING: can't find /raglof/rag/res/borker.tm`

Ignore this warning and continue. All system warnings can be ignored.

-### How to handle `Realtime synonym is disabled, since no redis connection`?
+### `dependency failed to start: container ragflow-mysql is unhealthy`
+
+`dependency failed to start: container ragflow-mysql is unhealthy` means that your MySQL container failed to start. If you are using a Mac with an M1/M2 chip, replace `mysql:5.7.18` with `mariadb:10.5.8` in **docker-compose-base.yml**.
+
+### `Realtime synonym is disabled, since no redis connection`

Ignore this warning and continue. All system warnings can be ignored.

@@ -90,7 +94,30 @@ Ignore this warning and continue. All system warnings can be ignored.

Parsing requests have to wait in queue due to limited server resources. We are currently enhancing our algorithms and increasing computing power.

-### How to handle `Index failure`?
+### Why does my document parsing stall at under one percent?
+
+
+
+If your RAGFlow is deployed *locally*, try the following:
+
+1. Check the log of your RAGFlow server to see if it is running properly:
+   ```bash
+   docker logs -f ragflow-server
+   ```
+2. Check if the **task_executor.py** process exists.
+3. Check if your RAGFlow server can access hf-mirror.com or huggingface.com.
+
+### `MaxRetryError: HTTPSConnectionPool(host='hf-mirror.com', port=443)`
+
+This error suggests that you do not have Internet access or are unable to connect to hf-mirror.com. Try the following:
+
+1. Manually download the resource files from [huggingface.co/InfiniFlow/deepdoc](https://huggingface.co/InfiniFlow/deepdoc) to your local folder **~/deepdoc**.
+2. Add a volume to **docker-compose.yml**, for example:
+   ```
+   - ~/deepdoc:/ragflow/rag/res/deepdoc
+   ```
+
+### `Index failure`

An index failure usually indicates an unavailable Elasticsearch service.

@@ -108,13 +135,13 @@ $ docker ps

*The system displays the following if all your RAGFlow components are running properly:*

```
-5bc45806b680 infiniflow/ragflow:v1.0 "./entrypoint.sh" 11 hours ago Up 11 hours 0.0.0.0:80->80/tcp, :::80->80/tcp, 0.0.0.0:443->443/tcp, :::443->443/tcp, 0.0.0.0:9380->9380/tcp, :::9380->9380/tcp ragflow-server
+5bc45806b680 infiniflow/ragflow:v0.3.0 "./entrypoint.sh" 11 hours ago Up 11 hours 0.0.0.0:80->80/tcp, :::80->80/tcp, 0.0.0.0:443->443/tcp, :::443->443/tcp, 0.0.0.0:9380->9380/tcp, :::9380->9380/tcp ragflow-server
91220e3285dd docker.elastic.co/elasticsearch/elasticsearch:8.11.3 "/bin/tini -- /usr/l…" 11 hours ago Up 11 hours (healthy) 9300/tcp, 0.0.0.0:9200->9200/tcp, :::9200->9200/tcp ragflow-es-01
d8c86f06c56b mysql:5.7.18 "docker-entrypoint.s…" 7 days ago Up 16 seconds (healthy) 0.0.0.0:3306->3306/tcp, :::3306->3306/tcp ragflow-mysql
cd29bcb254bc quay.io/minio/minio:RELEASE.2023-12-20T01-00-02Z "/usr/bin/docker-ent…" 2 weeks ago Up 11 hours 0.0.0.0:9001->9001/tcp, :::9001->9001/tcp, 0.0.0.0:9000->9000/tcp, :::9000->9000/tcp ragflow-minio
```

-### How to handle `Exception: Can't connect to ES cluster`?
+### `Exception: Can't connect to ES cluster`

1. Check the status of your Elasticsearch component:

@@ -142,12 +169,67 @@ $ docker ps
```

-### How to handle `{"data":null,"retcode":100,"retmsg":"<NotFound '404: Not Found'>"}`?
+### `{"data":null,"retcode":100,"retmsg":"<NotFound '404: Not Found'>"}`

Your IP address or port number may be incorrect. If you are using the default configurations, enter http://<IP_OF_YOUR_MACHINE> (**NOT `localhost`, NOT 9380, AND NO PORT NUMBER REQUIRED!**) in your browser. This should work.

+### `Ollama - Mistral instance running at 127.0.0.1:11434 but cannot add Ollama as model in RagFlow`
+
+A correct Ollama IP address and port is crucial to adding models to Ollama:
+
+- If you are on demo.ragflow.io, ensure that the server hosting Ollama has a publicly accessible IP address. Note that 127.0.0.1 is not a publicly accessible IP address.
+- If you deploy RAGFlow locally, ensure that Ollama and RAGFlow are in the same LAN and can communicate with each other.
+
+### Do you offer examples of using deepdoc to parse PDF or other files?
+
+Yes, we do. See the Python files under the **rag/app** folder.
+
+### Why did I fail to upload a 10MB+ file to my locally deployed RAGFlow?
+
+You probably forgot to update the **MAX_CONTENT_LENGTH** environment variable:
+
+1. Add environment variable `MAX_CONTENT_LENGTH` to **ragflow/docker/.env**:
+   ```
+   MAX_CONTENT_LENGTH=100000000
+   ```
+2. Update **docker-compose.yml**:
+   ```
+   environment:
+     - MAX_CONTENT_LENGTH=${MAX_CONTENT_LENGTH}
+   ```
+3. Restart the RAGFlow server:
+   ```
+   docker compose up ragflow -d
+   ```
+   *Now you should be able to upload files of sizes less than 100MB.*
+
+### `Table 'rag_flow.document' doesn't exist`
+
+This exception occurs when starting up the RAGFlow server. Try the following:
+
+1. Prolong the sleep time: Go to **docker/entrypoint.sh**, locate line 26, and replace `sleep 60` with `sleep 280`.
+2. If using Windows, ensure that the **entrypoint.sh** has LF end-lines.
+3. Go to **docker/docker-compose.yml**, add the following:
+   ```
+   ./entrypoint.sh:/ragflow/entrypoint.sh
+   ```
+4. Change directory:
+   ```bash
+   cd docker
+   ```
+5. Stop the RAGFlow server:
+   ```bash
+   docker compose stop
+   ```
+6. Restart the RAGFlow server:
+   ```bash
+   docker compose up
+   ```
+
+### `hint : 102 Fail to access model Connection error`
+
+
+
+1. Ensure that the RAGFlow server can access the base URL.
+2. Do not forget to append **/v1/** to **http://IP:port**: **http://IP:port/v1/**
@@ -31,7 +31,7 @@ $ docker exec -it ollama ollama run mistral
<img src="https://github.com/infiniflow/ragflow/assets/12318111/a9df198a-226d-4f30-b8d7-829f00256d46" width="1300"/>
</div>

-> Base URL: Enter the base URL where the Ollama service is accessible, like, http://<your-ollama-endpoint-domain>:11434
+> Base URL: Enter the base URL where the Ollama service is accessible, like, `http://<your-ollama-endpoint-domain>:11434`.

- Use Ollama Models.

@@ -31,7 +31,7 @@ $ xinference launch -u mistral --model-name mistral-v0.1 --size-in-billions 7 --
<img src="https://github.com/infiniflow/ragflow/assets/12318111/bcbf4d7a-ade6-44c7-ad5f-0a92c8a73789" width="1300"/>
</div>

-> Base URL: Enter the base URL where the Xinference service is accessible, like, http://<your-xinference-endpoint-domain>:9997/v1
+> Base URL: Enter the base URL where the Xinference service is accessible, like, `http://<your-xinference-endpoint-domain>:9997/v1`.

- Use Xinference Models.
printEnvironment.sh (new file, 67 lines)

@@ -0,0 +1,67 @@
+#!/bin/bash
+
+# This function is used to obtain distribution information
+get_distro_info() {
+    local distro_id=$(lsb_release -i -s 2>/dev/null)
+    local distro_version=$(lsb_release -r -s 2>/dev/null)
+    local kernel_version=$(uname -r)
+
+    # If lsb_release is not available, try parsing the /etc/*-release files
+    if [ -z "$distro_id" ] || [ -z "$distro_version" ]; then
+        distro_id=$(grep '^ID=' /etc/*-release | cut -d= -f2 | tr -d '"')
+        distro_version=$(grep '^VERSION_ID=' /etc/*-release | cut -d= -f2 | tr -d '"')
+    fi
+
+    echo "$distro_id $distro_version (Kernel version: $kernel_version)"
+}
+
+# get Git repo name
+git_repo_name=''
+if git rev-parse --is-inside-work-tree > /dev/null 2>&1; then
+    git_repo_name=$(basename "$(git rev-parse --show-toplevel)")
+    if [ $? -ne 0 ]; then
+        git_repo_name="(Can't get repo name)"
+    fi
+else
+    git_repo_name="Not a Git repo"
+fi
+
+# get CPU type
+cpu_model=$(uname -m)
+
+# get memory size
+memory_size=$(free -h | grep Mem | awk '{print $2}')
+
+# get docker version
+docker_version=''
+if command -v docker &> /dev/null; then
+    docker_version=$(docker --version | cut -d ' ' -f3)
+else
+    docker_version="Docker not installed"
+fi
+
+# get python version
+python_version=''
+if command -v python &> /dev/null; then
+    python_version=$(python --version | cut -d ' ' -f2)
+else
+    python_version="Python not installed"
+fi
+
+# Print all information
+echo "Current Repo: $git_repo_name"
+
+# get Commit ID
+git_version=$(git log -1 --pretty=format:'%h')
+
+if [ -z "$git_version" ]; then
+    echo "Commit Id: The current directory is not a Git repository, or the Git command is not installed."
+else
+    echo "Commit Id: $git_version"
+fi
+
+echo "Operating system: $(get_distro_info)"
+echo "CPU Type: $cpu_model"
+echo "Memory: $memory_size"
+echo "Docker Version: $docker_version"
+echo "Python Version: $python_version"
@@ -15,7 +15,8 @@ import re
from io import BytesIO

from rag.nlp import bullets_category, is_english, tokenize, remove_contents_table, \
-   hierarchical_merge, make_colon_as_title, naive_merge, random_choices, tokenize_table, add_positions, tokenize_chunks
+   hierarchical_merge, make_colon_as_title, naive_merge, random_choices, tokenize_table, add_positions, \
+   tokenize_chunks, find_codec
from rag.nlp import huqie
from deepdoc.parser import PdfParser, DocxParser, PlainParser

@@ -87,7 +88,8 @@ def chunk(filename, binary=None, from_page=0, to_page=100000,
        callback(0.1, "Start to parse.")
        txt = ""
        if binary:
-           txt = binary.decode("utf-8")
+           encoding = find_codec(binary)
+           txt = binary.decode(encoding)
        else:
            with open(filename, "r") as f:
                while True:

@@ -17,7 +17,7 @@ from docx import Document

from api.db import ParserType
from rag.nlp import bullets_category, is_english, tokenize, remove_contents_table, hierarchical_merge, \
-   make_colon_as_title, add_positions, tokenize_chunks
+   make_colon_as_title, add_positions, tokenize_chunks, find_codec
from rag.nlp import huqie
from deepdoc.parser import PdfParser, DocxParser, PlainParser
from rag.settings import cron_logger

@@ -111,7 +111,8 @@ def chunk(filename, binary=None, from_page=0, to_page=100000,
        callback(0.1, "Start to parse.")
        txt = ""
        if binary:
-           txt = binary.decode("utf-8")
+           encoding = find_codec(binary)
+           txt = binary.decode(encoding)
        else:
            with open(filename, "r") as f:
                while True:

@@ -14,8 +14,7 @@ from io import BytesIO
from docx import Document
import re
from deepdoc.parser.pdf_parser import PlainParser
-from rag.app import laws
-from rag.nlp import huqie, is_english, tokenize, naive_merge, tokenize_table, add_positions, tokenize_chunks
+from rag.nlp import huqie, naive_merge, tokenize_table, tokenize_chunks, find_codec
from deepdoc.parser import PdfParser, ExcelParser, DocxParser
from rag.settings import cron_logger

@@ -140,7 +139,8 @@ def chunk(filename, binary=None, from_page=0, to_page=100000,
        callback(0.1, "Start to parse.")
        txt = ""
        if binary:
-           txt = binary.decode("utf-8")
+           encoding = find_codec(binary)
+           txt = binary.decode(encoding)
        else:
            with open(filename, "r") as f:
                while True:
@@ -12,7 +12,7 @@
#
import re
from rag.app import laws
-from rag.nlp import huqie, tokenize
+from rag.nlp import huqie, tokenize, find_codec
from deepdoc.parser import PdfParser, ExcelParser, PlainParser


@@ -82,7 +82,8 @@ def chunk(filename, binary=None, from_page=0, to_page=100000,
        callback(0.1, "Start to parse.")
        txt = ""
        if binary:
-           txt = binary.decode("utf-8")
+           encoding = find_codec(binary)
+           txt = binary.decode(encoding)
        else:
            with open(filename, "r") as f:
                while True:

@@ -15,7 +15,7 @@ from copy import deepcopy
from io import BytesIO
from nltk import word_tokenize
from openpyxl import load_workbook
-from rag.nlp import is_english, random_choices
+from rag.nlp import is_english, random_choices, find_codec
from rag.nlp import huqie
from deepdoc.parser import ExcelParser

@@ -106,7 +106,8 @@ def chunk(filename, binary=None, lang="Chinese", callback=None, **kwargs):
        callback(0.1, "Start to parse.")
        txt = ""
        if binary:
-           txt = binary.decode("utf-8")
+           encoding = find_codec(binary)
+           txt = binary.decode(encoding)
        else:
            with open(filename, "r") as f:
                while True:
@@ -20,7 +20,7 @@ from openpyxl import load_workbook
from dateutil.parser import parse as datetime_parse

from api.db.services.knowledgebase_service import KnowledgebaseService
-from rag.nlp import huqie, is_english, tokenize
+from rag.nlp import huqie, is_english, tokenize, find_codec
from deepdoc.parser import ExcelParser

@@ -147,7 +147,8 @@ def chunk(filename, binary=None, from_page=0, to_page=10000000000,
        callback(0.1, "Start to parse.")
        txt = ""
        if binary:
-           txt = binary.decode("utf-8")
+           encoding = find_codec(binary)
+           txt = binary.decode(encoding)
        else:
            with open(filename, "r") as f:
                while True:

@@ -199,7 +200,7 @@ def chunk(filename, binary=None, from_page=0, to_page=10000000000,
            re.sub(
                r"(/.*|([^()]+?)|\([^()]+?\))",
                "",
-               n),
+               str(n)),
            '_')[0] for n in clmns]
    clmn_tys = []
    for j in range(len(clmns)):

@@ -208,7 +209,7 @@ def chunk(filename, binary=None, from_page=0, to_page=10000000000,
        df[clmns[j]] = cln
        if ty == "text":
            txts.extend([str(c) for c in cln if c])
-   clmns_map = [(py_clmns[i].lower() + fieds_map[clmn_tys[i]], clmns[i].replace("_", " "))
+   clmns_map = [(py_clmns[i].lower() + fieds_map[clmn_tys[i]], str(clmns[i]).replace("_", " "))
                 for i in range(len(clmns))]

    eng = lang.lower() == "english"  # is_english(txts)

@@ -223,8 +224,8 @@ def chunk(filename, binary=None, from_page=0, to_page=10000000000,
            continue
        if not str(row[clmns[j]]):
            continue
-       #if pd.isna(row[clmns[j]]):
-       #    continue
+       if pd.isna(row[clmns[j]]):
+           continue
        fld = clmns_map[j][0]
        d[fld] = row[clmns[j]] if clmn_tys[j] != "text" else huqie.qie(
            row[clmns[j]])
@@ -24,8 +24,8 @@ EmbeddingModel = {
    "Xinference": XinferenceEmbed,
    "Tongyi-Qianwen": HuEmbedding, #QWenEmbed,
    "ZHIPU-AI": ZhipuEmbed,
    "Moonshot": HuEmbedding,
-   "FastEmbed": FastEmbed
+   "FastEmbed": FastEmbed,
+   "QAnything": QAnythingEmbed
}
@@ -14,13 +14,14 @@
# limitations under the License.
#
+from typing import Optional

from huggingface_hub import snapshot_download
from zhipuai import ZhipuAI
import os
from abc import ABC
from ollama import Client
import dashscope
from openai import OpenAI
-from fastembed import TextEmbedding
from FlagEmbedding import FlagModel
import torch
import numpy as np

@@ -28,16 +29,20 @@ import numpy as np
from api.utils.file_utils import get_project_base_directory
from rag.utils import num_tokens_from_string


try:
    flag_model = FlagModel(os.path.join(
-       get_project_base_directory(),
-       "rag/res/bge-large-zh-v1.5"),
+           get_project_base_directory(),
+           "rag/res/bge-large-zh-v1.5"),
        query_instruction_for_retrieval="为这个句子生成表示以用于检索相关文章:",
        use_fp16=torch.cuda.is_available())
-except Exception as e:
-   flag_model = FlagModel("BAAI/bge-large-zh-v1.5",
-                          query_instruction_for_retrieval="为这个句子生成表示以用于检索相关文章:",
-                          use_fp16=torch.cuda.is_available())
+except Exception as e:
+   model_dir = snapshot_download(repo_id="BAAI/bge-large-zh-v1.5",
+                                 local_dir=os.path.join(get_project_base_directory(), "rag/res/bge-large-zh-v1.5"),
+                                 local_dir_use_symlinks=False)
+   flag_model = FlagModel(model_dir,
+                          query_instruction_for_retrieval="为这个句子生成表示以用于检索相关文章:",
+                          use_fp16=torch.cuda.is_available())


class Base(ABC):

@@ -82,8 +87,10 @@ class HuEmbedding(Base):


class OpenAIEmbed(Base):
-   def __init__(self, key, model_name="text-embedding-ada-002", base_url="https://api.openai.com/v1"):
-       if not base_url: base_url="https://api.openai.com/v1"
+   def __init__(self, key, model_name="text-embedding-ada-002",
+                base_url="https://api.openai.com/v1"):
+       if not base_url:
+           base_url = "https://api.openai.com/v1"
        self.client = OpenAI(api_key=key, base_url=base_url)
        self.model_name = model_name

@@ -142,7 +149,7 @@ class ZhipuEmbed(Base):
        tks_num = 0
        for txt in texts:
            res = self.client.embeddings.create(input=txt,
-               model=self.model_name)
+                                               model=self.model_name)
            arr.append(res.data[0].embedding)
            tks_num += res.usage.total_tokens
        return np.array(arr), tks_num

@@ -163,14 +170,14 @@ class OllamaEmbed(Base):
        tks_num = 0
        for txt in texts:
            res = self.client.embeddings(prompt=txt,
-               model=self.model_name)
+                                        model=self.model_name)
            arr.append(res["embedding"])
            tks_num += 128
        return np.array(arr), tks_num

    def encode_queries(self, text):
        res = self.client.embeddings(prompt=text,
-           model=self.model_name)
+                                    model=self.model_name)
        return np.array(res["embedding"]), 128

@@ -183,10 +190,12 @@ class FastEmbed(Base):
                 threads: Optional[int] = None,
                 **kwargs,
                 ):
+       from fastembed import TextEmbedding
        self._model = TextEmbedding(model_name, cache_dir, threads, **kwargs)

    def encode(self, texts: list, batch_size=32):
-       # Using the internal tokenizer to encode the texts and get the total number of tokens
+       # Using the internal tokenizer to encode the texts and get the total
+       # number of tokens
        encodings = self._model.model.tokenizer.encode_batch(texts)
        total_tokens = sum(len(e) for e in encodings)

@@ -195,7 +204,8 @@ class FastEmbed(Base):
        return np.array(embeddings), total_tokens

    def encode_queries(self, text: str):
-       # Using the internal tokenizer to encode the texts and get the total number of tokens
+       # Using the internal tokenizer to encode the texts and get the total
+       # number of tokens
        encoding = self._model.model.tokenizer.encode(text)
        embedding = next(self._model.query_embed(text)).tolist()

@@ -218,3 +228,33 @@ class XinferenceEmbed(Base):
                                    model=self.model_name)
        return np.array(res.data[0].embedding), res.usage.total_tokens


+class QAnythingEmbed(Base):
+   _client = None
+
+   def __init__(self, key=None, model_name="maidalun1020/bce-embedding-base_v1", **kwargs):
+       from BCEmbedding import EmbeddingModel as qanthing
+       if not QAnythingEmbed._client:
+           try:
+               print("LOADING BCE...")
+               QAnythingEmbed._client = qanthing(model_name_or_path=os.path.join(
+                   get_project_base_directory(),
+                   "rag/res/bce-embedding-base_v1"))
+           except Exception as e:
+               QAnythingEmbed._client = qanthing(
+                   model_name_or_path=model_name.replace(
+                       "maidalun1020", "InfiniFlow"))
+
+   def encode(self, texts: list, batch_size=10):
+       res = []
+       token_count = 0
+       for t in texts:
+           token_count += num_tokens_from_string(t)
+       for i in range(0, len(texts), batch_size):
+           embds = QAnythingEmbed._client.encode(texts[i:i + batch_size])
+           res.extend(embds)
+       return np.array(res), token_count
+
+   def encode_queries(self, text):
+       embds = QAnythingEmbed._client.encode([text])
+       return np.array(embds[0]), num_tokens_from_string(text)
@@ -6,6 +6,35 @@ from . import huqie
import re
import copy

+all_codecs = [
+    'utf-8', 'gb2312', 'gbk', 'utf_16', 'ascii', 'big5', 'big5hkscs',
+    'cp037', 'cp273', 'cp424', 'cp437',
+    'cp500', 'cp720', 'cp737', 'cp775', 'cp850', 'cp852', 'cp855', 'cp856', 'cp857',
+    'cp858', 'cp860', 'cp861', 'cp862', 'cp863', 'cp864', 'cp865', 'cp866', 'cp869',
+    'cp874', 'cp875', 'cp932', 'cp949', 'cp950', 'cp1006', 'cp1026', 'cp1125',
+    'cp1140', 'cp1250', 'cp1251', 'cp1252', 'cp1253', 'cp1254', 'cp1255', 'cp1256',
+    'cp1257', 'cp1258', 'euc_jp', 'euc_jis_2004', 'euc_jisx0213', 'euc_kr',
+    'gb2312', 'gb18030', 'hz', 'iso2022_jp', 'iso2022_jp_1', 'iso2022_jp_2',
+    'iso2022_jp_2004', 'iso2022_jp_3', 'iso2022_jp_ext', 'iso2022_kr', 'latin_1',
+    'iso8859_2', 'iso8859_3', 'iso8859_4', 'iso8859_5', 'iso8859_6', 'iso8859_7',
+    'iso8859_8', 'iso8859_9', 'iso8859_10', 'iso8859_11', 'iso8859_13',
+    'iso8859_14', 'iso8859_15', 'iso8859_16', 'johab', 'koi8_r', 'koi8_t', 'koi8_u',
+    'kz1048', 'mac_cyrillic', 'mac_greek', 'mac_iceland', 'mac_latin2', 'mac_roman',
+    'mac_turkish', 'ptcp154', 'shift_jis', 'shift_jis_2004', 'shift_jisx0213',
+    'utf_32', 'utf_32_be', 'utf_32_le', 'utf_16_be', 'utf_16_le', 'utf_7'
+]
+
+
+def find_codec(blob):
+    global all_codecs
+    for c in all_codecs:
+        try:
+            blob.decode(c)
+            return c
+        except Exception as e:
+            pass
+    return "utf-8"
+
+
BULLET_PATTERN = [[
    r"第[零一二三四五六七八九十百0-9]+(分?编|部分)",
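The new `find_codec` helper simply brute-forces the codec list: the first encoding that decodes the payload without raising wins, with `utf-8` as the fallback. This is what the chunkers earlier in the diff now call before decoding uploaded bytes. A self-contained sketch with a trimmed codec list and an invented GBK payload:

```python
def find_codec(blob: bytes) -> str:
    # Trimmed version of the helper above: first codec that decodes wins.
    for c in ["utf-8", "gb2312", "gbk", "utf_16", "ascii", "big5"]:
        try:
            blob.decode(c)
            return c
        except Exception:
            pass
    return "utf-8"

blob = "搜索引擎".encode("gbk")  # stand-in for an uploaded file's bytes
encoding = find_codec(blob)     # utf-8 fails on these bytes, gb2312 succeeds
print(encoding, blob.decode(encoding))  # gb2312 搜索引擎
```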
@@ -8,6 +8,7 @@ import re
import string
import sys
from hanziconv import HanziConv
+from huggingface_hub import snapshot_download
from nltk import word_tokenize
from nltk.stem import PorterStemmer, WordNetLemmatizer
from api.utils.file_utils import get_project_base_directory
@ -46,7 +46,7 @@ class Dealer:
|
||||
"k": topk,
|
||||
"similarity": sim,
|
||||
"num_candidates": topk * 2,
|
||||
"query_vector": list(qv)
|
||||
"query_vector": [float(v) for v in qv]
|
||||
}
|
||||
|
||||
def search(self, req, idxnm, emb_mdl=None):
|
||||
@ -68,7 +68,7 @@ class Dealer:
|
||||
pg = int(req.get("page", 1)) - 1
|
||||
ps = int(req.get("size", 1000))
|
||||
topk = int(req.get("topk", 1024))
|
||||
src = req.get("fields", ["docnm_kwd", "content_ltks", "kb_id", "img_id",
|
||||
src = req.get("fields", ["docnm_kwd", "content_ltks", "kb_id", "img_id", "title_tks", "important_kwd",
|
||||
"image_id", "doc_id", "q_512_vec", "q_768_vec", "position_int",
|
||||
"q_1024_vec", "q_1536_vec", "available_int", "content_with_weight"])
|
||||
|
||||
@ -237,7 +237,7 @@ class Dealer:
|
||||
pieces_.append(t)
|
||||
es_logger.info("{} => {}".format(answer, pieces_))
|
||||
if not pieces_:
|
||||
return answer
|
||||
return answer, set([])
|
||||
|
||||
ans_v, _ = embd_mdl.encode(pieces_)
|
||||
assert len(ans_v[0]) == len(chunk_v[0]), "The dimension of query and chunk do not match: {} vs. {}".format(
|
||||
@@ -289,8 +289,18 @@ class Dealer:
            sres.field[i].get("q_%d_vec" % len(sres.query_vector), "\t".join(["0"] * len(sres.query_vector)))) for i in sres.ids]
        if not ins_embd:
            return [], [], []
        ins_tw = [sres.field[i][cfield].split(" ")
                  for i in sres.ids]
        for i in sres.ids:
            if isinstance(sres.field[i].get("important_kwd", []), str):
                sres.field[i]["important_kwd"] = [sres.field[i]["important_kwd"]]
        ins_tw = []
        for i in sres.ids:
            content_ltks = sres.field[i][cfield].split(" ")
            title_tks = [t for t in sres.field[i].get("title_tks", "").split(" ") if t]
            important_kwd = sres.field[i].get("important_kwd", [])
            tks = content_ltks + title_tks + important_kwd
            ins_tw.append(tks)

        sim, tksim, vtsim = self.qryr.hybrid_similarity(sres.query_vector,
                                                        ins_embd,
                                                        keywords,
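The rerank hunk above (the single-comprehension ins_tw is replaced by the loop) broadens what counts as a token match: title tokens and user-tagged keywords now weigh into hybrid similarity alongside content tokens. A standalone sketch of the new assembly, with a made-up record:

    field = {
        "content_ltks": "deep learning model",
        "title_tks": "survey paper",
        "important_kwd": "neural-networks",  # may arrive as a bare string
    }
    kwd = field.get("important_kwd", [])
    if isinstance(kwd, str):                 # normalized to a list, as in the diff
        kwd = [kwd]
    tks = (field["content_ltks"].split(" ")
           + [t for t in field.get("title_tks", "").split(" ") if t]
           + kwd)
    # tks == ['deep', 'learning', 'model', 'survey', 'paper', 'neural-networks']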
@@ -368,7 +378,7 @@ class Dealer:

    def sql_retrieval(self, sql, fetch_size=128, format="json"):
        from api.settings import chat_logger
        sql = re.sub(r"[ ]+", " ", sql)
        sql = re.sub(r"[ `]+", " ", sql)
        sql = sql.replace("%", "")
        es_logger.info(f"Get es sql: {sql}")
        replaces = []
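The widened character class above (the second re.sub replaces the first) also strips backticks, which LLM-generated SQL tends to wrap around identifiers and which Elasticsearch's SQL dialect does not accept. Illustration (table name made up):

    import re

    sql = "SELECT `docnm_kwd` FROM   `ragflow_doc` LIMIT 10"
    sql = re.sub(r"[ `]+", " ", sql)
    sql = sql.replace("%", "")
    # -> 'SELECT docnm_kwd FROM ragflow_doc LIMIT 10'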
@@ -19,6 +19,7 @@ import logging
import os
import hashlib
import copy
import random
import re
import sys
import time
@@ -92,6 +93,7 @@ def set_progress(task_id, from_page=0, to_page=-1,

def collect(comm, mod, tm):
    tasks = TaskService.get_tasks(tm, mod, comm)
    #print(tasks)
    if len(tasks) == 0:
        time.sleep(1)
        return pd.DataFrame()
@@ -243,9 +245,11 @@ def main(comm, mod):
    tmf = open(tm_fnm, "a+")
    for _, r in rows.iterrows():
        callback = partial(set_progress, r["id"], r["from_page"], r["to_page"])
        #callback(random.random()/10., "Task has been received.")
        try:
            embd_mdl = LLMBundle(r["tenant_id"], LLMType.EMBEDDING)
            embd_mdl = LLMBundle(r["tenant_id"], LLMType.EMBEDDING, llm_name=r["embd_id"], lang=r["language"])
        except Exception as e:
            traceback.print_stack(e)
            callback(prog=-1, msg=str(e))
            continue

@@ -299,9 +303,8 @@ if __name__ == "__main__":
    peewee_logger.addHandler(database_logger.handlers[0])
    peewee_logger.setLevel(database_logger.level)

    from mpi4py import MPI

    comm = MPI.COMM_WORLD
    #from mpi4py import MPI
    #comm = MPI.COMM_WORLD
    while True:
        main(int(sys.argv[2]), int(sys.argv[1]))
        close_connection()
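Two operational changes ride along in the executor hunks above: the LLMBundle line is replaced so the embedding model is resolved per task row (llm_name=r["embd_id"], lang=r["language"]) instead of assuming the tenant default, and the mpi4py block is commented out, so the two integers in the while-loop now come straight from the command line. Under that reading, a launcher no longer needs mpirun:

    # hypothetical launcher; script path and argument order assumed from the loop above
    # python rag/svr/task_executor.py 0 1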
@@ -2,6 +2,7 @@ import re
import json
import time
import copy

import elasticsearch
from elastic_transport import ConnectionTimeout
from elasticsearch import Elasticsearch
@@ -19,7 +19,7 @@ cryptography==42.0.5
dashscope==1.14.1
datasets==2.17.1
datrie==0.8.2
demjson==2.2.4
demjson3==3.0.6
dill==0.3.8
distro==1.9.0
elastic-transport==8.12.0
@@ -132,3 +132,5 @@ xpinyin==0.7.6
xxhash==3.4.1
yarl==1.9.4
zhipuai==2.0.1
BCEmbedding
loguru==0.7.2
345  web/package-lock.json  generated

The lockfile is regenerated for the new web dependencies. Its hunks add react-copy-to-clipboard 5.1.0 (plus @types/react-copy-to-clipboard 5.0.7 as a dev dependency) and recharts 2.12.4, together with their transitive packages: clsx, decimal.js-light, dom-helpers, eventemitter3, fast-equals, internmap, react-smooth, react-transition-group, recharts-scale, tiny-invariant (no longer dev/peer-only), victory-vendor, and the d3-array / d3-color / d3-ease / d3-format / d3-interpolate / d3-path / d3-scale / d3-shape / d3-time / d3-time-format / d3-timer family with matching @types/d3-* entries; moment is marked devOptional.
web/package.json

@@ -17,19 +17,21 @@
    "antd": "^5.12.7",
    "axios": "^1.6.3",
    "classnames": "^2.5.1",
    "dayjs": "^1.11.10",
    "i18next": "^23.7.16",
    "js-base64": "^3.7.5",
    "jsencrypt": "^3.3.2",
    "lodash": "^4.17.21",
    "moment": "^2.30.1",
    "rc-tween-one": "^3.0.6",
    "react-chat-elements": "^12.0.13",
    "react-copy-to-clipboard": "^5.1.0",
    "react-i18next": "^14.0.0",
    "react-infinite-scroll-component": "^6.1.0",
    "react-markdown": "^9.0.1",
    "react-pdf-highlighter": "^6.1.0",
    "react-string-replace": "^1.1.1",
    "react-syntax-highlighter": "^15.5.0",
    "recharts": "^2.12.4",
    "remark-gfm": "^4.0.0",
    "umi": "^4.0.90",
    "umi-request": "^1.4.0",
@@ -40,6 +42,7 @@
    "@react-dev-inspector/umi4-plugin": "^2.0.1",
    "@types/lodash": "^4.14.202",
    "@types/react": "^18.0.33",
    "@types/react-copy-to-clipboard": "^5.0.7",
    "@types/react-dom": "^18.0.11",
    "@types/react-syntax-highlighter": "^15.5.11",
    "@types/uuid": "^9.0.8",
@@ -6,6 +6,21 @@ import zh_HK from 'antd/locale/zh_HK';
import React, { ReactNode, useEffect, useState } from 'react';
import storage from './utils/authorizationUtil';

import dayjs from 'dayjs';
import advancedFormat from 'dayjs/plugin/advancedFormat';
import customParseFormat from 'dayjs/plugin/customParseFormat';
import localeData from 'dayjs/plugin/localeData';
import weekday from 'dayjs/plugin/weekday';
import weekOfYear from 'dayjs/plugin/weekOfYear';
import weekYear from 'dayjs/plugin/weekYear';

dayjs.extend(customParseFormat);
dayjs.extend(advancedFormat);
dayjs.extend(weekday);
dayjs.extend(localeData);
dayjs.extend(weekOfYear);
dayjs.extend(weekYear);

const AntLanguageMap = {
  en: enUS,
  zh: zhCN,
(Eight image assets replaced; only their sizes changed: 545 KiB → 406 KiB, 390 KiB → 388 KiB, 321 KiB → 467 KiB, 2.0 MiB → 1.1 MiB, 311 KiB → 966 KiB, 599 KiB → 515 KiB, 872 KiB → 196 KiB, 366 KiB → 296 KiB.)
27  web/src/components/copy-to-clipboard.tsx  Normal file

@@ -0,0 +1,27 @@
import { useTranslate } from '@/hooks/commonHooks';
import { CheckOutlined, CopyOutlined } from '@ant-design/icons';
import { Tooltip } from 'antd';
import { useState } from 'react';
import { CopyToClipboard as Clipboard, Props } from 'react-copy-to-clipboard';

const CopyToClipboard = ({ text }: Props) => {
  const [copied, setCopied] = useState(false);
  const { t } = useTranslate('common');

  const handleCopy = () => {
    setCopied(true);
    setTimeout(() => {
      setCopied(false);
    }, 2000);
  };

  return (
    <Tooltip title={copied ? t('copied') : t('copy')}>
      <Clipboard text={text} onCopy={handleCopy}>
        {copied ? <CheckOutlined /> : <CopyOutlined />}
      </Clipboard>
    </Tooltip>
  );
};

export default CopyToClipboard;
36  web/src/components/highlight-markdown/index.tsx  Normal file

@@ -0,0 +1,36 @@
import Markdown from 'react-markdown';
import SyntaxHighlighter from 'react-syntax-highlighter';
import remarkGfm from 'remark-gfm';

const HightLightMarkdown = ({
  children,
}: {
  children: string | null | undefined;
}) => {
  return (
    <Markdown
      remarkPlugins={[remarkGfm]}
      components={
        {
          code(props: any) {
            const { children, className, node, ...rest } = props;
            const match = /language-(\w+)/.exec(className || '');
            return match ? (
              <SyntaxHighlighter {...rest} PreTag="div" language={match[1]}>
                {String(children).replace(/\n$/, '')}
              </SyntaxHighlighter>
            ) : (
              <code {...rest} className={className}>
                {children}
              </code>
            );
          },
        } as any
      }
    >
      {children}
    </Markdown>
  );
};

export default HightLightMarkdown;
89  web/src/components/line-chart/index.tsx  Normal file

@@ -0,0 +1,89 @@
import {
  CartesianGrid,
  Legend,
  Line,
  LineChart,
  ResponsiveContainer,
  Tooltip,
  XAxis,
  YAxis,
} from 'recharts';
import { CategoricalChartProps } from 'recharts/types/chart/generateCategoricalChart';

const data = [
  { name: 'Page A', uv: 4000, pv: 2400 },
  { name: 'Page B', uv: 3000, pv: 1398 },
  { name: 'Page C', uv: 2000, pv: 9800 },
  { name: 'Page D', uv: 2780, pv: 3908 },
  { name: 'Page E', uv: 1890, pv: 4800 },
  { name: 'Page F', uv: 2390, pv: 3800 },
  { name: 'Page G', uv: 3490, pv: 4300 },
];

interface IProps extends CategoricalChartProps {
  data?: Array<{ xAxis: string; yAxis: number }>;
  showLegend?: boolean;
}

const RagLineChart = ({ data, showLegend = false }: IProps) => {
  return (
    <ResponsiveContainer width="100%" height="100%">
      <LineChart
        // width={500}
        // height={300}
        data={data}
        margin={
          {
            // top: 5,
            // right: 30,
            // left: 20,
            // bottom: 10,
          }
        }
      >
        <CartesianGrid strokeDasharray="3 3" />
        <XAxis dataKey="xAxis" />
        <YAxis />
        <Tooltip />
        {showLegend && <Legend />}
        <Line
          type="monotone"
          dataKey="yAxis"
          stroke="#8884d8"
          activeDot={{ r: 8 }}
        />
        {/* <Line type="monotone" dataKey="uv" stroke="#82ca9d" /> */}
      </LineChart>
    </ResponsiveContainer>
  );
};

export default RagLineChart;
@@ -1,6 +1,19 @@
@import url(./inter.less);

html {
  height: 100%;
}

body {
  font-family: Inter;
  margin: 0;
  height: 100%;
}

#root {
  height: 100%;
}

.ant-app {
  height: 100%;
}
@@ -1,4 +1,9 @@
import { IConversation, IDialog } from '@/interfaces/database/chat';
import {
  IConversation,
  IDialog,
  IStats,
  IToken,
} from '@/interfaces/database/chat';
import { useCallback } from 'react';
import { useDispatch, useSelector } from 'umi';
@@ -164,3 +169,134 @@ export const useCompleteConversation = () => {

  return completeConversation;
};

// #region API provided for external calls

export const useCreateToken = (dialogId: string) => {
  const dispatch = useDispatch();

  const createToken = useCallback(() => {
    return dispatch<any>({
      type: 'chatModel/createToken',
      payload: { dialogId },
    });
  }, [dispatch, dialogId]);

  return createToken;
};

export const useListToken = () => {
  const dispatch = useDispatch();

  const listToken = useCallback(
    (dialogId: string) => {
      return dispatch<any>({
        type: 'chatModel/listToken',
        payload: { dialogId },
      });
    },
    [dispatch],
  );

  return listToken;
};

export const useSelectTokenList = () => {
  const tokenList: IToken[] = useSelector(
    (state: any) => state.chatModel.tokenList,
  );

  return tokenList;
};

export const useRemoveToken = () => {
  const dispatch = useDispatch();

  const removeToken = useCallback(
    (payload: { tenantId: string; dialogId: string; tokens: string[] }) => {
      return dispatch<any>({
        type: 'chatModel/removeToken',
        payload: payload,
      });
    },
    [dispatch],
  );

  return removeToken;
};

export const useFetchStats = () => {
  const dispatch = useDispatch();

  const fetchStats = useCallback(
    (payload: any) => {
      return dispatch<any>({
        type: 'chatModel/getStats',
        payload,
      });
    },
    [dispatch],
  );

  return fetchStats;
};

export const useSelectStats = () => {
  const stats: IStats = useSelector((state: any) => state.chatModel.stats);

  return stats;
};

//#endregion

//#region shared chat

export const useCreateSharedConversation = () => {
  const dispatch = useDispatch();

  const createSharedConversation = useCallback(
    (userId?: string) => {
      return dispatch<any>({
        type: 'chatModel/createExternalConversation',
        payload: { userId },
      });
    },
    [dispatch],
  );

  return createSharedConversation;
};

export const useFetchSharedConversation = () => {
  const dispatch = useDispatch();

  const fetchSharedConversation = useCallback(
    (conversationId: string) => {
      return dispatch<any>({
        type: 'chatModel/getExternalConversation',
        payload: conversationId,
      });
    },
    [dispatch],
  );

  return fetchSharedConversation;
};

export const useCompleteSharedConversation = () => {
  const dispatch = useDispatch();

  const completeSharedConversation = useCallback(
    (payload: any) => {
      return dispatch<any>({
        type: 'chatModel/completeExternalConversation',
        payload: payload,
      });
    },
    [dispatch],
  );

  return completeSharedConversation;
};

//#endregion
@@ -91,3 +91,21 @@ export interface Docagg {
//   term_similarity: number;
//   vector_similarity: number;
// }

export interface IToken {
  create_date: string;
  create_time: number;
  tenant_id: string;
  token: string;
  update_date?: any;
  update_time?: any;
}

export interface IStats {
  pv: [string, number][];
  uv: [string, number][];
  speed: [string, number][];
  tokens: [string, number][];
  round: [string, number][];
  thumb_up: [string, number][];
}
@@ -33,3 +33,12 @@
.pointerCursor() {
  cursor: pointer;
}

.clearCardBody() {
  :global {
    .ant-card-body {
      padding: 0;
      margin: 0;
    }
  }
}
@@ -20,6 +20,8 @@ export default {
    language: 'Language',
    languageMessage: 'Please input your language!',
    languagePlaceholder: 'select your language',
    copy: 'Copy',
    copied: 'Copied',
  },
  login: {
    login: 'Sign in',
@@ -169,7 +171,7 @@
    methodTitle: 'Chunking Method Description',
    methodExamples: 'Examples',
    methodExamplesDescription:
      'This visual guides is in order to make understanding easier for you.',
      'The following screenshots are presented to facilitate understanding.',
    dialogueExamplesTitle: 'Dialogue Examples',
    methodEmpty:
      'This will display a visual explanation of the knowledge base categories',
@@ -199,15 +201,27 @@
    presentation: `<p>The supported file formats are <b>PDF</b>, <b>PPTX</b>.</p><p>
      Every page will be treated as a chunk. And the thumbnail of every page will be stored.</p><p>
      <i>All the PPT files you uploaded will be chunked by using this method automatically, setting-up for every PPT file is not necessary.</i></p>`,
    qa: `<p><b>EXCEL</b> and <b>CSV/TXT</b> files are supported.</p><p>
      If the file is in excel format, there should be 2 columns question and answer without header.
      And question column is ahead of answer column.
      And it's O.K if it has multiple sheets as long as the columns are rightly composed.</p><p>

      If it's in csv format, it should be UTF-8 encoded. Use TAB as delimiter to separate question and answer.</p><p>

      <i>All the deformed lines will be ignored.
      Every pair of Q&A will be treated as a chunk.</i></p>`,
    qa: `
      <p>
        This chunk method supports <b>EXCEL</b> and <b>CSV/TXT</b> file formats.
      </p>
      <li>
        If the file is in <b>Excel</b> format, it should consist of two columns
        without headers: one for questions and the other for answers, with the
        question column preceding the answer column. Multiple sheets are
        acceptable as long as the columns are correctly structured.
      </li>
      <li>
        If the file is in <b>CSV/TXT</b> format, it must be UTF-8 encoded with TAB
        used as the delimiter to separate questions and answers.
      </li>
      <p>
        <i>
          Lines of texts that fail to follow the above rules will be ignored, and
          each Q&A pair will be considered a distinct chunk.
        </i>
      </p>
    `,
    resume: `<p>The supported file formats are <b>DOCX</b>, <b>PDF</b>, <b>TXT</b>.
      </p><p>
      The résumé comes in a variety of formats, just like a person’s personality, but we often have to organize them into structured data that makes it easy to search.
@@ -335,6 +349,31 @@
      'This sets the maximum length of the model’s output, measured in the number of tokens (words or pieces of words).',
    quote: 'Show Quote',
    quoteTip: 'Should the source of the original text be displayed?',
    overview: 'Chat Bot API',
    pv: 'Number of messages',
    uv: 'Active user number',
    speed: 'Token output speed',
    tokens: 'Consume the token number',
    round: 'Session Interaction Number',
    thumbUp: 'customer satisfaction',
    preview: 'Preview',
    embedded: 'Embedded',
    serviceApiEndpoint: 'Service API Endpoint',
    apiKey: 'Api Key',
    apiReference: 'API Documents',
    dateRange: 'Date Range:',
    backendServiceApi: 'Backend service API',
    createNewKey: 'Create new key',
    created: 'Created',
    action: 'Action',
    embedModalTitle: 'Embed into website',
    comingSoon: 'Coming Soon',
    fullScreenTitle: 'Full Embed',
    fullScreenDescription:
      'Embed the following iframe into your website at the desired location',
    partialTitle: 'Partial Embed',
    extensionTitle: 'Chrome Extension',
    tokenError: 'Please create API Token first!',
  },
  setting: {
    profile: 'Profile',
web/src/locales/zh-traditional.ts (Traditional Chinese locale; the changes mirror the English locale above). In English: the 'chinese' / 'traditionalChinese' labels are reworded to 'Simplified Chinese' / 'Traditional Chinese'; 'copy' / 'copied' strings are added; the word previously rendered as '令牌' is replaced by 'Token' in the chunk-token-number labels, the chunk-token tip, the naive chunk-method description, and the HTTP 401 message; the examples description is reworded; the QA chunk-method description is replaced with the restructured two-bullet version introduced in the English locale; 'top n' becomes 'Top N'; and the new chat-API overview strings (message count, active users, token output speed, consumed tokens, session rounds, user satisfaction, preview, embed, service API endpoint, API key, API documents, date range, backend service API, create new key, created, action, embed-modal titles, coming soon, full/partial embed, Chrome extension, and the "please create an API token first" error) are added.
|
||||
edit: '编辑',
|
||||
upload: '上传',
|
||||
english: '英文',
|
||||
chinese: '中文简体',
|
||||
traditionalChinese: '中文繁体',
|
||||
chinese: '简体中文',
|
||||
traditionalChinese: '繁体中文',
|
||||
language: '语言',
|
||||
languageMessage: '请输入语言',
|
||||
languagePlaceholder: '请选择语言',
|
||||
copy: '复制',
|
||||
copied: '复制成功',
|
||||
},
|
||||
login: {
|
||||
login: '登录',
|
||||
@ -143,12 +145,12 @@ export default {
|
||||
languagePlaceholder: '请输入语言',
|
||||
permissions: '权限',
|
||||
embeddingModel: '嵌入模型',
|
||||
chunkTokenNumber: '块令牌数',
|
||||
chunkTokenNumberMessage: '块令牌数是必填项',
|
||||
chunkTokenNumber: '块Token数',
|
||||
chunkTokenNumberMessage: '块Token数是必填项',
|
||||
embeddingModelTip:
|
||||
'用于嵌入块的嵌入模型。 一旦知识库有了块,它就无法更改。 如果你想改变它,你需要删除所有的块。',
|
||||
permissionsTip: '如果权限是“团队”,则所有团队成员都可以操作知识库。',
|
||||
chunkTokenNumberTip: '它大致确定了一个块的令牌数量。',
|
||||
chunkTokenNumberTip: '它大致确定了一个块的Token数量。',
|
||||
chunkMethod: '解析方法',
|
||||
chunkMethodTip: '说明位于右侧。',
|
||||
upload: '上传',
|
||||
@ -162,7 +164,7 @@ export default {
|
||||
cancel: '取消',
|
||||
methodTitle: '分块方法说明',
|
||||
methodExamples: '示例',
|
||||
methodExamplesDescription: '这个视觉指南是为了让您更容易理解。',
|
||||
methodExamplesDescription: '提出以下屏幕截图以促进理解。',
|
||||
dialogueExamplesTitle: '对话示例',
|
||||
methodEmpty: '这将显示知识库类别的可视化解释',
|
||||
book: `<p>支持的文件格式为<b>DOCX</b>、<b>PDF</b>、<b>TXT</b>。</p><p>
|
||||
@ -181,7 +183,7 @@ export default {
|
||||
<p>此方法将简单的方法应用于块文件:</p>
|
||||
<p>
|
||||
<li>系统将使用视觉检测模型将连续文本分割成多个片段。</li>
|
||||
<li>接下来,这些连续的片段被合并成令牌数不超过“令牌数”的块。</li></p>`,
|
||||
<li>接下来,这些连续的片段被合并成Token数不超过“Token数”的块。</li></p>`,
|
||||
paper: `<p>仅支持<b>PDF</b>文件。</p><p>
|
||||
如果我们的模型运行良好,论文将按其部分进行切片,例如<i>摘要、1.1、1.2</i>等。</p><p>
|
||||
这样做的好处是LLM可以更好的概括论文中相关章节的内容,
|
||||
@ -191,15 +193,25 @@ export default {
|
||||
presentation: `<p>支持的文件格式为<b>PDF</b>、<b>PPTX</b>。</p><p>
|
||||
每个页面都将被视为一个块。 并且每个页面的缩略图都会被存储。</p><p>
|
||||
<i>您上传的所有PPT文件都会使用此方法自动分块,无需为每个PPT文件进行设置。</i></p>`,
|
||||
qa: `支持<p><b>EXCEL</b>和<b>CSV/TXT</b>文件。</p><p>
|
||||
如果文件是Excel格式,应该有2列问题和答案,没有标题。
|
||||
问题栏位于答案栏之前。
|
||||
如果有多个工作表也没关系,只要列的组合正确即可。</p><p>
|
||||
|
||||
如果是 csv 格式,则应采用 UTF-8 编码。 使用 TAB 作为分隔符来分隔问题和答案。</p><p>
|
||||
|
||||
<i>所有变形的线都将被忽略。
|
||||
每对问答都将被视为一个块。</i></p>`,
|
||||
qa: ` <p>
|
||||
此块方法支持<b> excel </b>和<b> csv/txt </b>文件格式。
|
||||
</p>
|
||||
<li>
|
||||
如果文件以<b> excel </b>格式,则应由两个列组成
|
||||
没有标题:一个提出问题,另一个用于答案,
|
||||
答案列之前的问题列。多张纸是
|
||||
只要列正确结构,就可以接受。
|
||||
</li>
|
||||
<li>
|
||||
如果文件以<b> csv/txt </b>格式为
|
||||
用作分开问题和答案的定界符。
|
||||
</li>
|
||||
<p>
|
||||
<i>
|
||||
未能遵循上述规则的文本行将被忽略,并且
|
||||
每个问答对将被认为是一个独特的部分。
|
||||
</i>
|
||||
</p>`,
|
||||
resume: `<p>支持的文件格式为<b>DOCX</b>、<b>PDF</b>、<b>TXT</b>。
|
||||
</p><p>
|
||||
简历有多种格式,就像一个人的个性一样,但我们经常必须将它们组织成结构化数据,以便于搜索。
|
||||
@ -326,6 +338,30 @@ export default {
|
||||
'这设置了模型输出的最大长度,以标记(单词或单词片段)的数量来衡量。',
|
||||
quote: '显示引文',
|
||||
quoteTip: '是否应该显示原文出处?',
|
||||
overview: '聊天 API',
|
||||
pv: '消息数',
|
||||
uv: '活跃用户数',
|
||||
speed: 'Token 输出速度',
|
||||
tokens: '消耗Token数',
|
||||
round: '会话互动数',
|
||||
thumbUp: '用户满意度',
|
||||
preview: '预览',
|
||||
embedded: '嵌入',
|
||||
serviceApiEndpoint: '服务API端点',
|
||||
apiKey: 'API 键',
|
||||
apiReference: 'API 文档',
|
||||
dateRange: '日期范围:',
|
||||
backendServiceApi: '后端服务 API',
|
||||
createNewKey: '创建新密钥',
|
||||
created: '创建于',
|
||||
action: '操作',
|
||||
embedModalTitle: '嵌入网站',
|
||||
comingSoon: '即将推出',
|
||||
fullScreenTitle: '全屏嵌入',
|
||||
fullScreenDescription: '将以下iframe嵌入您的网站处于所需位置',
|
||||
partialTitle: '部分嵌入',
|
||||
extensionTitle: 'Chrome 插件',
|
||||
tokenError: '请先创建 Api Token!',
|
||||
},
|
||||
setting: {
|
||||
profile: '概要',
|
||||
@ -410,7 +446,7 @@ export default {
|
||||
202: '一个请求已经进入后台排队(异步任务)。',
|
||||
204: '删除数据成功。',
|
||||
400: '发出的请求有错误,服务器没有进行新建或修改数据的操作。',
|
||||
401: '用户没有权限(令牌、用户名、密码错误)。',
|
||||
401: '用户没有权限(Token、用户名、密码错误)。',
|
||||
403: '用户得到授权,但是访问是被禁止的。',
|
||||
404: '发出的请求针对的是不存在的记录,服务器没有进行操作。',
|
||||
406: '请求的格式不可得。',
|
||||
|
||||
@@ -26,6 +26,7 @@ import ParsingActionCell from './parsing-action-cell';
import ParsingStatusCell from './parsing-status-cell';
import RenameModal from './rename-modal';

import { formatDate } from '@/utils/date';
import styles from './index.less';

const KnowledgeFile = () => {
@@ -94,6 +95,9 @@ const KnowledgeFile = () => {
      title: t('uploadDate'),
      dataIndex: 'create_date',
      key: 'create_date',
      render(value) {
        return formatDate(value);
      },
    },
    {
      title: t('chunkMethod'),
72  web/src/pages/chat/chat-api-key-modal/index.tsx  Normal file

@@ -0,0 +1,72 @@
import CopyToClipboard from '@/components/copy-to-clipboard';
import { useTranslate } from '@/hooks/commonHooks';
import { IModalProps } from '@/interfaces/common';
import { IToken } from '@/interfaces/database/chat';
import { formatDate } from '@/utils/date';
import { DeleteOutlined } from '@ant-design/icons';
import type { TableProps } from 'antd';
import { Button, Modal, Space, Table } from 'antd';
import { useOperateApiKey } from '../hooks';

const ChatApiKeyModal = ({
  visible,
  dialogId,
  hideModal,
}: IModalProps<any> & { dialogId: string }) => {
  const { createToken, removeToken, tokenList, listLoading, creatingLoading } =
    useOperateApiKey(visible, dialogId);
  const { t } = useTranslate('chat');

  const columns: TableProps<IToken>['columns'] = [
    {
      title: 'Token',
      dataIndex: 'token',
      key: 'token',
      render: (text) => <a>{text}</a>,
    },
    {
      title: t('created'),
      dataIndex: 'create_date',
      key: 'create_date',
      render: (text) => formatDate(text),
    },
    {
      title: t('action'),
      key: 'action',
      render: (_, record) => (
        <Space size="middle">
          <CopyToClipboard text={record.token}></CopyToClipboard>
          <DeleteOutlined
            onClick={() => removeToken(record.token, record.tenant_id)}
          />
        </Space>
      ),
    },
  ];

  return (
    <>
      <Modal
        title={t('apiKey')}
        open={visible}
        onCancel={hideModal}
        cancelButtonProps={{ style: { display: 'none' } }}
        style={{ top: 300 }}
        onOk={hideModal}
        width={'50vw'}
      >
        <Table
          columns={columns}
          dataSource={tokenList}
          rowKey={'token'}
          loading={listLoading}
        />
        <Button onClick={createToken} loading={creatingLoading}>
          {t('createNewKey')}
        </Button>
      </Modal>
    </>
  );
};

export default ChatApiKeyModal;
@@ -1,6 +1,6 @@
import { useFetchKnowledgeList } from '@/hooks/knowledgeHook';
import { PlusOutlined } from '@ant-design/icons';
import { Form, Input, Select, Upload } from 'antd';
import { Form, Input, Select, Switch, Upload } from 'antd';
import classNames from 'classnames';
import { ISegmentedContentProps } from '../interface';

@@ -83,6 +83,15 @@ const AssistantSetting = ({ show }: ISegmentedContentProps) => {
      >
        <Input.TextArea autoSize={{ minRows: 5 }} />
      </Form.Item>
      <Form.Item
        label={t('quote')}
        valuePropName="checked"
        name={['prompt_config', 'quote']}
        tooltip={t('quoteTip')}
        initialValue={true}
      >
        <Switch />
      </Form.Item>
      <Form.Item
        label={t('knowledgeBases')}
        name="kb_ids"
@@ -172,15 +172,7 @@ const PromptEngine = (
      >
        <Slider max={30} />
      </Form.Item>
      <Form.Item
        label={t('quote')}
        valuePropName="checked"
        name={['prompt_config', 'quote']}
        tooltip={t('quoteTip')}
        initialValue={true}
      >
        <Switch />
      </Form.Item>

      <section className={classNames(styles.variableContainer)}>
        <Row align={'middle'} justify="end">
          <Col span={7} className={styles.variableAlign}>
@@ -33,9 +33,9 @@
  .messageEmpty {
    width: 300px;
  }
  .referenceIcon {
    padding: 0 6px;
  }
  // .referenceIcon {
  //   padding: 0 6px;
  // }
}

.messageItemLeft {
@@ -46,24 +46,24 @@
  text-align: right;
}

.referencePopoverWrapper {
  max-width: 50vw;
}
// .referencePopoverWrapper {
//   max-width: 50vw;
// }

.referenceChunkImage {
  width: 10vw;
  object-fit: contain;
}
// .referenceChunkImage {
//   width: 10vw;
//   object-fit: contain;
// }

.referenceImagePreview {
  max-width: 45vw;
  max-height: 45vh;
}
.chunkContentText {
  .chunkText;
  max-height: 45vh;
  overflow-y: auto;
}
.documentLink {
  padding: 0;
}
// .referenceImagePreview {
//   max-width: 45vw;
//   max-height: 45vh;
// }
// .chunkContentText {
//   .chunkText;
//   max-height: 45vh;
//   overflow-y: auto;
// }
// .documentLink {
//   padding: 0;
// }
@ -1,5 +1,4 @@
|
||||
import { ReactComponent as AssistantIcon } from '@/assets/svg/assistant.svg';
|
||||
import Image from '@/components/image';
|
||||
import NewDocumentLink from '@/components/new-document-link';
|
||||
import DocumentPreviewer from '@/components/pdf-previewer';
|
||||
import { MessageType } from '@/constants/chat';
|
||||
@ -7,7 +6,6 @@ import { useSelectFileThumbnails } from '@/hooks/knowledgeHook';
|
||||
import { useSelectUserInfo } from '@/hooks/userSettingHook';
|
||||
import { IReference, Message } from '@/interfaces/database/chat';
|
||||
import { IChunk } from '@/interfaces/database/knowledge';
|
||||
import { InfoCircleOutlined } from '@ant-design/icons';
|
||||
import {
|
||||
Avatar,
|
||||
Button,
|
||||
@ -15,18 +13,11 @@ import {
|
||||
Flex,
|
||||
Input,
|
||||
List,
|
||||
Popover,
|
||||
Skeleton,
|
||||
Space,
|
||||
Spin,
|
||||
} from 'antd';
|
||||
import classNames from 'classnames';
|
||||
import { useCallback, useMemo } from 'react';
|
||||
import Markdown from 'react-markdown';
|
||||
import reactStringReplace from 'react-string-replace';
|
||||
import { Prism as SyntaxHighlighter } from 'react-syntax-highlighter';
|
||||
import remarkGfm from 'remark-gfm';
|
||||
import { visitParents } from 'unist-util-visit-parents';
|
||||
import { useMemo } from 'react';
|
||||
import {
|
||||
useClickDrawer,
|
||||
useFetchConversationOnMount,
|
||||
@ -35,33 +26,13 @@ import {
|
||||
  useSelectConversationLoading,
  useSendMessage,
} from '../hooks';
import MarkdownContent from '../markdown-content';

import SvgIcon from '@/components/svg-icon';
import { useTranslate } from '@/hooks/commonHooks';
import { getExtension, isPdf } from '@/utils/documentUtils';
import styles from './index.less';

const reg = /(#{2}\d+\${2})/g;

const getChunkIndex = (match: string) => Number(match.slice(2, -2));

const rehypeWrapReference = () => {
  return function wrapTextTransform(tree: any) {
    visitParents(tree, 'text', (node, ancestors) => {
      const latestAncestor = ancestors.at(-1);
      if (
        latestAncestor.tagName !== 'custom-typography' &&
        latestAncestor.tagName !== 'code'
      ) {
        node.type = 'element';
        node.tagName = 'custom-typography';
        node.properties = {};
        node.children = [{ type: 'text', value: node.value }];
      }
    });
  };
};

const MessageItem = ({
  item,
  reference,
@@ -76,100 +47,6 @@ const MessageItem = ({

  const isAssistant = item.role === MessageType.Assistant;

  const handleDocumentButtonClick = useCallback(
    (documentId: string, chunk: IChunk, isPdf: boolean) => () => {
      if (!isPdf) {
        return;
      }
      clickDocumentButton(documentId, chunk);
    },
    [clickDocumentButton],
  );

  const getPopoverContent = useCallback(
    (chunkIndex: number) => {
      const chunks = reference?.chunks ?? [];
      const chunkItem = chunks[chunkIndex];
      const document = reference?.doc_aggs.find(
        (x) => x?.doc_id === chunkItem?.doc_id,
      );
      const documentId = document?.doc_id;
      const fileThumbnail = documentId ? fileThumbnails[documentId] : '';
      const fileExtension = documentId ? getExtension(document?.doc_name) : '';
      const imageId = chunkItem?.img_id;
      return (
        <Flex
          key={chunkItem?.chunk_id}
          gap={10}
          className={styles.referencePopoverWrapper}
        >
          {imageId && (
            <Popover
              placement="left"
              content={
                <Image
                  id={imageId}
                  className={styles.referenceImagePreview}
                ></Image>
              }
            >
              <Image
                id={imageId}
                className={styles.referenceChunkImage}
              ></Image>
            </Popover>
          )}
          <Space direction={'vertical'}>
            <div
              dangerouslySetInnerHTML={{
                __html: chunkItem?.content_with_weight,
              }}
              className={styles.chunkContentText}
            ></div>
            {documentId && (
              <Flex gap={'small'}>
                {fileThumbnail ? (
                  <img src={fileThumbnail} alt="" />
                ) : (
                  <SvgIcon
                    name={`file-icon/${fileExtension}`}
                    width={24}
                  ></SvgIcon>
                )}
                <Button
                  type="link"
                  className={styles.documentLink}
                  onClick={handleDocumentButtonClick(
                    documentId,
                    chunkItem,
                    fileExtension === 'pdf',
                  )}
                >
                  {document?.doc_name}
                </Button>
              </Flex>
            )}
          </Space>
        </Flex>
      );
    },
    [reference, fileThumbnails, handleDocumentButtonClick],
  );

  const renderReference = useCallback(
    (text: string) => {
      return reactStringReplace(text, reg, (match, i) => {
        const chunkIndex = getChunkIndex(match);
        return (
          <Popover content={getPopoverContent(chunkIndex)}>
            <InfoCircleOutlined key={i} className={styles.referenceIcon} />
          </Popover>
        );
      });
    },
    [getPopoverContent],
  );

  const referenceDocumentList = useMemo(() => {
    return reference?.doc_aggs ?? [];
  }, [reference?.doc_aggs]);
@@ -207,38 +84,11 @@ const MessageItem = ({
          <b>{isAssistant ? '' : userInfo.nickname}</b>
          <div className={styles.messageText}>
            {item.content !== '' ? (
              <Markdown
                rehypePlugins={[rehypeWrapReference]}
                remarkPlugins={[remarkGfm]}
                components={
                  {
                    'custom-typography': ({
                      children,
                    }: {
                      children: string;
                    }) => renderReference(children),
                    code(props: any) {
                      const { children, className, node, ...rest } = props;
                      const match = /language-(\w+)/.exec(className || '');
                      return match ? (
                        <SyntaxHighlighter
                          {...rest}
                          PreTag="div"
                          language={match[1]}
                        >
                          {String(children).replace(/\n$/, '')}
                        </SyntaxHighlighter>
                      ) : (
                        <code {...rest} className={className}>
                          {children}
                        </code>
                      );
                    },
                  } as any
                }
              >
                {item.content}
              </Markdown>
              <MarkdownContent
                content={item.content}
                reference={reference}
                clickDocumentButton={clickDocumentButton}
              ></MarkdownContent>
            ) : (
              <Skeleton active className={styles.messageEmpty} />
            )}
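The hunk above strips the inline reference rendering out of MessageItem in favour of the new MarkdownContent component added later in this diff. For context, the markers the regex targets have the form ##<chunk index>$$, embedded by the assistant in its answer text. A minimal sketch of how `reg` and `getChunkIndex` cooperate (the sample answer string is invented for illustration):

const sampleAnswer = 'Documents are chunked before indexing ##0$$ and embedded ##3$$.';
const indices = (sampleAnswer.match(reg) ?? []).map(getChunkIndex); // [0, 3]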
21
web/src/pages/chat/chat-overview-modal/index.less
Normal file
@@ -0,0 +1,21 @@
.chartWrapper {
  height: 40vh;
  overflow: auto;
}

.chartItem {
  height: 300px;
  padding: 10px 0 50px;
}

.chartLabel {
  display: inline-block;
  padding-left: 60px;
  padding-bottom: 20px;
}
.linkText {
  border-radius: 6px;
  padding: 6px 10px;
  background-color: #eff8ff;
  border: 1px;
}
142
web/src/pages/chat/chat-overview-modal/index.tsx
Normal file
@@ -0,0 +1,142 @@
import LineChart from '@/components/line-chart';
import { useSetModalState, useTranslate } from '@/hooks/commonHooks';
import { IModalProps } from '@/interfaces/common';
import { IDialog, IStats } from '@/interfaces/database/chat';
import { formatDate } from '@/utils/date';
import { Button, Card, DatePicker, Flex, Modal, Space, Typography } from 'antd';
import { RangePickerProps } from 'antd/es/date-picker';
import dayjs from 'dayjs';
import camelCase from 'lodash/camelCase';
import ChatApiKeyModal from '../chat-api-key-modal';
import EmbedModal from '../embed-modal';
import {
  useFetchStatsOnMount,
  usePreviewChat,
  useSelectChartStatsList,
  useShowEmbedModal,
} from '../hooks';
import styles from './index.less';

const { Paragraph } = Typography;
const { RangePicker } = DatePicker;

const StatsLineChart = ({ statsType }: { statsType: keyof IStats }) => {
  const { t } = useTranslate('chat');
  const chartList = useSelectChartStatsList();
  const list =
    chartList[statsType]?.map((x) => ({
      ...x,
      xAxis: formatDate(x.xAxis),
    })) ?? [];

  return (
    <div className={styles.chartItem}>
      <b className={styles.chartLabel}>{t(camelCase(statsType))}</b>
      <LineChart data={list}></LineChart>
    </div>
  );
};

const ChatOverviewModal = ({
  visible,
  hideModal,
  dialog,
}: IModalProps<any> & { dialog: IDialog }) => {
  const { t } = useTranslate('chat');
  const {
    visible: apiKeyVisible,
    hideModal: hideApiKeyModal,
    showModal: showApiKeyModal,
  } = useSetModalState();
  const {
    embedVisible,
    hideEmbedModal,
    showEmbedModal,
    embedToken,
    errorContextHolder,
  } = useShowEmbedModal(dialog.id);

  const { pickerValue, setPickerValue } = useFetchStatsOnMount(visible);

  const disabledDate: RangePickerProps['disabledDate'] = (current) => {
    return current && current > dayjs().endOf('day');
  };

  const { handlePreview, contextHolder } = usePreviewChat(dialog.id);

  return (
    <>
      <Modal
        title={t('overview')}
        open={visible}
        onCancel={hideModal}
        cancelButtonProps={{ style: { display: 'none' } }}
        onOk={hideModal}
        width={'100vw'}
      >
        <Flex vertical gap={'middle'}>
          <Card title={t('backendServiceApi')}>
            <Flex gap={8} vertical>
              {t('serviceApiEndpoint')}
              <Paragraph copyable className={styles.linkText}>
                https://demo.ragflow.io/v1/api/
              </Paragraph>
            </Flex>
            <Space size={'middle'}>
              <Button onClick={showApiKeyModal}>{t('apiKey')}</Button>
              <a
                href={
                  'https://github.com/infiniflow/ragflow/blob/main/docs/conversation_api.md'
                }
                target="_blank"
                rel="noreferrer"
              >
                <Button>{t('apiReference')}</Button>
              </a>
            </Space>
          </Card>
          <Card title={`${dialog.name} Web App`}>
            <Flex gap={8} vertical>
              <Space size={'middle'}>
                <Button onClick={handlePreview}>{t('preview')}</Button>
                <Button onClick={showEmbedModal}>{t('embedded')}</Button>
              </Space>
            </Flex>
          </Card>

          <Space>
            <b>{t('dateRange')}</b>
            <RangePicker
              disabledDate={disabledDate}
              value={pickerValue}
              onChange={setPickerValue}
              allowClear={false}
            />
          </Space>
          <div className={styles.chartWrapper}>
            <StatsLineChart statsType={'pv'}></StatsLineChart>
            <StatsLineChart statsType={'round'}></StatsLineChart>
            <StatsLineChart statsType={'speed'}></StatsLineChart>
            <StatsLineChart statsType={'thumb_up'}></StatsLineChart>
            <StatsLineChart statsType={'tokens'}></StatsLineChart>
            <StatsLineChart statsType={'uv'}></StatsLineChart>
          </div>
        </Flex>
        <ChatApiKeyModal
          visible={apiKeyVisible}
          hideModal={hideApiKeyModal}
          dialogId={dialog.id}
        ></ChatApiKeyModal>
        <EmbedModal
          token={embedToken}
          visible={embedVisible}
          hideModal={hideEmbedModal}
        ></EmbedModal>
        {contextHolder}
        {errorContextHolder}
      </Modal>
    </>
  );
};

export default ChatOverviewModal;
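Each StatsLineChart hands LineChart a list of { xAxis, yAxis } points, with dates already run through formatDate ('DD/MM/YYYY', per the date-util change at the end of this diff). An illustrative value for the data prop, with invented numbers:

const list: Array<{ xAxis: string; yAxis: number }> = [
  { xAxis: '01/06/2024', yAxis: 1 },
  { xAxis: '24/07/2024', yAxis: 3 },
];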
8
web/src/pages/chat/embed-modal/index.less
Normal file
@@ -0,0 +1,8 @@
.codeCard {
  .clearCardBody();
}

.codeText {
  padding: 10px;
  background-color: #e8e8ea;
}
70
web/src/pages/chat/embed-modal/index.tsx
Normal file
@@ -0,0 +1,70 @@
import CopyToClipboard from '@/components/copy-to-clipboard';
import HightLightMarkdown from '@/components/highlight-markdown';
import { useTranslate } from '@/hooks/commonHooks';
import { IModalProps } from '@/interfaces/common';
import { Card, Modal, Tabs, TabsProps } from 'antd';
import styles from './index.less';

const EmbedModal = ({
  visible,
  hideModal,
  token = '',
}: IModalProps<any> & { token: string }) => {
  const { t } = useTranslate('chat');

  const text = `
~~~ html
<iframe
  src="https://demo.ragflow.io/chat/share?shared_id=${token}"
  style="width: 100%; height: 100%; min-height: 600px"
  frameborder="0"
>
</iframe>
~~~
`;

  const items: TabsProps['items'] = [
    {
      key: '1',
      label: t('fullScreenTitle'),
      children: (
        <Card
          title={t('fullScreenDescription')}
          extra={<CopyToClipboard text={text}></CopyToClipboard>}
          className={styles.codeCard}
        >
          <HightLightMarkdown>{text}</HightLightMarkdown>
        </Card>
      ),
    },
    {
      key: '2',
      label: t('partialTitle'),
      children: t('comingSoon'),
    },
    {
      key: '3',
      label: t('extensionTitle'),
      children: t('comingSoon'),
    },
  ];

  const onChange = (key: string) => {
    console.log(key);
  };

  return (
    <Modal
      title={t('embedModalTitle')}
      open={visible}
      style={{ top: 300 }}
      width={'50vw'}
      onOk={hideModal}
      onCancel={hideModal}
    >
      <Tabs defaultActiveKey="1" items={items} onChange={onChange} />
    </Modal>
  );
};

export default EmbedModal;
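With a hypothetical token of abc123, the copyable snippet rendered in the full-screen tab expands to:

<iframe
  src="https://demo.ragflow.io/chat/share?shared_id=abc123"
  style="width: 100%; height: 100%; min-height: 600px"
  frameborder="0"
>
</iframe>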
@@ -2,22 +2,34 @@ import { MessageType } from '@/constants/chat';
import { fileIconMap } from '@/constants/common';
import {
  useCompleteConversation,
  useCreateToken,
  useFetchConversation,
  useFetchConversationList,
  useFetchDialog,
  useFetchDialogList,
  useFetchStats,
  useListToken,
  useRemoveConversation,
  useRemoveDialog,
  useRemoveToken,
  useSelectConversationList,
  useSelectDialogList,
  useSelectStats,
  useSelectTokenList,
  useSetDialog,
  useUpdateConversation,
} from '@/hooks/chatHooks';
import { useSetModalState, useShowDeleteConfirm } from '@/hooks/commonHooks';
import {
  useSetModalState,
  useShowDeleteConfirm,
  useTranslate,
} from '@/hooks/commonHooks';
import { useOneNamespaceEffectsLoading } from '@/hooks/storeHooks';
import { IConversation, IDialog } from '@/interfaces/database/chat';
import { IConversation, IDialog, IStats } from '@/interfaces/database/chat';
import { IChunk } from '@/interfaces/database/knowledge';
import { getFileExtension } from '@/utils';
import { message } from 'antd';
import dayjs, { Dayjs } from 'dayjs';
import omit from 'lodash/omit';
import {
  ChangeEventHandler,
@@ -704,3 +716,202 @@ export const useGetSendButtonDisabled = () => {
  return dialogId === '' && conversationId === '';
};
//#endregion

//#region API provided for external calls

type RangeValue = [Dayjs | null, Dayjs | null] | null;

const getDay = (date: Dayjs) => date.format('YYYY-MM-DD');
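// For illustration: getDay truncates a Dayjs value to its calendar date,
// e.g. getDay(dayjs('2024-05-01T13:45:00')) === '2024-05-01'.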
export const useFetchStatsOnMount = (visible: boolean) => {
  const fetchStats = useFetchStats();
  // Default to the last seven days; the start date must precede the end date,
  // otherwise fromDate/toDate below arrive inverted at the backend.
  const [pickerValue, setPickerValue] = useState<RangeValue>([
    dayjs().subtract(7, 'day'),
    dayjs(),
  ]);

  useEffect(() => {
    if (visible && Array.isArray(pickerValue) && pickerValue[0]) {
      fetchStats({
        fromDate: getDay(pickerValue[0]),
        toDate: getDay(pickerValue[1] ?? dayjs()),
      });
    }
  }, [fetchStats, pickerValue, visible]);

  return {
    pickerValue,
    setPickerValue,
  };
};

export const useOperateApiKey = (visible: boolean, dialogId: string) => {
  const removeToken = useRemoveToken();
  const createToken = useCreateToken(dialogId);
  const listToken = useListToken();
  const tokenList = useSelectTokenList();
  const creatingLoading = useOneNamespaceEffectsLoading('chatModel', [
    'createToken',
  ]);
  const listLoading = useOneNamespaceEffectsLoading('chatModel', ['list']);

  const showDeleteConfirm = useShowDeleteConfirm();

  const onRemoveToken = (token: string, tenantId: string) => {
    showDeleteConfirm({
      onOk: () => removeToken({ dialogId, tokens: [token], tenantId }),
    });
  };

  useEffect(() => {
    if (visible && dialogId) {
      listToken(dialogId);
    }
  }, [listToken, dialogId, visible]);

  return {
    removeToken: onRemoveToken,
    createToken,
    tokenList,
    creatingLoading,
    listLoading,
  };
};

type ChartStatsType = {
  [k in keyof IStats]: Array<{ xAxis: string; yAxis: number }>;
};

export const useSelectChartStatsList = (): ChartStatsType => {
  const stats: IStats = useSelectStats();
  // const stats = {
  //   pv: [
  //     ['2024-06-01', 1],
  //     ['2024-07-24', 3],
  //     ['2024-09-01', 10],
  //   ],
  //   uv: [
  //     ['2024-02-01', 0],
  //     ['2024-03-01', 99],
  //     ['2024-05-01', 3],
  //   ],
  //   speed: [
  //     ['2024-09-01', 2],
  //     ['2024-09-01', 3],
  //   ],
  //   tokens: [
  //     ['2024-09-01', 1],
  //     ['2024-09-01', 3],
  //   ],
  //   round: [
  //     ['2024-09-01', 0],
  //     ['2024-09-01', 3],
  //   ],
  //   thumb_up: [
  //     ['2024-09-01', 3],
  //     ['2024-09-01', 9],
  //   ],
  // };

  return Object.keys(stats).reduce((pre, cur) => {
    const item = stats[cur as keyof IStats];
    if (item.length > 0) {
      pre[cur as keyof IStats] = item.map((x) => ({
        xAxis: x[0] as string,
        yAxis: x[1] as number,
      }));
    }
    return pre;
  }, {} as ChartStatsType);
};

export const useShowTokenEmptyError = () => {
  const [messageApi, contextHolder] = message.useMessage();
  const { t } = useTranslate('chat');

  const showTokenEmptyError = useCallback(() => {
    messageApi.error(t('tokenError'));
  }, [messageApi, t]);
  return { showTokenEmptyError, contextHolder };
};

const getUrlWithToken = (token: string) => {
  const { protocol, host } = window.location;
  return `${protocol}//${host}/chat/share?shared_id=${token}`;
};
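// For illustration: when the app is served from https://demo.ragflow.io,
// getUrlWithToken('abc123') yields
// 'https://demo.ragflow.io/chat/share?shared_id=abc123' (token invented).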
const useFetchTokenListBeforeOtherStep = (dialogId: string) => {
  const { showTokenEmptyError, contextHolder } = useShowTokenEmptyError();

  const listToken = useListToken();
  const tokenList = useSelectTokenList();

  const token =
    Array.isArray(tokenList) && tokenList.length > 0 ? tokenList[0].token : '';

  const handleOperate = useCallback(async () => {
    const data = await listToken(dialogId);
    const list = data.data;
    if (data.retcode === 0 && Array.isArray(list) && list.length > 0) {
      return list[0]?.token;
    } else {
      showTokenEmptyError();
      return false;
    }
  }, [dialogId, listToken, showTokenEmptyError]);

  return {
    token,
    contextHolder,
    handleOperate,
  };
};

export const useShowEmbedModal = (dialogId: string) => {
  const {
    visible: embedVisible,
    hideModal: hideEmbedModal,
    showModal: showEmbedModal,
  } = useSetModalState();

  const { handleOperate, token, contextHolder } =
    useFetchTokenListBeforeOtherStep(dialogId);

  const handleShowEmbedModal = useCallback(async () => {
    const succeed = await handleOperate();
    if (succeed) {
      showEmbedModal();
    }
  }, [handleOperate, showEmbedModal]);

  return {
    showEmbedModal: handleShowEmbedModal,
    hideEmbedModal,
    embedVisible,
    embedToken: token,
    errorContextHolder: contextHolder,
  };
};

export const usePreviewChat = (dialogId: string) => {
  const { handleOperate, contextHolder } =
    useFetchTokenListBeforeOtherStep(dialogId);

  const open = useCallback((t: string) => {
    window.open(getUrlWithToken(t), '_blank');
  }, []);

  const handlePreview = useCallback(async () => {
    const token = await handleOperate();
    if (token) {
      open(token);
    }
  }, [handleOperate, open]);

  return {
    handlePreview,
    contextHolder,
  };
};

//#endregion
@@ -1,6 +1,11 @@
import { ReactComponent as ChatAppCube } from '@/assets/svg/chat-app-cube.svg';
import RenameModal from '@/components/rename-modal';
import { DeleteOutlined, EditOutlined, FormOutlined } from '@ant-design/icons';
import {
  CloudOutlined,
  DeleteOutlined,
  EditOutlined,
  FormOutlined,
} from '@ant-design/icons';
import {
  Avatar,
  Button,
@@ -35,7 +40,10 @@ import {
  useSelectFirstDialogOnMount,
} from './hooks';

import { useTranslate } from '@/hooks/commonHooks';
import { useSetModalState, useTranslate } from '@/hooks/commonHooks';
import { useSetSelectedRecord } from '@/hooks/logicHooks';
import { IDialog } from '@/interfaces/database/chat';
import ChatOverviewModal from './chat-overview-modal';
import styles from './index.less';

const Chat = () => {
@@ -73,6 +81,12 @@ const Chat = () => {
  const dialogLoading = useSelectDialogListLoading();
  const conversationLoading = useSelectConversationListLoading();
  const { t } = useTranslate('chat');
  const {
    visible: overviewVisible,
    hideModal: hideOverviewModal,
    showModal: showOverviewModal,
  } = useSetModalState();
  const { currentRecord, setRecord } = useSetSelectedRecord<IDialog>();

  useFetchDialogOnMount(dialogId, true);

@@ -100,6 +114,15 @@ const Chat = () => {
    onRemoveDialog([dialogId]);
  };

  const handleShowOverviewModal =
    (dialog: IDialog): any =>
    (info: any) => {
      info?.domEvent?.preventDefault();
      info?.domEvent?.stopPropagation();
      setRecord(dialog);
      showOverviewModal();
    };

  const handleRemoveConversation =
    (conversationId: string): MenuItemProps['onClick'] =>
    ({ domEvent }) => {
@@ -141,7 +164,9 @@ const Chat = () => {
    },
  ];

  const buildAppItems = (dialogId: string) => {
  const buildAppItems = (dialog: IDialog) => {
    const dialogId = dialog.id;

    const appItems: MenuProps['items'] = [
      {
        key: '1',
@@ -164,6 +189,17 @@ const Chat = () => {
          </Space>
        ),
      },
      { type: 'divider' },
      {
        key: '3',
        onClick: handleShowOverviewModal(dialog),
        label: (
          <Space>
            <CloudOutlined />
            {t('overview')}
          </Space>
        ),
      },
    ];

    return appItems;
@@ -230,7 +266,7 @@ const Chat = () => {
            </Space>
            {activated === x.id && (
              <section>
                <Dropdown menu={{ items: buildAppItems(x.id) }}>
                <Dropdown menu={{ items: buildAppItems(x) }}>
                  <ChatAppCube
                    className={styles.cubeIcon}
                  ></ChatAppCube>
@@ -315,6 +351,11 @@ const Chat = () => {
          initialName={initialConversationName}
          loading={conversationRenameLoading}
        ></RenameModal>
        <ChatOverviewModal
          visible={overviewVisible}
          hideModal={hideOverviewModal}
          dialog={currentRecord}
        ></ChatOverviewModal>
    </Flex>
  );
};
25
web/src/pages/chat/markdown-content/index.less
Normal file
@@ -0,0 +1,25 @@
.referencePopoverWrapper {
  max-width: 50vw;
}

.referenceChunkImage {
  width: 10vw;
  object-fit: contain;
}

.referenceImagePreview {
  max-width: 45vw;
  max-height: 45vh;
}
.chunkContentText {
  .chunkText;
  max-height: 45vh;
  overflow-y: auto;
}
.documentLink {
  padding: 0;
}

.referenceIcon {
  padding: 0 6px;
}
173
web/src/pages/chat/markdown-content/index.tsx
Normal file
@@ -0,0 +1,173 @@
import Image from '@/components/image';
import SvgIcon from '@/components/svg-icon';
import { useSelectFileThumbnails } from '@/hooks/knowledgeHook';
import { IReference } from '@/interfaces/database/chat';
import { IChunk } from '@/interfaces/database/knowledge';
import { getExtension } from '@/utils/documentUtils';
import { InfoCircleOutlined } from '@ant-design/icons';
import { Button, Flex, Popover, Space } from 'antd';
import { useCallback } from 'react';
import Markdown from 'react-markdown';
import reactStringReplace from 'react-string-replace';
import SyntaxHighlighter from 'react-syntax-highlighter';
import remarkGfm from 'remark-gfm';
import { visitParents } from 'unist-util-visit-parents';

import styles from './index.less';

const reg = /(#{2}\d+\${2})/g;

const getChunkIndex = (match: string) => Number(match.slice(2, -2));
// TODO: The display of the table is inconsistent with the display previously placed in the MessageItem.
const MarkdownContent = ({
  reference,
  clickDocumentButton,
  content,
}: {
  content: string;
  reference: IReference;
  clickDocumentButton: (documentId: string, chunk: IChunk) => void;
}) => {
  const fileThumbnails = useSelectFileThumbnails();

  const handleDocumentButtonClick = useCallback(
    (documentId: string, chunk: IChunk, isPdf: boolean) => () => {
      if (!isPdf) {
        return;
      }
      clickDocumentButton(documentId, chunk);
    },
    [clickDocumentButton],
  );

  const rehypeWrapReference = () => {
    return function wrapTextTransform(tree: any) {
      visitParents(tree, 'text', (node, ancestors) => {
        const latestAncestor = ancestors.at(-1);
        if (
          latestAncestor.tagName !== 'custom-typography' &&
          latestAncestor.tagName !== 'code'
        ) {
          node.type = 'element';
          node.tagName = 'custom-typography';
          node.properties = {};
          node.children = [{ type: 'text', value: node.value }];
        }
      });
    };
  };

  const getPopoverContent = useCallback(
    (chunkIndex: number) => {
      const chunks = reference?.chunks ?? [];
      const chunkItem = chunks[chunkIndex];
      const document = reference?.doc_aggs.find(
        (x) => x?.doc_id === chunkItem?.doc_id,
      );
      const documentId = document?.doc_id;
      const fileThumbnail = documentId ? fileThumbnails[documentId] : '';
      const fileExtension = documentId ? getExtension(document?.doc_name) : '';
      const imageId = chunkItem?.img_id;
      return (
        <Flex
          key={chunkItem?.chunk_id}
          gap={10}
          className={styles.referencePopoverWrapper}
        >
          {imageId && (
            <Popover
              placement="left"
              content={
                <Image
                  id={imageId}
                  className={styles.referenceImagePreview}
                ></Image>
              }
            >
              <Image
                id={imageId}
                className={styles.referenceChunkImage}
              ></Image>
            </Popover>
          )}
          <Space direction={'vertical'}>
            <div
              dangerouslySetInnerHTML={{
                __html: chunkItem?.content_with_weight,
              }}
              className={styles.chunkContentText}
            ></div>
            {documentId && (
              <Flex gap={'small'}>
                {fileThumbnail ? (
                  <img src={fileThumbnail} alt="" />
                ) : (
                  <SvgIcon
                    name={`file-icon/${fileExtension}`}
                    width={24}
                  ></SvgIcon>
                )}
                <Button
                  type="link"
                  className={styles.documentLink}
                  onClick={handleDocumentButtonClick(
                    documentId,
                    chunkItem,
                    fileExtension === 'pdf',
                  )}
                >
                  {document?.doc_name}
                </Button>
              </Flex>
            )}
          </Space>
        </Flex>
      );
    },
    [reference, fileThumbnails, handleDocumentButtonClick],
  );

  const renderReference = useCallback(
    (text: string) => {
      return reactStringReplace(text, reg, (match, i) => {
        const chunkIndex = getChunkIndex(match);
        return (
          <Popover content={getPopoverContent(chunkIndex)}>
            <InfoCircleOutlined key={i} className={styles.referenceIcon} />
          </Popover>
        );
      });
    },
    [getPopoverContent],
  );

  return (
    <Markdown
      rehypePlugins={[rehypeWrapReference]}
      remarkPlugins={[remarkGfm]}
      components={
        {
          'custom-typography': ({ children }: { children: string }) =>
            renderReference(children),
          code(props: any) {
            const { children, className, node, ...rest } = props;
            const match = /language-(\w+)/.exec(className || '');
            return match ? (
              <SyntaxHighlighter {...rest} PreTag="div" language={match[1]}>
                {String(children).replace(/\n$/, '')}
              </SyntaxHighlighter>
            ) : (
              <code {...rest} className={className}>
                {children}
              </code>
            );
          },
        } as any
      }
    >
      {content}
    </Markdown>
  );
};

export default MarkdownContent;
@@ -1,7 +1,14 @@
import { IConversation, IDialog, Message } from '@/interfaces/database/chat';
import {
  IConversation,
  IDialog,
  IStats,
  IToken,
  Message,
} from '@/interfaces/database/chat';
import i18n from '@/locales/config';
import chatService from '@/services/chatService';
import { message } from 'antd';
import omit from 'lodash/omit';
import { DvaModel } from 'umi';
import { v4 as uuid } from 'uuid';
import { IClientConversation, IMessage } from './interface';
@@ -13,6 +20,8 @@ export interface ChatModelState {
  currentDialog: IDialog;
  conversationList: IConversation[];
  currentConversation: IClientConversation;
  tokenList: IToken[];
  stats: IStats;
}

const model: DvaModel<ChatModelState> = {
@@ -23,6 +32,8 @@ const model: DvaModel<ChatModelState> = {
    currentDialog: <IDialog>{},
    conversationList: [],
    currentConversation: {} as IClientConversation,
    tokenList: [],
    stats: {} as IStats,
  },
  reducers: {
    save(state, action) {
@@ -60,6 +71,18 @@ const model: DvaModel<ChatModelState> = {
        currentConversation: { ...payload, message: messageList },
      };
    },
    setTokenList(state, { payload }) {
      return {
        ...state,
        tokenList: payload,
      };
    },
    setStats(state, { payload }) {
      return {
        ...state,
        stats: payload,
      };
    },
  },

  effects: {
@@ -135,7 +158,7 @@ const model: DvaModel<ChatModelState> = {
      }
      return data;
    },
    *completeConversation({ payload }, { call, put }) {
    *completeConversation({ payload }, { call }) {
      const { data } = yield call(chatService.completeConversation, payload);
      // if (data.retcode === 0) {
      //   yield put({
@@ -160,6 +183,79 @@ const model: DvaModel<ChatModelState> = {
      }
      return data.retcode;
    },
    *createToken({ payload }, { call, put }) {
      const { data } = yield call(chatService.createToken, payload);
      if (data.retcode === 0) {
        yield put({
          type: 'listToken',
          payload: payload,
        });
        message.success(i18n.t('message.created'));
      }
      return data;
    },
    *listToken({ payload }, { call, put }) {
      const { data } = yield call(chatService.listToken, payload);
      if (data.retcode === 0) {
        yield put({
          type: 'setTokenList',
          payload: data.data,
        });
      }
      return data;
    },
    *removeToken({ payload }, { call, put }) {
      const { data } = yield call(
        chatService.removeToken,
        omit(payload, ['dialogId']),
      );
      if (data.retcode === 0) {
        message.success(i18n.t('message.deleted'));
        yield put({
          type: 'listToken',
          payload: { dialog_id: payload.dialogId },
        });
      }
      return data.retcode;
    },
    *getStats({ payload }, { call, put }) {
      const { data } = yield call(chatService.getStats, payload);
      if (data.retcode === 0) {
        yield put({
          type: 'setStats',
          payload: data.data,
        });
      }
      return data.retcode;
    },
    *createExternalConversation({ payload }, { call, put }) {
      const { data } = yield call(
        chatService.createExternalConversation,
        payload,
      );
      // if (data.retcode === 0) {
      //   yield put({
      //     type: 'getExternalConversation',
      //     payload: data.data.id,
      //   });
      // }
      return data;
    },
    *getExternalConversation({ payload }, { call }) {
      const { data } = yield call(
        chatService.getExternalConversation,
        null,
        payload,
      );
      return data;
    },
    *completeExternalConversation({ payload }, { call }) {
      const { data } = yield call(
        chatService.completeExternalConversation,
        payload,
      );
      return data.retcode;
    },
  },
};
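These effects live in the chatModel dva namespace, so UI hooks reach them through dispatch. A hedged sketch of how a hook such as useFetchStats presumably triggers getStats (the payload keys are snake_cased by the request interceptor shown at the end of this diff; the dates are invented):

dispatch({
  type: 'chatModel/getStats',
  payload: { fromDate: '2024-05-01', toDate: '2024-05-08' },
});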
50
web/src/pages/chat/share/index.less
Normal file
@@ -0,0 +1,50 @@
.chatWrapper {
  height: 100%;
}

.chatContainer {
  padding: 10px;
  box-sizing: border-box;
  height: 100%;
  .messageContainer {
    overflow-y: auto;
    padding-right: 6px;
  }
}

.messageItem {
  padding: 24px 0;
  .messageItemSection {
    display: inline-block;
  }
  .messageItemSectionLeft {
    width: 70%;
  }
  .messageItemSectionRight {
    width: 40%;
  }
  .messageItemContent {
    display: inline-flex;
    gap: 20px;
  }
  .messageItemContentReverse {
    flex-direction: row-reverse;
  }
  .messageText {
    .chunkText();
    padding: 0 14px;
    background-color: rgba(249, 250, 251, 1);
    word-break: break-all;
  }
  .messageEmpty {
    width: 300px;
  }
}

.messageItemLeft {
  text-align: left;
}

.messageItemRight {
  text-align: right;
}
53
web/src/pages/chat/share/index.tsx
Normal file
@@ -0,0 +1,53 @@
import { useEffect } from 'react';
import {
  useCreateSharedConversationOnMount,
  useSelectCurrentSharedConversation,
  useSendSharedMessage,
} from '../shared-hooks';
import ChatContainer from './large';

import styles from './index.less';

const SharedChat = () => {
  const { conversationId } = useCreateSharedConversationOnMount();
  const {
    currentConversation,
    addNewestConversation,
    removeLatestMessage,
    ref,
    loading,
    setCurrentConversation,
  } = useSelectCurrentSharedConversation(conversationId);

  const {
    handlePressEnter,
    handleInputChange,
    value,
    loading: sendLoading,
  } = useSendSharedMessage(
    currentConversation,
    addNewestConversation,
    removeLatestMessage,
    setCurrentConversation,
  );

  useEffect(() => {
    console.info(location.href);
  }, []);

  return (
    <div className={styles.chatWrapper}>
      <ChatContainer
        value={value}
        handleInputChange={handleInputChange}
        handlePressEnter={handlePressEnter}
        loading={loading}
        sendLoading={sendLoading}
        conversation={currentConversation}
        ref={ref}
      ></ChatContainer>
    </div>
  );
};

export default SharedChat;
122
web/src/pages/chat/share/large.tsx
Normal file
@@ -0,0 +1,122 @@
import { ReactComponent as AssistantIcon } from '@/assets/svg/assistant.svg';
import { MessageType } from '@/constants/chat';
import { useTranslate } from '@/hooks/commonHooks';
import { Message } from '@/interfaces/database/chat';
import { Avatar, Button, Flex, Input, Skeleton, Spin } from 'antd';
import classNames from 'classnames';
import { useSelectConversationLoading } from '../hooks';

import HightLightMarkdown from '@/components/highlight-markdown';
import React, { ChangeEventHandler, forwardRef } from 'react';
import { IClientConversation } from '../interface';
import styles from './index.less';

const MessageItem = ({ item }: { item: Message }) => {
  const isAssistant = item.role === MessageType.Assistant;

  return (
    <div
      className={classNames(styles.messageItem, {
        [styles.messageItemLeft]: item.role === MessageType.Assistant,
        [styles.messageItemRight]: item.role === MessageType.User,
      })}
    >
      <section
        className={classNames(styles.messageItemSection, {
          [styles.messageItemSectionLeft]: item.role === MessageType.Assistant,
          [styles.messageItemSectionRight]: item.role === MessageType.User,
        })}
      >
        <div
          className={classNames(styles.messageItemContent, {
            [styles.messageItemContentReverse]: item.role === MessageType.User,
          })}
        >
          {item.role === MessageType.User ? (
            <Avatar
              size={40}
              src={
                'https://zos.alipayobjects.com/rmsportal/jkjgkEfvpUPVyRjUImniVslZfWPnJuuZ.png'
              }
            />
          ) : (
            <AssistantIcon></AssistantIcon>
          )}
          <Flex vertical gap={8} flex={1}>
            <b>{isAssistant ? '' : 'You'}</b>
            <div className={styles.messageText}>
              {item.content !== '' ? (
                <HightLightMarkdown>{item.content}</HightLightMarkdown>
              ) : (
                <Skeleton active className={styles.messageEmpty} />
              )}
            </div>
          </Flex>
        </div>
      </section>
    </div>
  );
};

interface IProps {
  handlePressEnter(): void;
  handleInputChange: ChangeEventHandler<HTMLInputElement>;
  value: string;
  loading: boolean;
  sendLoading: boolean;
  conversation: IClientConversation;
  ref: React.LegacyRef<any>;
}

const ChatContainer = (
  {
    handlePressEnter,
    handleInputChange,
    value,
    loading: sendLoading,
    conversation,
  }: IProps,
  ref: React.LegacyRef<any>,
) => {
  const loading = useSelectConversationLoading();
  const { t } = useTranslate('chat');

  return (
    <>
      <Flex flex={1} className={styles.chatContainer} vertical>
        <Flex flex={1} vertical className={styles.messageContainer}>
          <div>
            <Spin spinning={loading}>
              {conversation?.message?.map((message) => {
                return (
                  <MessageItem key={message.id} item={message}></MessageItem>
                );
              })}
            </Spin>
          </div>
          <div ref={ref} />
        </Flex>
        <Input
          size="large"
          placeholder={t('sendPlaceholder')}
          value={value}
          // disabled={disabled}
          suffix={
            <Button
              type="primary"
              onClick={handlePressEnter}
              loading={sendLoading}
              // disabled={disabled}
            >
              {t('send')}
            </Button>
          }
          onPressEnter={handlePressEnter}
          onChange={handleInputChange}
        />
      </Flex>
    </>
  );
};

export default forwardRef(ChatContainer);
192
web/src/pages/chat/shared-hooks.ts
Normal file
@@ -0,0 +1,192 @@
import { MessageType } from '@/constants/chat';
import {
  useCompleteSharedConversation,
  useCreateSharedConversation,
  useFetchSharedConversation,
} from '@/hooks/chatHooks';
import { useOneNamespaceEffectsLoading } from '@/hooks/storeHooks';
import omit from 'lodash/omit';
import {
  Dispatch,
  SetStateAction,
  useCallback,
  useEffect,
  useState,
} from 'react';
import { useSearchParams } from 'umi';
import { v4 as uuid } from 'uuid';
import { useHandleMessageInputChange, useScrollToBottom } from './hooks';
import { IClientConversation, IMessage } from './interface';

export const useCreateSharedConversationOnMount = () => {
  const [currentQueryParameters] = useSearchParams();
  const [conversationId, setConversationId] = useState('');

  const createConversation = useCreateSharedConversation();
  const sharedId = currentQueryParameters.get('shared_id');
  const userId = currentQueryParameters.get('user_id');

  const setConversation = useCallback(async () => {
    console.info(sharedId);
    if (sharedId) {
      const data = await createConversation(userId ?? undefined);
      const id = data.data?.id;
      if (id) {
        setConversationId(id);
      }
    }
  }, [createConversation, sharedId, userId]);

  useEffect(() => {
    setConversation();
  }, [setConversation]);

  return { conversationId };
};

export const useSelectCurrentSharedConversation = (conversationId: string) => {
  const [currentConversation, setCurrentConversation] =
    useState<IClientConversation>({} as IClientConversation);
  const fetchConversation = useFetchSharedConversation();
  const loading = useOneNamespaceEffectsLoading('chatModel', [
    'getExternalConversation',
  ]);

  const ref = useScrollToBottom(currentConversation);

  const addNewestConversation = useCallback((message: string) => {
    setCurrentConversation((pre) => {
      return {
        ...pre,
        message: [
          ...(pre.message ?? []),
          {
            role: MessageType.User,
            content: message,
            id: uuid(),
          } as IMessage,
          {
            role: MessageType.Assistant,
            content: '',
            id: uuid(),
            reference: [],
          } as IMessage,
        ],
      };
    });
  }, []);

  const removeLatestMessage = useCallback(() => {
    setCurrentConversation((pre) => {
      const nextMessages = pre.message.slice(0, -2);
      return {
        ...pre,
        message: nextMessages,
      };
    });
  }, []);

  const fetchConversationOnMount = useCallback(async () => {
    if (conversationId) {
      const data = await fetchConversation(conversationId);
      if (data.retcode === 0) {
        setCurrentConversation(data.data);
      }
    }
  }, [conversationId, fetchConversation]);

  useEffect(() => {
    fetchConversationOnMount();
  }, [fetchConversationOnMount]);

  return {
    currentConversation,
    addNewestConversation,
    removeLatestMessage,
    loading,
    ref,
    setCurrentConversation,
  };
};

export const useSendSharedMessage = (
  conversation: IClientConversation,
  addNewestConversation: (message: string) => void,
  removeLatestMessage: () => void,
  setCurrentConversation: Dispatch<SetStateAction<IClientConversation>>,
) => {
  const conversationId = conversation.id;
  const loading = useOneNamespaceEffectsLoading('chatModel', [
    'completeExternalConversation',
  ]);
  const setConversation = useCreateSharedConversation();
  const { handleInputChange, value, setValue } = useHandleMessageInputChange();

  const fetchConversation = useFetchSharedConversation();
  const completeConversation = useCompleteSharedConversation();

  const sendMessage = useCallback(
    async (message: string, id?: string) => {
      const retcode = await completeConversation({
        conversation_id: id ?? conversationId,
        messages: [
          ...(conversation?.message ?? []).map((x: IMessage) => omit(x, 'id')),
          {
            role: MessageType.User,
            content: message,
          },
        ],
      });

      if (retcode === 0) {
        const data = await fetchConversation(conversationId);
        if (data.retcode === 0) {
          setCurrentConversation(data.data);
        }
      } else {
        // cancel loading
        setValue(message);
        removeLatestMessage();
      }
    },
    [
      conversationId,
      conversation?.message,
      fetchConversation,
      removeLatestMessage,
      setValue,
      completeConversation,
      setCurrentConversation,
    ],
  );

  const handleSendMessage = useCallback(
    async (message: string) => {
      if (conversationId !== '') {
        sendMessage(message);
      } else {
        const data = await setConversation('user id');
        if (data.retcode === 0) {
          const id = data.data.id;
          sendMessage(message, id);
        }
      }
    },
    [conversationId, setConversation, sendMessage],
  );

  const handlePressEnter = () => {
    if (!loading) {
      setValue('');
      addNewestConversation(value);
      handleSendMessage(value.trim());
    }
  };

  return {
    handlePressEnter,
    handleInputChange,
    value,
    loading,
  };
};
@@ -4,6 +4,11 @@ const routes = [
    component: '@/pages/login',
    layout: false,
  },
  {
    path: '/chat/share',
    component: '@/pages/chat/share',
    layout: false,
  },
  {
    path: '/',
    component: '@/layouts',
@@ -12,6 +12,13 @@ const {
  completeConversation,
  listConversation,
  removeConversation,
  createToken,
  listToken,
  removeToken,
  getStats,
  createExternalConversation,
  getExternalConversation,
  completeExternalConversation,
} = api;

const methods = {
@@ -51,6 +58,34 @@ const methods = {
    url: removeConversation,
    method: 'post',
  },
  createToken: {
    url: createToken,
    method: 'post',
  },
  listToken: {
    url: listToken,
    method: 'get',
  },
  removeToken: {
    url: removeToken,
    method: 'post',
  },
  getStats: {
    url: getStats,
    method: 'get',
  },
  createExternalConversation: {
    url: createExternalConversation,
    method: 'get',
  },
  getExternalConversation: {
    url: getExternalConversation,
    method: 'get',
  },
  completeExternalConversation: {
    url: completeExternalConversation,
    method: 'post',
  },
} as const;

const chatService = registerServer<keyof typeof methods>(methods, request);
@@ -3,7 +3,7 @@ let api_host = `/v1`;
export { api_host };

export default {
  // 用户
  // user
  login: `${api_host}/user/login`,
  logout: `${api_host}/user/logout`,
  register: `${api_host}/user/register`,
@@ -12,21 +12,21 @@ export default {
  tenant_info: `${api_host}/user/tenant_info`,
  set_tenant_info: `${api_host}/user/set_tenant_info`,

  // 模型管理
  // llm model
  factories_list: `${api_host}/llm/factories`,
  llm_list: `${api_host}/llm/list`,
  my_llm: `${api_host}/llm/my_llms`,
  set_api_key: `${api_host}/llm/set_api_key`,
  add_llm: `${api_host}/llm/add_llm`,

  //知识库管理
  // knowledge base
  kb_list: `${api_host}/kb/list`,
  create_kb: `${api_host}/kb/create`,
  update_kb: `${api_host}/kb/update`,
  rm_kb: `${api_host}/kb/rm`,
  get_kb_detail: `${api_host}/kb/detail`,

  // chunk管理
  // chunk
  chunk_list: `${api_host}/chunk/list`,
  create_chunk: `${api_host}/chunk/create`,
  set_chunk: `${api_host}/chunk/set`,
@@ -35,7 +35,7 @@ export default {
  rm_chunk: `${api_host}/chunk/rm`,
  retrieval_test: `${api_host}/chunk/retrieval_test`,

  // 文件管理
  // document
  upload: `${api_host}/document/upload`,
  get_document_list: `${api_host}/document/list`,
  document_change_status: `${api_host}/document/change_status`,
@@ -48,14 +48,22 @@ export default {
  get_document_file: `${api_host}/document/get`,
  document_upload: `${api_host}/document/upload`,

  // chat
  setDialog: `${api_host}/dialog/set`,
  getDialog: `${api_host}/dialog/get`,
  removeDialog: `${api_host}/dialog/rm`,
  listDialog: `${api_host}/dialog/list`,

  setConversation: `${api_host}/conversation/set`,
  getConversation: `${api_host}/conversation/get`,
  listConversation: `${api_host}/conversation/list`,
  removeConversation: `${api_host}/conversation/rm`,
  completeConversation: `${api_host}/conversation/completion`,
  // chat for external
  createToken: `${api_host}/api/new_token`,
  listToken: `${api_host}/api/token_list`,
  removeToken: `${api_host}/api/rm`,
  getStats: `${api_host}/api/stats`,
  createExternalConversation: `${api_host}/api/new_conversation`,
  getExternalConversation: `${api_host}/api/conversation`,
  completeExternalConversation: `${api_host}/api/completion`,
};
22
web/src/utils/commonUtil.ts
Normal file
@@ -0,0 +1,22 @@
import isObject from 'lodash/isObject';
import snakeCase from 'lodash/snakeCase';

export const isFormData = (data: unknown): data is FormData => {
  return data instanceof FormData;
};

export const convertTheKeysOfTheObjectToSnake = (data: unknown) => {
  if (isObject(data) && !isFormData(data)) {
    return Object.keys(data).reduce<Record<string, any>>((pre, cur) => {
      const value = (data as Record<string, any>)[cur];
      pre[isFormData(value) ? cur : snakeCase(cur)] = value;
      return pre;
    }, {});
  }
  return data;
};

export const getSearchValue = (key: string) => {
  const params = new URL(document.location as any).searchParams;
  return params.get(key);
};
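convertTheKeysOfTheObjectToSnake is what lets the hooks above send camelCase payloads to the snake_case backend API. Illustrative input and output (values invented):

convertTheKeysOfTheObjectToSnake({ dialogId: 'x', fromDate: '2024-05-01' });
// => { dialog_id: 'x', from_date: '2024-05-01' }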
@@ -1,20 +1,20 @@
import moment from 'moment';
import dayjs from 'dayjs';

export function today() {
  return formatDate(moment());
  return formatDate(dayjs());
}

export function lastDay() {
  return formatDate(moment().subtract(1, 'days'));
  return formatDate(dayjs().subtract(1, 'days'));
}

export function lastWeek() {
  return formatDate(moment().subtract(1, 'weeks'));
  return formatDate(dayjs().subtract(1, 'weeks'));
}

export function formatDate(date: any) {
  if (!date) {
    return '';
  }
  return moment(date).format('DD/MM/YYYY');
  return dayjs(date).format('DD/MM/YYYY');
}
@@ -8,16 +8,20 @@ const registerServer = <T extends string>(
) => {
  const server: Service<T> = {} as Service<T>;
  for (let key in opt) {
    server[key] = (params) => {
    server[key] = (params: any, urlAppendix?: string) => {
      let url = opt[key].url;
      if (urlAppendix) {
        url = url + '/' + urlAppendix;
      }
      if (opt[key].method === 'post' || opt[key].method === 'POST') {
        return request(opt[key].url, {
        return request(url, {
          method: opt[key].method,
          data: params,
        });
      }

      if (opt[key].method === 'get' || opt[key].method === 'GET') {
        return request.get(opt[key].url, {
        return request.get(url, {
          params,
        });
      }
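The new urlAppendix parameter lets a GET address a REST-style sub-path. For instance, model.ts above calls chatService.getExternalConversation(null, payload); with a hypothetical conversation id this becomes:

chatService.getExternalConversation(null, 'abc'); // GET /v1/api/conversation/abc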
@@ -4,6 +4,7 @@ import authorizationUtil from '@/utils/authorizationUtil';
import { message, notification } from 'antd';
import { history } from 'umi';
import { RequestMethod, extend } from 'umi-request';
import { convertTheKeysOfTheObjectToSnake, getSearchValue } from './commonUtil';

const ABORT_REQUEST_ERR_MESSAGE = 'The user aborted a request.'; // request manually aborted; the error message thrown by errorHandler

@@ -86,11 +87,19 @@ const request: RequestMethod = extend({
});

request.interceptors.request.use((url: string, options: any) => {
  const authorization = authorizationUtil.getAuthorization();
  const sharedId = getSearchValue('shared_id');
  const authorization = sharedId
    ? 'Bearer ' + sharedId
    : authorizationUtil.getAuthorization();
  const data = convertTheKeysOfTheObjectToSnake(options.data);
  const params = convertTheKeysOfTheObjectToSnake(options.params);

  return {
    url,
    options: {
      ...options,
      data,
      params,
      headers: {
        ...(options.skipToken ? undefined : { [Authorization]: authorization }),
        ...options.headers,