Update RetCode to common.constants (#10984)

### What problem does this PR solve?

1. Move `RetCode` to `common.constants` (it was previously imported from `api.settings`); a sketch of the relocated enum follows below
2. Decouple the admin and API modules; the repeated call-site change is sketched further below

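For reference, a minimal sketch of the relocated enum, assuming a plain `IntEnum`; only the members referenced in the hunks below are listed, and the numeric values are illustrative placeholders rather than values taken from this commit:

```python
from enum import IntEnum


class RetCode(IntEnum):
    """Shared return codes, importable without pulling in api.settings."""
    ARGUMENT_ERROR = 101        # placeholder values -- check the module for the real ones
    DATA_ERROR = 102
    OPERATING_ERROR = 103
    AUTHENTICATION_ERROR = 109
    NOT_FOUND = 404
    SERVER_ERROR = 500
```
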
### Type of change

- [x] Refactoring
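
The decoupling in point 2 shows up at every call site in the diff: handlers import `RetCode` from the shared module and drop the `settings.` prefix, so modules such as the admin app no longer need `api.settings` just for return codes. A sketch of the repeated pattern, using a hypothetical endpoint (the function name and message are illustrative; only the import and the `code=` argument mirror the hunks):

```python
from common.contants import RetCode          # was: from api.settings import RetCode
from api.utils.api_utils import get_result   # helper unchanged by this PR


def reject_missing_file():
    # Call sites now reference the enum directly instead of settings.RetCode.*
    return get_result(message="No file part!", code=RetCode.ARGUMENT_ERROR)
```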

---------

Signed-off-by: Jin Hai <haijin.chn@gmail.com>
Jin Hai authored on 2025-11-04 15:12:53 +08:00 · committed by GitHub
parent 16d2be623c
commit 03038c7d3d
25 changed files with 257 additions and 250 deletions

View File

@@ -19,7 +19,7 @@ import time
from typing import Any, cast
from api.db.services.canvas_service import UserCanvasService
from api.db.services.user_canvas_version import UserCanvasVersionService
from api.settings import RetCode
from common.contants import RetCode
from common.misc_utils import get_uuid
from api.utils.api_utils import get_data_error_result, get_error_data_result, get_json_result, token_required
from api.utils.api_utils import get_result

View File

@@ -17,13 +17,13 @@ import logging
from flask import request
from api import settings
from api.db import StatusEnum
from api.db.services.dialog_service import DialogService
from api.db.services.knowledgebase_service import KnowledgebaseService
from api.db.services.tenant_llm_service import TenantLLMService
from api.db.services.user_service import TenantService
from common.misc_utils import get_uuid
from common.contants import RetCode
from api.utils.api_utils import check_duplicate_ids, get_error_data_result, get_result, token_required
@@ -45,7 +45,7 @@ def create(tenant_id):
embd_ids = [TenantLLMService.split_model_name_and_factory(kb.embd_id)[0] for kb in kbs] # remove vendor suffix for comparison
embd_count = list(set(embd_ids))
if len(embd_count) > 1:
return get_result(message='Datasets use different embedding models."', code=settings.RetCode.AUTHENTICATION_ERROR)
return get_result(message='Datasets use different embedding models."', code=RetCode.AUTHENTICATION_ERROR)
req["kb_ids"] = ids
# llm
llm = req.get("llm")
@@ -167,7 +167,7 @@ def update(tenant_id, chat_id):
embd_ids = [TenantLLMService.split_model_name_and_factory(kb.embd_id)[0] for kb in kbs] # remove vendor suffix for comparison
embd_count = list(set(embd_ids))
if len(embd_count) > 1:
return get_result(message='Datasets use different embedding models."', code=settings.RetCode.AUTHENTICATION_ERROR)
return get_result(message='Datasets use different embedding models."', code=RetCode.AUTHENTICATION_ERROR)
req["kb_ids"] = ids
else:
req["kb_ids"] = []

View File

@@ -28,6 +28,7 @@ from api.db.services.file2document_service import File2DocumentService
from api.db.services.file_service import FileService
from api.db.services.knowledgebase_service import KnowledgebaseService
from api.db.services.user_service import TenantService
from common.contants import RetCode
from api.utils.api_utils import (
deep_merge,
get_error_argument_result,
@@ -484,7 +485,7 @@ def knowledge_graph(tenant_id, dataset_id):
return get_result(
data=False,
message='No authorization.',
code=settings.RetCode.AUTHENTICATION_ERROR
code=RetCode.AUTHENTICATION_ERROR
)
_, kb = KnowledgebaseService.get_by_id(dataset_id)
req = {
@@ -525,7 +526,7 @@ def delete_knowledge_graph(tenant_id, dataset_id):
return get_result(
data=False,
message='No authorization.',
code=settings.RetCode.AUTHENTICATION_ERROR
code=RetCode.AUTHENTICATION_ERROR
)
_, kb = KnowledgebaseService.get_by_id(dataset_id)
settings.docStoreConn.delete({"knowledge_graph_kwd": ["graph", "subgraph", "entity", "relation"]},

View File

@@ -25,7 +25,7 @@ from api import settings
from api.utils.api_utils import validate_request, build_error_result, apikey_required
from rag.app.tag import label_question
from api.db.services.dialog_service import meta_filter, convert_conditions
from common.contants import RetCode
@manager.route('/dify/retrieval', methods=['POST']) # noqa: F821
@apikey_required
@@ -129,7 +129,7 @@ def retrieval(tenant_id):
e, kb = KnowledgebaseService.get_by_id(kb_id)
if not e:
return build_error_result(message="Knowledgebase not found!", code=settings.RetCode.NOT_FOUND)
return build_error_result(message="Knowledgebase not found!", code=RetCode.NOT_FOUND)
embd_mdl = LLMBundle(kb.tenant_id, LLMType.EMBEDDING.value, llm_name=kb.embd_id)
print(metadata_condition)
@@ -179,7 +179,7 @@ def retrieval(tenant_id):
if str(e).find("not_found") > 0:
return build_error_result(
message='No chunk found! Check the chunk status please!',
code=settings.RetCode.NOT_FOUND
code=RetCode.NOT_FOUND
)
logging.exception(e)
return build_error_result(message=str(e), code=settings.RetCode.SERVER_ERROR)
return build_error_result(message=str(e), code=RetCode.SERVER_ERROR)

View File

@@ -43,6 +43,7 @@ from rag.nlp import rag_tokenizer, search
from rag.prompts.generator import cross_languages, keyword_extraction
from rag.utils.storage_factory import STORAGE_IMPL
from common.string_utils import remove_redundant_spaces
from common.contants import RetCode
MAXIMUM_OF_UPLOADING_FILES = 256
@@ -127,13 +128,13 @@ def upload(dataset_id, tenant_id):
description: Processing status.
"""
if "file" not in request.files:
return get_error_data_result(message="No file part!", code=settings.RetCode.ARGUMENT_ERROR)
return get_error_data_result(message="No file part!", code=RetCode.ARGUMENT_ERROR)
file_objs = request.files.getlist("file")
for file_obj in file_objs:
if file_obj.filename == "":
return get_result(message="No file selected!", code=settings.RetCode.ARGUMENT_ERROR)
return get_result(message="No file selected!", code=RetCode.ARGUMENT_ERROR)
if len(file_obj.filename.encode("utf-8")) > FILE_NAME_LEN_LIMIT:
return get_result(message=f"File name must be {FILE_NAME_LEN_LIMIT} bytes or less.", code=settings.RetCode.ARGUMENT_ERROR)
return get_result(message=f"File name must be {FILE_NAME_LEN_LIMIT} bytes or less.", code=RetCode.ARGUMENT_ERROR)
"""
# total size
total_size = 0
@@ -145,7 +146,7 @@ def upload(dataset_id, tenant_id):
if total_size > MAX_TOTAL_FILE_SIZE:
return get_result(
message=f"Total file size exceeds 10MB limit! ({total_size / (1024 * 1024):.2f} MB)",
code=settings.RetCode.ARGUMENT_ERROR,
code=RetCode.ARGUMENT_ERROR,
)
"""
e, kb = KnowledgebaseService.get_by_id(dataset_id)
@@ -153,7 +154,7 @@ def upload(dataset_id, tenant_id):
raise LookupError(f"Can't find the dataset with ID {dataset_id}!")
err, files = FileService.upload_document(kb, file_objs, tenant_id)
if err:
return get_result(message="\n".join(err), code=settings.RetCode.SERVER_ERROR)
return get_result(message="\n".join(err), code=RetCode.SERVER_ERROR)
# rename key's name
renamed_doc_list = []
for file in files:
@@ -253,12 +254,12 @@ def update_doc(tenant_id, dataset_id, document_id):
if len(req["name"].encode("utf-8")) > FILE_NAME_LEN_LIMIT:
return get_result(
message=f"File name must be {FILE_NAME_LEN_LIMIT} bytes or less.",
code=settings.RetCode.ARGUMENT_ERROR,
code=RetCode.ARGUMENT_ERROR,
)
if pathlib.Path(req["name"].lower()).suffix != pathlib.Path(doc.name.lower()).suffix:
return get_result(
message="The extension of file can't be changed",
code=settings.RetCode.ARGUMENT_ERROR,
code=RetCode.ARGUMENT_ERROR,
)
for d in DocumentService.query(name=req["name"], kb_id=doc.kb_id):
if d.name == req["name"]:
@@ -402,7 +403,7 @@ def download(tenant_id, dataset_id, document_id):
doc_id, doc_location = File2DocumentService.get_storage_address(doc_id=document_id) # minio address
file_stream = STORAGE_IMPL.get(doc_id, doc_location)
if not file_stream:
return construct_json_result(message="This file is empty.", code=settings.RetCode.DATA_ERROR)
return construct_json_result(message="This file is empty.", code=RetCode.DATA_ERROR)
file = BytesIO(file_stream)
# Use send_file with a proper filename and MIME type
return send_file(
@@ -676,10 +677,10 @@ def delete(tenant_id, dataset_id):
errors += str(e)
if not_found:
return get_result(message=f"Documents not found: {not_found}", code=settings.RetCode.DATA_ERROR)
return get_result(message=f"Documents not found: {not_found}", code=RetCode.DATA_ERROR)
if errors:
return get_result(message=errors, code=settings.RetCode.SERVER_ERROR)
return get_result(message=errors, code=RetCode.SERVER_ERROR)
if duplicate_messages:
if success_count > 0:
@@ -763,7 +764,7 @@ def parse(tenant_id, dataset_id):
queue_tasks(doc, bucket, name, 0)
success_count += 1
if not_found:
return get_result(message=f"Documents not found: {not_found}", code=settings.RetCode.DATA_ERROR)
return get_result(message=f"Documents not found: {not_found}", code=RetCode.DATA_ERROR)
if duplicate_messages:
if success_count > 0:
return get_result(
@@ -969,7 +970,7 @@ def list_chunks(tenant_id, dataset_id, document_id):
if req.get("id"):
chunk = settings.docStoreConn.get(req.get("id"), search.index_name(tenant_id), [dataset_id])
if not chunk:
return get_result(message=f"Chunk not found: {dataset_id}/{req.get('id')}", code=settings.RetCode.NOT_FOUND)
return get_result(message=f"Chunk not found: {dataset_id}/{req.get('id')}", code=RetCode.NOT_FOUND)
k = []
for n in chunk.keys():
if re.search(r"(_vec$|_sm_|_tks|_ltks)", n):
@@ -1418,7 +1419,7 @@ def retrieval_test(tenant_id):
if len(embd_nms) != 1:
return get_result(
message='Datasets use different embedding models."',
code=settings.RetCode.DATA_ERROR,
code=RetCode.DATA_ERROR,
)
if "question" not in req:
return get_error_data_result("`question` is required.")
@@ -1509,6 +1510,6 @@ def retrieval_test(tenant_id):
if str(e).find("not_found") > 0:
return get_result(
message="No chunk found! Check the chunk status please!",
code=settings.RetCode.DATA_ERROR,
code=RetCode.DATA_ERROR,
)
return server_error_response(e)

View File

@@ -41,7 +41,7 @@ from api.utils.api_utils import check_duplicate_ids, get_data_openai, get_error_
from rag.app.tag import label_question
from rag.prompts.template import load_prompt
from rag.prompts.generator import cross_languages, gen_meta_filter, keyword_extraction, chunks_format
from common.contants import RetCode
@manager.route("/chats/<chat_id>/sessions", methods=["POST"]) # noqa: F821
@token_required
@@ -959,7 +959,7 @@ def retrieval_test_embedded():
kb_ids = [kb_ids]
if not kb_ids:
return get_json_result(data=False, message='Please specify dataset firstly.',
code=settings.RetCode.DATA_ERROR)
code=RetCode.DATA_ERROR)
doc_ids = req.get("doc_ids", [])
similarity_threshold = float(req.get("similarity_threshold", 0.0))
vector_similarity_weight = float(req.get("vector_similarity_weight", 0.3))
@@ -996,7 +996,7 @@ def retrieval_test_embedded():
break
else:
return get_json_result(data=False, message="Only owner of knowledgebase authorized for this operation.",
code=settings.RetCode.OPERATING_ERROR)
code=RetCode.OPERATING_ERROR)
e, kb = KnowledgebaseService.get_by_id(kb_ids[0])
if not e:
@@ -1034,7 +1034,7 @@ def retrieval_test_embedded():
except Exception as e:
if str(e).find("not_found") > 0:
return get_json_result(data=False, message="No chunk found! Check the chunk status please!",
code=settings.RetCode.DATA_ERROR)
code=RetCode.DATA_ERROR)
return server_error_response(e)
@@ -1104,7 +1104,7 @@ def detail_share_embedded():
break
else:
return get_json_result(data=False, message="Has no permission for this operation.",
code=settings.RetCode.OPERATING_ERROR)
code=RetCode.OPERATING_ERROR)
search = SearchService.get_detail(search_id)
if not search: