Add dataset with table parser type for Infinity and answer questions in chat using SQL (#12541)

### What problem does this PR solve?

1) Create a dataset using the table parser for Infinity
2) Answer questions in chat using SQL (see the sketch after this list)
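
For readers skimming the diff, the end-to-end flow this enables looks roughly like the sketch below: a question over a table-parsed dataset is translated to SQL, executed, and the result becomes the chat answer. Everything in the sketch (`llm_to_sql`, `answer_with_sql`, the `employees` table) is a hypothetical illustration, not code from this PR:

```python
# Illustrative sketch of answering a chat question over a table-parsed
# dataset via SQL. All names here are hypothetical, not RAGFlow APIs.
import sqlite3

def llm_to_sql(question: str, schema: str) -> str:
    # In the real feature an LLM translates the question into SQL using
    # the table schema; here we hardcode one translation for the demo.
    assert "average salary" in question.lower()
    return "SELECT AVG(salary) FROM employees"

def answer_with_sql(question: str, conn: sqlite3.Connection) -> str:
    schema = "employees(name TEXT, salary REAL)"
    sql = llm_to_sql(question, schema)
    row = conn.execute(sql).fetchone()
    return f"{question} -> {row[0]}"

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE employees (name TEXT, salary REAL)")
conn.executemany("INSERT INTO employees VALUES (?, ?)",
                 [("ann", 100.0), ("bob", 80.0)])
print(answer_with_sql("What is the average salary?", conn))  # -> 90.0
```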

### Type of change

- [x] New Feature (non-breaking change which adds functionality)
Author: qinling0210
Committed: 2026-01-19 19:35:14 +08:00 (via GitHub)
Parent: 05da2a5872
Commit: b40d639fdb
19 changed files with 1003 additions and 101 deletions

---

@@ -25,6 +25,7 @@ from api.utils.api_utils import get_data_error_result, get_json_result, get_requ
from common.misc_utils import get_uuid
from common.constants import RetCode
from api.apps import login_required, current_user
import logging
@manager.route('/set', methods=['POST']) # noqa: F821
@@ -69,6 +70,19 @@ async def set_dialog():
meta_data_filter = req.get("meta_data_filter", {})
prompt_config = req["prompt_config"]
# Set default parameters for dialogs that use knowledge retrieval
# Any dialog whose system prompt contains {knowledge} needs a "knowledge" parameter to enable retrieval
kb_ids = req.get("kb_ids", [])
parameters = prompt_config.get("parameters")
logging.debug(f"set_dialog: kb_ids={kb_ids}, parameters={parameters}, is_create={not is_create}")
# Check if parameters is missing, None, or empty list
if kb_ids and not parameters:
# Check if system prompt uses {knowledge} placeholder
if "{knowledge}" in prompt_config.get("system", ""):
# Set default parameters for any dataset with knowledge placeholder
prompt_config["parameters"] = [{"key": "knowledge", "optional": False}]
logging.debug(f"Set default parameters for datasets with knowledge placeholder: {kb_ids}")
if not is_create:
# only for chat updating
if not req.get("kb_ids", []) and not prompt_config.get("tavily_api_key") and "{knowledge}" in prompt_config.get("system", ""):

---

@@ -295,12 +295,19 @@ async def rm():
File.name == kbs[0].name,
]
)
# Delete the table BEFORE deleting the database record
for kb in kbs:
try:
settings.docStoreConn.delete({"kb_id": kb.id}, search.index_name(kb.tenant_id), kb.id)
settings.docStoreConn.delete_idx(search.index_name(kb.tenant_id), kb.id)
logging.info(f"Dropped index for dataset {kb.id}")
except Exception as e:
logging.error(f"Failed to drop index for dataset {kb.id}: {e}")
if not KnowledgebaseService.delete_by_id(req["kb_id"]):
return get_data_error_result(
message="Database error (Knowledgebase removal)!")
for kb in kbs:
settings.docStoreConn.delete({"kb_id": kb.id}, search.index_name(kb.tenant_id), kb.id)
settings.docStoreConn.delete_idx(search.index_name(kb.tenant_id), kb.id)
if hasattr(settings.STORAGE_IMPL, 'remove_bucket'):
settings.STORAGE_IMPL.remove_bucket(kb.id)
return get_json_result(data=True)
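
The ordering here matters: the doc-store index is dropped before the database record is removed, so a failed drop is logged while the record still exists and the operation can be retried. A minimal sketch of that shape, with `FakeStore` and the callback as stand-ins for `settings.docStoreConn` and `KnowledgebaseService.delete_by_id`:

```python
import logging
from types import SimpleNamespace

def remove_dataset(kb, doc_store, delete_kb_record) -> bool:
    """Drop doc-store state first, then the DB record (mirrors the diff)."""
    try:
        doc_store.delete({"kb_id": kb.id}, f"idx_{kb.tenant_id}", kb.id)
        doc_store.delete_idx(f"idx_{kb.tenant_id}", kb.id)
    except Exception as e:
        # Non-fatal, as in the diff: log and still delete the DB record.
        logging.error("Failed to drop index for dataset %s: %s", kb.id, e)
    return delete_kb_record(kb.id)

# Tiny in-memory stand-ins to exercise the flow.
class FakeStore:
    def delete(self, cond, idxnm, kb_id): print("delete rows", cond)
    def delete_idx(self, idxnm, kb_id): print("drop index", idxnm)

kb = SimpleNamespace(id="kb1", tenant_id="t1")
print(remove_dataset(kb, FakeStore(), lambda kb_id: True))  # -> True
```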

---

@@ -233,6 +233,15 @@ async def delete(tenant_id):
File2DocumentService.delete_by_document_id(doc.id)
FileService.filter_delete(
[File.source_type == FileSource.KNOWLEDGEBASE, File.type == "folder", File.name == kb.name])
# Drop index for this dataset
try:
from rag.nlp import search
idxnm = search.index_name(kb.tenant_id)
settings.docStoreConn.delete_idx(idxnm, kb_id)
except Exception as e:
logging.warning(f"Failed to drop index for dataset {kb_id}: {e}")
if not KnowledgebaseService.delete_by_id(kb_id):
errors.append(f"Delete dataset error for {kb_id}")
continue
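
The guard here repeats the pattern from `rm()` above. If the duplication ever becomes a maintenance concern, a shared best-effort helper along these lines would cover both call sites (hypothetical, not part of this PR):

```python
import logging

def drop_index_quietly(doc_store, idxnm: str, kb_id: str) -> bool:
    """Best-effort index drop: never raises, returns a success flag."""
    try:
        doc_store.delete_idx(idxnm, kb_id)
        return True
    except Exception as e:
        logging.warning("Failed to drop index for dataset %s: %s", kb_id, e)
        return False
```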