Mirror of https://github.com/infiniflow/ragflow.git (synced 2025-12-08 20:42:30 +08:00)
Migrate infinity at startup (#3858)
### What problem does this PR solve?

Migrate infinity at startup #3809
https://github.com/infiniflow/infinity/issues/2321

### Type of change

- [x] New Feature (non-breaking change which adds functionality)
@@ -25,11 +25,11 @@ from rag.utils.doc_store_conn import (
 logger = logging.getLogger('ragflow.infinity_conn')
 
 
-def equivalent_condition_to_str(condition: dict) -> str:
+def equivalent_condition_to_str(condition: dict) -> str|None:
     assert "_id" not in condition
     cond = list()
     for k, v in condition.items():
-        if not isinstance(k, str) or not v:
+        if not isinstance(k, str) or k in ["kb_id"] or not v:
             continue
         if isinstance(v, list):
             inCond = list()
@@ -46,7 +46,7 @@ def equivalent_condition_to_str(condition: dict) -> str:
             cond.append(f"{k}='{v}'")
         else:
             cond.append(f"{k}={str(v)}")
-    return " AND ".join(cond)
+    return " AND ".join(cond) if cond else None
 
 
 def concat_dataframes(df_list: list[pl.DataFrame], selectFields: list[str]) -> pl.DataFrame:
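The return-type change matters downstream: an empty condition used to produce `""`, which callers then compared with `len(filter_cond) != 0`; returning `None` lets them use plain truthiness instead. `kb_id` is skipped because the knowledge-base id is already encoded in the table name, so it is never a column filter. A minimal standalone sketch of the revised behavior (re-implemented for illustration; the list branch is a simplified guess at the elided `inCond` handling):

```python
# Sketch of the revised helper; mirrors the diff's logic for str/list/number values.
def equivalent_condition_to_str(condition: dict) -> str | None:
    cond = []
    for k, v in condition.items():
        if not isinstance(k, str) or k in ["kb_id"] or not v:
            continue  # kb_id lives in the table name, not in a column
        if isinstance(v, list):
            quoted = ", ".join(f"'{x}'" if isinstance(x, str) else str(x) for x in v)
            cond.append(f"{k} IN ({quoted})")
        elif isinstance(v, str):
            cond.append(f"{k}='{v}'")
        else:
            cond.append(f"{k}={v}")
    return " AND ".join(cond) if cond else None

# An empty or kb_id-only condition now yields None rather than "",
# so callers can simply write `if filter_cond:`.
assert equivalent_condition_to_str({"kb_id": "kb1"}) is None
assert equivalent_condition_to_str({"doc_id": "d1", "kb_id": "kb1"}) == "doc_id='d1'"
```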
@@ -75,10 +75,12 @@ class InfinityConnection(DocStoreConnection):
                 connPool = ConnectionPool(infinity_uri)
                 inf_conn = connPool.get_conn()
                 res = inf_conn.show_current_node()
-                connPool.release_conn(inf_conn)
-                self.connPool = connPool
                 if res.error_code == ErrorCode.OK and res.server_status=="started":
+                    self._migrate_db(inf_conn)
+                    self.connPool = connPool
+                    connPool.release_conn(inf_conn)
                     break
+                connPool.release_conn(inf_conn)
                 logger.warn(f"Infinity status: {res.server_status}. Waiting Infinity {infinity_uri} to be healthy.")
                 time.sleep(5)
             except Exception as e:
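The ordering in this health-check loop is deliberate: the migration runs on the same connection that just reported a healthy `started` status, and `self.connPool` is only assigned after `_migrate_db` returns, so nothing can use the pool against an un-migrated schema. A hedged sketch of the pattern (the `make_pool`/`migrate` parameters, the retry bound, and the numeric `OK` value are assumptions of this sketch, not the module's API):

```python
import logging
import time

logger = logging.getLogger("ragflow.infinity_conn")

OK = 0  # assumed numeric value of ErrorCode.OK, for this sketch only


def wait_healthy_then_migrate(make_pool, migrate, uri: str, max_attempts: int = 24):
    """Retry until the server reports 'started', migrate, then publish the pool."""
    for _ in range(max_attempts):
        try:
            pool = make_pool(uri)
            conn = pool.get_conn()
            res = conn.show_current_node()
            if res.error_code == OK and res.server_status == "started":
                migrate(conn)            # migrate on the healthy connection first
                pool.release_conn(conn)
                return pool              # pool is published only after migration
            pool.release_conn(conn)
            logger.warning("Infinity status: %s. Waiting %s to be healthy.",
                           res.server_status, uri)
        except Exception:
            logger.exception("Infinity connection attempt failed; retrying.")
        time.sleep(5)
    raise Exception(f"Infinity {uri} did not become healthy in time.")
```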
@@ -90,6 +92,41 @@ class InfinityConnection(DocStoreConnection):
             raise Exception(msg)
         logger.info(f"Infinity {infinity_uri} is healthy.")
 
+    def _migrate_db(self, inf_conn):
+        inf_db = inf_conn.create_database(self.dbName, ConflictType.Ignore)
+        fp_mapping = os.path.join(
+            get_project_base_directory(), "conf", "infinity_mapping.json"
+        )
+        if not os.path.exists(fp_mapping):
+            raise Exception(f"Mapping file not found at {fp_mapping}")
+        schema = json.load(open(fp_mapping))
+        table_names = inf_db.list_tables().table_names
+        for table_name in table_names:
+            inf_table = inf_db.get_table(table_name)
+            index_names = inf_table.list_indexes().index_names
+            if "q_vec_idx" not in index_names:
+                # Skip tables not created by me
+                continue
+            column_names = inf_table.show_columns()["name"]
+            column_names = set(column_names)
+            for field_name, field_info in schema.items():
+                if field_name in column_names:
+                    continue
+                res = inf_table.add_columns({field_name: field_info})
+                assert res.error_code == infinity.ErrorCode.OK
+                logger.info(
+                    f"INFINITY added following column to table {table_name}: {field_name} {field_info}"
+                )
+                if field_info["type"] != "varchar" or "analyzer" not in field_info:
+                    continue
+                inf_table.create_index(
+                    f"text_idx_{field_name}",
+                    IndexInfo(
+                        field_name, IndexType.FullText, {"ANALYZER": field_info["analyzer"]}
+                    ),
+                    ConflictType.Ignore,
+                )
+
     """
     Database operations
     """
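`_migrate_db` treats `conf/infinity_mapping.json` as the source of truth and brings every RAGFlow-owned table (identified by its `q_vec_idx` index) up to date by adding whatever columns the schema has gained since the table was created. The core diffing step is plain set arithmetic over column names; a sketch with a hypothetical two-field mapping excerpt (the real file ships with the repo and is larger):

```python
import json

# Hypothetical excerpt of conf/infinity_mapping.json: each field spec carries
# a type, a default, and optionally an analyzer for full-text fields.
schema = json.loads("""
{
  "doc_id":       {"type": "varchar", "default": ""},
  "question_tks": {"type": "varchar", "default": "", "analyzer": "standard"}
}
""")

existing_columns = {"doc_id"}  # pretend this table predates question_tks

# Columns present in the schema but missing from the table get added;
# varchar fields that declare an analyzer also get a full-text index.
for field_name, field_info in schema.items():
    if field_name in existing_columns:
        continue
    print(f"ADD COLUMN {field_name} {field_info}")
    if field_info["type"] == "varchar" and "analyzer" in field_info:
        print(f"CREATE FULLTEXT INDEX text_idx_{field_name} "
              f"ANALYZER={field_info['analyzer']}")
```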
@@ -148,20 +185,16 @@ class InfinityConnection(DocStoreConnection):
                 ),
                 ConflictType.Ignore,
             )
-            text_suffix = ["_tks", "_ltks", "_kwd"]
             for field_name, field_info in schema.items():
-                if field_info["type"] != "varchar":
+                if field_info["type"] != "varchar" or "analyzer" not in field_info:
                     continue
-                for suffix in text_suffix:
-                    if field_name.endswith(suffix):
-                        inf_table.create_index(
-                            f"text_idx_{field_name}",
-                            IndexInfo(
-                                field_name, IndexType.FullText, {"ANALYZER": "standard"}
-                            ),
-                            ConflictType.Ignore,
-                        )
-                        break
+                inf_table.create_index(
+                    f"text_idx_{field_name}",
+                    IndexInfo(
+                        field_name, IndexType.FullText, {"ANALYZER": field_info["analyzer"]}
+                    ),
+                    ConflictType.Ignore,
+                )
             self.connPool.release_conn(inf_conn)
             logger.info(
                 f"INFINITY created table {table_name}, vector size {vectorSize}"
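Index creation at table-creation time now follows the same rule as the migration path: a field gets a full-text index if and only if its mapping entry is a `varchar` that declares an `analyzer`, replacing the old heuristic of matching `_tks`/`_ltks`/`_kwd` name suffixes (which also hard-coded the `standard` analyzer). A small comparison sketch, using hypothetical field specs (`title_custom` and the `rag-coarse` analyzer name are invented for illustration):

```python
fields = {
    "content_ltks":  {"type": "varchar", "analyzer": "standard"},
    "important_kwd": {"type": "varchar"},  # suffix matches, but no analyzer declared
    "title_custom":  {"type": "varchar", "analyzer": "rag-coarse"},  # hypothetical
}

# Old rule: the field name's suffix decides, analyzer is always "standard".
old = [f for f in fields
       if fields[f]["type"] == "varchar"
       and f.endswith(("_tks", "_ltks", "_kwd"))]

# New rule: the mapping entry itself decides, and it names the analyzer.
new = [f for f in fields
       if fields[f]["type"] == "varchar" and "analyzer" in fields[f]]

print(old)  # ['content_ltks', 'important_kwd'] -- suffix-driven
print(new)  # ['content_ltks', 'title_custom'] -- schema-driven
```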
@@ -202,7 +235,7 @@ class InfinityConnection(DocStoreConnection):
         limit: int,
         indexNames: str | list[str],
         knowledgebaseIds: list[str],
-    ) -> list[dict] | pl.DataFrame:
+    ) -> tuple[pl.DataFrame, int]:
         """
         TODO: Infinity doesn't provide highlight
         """
@@ -217,21 +250,18 @@ class InfinityConnection(DocStoreConnection):
             selectFields.append("id")
 
         # Prepare expressions common to all tables
-        filter_cond = ""
+        filter_cond = None
         filter_fulltext = ""
         if condition:
             filter_cond = equivalent_condition_to_str(condition)
         for matchExpr in matchExprs:
             if isinstance(matchExpr, MatchTextExpr):
-                if len(filter_cond) != 0 and "filter" not in matchExpr.extra_options:
+                if filter_cond and "filter" not in matchExpr.extra_options:
                     matchExpr.extra_options.update({"filter": filter_cond})
                 fields = ",".join(matchExpr.fields)
-                filter_fulltext = (
-                    f"filter_fulltext('{fields}', '{matchExpr.matching_text}')"
-                )
-                if len(filter_cond) != 0:
+                filter_fulltext = f"filter_fulltext('{fields}', '{matchExpr.matching_text}')"
+                if filter_cond:
                     filter_fulltext = f"({filter_cond}) AND {filter_fulltext}"
                 logger.debug(f"filter_fulltext: {filter_fulltext}")
                 minimum_should_match = matchExpr.extra_options.get("minimum_should_match", 0.0)
                 if isinstance(minimum_should_match, float):
                     str_minimum_should_match = str(int(minimum_should_match * 100)) + "%"
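One detail worth spelling out: `minimum_should_match` arrives as a float fraction, but the full-text search option takes a percentage string, so the code converts 0.3 into "30%". A worked check of the conversion as written in the diff:

```python
# Fraction-to-percent conversion from the search path: 0.3 -> "30%".
minimum_should_match = 0.3
str_minimum_should_match = str(int(minimum_should_match * 100)) + "%"
assert str_minimum_should_match == "30%"
```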
@@ -239,12 +269,16 @@ class InfinityConnection(DocStoreConnection):
                 for k, v in matchExpr.extra_options.items():
                     if not isinstance(v, str):
                         matchExpr.extra_options[k] = str(v)
+                logger.debug(f"INFINITY search MatchTextExpr: {json.dumps(matchExpr.__dict__)}")
             elif isinstance(matchExpr, MatchDenseExpr):
-                if len(filter_cond) != 0 and "filter" not in matchExpr.extra_options:
+                if filter_cond and "filter" not in matchExpr.extra_options:
                     matchExpr.extra_options.update({"filter": filter_fulltext})
                 for k, v in matchExpr.extra_options.items():
                     if not isinstance(v, str):
                         matchExpr.extra_options[k] = str(v)
+                logger.debug(f"INFINITY search MatchDenseExpr: {json.dumps(matchExpr.__dict__)}")
+            elif isinstance(matchExpr, FusionExpr):
+                logger.debug(f"INFINITY search FusionExpr: {json.dumps(matchExpr.__dict__)}")
 
         order_by_expr_list = list()
         if orderBy.fields:
@@ -254,6 +288,7 @@ class InfinityConnection(DocStoreConnection):
             else:
                 order_by_expr_list.append((order_field[0], SortType.Desc))
 
+        total_hits_count = 0
         # Scatter search tables and gather the results
         for indexName in indexNames:
             for knowledgebaseId in knowledgebaseIds:
@@ -293,12 +328,14 @@ class InfinityConnection(DocStoreConnection):
                 if orderBy.fields:
                     builder.sort(order_by_expr_list)
                 builder.offset(offset).limit(limit)
-                kb_res = builder.to_pl()
+                kb_res, extra_result = builder.option({"total_hits_count": True}).to_pl()
+                if extra_result:
+                    total_hits_count += int(extra_result["total_hits_count"])
                 df_list.append(kb_res)
         self.connPool.release_conn(inf_conn)
         res = concat_dataframes(df_list, selectFields)
         logger.debug(f"INFINITY search tables: {str(table_list)}, result: {str(res)}")
-        return res
+        return res, total_hits_count
 
     def get(
         self, chunkId: str, indexName: str, knowledgebaseIds: list[str]
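Because each (index, knowledge base) pair lives in its own table, search scatters the query across tables and gathers both rows and counts: `offset`/`limit` apply per table, while the `total_hits_count` requested via the builder's `option` is summed so callers still learn the full match count. A hedged polars sketch of the gather step (the per-table result pairs are stubbed out here in place of the real builder calls):

```python
import polars as pl

# Pretend these are the (kb_res, extra_result) pairs returned per table
# by builder.option({"total_hits_count": True}).to_pl() in the diff.
per_table_results = [
    (pl.DataFrame({"id": ["a", "b"]}), {"total_hits_count": 120}),
    (pl.DataFrame({"id": ["c"]}),      {"total_hits_count": 45}),
]

total_hits_count = 0
df_list = []
for kb_res, extra_result in per_table_results:
    if extra_result:
        total_hits_count += int(extra_result["total_hits_count"])
    df_list.append(kb_res)

res = pl.concat(df_list)
print(res, total_hits_count)  # 3 gathered rows, 165 total hits across tables
```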
@@ -312,7 +349,7 @@ class InfinityConnection(DocStoreConnection):
             table_name = f"{indexName}_{knowledgebaseId}"
             table_list.append(table_name)
             table_instance = db_instance.get_table(table_name)
-            kb_res = table_instance.output(["*"]).filter(f"id = '{chunkId}'").to_pl()
+            kb_res, _ = table_instance.output(["*"]).filter(f"id = '{chunkId}'").to_pl()
             if len(kb_res) != 0 and kb_res.shape[0] > 0:
                 df_list.append(kb_res)
 
@@ -372,7 +409,7 @@ class InfinityConnection(DocStoreConnection):
         # logger.info(f"InfinityConnection.insert {json.dumps(documents)}")
         table_instance.insert(documents)
         self.connPool.release_conn(inf_conn)
-        logger.debug(f"inserted into {table_name} {str_ids}.")
+        logger.debug(f"INFINITY inserted into {table_name} {str_ids}.")
         return []
 
     def update(
@@ -398,6 +435,7 @@ class InfinityConnection(DocStoreConnection):
             elif k in ["page_num_int", "top_int"]:
                 assert isinstance(v, list)
                 newValue[k] = "_".join(f"{num:08x}" for num in v)
+        logger.debug(f"INFINITY update table {table_name}, filter {filter}, newValue {newValue}.")
         table_instance.update(filter, newValue)
         self.connPool.release_conn(inf_conn)
         return True
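The `page_num_int`/`top_int` handling packs a list of ints into a single sortable string: each number is rendered as eight hex digits and the pieces are joined with underscores, so a varchar column can round-trip the list. A worked encode/decode pair (the decoder is a hypothetical inverse for illustration; the diff only shows the encoder):

```python
# Encode: [1, 26] -> "00000001_0000001a", exactly as in the update() path.
nums = [1, 26]
encoded = "_".join(f"{num:08x}" for num in nums)
assert encoded == "00000001_0000001a"

# Decode (hypothetical inverse): parse each 8-hex-digit chunk back to an int.
decoded = [int(chunk, 16) for chunk in encoded.split("_")]
assert decoded == nums
```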
@@ -414,6 +452,7 @@ class InfinityConnection(DocStoreConnection):
                 f"Skipped deleting `{filter}` from table {table_name} since the table doesn't exist."
             )
             return 0
+        logger.debug(f"INFINITY delete table {table_name}, filter {filter}.")
         res = table_instance.delete(filter)
         self.connPool.release_conn(inf_conn)
         return res.deleted_rows
@@ -422,13 +461,19 @@ class InfinityConnection(DocStoreConnection):
     Helper functions for search result
     """
 
-    def getTotal(self, res):
+    def getTotal(self, res: tuple[pl.DataFrame, int] | pl.DataFrame) -> int:
+        if isinstance(res, tuple):
+            return res[1]
         return len(res)
 
-    def getChunkIds(self, res):
+    def getChunkIds(self, res: tuple[pl.DataFrame, int] | pl.DataFrame) -> list[str]:
+        if isinstance(res, tuple):
+            res = res[0]
         return list(res["id"])
 
-    def getFields(self, res, fields: list[str]) -> list[str, dict]:
+    def getFields(self, res: tuple[pl.DataFrame, int] | pl.DataFrame, fields: list[str]) -> list[str, dict]:
+        if isinstance(res, tuple):
+            res = res[0]
         res_fields = {}
         if not fields:
             return {}
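With search now returning `(DataFrame, total_hits_count)` while `get` still yields a bare DataFrame, every helper first normalizes its input; only `getTotal` actually uses the count. A compact sketch of the normalization idiom (function names here are sketch names, not the class's methods):

```python
import polars as pl

def get_total(res: tuple[pl.DataFrame, int] | pl.DataFrame) -> int:
    # Search results carry the server-side total; bare frames fall back to len().
    if isinstance(res, tuple):
        return res[1]
    return len(res)

def get_chunk_ids(res: tuple[pl.DataFrame, int] | pl.DataFrame) -> list[str]:
    if isinstance(res, tuple):
        res = res[0]  # the other helpers only ever need the frame itself
    return list(res["id"])

df = pl.DataFrame({"id": ["a", "b"]})
assert get_total((df, 99)) == 99   # paginated search: report the full count
assert get_total(df) == 2          # plain get(): just the rows at hand
assert get_chunk_ids((df, 99)) == ["a", "b"]
```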
@@ -469,7 +514,9 @@ class InfinityConnection(DocStoreConnection):
             res_fields[id] = m
         return res_fields
 
-    def getHighlight(self, res, keywords: list[str], fieldnm: str):
+    def getHighlight(self, res: tuple[pl.DataFrame, int] | pl.DataFrame, keywords: list[str], fieldnm: str):
+        if isinstance(res, tuple):
+            res = res[0]
         ans = {}
         num_rows = len(res)
         column_id = res["id"]
@@ -495,7 +542,7 @@ class InfinityConnection(DocStoreConnection):
             ans[id] = "...".join(txts)
         return ans
 
-    def getAggregation(self, res, fieldnm: str):
+    def getAggregation(self, res: tuple[pl.DataFrame, int] | pl.DataFrame, fieldnm: str):
         """
         TODO: Infinity doesn't provide aggregation
         """