Mirror of https://github.com/infiniflow/ragflow.git (synced 2025-12-08 20:42:30 +08:00)
Rename page_num_list, top_list, position_list (#3940)
### What problem does this PR solve?

Rename `page_num_list`, `top_list`, `position_list` to `page_num_int`, `top_int`, `position_int`.

### Type of change

- [x] Refactoring
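Beyond the rename, the change switches how the values are represented: the `*_list` fields held JSON-encoded strings, while the `*_int` fields hold native integer lists. A minimal before/after sketch (field names from the diff; the example values are invented):

```python
import json

# Before: positions serialized to JSON strings under *_list keys.
d_old = {
    "page_num_list": json.dumps([3]),
    "top_list": json.dumps([0]),
    "position_list": json.dumps([(3, 0, 960, 0, 720)]),
}

# After: plain Python lists under *_int keys; serialization is left
# to the document store (Elasticsearch or Infinity).
d_new = {
    "page_num_int": [3],
    "top_int": [0],
    "position_int": [(3, 0, 960, 0, 720)],
}
```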
rag/app/presentation.py

@@ -20,7 +20,6 @@ from rag.nlp import tokenize, is_english
 from rag.nlp import rag_tokenizer
 from deepdoc.parser import PdfParser, PptParser, PlainParser
 from PyPDF2 import PdfReader as pdf2_read
-import json


 class Ppt(PptParser):
@@ -109,9 +108,9 @@ def chunk(filename, binary=None, from_page=0, to_page=100000,
             d = copy.deepcopy(doc)
             pn += from_page
             d["image"] = img
-            d["page_num_list"] = json.dumps([pn + 1])
-            d["top_list"] = json.dumps([0])
-            d["position_list"] = json.dumps([(pn + 1, 0, img.size[0], 0, img.size[1])])
+            d["page_num_int"] = [pn + 1]
+            d["top_int"] = [0]
+            d["position_int"] = [(pn + 1, 0, img.size[0], 0, img.size[1])]
             tokenize(d, txt, eng)
             res.append(d)
         return res
@@ -125,10 +124,9 @@ def chunk(filename, binary=None, from_page=0, to_page=100000,
             pn += from_page
             if img:
                 d["image"] = img
-            d["page_num_list"] = json.dumps([pn + 1])
-            d["top_list"] = json.dumps([0])
-            d["position_list"] = json.dumps([
-                (pn + 1, 0, img.size[0] if img else 0, 0, img.size[1] if img else 0)])
+            d["page_num_int"] = [pn + 1]
+            d["top_int"] = [0]
+            d["position_int"] = [(pn + 1, 0, img.size[0] if img else 0, 0, img.size[1] if img else 0)]
             tokenize(d, txt, eng)
             res.append(d)
         return res
rag/nlp/__init__.py

@@ -22,7 +22,6 @@ from rag.utils import num_tokens_from_string
 from . import rag_tokenizer
 import re
 import copy
-import json
 import roman_numbers as r
 from word2number import w2n
 from cn2an import cn2an
@@ -311,16 +310,16 @@ def tokenize_table(tbls, doc, eng, batch_size=10):
 def add_positions(d, poss):
     if not poss:
         return
-    page_num_list = []
-    position_list = []
-    top_list = []
+    page_num_int = []
+    position_int = []
+    top_int = []
     for pn, left, right, top, bottom in poss:
-        page_num_list.append(int(pn + 1))
-        top_list.append(int(top))
-        position_list.append((int(pn + 1), int(left), int(right), int(top), int(bottom)))
-    d["page_num_list"] = json.dumps(page_num_list)
-    d["position_list"] = json.dumps(position_list)
-    d["top_list"] = json.dumps(top_list)
+        page_num_int.append(int(pn + 1))
+        top_int.append(int(top))
+        position_int.append((int(pn + 1), int(left), int(right), int(top), int(bottom)))
+    d["page_num_int"] = page_num_int
+    d["position_int"] = position_int
+    d["top_int"] = top_int


 def remove_contents_table(sections, eng=False):
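A brief usage sketch of the renamed `add_positions` (the `poss` tuples follow the `(page, left, right, top, bottom)` order unpacked in the loop above; the coordinates are invented):

```python
d = {}
poss = [(0, 12.0, 500.0, 40.0, 90.0),
        (0, 12.0, 500.0, 95.0, 140.0)]
add_positions(d, poss)

# d now carries plain int lists rather than JSON strings:
#   d["page_num_int"] == [1, 1]
#   d["top_int"]      == [40, 95]
#   d["position_int"] == [(1, 12, 500, 40, 90), (1, 12, 500, 95, 140)]
```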
rag/nlp/search.py

@@ -16,7 +16,6 @@

 import logging
 import re
-import json
 from dataclasses import dataclass

 from rag.utils import rmSpace
@@ -74,7 +73,7 @@ class Dealer:
         offset, limit = pg * ps, (pg + 1) * ps

         src = req.get("fields", ["docnm_kwd", "content_ltks", "kb_id", "img_id", "title_tks", "important_kwd",
-                                 "doc_id", "position_list", "knowledge_graph_kwd", "question_kwd", "question_tks",
+                                 "doc_id", "page_num_int", "top_int", "create_timestamp_flt", "knowledge_graph_kwd", "question_kwd", "question_tks",
                                  "available_int", "content_with_weight", "pagerank_fea"])
         kwds = set([])
@@ -82,6 +81,8 @@ class Dealer:
         q_vec = []
         if not qst:
             if req.get("sort"):
+                orderBy.asc("page_num_int")
+                orderBy.asc("top_int")
                 orderBy.desc("create_timestamp_flt")
             res = self.dataStore.search(src, [], filters, [], orderBy, offset, limit, idx_names, kb_ids)
             total=self.dataStore.getTotal(res)
@@ -340,7 +341,7 @@ class Dealer:
             chunk = sres.field[id]
             dnm = chunk["docnm_kwd"]
             did = chunk["doc_id"]
-            position_list = chunk.get("position_list", "[]")
+            position_int = chunk.get("position_int", [])
             d = {
                 "chunk_id": id,
                 "content_ltks": chunk["content_ltks"],
@@ -354,7 +355,7 @@ class Dealer:
                 "vector_similarity": vsim[i],
                 "term_similarity": tsim[i],
                 "vector": chunk.get(vector_column, zero_vector),
-                "positions": json.loads(position_list)
+                "positions": position_int,
             }
             if highlight and sres.highlight:
                 if id in sres.highlight:
rag/svr/task_executor.py

@@ -211,9 +211,9 @@ def build_chunks(task, progress_callback):
         if not d.get("image"):
             _ = d.pop("image", None)
             d["img_id"] = ""
-            d["page_num_list"] = json.dumps([])
-            d["position_list"] = json.dumps([])
-            d["top_list"] = json.dumps([])
+            d["page_num_int"] = []
+            d["position_int"] = []
+            d["top_int"] = []
             docs.append(d)
             continue

rag/utils/es_conn.py

@@ -185,8 +185,14 @@ class ESConnection(DocStoreConnection):
         orders = list()
         for field, order in orderBy.fields:
             order = "asc" if order == 0 else "desc"
-            orders.append({field: {"order": order, "unmapped_type": "float",
-                                   "mode": "avg", "numeric_type": "double"}})
+            if field in ["page_num_int", "top_int"]:
+                order_info = {"order": order, "unmapped_type": "float",
+                              "mode": "avg", "numeric_type": "double"}
+            elif field.endswith("_int") or field.endswith("_flt"):
+                order_info = {"order": order, "unmapped_type": "float"}
+            else:
+                order_info = {"order": order, "unmapped_type": "text"}
+            orders.append({field: order_info})
         s = s.sort(*orders)

         if limit > 0:
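The branching above replaces a single hard-coded sort spec with a per-field one. A standalone sketch of the same mapping (the function name is hypothetical; `unmapped_type`, `mode`, and `numeric_type` are standard Elasticsearch sort options):

```python
def es_sort_spec(field: str, order: str) -> dict:
    if field in ["page_num_int", "top_int"]:
        # Multi-valued int fields: sort on their average ("mode": "avg"),
        # normalize the numeric type across indices ("numeric_type":
        # "double"), and keep the sort from failing where the field is
        # unmapped ("unmapped_type").
        info = {"order": order, "unmapped_type": "float",
                "mode": "avg", "numeric_type": "double"}
    elif field.endswith("_int") or field.endswith("_flt"):
        info = {"order": order, "unmapped_type": "float"}
    else:
        info = {"order": order, "unmapped_type": "text"}
    return {field: info}

# es_sort_spec("page_num_int", "asc") ->
# {'page_num_int': {'order': 'asc', 'unmapped_type': 'float',
#                   'mode': 'avg', 'numeric_type': 'double'}}
```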
rag/utils/infinity_conn.py

@@ -297,7 +297,7 @@ class InfinityConnection(DocStoreConnection):
             df_list.append(kb_res)
         self.connPool.release_conn(inf_conn)
         res = concat_dataframes(df_list, selectFields)
-        logger.debug("INFINITY search tables: " + str(table_list))
+        logger.debug(f"INFINITY search tables: {str(table_list)}, result: {str(res)}")
         return res

     def get(
@@ -307,8 +307,10 @@ class InfinityConnection(DocStoreConnection):
         db_instance = inf_conn.get_database(self.dbName)
         df_list = list()
         assert isinstance(knowledgebaseIds, list)
+        table_list = list()
         for knowledgebaseId in knowledgebaseIds:
             table_name = f"{indexName}_{knowledgebaseId}"
+            table_list.append(table_name)
             table_instance = db_instance.get_table(table_name)
             kb_res = table_instance.output(["*"]).filter(f"id = '{chunkId}'").to_pl()
             if len(kb_res) != 0 and kb_res.shape[0] > 0:
@@ -316,6 +318,7 @@ class InfinityConnection(DocStoreConnection):

         self.connPool.release_conn(inf_conn)
         res = concat_dataframes(df_list, ["id"])
+        logger.debug(f"INFINITY get tables: {str(table_list)}, result: {str(res)}")
         res_fields = self.getFields(res, res.columns)
         return res_fields.get(chunkId, None)

@@ -349,15 +352,22 @@ class InfinityConnection(DocStoreConnection):
             for k, v in d.items():
                 if k.endswith("_kwd") and isinstance(v, list):
                     d[k] = " ".join(v)
-                if k == 'kb_id':
+                elif k == 'kb_id':
                     if isinstance(d[k], list):
                         d[k] = d[k][0]  # since d[k] is a list, but we need a str
+                elif k == "position_int":
+                    assert isinstance(v, list)
+                    arr = [num for row in v for num in row]
+                    d[k] = "_".join(f"{num:08x}" for num in arr)
+                elif k in ["page_num_int", "top_int"]:
+                    assert isinstance(v, list)
+                    d[k] = "_".join(f"{num:08x}" for num in v)
         ids = ["'{}'".format(d["id"]) for d in documents]
         str_ids = ", ".join(ids)
         str_filter = f"id IN ({str_ids})"
         table_instance.delete(str_filter)
         # for doc in documents:
-        #     logger.info(f"insert position_list: {doc['position_list']}")
+        #     logger.info(f"insert position_int: {doc['position_int']}")
         # logger.info(f"InfinityConnection.insert {json.dumps(documents)}")
         table_instance.insert(documents)
         self.connPool.release_conn(inf_conn)
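The Infinity columns here apparently hold these lists as strings, so `insert` packs every integer into a fixed-width hex token joined by underscores, flattening `position_int` rows first. A minimal sketch of the encoding (helper names are hypothetical; the bodies mirror the expressions in the diff):

```python
def encode_ints(vals):
    # page_num_int / top_int: flat list of ints -> "00000003_00000028"
    return "_".join(f"{num:08x}" for num in vals)

def encode_positions(rows):
    # position_int: flatten each (page, left, right, top, bottom)
    # 5-tuple before encoding, i.e. 5 hex tokens per position.
    arr = [num for row in rows for num in row]
    return "_".join(f"{num:08x}" for num in arr)

# encode_positions([(1, 12, 500, 40, 90)])
# -> '00000001_0000000c_000001f4_00000028_0000005a'
```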
@@ -367,8 +377,8 @@ class InfinityConnection(DocStoreConnection):
     def update(
         self, condition: dict, newValue: dict, indexName: str, knowledgebaseId: str
     ) -> bool:
-        # if 'position_list' in newValue:
-        #     logger.info(f"upsert position_list: {newValue['position_list']}")
+        # if 'position_int' in newValue:
+        #     logger.info(f"update position_int: {newValue['position_int']}")
         inf_conn = self.connPool.get_conn()
         db_instance = inf_conn.get_database(self.dbName)
         table_name = f"{indexName}_{knowledgebaseId}"
@@ -377,6 +387,16 @@ class InfinityConnection(DocStoreConnection):
         for k, v in newValue.items():
             if k.endswith("_kwd") and isinstance(v, list):
                 newValue[k] = " ".join(v)
+            elif k == 'kb_id':
+                if isinstance(newValue[k], list):
+                    newValue[k] = newValue[k][0]  # since newValue[k] is a list, but we need a str
+            elif k == "position_int":
+                assert isinstance(v, list)
+                arr = [num for row in v for num in row]
+                newValue[k] = "_".join(f"{num:08x}" for num in arr)
+            elif k in ["page_num_int", "top_int"]:
+                assert isinstance(v, list)
+                newValue[k] = "_".join(f"{num:08x}" for num in v)
         table_instance.update(filter, newValue)
         self.connPool.release_conn(inf_conn)
         return True
@@ -423,9 +443,22 @@ class InfinityConnection(DocStoreConnection):
                 v = res[fieldnm][i]
                 if isinstance(v, Series):
                     v = list(v)
-                elif fieldnm == "important_kwd":
+                elif fieldnm.endswith("_kwd"):
                     assert isinstance(v, str)
                     v = v.split()
+                elif fieldnm == "position_int":
+                    assert isinstance(v, str)
+                    if v:
+                        arr = [int(hex_val, 16) for hex_val in v.split('_')]
+                        v = [arr[i:i + 5] for i in range(0, len(arr), 5)]  # 5 ints per (page, left, right, top, bottom)
+                    else:
+                        v = []
+                elif fieldnm in ["page_num_int", "top_int"]:
+                    assert isinstance(v, str)
+                    if v:
+                        v = [int(hex_val, 16) for hex_val in v.split('_')]
+                    else:
+                        v = []
                 else:
                     if not isinstance(v, str):
                         v = str(v)
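`getFields` reverses the encoding when reading rows back. Because each position was flattened from a 5-tuple, the decoder has to regroup the hex tokens in chunks of five for the round trip to hold. A quick check against the `encode_positions` sketch above (the decoder name is likewise hypothetical):

```python
def decode_positions(s):
    if not s:
        return []
    arr = [int(tok, 16) for tok in s.split("_")]
    # regroup: 5 ints per (page, left, right, top, bottom) tuple
    return [arr[i:i + 5] for i in range(0, len(arr), 5)]

packed = encode_positions([(1, 12, 500, 40, 90), (2, 0, 960, 0, 720)])
assert decode_positions(packed) == [[1, 12, 500, 40, 90],
                                    [2, 0, 960, 0, 720]]
```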