Feat: add support for pipeline logs operation (#10207)

### What problem does this PR solve?

Adds support for the pipeline logs operation: per-component progress traces are accumulated in Redis while a pipeline runs, and a pipeline operation log record is persisted when a parse task finishes or fails.
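
For context, the trace object that `Pipeline.callback` writes to Redis (see the Pipeline diff below) is a list of per-component entries. A minimal sketch of its shape — the component id and messages are illustrative, not taken from a real run:

```python
import datetime
import time

# One entry per pipeline component; each callback appends a trace item.
log_obj = [
    {
        "component_id": "Parser:0",  # illustrative component id
        "trace": [
            {
                "progress": 0.5,                 # negative progress marks failure
                "message": "Parsing page 3/6",
                "datetime": datetime.datetime.now().strftime("%H:%M:%S"),
                "timestamp": time.time(),
                "elapsed_time": 0,               # delta vs. the previous trace item
            }
        ],
    }
]
# Stored with a 30-minute TTL: REDIS_CONN.set_obj(log_key, log_obj, 60 * 30)
```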

### Type of change


- [x] New Feature (non-breaking change which adds functionality)

---------

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Author: Yongteng Lei
Date: 2025-09-23 09:46:31 +08:00 (committed by GitHub)
Parent: d0bfe8b10c
Commit: 0c557e37ad
8 changed files with 340 additions and 17 deletions

Changed file (Parser process component):

@@ -231,7 +231,6 @@ class Parser(ProcessBase):
if conf.get("output_format") == "json":
self.set_output("json", bboxes)
if conf.get("output_format") == "markdown":
mkdn = ""
for b in bboxes:
@@ -295,6 +294,7 @@ class Parser(ProcessBase):
def _markdown(self, name, blob):
from functools import reduce
from rag.app.naive import Markdown as naive_markdown_parser
from rag.nlp import concat_img
@@ -346,7 +346,7 @@ class Parser(ProcessBase):
else:
# use VLM to describe the picture
cv_model = LLMBundle(self._canvas.get_tenant_id(), LLMType.IMAGE2TEXT, llm_name=conf["llm_id"],lang=lang)
cv_model = LLMBundle(self._canvas.get_tenant_id(), LLMType.IMAGE2TEXT, llm_name=conf["llm_id"], lang=lang)
img_binary = io.BytesIO()
img.save(img_binary, format="JPEG")
img_binary.seek(0)
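
In the VLM branch above, the page image is serialized to an in-memory JPEG before being handed to the IMAGE2TEXT model. A self-contained sketch of just that serialization step (the stand-in image is an assumption; the model call itself falls outside this hunk):

```python
import io

from PIL import Image  # assumes `img` is a PIL image, as in rag's parsers

img = Image.new("RGB", (64, 64), "white")  # stand-in for a real page image
img_binary = io.BytesIO()
img.save(img_binary, format="JPEG")  # serialize to an in-memory JPEG
img_binary.seek(0)                   # rewind so the reader starts at byte 0
jpeg_bytes = img_binary.read()
```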

Changed file (Pipeline runner):

@@ -19,10 +19,13 @@ import logging
import random
import time
from timeit import default_timer as timer
import trio
from agent.canvas import Graph
from api.db import PipelineTaskType
from api.db.services.document_service import DocumentService
from api.db.services.pipeline_operation_log_service import PipelineOperationLogService
from rag.utils.redis_conn import REDIS_CONN
@@ -44,22 +47,40 @@ class Pipeline(Graph):
obj = json.loads(bin.encode("utf-8"))
if obj:
if obj[-1]["component_id"] == component_name:
obj[-1]["trace"].append({"progress": progress, "message": message, "datetime": datetime.datetime.now().strftime("%H:%M:%S"), "timestamp": timestamp, "elapsed_time": timestamp-obj[-1]["trace"][-1]["timestamp"]})
obj[-1]["trace"].append(
{
"progress": progress,
"message": message,
"datetime": datetime.datetime.now().strftime("%H:%M:%S"),
"timestamp": timestamp,
"elapsed_time": timestamp - obj[-1]["trace"][-1]["timestamp"],
}
)
else:
obj.append({"component_id": component_name, "trace": [{"progress": progress, "message": message, "datetime": datetime.datetime.now().strftime("%H:%M:%S"), "timestamp": timestamp, "elapsed_time": 0}]})
obj.append(
{
"component_id": component_name,
"trace": [{"progress": progress, "message": message, "datetime": datetime.datetime.now().strftime("%H:%M:%S"), "timestamp": timestamp, "elapsed_time": 0}],
}
)
else:
obj = [{"component_id": component_name, "trace": [{"progress": progress, "message": message, "datetime": datetime.datetime.now().strftime("%H:%M:%S"), "timestamp": timestamp, "elapsed_time": 0}]}]
obj = [
{
"component_id": component_name,
"trace": [{"progress": progress, "message": message, "datetime": datetime.datetime.now().strftime("%H:%M:%S"), "timestamp": timestamp, "elapsed_time": 0}],
}
]
REDIS_CONN.set_obj(log_key, obj, 60 * 30)
if self._doc_id:
percentage = 1./len(self.components.items())
percentage = 1.0 / len(self.components.items())
msg = ""
finished = 0.
finished = 0.0
for o in obj:
if o['component_id'] == "END":
if o["component_id"] == "END":
continue
msg += f"\n[{o['component_id']}]:\n"
for t in o["trace"]:
msg += "%s: %s\n"%(t["datetime"], t["message"])
msg += "%s: %s\n" % (t["datetime"], t["message"])
if t["progress"] < 0:
finished = -1
break
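
Concretely, the loop above renders a per-document progress message in this shape (component id and messages are illustrative):

```
[Parser:0]:
09:46:31: Start to parse.
09:46:40: Parsing page 3/6.
```

A negative `progress` on any trace item marks the whole run as failed (`finished = -1`).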
@@ -129,8 +150,13 @@ class Pipeline(Graph):
self.callback("END", 1, json.dumps(self.get_component_obj(self.path[-1]).output(), ensure_ascii=False))
if self._doc_id:
DocumentService.update_by_id(self._doc_id,{
"progress": 1 if not self.error else -1,
"progress_msg": "Pipeline finished...\n" + self.error,
"process_duration": time.perf_counter() - st
})
DocumentService.update_by_id(
self._doc_id,
{
"progress": 1 if not self.error else -1,
"progress_msg": "Pipeline finished...\n" + self.error,
"process_duration": time.perf_counter() - st,
},
)
PipelineOperationLogService.create(document_id=self._doc_id, pipeline_id=self._flow_id, task_type=PipelineTaskType.PARSE)
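
A hedged sketch of reading a run's trace back out of Redis, mirroring the `json.loads(bin...)` read at the top of `callback` — the plain `get` accessor and key layout are assumptions, not confirmed by this diff:

```python
import json

from rag.utils.redis_conn import REDIS_CONN

def read_trace(log_key: str) -> list:
    # Mirror of the read in Pipeline.callback; REDIS_CONN.get returning the
    # serialized list is an assumption based on set_obj/json.loads usage above.
    bin = REDIS_CONN.get(log_key)
    return json.loads(bin) if bin else []

for entry in read_trace("some-log-key"):  # key name illustrative
    last = entry["trace"][-1]
    print(entry["component_id"], last["progress"], last["message"])
```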

Changed file (task executor):

@@ -21,6 +21,7 @@ import sys
import threading
import time
from api.db.services.canvas_service import UserCanvasService
from api.db.services.pipeline_operation_log_service import PipelineOperationLogService
from api.utils.api_utils import timeout
from api.utils.base64_image import image2id
from api.utils.log_utils import init_root_logger, get_project_base_directory
@@ -45,7 +46,7 @@ import exceptiongroup
import faulthandler
import numpy as np
from peewee import DoesNotExist
from api.db import LLMType, ParserType
from api.db import LLMType, ParserType, PipelineTaskType
from api.db.services.document_service import DocumentService
from api.db.services.llm_service import LLMBundle
from api.db.services.task_service import TaskService, has_canceled
@@ -650,6 +651,7 @@ async def do_handle_task(task):
timer() - start_ts))
DocumentService.increment_chunk_num(task_doc_id, task_dataset_id, token_count, chunk_count, 0)
PipelineOperationLogService.record_pipeline_operation(document_id=task_doc_id, pipeline_id="", task_type=PipelineTaskType.PARSE)
time_cost = timer() - start_ts
task_time_cost = timer() - task_start_ts
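
Note the asymmetry between the two recording sites: the success path in `do_handle_task` above always passes an empty `pipeline_id` (apparently a built-in, non-dataflow parse), while the failure path in `handle_task` below falls back to the task's `dataflow_id` when one exists.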
@@ -685,6 +687,7 @@ async def handle_task():
except Exception:
pass
logging.exception(f"handle_task got exception for task {json.dumps(task)}")
PipelineOperationLogService.record_pipeline_operation(document_id=task["doc_id"], pipeline_id=task.get("dataflow_id", "") or "", task_type=PipelineTaskType.PARSE)
redis_msg.ack()
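
The `or ""` guard on the failure path normalizes both a missing and a null `dataflow_id` to an empty string; a quick check in isolation:

```python
task = {"doc_id": "d1", "dataflow_id": None}     # illustrative task payload
pipeline_id = task.get("dataflow_id", "") or ""  # missing and None both -> ""
assert pipeline_id == ""
```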