Mirror of https://github.com/infiniflow/ragflow.git, synced 2026-01-30 23:26:36 +08:00
Rework logging (#3358)
Unified all log files into one.

### What problem does this PR solve?

Unified all log files into one.

### Type of change

- [x] Refactoring
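The pattern every touched module converges on is visible throughout the diff below: import the single module-level `logger` from `api.utils.log_utils` and record failures with `logger.exception(...)` instead of handing the exception object to one of the old per-purpose loggers. A minimal, illustrative sketch (the function body is a placeholder, not code from this PR):

```python
from api.utils.log_utils import logger  # the one shared logger introduced by this PR

def load_user(web_request):
    try:
        ...  # original lookup logic elided
        return None
    except Exception:
        # logger.exception() captures the active traceback,
        # replacing the old stat_logger.exception(e) calls.
        logger.exception("load_user got exception")
        return None
```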
@@ -13,7 +13,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging
import os
import sys
from importlib.util import module_from_spec, spec_from_file_location
@@ -30,18 +29,14 @@ from api.utils import CustomJSONEncoder, commands

from flask_session import Session
from flask_login import LoginManager
from api.settings import SECRET_KEY, stat_logger
from api.settings import API_VERSION, access_logger
from api.settings import SECRET_KEY
from api.settings import API_VERSION
from api.utils.api_utils import server_error_response
from api.utils.log_utils import logger
from itsdangerous.url_safe import URLSafeTimedSerializer as Serializer

__all__ = ["app"]


logger = logging.getLogger("flask.app")
for h in access_logger.handlers:
logger.addHandler(h)

Request.json = property(lambda self: self.get_json(force=True, silent=True))

app = Flask(__name__)
@@ -158,8 +153,8 @@ def load_user(web_request):
return user[0]
else:
return None
except Exception as e:
stat_logger.exception(e)
except Exception:
logger.exception("load_user got exception")
return None
else:
return None
@@ -23,6 +23,7 @@ from api.utils import get_uuid
from api.utils.api_utils import get_json_result, server_error_response, validate_request, get_data_error_result
from agent.canvas import Canvas
from peewee import MySQLDatabase, PostgresqlDatabase
from api.utils.log_utils import logger


@manager.route('/templates', methods=['GET'])
@@ -114,7 +115,7 @@ def run():
pass
canvas.add_user_input(req["message"])
answer = canvas.run(stream=stream)
print(canvas)
logger.info(canvas)
except Exception as e:
return server_error_response(e)
@@ -25,6 +25,7 @@ from api.db.db_models import TenantLLM
from api.utils.api_utils import get_json_result
from rag.llm import EmbeddingModel, ChatModel, RerankModel, CvModel, TTSModel
import requests
from api.utils.log_utils import logger


@manager.route('/factories', methods=['GET'])
@@ -89,7 +90,7 @@ def set_api_key():
if len(arr) == 0 or tc == 0:
raise Exception("Fail")
rerank_passed = True
print(f'passed model rerank{llm.llm_name}',flush=True)
logger.info(f'passed model rerank {llm.llm_name}')
except Exception as e:
msg += f"\nFail to access model({llm.llm_name}) using this api key." + str(
e)

@@ -526,4 +526,4 @@ def list(tenant_id):
new_key = key_mapping.get(key, key)
renamed_data[new_key] = value
renamed_list.append(renamed_data)
return get_result(data=renamed_list)
return get_result(data=renamed_list)
@@ -53,8 +53,8 @@ from api.settings import (
)
from api.db.services.user_service import UserService, TenantService, UserTenantService
from api.db.services.file_service import FileService
from api.settings import stat_logger
from api.utils.api_utils import get_json_result, construct_response
from api.utils.log_utils import logger


@manager.route("/login", methods=["POST", "GET"])
@@ -177,7 +177,7 @@ def github_callback():
try:
avatar = download_img(user_info["avatar_url"])
except Exception as e:
stat_logger.exception(e)
logger.exception(e)
avatar = ""
users = user_register(
user_id,
@@ -202,7 +202,7 @@ def github_callback():
return redirect("/?auth=%s" % user.get_id())
except Exception as e:
rollback_user_registration(user_id)
stat_logger.exception(e)
logger.exception(e)
return redirect("/?error=%s" % str(e))

# User has already registered, try to log in
@@ -279,7 +279,7 @@ def feishu_callback():
try:
avatar = download_img(user_info["avatar_url"])
except Exception as e:
stat_logger.exception(e)
logger.exception(e)
avatar = ""
users = user_register(
user_id,
@@ -304,7 +304,7 @@ def feishu_callback():
return redirect("/?auth=%s" % user.get_id())
except Exception as e:
rollback_user_registration(user_id)
stat_logger.exception(e)
logger.exception(e)
return redirect("/?error=%s" % str(e))

# User has already registered, try to log in
@@ -436,7 +436,7 @@ def setting_user():
UserService.update_by_id(current_user.id, update_dict)
return get_json_result(data=True)
except Exception as e:
stat_logger.exception(e)
logger.exception(e)
return get_json_result(
data=False, message="Update failure!", code=RetCode.EXCEPTION_ERROR
)
@@ -621,7 +621,7 @@ def user_add():
)
except Exception as e:
rollback_user_registration(user_id)
stat_logger.exception(e)
logger.exception(e)
return get_json_result(
data=False,
message=f"User registration failure, error: {str(e)}",
@@ -30,12 +30,9 @@ from peewee import (
)
from playhouse.pool import PooledMySQLDatabase, PooledPostgresqlDatabase
from api.db import SerializedType, ParserType
from api.settings import DATABASE, stat_logger, SECRET_KEY, DATABASE_TYPE
from api.utils.log_utils import getLogger
from api.settings import DATABASE, SECRET_KEY, DATABASE_TYPE
from api import utils

LOGGER = getLogger()

from api.utils.log_utils import logger

def singleton(cls, *args, **kw):
instances = {}
@@ -288,7 +285,7 @@ class BaseDataBase:
database_config = DATABASE.copy()
db_name = database_config.pop("name")
self.database_connection = PooledDatabase[DATABASE_TYPE.upper()].value(db_name, **database_config)
stat_logger.info('init database on cluster mode successfully')
logger.info('init database on cluster mode successfully')

class PostgresDatabaseLock:
def __init__(self, lock_name, timeout=10, db=None):
@@ -396,7 +393,7 @@ def close_connection():
if DB:
DB.close_stale(age=30)
except Exception as e:
LOGGER.exception(e)
logger.exception(e)


class DataBaseModel(BaseModel):
@@ -412,15 +409,15 @@ def init_database_tables(alter_fields=[]):
for name, obj in members:
if obj != DataBaseModel and issubclass(obj, DataBaseModel):
table_objs.append(obj)
LOGGER.info(f"start create table {obj.__name__}")
logger.info(f"start create table {obj.__name__}")
try:
obj.create_table()
LOGGER.info(f"create table success: {obj.__name__}")
logger.info(f"create table success: {obj.__name__}")
except Exception as e:
LOGGER.exception(e)
logger.exception(e)
create_failed_list.append(obj.__name__)
if create_failed_list:
LOGGER.info(f"create tables failed: {create_failed_list}")
logger.info(f"create tables failed: {create_failed_list}")
raise Exception(f"create tables failed: {create_failed_list}")
migrate_db()
@@ -22,12 +22,6 @@ from playhouse.pool import PooledMySQLDatabase
from api.utils import current_timestamp, timestamp_to_date

from api.db.db_models import DB, DataBaseModel
from api.db.runtime_config import RuntimeConfig
from api.utils.log_utils import getLogger
from enum import Enum


LOGGER = getLogger()


@DB.connection_context()
@@ -30,6 +30,7 @@ from api.db.services.llm_service import LLMFactoriesService, LLMService, TenantL
from api.db.services.user_service import TenantService, UserTenantService
from api.settings import CHAT_MDL, EMBEDDING_MDL, ASR_MDL, IMAGE2TEXT_MDL, PARSERS, LLM_FACTORY, API_KEY, LLM_BASE_URL
from api.utils.file_utils import get_project_base_directory
from api.utils.log_utils import logger


def encode_to_base64(input_string):
@@ -69,36 +70,34 @@ def init_superuser():
"api_key": API_KEY, "api_base": LLM_BASE_URL})

if not UserService.save(**user_info):
print("\033[93m【ERROR】\033[0mcan't init admin.")
logger.info("can't init admin.")
return
TenantService.insert(**tenant)
UserTenantService.insert(**usr_tenant)
TenantLLMService.insert_many(tenant_llm)
print(
"【INFO】Super user initialized. \033[93memail: admin@ragflow.io, password: admin\033[0m. Changing the password after logining is strongly recomanded.")
logger.info(
"Super user initialized. email: admin@ragflow.io, password: admin. Changing the password after logining is strongly recomanded.")

chat_mdl = LLMBundle(tenant["id"], LLMType.CHAT, tenant["llm_id"])
msg = chat_mdl.chat(system="", history=[
{"role": "user", "content": "Hello!"}], gen_conf={})
if msg.find("ERROR: ") == 0:
print(
"\33[91m【ERROR】\33[0m: ",
logger.error(
"'{}' dosen't work. {}".format(
tenant["llm_id"],
msg))
embd_mdl = LLMBundle(tenant["id"], LLMType.EMBEDDING, tenant["embd_id"])
v, c = embd_mdl.encode(["Hello!"])
if c == 0:
print(
"\33[91m【ERROR】\33[0m:",
" '{}' dosen't work!".format(
logger.error(
"'{}' dosen't work!".format(
tenant["embd_id"]))


def init_llm_factory():
try:
LLMService.filter_delete([(LLM.fid == "MiniMax" or LLM.fid == "Minimax")])
except Exception as e:
except Exception:
pass

factory_llm_infos = json.load(
@@ -111,14 +110,14 @@ def init_llm_factory():
llm_infos = factory_llm_info.pop("llm")
try:
LLMFactoriesService.save(**factory_llm_info)
except Exception as e:
except Exception:
pass
LLMService.filter_delete([LLM.fid == factory_llm_info["name"]])
for llm_info in llm_infos:
llm_info["fid"] = factory_llm_info["name"]
try:
LLMService.save(**llm_info)
except Exception as e:
except Exception:
pass

LLMFactoriesService.filter_delete([LLMFactories.name == "Local"])
@@ -145,7 +144,7 @@ def init_llm_factory():
row = deepcopy(row)
row["llm_name"] = "text-embedding-3-large"
TenantLLMService.save(**row)
except Exception as e:
except Exception:
pass
break
for kb_id in KnowledgebaseService.get_all_ids():
@@ -169,9 +168,8 @@ def add_graph_templates():
CanvasTemplateService.save(**cnvs)
except:
CanvasTemplateService.update_by_id(cnvs["id"], cnvs)
except Exception as e:
print("Add graph templates error: ", e)
print("------------", flush=True)
except Exception:
logger.exception("Add graph templates error: ")


def init_web_data():
@@ -182,7 +180,7 @@ def init_web_data():
# init_superuser()

add_graph_templates()
print("init web data success:{}".format(time.time() - start_time))
logger.info("init web data success:{}".format(time.time() - start_time))


if __name__ == '__main__':
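A side effect visible in this file: start-up diagnostics that used to be ANSI-colored `print()` calls now go through the shared logger, so they land in the unified log file instead of only on stdout. A hedged sketch of the substitution (the wrapper function and its name are illustrative, not part of the PR):

```python
from api.utils.log_utils import logger

def report_superuser_init(ok: bool) -> None:
    # before: print("\033[93m【ERROR】\033[0mcan't init admin.")
    if not ok:
        logger.info("can't init admin.")
    else:
        logger.info("Super user initialized. email: admin@ragflow.io, password: admin.")
```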
@@ -1,21 +0,0 @@
#
# Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import operator
import time
import typing
from api.utils.log_utils import sql_logger
import peewee
@@ -26,11 +26,12 @@ from api.db.db_models import Dialog, Conversation,DB
from api.db.services.common_service import CommonService
from api.db.services.knowledgebase_service import KnowledgebaseService
from api.db.services.llm_service import LLMService, TenantLLMService, LLMBundle
from api.settings import chat_logger, retrievaler, kg_retrievaler
from api.settings import retrievaler, kg_retrievaler
from rag.app.resume import forbidden_select_fields4resume
from rag.nlp.search import index_name
from rag.utils import rmSpace, num_tokens_from_string, encoder
from api.utils.file_utils import get_project_base_directory
from api.utils.log_utils import logger


class DialogService(CommonService):
@@ -177,7 +178,7 @@ def chat(dialog, messages, stream=True, **kwargs):
tts_mdl = LLMBundle(dialog.tenant_id, LLMType.TTS)
# try to use sql if field mapping is good to go
if field_map:
chat_logger.info("Use SQL to retrieval:{}".format(questions[-1]))
logger.info("Use SQL to retrieval:{}".format(questions[-1]))
ans = use_sql(questions[-1], field_map, dialog.tenant_id, chat_mdl, prompt_config.get("quote", True))
if ans:
yield ans
@@ -219,7 +220,7 @@ def chat(dialog, messages, stream=True, **kwargs):
doc_ids=attachments,
top=dialog.top_k, aggs=False, rerank_mdl=rerank_mdl)
knowledges = [ck["content_with_weight"] for ck in kbinfos["chunks"]]
chat_logger.info(
logger.info(
"{}->{}".format(" ".join(questions), "\n->".join(knowledges)))
retrieval_tm = timer()

@@ -291,7 +292,7 @@ def chat(dialog, messages, stream=True, **kwargs):
yield decorate_answer(answer)
else:
answer = chat_mdl.chat(prompt, msg[1:], gen_conf)
chat_logger.info("User: {}|Assistant: {}".format(
logger.info("User: {}|Assistant: {}".format(
msg[-1]["content"], answer))
res = decorate_answer(answer)
res["audio_binary"] = tts(tts_mdl, answer)
@@ -319,8 +320,7 @@ def use_sql(question, field_map, tenant_id, chat_mdl, quota=True):
nonlocal sys_prompt, user_promt, question, tried_times
sql = chat_mdl.chat(sys_prompt, [{"role": "user", "content": user_promt}], {
"temperature": 0.06})
print(user_promt, sql)
chat_logger.info(f"“{question}”==>{user_promt} get SQL: {sql}")
logger.info(f"{question} ==> {user_promt} get SQL: {sql}")
sql = re.sub(r"[\r\n]+", " ", sql.lower())
sql = re.sub(r".*select ", "select ", sql.lower())
sql = re.sub(r" +", " ", sql)
@@ -340,9 +340,7 @@ def use_sql(question, field_map, tenant_id, chat_mdl, quota=True):
flds.append(k)
sql = "select doc_id,docnm_kwd," + ",".join(flds) + sql[8:]

print(f"“{question}” get SQL(refined): {sql}")

chat_logger.info(f"“{question}” get SQL(refined): {sql}")
logger.info(f"{question} get SQL(refined): {sql}")
tried_times += 1
return retrievaler.sql_retrieval(sql, format="json"), sql

@@ -371,10 +369,9 @@ def use_sql(question, field_map, tenant_id, chat_mdl, quota=True):
question, sql, tbl["error"]
)
tbl, sql = get_table()
chat_logger.info("TRY it again: {}".format(sql))
logger.info("TRY it again: {}".format(sql))

chat_logger.info("GET table: {}".format(tbl))
print(tbl)
logger.info("GET table: {}".format(tbl))
if tbl.get("error") or len(tbl["rows"]) == 0:
return None

@@ -404,7 +401,7 @@ def use_sql(question, field_map, tenant_id, chat_mdl, quota=True):
rows = re.sub(r"T[0-9]{2}:[0-9]{2}:[0-9]{2}(\.[0-9]+Z)?\|", "|", rows)

if not docid_idx or not docnm_idx:
chat_logger.warning("SQL missing field: " + sql)
logger.warning("SQL missing field: " + sql)
return {
"answer": "\n".join([clmns, line, rows]),
"reference": {"chunks": [], "doc_aggs": []},
@@ -17,7 +17,6 @@ import hashlib
import json
import random
import re
import traceback
from concurrent.futures import ThreadPoolExecutor
from copy import deepcopy
from datetime import datetime
@@ -26,7 +25,7 @@ from io import BytesIO
from peewee import fn

from api.db.db_utils import bulk_insert_into_db
from api.settings import stat_logger, docStoreConn
from api.settings import docStoreConn
from api.utils import current_timestamp, get_format_time, get_uuid
from graphrag.mind_map_extractor import MindMapExtractor
from rag.settings import SVR_QUEUE_NAME
@@ -40,6 +39,7 @@ from api.db.services.common_service import CommonService
from api.db.services.knowledgebase_service import KnowledgebaseService
from api.db import StatusEnum
from rag.utils.redis_conn import REDIS_CONN
from api.utils.log_utils import logger


class DocumentService(CommonService):
@@ -387,7 +387,7 @@ class DocumentService(CommonService):
cls.update_by_id(d["id"], info)
except Exception as e:
if str(e).find("'0'") < 0:
stat_logger.error("fetch task exception:" + str(e))
logger.exception("fetch task exception")

@classmethod
@DB.connection_context()
@@ -544,7 +544,7 @@ def doc_upload_and_parse(conversation_id, file_objs, user_id):
"knowledge_graph_kwd": "mind_map"
})
except Exception as e:
stat_logger.error("Mind map generation error:", traceback.format_exc())
logger.exception("Mind map generation error")

vects = embedding(doc_id, [c["content_with_weight"] for c in cks])
assert len(cks) == len(vects)
@@ -28,6 +28,7 @@ from api.db.services.file2document_service import File2DocumentService
from api.utils import get_uuid
from api.utils.file_utils import filename_type, thumbnail_img
from rag.utils.storage_factory import STORAGE_IMPL
from api.utils.log_utils import logger


class FileService(CommonService):
@@ -272,8 +273,8 @@ class FileService(CommonService):
cls.delete_folder_by_pf_id(user_id, file.id)
return cls.model.delete().where((cls.model.tenant_id == user_id)
& (cls.model.id == folder_id)).execute(),
except Exception as e:
print(e)
except Exception:
logger.exception("delete_folder_by_pf_id")
raise RuntimeError("Database error (File retrieval)!")

@classmethod
@@ -321,8 +322,8 @@ class FileService(CommonService):
def move_file(cls, file_ids, folder_id):
try:
cls.filter_update((cls.model.id << file_ids, ), { 'parent_id': folder_id })
except Exception as e:
print(e)
except Exception:
logger.exception("move_file")
raise RuntimeError("Database error (File move)!")

@classmethod
@@ -14,12 +14,12 @@
# limitations under the License.
#
from api.db.services.user_service import TenantService
from api.settings import database_logger
from rag.llm import EmbeddingModel, CvModel, ChatModel, RerankModel, Seq2txtModel, TTSModel
from api.db import LLMType
from api.db.db_models import DB
from api.db.db_models import LLMFactories, LLM, TenantLLM
from api.db.services.common_service import CommonService
from api.utils.log_utils import logger


class LLMFactoriesService(CommonService):
@@ -209,40 +209,40 @@ class LLMBundle(object):
emd, used_tokens = self.mdl.encode(texts, batch_size)
if not TenantLLMService.increase_usage(
self.tenant_id, self.llm_type, used_tokens):
database_logger.error(
"Can't update token usage for {}/EMBEDDING used_tokens: {}".format(self.tenant_id, used_tokens))
logger.error(
"LLMBundle.encode can't update token usage for {}/EMBEDDING used_tokens: {}".format(self.tenant_id, used_tokens))
return emd, used_tokens

def encode_queries(self, query: str):
emd, used_tokens = self.mdl.encode_queries(query)
if not TenantLLMService.increase_usage(
self.tenant_id, self.llm_type, used_tokens):
database_logger.error(
"Can't update token usage for {}/EMBEDDING used_tokens: {}".format(self.tenant_id, used_tokens))
logger.error(
"LLMBundle.encode_queries can't update token usage for {}/EMBEDDING used_tokens: {}".format(self.tenant_id, used_tokens))
return emd, used_tokens

def similarity(self, query: str, texts: list):
sim, used_tokens = self.mdl.similarity(query, texts)
if not TenantLLMService.increase_usage(
self.tenant_id, self.llm_type, used_tokens):
database_logger.error(
"Can't update token usage for {}/RERANK used_tokens: {}".format(self.tenant_id, used_tokens))
logger.error(
"LLMBundle.similarity can't update token usage for {}/RERANK used_tokens: {}".format(self.tenant_id, used_tokens))
return sim, used_tokens

def describe(self, image, max_tokens=300):
txt, used_tokens = self.mdl.describe(image, max_tokens)
if not TenantLLMService.increase_usage(
self.tenant_id, self.llm_type, used_tokens):
database_logger.error(
"Can't update token usage for {}/IMAGE2TEXT used_tokens: {}".format(self.tenant_id, used_tokens))
logger.error(
"LLMBundle.describe can't update token usage for {}/IMAGE2TEXT used_tokens: {}".format(self.tenant_id, used_tokens))
return txt

def transcription(self, audio):
txt, used_tokens = self.mdl.transcription(audio)
if not TenantLLMService.increase_usage(
self.tenant_id, self.llm_type, used_tokens):
database_logger.error(
"Can't update token usage for {}/SEQUENCE2TXT used_tokens: {}".format(self.tenant_id, used_tokens))
logger.error(
"LLMBundle.transcription can't update token usage for {}/SEQUENCE2TXT used_tokens: {}".format(self.tenant_id, used_tokens))
return txt

def tts(self, text):
@@ -250,8 +250,8 @@ class LLMBundle(object):
if isinstance(chunk,int):
if not TenantLLMService.increase_usage(
self.tenant_id, self.llm_type, chunk, self.llm_name):
database_logger.error(
"Can't update token usage for {}/TTS".format(self.tenant_id))
logger.error(
"LLMBundle.tts can't update token usage for {}/TTS".format(self.tenant_id))
return
yield chunk

@@ -259,8 +259,8 @@ class LLMBundle(object):
txt, used_tokens = self.mdl.chat(system, history, gen_conf)
if isinstance(txt, int) and not TenantLLMService.increase_usage(
self.tenant_id, self.llm_type, used_tokens, self.llm_name):
database_logger.error(
"Can't update token usage for {}/CHAT llm_name: {}, used_tokens: {}".format(self.tenant_id, self.llm_name, used_tokens))
logger.error(
"LLMBundle.chat can't update token usage for {}/CHAT llm_name: {}, used_tokens: {}".format(self.tenant_id, self.llm_name, used_tokens))
return txt

def chat_streamly(self, system, history, gen_conf):
@@ -268,7 +268,7 @@ class LLMBundle(object):
if isinstance(txt, int):
if not TenantLLMService.increase_usage(
self.tenant_id, self.llm_type, txt, self.llm_name):
database_logger.error(
"Can't update token usage for {}/CHAT llm_name: {}, content: {}".format(self.tenant_id, self.llm_name, txt))
logger.error(
"LLMBundle.chat_streamly can't update token usage for {}/CHAT llm_name: {}, content: {}".format(self.tenant_id, self.llm_name, txt))
return
yield txt
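With `database_logger` gone, the messages themselves now carry the call site (`LLMBundle.encode`, `LLMBundle.chat`, ...), since entries from every component end up in the same file. A hedged sketch of that convention (the helper function is illustrative, not code from the PR):

```python
from api.utils.log_utils import logger

def log_usage_update_failure(method: str, tenant_id: str, llm_type: str, used_tokens: int) -> None:
    # Prefixing with the method name keeps entries attributable
    # now that there is no separate database.log.
    logger.error(
        "LLMBundle.{} can't update token usage for {}/{} used_tokens: {}".format(
            method, tenant_id, llm_type, used_tokens))
```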
@@ -27,13 +27,10 @@ from api.apps import app
from api.db.runtime_config import RuntimeConfig
from api.db.services.document_service import DocumentService
from api.settings import (
HOST,
HTTP_PORT,
access_logger,
database_logger,
stat_logger,
HOST, HTTP_PORT
)
from api import utils
from api.utils.log_utils import logger

from api.db.db_models import init_database_tables as init_web_db
from api.db.init_data import init_web_data
@@ -45,23 +42,22 @@ def update_progress():
time.sleep(3)
try:
DocumentService.update_progress()
except Exception as e:
stat_logger.error("update_progress exception:" + str(e))
except Exception:
logger.exception("update_progress exception")


if __name__ == "__main__":
print(
r"""
if __name__ == '__main__':
logger.info(r"""
____ ___ ______ ______ __
/ __ \ / | / ____// ____// /____ _ __
/ /_/ // /| | / / __ / /_ / // __ \| | /| / /
/ _, _// ___ |/ /_/ // __/ / // /_/ /| |/ |/ /
/_/ |_|/_/ |_|\____//_/ /_/ \____/ |__/|__/

""",
flush=True,
""")
logger.info(
f'project base: {utils.file_utils.get_project_base_directory()}'
)
stat_logger.info(f"project base: {utils.file_utils.get_project_base_directory()}")

# init db
init_web_db()
@@ -83,7 +79,7 @@ if __name__ == "__main__":

RuntimeConfig.DEBUG = args.debug
if RuntimeConfig.DEBUG:
stat_logger.info("run on debug mode")
logger.info("run on debug mode")

RuntimeConfig.init_env()
RuntimeConfig.init_config(JOB_SERVER_HOST=HOST, HTTP_PORT=HTTP_PORT)
@@ -91,17 +87,17 @@ if __name__ == "__main__":
peewee_logger = logging.getLogger("peewee")
peewee_logger.propagate = False
# rag_arch.common.log.ROpenHandler
peewee_logger.addHandler(database_logger.handlers[0])
peewee_logger.setLevel(database_logger.level)
peewee_logger.addHandler(logger.handlers[0])
peewee_logger.setLevel(logger.handlers[0].level)

thr = ThreadPoolExecutor(max_workers=1)
thr.submit(update_progress)

# start http server
try:
stat_logger.info("RAG Flow http server start...")
logger.info("RAG Flow http server start...")
werkzeug_logger = logging.getLogger("werkzeug")
for h in access_logger.handlers:
for h in logger.handlers:
werkzeug_logger.addHandler(h)
run_simple(
hostname=HOST,
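The server start-up above also routes third-party loggers through the same handlers, so their output lands in the unified log file. A condensed sketch of that wiring, mirroring the added lines in ragflow_server.py (the surrounding start-up code is omitted):

```python
import logging

from api.utils.log_utils import logger  # shared logger with a file handler attached

# peewee reuses the unified logger's first handler (the rotating file handler)
peewee_logger = logging.getLogger("peewee")
peewee_logger.propagate = False
peewee_logger.addHandler(logger.handlers[0])
peewee_logger.setLevel(logger.handlers[0].level)

# werkzeug's request log gets every handler of the shared logger
werkzeug_logger = logging.getLogger("werkzeug")
for h in logger.handlers:
    werkzeug_logger.addHandler(h)
```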
@@ -17,24 +17,9 @@ import os
from datetime import date
from enum import IntEnum, Enum
from api.utils.file_utils import get_project_base_directory
from api.utils.log_utils import LoggerFactory, getLogger
import rag.utils.es_conn
import rag.utils.infinity_conn

# Logger
LoggerFactory.set_directory(
os.path.join(
get_project_base_directory(),
"logs",
"api"))
# {CRITICAL: 50, FATAL:50, ERROR:40, WARNING:30, WARN:30, INFO:20, DEBUG:10, NOTSET:0}
LoggerFactory.LEVEL = 30

stat_logger = getLogger("stat")
access_logger = getLogger("access")
database_logger = getLogger("database")
chat_logger = getLogger("chat")

import rag.utils
from rag.nlp import search
from graphrag import search as kg_search
@@ -47,8 +32,6 @@ TEMP_DIRECTORY = os.path.join(get_project_base_directory(), "temp")
RAG_FLOW_CONF_PATH = os.path.join(get_project_base_directory(), "conf")
LIGHTEN = int(os.environ.get('LIGHTEN', "0"))

SUBPROCESS_STD_LOG_NAME = "std.log"

ERROR_REPORT = True
ERROR_REPORT_WITH_PATH = False
@@ -35,11 +35,12 @@ from werkzeug.http import HTTP_STATUS_CODES
from api.db.db_models import APIToken
from api.settings import (
REQUEST_MAX_WAIT_SEC, REQUEST_WAIT_SEC,
stat_logger, CLIENT_AUTHENTICATION, HTTP_APP_KEY, SECRET_KEY
CLIENT_AUTHENTICATION, HTTP_APP_KEY, SECRET_KEY
)
from api.settings import RetCode
from api.utils import CustomJSONEncoder, get_uuid
from api.utils import json_dumps
from api.utils.log_utils import logger

requests.models.complexjson.dumps = functools.partial(
json.dumps, cls=CustomJSONEncoder)
@@ -117,7 +118,7 @@ def get_data_error_result(code=RetCode.DATA_ERROR,


def server_error_response(e):
stat_logger.exception(e)
logger.exception(e)
try:
if e.code == 401:
return get_json_result(code=401, message=repr(e))
@@ -258,7 +259,7 @@ def construct_json_result(code=RetCode.SUCCESS, message='success', data=None):


def construct_error_response(e):
stat_logger.exception(e)
logger.exception(e)
try:
if e.code == 401:
return construct_json_result(code=RetCode.UNAUTHORIZED, message=repr(e))
@@ -14,300 +14,38 @@
# limitations under the License.
#
import os
import typing
import traceback
import logging
import inspect
from logging.handlers import TimedRotatingFileHandler
from threading import RLock
from logging.handlers import RotatingFileHandler

from api.utils import file_utils
from api.utils.file_utils import get_project_base_directory

LOG_LEVEL = logging.INFO
LOG_FILE = os.path.abspath(os.path.join(get_project_base_directory(), "logs", f"ragflow_{os.getpid()}.log"))
LOG_FORMAT = "%(asctime)-15s %(levelname)-8s %(process)d %(message)s"
logger = None

class LoggerFactory(object):
TYPE = "FILE"
LOG_FORMAT = "[%(levelname)s] [%(asctime)s] [%(module)s.%(funcName)s] [line:%(lineno)d]: %(message)s"
logging.basicConfig(format=LOG_FORMAT)
LEVEL = logging.DEBUG
logger_dict = {}
global_handler_dict = {}

LOG_DIR = None
PARENT_LOG_DIR = None
log_share = True

append_to_parent_log = None

lock = RLock()
# CRITICAL = 50
# FATAL = CRITICAL
# ERROR = 40
# WARNING = 30
# WARN = WARNING
# INFO = 20
# DEBUG = 10
# NOTSET = 0
levels = (10, 20, 30, 40)
schedule_logger_dict = {}

@staticmethod
def set_directory(directory=None, parent_log_dir=None,
append_to_parent_log=None, force=False):
if parent_log_dir:
LoggerFactory.PARENT_LOG_DIR = parent_log_dir
if append_to_parent_log:
LoggerFactory.append_to_parent_log = append_to_parent_log
with LoggerFactory.lock:
if not directory:
directory = file_utils.get_project_base_directory("logs")
if not LoggerFactory.LOG_DIR or force:
LoggerFactory.LOG_DIR = directory
if LoggerFactory.log_share:
oldmask = os.umask(000)
os.makedirs(LoggerFactory.LOG_DIR, exist_ok=True)
os.umask(oldmask)
else:
os.makedirs(LoggerFactory.LOG_DIR, exist_ok=True)
for loggerName, ghandler in LoggerFactory.global_handler_dict.items():
for className, (logger,
handler) in LoggerFactory.logger_dict.items():
logger.removeHandler(ghandler)
ghandler.close()
LoggerFactory.global_handler_dict = {}
for className, (logger,
handler) in LoggerFactory.logger_dict.items():
logger.removeHandler(handler)
_handler = None
if handler:
handler.close()
if className != "default":
_handler = LoggerFactory.get_handler(className)
logger.addHandler(_handler)
LoggerFactory.assemble_global_handler(logger)
LoggerFactory.logger_dict[className] = logger, _handler

@staticmethod
def new_logger(name):
logger = logging.getLogger(name)
logger.propagate = False
logger.setLevel(LoggerFactory.LEVEL)
def getLogger():
global logger
if logger is not None:
return logger

@staticmethod
def get_logger(class_name=None):
with LoggerFactory.lock:
if class_name in LoggerFactory.logger_dict.keys():
logger, handler = LoggerFactory.logger_dict[class_name]
if not logger:
logger, handler = LoggerFactory.init_logger(class_name)
else:
logger, handler = LoggerFactory.init_logger(class_name)
return logger
print(f"log file path: {LOG_FILE}")
os.makedirs(os.path.dirname(LOG_FILE), exist_ok=True)
logger = logging.getLogger("ragflow")
logger.setLevel(LOG_LEVEL)

@staticmethod
def get_global_handler(logger_name, level=None, log_dir=None):
if not LoggerFactory.LOG_DIR:
return logging.StreamHandler()
if log_dir:
logger_name_key = logger_name + "_" + log_dir
else:
logger_name_key = logger_name + "_" + LoggerFactory.LOG_DIR
# if loggerName not in LoggerFactory.globalHandlerDict:
if logger_name_key not in LoggerFactory.global_handler_dict:
with LoggerFactory.lock:
if logger_name_key not in LoggerFactory.global_handler_dict:
handler = LoggerFactory.get_handler(
logger_name, level, log_dir)
LoggerFactory.global_handler_dict[logger_name_key] = handler
return LoggerFactory.global_handler_dict[logger_name_key]
handler1 = RotatingFileHandler(LOG_FILE, maxBytes=10*1024*1024, backupCount=5)
handler1.setLevel(LOG_LEVEL)
formatter1 = logging.Formatter(LOG_FORMAT)
handler1.setFormatter(formatter1)
logger.addHandler(handler1)

@staticmethod
def get_handler(class_name, level=None, log_dir=None,
log_type=None, job_id=None):
if not log_type:
if not LoggerFactory.LOG_DIR or not class_name:
return logging.StreamHandler()
# return Diy_StreamHandler()
handler2 = logging.StreamHandler()
handler2.setLevel(LOG_LEVEL)
formatter2 = logging.Formatter(LOG_FORMAT)
handler2.setFormatter(formatter2)
logger.addHandler(handler2)

if not log_dir:
log_file = os.path.join(
LoggerFactory.LOG_DIR,
"{}.log".format(class_name))
else:
log_file = os.path.join(log_dir, "{}.log".format(class_name))
else:
log_file = os.path.join(log_dir, "rag_flow_{}.log".format(
log_type) if level == LoggerFactory.LEVEL else 'rag_flow_{}_error.log'.format(log_type))

os.makedirs(os.path.dirname(log_file), exist_ok=True)
if LoggerFactory.log_share:
handler = ROpenHandler(log_file,
when='D',
interval=1,
backupCount=14,
delay=True)
else:
handler = TimedRotatingFileHandler(log_file,
when='D',
interval=1,
backupCount=14,
delay=True)
if level:
handler.level = level

return handler

@staticmethod
def init_logger(class_name):
with LoggerFactory.lock:
logger = LoggerFactory.new_logger(class_name)
handler = None
if class_name:
handler = LoggerFactory.get_handler(class_name)
logger.addHandler(handler)
LoggerFactory.logger_dict[class_name] = logger, handler

else:
LoggerFactory.logger_dict["default"] = logger, handler

LoggerFactory.assemble_global_handler(logger)
return logger, handler

@staticmethod
def assemble_global_handler(logger):
if LoggerFactory.LOG_DIR:
for level in LoggerFactory.levels:
if level >= LoggerFactory.LEVEL:
level_logger_name = logging._levelToName[level]
logger.addHandler(
LoggerFactory.get_global_handler(
level_logger_name, level))
if LoggerFactory.append_to_parent_log and LoggerFactory.PARENT_LOG_DIR:
for level in LoggerFactory.levels:
if level >= LoggerFactory.LEVEL:
level_logger_name = logging._levelToName[level]
logger.addHandler(
LoggerFactory.get_global_handler(level_logger_name, level, LoggerFactory.PARENT_LOG_DIR))


def setDirectory(directory=None):
LoggerFactory.set_directory(directory)


def setLevel(level):
LoggerFactory.LEVEL = level


def getLogger(className=None, useLevelFile=False):
if className is None:
frame = inspect.stack()[1]
module = inspect.getmodule(frame[0])
className = 'stat'
return LoggerFactory.get_logger(className)


def exception_to_trace_string(ex):
return "".join(traceback.TracebackException.from_exception(ex).format())


class ROpenHandler(TimedRotatingFileHandler):
def _open(self):
prevumask = os.umask(000)
rtv = TimedRotatingFileHandler._open(self)
os.umask(prevumask)
return rtv


def sql_logger(job_id='', log_type='sql'):
key = job_id + log_type
if key in LoggerFactory.schedule_logger_dict.keys():
return LoggerFactory.schedule_logger_dict[key]
return get_job_logger(job_id=job_id, log_type=log_type)


def ready_log(msg, job=None, task=None, role=None, party_id=None, detail=None):
prefix, suffix = base_msg(job, task, role, party_id, detail)
return f"{prefix}{msg} ready{suffix}"


def start_log(msg, job=None, task=None, role=None, party_id=None, detail=None):
prefix, suffix = base_msg(job, task, role, party_id, detail)
return f"{prefix}start to {msg}{suffix}"


def successful_log(msg, job=None, task=None, role=None,
party_id=None, detail=None):
prefix, suffix = base_msg(job, task, role, party_id, detail)
return f"{prefix}{msg} successfully{suffix}"


def warning_log(msg, job=None, task=None, role=None,
party_id=None, detail=None):
prefix, suffix = base_msg(job, task, role, party_id, detail)
return f"{prefix}{msg} is not effective{suffix}"


def failed_log(msg, job=None, task=None, role=None,
party_id=None, detail=None):
prefix, suffix = base_msg(job, task, role, party_id, detail)
return f"{prefix}failed to {msg}{suffix}"


def base_msg(job=None, task=None, role: str = None,
party_id: typing.Union[str, int] = None, detail=None):
if detail:
detail_msg = f" detail: \n{detail}"
else:
detail_msg = ""
if task is not None:
return f"task {task.f_task_id} {task.f_task_version} ", f" on {task.f_role} {task.f_party_id}{detail_msg}"
elif job is not None:
return "", f" on {job.f_role} {job.f_party_id}{detail_msg}"
elif role and party_id:
return "", f" on {role} {party_id}{detail_msg}"
else:
return "", f"{detail_msg}"


def exception_to_trace_string(ex):
return "".join(traceback.TracebackException.from_exception(ex).format())


def get_logger_base_dir():
job_log_dir = file_utils.get_rag_flow_directory('logs')
return job_log_dir


def get_job_logger(job_id, log_type):
rag_flow_log_dir = file_utils.get_rag_flow_directory('logs', 'rag_flow')
job_log_dir = file_utils.get_rag_flow_directory('logs', job_id)
if not job_id:
log_dirs = [rag_flow_log_dir]
else:
if log_type == 'audit':
log_dirs = [job_log_dir, rag_flow_log_dir]
else:
log_dirs = [job_log_dir]
if LoggerFactory.log_share:
oldmask = os.umask(000)
os.makedirs(job_log_dir, exist_ok=True)
os.makedirs(rag_flow_log_dir, exist_ok=True)
os.umask(oldmask)
else:
os.makedirs(job_log_dir, exist_ok=True)
os.makedirs(rag_flow_log_dir, exist_ok=True)
logger = LoggerFactory.new_logger(f"{job_id}_{log_type}")
for job_log_dir in log_dirs:
handler = LoggerFactory.get_handler(class_name=None, level=LoggerFactory.LEVEL,
log_dir=job_log_dir, log_type=log_type, job_id=job_id)
error_handler = LoggerFactory.get_handler(
class_name=None,
level=logging.ERROR,
log_dir=job_log_dir,
log_type=log_type,
job_id=job_id)
logger.addHandler(handler)
logger.addHandler(error_handler)
with LoggerFactory.lock:
LoggerFactory.schedule_logger_dict[job_id + log_type] = logger
return logger

logger = getLogger()
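Because the new api/utils/log_utils.py implementation is interleaved with the removed LoggerFactory code in the hunk above, here is the added code pieced together in one place; it is lightly condensed, and the final `return logger` is inferred rather than visible in the hunk:

```python
# api/utils/log_utils.py after this PR, reassembled from the added lines above.
import logging
import os
from logging.handlers import RotatingFileHandler

from api.utils.file_utils import get_project_base_directory

LOG_LEVEL = logging.INFO
LOG_FILE = os.path.abspath(os.path.join(get_project_base_directory(), "logs", f"ragflow_{os.getpid()}.log"))
LOG_FORMAT = "%(asctime)-15s %(levelname)-8s %(process)d %(message)s"
logger = None


def getLogger():
    global logger
    if logger is not None:
        return logger

    print(f"log file path: {LOG_FILE}")
    os.makedirs(os.path.dirname(LOG_FILE), exist_ok=True)
    logger = logging.getLogger("ragflow")
    logger.setLevel(LOG_LEVEL)

    # one rotating file handler: every process writes to logs/ragflow_<pid>.log
    handler1 = RotatingFileHandler(LOG_FILE, maxBytes=10*1024*1024, backupCount=5)
    handler1.setLevel(LOG_LEVEL)
    handler1.setFormatter(logging.Formatter(LOG_FORMAT))
    logger.addHandler(handler1)

    # plus a console handler with the same level and format
    handler2 = logging.StreamHandler()
    handler2.setLevel(LOG_LEVEL)
    handler2.setFormatter(logging.Formatter(LOG_FORMAT))
    logger.addHandler(handler2)

    return logger  # inferred: the diff ends with the module-level `logger = getLogger()`


logger = getLogger()
```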