Move var from rag.settings to common.globals (#11022)
### What problem does this PR solve?

As title: move the shared backend-configuration variables (`ES`, `OS`, `INFINITY`, `AZURE`, `S3`, `MINIO`, `OSS`, `REDIS`) from `rag.settings` into `common.globals`.

### Type of change

- [x] Refactoring

---

Signed-off-by: Jin Hai <haijin.chn@gmail.com>
In `UserService`/`TenantService`, the MinIO pool is now read through `common.globals` rather than import-bound from `rag.settings`:

```diff
@@ -27,7 +27,7 @@ from api.db.services.common_service import CommonService
 from common.misc_utils import get_uuid
 from common.time_utils import current_timestamp, datetime_format
 from common.constants import StatusEnum
-from rag.settings import MINIO
+from common import globals
 
 
 class UserService(CommonService):
@@ -221,7 +221,7 @@ class TenantService(CommonService):
     @DB.connection_context()
     def user_gateway(cls, tenant_id):
         hash_obj = hashlib.sha256(tenant_id.encode("utf-8"))
-        return int(hash_obj.hexdigest(), 16)%len(MINIO)
+        return int(hash_obj.hexdigest(), 16)%len(globals.MINIO)
 
 
 class UserTenantService(CommonService):
```
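`user_gateway` is the routing primitive this refactor touches: it hashes the tenant id with SHA-256 and reduces it modulo the number of configured MinIO entries, so each tenant lands on the same storage instance every time, with no lookup table. A self-contained sketch of the idea (the three-entry `minio_pool` is a hypothetical stand-in for `globals.MINIO`):

```python
import hashlib

# Hypothetical stand-in for globals.MINIO; RAGFlow loads the real pool from config.
minio_pool = ["minio-0:9000", "minio-1:9000", "minio-2:9000"]

def user_gateway(tenant_id: str) -> int:
    """Deterministically map a tenant to an index into the MinIO pool."""
    hash_obj = hashlib.sha256(tenant_id.encode("utf-8"))
    return int(hash_obj.hexdigest(), 16) % len(minio_pool)

# The same tenant always resolves to the same instance:
assert user_gateway("tenant-42") == user_gateway("tenant-42")
print(minio_pool[user_gateway("tenant-42")])
```

SHA-256 (rather than Python's built-in `hash()`) keeps the mapping stable across processes and restarts, since `hash()` on strings is salted per interpreter run.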
The system health check drops its `rag.settings` alias and probes MinIO via `common.globals`:

```diff
@@ -19,7 +19,6 @@ from timeit import default_timer as timer
 
 from api import settings
 from api.db.db_models import DB
-from rag import settings as rag_settings
 from rag.utils.redis_conn import REDIS_CONN
 from rag.utils.storage_factory import STORAGE_IMPL
 from rag.utils.es_conn import ESConnection
@@ -121,7 +120,7 @@ def get_mysql_status():
 def check_minio_alive():
     start_time = timer()
     try:
-        response = requests.get(f'http://{rag_settings.MINIO["host"]}/minio/health/live')
+        response = requests.get(f'http://{globals.MINIO["host"]}/minio/health/live')
         if response.status_code == 200:
             return {"status": "alive", "message": f"Confirm elapsed: {(timer() - start_time) * 1000.0:.1f} ms."}
         else:
```
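Only the module alias changes here; the probe logic is untouched (the matching `from common import globals` import is presumably added in a part of the diff not shown). `/minio/health/live` is MinIO's standard liveness endpoint and answers 200 when the server is up, so a self-contained version of the probe looks roughly like this (the default `host` is a placeholder; RAGFlow takes it from `globals.MINIO["host"]`):

```python
from timeit import default_timer as timer

import requests

def check_minio_alive(host: str = "localhost:9000") -> dict:
    """Probe MinIO's liveness endpoint and report the round trip in ms."""
    start_time = timer()
    try:
        response = requests.get(f"http://{host}/minio/health/live", timeout=5)
        if response.status_code == 200:
            return {"status": "alive",
                    "message": f"Confirm elapsed: {(timer() - start_time) * 1000.0:.1f} ms."}
        return {"status": "unhealthy", "message": f"HTTP {response.status_code}"}
    except requests.RequestException as e:
        return {"status": "unreachable", "message": str(e)}

print(check_minio_alive())
```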
`common.globals` absorbs the backend configuration that used to live in `rag.settings`, including the env-driven selection of the document engine and storage backend:

```diff
@@ -13,13 +13,52 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
+import os
+from common.config_utils import get_base_config, decrypt_database_config
 
 EMBEDDING_MDL = ""
 
 EMBEDDING_CFG = ""
 
-DOC_ENGINE = None
+DOC_ENGINE = os.getenv('DOC_ENGINE', 'elasticsearch')
 
 docStoreConn = None
 
 retriever = None
+
+# moved from rag.settings
+ES = {}
+INFINITY = {}
+AZURE = {}
+S3 = {}
+MINIO = {}
+OSS = {}
+OS = {}
+REDIS = {}
+
+STORAGE_IMPL_TYPE = os.getenv('STORAGE_IMPL', 'MINIO')
+
+# Initialize only the selected configuration, based on environment variables,
+# to avoid initialization errors caused by missing config sections.
+if DOC_ENGINE == 'elasticsearch':
+    ES = get_base_config("es", {})
+elif DOC_ENGINE == 'opensearch':
+    OS = get_base_config("os", {})
+elif DOC_ENGINE == 'infinity':
+    INFINITY = get_base_config("infinity", {"uri": "infinity:23817"})
+
+if STORAGE_IMPL_TYPE in ['AZURE_SPN', 'AZURE_SAS']:
+    AZURE = get_base_config("azure", {})
+elif STORAGE_IMPL_TYPE == 'AWS_S3':
+    S3 = get_base_config("s3", {})
+elif STORAGE_IMPL_TYPE == 'MINIO':
+    MINIO = decrypt_database_config(name="minio")
+elif STORAGE_IMPL_TYPE == 'OSS':
+    OSS = get_base_config("oss", {})
+
+try:
+    REDIS = decrypt_database_config(name="redis")
+except Exception:
+    try:
+        REDIS = get_base_config("redis", {})
+    except Exception:
+        REDIS = {}
```
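With this block in place, `common.globals` becomes the single point where the document engine and storage backend are selected and their configuration loaded, once, at first import; only the dict for the active backend is populated, the rest stay empty. A hedged sketch of the consumer-side pattern the rest of the diff adopts:

```python
from common import globals

# Decided once, at import time of common.globals, from environment variables.
print(globals.DOC_ENGINE)         # e.g. 'elasticsearch'
print(globals.STORAGE_IMPL_TYPE)  # e.g. 'MINIO'

# Only the active backend's configuration dict is filled in.
if globals.MINIO:
    print(globals.MINIO["host"])  # same key the health check reads
```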
Correspondingly, `rag.settings` loses the moved block and its now-unused imports:

```diff
@@ -15,50 +15,12 @@
 #
 import os
 import logging
-from common.config_utils import get_base_config, decrypt_database_config
 from common.file_utils import get_project_base_directory
 from common.misc_utils import pip_install_torch
-from common import globals
 
 # Server
 RAG_CONF_PATH = os.path.join(get_project_base_directory(), "conf")
 
-# Get storage type and document engine from system environment variables
-STORAGE_IMPL_TYPE = os.getenv('STORAGE_IMPL', 'MINIO')
-globals.DOC_ENGINE = os.getenv('DOC_ENGINE', 'elasticsearch')
-
-ES = {}
-INFINITY = {}
-AZURE = {}
-S3 = {}
-MINIO = {}
-OSS = {}
-OS = {}
-
-# Initialize the selected configuration data based on environment variables to solve the problem of initialization errors due to lack of configuration
-if globals.DOC_ENGINE == 'elasticsearch':
-    ES = get_base_config("es", {})
-elif globals.DOC_ENGINE == 'opensearch':
-    OS = get_base_config("os", {})
-elif globals.DOC_ENGINE == 'infinity':
-    INFINITY = get_base_config("infinity", {"uri": "infinity:23817"})
-
-if STORAGE_IMPL_TYPE in ['AZURE_SPN', 'AZURE_SAS']:
-    AZURE = get_base_config("azure", {})
-elif STORAGE_IMPL_TYPE == 'AWS_S3':
-    S3 = get_base_config("s3", {})
-elif STORAGE_IMPL_TYPE == 'MINIO':
-    MINIO = decrypt_database_config(name="minio")
-elif STORAGE_IMPL_TYPE == 'OSS':
-    OSS = get_base_config("oss", {})
-
-try:
-    REDIS = decrypt_database_config(name="redis")
-except Exception:
-    try:
-        REDIS = get_base_config("redis", {})
-    except Exception:
-        REDIS = {}
 DOC_MAXIMUM_SIZE = int(os.environ.get("MAX_CONTENT_LENGTH", 128 * 1024 * 1024))
 DOC_BULK_SIZE = int(os.environ.get("DOC_BULK_SIZE", 4))
 EMBEDDING_BATCH_SIZE = int(os.environ.get("EMBEDDING_BATCH_SIZE", 16))
```
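`rag.settings` shrinks to path and document-processing knobs; everything backend-related now lives in one module. The import style matters here: `from rag.settings import MINIO` copies the binding at import time, whereas `from common import globals` plus `globals.MINIO` goes through the module attribute on every access, so a dict rebound during initialization is seen by all consumers. A minimal illustration with a synthetic module object:

```python
import types

# Stand-in for a settings module whose attribute is rebound at runtime.
config = types.ModuleType("config")
config.MINIO = {}

# Style 1: `from config import MINIO` copies the current binding.
MINIO = config.MINIO

# Initialization later rebinds the module attribute to the loaded config.
config.MINIO = {"host": "minio:9000"}

print(MINIO)         # {}  -- the early copy is stale
print(config.MINIO)  # {'host': 'minio:9000'}  -- attribute access stays live
```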
The Azure SAS blob connector replaces the `rag.settings` import and reads `AZURE` from `common.globals`:

```diff
@@ -18,17 +18,17 @@ import logging
 import os
 import time
 from io import BytesIO
-from rag import settings
 from common.decorator import singleton
 from azure.storage.blob import ContainerClient
+from common import globals
 
 
 @singleton
 class RAGFlowAzureSasBlob:
     def __init__(self):
         self.conn = None
-        self.container_url = os.getenv('CONTAINER_URL', settings.AZURE["container_url"])
-        self.sas_token = os.getenv('SAS_TOKEN', settings.AZURE["sas_token"])
+        self.container_url = os.getenv('CONTAINER_URL', globals.AZURE["container_url"])
+        self.sas_token = os.getenv('SAS_TOKEN', globals.AZURE["sas_token"])
         self.__open__()
 
     def __open__(self):
```
Likewise the Azure SPN connector:

```diff
@@ -17,21 +17,21 @@
 import logging
 import os
 import time
-from rag import settings
 from common.decorator import singleton
 from azure.identity import ClientSecretCredential, AzureAuthorityHosts
 from azure.storage.filedatalake import FileSystemClient
+from common import globals
 
 
 @singleton
 class RAGFlowAzureSpnBlob:
     def __init__(self):
         self.conn = None
-        self.account_url = os.getenv('ACCOUNT_URL', settings.AZURE["account_url"])
-        self.client_id = os.getenv('CLIENT_ID', settings.AZURE["client_id"])
-        self.secret = os.getenv('SECRET', settings.AZURE["secret"])
-        self.tenant_id = os.getenv('TENANT_ID', settings.AZURE["tenant_id"])
-        self.container_name = os.getenv('CONTAINER_NAME', settings.AZURE["container_name"])
+        self.account_url = os.getenv('ACCOUNT_URL', globals.AZURE["account_url"])
+        self.client_id = os.getenv('CLIENT_ID', globals.AZURE["client_id"])
+        self.secret = os.getenv('SECRET', globals.AZURE["secret"])
+        self.tenant_id = os.getenv('TENANT_ID', globals.AZURE["tenant_id"])
+        self.container_name = os.getenv('CONTAINER_NAME', globals.AZURE["container_name"])
         self.__open__()
 
     def __open__(self):
```
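Both Azure connectors keep the same precedence rule: an environment variable wins, otherwise the value comes from the shared config dict. A sketch of the pattern (the `AZURE` literal is made up for the example):

```python
import os

# Hypothetical values; RAGFlow loads these into globals.AZURE from its config.
AZURE = {"container_url": "https://example.blob.core.windows.net/ragflow",
         "sas_token": "sv=2024-placeholder"}

# The environment variable takes precedence over the config file value.
container_url = os.getenv('CONTAINER_URL', AZURE["container_url"])
sas_token = os.getenv('SAS_TOKEN', AZURE["sas_token"])
print(container_url)
```

One caveat of this idiom: `os.getenv(name, default)` evaluates its fallback eagerly, so these lines raise `KeyError` when the config section lacks the key, even if the environment variable is set, because the fallback expression is computed before the call.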
The Elasticsearch connector makes the same swap for every `settings.ES` reference:

```diff
@@ -24,7 +24,6 @@ import copy
 from elasticsearch import Elasticsearch, NotFoundError
 from elasticsearch_dsl import UpdateByQuery, Q, Search, Index
 from elastic_transport import ConnectionTimeout
-from rag import settings
 from rag.settings import TAG_FLD, PAGERANK_FLD
 from common.decorator import singleton
 from common.file_utils import get_project_base_directory
@@ -33,6 +32,7 @@ from rag.utils.doc_store_conn import DocStoreConnection, MatchExpr, OrderByExpr,
     FusionExpr
 from rag.nlp import is_english, rag_tokenizer
 from common.float_utils import get_float
+from common import globals
 
 ATTEMPT_TIME = 2
 
@@ -43,17 +43,17 @@ logger = logging.getLogger('ragflow.es_conn')
 class ESConnection(DocStoreConnection):
     def __init__(self):
         self.info = {}
-        logger.info(f"Use Elasticsearch {settings.ES['hosts']} as the doc engine.")
+        logger.info(f"Use Elasticsearch {globals.ES['hosts']} as the doc engine.")
         for _ in range(ATTEMPT_TIME):
             try:
                 if self._connect():
                     break
             except Exception as e:
-                logger.warning(f"{str(e)}. Waiting Elasticsearch {settings.ES['hosts']} to be healthy.")
+                logger.warning(f"{str(e)}. Waiting Elasticsearch {globals.ES['hosts']} to be healthy.")
                 time.sleep(5)
 
         if not self.es.ping():
-            msg = f"Elasticsearch {settings.ES['hosts']} is unhealthy in 120s."
+            msg = f"Elasticsearch {globals.ES['hosts']} is unhealthy in 120s."
             logger.error(msg)
             raise Exception(msg)
         v = self.info.get("version", {"number": "8.11.3"})
@@ -68,14 +68,14 @@ class ESConnection(DocStoreConnection):
             logger.error(msg)
             raise Exception(msg)
         self.mapping = json.load(open(fp_mapping, "r"))
-        logger.info(f"Elasticsearch {settings.ES['hosts']} is healthy.")
+        logger.info(f"Elasticsearch {globals.ES['hosts']} is healthy.")
 
     def _connect(self):
         self.es = Elasticsearch(
-            settings.ES["hosts"].split(","),
-            basic_auth=(settings.ES["username"], settings.ES[
-                "password"]) if "username" in settings.ES and "password" in settings.ES else None,
-            verify_certs= settings.ES.get("verify_certs", False),
+            globals.ES["hosts"].split(","),
+            basic_auth=(globals.ES["username"], globals.ES[
+                "password"]) if "username" in globals.ES and "password" in globals.ES else None,
+            verify_certs= globals.ES.get("verify_certs", False),
             timeout=600 )
         if self.es:
             self.info = self.es.info()
```
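Beyond the renames, the hunk shows the connector's startup discipline: try to connect up to `ATTEMPT_TIME` times, sleeping between attempts, then fail loudly. The generic shape of that pattern, as a sketch (`connect` is a placeholder callable):

```python
import logging
import time

ATTEMPT_TIME = 2  # same bound the connector uses

def connect_with_retry(connect, attempts=ATTEMPT_TIME, delay=5.0):
    """Invoke `connect` until it succeeds or the attempt budget runs out."""
    last_error = None
    for _ in range(attempts):
        try:
            return connect()
        except Exception as e:
            last_error = e
            logging.warning("%s. Waiting for the backend to be healthy.", e)
            time.sleep(delay)
    raise RuntimeError(f"backend still unhealthy after {attempts} attempts") from last_error
```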
The Infinity connector:

```diff
@@ -25,11 +25,11 @@ from infinity.common import ConflictType, InfinityException, SortType
 from infinity.index import IndexInfo, IndexType
 from infinity.connection_pool import ConnectionPool
 from infinity.errors import ErrorCode
-from rag import settings
 from rag.settings import PAGERANK_FLD, TAG_FLD
 from common.decorator import singleton
 import pandas as pd
 from common.file_utils import get_project_base_directory
+from common import globals
 from rag.nlp import is_english
 
 from rag.utils.doc_store_conn import (
@@ -130,8 +130,8 @@ def concat_dataframes(df_list: list[pd.DataFrame], selectFields: list[str]) -> p
 @singleton
 class InfinityConnection(DocStoreConnection):
     def __init__(self):
-        self.dbName = settings.INFINITY.get("db_name", "default_db")
-        infinity_uri = settings.INFINITY["uri"]
+        self.dbName = globals.INFINITY.get("db_name", "default_db")
+        infinity_uri = globals.INFINITY["uri"]
         if ":" in infinity_uri:
             host, port = infinity_uri.split(":")
             infinity_uri = infinity.common.NetworkAddress(host, int(port))
```
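The Infinity connector accepts either a ready-made address or a plain `host:port` string and splits the latter itself. The same parsing step in isolation (a tuple stands in for `infinity.common.NetworkAddress` so the sketch runs without the SDK):

```python
def parse_infinity_uri(infinity_uri: str):
    """Turn 'host:port' into a (host, port) pair; pass anything else through."""
    if ":" in infinity_uri:
        host, port = infinity_uri.split(":")
        return (host, int(port))  # stand-in for infinity.common.NetworkAddress
    return infinity_uri

print(parse_infinity_uri("infinity:23817"))  # ('infinity', 23817)
```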
The MinIO connector:

```diff
@@ -20,8 +20,8 @@ from minio import Minio
 from minio.commonconfig import CopySource
 from minio.error import S3Error
 from io import BytesIO
-from rag import settings
 from common.decorator import singleton
+from common import globals
 
 
 @singleton
@@ -38,14 +38,14 @@ class RAGFlowMinio:
             pass
 
         try:
-            self.conn = Minio(settings.MINIO["host"],
-                              access_key=settings.MINIO["user"],
-                              secret_key=settings.MINIO["password"],
+            self.conn = Minio(globals.MINIO["host"],
+                              access_key=globals.MINIO["user"],
+                              secret_key=globals.MINIO["password"],
                               secure=False
                               )
         except Exception:
             logging.exception(
-                "Fail to connect %s " % settings.MINIO["host"])
+                "Fail to connect %s " % globals.MINIO["host"])
 
     def __close__(self):
         del self.conn
```
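Every connector in this diff is wrapped in `@singleton` from `common.decorator`, so each process holds exactly one client per backend. RAGFlow's actual decorator may differ; one common way to write such a decorator looks like this:

```python
import functools

def singleton(cls):
    """Create the wrapped class once; hand back the same instance thereafter."""
    instances = {}

    @functools.wraps(cls)
    def get_instance(*args, **kwargs):
        if cls not in instances:
            instances[cls] = cls(*args, **kwargs)
        return instances[cls]
    return get_instance

@singleton
class Connection:
    def __init__(self):
        print("connecting once")

a, b = Connection(), Connection()
assert a is b  # both names refer to the single instance
```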
The OpenSearch connector mirrors the Elasticsearch changes, swapping every `settings.OS` read for `globals.OS`:

```diff
@@ -24,13 +24,13 @@ import copy
 from opensearchpy import OpenSearch, NotFoundError
 from opensearchpy import UpdateByQuery, Q, Search, Index
 from opensearchpy import ConnectionTimeout
-from rag import settings
 from rag.settings import TAG_FLD, PAGERANK_FLD
 from common.decorator import singleton
 from common.file_utils import get_project_base_directory
 from rag.utils.doc_store_conn import DocStoreConnection, MatchExpr, OrderByExpr, MatchTextExpr, MatchDenseExpr, \
     FusionExpr
 from rag.nlp import is_english, rag_tokenizer
+from common import globals
 
 ATTEMPT_TIME = 2
 
@@ -41,13 +41,13 @@ logger = logging.getLogger('ragflow.opensearch_conn')
 class OSConnection(DocStoreConnection):
     def __init__(self):
         self.info = {}
-        logger.info(f"Use OpenSearch {settings.OS['hosts']} as the doc engine.")
+        logger.info(f"Use OpenSearch {globals.OS['hosts']} as the doc engine.")
         for _ in range(ATTEMPT_TIME):
             try:
                 self.os = OpenSearch(
-                    settings.OS["hosts"].split(","),
-                    http_auth=(settings.OS["username"], settings.OS[
-                        "password"]) if "username" in settings.OS and "password" in settings.OS else None,
+                    globals.OS["hosts"].split(","),
+                    http_auth=(globals.OS["username"], globals.OS[
+                        "password"]) if "username" in globals.OS and "password" in globals.OS else None,
                     verify_certs=False,
                     timeout=600
                 )
@@ -55,10 +55,10 @@ class OSConnection(DocStoreConnection):
                 self.info = self.os.info()
                 break
             except Exception as e:
-                logger.warning(f"{str(e)}. Waiting OpenSearch {settings.OS['hosts']} to be healthy.")
+                logger.warning(f"{str(e)}. Waiting OpenSearch {globals.OS['hosts']} to be healthy.")
                 time.sleep(5)
         if not self.os.ping():
-            msg = f"OpenSearch {settings.OS['hosts']} is unhealthy in 120s."
+            msg = f"OpenSearch {globals.OS['hosts']} is unhealthy in 120s."
             logger.error(msg)
             raise Exception(msg)
         v = self.info.get("version", {"number": "2.18.0"})
@@ -73,7 +73,7 @@ class OSConnection(DocStoreConnection):
             logger.error(msg)
             raise Exception(msg)
         self.mapping = json.load(open(fp_mapping, "r"))
-        logger.info(f"OpenSearch {settings.OS['hosts']} is healthy.")
+        logger.info(f"OpenSearch {globals.OS['hosts']} is healthy.")
 
     """
     Database operations
```
The OSS connector now sources its config dict from `common.globals`:

```diff
@@ -20,14 +20,14 @@ from botocore.config import Config
 import time
 from io import BytesIO
 from common.decorator import singleton
-from rag import settings
+from common import globals
 
 
 @singleton
 class RAGFlowOSS:
     def __init__(self):
         self.conn = None
-        self.oss_config = settings.OSS
+        self.oss_config = globals.OSS
         self.access_key = self.oss_config.get('access_key', None)
         self.secret_key = self.oss_config.get('secret_key', None)
         self.endpoint_url = self.oss_config.get('endpoint_url', None)
```
The Redis/Valkey connector follows suit:

```diff
@@ -19,8 +19,8 @@ import json
 import uuid
 
 import valkey as redis
-from rag import settings
 from common.decorator import singleton
+from common import globals
 from valkey.lock import Lock
 import trio
 
@@ -61,7 +61,7 @@ class RedisDB:
 
     def __init__(self):
         self.REDIS = None
-        self.config = settings.REDIS
+        self.config = globals.REDIS
        self.__open__()
 
     def register_scripts(self) -> None:
```
And the S3 connector:

```diff
@@ -21,13 +21,14 @@ from botocore.config import Config
 import time
 from io import BytesIO
 from common.decorator import singleton
-from rag import settings
+from common import globals
 
+
 @singleton
 class RAGFlowS3:
     def __init__(self):
         self.conn = None
-        self.s3_config = settings.S3
+        self.s3_config = globals.S3
         self.access_key = self.s3_config.get('access_key', None)
         self.secret_key = self.s3_config.get('secret_key', None)
         self.session_token = self.s3_config.get('session_token', None)
```