Mirror of https://github.com/infiniflow/ragflow.git (synced 2025-12-23 06:46:40 +08:00)
Remove Chinese comments and fix function arguments errors (#12052)
### What problem does this PR solve?

As title

### Type of change

- [x] Refactoring

Signed-off-by: Jin Hai <haijin.chn@gmail.com>
@@ -163,6 +163,7 @@ def validate_request(*args, **kwargs):
             if error_arguments:
                 error_string += "required argument values: {}".format(",".join(["{}={}".format(a[0], a[1]) for a in error_arguments]))
             return error_string
+        return None

     def wrapper(func):
         @wraps(func)
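The added `return None` makes the no-error path of the argument checker explicit instead of falling off the end of the function. A minimal sketch of the pattern this hunk touches, with the helper name and example fields invented for illustration (not ragflow's actual code): a checker builds a human-readable error string and returns it, or returns None when the payload is valid, and the decorator can then turn a non-None result into an error response.

# Sketch only: _check and the example payload are hypothetical.
def _check(payload: dict, required: tuple[str, ...]) -> str | None:
    missing = [k for k in required if k not in payload]
    if missing:
        return "missing required arguments: {}".format(",".join(missing))
    return None  # explicit "no validation errors", mirroring the added line


print(_check({"name": "kb1"}, required=("name", "embedding_model")))
# -> missing required arguments: embedding_model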
@@ -409,7 +410,7 @@ def get_parser_config(chunk_method, parser_config):
     if default_config is None:
         return deep_merge(base_defaults, parser_config)

-    # Ensure raptor and graphrag fields have default values if not provided
+    # Ensure raptor and graph_rag fields have default values if not provided
     merged_config = deep_merge(base_defaults, default_config)
     merged_config = deep_merge(merged_config, parser_config)

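Only the comment changes here (graphrag → graph_rag); the surrounding context shows the merge order: base defaults first, then the chunk-method defaults, then the user-supplied parser_config, so later values win. A minimal sketch of a recursive deep_merge with that "second argument wins" behaviour, assuming ragflow's helper works roughly this way (the key names in the example are illustrative):

def deep_merge(base: dict, override: dict) -> dict:
    """Recursively merge override into base; override wins on conflicts."""
    merged = dict(base)
    for key, value in override.items():
        if isinstance(value, dict) and isinstance(merged.get(key), dict):
            merged[key] = deep_merge(merged[key], value)
        else:
            merged[key] = value
    return merged


base_defaults = {"raptor": {"use_raptor": False}, "graphrag": {"use_graphrag": False}}
parser_config = {"raptor": {"use_raptor": True}}
print(deep_merge(base_defaults, parser_config))
# {'raptor': {'use_raptor': True}, 'graphrag': {'use_graphrag': False}}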
@@ -186,7 +186,7 @@ class OnyxConfluence:
         # between the db and redis everywhere the credentials might be updated
         new_credential_str = json.dumps(new_credentials)
         self.redis_client.set(
-            self.credential_key, new_credential_str, nx=True, ex=self.CREDENTIAL_TTL
+            self.credential_key, new_credential_str, exp=self.CREDENTIAL_TTL
         )
         self._credentials_provider.set_credentials(new_credentials)

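redis-py's `Redis.set()` does accept `nx=` and `ex=`, so the removed line is only an argument error if `self.redis_client` is not a raw redis-py client but a thin wrapper whose `set()` takes an `exp=` TTL keyword, which is what the replacement assumes; note the set-if-absent behaviour of `nx=True` is dropped along the way. A minimal sketch of such a wrapper, purely as an assumption about the surrounding code (the class and key names are illustrative):

import json

import redis


class RedisWrapper:
    """Assumed shape of a wrapper whose set() takes exp= (TTL in seconds)."""

    def __init__(self, client: redis.Redis):
        self._client = client

    def set(self, key: str, value: str, exp: int = 3600) -> bool:
        # Delegates to redis-py, where the TTL keyword is ex=.
        return bool(self._client.set(key, value, ex=exp))


# Usage mirroring the new call in the hunk (values are illustrative):
CREDENTIAL_TTL = 300
client = RedisWrapper(redis.Redis())
client.set("confluence_credential", json.dumps({"token": "..."}), exp=CREDENTIAL_TTL)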
@@ -1599,8 +1599,8 @@ class ConfluenceConnector(
             semantic_identifier=semantic_identifier,
             extension=".html",  # Confluence pages are HTML
             blob=page_content.encode("utf-8"),  # Encode page content as bytes
-            size_bytes=len(page_content.encode("utf-8")),  # Calculate size in bytes
             doc_updated_at=datetime_from_string(page["version"]["when"]),
+            size_bytes=len(page_content.encode("utf-8")),  # Calculate size in bytes
             primary_owners=primary_owners if primary_owners else None,
             metadata=metadata if metadata else None,
         )
@@ -94,6 +94,7 @@ class Document(BaseModel):
     blob: bytes
     doc_updated_at: datetime
     size_bytes: int
+    primary_owners: list
     metadata: Optional[dict[str, Any]] = None


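Declaring `primary_owners: list` on the Pydantic model is what lets the `Document(...)` construction in the Confluence hunk above keep passing `primary_owners=` as a keyword argument (the reordering of `size_bytes` there is cosmetic, since all fields are passed by keyword). A minimal sketch of the trimmed model and one way to instantiate it; the field types follow the hunk, the values are illustrative:

from datetime import datetime, timezone
from typing import Any, Optional

from pydantic import BaseModel


class Document(BaseModel):
    # Trimmed to the fields visible in the hunk; the real model has more.
    blob: bytes
    doc_updated_at: datetime
    size_bytes: int
    primary_owners: list
    metadata: Optional[dict[str, Any]] = None


page_content = "<h1>Hello</h1>"
doc = Document(
    blob=page_content.encode("utf-8"),
    doc_updated_at=datetime.now(timezone.utc),
    size_bytes=len(page_content.encode("utf-8")),
    primary_owners=[],
    metadata=None,
)
print(doc.size_bytes)  # 14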
@@ -167,7 +167,6 @@ def get_latest_message_time(thread: ThreadType) -> datetime:


 def _build_doc_id(channel_id: str, thread_ts: str) -> str:
-    """构建文档ID"""
     return f"{channel_id}__{thread_ts}"


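The removed one-line docstring, "构建文档ID" ("build the document ID"), only restated the function name; the behaviour is fully visible in the body. A quick usage example with illustrative Slack-style values:

def _build_doc_id(channel_id: str, thread_ts: str) -> str:
    return f"{channel_id}__{thread_ts}"


doc_id = _build_doc_id(channel_id="C0123456789", thread_ts="1700000000.000100")
assert doc_id == "C0123456789__1700000000.000100"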
@@ -179,7 +178,6 @@ def thread_to_doc(
     user_cache: dict[str, BasicExpertInfo | None],
     channel_access: Any | None,
 ) -> Document:
-    """将线程转换为文档"""
     channel_id = channel["id"]

     initial_sender_expert_info = expert_info_from_slack_id(
@@ -237,7 +235,6 @@ def filter_channels(
     channels_to_connect: list[str] | None,
     regex_enabled: bool,
 ) -> list[ChannelType]:
-    """过滤频道"""
     if not channels_to_connect:
         return all_channels

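The removed docstring "过滤频道" ("filter channels") again just named the function. Only the early-return branch is visible in the hunk; the rest of the sketch below is an assumption about how a filter with `channels_to_connect` and `regex_enabled` parameters typically behaves (exact-name match, or regex match when enabled), not the connector's actual implementation:

import re
from typing import Any

ChannelType = dict[str, Any]  # assumed alias; Slack channel payloads carry a "name"


def filter_channels_sketch(
    all_channels: list[ChannelType],
    channels_to_connect: list[str] | None,
    regex_enabled: bool,
) -> list[ChannelType]:
    if not channels_to_connect:
        return all_channels  # visible in the hunk: no filter means keep everything
    if regex_enabled:
        patterns = [re.compile(p) for p in channels_to_connect]
        return [c for c in all_channels if any(p.fullmatch(c["name"]) for p in patterns)]
    wanted = set(channels_to_connect)
    return [c for c in all_channels if c["name"] in wanted]


channels = [{"name": "general"}, {"name": "eng-alerts"}]
print(filter_channels_sketch(channels, ["eng-.*"], regex_enabled=True))
# [{'name': 'eng-alerts'}]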
@@ -381,7 +378,6 @@ def _process_message(
         [MessageType], SlackMessageFilterReason | None
     ] = default_msg_filter,
 ) -> ProcessedSlackMessage:
-    """处理消息"""
     thread_ts = message.get("thread_ts")
     thread_or_message_ts = thread_ts or message["ts"]
     try:
@@ -536,7 +532,6 @@ class SlackConnector(
         end: SecondsSinceUnixEpoch | None = None,
         callback: Any = None,
     ) -> GenerateSlimDocumentOutput:
-        """获取所有简化文档(带权限同步)"""
        if self.client is None:
             raise ConnectorMissingCredentialError("Slack")
