Fix errors detected by Ruff (#3918)

### What problem does this PR solve?

Fix errors detected by Ruff: unused imports and variables, equality comparisons against `True`/`None`, and f-strings without placeholders.
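
For orientation, the findings fixed across this diff fall into a handful of Ruff rule families. Below is a small, purely illustrative Python fragment (not taken from this repository) that triggers each of them; `ruff check` reports the codes shown in the comments, and several of them can be auto-fixed with `ruff check --fix`.

```python
# Illustrative only: hypothetical names, not code from this repository.
import os          # F401: `os` imported but unused
import random      # F401: `random` imported but unused


def check(payload):
    label = "ok"                   # F841: local variable assigned but never used
    if payload != None:            # E711: use `payload is not None`
        print(f"payload present")  # F541: f-string without any placeholders
    if payload == True:            # E712: use `payload is True` or plain truthiness
        return "yes"
    return "no"
```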

### Type of change

- [x] Refactoring
Zhichang Yu · 2024-12-08 14:21:12 +08:00 · committed by GitHub
parent e267a026f3
commit 0d68a6cd1b
97 changed files with 2558 additions and 1976 deletions

View File

@@ -1,14 +1,21 @@
from beartype.claw import beartype_this_package
beartype_this_package() # <-- raise exceptions in your code
import importlib.metadata
__version__ = importlib.metadata.version("ragflow_sdk")
from .ragflow import RAGFlow
from .modules.dataset import DataSet
from .modules.chat import Chat
from .modules.session import Session
from .modules.document import Document
from .modules.chunk import Chunk
from .modules.agent import Agent
from .modules.agent import Agent
__version__ = importlib.metadata.version("ragflow_sdk")
__all__ = [
"RAGFlow",
"DataSet",
"Chat",
"Session",
"Document",
"Chunk",
"Agent"
]
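
The `__all__` list added above is the usual way to mark these imports as deliberate re-exports: without it, Ruff's F401 rule can report names that are imported in an `__init__.py` but never referenced in that file. A minimal sketch of the same pattern for a hypothetical package (a layout sketch, not runnable on its own):

```python
# mypkg/__init__.py (hypothetical package)
from .client import Client      # re-exported submodule names
from .errors import ClientError

__all__ = [  # declares the public API; keeps F401 from flagging the imports
    "Client",
    "ClientError",
]
```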

View File

@@ -29,7 +29,7 @@ class Session(Base):
raise Exception(json_data["message"])
if line.startswith("data:"):
json_data = json.loads(line[5:])
if json_data["data"] != True:
if not json_data["data"]:
answer = json_data["data"]["answer"]
reference = json_data["data"]["reference"]
temp_dict = {
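
The change in this hunk comes from Ruff's E712 rule (equality comparison to `True`). A small, hypothetical sketch of how the original spelling, an identity check, and plain truthiness behave once the value is not a strict boolean:

```python
# Hypothetical values, illustrative only.
def compare(value):
    flagged = value != True        # the form E712 flags
    identity = value is not True   # False only when value is the singleton True
    truthiness = not value         # True only for falsy values ({}, "", 0, None, False)
    return flagged, identity, truthiness


print(compare(True))              # (False, False, False)
print(compare({"answer": "hi"}))  # (True, True, False): a non-empty dict is truthy
```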

View File

@@ -1,5 +1,3 @@
import string
import random
import os
import pytest
import requests

View File

@@ -39,7 +39,6 @@ def update_dataset(auth, json_req):
def upload_file(auth, dataset_id, path):
authorization = {"Authorization": auth}
url = f"{HOST_ADDRESS}/v1/document/upload"
base_name = os.path.basename(path)
json_req = {
"kb_id": dataset_id,
}
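
The deleted `base_name` line is a Ruff F841 finding (local variable assigned but never used), so the assignment is dead code. A minimal hypothetical illustration:

```python
import os


def build_upload_request(path):
    base_name = os.path.basename(path)  # F841: assigned here, never read below
    return {"file": path}


# The fix is simply to drop the dead assignment:
def build_upload_request_fixed(path):
    return {"file": path}
```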

View File

@@ -1,3 +1,3 @@
def test_get_email(get_email):
print(f"\nEmail account:",flush=True)
print("\nEmail account:",flush=True)
print(f"{get_email}\n",flush=True)

View File

@@ -13,14 +13,10 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from common import HOST_ADDRESS, create_dataset, list_dataset, rm_dataset, update_dataset, upload_file, DATASET_NAME_LIMIT
from common import create_dataset, list_dataset, rm_dataset, upload_file
from common import list_document, get_docs_info, parse_docs
from time import sleep
from timeit import default_timer as timer
import re
import pytest
import random
import string
def test_parse_txt_document(get_auth):

View File

@@ -1,6 +1,5 @@
from common import HOST_ADDRESS, create_dataset, list_dataset, rm_dataset, update_dataset, DATASET_NAME_LIMIT
from common import create_dataset, list_dataset, rm_dataset, update_dataset, DATASET_NAME_LIMIT
import re
import pytest
import random
import string
@@ -33,8 +32,6 @@ def test_dataset(get_auth):
def test_dataset_1k_dataset(get_auth):
# create dataset
authorization = {"Authorization": get_auth}
url = f"{HOST_ADDRESS}/v1/kb/create"
for i in range(1000):
res = create_dataset(get_auth, f"test_create_dataset_{i}")
assert res.get("code") == 0, f"{res.get('message')}"
@@ -76,7 +73,7 @@ def test_duplicated_name_dataset(get_auth):
dataset_id = item.get("id")
dataset_list.append(dataset_id)
match = re.match(pattern, dataset_name)
assert match != None
assert match is not None
for dataset_id in dataset_list:
res = rm_dataset(get_auth, dataset_id)
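
The `assert` change follows Ruff's E711 rule: comparisons with `None` should use `is` / `is not`, because `None` is a singleton and identity is what the test actually means. A short hypothetical sketch:

```python
import re

pattern = r"^test_create_dataset_\d+$"  # hypothetical pattern
match = re.match(pattern, "test_create_dataset_42")

assert match != None      # E711: equality comparison with None
assert match is not None  # fixed spelling, checks identity
```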

View File

@@ -1,3 +1,3 @@
def test_get_email(get_email):
print(f"\nEmail account:",flush=True)
print("\nEmail account:",flush=True)
print(f"{get_email}\n",flush=True)

View File

@@ -1,4 +1,4 @@
from ragflow_sdk import RAGFlow,Agent
from ragflow_sdk import RAGFlow
from common import HOST_ADDRESS
import pytest