chore(api): Introduce Ruff Formatter. (#7291)

-LAN-
2024-08-15 12:54:05 +08:00
committed by GitHub
parent 8f16165f92
commit 3571292fbf
61 changed files with 1315 additions and 1335 deletions
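Every hunk below is mechanical output of `ruff format` (Ruff's Black-compatible formatter); no runtime behavior changes. As a minimal before/after sketch of the style it enforces — illustrative code only, the helper name is hypothetical and not from this diff — string literals are normalized to double quotes, and multi-line literals and calls get a trailing comma:

# Illustrative only; make_payload is hypothetical, not from the commit.

# Before `ruff format`:
def make_payload(app_id, title):
    return {
        'app_id': app_id,
        'title': title
    }

# After `ruff format`: double quotes, plus a trailing comma that keeps
# the literal exploded one entry per line on future runs.
def make_payload_formatted(app_id, title):
    return {
        "app_id": app_id,
        "title": title,
    }


print(make_payload_formatted("app-1", "Demo"))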

View File

@@ -1,13 +1,13 @@
 from blinker import signal

 # sender: app
-app_was_created = signal('app-was-created')
+app_was_created = signal("app-was-created")

 # sender: app, kwargs: app_model_config
-app_model_config_was_updated = signal('app-model-config-was-updated')
+app_model_config_was_updated = signal("app-model-config-was-updated")

 # sender: app, kwargs: published_workflow
-app_published_workflow_was_updated = signal('app-published-workflow-was-updated')
+app_published_workflow_was_updated = signal("app-published-workflow-was-updated")

 # sender: app, kwargs: synced_draft_workflow
-app_draft_workflow_was_synced = signal('app-draft-workflow-was-synced')
+app_draft_workflow_was_synced = signal("app-draft-workflow-was-synced")

View File

@@ -1,4 +1,4 @@
 from blinker import signal

 # sender: dataset
-dataset_was_deleted = signal('dataset-was-deleted')
+dataset_was_deleted = signal("dataset-was-deleted")

View File

@@ -1,4 +1,4 @@
 from blinker import signal

 # sender: document
-document_was_deleted = signal('document-was-deleted')
+document_was_deleted = signal("document-was-deleted")

View File

@@ -5,5 +5,11 @@ from tasks.clean_dataset_task import clean_dataset_task
 @dataset_was_deleted.connect
 def handle(sender, **kwargs):
     dataset = sender
-    clean_dataset_task.delay(dataset.id, dataset.tenant_id, dataset.indexing_technique,
-                             dataset.index_struct, dataset.collection_binding_id, dataset.doc_form)
+    clean_dataset_task.delay(
+        dataset.id,
+        dataset.tenant_id,
+        dataset.indexing_technique,
+        dataset.index_struct,
+        dataset.collection_binding_id,
+        dataset.doc_form,
+    )
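The exploded call above is the formatter's "magic trailing comma" at work: a call that overflows the line-length limit is broken to one argument per line, and the trailing comma it adds pins that layout on later runs. A small self-contained illustration (hypothetical values, not from the diff):

# The trailing comma keeps this list exploded even though it would fit
# on one line; deleting the comma would let `ruff format` collapse it
# back to: colors = ["red", "green"]
colors = [
    "red",
    "green",
]
print(colors)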

View File

@@ -5,7 +5,7 @@ from tasks.clean_document_task import clean_document_task
 @document_was_deleted.connect
 def handle(sender, **kwargs):
     document_id = sender
-    dataset_id = kwargs.get('dataset_id')
-    doc_form = kwargs.get('doc_form')
-    file_id = kwargs.get('file_id')
+    dataset_id = kwargs.get("dataset_id")
+    doc_form = kwargs.get("doc_form")
+    file_id = kwargs.get("file_id")
     clean_document_task.delay(document_id, dataset_id, doc_form, file_id)

View File

@@ -14,21 +14,25 @@ from models.dataset import Document
 @document_index_created.connect
 def handle(sender, **kwargs):
     dataset_id = sender
-    document_ids = kwargs.get('document_ids', None)
+    document_ids = kwargs.get("document_ids", None)
     documents = []
     start_at = time.perf_counter()
     for document_id in document_ids:
-        logging.info(click.style('Start process document: {}'.format(document_id), fg='green'))
+        logging.info(click.style("Start process document: {}".format(document_id), fg="green"))

-        document = db.session.query(Document).filter(
-            Document.id == document_id,
-            Document.dataset_id == dataset_id
-        ).first()
+        document = (
+            db.session.query(Document)
+            .filter(
+                Document.id == document_id,
+                Document.dataset_id == dataset_id,
+            )
+            .first()
+        )

         if not document:
-            raise NotFound('Document not found')
+            raise NotFound("Document not found")

-        document.indexing_status = 'parsing'
+        document.indexing_status = "parsing"
         document.processing_started_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
         documents.append(document)
         db.session.add(document)
@@ -38,8 +42,8 @@ def handle(sender, **kwargs):
         indexing_runner = IndexingRunner()
         indexing_runner.run(documents)
         end_at = time.perf_counter()
-        logging.info(click.style('Processed dataset: {} latency: {}'.format(dataset_id, end_at - start_at), fg='green'))
+        logging.info(click.style("Processed dataset: {} latency: {}".format(dataset_id, end_at - start_at), fg="green"))
     except DocumentIsPausedException as ex:
-        logging.info(click.style(str(ex), fg='yellow'))
+        logging.info(click.style(str(ex), fg="yellow"))
     except Exception:
         pass
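The query rewrite in the previous hunk shows how the formatter handles an over-long fluent chain: the whole expression is wrapped in parentheses and broken before each method call, instead of hanging continuation lines under `.filter(`. A schematic stdlib-only sketch of the same shape (hypothetical data):

words = ["Gamma", "alpha", "Beta"]

# A long chain, parenthesized and split one call per line, mirroring
# the db.session.query(...).filter(...).first() rewrite above:
first = (
    ",".join(words)
    .lower()
    .split(",")
)[0]
print(first)  # gamma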

View File

@@ -10,7 +10,7 @@ def handle(sender, **kwargs):
     installed_app = InstalledApp(
         tenant_id=app.tenant_id,
         app_id=app.id,
-        app_owner_tenant_id=app.tenant_id
+        app_owner_tenant_id=app.tenant_id,
     )
     db.session.add(installed_app)
     db.session.commit()

View File

@@ -7,15 +7,15 @@ from models.model import Site
 def handle(sender, **kwargs):
     """Create site record when an app is created."""
     app = sender
-    account = kwargs.get('account')
+    account = kwargs.get("account")
     site = Site(
         app_id=app.id,
         title=app.name,
-        icon = app.icon,
-        icon_background = app.icon_background,
+        icon=app.icon,
+        icon_background=app.icon_background,
         default_language=account.interface_language,
-        customize_token_strategy='not_allow',
-        code=Site.generate_code(16)
+        customize_token_strategy="not_allow",
+        code=Site.generate_code(16),
     )

     db.session.add(site)

View File

@@ -8,7 +8,7 @@ from models.provider import Provider, ProviderType
 @message_was_created.connect
 def handle(sender, **kwargs):
     message = sender
-    application_generate_entity = kwargs.get('application_generate_entity')
+    application_generate_entity = kwargs.get("application_generate_entity")

     if not isinstance(application_generate_entity, ChatAppGenerateEntity | AgentChatAppGenerateEntity):
         return
@@ -39,7 +39,7 @@ def handle(sender, **kwargs):
     elif quota_unit == QuotaUnit.CREDITS:
         used_quota = 1

-        if 'gpt-4' in model_config.model:
+        if "gpt-4" in model_config.model:
             used_quota = 20
     else:
         used_quota = 1
@@ -50,6 +50,6 @@ def handle(sender, **kwargs):
         Provider.provider_name == model_config.provider,
         Provider.provider_type == ProviderType.SYSTEM.value,
         Provider.quota_type == system_configuration.current_quota_type.value,
-        Provider.quota_limit > Provider.quota_used
-    ).update({'quota_used': Provider.quota_used + used_quota})
+        Provider.quota_limit > Provider.quota_used,
+    ).update({"quota_used": Provider.quota_used + used_quota})
     db.session.commit()
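For readers of the hunks above: under QuotaUnit.CREDITS, a message against a gpt-4-family model deducts 20 credits and any other model deducts 1, and the UPDATE only fires while quota_limit exceeds quota_used. A sketch of just the credit rule (the standalone function is hypothetical; the substring condition is from the diff):

def used_quota_for(model_name: str) -> int:
    # gpt-4-family models cost 20 credits per message, others cost 1
    return 20 if "gpt-4" in model_name else 1


assert used_quota_for("gpt-4o") == 20
assert used_quota_for("gpt-3.5-turbo") == 1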

View File

@@ -8,8 +8,8 @@ from events.app_event import app_draft_workflow_was_synced
 @app_draft_workflow_was_synced.connect
 def handle(sender, **kwargs):
     app = sender
-    for node_data in kwargs.get('synced_draft_workflow').graph_dict.get('nodes', []):
-        if node_data.get('data', {}).get('type') == NodeType.TOOL.value:
+    for node_data in kwargs.get("synced_draft_workflow").graph_dict.get("nodes", []):
+        if node_data.get("data", {}).get("type") == NodeType.TOOL.value:
             try:
                 tool_entity = ToolEntity(**node_data["data"])
                 tool_runtime = ToolManager.get_tool_runtime(
@@ -23,7 +23,7 @@ def handle(sender, **kwargs):
                     tool_runtime=tool_runtime,
                     provider_name=tool_entity.provider_name,
                     provider_type=tool_entity.provider_type,
-                    identity_id=f'WORKFLOW.{app.id}.{node_data.get("id")}'
+                    identity_id=f'WORKFLOW.{app.id}.{node_data.get("id")}',
                 )
                 manager.delete_tool_parameters_cache()
             except:

View File

@@ -1,4 +1,4 @@
 from blinker import signal

 # sender: document
-document_index_created = signal('document-index-created')
+document_index_created = signal("document-index-created")

View File

@@ -7,13 +7,11 @@ from models.model import AppModelConfig
 @app_model_config_was_updated.connect
 def handle(sender, **kwargs):
     app = sender
-    app_model_config = kwargs.get('app_model_config')
+    app_model_config = kwargs.get("app_model_config")

     dataset_ids = get_dataset_ids_from_model_config(app_model_config)

-    app_dataset_joins = db.session.query(AppDatasetJoin).filter(
-        AppDatasetJoin.app_id == app.id
-    ).all()
+    app_dataset_joins = db.session.query(AppDatasetJoin).filter(AppDatasetJoin.app_id == app.id).all()

     removed_dataset_ids = []
     if not app_dataset_joins:
@@ -29,16 +27,12 @@ def handle(sender, **kwargs):
     if removed_dataset_ids:
         for dataset_id in removed_dataset_ids:
             db.session.query(AppDatasetJoin).filter(
-                AppDatasetJoin.app_id == app.id,
-                AppDatasetJoin.dataset_id == dataset_id
+                AppDatasetJoin.app_id == app.id, AppDatasetJoin.dataset_id == dataset_id
             ).delete()

     if added_dataset_ids:
         for dataset_id in added_dataset_ids:
-            app_dataset_join = AppDatasetJoin(
-                app_id=app.id,
-                dataset_id=dataset_id
-            )
+            app_dataset_join = AppDatasetJoin(app_id=app.id, dataset_id=dataset_id)
             db.session.add(app_dataset_join)

     db.session.commit()
@@ -51,7 +45,7 @@ def get_dataset_ids_from_model_config(app_model_config: AppModelConfig) -> set:
     agent_mode = app_model_config.agent_mode_dict

-    tools = agent_mode.get('tools', []) or []
+    tools = agent_mode.get("tools", []) or []
     for tool in tools:
         if len(list(tool.keys())) != 1:
             continue

@@ -63,11 +57,11 @@ def get_dataset_ids_from_model_config(app_model_config: AppModelConfig) -> set:

     # get dataset from dataset_configs
     dataset_configs = app_model_config.dataset_configs_dict
-    datasets = dataset_configs.get('datasets', {}) or {}
-    for dataset in datasets.get('datasets', []) or []:
+    datasets = dataset_configs.get("datasets", {}) or {}
+    for dataset in datasets.get("datasets", []) or []:
         keys = list(dataset.keys())
-        if len(keys) == 1 and keys[0] == 'dataset':
-            if dataset['dataset'].get('id'):
-                dataset_ids.add(dataset['dataset'].get('id'))
+        if len(keys) == 1 and keys[0] == "dataset":
+            if dataset["dataset"].get("id"):
+                dataset_ids.add(dataset["dataset"].get("id"))

     return dataset_ids

View File

@@ -11,13 +11,11 @@ from models.workflow import Workflow
 @app_published_workflow_was_updated.connect
 def handle(sender, **kwargs):
     app = sender
-    published_workflow = kwargs.get('published_workflow')
+    published_workflow = kwargs.get("published_workflow")
     published_workflow = cast(Workflow, published_workflow)

     dataset_ids = get_dataset_ids_from_workflow(published_workflow)
-    app_dataset_joins = db.session.query(AppDatasetJoin).filter(
-        AppDatasetJoin.app_id == app.id
-    ).all()
+    app_dataset_joins = db.session.query(AppDatasetJoin).filter(AppDatasetJoin.app_id == app.id).all()

     removed_dataset_ids = []
     if not app_dataset_joins:
@@ -33,16 +31,12 @@ def handle(sender, **kwargs):
     if removed_dataset_ids:
         for dataset_id in removed_dataset_ids:
             db.session.query(AppDatasetJoin).filter(
-                AppDatasetJoin.app_id == app.id,
-                AppDatasetJoin.dataset_id == dataset_id
+                AppDatasetJoin.app_id == app.id, AppDatasetJoin.dataset_id == dataset_id
             ).delete()

     if added_dataset_ids:
         for dataset_id in added_dataset_ids:
-            app_dataset_join = AppDatasetJoin(
-                app_id=app.id,
-                dataset_id=dataset_id
-            )
+            app_dataset_join = AppDatasetJoin(app_id=app.id, dataset_id=dataset_id)
             db.session.add(app_dataset_join)

     db.session.commit()
@@ -54,18 +48,19 @@ def get_dataset_ids_from_workflow(published_workflow: Workflow) -> set:
     if not graph:
         return dataset_ids

-    nodes = graph.get('nodes', [])
+    nodes = graph.get("nodes", [])

     # fetch all knowledge retrieval nodes
-    knowledge_retrieval_nodes = [node for node in nodes
-                                 if node.get('data', {}).get('type') == NodeType.KNOWLEDGE_RETRIEVAL.value]
+    knowledge_retrieval_nodes = [
+        node for node in nodes if node.get("data", {}).get("type") == NodeType.KNOWLEDGE_RETRIEVAL.value
+    ]

     if not knowledge_retrieval_nodes:
         return dataset_ids

     for node in knowledge_retrieval_nodes:
         try:
-            node_data = KnowledgeRetrievalNodeData(**node.get('data', {}))
+            node_data = KnowledgeRetrievalNodeData(**node.get("data", {}))
             dataset_ids.update(node_data.dataset_ids)
         except Exception as e:
             continue

View File

@@ -9,13 +9,13 @@ from models.provider import Provider
 @message_was_created.connect
 def handle(sender, **kwargs):
     message = sender
-    application_generate_entity = kwargs.get('application_generate_entity')
+    application_generate_entity = kwargs.get("application_generate_entity")

     if not isinstance(application_generate_entity, ChatAppGenerateEntity | AgentChatAppGenerateEntity):
         return

     db.session.query(Provider).filter(
         Provider.tenant_id == application_generate_entity.app_config.tenant_id,
-        Provider.provider_name == application_generate_entity.model_conf.provider
-    ).update({'last_used': datetime.now(timezone.utc).replace(tzinfo=None)})
+        Provider.provider_name == application_generate_entity.model_conf.provider,
+    ).update({"last_used": datetime.now(timezone.utc).replace(tzinfo=None)})
     db.session.commit()

View File

@@ -1,4 +1,4 @@
 from blinker import signal

 # sender: message, kwargs: conversation
-message_was_created = signal('message-was-created')
+message_was_created = signal("message-was-created")

View File

@@ -1,7 +1,7 @@
 from blinker import signal

 # sender: tenant
-tenant_was_created = signal('tenant-was-created')
+tenant_was_created = signal("tenant-was-created")

 # sender: tenant
-tenant_was_updated = signal('tenant-was-updated')
+tenant_was_updated = signal("tenant-was-updated")
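For context: the event modules touched above only declare blinker signals, with the `# sender:` comments documenting the calling convention. A minimal, self-contained usage sketch (the tenant value and the `plan` kwarg are hypothetical):

from blinker import signal

tenant_was_created = signal("tenant-was-created")


# Receivers subscribe with .connect; the sender arrives positionally and
# any extra context comes through **kwargs, as in the handlers above.
@tenant_was_created.connect
def handle(sender, **kwargs):
    print("tenant created:", sender, kwargs)


# Emitting the signal:
tenant_was_created.send("tenant-123", plan="sandbox")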