Refactor: use logger = logging.getLogger(__name__) in logging (#24515)

Co-authored-by: Yongtao Huang <99629139+hyongtao-db@users.noreply.github.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
Authored by Yongtao Huang on 2025-08-26 18:10:31 +08:00; committed by GitHub
parent 8af2ae973f
commit fa753239ad
102 changed files with 565 additions and 401 deletions
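
The refactor is mechanical: each touched module gains a module-level logger bound to its dotted import path, and calls that previously went through the root logging module are rerouted to it, so every record carries the originating module name instead of "root". A minimal sketch of the before/after pattern (the function and message below are illustrative, not taken from the diff):

import logging

# Before: calls on the logging module itself hit the root logger, so every
# record is attributed to "root" and cannot be tuned per module.
# logging.info("Account linked.")

# After: one logger per module, created once at import time and named after
# the module's dotted path via logging.getLogger(__name__).
logger = logging.getLogger(__name__)


def link_account() -> None:
    # Hypothetical call site; the real ones live in the service classes below.
    logger.info("Account linked.")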

View File

@@ -67,6 +67,8 @@ from tasks.mail_owner_transfer_task import (
 )
 from tasks.mail_reset_password_task import send_reset_password_mail_task
+
+logger = logging.getLogger(__name__)
 
 class TokenPair(BaseModel):
     access_token: str
@@ -332,9 +334,9 @@ class AccountService:
             db.session.add(account_integrate)
             db.session.commit()
-            logging.info("Account %s linked %s account %s.", account.id, provider, open_id)
+            logger.info("Account %s linked %s account %s.", account.id, provider, open_id)
         except Exception as e:
-            logging.exception("Failed to link %s account %s to Account %s", provider, open_id, account.id)
+            logger.exception("Failed to link %s account %s to Account %s", provider, open_id, account.id)
             raise LinkAccountIntegrateError("Failed to link account.") from e
 
     @staticmethod
@@ -925,7 +927,7 @@ class TenantService:
         """Create tenant member"""
         if role == TenantAccountRole.OWNER.value:
             if TenantService.has_roles(tenant, [TenantAccountRole.OWNER]):
-                logging.error("Tenant %s has already an owner.", tenant.id)
+                logger.error("Tenant %s has already an owner.", tenant.id)
                 raise Exception("Tenant already has an owner.")
 
         ta = db.session.query(TenantAccountJoin).filter_by(tenant_id=tenant.id, account_id=account.id).first()
@@ -1177,7 +1179,7 @@ class RegisterService:
             db.session.query(Tenant).delete()
             db.session.commit()
-            logging.exception("Setup account failed, email: %s, name: %s", email, name)
+            logger.exception("Setup account failed, email: %s, name: %s", email, name)
             raise ValueError(f"Setup failed: {e}")
 
     @classmethod
@@ -1222,15 +1224,15 @@ class RegisterService:
             db.session.commit()
         except WorkSpaceNotAllowedCreateError:
             db.session.rollback()
-            logging.exception("Register failed")
+            logger.exception("Register failed")
             raise AccountRegisterError("Workspace is not allowed to create.")
         except AccountRegisterError as are:
             db.session.rollback()
-            logging.exception("Register failed")
+            logger.exception("Register failed")
             raise are
         except Exception as e:
             db.session.rollback()
-            logging.exception("Register failed")
+            logger.exception("Register failed")
             raise AccountRegisterError(f"Registration failed: {e}") from e
 
         return account
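
Because the loggers are now named after their modules (the diff shows dotted paths such as tasks.mail_reset_password_task above and services.tag_service in the next file), verbosity and handlers can be adjusted per package without touching any call site. A hedged sketch of what that enables, assuming logging is configured once at application startup; this configuration is illustrative and not part of the commit:

import logging

# Coarse default for everything, with the logger name included in each record.
logging.basicConfig(level=logging.WARNING, format="%(asctime)s %(name)s %(levelname)s %(message)s")

# Loggers created via getLogger(__name__) inherit from their package logger,
# so a single line raises verbosity for every module under "services".
logging.getLogger("services").setLevel(logging.DEBUG)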

View File

@@ -25,6 +25,8 @@ from services.feature_service import FeatureService
 from services.tag_service import TagService
 from tasks.remove_app_and_related_data_task import remove_app_and_related_data_task
+
+logger = logging.getLogger(__name__)
 
 class AppService:
     def get_paginate_apps(self, user_id: str, tenant_id: str, args: dict) -> Pagination | None:
@@ -95,7 +97,7 @@ class AppService:
         except (ProviderTokenNotInitError, LLMBadRequestError):
             model_instance = None
         except Exception as e:
-            logging.exception("Get default model instance failed, tenant_id: %s", tenant_id)
+            logger.exception("Get default model instance failed, tenant_id: %s", tenant_id)
             model_instance = None
 
         if model_instance:

View File

@@ -76,6 +76,8 @@ from tasks.remove_document_from_index_task import remove_document_from_index_task
 from tasks.retry_document_indexing_task import retry_document_indexing_task
 from tasks.sync_website_document_indexing_task import sync_website_document_indexing_task
+
+logger = logging.getLogger(__name__)
 
 class DatasetService:
     @staticmethod
@@ -615,7 +617,7 @@ class DatasetService:
             )
         except ProviderTokenNotInitError:
             # If we can't get the embedding model, preserve existing settings
-            logging.warning(
+            logger.warning(
                 "Failed to initialize embedding model %s/%s, preserving existing settings",
                 data["embedding_model_provider"],
                 data["embedding_model"],
@@ -661,11 +663,11 @@ class DatasetService:
     @staticmethod
     def check_dataset_permission(dataset, user):
         if dataset.tenant_id != user.current_tenant_id:
-            logging.debug("User %s does not have permission to access dataset %s", user.id, dataset.id)
+            logger.debug("User %s does not have permission to access dataset %s", user.id, dataset.id)
             raise NoPermissionError("You do not have permission to access this dataset.")
         if user.current_role != TenantAccountRole.OWNER:
             if dataset.permission == DatasetPermissionEnum.ONLY_ME and dataset.created_by != user.id:
-                logging.debug("User %s does not have permission to access dataset %s", user.id, dataset.id)
+                logger.debug("User %s does not have permission to access dataset %s", user.id, dataset.id)
                 raise NoPermissionError("You do not have permission to access this dataset.")
             if dataset.permission == DatasetPermissionEnum.PARTIAL_TEAM:
                 # For partial team permission, user needs explicit permission or be the creator
@@ -674,7 +676,7 @@ class DatasetService:
                     db.session.query(DatasetPermission).filter_by(dataset_id=dataset.id, account_id=user.id).first()
                 )
                 if not user_permission:
-                    logging.debug("User %s does not have permission to access dataset %s", user.id, dataset.id)
+                    logger.debug("User %s does not have permission to access dataset %s", user.id, dataset.id)
                     raise NoPermissionError("You do not have permission to access this dataset.")
 
     @staticmethod
@@ -1190,7 +1192,7 @@ class DocumentService:
                 created_by=account.id,
             )
         else:
-            logging.warning(
+            logger.warning(
                 "Invalid process rule mode: %s, can not find dataset process rule",
                 process_rule.mode,
            )
@@ -1882,7 +1884,7 @@ class DocumentService:
                 task_func.delay(*task_args)
             except Exception as e:
                 # Log the error but do not rollback the transaction
-                logging.exception("Error executing async task for document %s", update_info["document"].id)
+                logger.exception("Error executing async task for document %s", update_info["document"].id)
                 # don't raise the error immediately, but capture it for later
                 propagation_error = e
 
             try:
@@ -1893,7 +1895,7 @@ class DocumentService:
                 redis_client.setex(indexing_cache_key, 600, 1)
             except Exception as e:
                 # Log the error but do not rollback the transaction
-                logging.exception("Error setting cache for document %s", update_info["document"].id)
+                logger.exception("Error setting cache for document %s", update_info["document"].id)
 
         # Raise any propagation error after all updates
         if propagation_error:
             raise propagation_error
@@ -2059,7 +2061,7 @@ class SegmentService:
             try:
                 VectorService.create_segments_vector([args["keywords"]], [segment_document], dataset, document.doc_form)
             except Exception as e:
-                logging.exception("create segment index failed")
+                logger.exception("create segment index failed")
                 segment_document.enabled = False
                 segment_document.disabled_at = naive_utc_now()
                 segment_document.status = "error"
@@ -2142,7 +2144,7 @@ class SegmentService:
                 # save vector index
                 VectorService.create_segments_vector(keywords_list, pre_segment_data_list, dataset, document.doc_form)
             except Exception as e:
-                logging.exception("create segment index failed")
+                logger.exception("create segment index failed")
                 for segment_document in segment_data_list:
                     segment_document.enabled = False
                     segment_document.disabled_at = naive_utc_now()
@@ -2314,7 +2316,7 @@ class SegmentService:
                 VectorService.update_segment_vector(args.keywords, segment, dataset)
 
             except Exception as e:
-                logging.exception("update segment index failed")
+                logger.exception("update segment index failed")
                 segment.enabled = False
                 segment.disabled_at = naive_utc_now()
                 segment.status = "error"
@@ -2476,7 +2478,7 @@ class SegmentService:
         try:
             VectorService.create_child_chunk_vector(child_chunk, dataset)
         except Exception as e:
-            logging.exception("create child chunk index failed")
+            logger.exception("create child chunk index failed")
            db.session.rollback()
            raise ChildChunkIndexingError(str(e))
         db.session.commit()
@@ -2551,7 +2553,7 @@ class SegmentService:
             VectorService.update_child_chunk_vector(new_child_chunks, update_child_chunks, delete_child_chunks, dataset)
             db.session.commit()
         except Exception as e:
-            logging.exception("update child chunk index failed")
+            logger.exception("update child chunk index failed")
             db.session.rollback()
             raise ChildChunkIndexingError(str(e))
         return sorted(new_child_chunks + update_child_chunks, key=lambda x: x.position)
@@ -2575,7 +2577,7 @@ class SegmentService:
             VectorService.update_child_chunk_vector([], [child_chunk], [], dataset)
             db.session.commit()
         except Exception as e:
-            logging.exception("update child chunk index failed")
+            logger.exception("update child chunk index failed")
             db.session.rollback()
             raise ChildChunkIndexingError(str(e))
         return child_chunk
@@ -2586,7 +2588,7 @@ class SegmentService:
         try:
             VectorService.delete_child_chunk_vector(child_chunk, dataset)
         except Exception as e:
-            logging.exception("delete child chunk index failed")
+            logger.exception("delete child chunk index failed")
             db.session.rollback()
             raise ChildChunkDeleteIndexError(str(e))
         db.session.commit()
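
Nearly all of the converted call sites in this file sit inside except blocks and use exception() rather than error(): called from a handler, it logs at ERROR level and appends the active traceback automatically. A minimal, self-contained sketch of that pattern; the function and error below are made up, standing in for the VectorService calls shown above:

import logging

logger = logging.getLogger(__name__)


def rebuild_segment_index() -> None:
    try:
        raise RuntimeError("simulated indexing failure")  # stand-in for a VectorService call
    except Exception:
        # Logs at ERROR level and includes the current traceback; no need to
        # pass the exception object explicitly.
        logger.exception("create segment index failed")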

View File

@@ -12,6 +12,8 @@ from extensions.ext_database import db
 from models.account import Account
 from models.dataset import Dataset, DatasetQuery
+
+logger = logging.getLogger(__name__)
 
 default_retrieval_model = {
     "search_method": RetrievalMethod.SEMANTIC_SEARCH.value,
     "reranking_enable": False,
@@ -77,7 +79,7 @@ class HitTestingService:
         )
 
         end = time.perf_counter()
-        logging.debug("Hit testing retrieve in %s seconds", end - start)
+        logger.debug("Hit testing retrieve in %s seconds", end - start)
 
         dataset_query = DatasetQuery(
             dataset_id=dataset.id, content=query, source="hit_testing", created_by_role="account", created_by=account.id
@@ -113,7 +115,7 @@ class HitTestingService:
         )
 
         end = time.perf_counter()
-        logging.debug("External knowledge hit testing retrieve in %s seconds", end - start)
+        logger.debug("External knowledge hit testing retrieve in %s seconds", end - start)
 
         dataset_query = DatasetQuery(
             dataset_id=dataset.id, content=query, source="hit_testing", created_by_role="account", created_by=account.id

View File

@@ -15,6 +15,8 @@ from services.entities.knowledge_entities.knowledge_entities import (
     MetadataOperationData,
 )
+
+logger = logging.getLogger(__name__)
 
 class MetadataService:
     @staticmethod
@@ -90,7 +92,7 @@ class MetadataService:
             db.session.commit()
             return metadata  # type: ignore
         except Exception:
-            logging.exception("Update metadata name failed")
+            logger.exception("Update metadata name failed")
         finally:
             redis_client.delete(lock_key)
@@ -122,7 +124,7 @@ class MetadataService:
             db.session.commit()
             return metadata
         except Exception:
-            logging.exception("Delete metadata failed")
+            logger.exception("Delete metadata failed")
         finally:
             redis_client.delete(lock_key)
@@ -161,7 +163,7 @@ class MetadataService:
             dataset.built_in_field_enabled = True
             db.session.commit()
         except Exception:
-            logging.exception("Enable built-in field failed")
+            logger.exception("Enable built-in field failed")
         finally:
             redis_client.delete(lock_key)
@@ -192,7 +194,7 @@ class MetadataService:
             dataset.built_in_field_enabled = False
             db.session.commit()
         except Exception:
-            logging.exception("Disable built-in field failed")
+            logger.exception("Disable built-in field failed")
         finally:
             redis_client.delete(lock_key)
@@ -230,7 +232,7 @@ class MetadataService:
             db.session.add(dataset_metadata_binding)
             db.session.commit()
         except Exception:
-            logging.exception("Update documents metadata failed")
+            logger.exception("Update documents metadata failed")
         finally:
             redis_client.delete(lock_key)

View File

@@ -13,7 +13,7 @@ from models.dataset import ChildChunk, Dataset, DatasetProcessRule, DocumentSegm
 from models.dataset import Document as DatasetDocument
 from services.entities.knowledge_entities.knowledge_entities import ParentMode
 
-_logger = logging.getLogger(__name__)
+logger = logging.getLogger(__name__)
 
 class VectorService:
@@ -27,7 +27,7 @@ class VectorService:
         if doc_form == IndexType.PARENT_CHILD_INDEX:
             dataset_document = db.session.query(DatasetDocument).filter_by(id=segment.document_id).first()
             if not dataset_document:
-                _logger.warning(
+                logger.warning(
                     "Expected DatasetDocument record to exist, but none was found, document_id=%s, segment_id=%s",
                     segment.document_id,
                     segment.id,

View File

@@ -28,7 +28,7 @@ from models.enums import DraftVariableType
 from models.workflow import Workflow, WorkflowDraftVariable, is_system_variable_editable
 from repositories.factory import DifyAPIRepositoryFactory
 
-_logger = logging.getLogger(__name__)
+logger = logging.getLogger(__name__)
 
 @dataclasses.dataclass(frozen=True)
@@ -242,7 +242,7 @@ class WorkflowDraftVariableService:
         if conv_var is None:
             self._session.delete(instance=variable)
             self._session.flush()
-            _logger.warning(
+            logger.warning(
                 "Conversation variable not found for draft variable, id=%s, name=%s", variable.id, variable.name
             )
             return None
@@ -263,12 +263,12 @@ class WorkflowDraftVariableService:
         if variable.node_execution_id is None:
             self._session.delete(instance=variable)
             self._session.flush()
-            _logger.warning("draft variable has no node_execution_id, id=%s, name=%s", variable.id, variable.name)
+            logger.warning("draft variable has no node_execution_id, id=%s, name=%s", variable.id, variable.name)
             return None
 
         node_exec = self._api_node_execution_repo.get_execution_by_id(variable.node_execution_id)
         if node_exec is None:
-            _logger.warning(
+            logger.warning(
                 "Node exectution not found for draft variable, id=%s, name=%s, node_execution_id=%s",
                 variable.id,
                 variable.name,
@@ -351,7 +351,7 @@ class WorkflowDraftVariableService:
             return None
         segment = draft_var.get_value()
         if not isinstance(segment, StringSegment):
-            _logger.warning(
+            logger.warning(
                 "sys.conversation_id variable is not a string: app_id=%s, id=%s",
                 app_id,
                 draft_var.id,
@@ -681,7 +681,7 @@ class DraftVariableSaver:
         draft_vars = []
         for name, value in output.items():
             if not self._should_variable_be_saved(name):
-                _logger.debug(
+                logger.debug(
                     "Skip saving variable as it has been excluded by its node_type, name=%s, node_type=%s",
                     name,
                     self._node_type,