Compare commits
10 Commits
f0684c1957
...
ad8e82ee1d
Author | SHA1 | Date | |
---|---|---|---|
![]() |
ad8e82ee1d | ||
![]() |
0e1dfb4161 | ||
![]() |
a183b2affb | ||
![]() |
738aaee101 | ||
![]() |
5867e6356d | ||
![]() |
ea066f891f | ||
![]() |
34b9fbab48 | ||
![]() |
1d7a8d94e0 | ||
![]() |
1caeac56f2 | ||
![]() |
6b1606f4f4 |
@@ -5,7 +5,7 @@ import os
|
||||
import secrets
|
||||
import urllib.parse
|
||||
from typing import Optional
|
||||
from urllib.parse import urljoin
|
||||
from urllib.parse import urljoin, urlparse
|
||||
|
||||
import httpx
|
||||
from pydantic import BaseModel, ValidationError
|
||||
@@ -99,9 +99,37 @@ def handle_callback(state_key: str, authorization_code: str) -> OAuthCallbackSta
|
||||
return full_state_data
|
||||
|
||||
|
||||
def check_support_resource_discovery(server_url: str) -> tuple[bool, str]:
|
||||
"""Check if the server supports OAuth 2.0 Resource Discovery."""
|
||||
b_scheme, b_netloc, b_path, b_params, b_query, b_fragment = urlparse(server_url, "", True)
|
||||
url_for_resource_discovery = f"{b_scheme}://{b_netloc}/.well-known/oauth-protected-resource{b_path}"
|
||||
if b_query:
|
||||
url_for_resource_discovery += f"?{b_query}"
|
||||
if b_fragment:
|
||||
url_for_resource_discovery += f"#{b_fragment}"
|
||||
try:
|
||||
headers = {"MCP-Protocol-Version": LATEST_PROTOCOL_VERSION, "User-Agent": "Dify"}
|
||||
response = httpx.get(url_for_resource_discovery, headers=headers)
|
||||
if 200 <= response.status_code < 300:
|
||||
body = response.json()
|
||||
if "authorization_server_url" in body:
|
||||
return True, body["authorization_server_url"][0]
|
||||
else:
|
||||
return False, ""
|
||||
return False, ""
|
||||
except httpx.RequestError as e:
|
||||
# Not support resource discovery, fall back to well-known OAuth metadata
|
||||
return False, ""
|
||||
|
||||
|
||||
def discover_oauth_metadata(server_url: str, protocol_version: Optional[str] = None) -> Optional[OAuthMetadata]:
|
||||
"""Looks up RFC 8414 OAuth 2.0 Authorization Server Metadata."""
|
||||
url = urljoin(server_url, "/.well-known/oauth-authorization-server")
|
||||
# First check if the server supports OAuth 2.0 Resource Discovery
|
||||
support_resource_discovery, oauth_discovery_url = check_support_resource_discovery(server_url)
|
||||
if support_resource_discovery:
|
||||
url = oauth_discovery_url
|
||||
else:
|
||||
url = urljoin(server_url, "/.well-known/oauth-authorization-server")
|
||||
|
||||
try:
|
||||
headers = {"MCP-Protocol-Version": protocol_version or LATEST_PROTOCOL_VERSION}
|
||||
|
@@ -98,18 +98,26 @@ class AnalyticdbVectorBySql:
|
||||
try:
|
||||
cur.execute(f"CREATE DATABASE {self.databaseName}")
|
||||
except Exception as e:
|
||||
if "already exists" in str(e):
|
||||
return
|
||||
raise e
|
||||
if "already exists" not in str(e):
|
||||
raise e
|
||||
finally:
|
||||
cur.close()
|
||||
conn.close()
|
||||
self.pool = self._create_connection_pool()
|
||||
with self._get_cursor() as cur:
|
||||
conn = cur.connection
|
||||
try:
|
||||
cur.execute("CREATE EXTENSION IF NOT EXISTS zhparser;")
|
||||
except Exception as e:
|
||||
conn.rollback()
|
||||
raise RuntimeError(
|
||||
"Failed to create zhparser extension. Please ensure it is available in your AnalyticDB."
|
||||
) from e
|
||||
try:
|
||||
cur.execute("CREATE TEXT SEARCH CONFIGURATION zh_cn (PARSER = zhparser)")
|
||||
cur.execute("ALTER TEXT SEARCH CONFIGURATION zh_cn ADD MAPPING FOR n,v,a,i,e,l,x WITH simple")
|
||||
except Exception as e:
|
||||
conn.rollback()
|
||||
if "already exists" not in str(e):
|
||||
raise e
|
||||
cur.execute(
|
||||
|
@@ -1012,7 +1012,7 @@ class DatasetRetrieval:
|
||||
def _process_metadata_filter_func(
|
||||
self, sequence: int, condition: str, metadata_name: str, value: Optional[Any], filters: list
|
||||
):
|
||||
if value is None:
|
||||
if value is None and condition not in ("empty", "not empty"):
|
||||
return
|
||||
|
||||
key = f"{metadata_name}_{sequence}"
|
||||
|
@@ -126,7 +126,7 @@ class SegmentType(StrEnum):
|
||||
"""
|
||||
if self.is_array_type():
|
||||
return self._validate_array(value, array_validation)
|
||||
elif self == SegmentType.NUMBER:
|
||||
elif self in [SegmentType.INTEGER, SegmentType.FLOAT, SegmentType.NUMBER]:
|
||||
return isinstance(value, (int, float))
|
||||
elif self == SegmentType.STRING:
|
||||
return isinstance(value, str)
|
||||
@@ -166,7 +166,6 @@ _ARRAY_TYPES = frozenset(
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
_NUMERICAL_TYPES = frozenset(
|
||||
[
|
||||
SegmentType.NUMBER,
|
||||
|
@@ -572,7 +572,7 @@ class KnowledgeRetrievalNode(BaseNode):
|
||||
def _process_metadata_filter_func(
|
||||
self, sequence: int, condition: str, metadata_name: str, value: Optional[Any], filters: list
|
||||
):
|
||||
if value is None:
|
||||
if value is None and condition not in ("empty", "not empty"):
|
||||
return
|
||||
|
||||
key = f"{metadata_name}_{sequence}"
|
||||
|
@@ -313,30 +313,31 @@ class LoopNode(BaseNode):
|
||||
and event.node_type == NodeType.LOOP_END
|
||||
and not isinstance(event, NodeRunStreamChunkEvent)
|
||||
):
|
||||
check_break_result = True
|
||||
# Check if variables in break conditions exist and process conditions
|
||||
# Allow loop internal variables to be used in break conditions
|
||||
available_conditions = []
|
||||
for condition in break_conditions:
|
||||
variable = self.graph_runtime_state.variable_pool.get(condition.variable_selector)
|
||||
if variable:
|
||||
available_conditions.append(condition)
|
||||
|
||||
# Process conditions if at least one variable is available
|
||||
if available_conditions:
|
||||
input_conditions, group_result, check_break_result = condition_processor.process_conditions(
|
||||
variable_pool=self.graph_runtime_state.variable_pool,
|
||||
conditions=available_conditions,
|
||||
operator=logical_operator,
|
||||
)
|
||||
if check_break_result:
|
||||
break
|
||||
else:
|
||||
check_break_result = True
|
||||
yield self._handle_event_metadata(event=event, iter_run_index=current_index)
|
||||
break
|
||||
|
||||
if isinstance(event, NodeRunSucceededEvent):
|
||||
yield self._handle_event_metadata(event=event, iter_run_index=current_index)
|
||||
|
||||
# Check if all variables in break conditions exist
|
||||
exists_variable = False
|
||||
for condition in break_conditions:
|
||||
if not self.graph_runtime_state.variable_pool.get(condition.variable_selector):
|
||||
exists_variable = False
|
||||
break
|
||||
else:
|
||||
exists_variable = True
|
||||
if exists_variable:
|
||||
input_conditions, group_result, check_break_result = condition_processor.process_conditions(
|
||||
variable_pool=self.graph_runtime_state.variable_pool,
|
||||
conditions=break_conditions,
|
||||
operator=logical_operator,
|
||||
)
|
||||
if check_break_result:
|
||||
break
|
||||
|
||||
elif isinstance(event, BaseGraphEvent):
|
||||
if isinstance(event, GraphRunFailedEvent):
|
||||
# Loop run failed
|
||||
|
@@ -250,6 +250,11 @@ class DatasetService:
|
||||
dataset: Optional[Dataset] = db.session.query(Dataset).filter_by(id=dataset_id).first()
|
||||
return dataset
|
||||
|
||||
@staticmethod
|
||||
def check_doc_form(dataset: Dataset, doc_form: str):
|
||||
if dataset.doc_form and doc_form != dataset.doc_form:
|
||||
raise ValueError("doc_form is different from the dataset doc_form.")
|
||||
|
||||
@staticmethod
|
||||
def check_dataset_model_setting(dataset):
|
||||
if dataset.indexing_technique == "high_quality":
|
||||
@@ -1085,6 +1090,8 @@ class DocumentService:
|
||||
dataset_process_rule: Optional[DatasetProcessRule] = None,
|
||||
created_from: str = "web",
|
||||
):
|
||||
# check doc_form
|
||||
DatasetService.check_doc_form(dataset, knowledge_config.doc_form)
|
||||
# check document limit
|
||||
features = FeatureService.get_features(current_user.current_tenant_id)
|
||||
|
||||
|
@@ -0,0 +1,620 @@
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
from faker import Faker
|
||||
|
||||
from models.model import EndUser, Message
|
||||
from models.web import SavedMessage
|
||||
from services.app_service import AppService
|
||||
from services.saved_message_service import SavedMessageService
|
||||
|
||||
|
||||
class TestSavedMessageService:
|
||||
"""Integration tests for SavedMessageService using testcontainers."""
|
||||
|
||||
@pytest.fixture
|
||||
def mock_external_service_dependencies(self):
|
||||
"""Mock setup for external service dependencies."""
|
||||
with (
|
||||
patch("services.account_service.FeatureService") as mock_account_feature_service,
|
||||
patch("services.app_service.ModelManager") as mock_model_manager,
|
||||
patch("services.saved_message_service.MessageService") as mock_message_service,
|
||||
):
|
||||
# Setup default mock returns
|
||||
mock_account_feature_service.get_system_features.return_value.is_allow_register = True
|
||||
|
||||
# Mock ModelManager for app creation
|
||||
mock_model_instance = mock_model_manager.return_value
|
||||
mock_model_instance.get_default_model_instance.return_value = None
|
||||
mock_model_instance.get_default_provider_model_name.return_value = ("openai", "gpt-3.5-turbo")
|
||||
|
||||
# Mock MessageService
|
||||
mock_message_service.get_message.return_value = None
|
||||
mock_message_service.pagination_by_last_id.return_value = None
|
||||
|
||||
yield {
|
||||
"account_feature_service": mock_account_feature_service,
|
||||
"model_manager": mock_model_manager,
|
||||
"message_service": mock_message_service,
|
||||
}
|
||||
|
||||
def _create_test_app_and_account(self, db_session_with_containers, mock_external_service_dependencies):
|
||||
"""
|
||||
Helper method to create a test app and account for testing.
|
||||
|
||||
Args:
|
||||
db_session_with_containers: Database session from testcontainers infrastructure
|
||||
mock_external_service_dependencies: Mock dependencies
|
||||
|
||||
Returns:
|
||||
tuple: (app, account) - Created app and account instances
|
||||
"""
|
||||
fake = Faker()
|
||||
|
||||
# Setup mocks for account creation
|
||||
mock_external_service_dependencies[
|
||||
"account_feature_service"
|
||||
].get_system_features.return_value.is_allow_register = True
|
||||
|
||||
# Create account and tenant first
|
||||
from services.account_service import AccountService, TenantService
|
||||
|
||||
account = AccountService.create_account(
|
||||
email=fake.email(),
|
||||
name=fake.name(),
|
||||
interface_language="en-US",
|
||||
password=fake.password(length=12),
|
||||
)
|
||||
TenantService.create_owner_tenant_if_not_exist(account, name=fake.company())
|
||||
tenant = account.current_tenant
|
||||
|
||||
# Create app with realistic data
|
||||
app_args = {
|
||||
"name": fake.company(),
|
||||
"description": fake.text(max_nb_chars=100),
|
||||
"mode": "chat",
|
||||
"icon_type": "emoji",
|
||||
"icon": "🤖",
|
||||
"icon_background": "#FF6B6B",
|
||||
"api_rph": 100,
|
||||
"api_rpm": 10,
|
||||
}
|
||||
|
||||
app_service = AppService()
|
||||
app = app_service.create_app(tenant.id, app_args, account)
|
||||
|
||||
return app, account
|
||||
|
||||
def _create_test_end_user(self, db_session_with_containers, app):
|
||||
"""
|
||||
Helper method to create a test end user for testing.
|
||||
|
||||
Args:
|
||||
db_session_with_containers: Database session from testcontainers infrastructure
|
||||
app: App instance to associate the end user with
|
||||
|
||||
Returns:
|
||||
EndUser: Created end user instance
|
||||
"""
|
||||
fake = Faker()
|
||||
|
||||
end_user = EndUser(
|
||||
tenant_id=app.tenant_id,
|
||||
app_id=app.id,
|
||||
external_user_id=fake.uuid4(),
|
||||
name=fake.name(),
|
||||
type="normal",
|
||||
session_id=fake.uuid4(),
|
||||
is_anonymous=False,
|
||||
)
|
||||
|
||||
from extensions.ext_database import db
|
||||
|
||||
db.session.add(end_user)
|
||||
db.session.commit()
|
||||
|
||||
return end_user
|
||||
|
||||
def _create_test_message(self, db_session_with_containers, app, user):
|
||||
"""
|
||||
Helper method to create a test message for testing.
|
||||
|
||||
Args:
|
||||
db_session_with_containers: Database session from testcontainers infrastructure
|
||||
app: App instance to associate the message with
|
||||
user: User instance (Account or EndUser) to associate the message with
|
||||
|
||||
Returns:
|
||||
Message: Created message instance
|
||||
"""
|
||||
fake = Faker()
|
||||
|
||||
# Create a simple conversation first
|
||||
from models.model import Conversation
|
||||
|
||||
conversation = Conversation(
|
||||
app_id=app.id,
|
||||
from_source="account" if hasattr(user, "current_tenant") else "end_user",
|
||||
from_end_user_id=user.id if not hasattr(user, "current_tenant") else None,
|
||||
from_account_id=user.id if hasattr(user, "current_tenant") else None,
|
||||
name=fake.sentence(nb_words=3),
|
||||
inputs={},
|
||||
status="normal",
|
||||
mode="chat",
|
||||
)
|
||||
|
||||
from extensions.ext_database import db
|
||||
|
||||
db.session.add(conversation)
|
||||
db.session.commit()
|
||||
|
||||
# Create message
|
||||
message = Message(
|
||||
app_id=app.id,
|
||||
conversation_id=conversation.id,
|
||||
from_source="account" if hasattr(user, "current_tenant") else "end_user",
|
||||
from_end_user_id=user.id if not hasattr(user, "current_tenant") else None,
|
||||
from_account_id=user.id if hasattr(user, "current_tenant") else None,
|
||||
inputs={},
|
||||
query=fake.sentence(nb_words=5),
|
||||
message=fake.text(max_nb_chars=100),
|
||||
answer=fake.text(max_nb_chars=200),
|
||||
message_tokens=50,
|
||||
answer_tokens=100,
|
||||
message_unit_price=0.001,
|
||||
answer_unit_price=0.002,
|
||||
total_price=0.003,
|
||||
currency="USD",
|
||||
status="success",
|
||||
)
|
||||
|
||||
db.session.add(message)
|
||||
db.session.commit()
|
||||
|
||||
return message
|
||||
|
||||
def test_pagination_by_last_id_success_with_account_user(
|
||||
self, db_session_with_containers, mock_external_service_dependencies
|
||||
):
|
||||
"""
|
||||
Test successful pagination by last ID with account user.
|
||||
|
||||
This test verifies:
|
||||
- Proper pagination with account user
|
||||
- Correct filtering by app_id and user
|
||||
- Proper role identification for account users
|
||||
- MessageService integration
|
||||
"""
|
||||
# Arrange: Create test data
|
||||
fake = Faker()
|
||||
app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies)
|
||||
|
||||
# Create test messages
|
||||
message1 = self._create_test_message(db_session_with_containers, app, account)
|
||||
message2 = self._create_test_message(db_session_with_containers, app, account)
|
||||
|
||||
# Create saved messages
|
||||
saved_message1 = SavedMessage(
|
||||
app_id=app.id,
|
||||
message_id=message1.id,
|
||||
created_by_role="account",
|
||||
created_by=account.id,
|
||||
)
|
||||
saved_message2 = SavedMessage(
|
||||
app_id=app.id,
|
||||
message_id=message2.id,
|
||||
created_by_role="account",
|
||||
created_by=account.id,
|
||||
)
|
||||
|
||||
from extensions.ext_database import db
|
||||
|
||||
db.session.add_all([saved_message1, saved_message2])
|
||||
db.session.commit()
|
||||
|
||||
# Mock MessageService.pagination_by_last_id return value
|
||||
from libs.infinite_scroll_pagination import InfiniteScrollPagination
|
||||
|
||||
mock_pagination = InfiniteScrollPagination(data=[message1, message2], limit=10, has_more=False)
|
||||
mock_external_service_dependencies["message_service"].pagination_by_last_id.return_value = mock_pagination
|
||||
|
||||
# Act: Execute the method under test
|
||||
result = SavedMessageService.pagination_by_last_id(app_model=app, user=account, last_id=None, limit=10)
|
||||
|
||||
# Assert: Verify the expected outcomes
|
||||
assert result is not None
|
||||
assert result.data == [message1, message2]
|
||||
assert result.limit == 10
|
||||
assert result.has_more is False
|
||||
|
||||
# Verify MessageService was called with correct parameters
|
||||
# Sort the IDs to handle database query order variations
|
||||
expected_include_ids = sorted([message1.id, message2.id])
|
||||
actual_call = mock_external_service_dependencies["message_service"].pagination_by_last_id.call_args
|
||||
actual_include_ids = sorted(actual_call.kwargs.get("include_ids", []))
|
||||
|
||||
assert actual_call.kwargs["app_model"] == app
|
||||
assert actual_call.kwargs["user"] == account
|
||||
assert actual_call.kwargs["last_id"] is None
|
||||
assert actual_call.kwargs["limit"] == 10
|
||||
assert actual_include_ids == expected_include_ids
|
||||
|
||||
# Verify database state
|
||||
db.session.refresh(saved_message1)
|
||||
db.session.refresh(saved_message2)
|
||||
assert saved_message1.id is not None
|
||||
assert saved_message2.id is not None
|
||||
assert saved_message1.created_by_role == "account"
|
||||
assert saved_message2.created_by_role == "account"
|
||||
|
||||
def test_pagination_by_last_id_success_with_end_user(
|
||||
self, db_session_with_containers, mock_external_service_dependencies
|
||||
):
|
||||
"""
|
||||
Test successful pagination by last ID with end user.
|
||||
|
||||
This test verifies:
|
||||
- Proper pagination with end user
|
||||
- Correct filtering by app_id and user
|
||||
- Proper role identification for end users
|
||||
- MessageService integration
|
||||
"""
|
||||
# Arrange: Create test data
|
||||
fake = Faker()
|
||||
app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies)
|
||||
end_user = self._create_test_end_user(db_session_with_containers, app)
|
||||
|
||||
# Create test messages
|
||||
message1 = self._create_test_message(db_session_with_containers, app, end_user)
|
||||
message2 = self._create_test_message(db_session_with_containers, app, end_user)
|
||||
|
||||
# Create saved messages
|
||||
saved_message1 = SavedMessage(
|
||||
app_id=app.id,
|
||||
message_id=message1.id,
|
||||
created_by_role="end_user",
|
||||
created_by=end_user.id,
|
||||
)
|
||||
saved_message2 = SavedMessage(
|
||||
app_id=app.id,
|
||||
message_id=message2.id,
|
||||
created_by_role="end_user",
|
||||
created_by=end_user.id,
|
||||
)
|
||||
|
||||
from extensions.ext_database import db
|
||||
|
||||
db.session.add_all([saved_message1, saved_message2])
|
||||
db.session.commit()
|
||||
|
||||
# Mock MessageService.pagination_by_last_id return value
|
||||
from libs.infinite_scroll_pagination import InfiniteScrollPagination
|
||||
|
||||
mock_pagination = InfiniteScrollPagination(data=[message1, message2], limit=5, has_more=True)
|
||||
mock_external_service_dependencies["message_service"].pagination_by_last_id.return_value = mock_pagination
|
||||
|
||||
# Act: Execute the method under test
|
||||
result = SavedMessageService.pagination_by_last_id(
|
||||
app_model=app, user=end_user, last_id="test_last_id", limit=5
|
||||
)
|
||||
|
||||
# Assert: Verify the expected outcomes
|
||||
assert result is not None
|
||||
assert result.data == [message1, message2]
|
||||
assert result.limit == 5
|
||||
assert result.has_more is True
|
||||
|
||||
# Verify MessageService was called with correct parameters
|
||||
# Sort the IDs to handle database query order variations
|
||||
expected_include_ids = sorted([message1.id, message2.id])
|
||||
actual_call = mock_external_service_dependencies["message_service"].pagination_by_last_id.call_args
|
||||
actual_include_ids = sorted(actual_call.kwargs.get("include_ids", []))
|
||||
|
||||
assert actual_call.kwargs["app_model"] == app
|
||||
assert actual_call.kwargs["user"] == end_user
|
||||
assert actual_call.kwargs["last_id"] == "test_last_id"
|
||||
assert actual_call.kwargs["limit"] == 5
|
||||
assert actual_include_ids == expected_include_ids
|
||||
|
||||
# Verify database state
|
||||
db.session.refresh(saved_message1)
|
||||
db.session.refresh(saved_message2)
|
||||
assert saved_message1.id is not None
|
||||
assert saved_message2.id is not None
|
||||
assert saved_message1.created_by_role == "end_user"
|
||||
assert saved_message2.created_by_role == "end_user"
|
||||
|
||||
def test_save_success_with_new_message(self, db_session_with_containers, mock_external_service_dependencies):
|
||||
"""
|
||||
Test successful save of a new message.
|
||||
|
||||
This test verifies:
|
||||
- Proper creation of new saved message
|
||||
- Correct database state after save
|
||||
- Proper relationship establishment
|
||||
- MessageService integration for message retrieval
|
||||
"""
|
||||
# Arrange: Create test data
|
||||
fake = Faker()
|
||||
app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies)
|
||||
message = self._create_test_message(db_session_with_containers, app, account)
|
||||
|
||||
# Mock MessageService.get_message return value
|
||||
mock_external_service_dependencies["message_service"].get_message.return_value = message
|
||||
|
||||
# Act: Execute the method under test
|
||||
SavedMessageService.save(app_model=app, user=account, message_id=message.id)
|
||||
|
||||
# Assert: Verify the expected outcomes
|
||||
# Check if saved message was created in database
|
||||
from extensions.ext_database import db
|
||||
|
||||
saved_message = (
|
||||
db.session.query(SavedMessage)
|
||||
.where(
|
||||
SavedMessage.app_id == app.id,
|
||||
SavedMessage.message_id == message.id,
|
||||
SavedMessage.created_by_role == "account",
|
||||
SavedMessage.created_by == account.id,
|
||||
)
|
||||
.first()
|
||||
)
|
||||
|
||||
assert saved_message is not None
|
||||
assert saved_message.app_id == app.id
|
||||
assert saved_message.message_id == message.id
|
||||
assert saved_message.created_by_role == "account"
|
||||
assert saved_message.created_by == account.id
|
||||
assert saved_message.created_at is not None
|
||||
|
||||
# Verify MessageService.get_message was called
|
||||
mock_external_service_dependencies["message_service"].get_message.assert_called_once_with(
|
||||
app_model=app, user=account, message_id=message.id
|
||||
)
|
||||
|
||||
# Verify database state
|
||||
db.session.refresh(saved_message)
|
||||
assert saved_message.id is not None
|
||||
|
||||
def test_pagination_by_last_id_error_no_user(self, db_session_with_containers, mock_external_service_dependencies):
|
||||
"""
|
||||
Test error handling when no user is provided.
|
||||
|
||||
This test verifies:
|
||||
- Proper error handling for missing user
|
||||
- ValueError is raised when user is None
|
||||
- No database operations are performed
|
||||
"""
|
||||
# Arrange: Create test data
|
||||
fake = Faker()
|
||||
app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies)
|
||||
|
||||
# Act & Assert: Verify proper error handling
|
||||
with pytest.raises(ValueError) as exc_info:
|
||||
SavedMessageService.pagination_by_last_id(app_model=app, user=None, last_id=None, limit=10)
|
||||
|
||||
assert "User is required" in str(exc_info.value)
|
||||
|
||||
# Verify no database operations were performed
|
||||
from extensions.ext_database import db
|
||||
|
||||
saved_messages = db.session.query(SavedMessage).all()
|
||||
assert len(saved_messages) == 0
|
||||
|
||||
def test_save_error_no_user(self, db_session_with_containers, mock_external_service_dependencies):
|
||||
"""
|
||||
Test error handling when saving message with no user.
|
||||
|
||||
This test verifies:
|
||||
- Method returns early when user is None
|
||||
- No database operations are performed
|
||||
- No exceptions are raised
|
||||
"""
|
||||
# Arrange: Create test data
|
||||
fake = Faker()
|
||||
app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies)
|
||||
message = self._create_test_message(db_session_with_containers, app, account)
|
||||
|
||||
# Act: Execute the method under test with None user
|
||||
result = SavedMessageService.save(app_model=app, user=None, message_id=message.id)
|
||||
|
||||
# Assert: Verify the expected outcomes
|
||||
assert result is None
|
||||
|
||||
# Verify no saved message was created
|
||||
from extensions.ext_database import db
|
||||
|
||||
saved_message = (
|
||||
db.session.query(SavedMessage)
|
||||
.where(
|
||||
SavedMessage.app_id == app.id,
|
||||
SavedMessage.message_id == message.id,
|
||||
)
|
||||
.first()
|
||||
)
|
||||
|
||||
assert saved_message is None
|
||||
|
||||
def test_delete_success_existing_message(self, db_session_with_containers, mock_external_service_dependencies):
|
||||
"""
|
||||
Test successful deletion of an existing saved message.
|
||||
|
||||
This test verifies:
|
||||
- Proper deletion of existing saved message
|
||||
- Correct database state after deletion
|
||||
- No errors during deletion process
|
||||
"""
|
||||
# Arrange: Create test data
|
||||
fake = Faker()
|
||||
app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies)
|
||||
message = self._create_test_message(db_session_with_containers, app, account)
|
||||
|
||||
# Create a saved message first
|
||||
saved_message = SavedMessage(
|
||||
app_id=app.id,
|
||||
message_id=message.id,
|
||||
created_by_role="account",
|
||||
created_by=account.id,
|
||||
)
|
||||
|
||||
from extensions.ext_database import db
|
||||
|
||||
db.session.add(saved_message)
|
||||
db.session.commit()
|
||||
|
||||
# Verify saved message exists
|
||||
assert (
|
||||
db.session.query(SavedMessage)
|
||||
.where(
|
||||
SavedMessage.app_id == app.id,
|
||||
SavedMessage.message_id == message.id,
|
||||
SavedMessage.created_by_role == "account",
|
||||
SavedMessage.created_by == account.id,
|
||||
)
|
||||
.first()
|
||||
is not None
|
||||
)
|
||||
|
||||
# Act: Execute the method under test
|
||||
SavedMessageService.delete(app_model=app, user=account, message_id=message.id)
|
||||
|
||||
# Assert: Verify the expected outcomes
|
||||
# Check if saved message was deleted from database
|
||||
deleted_saved_message = (
|
||||
db.session.query(SavedMessage)
|
||||
.where(
|
||||
SavedMessage.app_id == app.id,
|
||||
SavedMessage.message_id == message.id,
|
||||
SavedMessage.created_by_role == "account",
|
||||
SavedMessage.created_by == account.id,
|
||||
)
|
||||
.first()
|
||||
)
|
||||
|
||||
assert deleted_saved_message is None
|
||||
|
||||
# Verify database state
|
||||
db.session.commit()
|
||||
# The message should still exist, only the saved_message should be deleted
|
||||
assert db.session.query(Message).where(Message.id == message.id).first() is not None
|
||||
|
||||
def test_pagination_by_last_id_error_no_user(self, db_session_with_containers, mock_external_service_dependencies):
|
||||
"""
|
||||
Test error handling when no user is provided.
|
||||
|
||||
This test verifies:
|
||||
- Proper error handling for missing user
|
||||
- ValueError is raised when user is None
|
||||
- No database operations are performed
|
||||
"""
|
||||
# Arrange: Create test data
|
||||
fake = Faker()
|
||||
app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies)
|
||||
|
||||
# Act & Assert: Verify proper error handling
|
||||
with pytest.raises(ValueError) as exc_info:
|
||||
SavedMessageService.pagination_by_last_id(app_model=app, user=None, last_id=None, limit=10)
|
||||
|
||||
assert "User is required" in str(exc_info.value)
|
||||
|
||||
# Verify no database operations were performed for this specific test
|
||||
# Note: We don't check total count as other tests may have created data
|
||||
# Instead, we verify that the error was properly raised
|
||||
pass
|
||||
|
||||
def test_save_error_no_user(self, db_session_with_containers, mock_external_service_dependencies):
|
||||
"""
|
||||
Test error handling when saving message with no user.
|
||||
|
||||
This test verifies:
|
||||
- Method returns early when user is None
|
||||
- No database operations are performed
|
||||
- No exceptions are raised
|
||||
"""
|
||||
# Arrange: Create test data
|
||||
fake = Faker()
|
||||
app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies)
|
||||
message = self._create_test_message(db_session_with_containers, app, account)
|
||||
|
||||
# Act: Execute the method under test with None user
|
||||
result = SavedMessageService.save(app_model=app, user=None, message_id=message.id)
|
||||
|
||||
# Assert: Verify the expected outcomes
|
||||
assert result is None
|
||||
|
||||
# Verify no saved message was created
|
||||
from extensions.ext_database import db
|
||||
|
||||
saved_message = (
|
||||
db.session.query(SavedMessage)
|
||||
.where(
|
||||
SavedMessage.app_id == app.id,
|
||||
SavedMessage.message_id == message.id,
|
||||
)
|
||||
.first()
|
||||
)
|
||||
|
||||
assert saved_message is None
|
||||
|
||||
def test_delete_success_existing_message(self, db_session_with_containers, mock_external_service_dependencies):
|
||||
"""
|
||||
Test successful deletion of an existing saved message.
|
||||
|
||||
This test verifies:
|
||||
- Proper deletion of existing saved message
|
||||
- Correct database state after deletion
|
||||
- No errors during deletion process
|
||||
"""
|
||||
# Arrange: Create test data
|
||||
fake = Faker()
|
||||
app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies)
|
||||
message = self._create_test_message(db_session_with_containers, app, account)
|
||||
|
||||
# Create a saved message first
|
||||
saved_message = SavedMessage(
|
||||
app_id=app.id,
|
||||
message_id=message.id,
|
||||
created_by_role="account",
|
||||
created_by=account.id,
|
||||
)
|
||||
|
||||
from extensions.ext_database import db
|
||||
|
||||
db.session.add(saved_message)
|
||||
db.session.commit()
|
||||
|
||||
# Verify saved message exists
|
||||
assert (
|
||||
db.session.query(SavedMessage)
|
||||
.where(
|
||||
SavedMessage.app_id == app.id,
|
||||
SavedMessage.message_id == message.id,
|
||||
SavedMessage.created_by_role == "account",
|
||||
SavedMessage.created_by == account.id,
|
||||
)
|
||||
.first()
|
||||
is not None
|
||||
)
|
||||
|
||||
# Act: Execute the method under test
|
||||
SavedMessageService.delete(app_model=app, user=account, message_id=message.id)
|
||||
|
||||
# Assert: Verify the expected outcomes
|
||||
# Check if saved message was deleted from database
|
||||
deleted_saved_message = (
|
||||
db.session.query(SavedMessage)
|
||||
.where(
|
||||
SavedMessage.app_id == app.id,
|
||||
SavedMessage.message_id == message.id,
|
||||
SavedMessage.created_by_role == "account",
|
||||
SavedMessage.created_by == account.id,
|
||||
)
|
||||
.first()
|
||||
)
|
||||
|
||||
assert deleted_saved_message is None
|
||||
|
||||
# Verify database state
|
||||
db.session.commit()
|
||||
# The message should still exist, only the saved_message should be deleted
|
||||
assert db.session.query(Message).where(Message.id == message.id).first() is not None
|
181
api/tests/unit_tests/core/tools/utils/test_encryption.py
Normal file
181
api/tests/unit_tests/core/tools/utils/test_encryption.py
Normal file
@@ -0,0 +1,181 @@
|
||||
import copy
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
|
||||
from core.entities.provider_entities import BasicProviderConfig
|
||||
from core.tools.utils.encryption import ProviderConfigEncrypter
|
||||
|
||||
|
||||
# ---------------------------
|
||||
# A no-op cache
|
||||
# ---------------------------
|
||||
class NoopCache:
|
||||
"""Simple cache stub: always returns None, does nothing for set/delete."""
|
||||
|
||||
def get(self):
|
||||
return None
|
||||
|
||||
def set(self, config):
|
||||
pass
|
||||
|
||||
def delete(self):
|
||||
pass
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def secret_field() -> BasicProviderConfig:
|
||||
"""A SECRET_INPUT field named 'password'."""
|
||||
return BasicProviderConfig(
|
||||
name="password",
|
||||
type=BasicProviderConfig.Type.SECRET_INPUT,
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
def normal_field() -> BasicProviderConfig:
    """Provider config entry for a plain-text field named 'username'."""
    field_type = BasicProviderConfig.Type.TEXT_INPUT
    return BasicProviderConfig(name="username", type=field_type)
|
||||
|
||||
|
||||
@pytest.fixture
def encrypter_obj(secret_field, normal_field):
    """ProviderConfigEncrypter under test.

    Configured with tenant_id 'tenant123', one secret field (password),
    one plain field (username), and the NoopCache stub so no real cache
    layer is involved.
    """
    fields = [secret_field, normal_field]
    return ProviderConfigEncrypter(
        tenant_id="tenant123",
        config=fields,
        provider_config_cache=NoopCache(),
    )
|
||||
|
||||
|
||||
# ============================================================
|
||||
# ProviderConfigEncrypter.encrypt()
|
||||
# ============================================================
|
||||
|
||||
|
||||
def test_encrypt_only_secret_is_encrypted_and_non_secret_unchanged(encrypter_obj):
    """encrypt() ciphers only the secret field.

    The non-secret field passes through untouched, encrypt_token is invoked
    exactly once (for the secret), and the input dict is deep-copied rather
    than mutated.
    """
    payload = {"username": "alice", "password": "plain_pwd"}
    snapshot = copy.deepcopy(payload)

    with patch("core.tools.utils.encryption.encrypter.encrypt_token", return_value="CIPHERTEXT") as encrypt_mock:
        result = encrypter_obj.encrypt(payload)

    assert result["username"] == "alice"
    assert result["password"] == "CIPHERTEXT"
    encrypt_mock.assert_called_once_with("tenant123", "plain_pwd")
    assert payload == snapshot  # input must be left unmodified (deep copy)
|
||||
|
||||
|
||||
def test_encrypt_missing_secret_key_is_ok(encrypter_obj):
    """A payload without the secret key encrypts cleanly and never calls encrypt_token."""
    with patch("core.tools.utils.encryption.encrypter.encrypt_token") as encrypt_mock:
        result = encrypter_obj.encrypt({"username": "alice"})
    assert result["username"] == "alice"
    encrypt_mock.assert_not_called()
|
||||
|
||||
|
||||
# ============================================================
|
||||
# ProviderConfigEncrypter.mask_tool_credentials()
|
||||
# ============================================================
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
    ("raw", "prefix", "suffix"),
    [
        ("longsecret", "lo", "et"),
        ("abcdefg", "ab", "fg"),
        ("1234567", "12", "67"),
    ],
)
def test_mask_tool_credentials_long_secret(encrypter_obj, raw, prefix, suffix):
    """Secrets longer than 6 chars keep a 2-char head and tail with a '*' middle."""
    payload = {"username": "alice", "password": raw}
    snapshot = copy.deepcopy(payload)

    masked_value = encrypter_obj.mask_tool_credentials(payload)["password"]

    assert masked_value.startswith(prefix)
    assert masked_value.endswith(suffix)
    assert "*" in masked_value
    assert len(masked_value) == len(raw)
    assert payload == snapshot  # input must be left unmodified (deep copy)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("raw", ["", "1", "12", "123", "123456"])
def test_mask_tool_credentials_short_secret(encrypter_obj, raw):
    """Secrets of length <= 6 are replaced entirely by '*' of the same length."""
    masked = encrypter_obj.mask_tool_credentials({"password": raw})
    assert masked["password"] == "*" * len(raw)
|
||||
|
||||
|
||||
def test_mask_tool_credentials_missing_key_noop(encrypter_obj):
    """Masking a payload without the secret key leaves the other fields as-is."""
    payload = {"username": "alice"}
    snapshot = copy.deepcopy(payload)

    result = encrypter_obj.mask_tool_credentials(payload)

    assert result["username"] == "alice"
    assert payload == snapshot
|
||||
|
||||
|
||||
# ============================================================
|
||||
# ProviderConfigEncrypter.decrypt()
|
||||
# ============================================================
|
||||
|
||||
|
||||
def test_decrypt_normal_flow(encrypter_obj):
    """decrypt() deciphers only the secret field.

    decrypt_token is invoked once for the secret, its result replaces the
    stored ciphertext, the non-secret field is untouched, and the input dict
    is deep-copied rather than mutated.
    """
    payload = {"username": "alice", "password": "ENC"}
    snapshot = copy.deepcopy(payload)

    with patch("core.tools.utils.encryption.encrypter.decrypt_token", return_value="PLAIN") as decrypt_mock:
        result = encrypter_obj.decrypt(payload)

    assert result["username"] == "alice"
    assert result["password"] == "PLAIN"
    decrypt_mock.assert_called_once_with("tenant123", "ENC")
    assert payload == snapshot  # input must be left unmodified (deep copy)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("empty_val", ["", None])
def test_decrypt_skip_empty_values(encrypter_obj, empty_val):
    """Empty or None secret values are returned as-is; decrypt_token is never called."""
    with patch("core.tools.utils.encryption.encrypter.decrypt_token") as decrypt_mock:
        result = encrypter_obj.decrypt({"password": empty_val})

    decrypt_mock.assert_not_called()
    assert result["password"] == empty_val
|
||||
|
||||
|
||||
def test_decrypt_swallow_exception_and_keep_original(encrypter_obj):
    """A decrypt_token failure is swallowed and the stored value survives unchanged."""
    with patch("core.tools.utils.encryption.encrypter.decrypt_token", side_effect=Exception("boom")):
        result = encrypter_obj.decrypt({"password": "ENC_ERR"})

    assert result["password"] == "ENC_ERR"
|
@@ -112,7 +112,7 @@ const WorkflowAppLogList: FC<ILogs> = ({ logs, appDetail, onRefresh }) => {
|
||||
</div>
|
||||
)}
|
||||
</td>
|
||||
<td className='w-[160px] p-3 pr-2'>{formatTime(log.created_at, t('appLog.dateTimeFormat') as string)}</td>
|
||||
<td className='w-[180px] p-3 pr-2'>{formatTime(log.created_at, t('appLog.dateTimeFormat') as string)}</td>
|
||||
<td className='p-3 pr-2'>{statusTdRender(log.workflow_run.status)}</td>
|
||||
<td className='p-3 pr-2'>
|
||||
<div className={cn(
|
||||
|
@@ -1,7 +1,7 @@
|
||||
const translation = {
|
||||
title: 'Protokolle',
|
||||
description: 'Die Protokolle zeichnen den Betriebsstatus der Anwendung auf, einschließlich Benutzereingaben und KI-Antworten.',
|
||||
dateTimeFormat: 'MM/DD/YYYY hh:mm A',
|
||||
dateTimeFormat: 'MM/DD/YYYY hh:mm:ss A',
|
||||
dateFormat: 'MM/DD/YYYY',
|
||||
table: {
|
||||
header: {
|
||||
|
@@ -1,7 +1,7 @@
|
||||
const translation = {
|
||||
title: 'Logs',
|
||||
description: 'The logs record the running status of the application, including user inputs and AI replies.',
|
||||
dateTimeFormat: 'MM/DD/YYYY hh:mm A',
|
||||
dateTimeFormat: 'MM/DD/YYYY hh:mm:ss A',
|
||||
dateFormat: 'MM/DD/YYYY',
|
||||
table: {
|
||||
header: {
|
||||
|
@@ -1,7 +1,7 @@
|
||||
const translation = {
|
||||
title: 'Registros',
|
||||
description: 'Los registros registran el estado de ejecución de la aplicación, incluyendo las entradas de usuario y las respuestas de la IA.',
|
||||
dateTimeFormat: 'MM/DD/YYYY hh:mm A',
|
||||
dateTimeFormat: 'MM/DD/YYYY hh:mm:ss A',
|
||||
table: {
|
||||
header: {
|
||||
updatedTime: 'Hora actualizada',
|
||||
|
@@ -1,7 +1,7 @@
|
||||
const translation = {
|
||||
title: 'لاگها',
|
||||
description: 'لاگها وضعیت اجرایی برنامه را ثبت میکنند، شامل ورودیهای کاربر و پاسخهای هوش مصنوعی.',
|
||||
dateTimeFormat: 'MM/DD/YYYY hh:mm A',
|
||||
dateTimeFormat: 'MM/DD/YYYY hh:mm:ss A',
|
||||
table: {
|
||||
header: {
|
||||
updatedTime: 'زمان بهروزرسانی',
|
||||
|
@@ -1,7 +1,7 @@
|
||||
const translation = {
|
||||
title: 'Journaux',
|
||||
description: 'Les journaux enregistrent l\'état d\'exécution de l\'application, y compris les entrées utilisateur et les réponses de l\'IA.',
|
||||
dateTimeFormat: 'MM/DD/YYYY hh:mm A',
|
||||
dateTimeFormat: 'MM/DD/YYYY hh:mm:ss A',
|
||||
table: {
|
||||
header: {
|
||||
updatedTime: 'Heure de mise à jour',
|
||||
|
@@ -1,7 +1,7 @@
|
||||
const translation = {
|
||||
title: 'लॉग्स',
|
||||
description: 'लॉग्स एप्लिकेशन के रनिंग स्टेटस को रिकॉर्ड करते हैं, जिसमें यूजर इनपुट और एआई रिप्लाईज़ शामिल हैं।',
|
||||
dateTimeFormat: 'MM/DD/YYYY hh:mm A',
|
||||
dateTimeFormat: 'MM/DD/YYYY hh:mm:ss A',
|
||||
table: {
|
||||
header: {
|
||||
updatedTime: 'अपडेट का समय',
|
||||
|
@@ -2,7 +2,7 @@ const translation = {
|
||||
title: 'Registri',
|
||||
description:
|
||||
'I registri registrano lo stato di esecuzione dell\'applicazione, inclusi input degli utenti e risposte AI.',
|
||||
dateTimeFormat: 'MM/DD/YYYY hh:mm A',
|
||||
dateTimeFormat: 'MM/DD/YYYY hh:mm:ss A',
|
||||
table: {
|
||||
header: {
|
||||
updatedTime: 'Ora di aggiornamento',
|
||||
|
@@ -1,7 +1,7 @@
|
||||
const translation = {
|
||||
title: 'ログ',
|
||||
description: 'ログは、アプリケーションの実行状態を記録します。ユーザーの入力や AI の応答などが含まれます。',
|
||||
dateTimeFormat: 'YYYY/MM/DD hh:mm A',
|
||||
dateTimeFormat: 'YYYY/MM/DD hh:mm:ss A',
|
||||
dateFormat: 'YYYY/MM/DD',
|
||||
table: {
|
||||
header: {
|
||||
|
@@ -1,7 +1,7 @@
|
||||
const translation = {
|
||||
title: '로그',
|
||||
description: '로그는 애플리케이션 실행 상태를 기록합니다. 사용자 입력 및 AI 응답이 포함됩니다.',
|
||||
dateTimeFormat: 'YYYY/MM/DD HH:mm',
|
||||
dateTimeFormat: 'YYYY/MM/DD HH:mm:ss',
|
||||
table: {
|
||||
header: {
|
||||
updatedTime: '업데이트 시간',
|
||||
|
@@ -2,7 +2,7 @@ const translation = {
|
||||
title: 'Dzienniki',
|
||||
description:
|
||||
'Dzienniki rejestrują stan działania aplikacji, w tym dane wejściowe użytkowników i odpowiedzi AI.',
|
||||
dateTimeFormat: 'DD/MM/YYYY HH:mm',
|
||||
dateTimeFormat: 'DD/MM/YYYY HH:mm:ss',
|
||||
table: {
|
||||
header: {
|
||||
updatedTime: 'Czas aktualizacji',
|
||||
|
@@ -1,7 +1,7 @@
|
||||
const translation = {
|
||||
title: 'Registros',
|
||||
description: 'Os registros registram o status de execução do aplicativo, incluindo entradas do usuário e respostas do AI.',
|
||||
dateTimeFormat: 'MM/DD/YYYY hh:mm A',
|
||||
dateTimeFormat: 'MM/DD/YYYY hh:mm:ss A',
|
||||
table: {
|
||||
header: {
|
||||
updatedTime: 'Hora de atualização',
|
||||
|
@@ -1,7 +1,7 @@
|
||||
const translation = {
|
||||
title: 'Jurnale',
|
||||
description: 'Jurnalele înregistrează starea de funcționare a aplicației, inclusiv intrările utilizatorilor și răspunsurile AI.',
|
||||
dateTimeFormat: 'DD/MM/YYYY hh:mm A',
|
||||
dateTimeFormat: 'DD/MM/YYYY hh:mm:ss A',
|
||||
table: {
|
||||
header: {
|
||||
updatedTime: 'Timp actualizare',
|
||||
|
@@ -1,7 +1,7 @@
|
||||
const translation = {
|
||||
title: 'Логирование',
|
||||
description: 'В логах записывается состояние работы приложения, включая пользовательский ввод и ответы ИИ.',
|
||||
dateTimeFormat: 'DD.MM.YYYY HH:mm',
|
||||
dateTimeFormat: 'DD.MM.YYYY HH:mm:ss',
|
||||
table: {
|
||||
header: {
|
||||
updatedTime: 'Время обновления',
|
||||
|
@@ -1,7 +1,7 @@
|
||||
const translation = {
|
||||
title: 'Dnevniki',
|
||||
description: 'Dnevniki beležijo stanje delovanja aplikacije, vključno z vnosi uporabnikov in odgovori umetne inteligence.',
|
||||
dateTimeFormat: 'DD.MM.YYYY hh:mm A',
|
||||
dateTimeFormat: 'DD.MM.YYYY hh:mm:ss A',
|
||||
table: {
|
||||
header: {
|
||||
updatedTime: 'Čas posodobitve',
|
||||
|
@@ -1,7 +1,7 @@
|
||||
const translation = {
|
||||
title: 'บันทึก',
|
||||
description: 'บันทึกบันทึกสถานะการทํางานของแอปพลิเคชัน รวมถึงการป้อนข้อมูลของผู้ใช้และการตอบกลับ AI',
|
||||
dateTimeFormat: 'MM/DD/YYYY hh:mm A',
|
||||
dateTimeFormat: 'MM/DD/YYYY hh:mm:ss A',
|
||||
table: {
|
||||
header: {
|
||||
updatedTime: 'อัพเดทเวลา',
|
||||
|
@@ -1,7 +1,7 @@
|
||||
const translation = {
|
||||
title: 'Журнали',
|
||||
description: 'Журнали фіксують робочий статус додатка, включаючи введення користувачів та відповіді штучного інтелекту.',
|
||||
dateTimeFormat: 'MM/DD/YYYY hh:mm A',
|
||||
dateTimeFormat: 'MM/DD/YYYY hh:mm:ss A',
|
||||
table: {
|
||||
header: {
|
||||
updatedTime: 'Час оновлення',
|
||||
|
@@ -1,7 +1,7 @@
|
||||
const translation = {
|
||||
title: 'Nhật ký',
|
||||
description: 'Nhật ký ghi lại trạng thái hoạt động của ứng dụng, bao gồm đầu vào của người dùng và phản hồi của trí tuệ nhân tạo.',
|
||||
dateTimeFormat: 'MM/DD/YYYY hh:mm A',
|
||||
dateTimeFormat: 'MM/DD/YYYY hh:mm:ss A',
|
||||
table: {
|
||||
header: {
|
||||
updatedTime: 'Thời gian cập nhật',
|
||||
|
@@ -1,7 +1,7 @@
|
||||
const translation = {
|
||||
title: '日志',
|
||||
description: '日志记录了应用的运行情况,包括用户的输入和 AI 的回复。',
|
||||
dateTimeFormat: 'YYYY-MM-DD HH:mm',
|
||||
dateTimeFormat: 'YYYY-MM-DD HH:mm:ss',
|
||||
dateFormat: 'YYYY-MM-DD',
|
||||
table: {
|
||||
header: {
|
||||
|
@@ -1,7 +1,7 @@
|
||||
const translation = {
|
||||
title: '日誌',
|
||||
description: '日誌記錄了應用的執行情況,包括使用者的輸入和 AI 的回覆。',
|
||||
dateTimeFormat: 'YYYY-MM-DD HH:mm',
|
||||
dateTimeFormat: 'YYYY-MM-DD HH:mm:ss',
|
||||
table: {
|
||||
header: {
|
||||
updatedTime: '更新時間',
|
||||
|
Reference in New Issue
Block a user