diff --git a/api/controllers/service_api/dataset/dataset.py b/api/controllers/service_api/dataset/dataset.py
index 27e8dd3fa..1467dfb6b 100644
--- a/api/controllers/service_api/dataset/dataset.py
+++ b/api/controllers/service_api/dataset/dataset.py
@@ -5,7 +5,11 @@ from werkzeug.exceptions import Forbidden, NotFound
 import services.dataset_service
 from controllers.service_api import api
 from controllers.service_api.dataset.error import DatasetInUseError, DatasetNameDuplicateError
-from controllers.service_api.wraps import DatasetApiResource, validate_dataset_token
+from controllers.service_api.wraps import (
+    DatasetApiResource,
+    cloud_edition_billing_rate_limit_check,
+    validate_dataset_token,
+)
 from core.model_runtime.entities.model_entities import ModelType
 from core.plugin.entities.plugin import ModelProviderID
 from core.provider_manager import ProviderManager
@@ -70,6 +74,7 @@ class DatasetListApi(DatasetApiResource):
         response = {"data": data, "has_more": len(datasets) == limit, "limit": limit, "total": total, "page": page}
         return response, 200
 
+    @cloud_edition_billing_rate_limit_check("knowledge", "dataset")
     def post(self, tenant_id):
         """Resource for creating datasets."""
         parser = reqparse.RequestParser()
@@ -193,6 +198,7 @@ class DatasetApi(DatasetApiResource):
 
         return data, 200
 
+    @cloud_edition_billing_rate_limit_check("knowledge", "dataset")
     def patch(self, _, dataset_id):
         dataset_id_str = str(dataset_id)
         dataset = DatasetService.get_dataset(dataset_id_str)
@@ -293,6 +299,7 @@ class DatasetApi(DatasetApiResource):
 
         return result_data, 200
 
+    @cloud_edition_billing_rate_limit_check("knowledge", "dataset")
     def delete(self, _, dataset_id):
         """
         Deletes a dataset given its ID.
diff --git a/api/controllers/service_api/dataset/document.py b/api/controllers/service_api/dataset/document.py
index ab7ab4dcf..e4779f3bd 100644
--- a/api/controllers/service_api/dataset/document.py
+++ b/api/controllers/service_api/dataset/document.py
@@ -19,7 +19,11 @@ from controllers.service_api.dataset.error import (
     ArchivedDocumentImmutableError,
     DocumentIndexingError,
 )
-from controllers.service_api.wraps import DatasetApiResource, cloud_edition_billing_resource_check
+from controllers.service_api.wraps import (
+    DatasetApiResource,
+    cloud_edition_billing_rate_limit_check,
+    cloud_edition_billing_resource_check,
+)
 from core.errors.error import ProviderTokenNotInitError
 from extensions.ext_database import db
 from fields.document_fields import document_fields, document_status_fields
@@ -35,6 +39,7 @@ class DocumentAddByTextApi(DatasetApiResource):
 
     @cloud_edition_billing_resource_check("vector_space", "dataset")
     @cloud_edition_billing_resource_check("documents", "dataset")
+    @cloud_edition_billing_rate_limit_check("knowledge", "dataset")
     def post(self, tenant_id, dataset_id):
         """Create document by text."""
         parser = reqparse.RequestParser()
@@ -99,6 +104,7 @@ class DocumentUpdateByTextApi(DatasetApiResource):
     """Resource for update documents."""
 
     @cloud_edition_billing_resource_check("vector_space", "dataset")
+    @cloud_edition_billing_rate_limit_check("knowledge", "dataset")
     def post(self, tenant_id, dataset_id, document_id):
         """Update document by text."""
         parser = reqparse.RequestParser()
@@ -158,6 +164,7 @@ class DocumentAddByFileApi(DatasetApiResource):
 
     @cloud_edition_billing_resource_check("vector_space", "dataset")
     @cloud_edition_billing_resource_check("documents", "dataset")
+    @cloud_edition_billing_rate_limit_check("knowledge", "dataset")
     def post(self, tenant_id, dataset_id):
         """Create document by upload file."""
         args = {}
@@ -232,6 +239,7 @@ class DocumentUpdateByFileApi(DatasetApiResource):
     """Resource for update documents."""
 
     @cloud_edition_billing_resource_check("vector_space", "dataset")
+    @cloud_edition_billing_rate_limit_check("knowledge", "dataset")
     def post(self, tenant_id, dataset_id, document_id):
         """Update document by upload file."""
         args = {}
@@ -302,6 +310,7 @@ class DocumentUpdateByFileApi(DatasetApiResource):
 
 
 class DocumentDeleteApi(DatasetApiResource):
+    @cloud_edition_billing_rate_limit_check("knowledge", "dataset")
     def delete(self, tenant_id, dataset_id, document_id):
         """Delete document."""
         document_id = str(document_id)
diff --git a/api/controllers/service_api/dataset/hit_testing.py b/api/controllers/service_api/dataset/hit_testing.py
index 465f71bf0..52e9bca5d 100644
--- a/api/controllers/service_api/dataset/hit_testing.py
+++ b/api/controllers/service_api/dataset/hit_testing.py
@@ -1,9 +1,10 @@
 from controllers.console.datasets.hit_testing_base import DatasetsHitTestingBase
 from controllers.service_api import api
-from controllers.service_api.wraps import DatasetApiResource
+from controllers.service_api.wraps import DatasetApiResource, cloud_edition_billing_rate_limit_check
 
 
 class HitTestingApi(DatasetApiResource, DatasetsHitTestingBase):
+    @cloud_edition_billing_rate_limit_check("knowledge", "dataset")
     def post(self, tenant_id, dataset_id):
         dataset_id_str = str(dataset_id)
 
diff --git a/api/controllers/service_api/dataset/metadata.py b/api/controllers/service_api/dataset/metadata.py
index 35582feea..1968696ee 100644
--- a/api/controllers/service_api/dataset/metadata.py
+++ b/api/controllers/service_api/dataset/metadata.py
@@ -3,7 +3,7 @@ from flask_restful import marshal, reqparse
 from werkzeug.exceptions import NotFound
 
 from controllers.service_api import api
-from controllers.service_api.wraps import DatasetApiResource
+from controllers.service_api.wraps import DatasetApiResource, cloud_edition_billing_rate_limit_check
 from fields.dataset_fields import dataset_metadata_fields
 from services.dataset_service import DatasetService
 from services.entities.knowledge_entities.knowledge_entities import (
@@ -14,6 +14,7 @@ from services.metadata_service import MetadataService
 
 
 class DatasetMetadataCreateServiceApi(DatasetApiResource):
+    @cloud_edition_billing_rate_limit_check("knowledge", "dataset")
     def post(self, tenant_id, dataset_id):
         parser = reqparse.RequestParser()
         parser.add_argument("type", type=str, required=True, nullable=True, location="json")
@@ -39,6 +40,7 @@ class DatasetMetadataCreateServiceApi(DatasetApiResource):
 
 
 class DatasetMetadataServiceApi(DatasetApiResource):
+    @cloud_edition_billing_rate_limit_check("knowledge", "dataset")
     def patch(self, tenant_id, dataset_id, metadata_id):
         parser = reqparse.RequestParser()
         parser.add_argument("name", type=str, required=True, nullable=True, location="json")
@@ -54,6 +56,7 @@ class DatasetMetadataServiceApi(DatasetApiResource):
         metadata = MetadataService.update_metadata_name(dataset_id_str, metadata_id_str, args.get("name"))
         return marshal(metadata, dataset_metadata_fields), 200
 
+    @cloud_edition_billing_rate_limit_check("knowledge", "dataset")
     def delete(self, tenant_id, dataset_id, metadata_id):
         dataset_id_str = str(dataset_id)
         metadata_id_str = str(metadata_id)
@@ -73,6 +76,7 @@ class DatasetMetadataBuiltInFieldServiceApi(DatasetApiResource):
 
 
 class DatasetMetadataBuiltInFieldActionServiceApi(DatasetApiResource):
+    @cloud_edition_billing_rate_limit_check("knowledge", "dataset")
     def post(self, tenant_id, dataset_id, action):
         dataset_id_str = str(dataset_id)
         dataset = DatasetService.get_dataset(dataset_id_str)
@@ -88,6 +92,7 @@ class DatasetMetadataBuiltInFieldActionServiceApi(DatasetApiResource):
 
 
 class DocumentMetadataEditServiceApi(DatasetApiResource):
+    @cloud_edition_billing_rate_limit_check("knowledge", "dataset")
    def post(self, tenant_id, dataset_id):
         dataset_id_str = str(dataset_id)
         dataset = DatasetService.get_dataset(dataset_id_str)
diff --git a/api/controllers/service_api/dataset/segment.py b/api/controllers/service_api/dataset/segment.py
index 337752275..403b7f0a0 100644
--- a/api/controllers/service_api/dataset/segment.py
+++ b/api/controllers/service_api/dataset/segment.py
@@ -8,6 +8,7 @@ from controllers.service_api.app.error import ProviderNotInitializeError
 from controllers.service_api.wraps import (
     DatasetApiResource,
     cloud_edition_billing_knowledge_limit_check,
+    cloud_edition_billing_rate_limit_check,
     cloud_edition_billing_resource_check,
 )
 from core.errors.error import LLMBadRequestError, ProviderTokenNotInitError
@@ -35,6 +36,7 @@ class SegmentApi(DatasetApiResource):
 
     @cloud_edition_billing_resource_check("vector_space", "dataset")
     @cloud_edition_billing_knowledge_limit_check("add_segment", "dataset")
+    @cloud_edition_billing_rate_limit_check("knowledge", "dataset")
     def post(self, tenant_id, dataset_id, document_id):
         """Create single segment."""
         # check dataset
@@ -139,6 +141,7 @@ class SegmentApi(DatasetApiResource):
 
 
 class DatasetSegmentApi(DatasetApiResource):
+    @cloud_edition_billing_rate_limit_check("knowledge", "dataset")
     def delete(self, tenant_id, dataset_id, document_id, segment_id):
         # check dataset
         dataset_id = str(dataset_id)
@@ -162,6 +165,7 @@ class DatasetSegmentApi(DatasetApiResource):
         return 204
 
     @cloud_edition_billing_resource_check("vector_space", "dataset")
+    @cloud_edition_billing_rate_limit_check("knowledge", "dataset")
     def post(self, tenant_id, dataset_id, document_id, segment_id):
         # check dataset
         dataset_id = str(dataset_id)
@@ -236,6 +240,7 @@ class ChildChunkApi(DatasetApiResource):
 
     @cloud_edition_billing_resource_check("vector_space", "dataset")
     @cloud_edition_billing_knowledge_limit_check("add_segment", "dataset")
+    @cloud_edition_billing_rate_limit_check("knowledge", "dataset")
     def post(self, tenant_id, dataset_id, document_id, segment_id):
         """Create child chunk."""
         # check dataset
@@ -332,6 +337,7 @@ class DatasetChildChunkApi(DatasetApiResource):
     """Resource for updating child chunks."""
 
     @cloud_edition_billing_knowledge_limit_check("add_segment", "dataset")
+    @cloud_edition_billing_rate_limit_check("knowledge", "dataset")
     def delete(self, tenant_id, dataset_id, document_id, segment_id, child_chunk_id):
         """Delete child chunk."""
         # check dataset
@@ -370,6 +376,7 @@ class DatasetChildChunkApi(DatasetApiResource):
 
     @cloud_edition_billing_resource_check("vector_space", "dataset")
     @cloud_edition_billing_knowledge_limit_check("add_segment", "dataset")
+    @cloud_edition_billing_rate_limit_check("knowledge", "dataset")
     def patch(self, tenant_id, dataset_id, document_id, segment_id, child_chunk_id):
         """Update child chunk."""
         # check dataset
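Note: the decorator applied throughout these hunks, cloud_edition_billing_rate_limit_check("knowledge", "dataset"), is defined in api/controllers/service_api/wraps.py, which is not part of this diff. As a rough illustration of the per-tenant rate-limit pattern such a decorator follows, a minimal sketch is given below; the window size, quota, in-memory store, and helper names are assumptions made for the example and do not reflect the actual wraps.py implementation.

# Hypothetical sketch only; the real cloud_edition_billing_rate_limit_check may differ.
import time
from functools import wraps

from flask import abort

_request_log: dict[str, list[float]] = {}  # stands in for a shared store (e.g. Redis) in this sketch

WINDOW_SECONDS = 60            # assumed sliding window
MAX_REQUESTS_PER_WINDOW = 60   # assumed quota for "knowledge" operations


def billing_rate_limit_check(resource: str, api_token_type: str):
    """Reject a tenant's request once it exceeds the per-window quota for `resource`."""

    def decorator(view):
        @wraps(view)
        def wrapper(*args, **kwargs):
            # In this sketch the tenant id is taken from the view arguments;
            # the real decorator would resolve it from the validated dataset API token.
            tenant_id = kwargs.get("tenant_id") or (args[1] if len(args) > 1 else "unknown")
            key = f"rate_limit:{api_token_type}:{resource}:{tenant_id}"

            now = time.time()
            recent = [t for t in _request_log.get(key, []) if now - t < WINDOW_SECONDS]
            if len(recent) >= MAX_REQUESTS_PER_WINDOW:
                abort(429, description=f"Rate limit exceeded for {resource} operations.")
            recent.append(now)
            _request_log[key] = recent

            return view(*args, **kwargs)

        return wrapper

    return decorator

Applied the way the hunks above apply the real decorator (stacked above the other billing checks on each write-path method), every knowledge write endpoint of a tenant would share one quota key in this sketch, while read-only endpoints remain unthrottled.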