feat: mypy for all type check (#10921)

Author: yihong
Date: 2024-12-24 18:38:51 +08:00 (committed by GitHub)
Parent: c91e8b1737
Commit: 56e15d09a9
584 changed files with 3975 additions and 2826 deletions
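The hunks below follow two patterns: imports of third-party packages that ship no inline annotations or stubs (google.generativeai, huggingface_hub, nomic, xinference_client, dashscope) gain a trailing "# type: ignore" comment so the repo-wide mypy run does not fail on them, and one test mock gets a corrected return annotation. A minimal, illustrative sketch of the import pattern follows; it is not code from this commit and the helper function is hypothetical.

# Minimal illustrative sketch (not code from this commit): the pattern applied in
# the hunks below.  Without the trailing comment, a strict repo-wide mypy run
# reports an error for packages that ship neither inline types, a py.typed
# marker, nor a separate stub package.
import dashscope  # type: ignore


def module_name(mod: object) -> str:
    # First-party annotations such as this one are still checked by mypy; only
    # the untyped import is exempted, and its attributes are treated as Any.
    return getattr(mod, "__name__", "<unknown module>")


print(module_name(dashscope))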

View File

@@ -1,7 +1,7 @@
from collections.abc import Generator
from unittest.mock import MagicMock
-import google.generativeai.types.generation_types as generation_config_types
+import google.generativeai.types.generation_types as generation_config_types  # type: ignore
import pytest
from _pytest.monkeypatch import MonkeyPatch
from google.ai import generativelanguage as glm
@@ -45,7 +45,7 @@ class MockGoogleClass:
        return GenerateContentResponse(done=True, iterator=None, result=glm.GenerateContentResponse({}), chunks=[])

    @staticmethod
-    def generate_content_stream() -> Generator[GenerateContentResponse, None, None]:
+    def generate_content_stream() -> MockGoogleResponseClass:
        return MockGoogleResponseClass()

    def generate_content(
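The hunk above is the one change in this section that is not an ignore comment: the mock's streaming method returns an instance of MockGoogleResponseClass rather than a generator, so the old Generator[...] annotation no longer matches under mypy. A simplified sketch of the same kind of mismatch, with assumed shapes (MockResponse here is hypothetical, not the real mock class):

# Illustrative sketch only: why mypy rejects annotating a method that returns a
# custom iterable as if it returned a Generator.
from collections.abc import Generator, Iterator


class MockResponse:
    """Stands in for a streaming-response mock: iterable, but not a generator."""

    def __iter__(self) -> Iterator[str]:
        yield "chunk"


def stream_old() -> Generator[str, None, None]:
    # mypy: Incompatible return value type (got "MockResponse",
    # expected "Generator[str, None, None]")
    return MockResponse()


def stream_new() -> MockResponse:
    # Annotating the concrete mock type matches what is actually returned.
    return MockResponse()


for chunk in stream_new():
    print(chunk)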

View File

@@ -2,7 +2,7 @@ import os
import pytest
from _pytest.monkeypatch import MonkeyPatch
-from huggingface_hub import InferenceClient
+from huggingface_hub import InferenceClient  # type: ignore
from tests.integration_tests.model_runtime.__mock.huggingface_chat import MockHuggingfaceChatClass

View File

@@ -3,15 +3,15 @@ from collections.abc import Generator
from typing import Any, Literal, Optional, Union
from _pytest.monkeypatch import MonkeyPatch
-from huggingface_hub import InferenceClient
-from huggingface_hub.inference._text_generation import (
+from huggingface_hub import InferenceClient  # type: ignore
+from huggingface_hub.inference._text_generation import (  # type: ignore
    Details,
    StreamDetails,
    TextGenerationResponse,
    TextGenerationStreamResponse,
    Token,
)
-from huggingface_hub.utils import BadRequestError
+from huggingface_hub.utils import BadRequestError  # type: ignore
class MockHuggingfaceChatClass:

View File

@@ -6,7 +6,7 @@ import pytest
# import monkeypatch
from _pytest.monkeypatch import MonkeyPatch
-from nomic import embed
+from nomic import embed  # type: ignore
def create_embedding(texts: list[str], model: str, **kwargs: Any) -> dict:

View File

@@ -6,14 +6,14 @@ import pytest
from _pytest.monkeypatch import MonkeyPatch
from requests import Response
from requests.sessions import Session
-from xinference_client.client.restful.restful_client import (
+from xinference_client.client.restful.restful_client import (  # type: ignore
    Client,
    RESTfulChatModelHandle,
    RESTfulEmbeddingModelHandle,
    RESTfulGenerateModelHandle,
    RESTfulRerankModelHandle,
)
-from xinference_client.types import Embedding, EmbeddingData, EmbeddingUsage
+from xinference_client.types import Embedding, EmbeddingData, EmbeddingUsage  # type: ignore
class MockXinferenceClass:

View File

@@ -1,6 +1,6 @@
import os
-import dashscope
+import dashscope  # type: ignore
import pytest
from core.model_runtime.entities.rerank_entities import RerankResult