improve: introduce isort for linting Python imports (#1983)

Author: Bowen Liang
Date: 2024-01-12 12:34:01 +08:00
Committed by: GitHub
Parent: cca9edc97a
Commit: cc9e74123c
413 changed files with 1635 additions and 1906 deletions
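The hunks below show only the re-sorted import headers of the affected files; the isort configuration and lint wiring that the commit adds are not part of this excerpt. As a rough sketch of the transformation being applied (library defaults are assumed here, not the repository's actual settings), isort's Python API reproduces the same kind of reordering:

    # Illustrative sketch only: the repository's real isort settings are not
    # shown in this excerpt, so library defaults are assumed.
    import isort

    messy = (
        "from anthropic import Anthropic\n"
        "from typing import Any, Generator\n"
        "import pytest\n"
        "import os\n"
    )

    print(isort.code(messy))
    # With default settings the standard-library group comes first, then a blank
    # line, then the third-party group, with plain imports ahead of from-imports
    # in each group:
    #   import os
    #   from typing import Any, Generator
    #
    #   import pytest
    #   from anthropic import Anthropic

Running the tool over a path (for example "isort ." from the project root) rewrites files in place the same way, which is what produces the largely mechanical diff below.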

View File

@@ -1,16 +1,14 @@
-import anthropic
-from anthropic import Anthropic
-from anthropic.resources.completions import Completions
-from anthropic.types import completion_create_params, Completion
-from anthropic._types import NOT_GIVEN, NotGiven, Headers, Query, Body
-from _pytest.monkeypatch import MonkeyPatch
-from typing import List, Union, Literal, Any, Generator
-from time import sleep
-import pytest
 import os
+from time import sleep
+from typing import Any, Generator, List, Literal, Union
+import anthropic
+import pytest
+from _pytest.monkeypatch import MonkeyPatch
+from anthropic import Anthropic
+from anthropic._types import NOT_GIVEN, Body, Headers, NotGiven, Query
+from anthropic.resources.completions import Completions
+from anthropic.types import Completion, completion_create_params
 MOCK = os.getenv('MOCK_SWITCH', 'false') == 'true'
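The re-sorted header above shows the layout isort appears to enforce throughout this diff: standard-library imports first, third-party packages after a blank line, plain import statements ahead of from-imports inside each group, and the names within each from-import reordered as well. A condensed sketch of that shape, reusing names from the hunk above:

    # Sketch of the import layout produced in the re-sorted files; the names come
    # from the hunk above, while the exact section configuration is an assumption.
    import os                       # standard library: plain imports first
    from time import sleep          # standard library: from-imports next
    from typing import Any, Generator, List, Literal, Union

    import pytest                   # blank line, then third-party packages
    from anthropic import Anthropic
    from anthropic.types import Completion, completion_create_params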

View File

@@ -1,17 +1,15 @@
+from typing import Generator, List
+import google.generativeai.types.content_types as content_types
+import google.generativeai.types.generation_types as generation_config_types
+import google.generativeai.types.safety_types as safety_types
+import pytest
+from _pytest.monkeypatch import MonkeyPatch
+from google.ai import generativelanguage as glm
 from google.generativeai import GenerativeModel
+from google.generativeai.client import _ClientManager, configure
 from google.generativeai.types import GenerateContentResponse
 from google.generativeai.types.generation_types import BaseGenerateContentResponse
-import google.generativeai.types.generation_types as generation_config_types
-import google.generativeai.types.content_types as content_types
-import google.generativeai.types.safety_types as safety_types
-from google.generativeai.client import _ClientManager, configure
-from google.ai import generativelanguage as glm
-from typing import Generator, List
-from _pytest.monkeypatch import MonkeyPatch
-import pytest
 current_api_key = ''

View File

@@ -1,12 +1,10 @@
-from tests.integration_tests.model_runtime.__mock.huggingface_chat import MockHuggingfaceChatClass
-from huggingface_hub import InferenceClient
-from _pytest.monkeypatch import MonkeyPatch
-from typing import List, Dict, Any
+import os
+from typing import Any, Dict, List
 import pytest
-import os
+from _pytest.monkeypatch import MonkeyPatch
+from huggingface_hub import InferenceClient
+from tests.integration_tests.model_runtime.__mock.huggingface_chat import MockHuggingfaceChatClass
 MOCK = os.getenv('MOCK_SWITCH', 'false').lower() == 'true'

View File

@@ -1,11 +1,12 @@
+import re
+from typing import Any, Generator, List, Literal, Optional, Union
+from _pytest.monkeypatch import MonkeyPatch
 from huggingface_hub import InferenceClient
-from huggingface_hub.inference._text_generation import TextGenerationResponse, TextGenerationStreamResponse, Details, StreamDetails, Token
+from huggingface_hub.inference._text_generation import (Details, StreamDetails, TextGenerationResponse,
+                                                        TextGenerationStreamResponse, Token)
 from huggingface_hub.utils import BadRequestError
-from typing import Literal, Optional, List, Generator, Union, Any
-from _pytest.monkeypatch import MonkeyPatch
-import re
 class MockHuggingfaceChatClass(object):
     @staticmethod

View File

@@ -1,22 +1,22 @@
-from tests.integration_tests.model_runtime.__mock.openai_completion import MockCompletionsClass
-from tests.integration_tests.model_runtime.__mock.openai_chat import MockChatClass
-from tests.integration_tests.model_runtime.__mock.openai_remote import MockModelClass
-from tests.integration_tests.model_runtime.__mock.openai_moderation import MockModerationClass
-from tests.integration_tests.model_runtime.__mock.openai_speech2text import MockSpeech2TextClass
-from tests.integration_tests.model_runtime.__mock.openai_embeddings import MockEmbeddingsClass
-from openai.resources.completions import Completions
-from openai.resources.chat import Completions as ChatCompletions
-from openai.resources.models import Models
-from openai.resources.moderations import Moderations
-from openai.resources.audio.transcriptions import Transcriptions
-from openai.resources.embeddings import Embeddings
+import os
+from typing import Callable, List, Literal
+import pytest
 # import monkeypatch
 from _pytest.monkeypatch import MonkeyPatch
-from typing import Literal, Callable, List
+from openai.resources.audio.transcriptions import Transcriptions
+from openai.resources.chat import Completions as ChatCompletions
+from openai.resources.completions import Completions
+from openai.resources.embeddings import Embeddings
+from openai.resources.models import Models
+from openai.resources.moderations import Moderations
+from tests.integration_tests.model_runtime.__mock.openai_chat import MockChatClass
+from tests.integration_tests.model_runtime.__mock.openai_completion import MockCompletionsClass
+from tests.integration_tests.model_runtime.__mock.openai_embeddings import MockEmbeddingsClass
+from tests.integration_tests.model_runtime.__mock.openai_moderation import MockModerationClass
+from tests.integration_tests.model_runtime.__mock.openai_remote import MockModelClass
+from tests.integration_tests.model_runtime.__mock.openai_speech2text import MockSpeech2TextClass
-import os
-import pytest
 def mock_openai(monkeypatch: MonkeyPatch, methods: List[Literal["completion", "chat", "remote", "moderation", "speech2text", "text_embedding"]]) -> Callable[[], None]:
     """

View File

@@ -1,27 +1,26 @@
-from openai import OpenAI
-from openai.types import Completion as CompletionMessage
-from openai._types import NotGiven, NOT_GIVEN
-from openai.types.chat import ChatCompletion, ChatCompletionChunk, ChatCompletionMessageParam, \
-    ChatCompletionToolChoiceOptionParam, ChatCompletionToolParam, ChatCompletionMessageToolCall
-from openai.types.chat.chat_completion_chunk import ChoiceDeltaToolCall, ChoiceDeltaFunctionCall,\
-    Choice, ChoiceDelta, ChoiceDeltaToolCallFunction
-from openai.types.chat.chat_completion import Choice as _ChatCompletionChoice, ChatCompletion as _ChatCompletion
-from openai.types.chat.chat_completion_message import FunctionCall, ChatCompletionMessage
-from openai.types.chat.chat_completion_message_tool_call import Function
-from openai.types.completion_usage import CompletionUsage
-from openai.resources.chat.completions import Completions
-from openai import AzureOpenAI
+import re
+from json import dumps, loads
+from time import sleep, time
+# import monkeypatch
+from typing import Any, Generator, List, Literal, Optional, Union
 import openai.types.chat.completion_create_params as completion_create_params
-# import monkeypatch
-from typing import List, Any, Generator, Union, Optional, Literal
-from time import time, sleep
-from json import dumps, loads
 from core.model_runtime.errors.invoke import InvokeAuthorizationError
+from openai import AzureOpenAI, OpenAI
+from openai._types import NOT_GIVEN, NotGiven
+from openai.resources.chat.completions import Completions
+from openai.types import Completion as CompletionMessage
+from openai.types.chat import (ChatCompletion, ChatCompletionChunk, ChatCompletionMessageParam,
+                               ChatCompletionMessageToolCall, ChatCompletionToolChoiceOptionParam,
+                               ChatCompletionToolParam)
+from openai.types.chat.chat_completion import ChatCompletion as _ChatCompletion
+from openai.types.chat.chat_completion import Choice as _ChatCompletionChoice
+from openai.types.chat.chat_completion_chunk import (Choice, ChoiceDelta, ChoiceDeltaFunctionCall, ChoiceDeltaToolCall,
+                                                     ChoiceDeltaToolCallFunction)
+from openai.types.chat.chat_completion_message import ChatCompletionMessage, FunctionCall
+from openai.types.chat.chat_completion_message_tool_call import Function
+from openai.types.completion_usage import CompletionUsage
-import re
 class MockChatClass(object):
     @staticmethod

View File

@@ -1,17 +1,16 @@
-from openai import BadRequestError, OpenAI, AzureOpenAI
-from openai.types import Completion as CompletionMessage
-from openai._types import NotGiven, NOT_GIVEN
-from openai.types.completion import CompletionChoice
-from openai.types.completion_usage import CompletionUsage
-from openai.resources.completions import Completions
+import re
+from time import sleep, time
 # import monkeypatch
-from typing import List, Any, Generator, Union, Optional, Literal
-from time import time, sleep
+from typing import Any, Generator, List, Literal, Optional, Union
 from core.model_runtime.errors.invoke import InvokeAuthorizationError
+from openai import AzureOpenAI, BadRequestError, OpenAI
+from openai._types import NOT_GIVEN, NotGiven
+from openai.resources.completions import Completions
+from openai.types import Completion as CompletionMessage
+from openai.types.completion import CompletionChoice
+from openai.types.completion_usage import CompletionUsage
-import re
 class MockCompletionsClass(object):
     @staticmethod

View File

@@ -1,14 +1,13 @@
-from openai.resources.embeddings import Embeddings
-from openai._types import NotGiven, NOT_GIVEN
-from openai.types.create_embedding_response import CreateEmbeddingResponse, Usage
-from openai.types.embedding import Embedding
-from openai import OpenAI
-from typing import Union, List, Literal, Any
+import re
+from typing import Any, List, Literal, Union
 from core.model_runtime.errors.invoke import InvokeAuthorizationError
+from openai import OpenAI
+from openai._types import NOT_GIVEN, NotGiven
+from openai.resources.embeddings import Embeddings
+from openai.types.create_embedding_response import CreateEmbeddingResponse, Usage
+from openai.types.embedding import Embedding
-import re
 class MockEmbeddingsClass(object):
     def create_embeddings(

View File

@@ -1,13 +1,12 @@
-from openai.resources.moderations import Moderations
-from openai.types import ModerationCreateResponse
-from openai.types.moderation import Moderation, Categories, CategoryScores
-from openai._types import NotGiven, NOT_GIVEN
-from typing import Union, List, Literal, Any
+import re
+from typing import Any, List, Literal, Union
 from core.model_runtime.errors.invoke import InvokeAuthorizationError
+from openai._types import NOT_GIVEN, NotGiven
+from openai.resources.moderations import Moderations
+from openai.types import ModerationCreateResponse
+from openai.types.moderation import Categories, CategoryScores, Moderation
-import re
 class MockModerationClass(object):
     def moderation_create(self: Moderations,*,

View File

@@ -1,8 +1,9 @@
+from time import time
+from typing import List
 from openai.resources.models import Models
 from openai.types.model import Model
-from typing import List
-from time import time
 class MockModelClass(object):
     """

View File

@@ -1,12 +1,11 @@
-from openai.resources.audio.transcriptions import Transcriptions
-from openai._types import NotGiven, NOT_GIVEN, FileTypes
-from openai.types.audio.transcription import Transcription
-from typing import Union, List, Literal, Any
+import re
+from typing import Any, List, Literal, Union
 from core.model_runtime.errors.invoke import InvokeAuthorizationError
+from openai._types import NOT_GIVEN, FileTypes, NotGiven
+from openai.resources.audio.transcriptions import Transcriptions
+from openai.types.audio.transcription import Transcription
-import re
 class MockSpeech2TextClass(object):
     def speech2text_create(self: Transcriptions,

View File

@@ -1,17 +1,17 @@
-from xinference_client.client.restful.restful_client import Client, \
-    RESTfulChatModelHandle, RESTfulGenerateModelHandle, RESTfulChatglmCppChatModelHandle, \
-    RESTfulEmbeddingModelHandle, RESTfulRerankModelHandle
-from xinference_client.types import Embedding, EmbeddingData, EmbeddingUsage
-from requests.sessions import Session
-from requests import Response
-from requests.exceptions import ConnectionError
-from typing import Union, List
-from _pytest.monkeypatch import MonkeyPatch
-import pytest
 import os
 import re
+from typing import List, Union
+import pytest
+from _pytest.monkeypatch import MonkeyPatch
+from requests import Response
+from requests.exceptions import ConnectionError
+from requests.sessions import Session
+from xinference_client.client.restful.restful_client import (Client, RESTfulChatglmCppChatModelHandle,
+                                                              RESTfulChatModelHandle, RESTfulEmbeddingModelHandle,
+                                                              RESTfulGenerateModelHandle, RESTfulRerankModelHandle)
+from xinference_client.types import Embedding, EmbeddingData, EmbeddingUsage
 class MockXinferenceClass(object):
     def get_chat_model(self: Client, model_uid: str) -> Union[RESTfulChatglmCppChatModelHandle, RESTfulGenerateModelHandle, RESTfulChatModelHandle]: