fix: baichuan max chunks (#1990)

This commit is contained in:
Yeuoly
2024-01-10 23:13:35 +08:00
committed by GitHub
parent 94626487db
commit ebd11e7482
2 changed files with 85 additions and 11 deletions

View File

@@ -59,3 +59,40 @@ def test_get_num_tokens():
)
assert num_tokens == 2
def test_max_chunks():
    """Invoke the embedding model with more texts than fit in one request.

    Baichuan's embedding endpoint caps how many texts a single request may
    carry, so the model implementation must split the inputs into multiple
    chunks and still return exactly one embedding per input text.
    """
    model = BaichuanTextEmbeddingModel()
    # 22 inputs — enough to force the model to chunk across several requests.
    inputs = ["hello", "world"] * 11
    result = model.invoke(
        model='baichuan-text-embedding',
        credentials={
            'api_key': os.environ.get('BAICHUAN_API_KEY'),
        },
        texts=inputs,
    )
    assert isinstance(result, TextEmbeddingResult)
    # One embedding must come back for every input, in order.
    assert len(result.embeddings) == len(inputs)