@@ -386,7 +386,7 @@ class OpenAIEmbeddings(BaseModel, Embeddings):
             )
             for j in range(0, len(token), self.embedding_ctx_length):
                 tokens.append(
-                    token[j : j + self.embedding_ctx_length]
+                    token[j: j + self.embedding_ctx_length]
                 )
                 indices.append(i)
@@ -406,7 +406,7 @@ class OpenAIEmbeddings(BaseModel, Embeddings):
         for i in _iter:
             response = embed_with_retry(
                 self,
-                input=tokens[i : i + _chunk_size],
+                input=tokens[i: i + _chunk_size],
                 **self._invocation_params,
             )
             batched_embeddings.extend(
@@ -486,7 +486,7 @@ class OpenAIEmbeddings(BaseModel, Embeddings):
             )
             for j in range(0, len(token), self.embedding_ctx_length):
                 tokens.append(
-                    token[j : j + self.embedding_ctx_length]
+                    token[j: j + self.embedding_ctx_length]
                 )
                 indices.append(i)
@@ -495,7 +495,7 @@ class OpenAIEmbeddings(BaseModel, Embeddings):
         for i in range(0, len(tokens), _chunk_size):
             response = await async_embed_with_retry(
                 self,
-                input=tokens[i : i + _chunk_size],
+                input=tokens[i: i + _chunk_size],
                 **self._invocation_params,
             )
             batched_embeddings.extend(