langchain to langchain_community

pull/385/head
evelynmitchell 1 year ago
parent df2fe9fca4
commit 301354978a

@ -1,7 +1,7 @@
import os
from dotenv import load_dotenv
from swarms import Agent
from langchain.llms import OpenAIChat
from langchain_community.llms import OpenAIChat
# Loading environment variables from .env file
load_dotenv()

@ -1,4 +1,4 @@
from langchain.document_loaders import CSVLoader
from langchain_community.document_loaders import CSVLoader
from swarms.memory import qdrant
loader = CSVLoader(

@ -2,10 +2,10 @@ from typing import Callable, List
import numpy as np
import tenacity
from langchain.chat_models import ChatOpenAI
from langchain.output_parsers import RegexParser
from langchain.prompts import PromptTemplate
from langchain.schema import (
from langchain_community.chat_models import ChatOpenAI
from langchain_community.output_parsers import RegexParser
from langchain_community.prompts import PromptTemplate
from langchain_community.schema import (
HumanMessage,
SystemMessage,
)

@ -32,6 +32,7 @@ asyncio = "3.4.3"
einops = "0.7.0"
google-generativeai = "0.3.1"
langchain-experimental = "0.0.10"
langchain-community = "0.0.20"
tensorflow = "*"
weaviate-client = "3.25.3"
opencv-python-headless = "4.8.1.78"

@ -3,6 +3,7 @@ transformers
pandas==1.5.3
langchain==0.0.333
langchain-experimental==0.0.10
langchain-community==0.0.20
httpx==0.24.1
Pillow==9.4.0
faiss-cpu==1.7.4

@ -1,4 +1,4 @@
from langchain.base_language import BaseLanguageModel
from langchain_community.base_language import BaseLanguageModel
from langchain_experimental.autonomous_agents.hugginggpt.repsonse_generator import (
load_response_generator,
)

@ -2,9 +2,9 @@ import os
from typing import Any, List
import faiss
from langchain.docstore import InMemoryDocstore
from langchain.embeddings import OpenAIEmbeddings
from langchain.vectorstores import FAISS
from langchain_community.docstore import InMemoryDocstore
from langchain_community.embeddings import OpenAIEmbeddings
from langchain_community.vectorstores import FAISS
from langchain_experimental.autonomous_agents import AutoGPT
from swarms.utils.decorators import error_decorator, timing_decorator

@ -19,16 +19,16 @@ from typing import (
Union,
)
from langchain.callbacks.manager import (
from langchain_community.callbacks.manager import (
AsyncCallbackManagerForLLMRun,
CallbackManagerForLLMRun,
)
from langchain.llms.base import LLM
from langchain_community.llms.base import LLM
from pydantic import Field, SecretStr, root_validator
from langchain.schema.language_model import BaseLanguageModel
from langchain.schema.output import GenerationChunk
from langchain.schema.prompt import PromptValue
from langchain.utils import (
from langchain_community.schema.language_model import BaseLanguageModel
from langchain_community.schema.output import GenerationChunk
from langchain_community.schema.prompt import PromptValue
from langchain_community.utils import (
get_from_dict_or_env,
)
from packaging.version import parse
@ -360,7 +360,7 @@ class Anthropic(LLM, _AnthropicCommon):
.. code-block:: python
import anthropic
from langchain.llms import Anthropic
from langchain_community.llms import Anthropic
model = Anthropic(model="<model_name>", anthropic_api_key="my-api-key")
@ -386,7 +386,7 @@ class Anthropic(LLM, _AnthropicCommon):
"""Raise warning that this class is deprecated."""
warnings.warn(
"This Anthropic LLM is deprecated. Please use `from"
" langchain.chat_models import ChatAnthropic` instead"
" langchain_communitychat_models import ChatAnthropic` instead"
)
return values

@ -9,15 +9,15 @@ from tenacity import (
wait_exponential,
)
from langchain.callbacks.manager import (
from langchain_community.callbacks.manager import (
AsyncCallbackManagerForLLMRun,
CallbackManagerForLLMRun,
)
from langchain.llms.base import LLM
from langchain.llms.utils import enforce_stop_tokens
from langchain.load.serializable import Serializable
from langchain_community.llms.base import LLM
from langchain_community.llms.utils import enforce_stop_tokens
from langchain_community.load.serializable import Serializable
from pydantic import Extra, Field, root_validator
from langchain.utils import get_from_dict_or_env
from langchain_community.utils import get_from_dict_or_env
logger = logging.getLogger(__name__)
@ -119,7 +119,7 @@ class Cohere(LLM, BaseCohere):
Example:
.. code-block:: python
from langchain.llms import Cohere
from langchain_community.llms import Cohere
cohere = Cohere(model="gptd-instruct-tft", cohere_api_key="my-api-key")
"""

@ -2,7 +2,7 @@ import tempfile
from enum import Enum
from typing import Any, Dict, Union
from langchain.utils import get_from_dict_or_env
from langchain_community.utils import get_from_dict_or_env
from pydantic import root_validator
from swarms.tools.tool import BaseTool

@ -155,7 +155,7 @@ class OpenAIEmbeddings(BaseModel, Embeddings):
Example:
.. code-block:: python
from langchain.embeddings import OpenAIEmbeddings
from langchain_community.embeddings import OpenAIEmbeddings
openai = OpenAIEmbeddings(openai_api_key="my-api-key")
In order to use the library with Microsoft Azure endpoints, you need to set
@ -174,7 +174,7 @@ class OpenAIEmbeddings(BaseModel, Embeddings):
os.environ["OPENAI_API_VERSION"] = "2023-05-15"
os.environ["OPENAI_PROXY"] = "http://your-corporate-proxy:8080"
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain_community.embeddings.openai import OpenAIEmbeddings
embeddings = OpenAIEmbeddings(
deployment="your-embeddings-deployment-name",
model="your-embeddings-model-name",

@ -22,19 +22,19 @@ from typing import (
Union,
)
from langchain.callbacks.manager import (
from langchain_community.callbacks.manager import (
AsyncCallbackManagerForLLMRun,
CallbackManagerForLLMRun,
)
from langchain.llms.base import BaseLLM
from langchain.pydantic_v1 import Field, root_validator
from langchain.schema import Generation, LLMResult
from langchain.schema.output import GenerationChunk
from langchain.utils import (
from langchain_community.llms.base import BaseLLM
from langchain_community.pydantic_v1 import Field, root_validator
from langchain_community.schema import Generation, LLMResult
from langchain_community.schema.output import GenerationChunk
from langchain_community.utils import (
get_from_dict_or_env,
get_pydantic_field_names,
)
from langchain.utils.utils import build_extra_kwargs
from langchain_community.utils.utils import build_extra_kwargs
from tenacity import (
RetryCallState,
before_sleep_log,
@ -796,7 +796,7 @@ class OpenAI(BaseOpenAI):
Example:
.. code-block:: python
from langchain.llms import OpenAI
from langchain_community.llms import OpenAI
openai = OpenAI(model_name="text-davinci-003")
"""
@ -820,7 +820,7 @@ class AzureOpenAI(BaseOpenAI):
Example:
.. code-block:: python
from langchain.llms import AzureOpenAI
from langchain_community.llms import AzureOpenAI
openai = AzureOpenAI(model_name="text-davinci-003")
"""
@ -897,7 +897,7 @@ class OpenAIChat(BaseLLM):
Example:
.. code-block:: python
from langchain.llms import OpenAIChat
from langchain_community.llms import OpenAIChat
openaichat = OpenAIChat(model_name="gpt-3.5-turbo")
"""

@ -3,11 +3,11 @@ from __future__ import annotations
import logging
from typing import Any, Callable, Dict, List, Optional
from langchain.callbacks.manager import CallbackManagerForLLMRun
from langchain.llms import BaseLLM
from langchain.pydantic_v1 import BaseModel, root_validator
from langchain.schema import Generation, LLMResult
from langchain.utils import get_from_dict_or_env
from langchain_community.callbacks.manager import CallbackManagerForLLMRun
from langchain_community.llms import BaseLLM
from langchain_community.pydantic_v1 import BaseModel, root_validator
from langchain_community.schema import Generation, LLMResult
from langchain_community.utils import get_from_dict_or_env
from tenacity import (
before_sleep_log,
retry,

@ -8,7 +8,7 @@ from pydantic import Field
from swarms.utils.serializable import Serializable
if TYPE_CHECKING:
from langchain.prompts.chat import ChatPromptTemplate
from langchain_community.prompts.chat import ChatPromptTemplate
def get_buffer_string(
@ -29,7 +29,7 @@ def get_buffer_string(
Example:
.. code-block:: python
from langchain.schema import AIMessage, HumanMessage
from langchain_community.schema import AIMessage, HumanMessage
messages = [
HumanMessage(content="Hi, how are you?"),
@ -86,7 +86,7 @@ class BaseMessage(Serializable):
return True
def __add__(self, other: Any) -> ChatPromptTemplate:
from langchain.prompts.chat import ChatPromptTemplate
from langchain_community.prompts.chat import ChatPromptTemplate
prompt = ChatPromptTemplate(messages=[self])
return prompt + other

@ -5,7 +5,7 @@ from abc import ABC, abstractmethod
from functools import partial
from typing import Any, Literal, Sequence
from langchain.load.serializable import Serializable
from langchain_community.load.serializable import Serializable
from pydantic import Field

@ -3,7 +3,7 @@ import random
from typing import List
import tenacity
from langchain.output_parsers import RegexParser
from langchain_community.output_parsers import RegexParser
from swarms.structs.agent import Agent
from swarms.utils.logger import logger

@ -3,7 +3,7 @@ import re
from abc import abstractmethod
from typing import Dict, List, NamedTuple
from langchain.schema import BaseOutputParser
from langchain_community.schema import BaseOutputParser
from pydantic import ValidationError
from swarms.tools.tool import BaseTool

@ -19,8 +19,8 @@ from typing import (
Union,
)
from langchain.callbacks.base import BaseCallbackManager
from langchain.callbacks.manager import (
from langchain_community.callbacks.base import BaseCallbackManager
from langchain_community.callbacks.manager import (
AsyncCallbackManager,
AsyncCallbackManagerForToolRun,
CallbackManager,
@ -28,7 +28,7 @@ from langchain.callbacks.manager import (
Callbacks,
)
from langchain.load.serializable import Serializable
from langchain_community.load.serializable import Serializable
from pydantic import (
BaseModel,
Extra,
@ -37,7 +37,7 @@ from pydantic import (
root_validator,
validate_arguments,
)
from langchain.schema.runnable import (
from langchain_community.schema.runnable import (
Runnable,
RunnableConfig,
RunnableSerializable,

@ -100,9 +100,9 @@ Here's the pseudocode algorithm for a `WorkerNode` class that includes a vector
In Python, this could look something like:
```python
from langchain.vectorstores import FAISS
from langchain.docstore import InMemoryDocstore
from langchain.embeddings import OpenAIEmbeddings
from langchain_community.vectorstores import FAISS
from langchain_community.docstore import InMemoryDocstore
from langchain_community.embeddings import OpenAIEmbeddings
import faiss
from swarms.workers.auto_agent import AutoGPT
from collections import deque

@ -47,7 +47,7 @@ def test_stream_speech(eleven_labs_tool):
# Testing fixture and environment variables
def test_api_key_validation(eleven_labs_tool):
with patch(
"langchain.utils.get_from_dict_or_env", return_value=API_KEY
"langchain_communityutils.get_from_dict_or_env", return_value=API_KEY
):
values = {"eleven_api_key": None}
validated_values = eleven_labs_tool.validate_environment(

Loading…
Cancel
Save