import getpass
import os

# Prompt for the OpenAI API key only if it isn't already set in the environment.
if not os.environ.get("OPENAI_API_KEY"):
    os.environ["OPENAI_API_KEY"] = getpass.getpass("Enter API key for OpenAI: ")

from langchain_openai import OpenAIEmbeddings

# Embedding model used by the vector stores below.
embeddings = OpenAIEmbeddings(model="text-embedding-3-large")
Azure
pip install -qU langchain-azure-ai
import getpass
import os

# Prompt for the Azure OpenAI API key only if it isn't already set in the environment.
if not os.environ.get("AZURE_OPENAI_API_KEY"):
    os.environ["AZURE_OPENAI_API_KEY"] = getpass.getpass("Enter API key for Azure: ")

from langchain_openai import AzureOpenAIEmbeddings

# Endpoint, deployment name, and API version must already be set in the environment.
embeddings = AzureOpenAIEmbeddings(
    azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],
    azure_deployment=os.environ["AZURE_OPENAI_DEPLOYMENT_NAME"],
    openai_api_version=os.environ["AZURE_OPENAI_API_VERSION"],
)
Google Gemini
pip install -qU langchain-google-genai
import getpass
import os

# Prompt for the Google API key only if it isn't already set in the environment.
if not os.environ.get("GOOGLE_API_KEY"):
    os.environ["GOOGLE_API_KEY"] = getpass.getpass("Enter API key for Google Gemini: ")

from langchain_google_genai import GoogleGenerativeAIEmbeddings

embeddings = GoogleGenerativeAIEmbeddings(model="models/gemini-embedding-001")
Google Vertex
pip install -qU langchain-google-vertexai
from langchain_google_vertexai import VertexAIEmbeddings

# Vertex AI authenticates via Application Default Credentials, so no key prompt is needed here.
embeddings = VertexAIEmbeddings(model="text-embedding-005")
AWS
pip install -qU langchain-aws
from langchain_aws import BedrockEmbeddings

# Bedrock authenticates via standard AWS credentials, so no key prompt is needed here.
embeddings = BedrockEmbeddings(model_id="amazon.titan-embed-text-v2:0")
HuggingFace
pip install -qU langchain-huggingface
from langchain_huggingface import HuggingFaceEmbeddings

# Runs the sentence-transformers model locally; no API key required.
embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-mpnet-base-v2")
Ollama
pip install -qU langchain-ollama
from langchain_ollama import OllamaEmbeddings

# Talks to a locally running Ollama server; no API key required.
embeddings = OllamaEmbeddings(model="llama3")
Cohere
pip install -qU langchain-cohere
import getpass
import os

# Prompt for the Cohere API key only if it isn't already set in the environment.
if not os.environ.get("COHERE_API_KEY"):
    os.environ["COHERE_API_KEY"] = getpass.getpass("Enter API key for Cohere: ")

from langchain_cohere import CohereEmbeddings

embeddings = CohereEmbeddings(model="embed-english-v3.0")
Mistral AI
pip install -qU langchain-mistralai
import getpass
import os

# Prompt for the Mistral API key only if it isn't already set in the environment.
if not os.environ.get("MISTRALAI_API_KEY"):
    os.environ["MISTRALAI_API_KEY"] = getpass.getpass("Enter API key for MistralAI: ")

from langchain_mistralai import MistralAIEmbeddings

embeddings = MistralAIEmbeddings(model="mistral-embed")
Nomic
pip install -qU langchain-nomic
import getpass
import os

# Prompt for the Nomic API key only if it isn't already set in the environment.
if not os.environ.get("NOMIC_API_KEY"):
    os.environ["NOMIC_API_KEY"] = getpass.getpass("Enter API key for Nomic: ")

from langchain_nomic import NomicEmbeddings

embeddings = NomicEmbeddings(model="nomic-embed-text-v1.5")
NVIDIA
pip install -qU langchain-nvidia-ai-endpoints
import getpass
import os

# Prompt for the NVIDIA API key only if it isn't already set in the environment.
if not os.environ.get("NVIDIA_API_KEY"):
    os.environ["NVIDIA_API_KEY"] = getpass.getpass("Enter API key for NVIDIA: ")

from langchain_nvidia_ai_endpoints import NVIDIAEmbeddings

embeddings = NVIDIAEmbeddings(model="NV-Embed-QA")
Voyage AI
pip install -qU langchain-voyageai
import getpass
import os

# Prompt for the Voyage AI API key only if it isn't already set in the environment.
if not os.environ.get("VOYAGE_API_KEY"):
    os.environ["VOYAGE_API_KEY"] = getpass.getpass("Enter API key for Voyage AI: ")

# Fixed: the original read `from langchain-voyageai import ...`, which is a
# SyntaxError — Python module names cannot contain hyphens. The installable
# package is `langchain-voyageai`; the importable module is `langchain_voyageai`.
from langchain_voyageai import VoyageAIEmbeddings

embeddings = VoyageAIEmbeddings(model="voyage-3")
IBM watsonx
pip install -qU langchain-ibm
import getpass
import os

# Prompt for the watsonx API key only if it isn't already set in the environment.
if not os.environ.get("WATSONX_APIKEY"):
    os.environ["WATSONX_APIKEY"] = getpass.getpass("Enter API key for IBM watsonx: ")

from langchain_ibm import WatsonxEmbeddings

embeddings = WatsonxEmbeddings(
    model_id="ibm/slate-125m-english-rtrvr",
    url="https://us-south.ml.cloud.ibm.com",
    project_id="<WATSONX PROJECT_ID>",  # replace with your own project id
)
Fake
pip install -qU langchain-core
from langchain_core.embeddings import DeterministicFakeEmbedding

# Deterministic fake embeddings for testing; produces 4096-dimensional vectors.
embeddings = DeterministicFakeEmbedding(size=4096)
xAI
pip install -qU langchain-xai
import getpass
import os

# Prompt for the xAI API key only if it isn't already set in the environment.
if not os.environ.get("XAI_API_KEY"):
    os.environ["XAI_API_KEY"] = getpass.getpass("Enter API key for xAI: ")

from langchain.chat_models import init_chat_model

# NOTE(review): this section instantiates a chat model, not an embedding model —
# confirm it belongs on this embeddings-selection page.
model = init_chat_model("grok-2", model_provider="xai")
Perplexity
pip install -qU langchain-perplexity
import getpass
import os

# Prompt for the Perplexity API key only if it isn't already set in the environment.
if not os.environ.get("PPLX_API_KEY"):
    os.environ["PPLX_API_KEY"] = getpass.getpass("Enter API key for Perplexity: ")

from langchain.chat_models import init_chat_model

# NOTE(review): this section instantiates a chat model, not an embedding model —
# confirm it belongs on this embeddings-selection page.
model = init_chat_model("llama-3.1-sonar-small-128k-online", model_provider="perplexity")
DeepSeek
pip install -qU langchain-deepseek
import getpass
import os

# Prompt for the DeepSeek API key only if it isn't already set in the environment.
if not os.environ.get("DEEPSEEK_API_KEY"):
    os.environ["DEEPSEEK_API_KEY"] = getpass.getpass("Enter API key for DeepSeek: ")

from langchain.chat_models import init_chat_model

# NOTE(review): this section instantiates a chat model, not an embedding model —
# confirm it belongs on this embeddings-selection page.
model = init_chat_model("deepseek-chat", model_provider="deepseek")
Select vector store:
In-memory
pip install -qU langchain-core
from langchain_core.vectorstores import InMemoryVectorStore

# Simple non-persistent store; `embeddings` must be defined by one of the
# embedding-model snippets above.
vector_store = InMemoryVectorStore(embeddings)
Amazon OpenSearch
pip install -qU opensearch-py langchain-community
pip install -qU boto3
# NOTE(review): the original snippet used `boto3`, `AWS4Auth`, and
# `OpenSearchVectorSearch` without importing them; the imports below restore a
# runnable example — confirm the import paths against the installed packages.
import boto3
from opensearchpy import RequestsHttpConnection
from requests_aws4auth import AWS4Auth
from langchain_community.vectorstores import OpenSearchVectorSearch

service = "es"  # must set the service as 'es'
region = "us-east-2"
# Replace the placeholder credentials with real AWS keys (or rely on the
# default credential chain by constructing boto3.Session() with no arguments).
credentials = boto3.Session(
    aws_access_key_id="xxxxxx", aws_secret_access_key="xxxxx"
).get_credentials()
awsauth = AWS4Auth("xxxxx", "xxxxxx", region, service, session_token=credentials.token)

# `docs` and `embeddings` must be defined earlier in the walkthrough.
vector_store = OpenSearchVectorSearch.from_documents(
    docs,
    embeddings,
    opensearch_url="host url",
    http_auth=awsauth,
    timeout=300,
    use_ssl=True,
    verify_certs=True,
    connection_class=RequestsHttpConnection,
    index_name="test-index",
)
from langchain_chroma import Chroma

# `embeddings` must be defined by one of the embedding-model snippets above.
vector_store = Chroma(
    collection_name="example_collection",
    embedding_function=embeddings,
    persist_directory="./chroma_langchain_db",  # Where to save data locally, remove if not necessary
)
Install the package and start Elasticsearch locally using the start-local script:
pip install -qU langchain-elasticsearch
curl -fsSL https://elastic.co/start-local | sh
This creates an elastic-start-local folder. To start Elasticsearch:
cd elastic-start-local
./start.sh
Elasticsearch will be available at http://localhost:9200. The password for the elastic user and API key are stored in the .env file in the elastic-start-local folder.
from langchain_elasticsearch import ElasticsearchStore

# Connects to the local Elasticsearch started by start-local (see above);
# `embeddings` must be defined by one of the embedding-model snippets above.
vector_store = ElasticsearchStore(
    index_name="langchain-demo",
    embedding=embeddings,
    es_url="http://localhost:9200",
)