Construct an `AsyncElasticsearchStore` wrapper from raw documents.
```python
afrom_texts(
    cls,
    texts: List[str],
    embedding: Optional[Embeddings] = None,
    metadatas: Optional[List[Dict[str, Any]]] = None,
    bulk_kwargs: Optional[Dict] = None,
    client: Optional[AsyncElasticsearch] = None,
    **kwargs: Any,
) -> AsyncElasticsearchStore
```

Example:
```python
from langchain_elasticsearch.vectorstores import ElasticsearchStore
from langchain_openai import OpenAIEmbeddings

texts = ["hello world", "hello langchain"]
embeddings = OpenAIEmbeddings()

db = ElasticsearchStore.from_texts(
    texts,
    # embeddings optional if using
    # a strategy that doesn't require inference
    embeddings,
    index_name="langchain-demo",
    es_url="http://localhost:9200",
)
```
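The returned `db` can then be queried through the standard LangChain VectorStore interface; a minimal follow-up sketch (the query string and `k` value are illustrative, not part of this method's API):

```python
# Query the index populated above; the query is embedded with the same
# embedding object and the top-k matching Documents are returned.
results = db.similarity_search("hello", k=2)
for doc in results:
    print(doc.page_content, doc.metadata)
```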
Parameters:

| Name | Type | Default | Description |
|---|---|---|---|
| `texts`* | `List[str]` | required | List of texts to add to the Elasticsearch index. |
| `embedding` | `Optional[Embeddings]` | `None` | Embedding function to use to embed the texts. |
| `metadatas` | `Optional[List[Dict[str, Any]]]` | `None` | Optional list of metadatas associated with the texts. |
| `bulk_kwargs` | `Optional[Dict]` | `None` | Additional arguments to pass to Elasticsearch bulk. |
| `client` | `Optional[AsyncElasticsearch]` | `None` | Optional pre-existing client connection. Alternatively, provide connection credentials (e.g. `es_url`) as keyword arguments. |
| `**kwargs` | `Any` | `{}` | Additional keyword arguments passed to the constructor. See the `AsyncElasticsearchStore` constructor for supported options. |
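For the async signature documented above, a minimal sketch of awaiting `afrom_texts` with a pre-existing `AsyncElasticsearch` client, assuming the parameter names shown in the table (the index name, sample texts, metadata, and `chunk_size` value are illustrative):

```python
import asyncio

from elasticsearch import AsyncElasticsearch
from langchain_elasticsearch.vectorstores import AsyncElasticsearchStore
from langchain_openai import OpenAIEmbeddings


async def main() -> None:
    # Reuse a single async client instead of passing es_url/credentials per call.
    client = AsyncElasticsearch("http://localhost:9200")

    db = await AsyncElasticsearchStore.afrom_texts(
        texts=["first document", "second document"],
        embedding=OpenAIEmbeddings(),
        metadatas=[{"source": "a.txt"}, {"source": "b.txt"}],
        index_name="langchain-demo",
        client=client,
        # Forwarded to the Elasticsearch bulk helper that indexes the texts.
        bulk_kwargs={"chunk_size": 500},
    )

    await client.close()


asyncio.run(main())
```

Passing `client` reuses an existing connection, and `bulk_kwargs` is handed through to the Elasticsearch bulk call, as noted in the parameter table above.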