Cache that stores LLM generations in memory.
```python
InMemoryCache(
    *,
    maxsize: int | None = None,
)
```

Example:
```python
from langchain_core.caches import InMemoryCache
from langchain_core.outputs import Generation

# Initialize cache
cache = InMemoryCache()

# Update cache
cache.update(
    prompt="What is the capital of France?",
    llm_string="model='gpt-3.5-turbo', temperature=0.1",
    return_val=[Generation(text="Paris")],
)

# Lookup cache
result = cache.lookup(
    prompt="What is the capital of France?",
    llm_string="model='gpt-3.5-turbo', temperature=0.1",
)
# result is [Generation(text="Paris")]
```

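The cache can also be registered process-wide so that LLM calls made through LangChain consult it automatically. The following is a minimal sketch, assuming `set_llm_cache` is exposed by `langchain_core.globals` in your installed version of `langchain-core`; it is not part of the example above.

```python
from langchain_core.caches import InMemoryCache
from langchain_core.globals import set_llm_cache

# Register the in-memory cache as the global LLM cache.
# Assumes `set_llm_cache` is available in `langchain_core.globals`
# for the installed langchain-core version.
set_llm_cache(InMemoryCache())

# From here on, LLM wrappers that consult the global cache can reuse
# cached generations for repeated (prompt, llm_string) pairs.
```
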
Parameters:

| Name | Type | Description |
|---|---|---|
| `maxsize` | `int \| None` | Default: `None`. The maximum number of items to store in the cache. If the cache exceeds the maximum size, the oldest items are removed. |
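
As a rough illustration of the eviction rule described above, the sketch below bounds the cache to a single entry, inserts two entries, and then looks up the older one; since a miss makes `lookup` return `None`, the older entry should no longer be found. This is a sketch of the documented behavior (the exact eviction timing is assumed), not an excerpt from the library's test suite.

```python
from langchain_core.caches import InMemoryCache
from langchain_core.outputs import Generation

# Bound the cache to a single entry (illustrative value).
cache = InMemoryCache(maxsize=1)

cache.update("prompt-1", "llm-config", [Generation(text="first")])
cache.update("prompt-2", "llm-config", [Generation(text="second")])

# The oldest entry is evicted to stay within maxsize, so looking it
# up is a cache miss and returns None.
assert cache.lookup("prompt-1", "llm-config") is None
assert cache.lookup("prompt-2", "llm-config") == [Generation(text="second")]
```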