BaseMemory #

Bases: BaseComponent

Base class for all memory types.

NOTE: The interface for memory is not yet finalized and is subject to change.

Source code in llama-index-core/llama_index/core/memory/types.py
class BaseMemory(BaseComponent):
    """Base class for all memory types.

    NOTE: The interface for memory is not yet finalized and is subject to change.
    """

    @classmethod
    def class_name(cls) -> str:
        """Get class name."""
        return "BaseMemory"

    @classmethod
    @abstractmethod
    def from_defaults(
        cls,
        chat_history: Optional[List[ChatMessage]] = None,
        llm: Optional[LLM] = None,
    ) -> "BaseMemory":
        """Create a chat memory from defaults."""

    @abstractmethod
    def get(self, **kwargs: Any) -> List[ChatMessage]:
        """Get chat history."""

    @abstractmethod
    def get_all(self) -> List[ChatMessage]:
        """Get all chat history."""

    @abstractmethod
    def put(self, message: ChatMessage) -> None:
        """Put chat history."""

    @abstractmethod
    def set(self, messages: List[ChatMessage]) -> None:
        """Set chat history."""

    @abstractmethod
    def reset(self) -> None:
        """Reset chat history."""

class_name classmethod #

class_name() -> str

Get class name.

Source code in llama-index-core/llama_index/core/memory/types.py
@classmethod
def class_name(cls) -> str:
    """Get class name."""
    return "BaseMemory"

from_defaults abstractmethod classmethod #

from_defaults(chat_history: Optional[List[ChatMessage]] = None, llm: Optional[LLM] = None) -> BaseMemory

Create a chat memory from defaults.

Source code in llama-index-core/llama_index/core/memory/types.py
@classmethod
@abstractmethod
def from_defaults(
    cls,
    chat_history: Optional[List[ChatMessage]] = None,
    llm: Optional[LLM] = None,
) -> "BaseMemory":
    """Create a chat memory from defaults."""

get abstractmethod #

get(**kwargs: Any) -> List[ChatMessage]

Get chat history.

Source code in llama-index-core/llama_index/core/memory/types.py
@abstractmethod
def get(self, **kwargs: Any) -> List[ChatMessage]:
    """Get chat history."""

get_all abstractmethod #

get_all() -> List[ChatMessage]

Get all chat history.

Source code in llama-index-core/llama_index/core/memory/types.py
@abstractmethod
def get_all(self) -> List[ChatMessage]:
    """Get all chat history."""

put abstractmethod #

put(message: ChatMessage) -> None

Add a message to the chat history.

Source code in llama-index-core/llama_index/core/memory/types.py
@abstractmethod
def put(self, message: ChatMessage) -> None:
    """Put chat history."""

set abstractmethod #

set(messages: List[ChatMessage]) -> None

Set chat history.

Source code in llama-index-core/llama_index/core/memory/types.py
@abstractmethod
def set(self, messages: List[ChatMessage]) -> None:
    """Set chat history."""

reset abstractmethod #

reset() -> None

Reset chat history.

Source code in llama-index-core/llama_index/core/memory/types.py
@abstractmethod
def reset(self) -> None:
    """Reset chat history."""