
Agents LLM Compiler

LLMCompilerAgentPack #

Bases: BaseLlamaPack

LLMCompilerAgent pack: wraps an LLMCompilerAgentWorker and an AgentRunner behind a single chat-style run() interface.

Parameters:

Name                 Type                       Description                                                 Default
tools                List[BaseTool]             List of tools to use.                                       required
llm                  Optional[LLM]              LLM to use. Falls back to OpenAI gpt-4 when omitted.        None
callback_manager     Optional[CallbackManager]  Callback manager. Defaults to the LLM's callback manager.   None
agent_worker_kwargs  Optional[Dict[str, Any]]   Extra kwargs for LLMCompilerAgentWorker.from_tools.         None
agent_runner_kwargs  Optional[Dict[str, Any]]   Extra kwargs for the AgentRunner constructor.                None
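
A minimal usage sketch (the tool functions and the query are illustrative, and the pack's import path is inferred from the package layout shown below; assumes the llama-index core and llama-index-packs-agents-llm-compiler packages are installed and an OpenAI API key is configured):

from llama_index.core.tools import FunctionTool
from llama_index.packs.agents_llm_compiler import LLMCompilerAgentPack

# Two toy tools for the agent to call.
def add(a: int, b: int) -> int:
    """Add two integers."""
    return a + b

def multiply(a: int, b: int) -> int:
    """Multiply two integers."""
    return a * b

tools = [
    FunctionTool.from_defaults(fn=add),
    FunctionTool.from_defaults(fn=multiply),
]

# llm defaults to OpenAI gpt-4 when omitted.
pack = LLMCompilerAgentPack(tools)
response = pack.run("What is (121 * 3) + 42?")
print(str(response))
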
Source code in llama-index-packs/llama-index-packs-agents-llm-compiler/llama_index/packs/agents_llm_compiler/base.py
class LLMCompilerAgentPack(BaseLlamaPack):
    """LLMCompilerAgent pack.

    Args:
        tools (List[BaseTool]): List of tools to use.
        llm (Optional[LLM]): LLM to use.

    """

    def __init__(
        self,
        tools: List[BaseTool],
        llm: Optional[LLM] = None,
        callback_manager: Optional[CallbackManager] = None,
        agent_worker_kwargs: Optional[Dict[str, Any]] = None,
        agent_runner_kwargs: Optional[Dict[str, Any]] = None,
    ) -> None:
        """Init params."""
        self.llm = llm or OpenAI(model="gpt-4")
        self.callback_manager = callback_manager or self.llm.callback_manager
        self.agent_worker = LLMCompilerAgentWorker.from_tools(
            tools,
            llm=self.llm,
            verbose=True,
            callback_manager=self.callback_manager,
            **(agent_worker_kwargs or {})
        )
        self.agent = AgentRunner(
            self.agent_worker,
            callback_manager=self.callback_manager,
            **(agent_runner_kwargs or {})
        )

    def get_modules(self) -> Dict[str, Any]:
        """Get modules."""
        return {
            "llm": self.llm,
            "callback_manager": self.callback_manager,
            "agent_worker": self.agent_worker,
            "agent": self.agent,
        }

    def run(self, *args: Any, **kwargs: Any) -> Any:
        """Run the pipeline."""
        return self.agent.chat(*args, **kwargs)

get_modules #

get_modules() -> Dict[str, Any]

Get modules.
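
For example, the constituent modules can be pulled out of a pack instance for inspection or reuse (continuing the sketch above, where pack is an LLMCompilerAgentPack):

modules = pack.get_modules()
llm = modules["llm"]                    # the resolved LLM
agent_worker = modules["agent_worker"]  # the LLMCompilerAgentWorker
agent = modules["agent"]                # the AgentRunner wrapping the worker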

Source code in llama-index-packs/llama-index-packs-agents-llm-compiler/llama_index/packs/agents_llm_compiler/base.py
def get_modules(self) -> Dict[str, Any]:
    """Get modules."""
    return {
        "llm": self.llm,
        "callback_manager": self.callback_manager,
        "agent_worker": self.agent_worker,
        "agent": self.agent,
    }

run #

run(*args: Any, **kwargs: Any) -> Any

Run the pipeline.
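
run forwards its arguments to AgentRunner.chat, so it is called with a single user message (a sketch; the query is illustrative):

response = pack.run("Compute 12 * 7, then add 5 to the result.")
print(str(response))
# Equivalent to calling the underlying agent directly:
# pack.agent.chat("Compute 12 * 7, then add 5 to the result.")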

Source code in llama-index-packs/llama-index-packs-agents-llm-compiler/llama_index/packs/agents_llm_compiler/base.py
def run(self, *args: Any, **kwargs: Any) -> Any:
    """Run the pipeline."""
    return self.agent.chat(*args, **kwargs)