Skip to content

mock_lm

langroid/language_models/mock_lm.py

Mock Language Model for testing

MockLMConfig

Bases: LLMConfig

Mock Language Model Configuration.

Attributes:

Name Type Description
response_dict Dict[str, str]

A "response rule-book", in the form of a dictionary: if the last message in the dialog is x, then respond with response_dict[x].

MockLM(config=MockLMConfig())

Bases: LanguageModel

Source code in langroid/language_models/mock_lm.py
def __init__(self, config: Optional[MockLMConfig] = None):
    """
    Initialize the mock LM.

    Args:
        config: Configuration carrying the ``response_dict`` rule-book
            used to produce canned replies. When omitted, a fresh
            default ``MockLMConfig`` is created for this instance.
    """
    # Use the None-sentinel idiom instead of `config=MockLMConfig()`:
    # a default instance in the signature is built once at function
    # definition time and would be shared (and mutable) across every
    # MockLM constructed without an explicit config.
    config = MockLMConfig() if config is None else config
    super().__init__(config)
    self.config: MockLMConfig = config

chat(messages, max_tokens=200, tools=None, tool_choice='auto', functions=None, function_call='auto', response_format=None)

Mock chat function for testing

Source code in langroid/language_models/mock_lm.py
def chat(
    self,
    messages: Union[str, List[lm.LLMMessage]],
    max_tokens: int = 200,
    tools: Optional[List[OpenAIToolSpec]] = None,
    tool_choice: ToolChoiceTypes | Dict[str, str | Dict[str, str]] = "auto",
    functions: Optional[List[lm.LLMFunctionSpec]] = None,
    function_call: str | Dict[str, str] = "auto",
    response_format: Optional[OpenAIJsonSchemaSpec] = None,
) -> lm.LLMResponse:
    """
    Return a canned chat response for testing.

    Only the content of the final message is consulted; all other
    arguments exist purely for interface compatibility and are ignored.
    """
    if isinstance(messages, list):
        last_msg = messages[-1].content
    else:
        last_msg = messages
    return self._response(last_msg)

achat(messages, max_tokens=200, tools=None, tool_choice='auto', functions=None, function_call='auto', response_format=None) async

Mock chat function for testing

Source code in langroid/language_models/mock_lm.py
async def achat(
    self,
    messages: Union[str, List[lm.LLMMessage]],
    max_tokens: int = 200,
    tools: Optional[List[OpenAIToolSpec]] = None,
    tool_choice: ToolChoiceTypes | Dict[str, str | Dict[str, str]] = "auto",
    functions: Optional[List[lm.LLMFunctionSpec]] = None,
    function_call: str | Dict[str, str] = "auto",
    response_format: Optional[OpenAIJsonSchemaSpec] = None,
) -> lm.LLMResponse:
    """
    Async variant of ``chat``: return a canned response for testing.

    Only the content of the final message matters; every other argument
    is accepted solely for interface compatibility and is ignored.
    """
    if isinstance(messages, list):
        last_msg = messages[-1].content
    else:
        last_msg = messages
    return await self._response_async(last_msg)

generate(prompt, max_tokens=200)

Mock generate function for testing

Source code in langroid/language_models/mock_lm.py
def generate(self, prompt: str, max_tokens: int = 200) -> lm.LLMResponse:
    """
    Return the canned response for ``prompt`` (for testing).

    ``max_tokens`` is accepted for interface compatibility and ignored.
    """
    canned = self._response(prompt)
    return canned

agenerate(prompt, max_tokens=200) async

Mock generate function for testing

Source code in langroid/language_models/mock_lm.py
async def agenerate(self, prompt: str, max_tokens: int = 200) -> lm.LLMResponse:
    """
    Async mock generate: return the canned response for ``prompt``.

    ``max_tokens`` is accepted for interface compatibility and ignored.
    """
    # Annotation changed from bare `LLMResponse` to `lm.LLMResponse` for
    # consistency with the sibling methods (chat/achat/generate), which
    # all qualify the return type via the `lm` module alias.
    return await self._response_async(prompt)