microcore.types
"""Shared type aliases and LLM-response exception types."""
from typing import Any, Awaitable, Callable, Optional, Union
from os import PathLike

TplFunctionType = Callable[[Union[PathLike[str], str], Any], str]
"""Function type for rendering prompt templates"""
LLMFunctionType = Callable[[str, Any], str]
"""Function type for requesting LLM synchronously"""
LLMAsyncFunctionType = Callable[[str, Any], Awaitable[str]]
"""Function type for requesting LLM asynchronously"""


class BadAIAnswer(ValueError):
    """Unprocessable response generated by the LLM"""

    def __init__(self, message: Optional[str] = None, details: Optional[str] = None):
        # str() guards against non-string message objects being stored.
        self.message = str(message or "Unprocessable response generated by the LLM")
        self.details = details
        # Delegate formatting to __str__ so args and str(exc) can never diverge.
        super().__init__(str(self))

    def __str__(self) -> str:
        return self.message + (f": {self.details}" if self.details else "")


class BadAIJsonAnswer(BadAIAnswer):
    """Invalid JSON generated by the LLM."""

    def __init__(
        self,
        message: str = "Invalid JSON generated by the LLM",
        details: Optional[str] = None,
    ):
        super().__init__(message, details)
TplFunctionType = typing.Callable[[typing.Union[os.PathLike[str], str], typing.Any], str]
Function type for rendering prompt templates
LLMFunctionType = typing.Callable[[str, typing.Any], str]
Function type for requesting LLM synchronously
LLMAsyncFunctionType = typing.Callable[[str, typing.Any], typing.Awaitable[str]]
Function type for requesting LLM asynchronously
class BadAIAnswer(builtins.ValueError):
class BadAIAnswer(ValueError):
    """Unprocessable response generated by the LLM"""

    def __init__(self, message: str = None, details: str = None):
        fallback = "Unprocessable response generated by the LLM"
        self.message = str(message or fallback)
        self.details = details
        # The exception argument is exactly the rendered string form.
        super().__init__(str(self))

    def __str__(self):
        if self.details:
            return f"{self.message}: {self.details}"
        return self.message
Unprocessable response generated by the LLM
Inherited Members
- builtins.BaseException
- with_traceback
- add_note
- args
class BadAIJsonAnswer(BadAIAnswer):
    """Response from the LLM could not be parsed as JSON."""

    def __init__(self, message: str = "Invalid JSON generated by the LLM", details=None):
        super().__init__(message, details)
Unprocessable response generated by the LLM
Inherited Members
- builtins.BaseException
- with_traceback
- add_note
- args