from abc import ABC, abstractmethod
from typing import Optional
|
class Provider(ABC):
    """Base class for providers.

    Subclasses implement a synchronous chat interface: `ask` returns the
    provider's full response payload as a dict, `chat` returns just the
    generated text, and `get_message` extracts the text from an `ask` payload.
    """

    @abstractmethod
    def ask(
        self,
        prompt: str,
        stream: bool = False,
        raw: bool = False,
        optimizer: Optional[str] = None,
        conversationally: bool = False,
    ) -> dict:
        """Chat with AI.

        Args:
            prompt (str): Prompt to be sent.
            stream (bool, optional): Flag for streaming response. Defaults to False.
            raw (bool, optional): Stream back raw response as received. Defaults to False.
            optimizer (str, optional): Prompt optimizer name - `[code, shell_command]`.
            conversationally (bool, optional): Chat conversationally when using optimizer. Defaults to False.

        Returns:
            dict: Provider response payload, e.g.
            ```json
            {
                "completion": "\nNext: domestic cat breeds with short hair >>",
                "stop_reason": null,
                "truncated": false,
                "stop": null,
                "model": "llama-2-13b-chat",
                "log_id": "cmpl-3kYiYxSNDvgMShSzFooz6t",
                "exception": null
            }
            ```

        Raises:
            NotImplementedError: Always, unless overridden by a subclass.
        """
        raise NotImplementedError("Method needs to be implemented in subclass")

    @abstractmethod
    def chat(
        self,
        prompt: str,
        stream: bool = False,
        optimizer: Optional[str] = None,
        conversationally: bool = False,
    ) -> str:
        """Generate response `str`.

        Args:
            prompt (str): Prompt to be sent.
            stream (bool, optional): Flag for streaming response. Defaults to False.
            optimizer (str, optional): Prompt optimizer name - `[code, shell_command]`.
            conversationally (bool, optional): Chat conversationally when using optimizer. Defaults to False.

        Returns:
            str: Response generated.

        Raises:
            NotImplementedError: Always, unless overridden by a subclass.
        """
        raise NotImplementedError("Method needs to be implemented in subclass")

    @abstractmethod
    def get_message(self, response: dict) -> str:
        """Retrieve the message only from a response.

        Args:
            response (dict): Response generated by `self.ask`.

        Returns:
            str: Message extracted.

        Raises:
            NotImplementedError: Always, unless overridden by a subclass.
        """
        raise NotImplementedError("Method needs to be implemented in subclass")
|
|
|
|
|
|
class AsyncProvider(ABC):
    """Asynchronous base class for providers.

    Async mirror of `Provider`: subclasses implement `ask` (full response
    payload as a dict), `chat` (generated text only), and `get_message`
    (extract the text from an `ask` payload), all as coroutines.
    """

    @abstractmethod
    async def ask(
        self,
        prompt: str,
        stream: bool = False,
        raw: bool = False,
        optimizer: Optional[str] = None,
        conversationally: bool = False,
    ) -> dict:
        """Asynchronously chat with AI.

        Args:
            prompt (str): Prompt to be sent.
            stream (bool, optional): Flag for streaming response. Defaults to False.
            raw (bool, optional): Stream back raw response as received. Defaults to False.
            optimizer (str, optional): Prompt optimizer name - `[code, shell_command]`.
            conversationally (bool, optional): Chat conversationally when using optimizer. Defaults to False.

        Returns:
            dict: Provider response payload, e.g.
            ```json
            {
                "completion": "\nNext: domestic cat breeds with short hair >>",
                "stop_reason": null,
                "truncated": false,
                "stop": null,
                "model": "llama-2-13b-chat",
                "log_id": "cmpl-3kYiYxSNDvgMShSzFooz6t",
                "exception": null
            }
            ```

        Raises:
            NotImplementedError: Always, unless overridden by a subclass.
        """
        raise NotImplementedError("Method needs to be implemented in subclass")

    @abstractmethod
    async def chat(
        self,
        prompt: str,
        stream: bool = False,
        optimizer: Optional[str] = None,
        conversationally: bool = False,
    ) -> str:
        """Asynchronously generate response `str`.

        Args:
            prompt (str): Prompt to be sent.
            stream (bool, optional): Flag for streaming response. Defaults to False.
            optimizer (str, optional): Prompt optimizer name - `[code, shell_command]`.
            conversationally (bool, optional): Chat conversationally when using optimizer. Defaults to False.

        Returns:
            str: Response generated.

        Raises:
            NotImplementedError: Always, unless overridden by a subclass.
        """
        raise NotImplementedError("Method needs to be implemented in subclass")

    @abstractmethod
    async def get_message(self, response: dict) -> str:
        """Asynchronously retrieve the message only from a response.

        Args:
            response (dict): Response generated by `self.ask`.

        Returns:
            str: Message extracted.

        Raises:
            NotImplementedError: Always, unless overridden by a subclass.
        """
        raise NotImplementedError("Method needs to be implemented in subclass")