Standard ChatGPT
def generate_random_hex(length: int = 17) -> str

Generate a random hex string.

Arguments:
- length (int, optional) - Length of the hex string. Defaults to 17.

Returns:
- str - Random hex string.
def random_int(min: int, max: int) -> int

Generate a random integer.

Arguments:
- min (int) - Minimum value.
- max (int) - Maximum value.

Returns:
- int - Random integer.
def logger(is_timed: bool)

Logger decorator.

Arguments:
- is_timed (bool) - Whether to include the function's running time in the exit log.

Returns:
- The decorated function.
class Chatbot()

Chatbot class for ChatGPT.
@logger(is_timed=True)
def __init__(config: dict[str, str],
conversation_id: str | None = None,
parent_id: str | None = None,
lazy_loading: bool = True,
base_url: str | None = None) -> None

Initialize a chatbot.

Arguments:
- config (dict[str, str]) - Login and proxy info. Example:
  {
      "access_token": "<access_token>",
      "proxy": "<proxy_url_string>",
      "model": "<model_name>",
      "plugin": "<plugin_id>",
  }
  More details on these are available at https://github.com/acheong08/ChatGPT#configuration
- conversation_id (str | None, optional) - ID of the conversation to continue on. Defaults to None.
- parent_id (str | None, optional) - ID of the previous response message to continue on. Defaults to None.
- session_client (type, optional) - Description. Defaults to None.

Raises:
- Exception - Description.
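A minimal construction sketch. The import path (revChatGPT.V1) is not shown in this section and is assumed here; the config keys mirror the example above.

```python
# Sketch: build a Chatbot from a config dict (import path assumed, not documented here).
from revChatGPT.V1 import Chatbot

config = {
    "access_token": "<access_token>",   # required for the access-token login flow
    # "proxy": "<proxy_url_string>",    # optional
    # "model": "<model_name>",          # optional
    # "plugin": "<plugin_id>",          # optional
}

chatbot = Chatbot(config=config)
```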
@logger(is_timed=False)
def set_access_token(access_token: str) -> None

Set the access token in the request header and self.config, then cache it to file.

Arguments:
- access_token (str) - Access token.
@logger(is_timed=True)
def login() -> None

Log in to OpenAI by email and password.
@logger(is_timed=True)
def post_messages(messages: list[dict],
conversation_id: str | None = None,
parent_id: str | None = None,
plugin_ids: list = [],
model: str | None = None,
auto_continue: bool = False,
timeout: float = 360,
**kwargs) -> Generator[dict, None, None]

Ask a question to the chatbot.

Arguments:
- messages (list[dict]) - The messages to send.
- conversation_id (str | None, optional) - UUID of the conversation to continue on. Defaults to None.
- parent_id (str | None, optional) - UUID of the message to continue on. Defaults to None.
- model (str | None, optional) - The model to use. Defaults to None.
- auto_continue (bool, optional) - Whether to continue the conversation automatically. Defaults to False.
- timeout (float, optional) - Timeout for getting the full response, in seconds. Defaults to 360.

Yields:
- Generator[dict, None, None] - The response from the chatbot:
  {
      "message": str,
      "conversation_id": str,
      "parent_id": str,
      "model": str,
      "finish_details": str,  # "max_tokens" or "stop"
      "end_turn": bool,
      "recipient": str,
      "citations": list[dict],
  }
@logger(is_timed=True)
def ask(prompt: str,
conversation_id: str | None = None,
parent_id: str = "",
model: str = "",
plugin_ids: list = [],
auto_continue: bool = False,
timeout: float = 360,
**kwargs) -> Generator[dict, None, None]

Ask a question to the chatbot.

Arguments:
- prompt (str) - The question.
- conversation_id (str, optional) - UUID of the conversation to continue on. Defaults to None.
- parent_id (str, optional) - UUID of the message to continue on. Defaults to "".
- model (str, optional) - The model to use. Defaults to "".
- auto_continue (bool, optional) - Whether to continue the conversation automatically. Defaults to False.
- timeout (float, optional) - Timeout for getting the full response, in seconds. Defaults to 360.

Yields:
- The response from the chatbot:
  {
      "message": str,
      "conversation_id": str,
      "parent_id": str,
      "model": str,
      "finish_details": str,  # "max_tokens" or "stop"
      "end_turn": bool,
      "recipient": str,
  }
@logger(is_timed=True)
def continue_write(conversation_id: str | None = None,
parent_id: str = "",
model: str = "",
auto_continue: bool = False,
timeout: float = 360) -> Generator[dict, None, None]

Let the chatbot continue to write.

Arguments:
- conversation_id (str | None, optional) - UUID of the conversation to continue on. Defaults to None.
- parent_id (str, optional) - UUID of the message to continue on. Defaults to "".
- model (str, optional) - The model to use. Defaults to "".
- auto_continue (bool, optional) - Whether to continue the conversation automatically. Defaults to False.
- timeout (float, optional) - Timeout for getting the full response, in seconds. Defaults to 360.

Yields:
- dict:
  {
      "message": str,
      "conversation_id": str,
      "parent_id": str,
      "model": str,
      "finish_details": str,  # "max_tokens" or "stop"
      "end_turn": bool,
      "recipient": str,
  }
@logger(is_timed=True)
def get_conversations(offset: int = 0,
limit: int = 20,
encoding: str | None = None) -> list

Get conversations.

Arguments:
- offset (int)
- limit (int)
@logger(is_timed=True)
def get_msg_history(convo_id: str, encoding: str | None = None) -> list

Get message history.

Arguments:
- convo_id (str) - UUID of conversation.
- encoding (str, optional)
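A small sketch chaining the two calls; the "id" and "title" keys on each conversation entry follow the ChatGPT backend response and are assumptions here.

```python
# Sketch: list recent conversations and fetch the history of the first one.
conversations = chatbot.get_conversations(offset=0, limit=20)
for convo in conversations:
    print(convo.get("id"), convo.get("title"))   # keys assumed, not documented above

if conversations:
    history = chatbot.get_msg_history(conversations[0]["id"])
    print(history)
```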
def share_conversation(title: str = None,
convo_id: str = None,
node_id: str = None,
anonymous: bool = True) -> str

Create a share link to a conversation.

Arguments:
- convo_id (str) - UUID of conversation.
- node_id (str) - UUID of node.
- anonymous (bool)
- title (str)

Returns:
- str - A URL to the shared link.
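Usage sketch with placeholder IDs; in practice convo_id and node_id come from a previous ask() or get_msg_history() call.

```python
# Sketch: create an anonymous share link for an existing conversation.
convo_id = "<conversation_uuid>"
node_id = "<message_uuid>"

url = chatbot.share_conversation(
    title="My shared chat",
    convo_id=convo_id,
    node_id=node_id,
    anonymous=True,
)
print(url)
```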
@logger(is_timed=True)
def gen_title(convo_id: str, message_id: str) -> str

Generate a title for a conversation.

Arguments:
- convo_id (str) - UUID of conversation.
- message_id (str) - UUID of message.
@logger(is_timed=True)
def change_title(convo_id: str, title: str) -> None

Change the title of a conversation.

Arguments:
- convo_id (str) - UUID of conversation.
- title (str) - New title.
@logger(is_timed=True)
def delete_conversation(convo_id: str) -> None

Delete a conversation.

Arguments:
- convo_id (str) - UUID of conversation.
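The title and deletion helpers chain naturally; a sketch with placeholder IDs.

```python
# Sketch: auto-title, rename, then delete a conversation.
convo_id = "<conversation_uuid>"
message_id = "<message_uuid>"

print(chatbot.gen_title(convo_id, message_id))   # backend-suggested title
chatbot.change_title(convo_id, "Renamed conversation")
chatbot.delete_conversation(convo_id)
```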
@logger(is_timed=True)
def clear_conversations() -> None

Delete all conversations.
@logger(is_timed=False)
def reset_chat() -> None

Reset the conversation ID and parent ID.

Returns:
- None
@logger(is_timed=False)
def rollback_conversation(num: int = 1) -> None

Roll back the conversation.

Arguments:
- num (int) - The number of messages to roll back. Defaults to 1.

Returns:
- None
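rollback_conversation rewinds the locally tracked conversation pointer, while reset_chat clears it entirely so the next ask() starts a fresh thread; a sketch.

```python
# Sketch: rewind the last two messages, then start over from scratch.
chatbot.rollback_conversation(num=2)
chatbot.reset_chat()

for data in chatbot.ask("Start a brand new conversation."):
    pass
print(data["conversation_id"])   # UUID of the new conversation
```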
@logger(is_timed=True)
def get_plugins(offset: int = 0, limit: int = 250, status: str = "approved")

Get plugins.

Arguments:
- offset (int) - Offset (only 0 is supported).
- limit (int) - Limit (must be below 250).
- status (str) - Status of the plugin ("approved").
@logger(is_timed=True)
def install_plugin(plugin_id: str)

Install a plugin by ID.

Arguments:
- plugin_id (str) - ID of the plugin.
@logger(is_timed=True)
def get_unverified_plugin(domain: str, install: bool = True) -> dict

Get an unverified plugin by domain.

Arguments:
- domain (str) - Domain of the plugin.
- install (bool) - Install the plugin if found.
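A hedged sketch of the plugin helpers. The shape of the get_plugins response (an "items" list whose entries carry an "id") follows the ChatGPT backend and is an assumption; only the call signatures are documented above.

```python
# Sketch: list approved plugins, install one by ID, and look up an unverified one.
plugins = chatbot.get_plugins(offset=0, limit=250, status="approved")
for plugin in plugins.get("items", []):          # "items"/"id" keys assumed
    print(plugin.get("id"))

first_id = plugins["items"][0]["id"]
chatbot.install_plugin(plugin_id=first_id)

unverified = chatbot.get_unverified_plugin("example.com", install=False)
```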
class AsyncChatbot(Chatbot)

Async Chatbot class for ChatGPT.
def __init__(config: dict,
conversation_id: str | None = None,
parent_id: str | None = None,
base_url: str | None = None,
lazy_loading: bool = True) -> None

Same as the Chatbot class, but with async methods.
async def post_messages(messages: list[dict],
conversation_id: str | None = None,
parent_id: str | None = None,
plugin_ids: list = [],
model: str | None = None,
auto_continue: bool = False,
timeout: float = 360,
**kwargs) -> AsyncGenerator[dict, None]

Post messages to the chatbot.

Arguments:
- messages (list[dict]) - The messages to post.
- conversation_id (str | None, optional) - UUID of the conversation to continue on. Defaults to None.
- parent_id (str | None, optional) - UUID of the message to continue on. Defaults to None.
- model (str | None, optional) - The model to use. Defaults to None.
- auto_continue (bool, optional) - Whether to continue the conversation automatically. Defaults to False.
- timeout (float, optional) - Timeout for getting the full response, in seconds. Defaults to 360.

Yields:
- AsyncGenerator[dict, None] - The response from the chatbot:
  {
      "message": str,
      "conversation_id": str,
      "parent_id": str,
      "model": str,
      "finish_details": str,
      "end_turn": bool,
      "recipient": str,
      "citations": list[dict],
  }
async def ask(prompt: str,
conversation_id: str | None = None,
parent_id: str = "",
model: str = "",
plugin_ids: list = [],
auto_continue: bool = False,
timeout: int = 360,
**kwargs) -> AsyncGenerator[dict, None]

Ask a question to the chatbot.

Arguments:
- prompt (str) - The question to ask.
- conversation_id (str | None, optional) - UUID of the conversation to continue on. Defaults to None.
- parent_id (str, optional) - UUID of the message to continue on. Defaults to "".
- model (str, optional) - The model to use. Defaults to "".
- auto_continue (bool, optional) - Whether to continue the conversation automatically. Defaults to False.
- timeout (float, optional) - Timeout for getting the full response, in seconds. Defaults to 360.

Yields:
- AsyncGenerator[dict, None] - The response from the chatbot:
  {
      "message": str,
      "conversation_id": str,
      "parent_id": str,
      "model": str,
      "finish_details": str,
      "end_turn": bool,
      "recipient": str,
  }
async def continue_write(conversation_id: str | None = None,
parent_id: str = "",
model: str = "",
auto_continue: bool = False,
timeout: float = 360) -> AsyncGenerator[dict, None]

Let the chatbot continue to write.

Arguments:
- conversation_id (str | None, optional) - UUID of the conversation to continue on. Defaults to None.
- parent_id (str, optional) - UUID of the message to continue on. Defaults to "".
- model (str, optional) - Model to use. Defaults to "".
- auto_continue (bool, optional) - Whether to continue writing automatically. Defaults to False.
- timeout (float, optional) - Timeout for getting the full response, in seconds. Defaults to 360.

Yields:
- AsyncGenerator[dict, None] - The response from the chatbot:
  {
      "message": str,
      "conversation_id": str,
      "parent_id": str,
      "model": str,
      "finish_details": str,
      "end_turn": bool,
      "recipient": str,
  }
async def get_conversations(offset: int = 0, limit: int = 20) -> list

Get conversations.

Arguments:
- offset (int)
- limit (int)
async def get_msg_history(convo_id: str,
encoding: str | None = "utf-8") -> dict

Get message history.

Arguments:
- convo_id (str) - UUID of conversation.
async def share_conversation(title: str = None,
convo_id: str = None,
node_id: str = None,
anonymous: bool = True) -> str

Create a share link to a conversation.

Arguments:
- convo_id (str) - UUID of conversation.
- node_id (str) - UUID of node.

Returns:
- str - A URL to the shared link.
async def gen_title(convo_id: str, message_id: str) -> None

Generate a title for a conversation.
async def change_title(convo_id: str, title: str) -> None

Change the title of a conversation.

Arguments:
- convo_id (str) - UUID of conversation.
- title (str) - New title.
async def delete_conversation(convo_id: str) -> None

Delete a conversation.

Arguments:
- convo_id (str) - UUID of conversation.
async def clear_conversations() -> None

Delete all conversations.
@logger(is_timed=False)
def configure() -> dict

Look for a config file in a set of standard locations and return its contents as a dict.
@logger(is_timed=False)
def main(config: dict) -> NoReturn

Main function for the chatGPT program.
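configure() and main() back the bundled CLI, but the config dict that configure() returns can also be passed straight to Chatbot; a sketch, with the import path again assumed.

```python
# Sketch: load the saved config and reuse it for a programmatic session.
from revChatGPT.V1 import Chatbot, configure   # import path assumed

config = configure()             # reads the config file from a standard location
chatbot = Chatbot(config=config)

for data in chatbot.ask("Hello from the configured client."):
    pass
print(data["message"])
```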