# Tailchat-Assistant/.env.example
# Copy this file to .env and fill in the values below.
HOST=
# The address where your tailchat server is running, including http:// or https://
ID=
# Your bot ID
SECRET=
# Your bot secret
API_ENDPOINT=
# The OpenAI-compatible endpoint for the bot to send messages to. Defaults to "http://localhost:8080/v1"
# e.g. "http://localhost:8080/v1", "https://api.openai.com/v1"
API_KEY=
# Your API key here for OpenAI/LocalAI. Defaults to the string "none".
TEXT_MODEL=
# The model to query when sending text messages. Defaults to "gpt-4"
# e.g. "gpt-3.5-turbo", "gpt-4"
CREATE_IMAGE_MODEL=
# The model to use when creating images. Defaults to "stablediffusion-cpp".
# e.g. "dall-e-3", "stablediffusion-cpp"
ANALYZE_IMAGE_MODEL=
# The model to use when analyzing images. Defaults to "gpt-4-vision-preview".
# e.g. "gpt-4-vision-preview", "llava"
TEMPERATURE=
# The temperature of the TEXT_MODEL. Defaults to 1.0. Minimum is 0.1, max is 2.0.
ALLOWED_CHAT=
# The ID of the chat that the bot is allowed to respond in. Keep in mind that the bot is always able to respond in Direct Messages.
SAFE_WORD=
# When this character/string is detected anywhere in a message, the bot won't respond to it. Defaults to "\".