Compare commits
No commits in common. "bc2666085c2c76c05c0cac25234dd2bec4eec876" and "e206c4809a0ef7dae7efbc22c35b6ec147385144" have entirely different histories.
bc2666085c...e206c4809a
@@ -59,8 +59,7 @@
 "awake": true
 }
 },
-"question_mark": "?",
-"computing_replies": ["Hmm…"]
+"question_mark": "?"
 },
 "compliments": {
 "sentences": ["you're breathtaking"],

@@ -71,7 +70,8 @@
 },
 "gpt3": {
 "openai_key": "",
-"join_lines": "; "
+"join_lines": "; ",
+"computing_replies": ["Hmm…"]
 },
 "horoscope": {
 "commands": ["horoscope"],
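
Together with the first hunk, "computing_replies" ends up in the "gpt3" section on the new side, which would then read roughly as follows (indentation assumed; the keys and values are taken verbatim from the hunks):

"gpt3": {
    "openai_key": "",
    "join_lines": "; ",
    "computing_replies": ["Hmm…"]
},
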
@@ -90,10 +90,6 @@
 "dates": ["01-01 NYE"],
 "no_entry_reply": "Nothing special today."
 },
-"kagifastgpt": {
-"api_key": "",
-"prompt": ""
-},
 "meteofrance": {
 "commands": ["météo"],
 "aliases": {

@@ -126,8 +122,8 @@
 "pfouah_sentence": "Folks, I think I'm {word}!",
 "pfouah1": ["doo", "da"],
 "pfouah2": ["lo", "di"],
-"pfouah3": ["zzel", "ddle"],
-"pfouah_suffix": "d"
+"pfouah3": ["zzel", "ddle"]
+"pfouah_suffix": "d"]
 },
 "mood": {
 "commands": ["calm down"],

@@ -97,6 +97,9 @@ class Bot(irc.client_aio.AioSimpleIRCClient, Logger):
             self.channels.append(event.target)
         self.run_plugin_callbacks(event)
 
+    async def flubiz(self):
+        self.say('#idi0crates', "acab")
+
     def on_part(self, connection: Connection, event: Event):
         """Handle someone, possibly the bot, leaving a channel."""
         if event.source.nick == self.nick:

@@ -10,7 +10,7 @@ from edmond.utils import limit_text_length
 
 class Gpt3Plugin(Plugin):
 
-    REQUIRED_CONFIGS = ["openai_key", "join_lines"]
+    REQUIRED_CONFIGS = ["openai_key", "computing_replies", "join_lines"]
 
     def __init__(self, bot):
         super().__init__(bot)
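
Because "computing_replies" is now listed in REQUIRED_CONFIGS, a "gpt3" config section missing that key would presumably keep the plugin from becoming ready. A minimal sketch of such a check, assuming the base Plugin class simply verifies that each listed key is present (the actual edmond.plugin.Plugin implementation is not part of this diff):

# Hypothetical sketch only: assumes required-config validation is a plain
# membership test; the real edmond Plugin base class is not shown here.
from typing import Mapping, Sequence


def missing_configs(required: Sequence[str], config: Mapping[str, object]) -> list:
    """Return the required keys that are absent from a plugin's config section."""
    return [key for key in required if key not in config]


required = ["openai_key", "computing_replies", "join_lines"]
config = {"openai_key": "", "join_lines": "; "}
print(missing_configs(required, config))  # ['computing_replies']
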
@@ -1,78 +0,0 @@
-import json
-import random
-import re
-from typing import Optional, Tuple, cast
-
-import requests
-
-from edmond.plugin import Plugin
-from edmond.plugins.plus import PlusPlugin
-from edmond.utils import limit_text_length
-
-
-class KagiFastgptPlugin(Plugin):
-
-    BASE_URL = "https://kagi.com/api/v0/fastgpt"
-    REQUIRED_CONFIGS = ["api_key"]
-
-    def __init__(self, bot):
-        super().__init__(bot)
-        self.api_key = self.config["api_key"]
-        self.prompt = self.config.get("prompt", "")
-
-    def on_welcome(self, _):
-        if not self.api_key:
-            self.bot.log_w("Kagi FastGPT API key unavailable.")
-            self.is_ready = False
-
-    def reply(self, query: str, target: str):
-        computing_reply = random.choice(self.config["computing_replies"])
-        self.bot.say(target, computing_reply)
-
-        output, references = self.complete(query)
-        if output:
-            self.bot.say(target, self.sanitize(output))
-            self.register_references_for_plus(references, target)
-        else:
-            self.signal_failure(target)
-
-    def complete(self, query: str) -> Tuple[Optional[str], list]:
-        try:
-            response = requests.post(
-                self.BASE_URL,
-                headers={"Authorization": f"Bot {self.api_key}"},
-                json={
-                    "query": self.prompt + query
-                }
-            )
-        except requests.RequestException as exc:
-            self.bot.log_e(f"Request error: {exc}")
-            return None, []
-
-        data = response.json().get("data", {})
-        self.bot.log_d(f"Data received: {json.dumps(data)}")
-        output = data.get("output", "")
-        if not output:
-            self.bot.log_w("Empty FastGPT output!")
-            return None, []
-
-        references = data.get("references", [])
-        return output, references
-
-    def register_references_for_plus(
-        self,
-        references: list[str],
-        target: str
-    ) -> None:
-        if references and (plus_plugin := self.bot.get_plugin("plus")):
-            def handler(plus_event):
-                for ref in references[:3]:
-                    message = ref["title"] + " " + ref["url"]
-                    self.bot.say(plus_event.target, message)
-            cast(PlusPlugin, plus_plugin).add_handler(target, handler)
-
-    def sanitize(self, text: str) -> str:
-        text = text.strip()
-        text = re.sub(r"\n+", " — ", text)
-        text = limit_text_length(text)
-        return text
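
The deleted plugin boiled down to a single POST against the Kagi FastGPT endpoint. For reference, a standalone sketch of that call, using only what the removed complete() method shows (the URL, the "Bot" authorization scheme, and the "data"/"output"/"references" response fields); API_KEY is a placeholder:

# Standalone sketch of the request the removed KagiFastgptPlugin.complete()
# made; API_KEY is a placeholder and error handling mirrors the deleted code.
from typing import Optional, Tuple

import requests

API_KEY = "..."  # placeholder, not a real key
BASE_URL = "https://kagi.com/api/v0/fastgpt"


def fastgpt(query: str) -> Tuple[Optional[str], list]:
    # Same endpoint, auth scheme and response fields as the deleted complete().
    try:
        response = requests.post(
            BASE_URL,
            headers={"Authorization": f"Bot {API_KEY}"},
            json={"query": query},
        )
    except requests.RequestException:
        return None, []
    data = response.json().get("data", {})
    return data.get("output") or None, data.get("references", [])
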
@@ -1,5 +1,5 @@
 from edmond.plugin import Plugin
-from edmond.plugins.kagi_fastgpt import KagiFastgptPlugin
+from edmond.plugins.gpt3 import Gpt3Plugin
 
 
 class UnknownCommandPlugin(Plugin):

@@ -7,12 +7,12 @@ class UnknownCommandPlugin(Plugin):
     def __init__(self, bot):
         super().__init__(bot)
         self.priority: int = -6
-        self.gpt_plugin: KagiFastgptPlugin
+        self.gpt3_plugin: Gpt3Plugin
 
     def on_welcome(self, _):
-        self.gpt_plugin = self.bot.get_plugin("kagifastgpt")
-        if self.gpt_plugin is None or not self.gpt_plugin:
-            self.bot.log_w("GPT plugin is not available.")
+        self.gpt3_plugin = self.bot.get_plugin("gpt3")
+        if self.gpt3_plugin is None or not self.gpt3_plugin:
+            self.bot.log_w("GPT-3 plugin is not available.")
             self.is_ready = False
 
     def on_pubmsg(self, event):

@@ -26,5 +26,6 @@ class UnknownCommandPlugin(Plugin):
         query = " ".join(words[:-1])
         if not query.endswith("."):
             query += "."
-        self.gpt_plugin.reply(query, event.target)
+        query += "\n\n"
+        self.gpt3_plugin.reply(query, event.target)
         return True
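
On the new side the query is terminated with a period and then a blank line before being handed to the GPT-3 plugin, presumably so the completion-style prompt ends on a clean sentence boundary. A hypothetical standalone version of that normalization (the helper name is illustrative; as in the hunk, the last word is dropped before joining):

# Hypothetical helper mirroring the normalization done in on_pubmsg above.
def normalize_query(words: list[str]) -> str:
    # Drop the last word, make sure the query ends with ".", then terminate
    # the completion-style prompt with a blank line, as in the hunk above.
    query = " ".join(words[:-1])
    if not query.endswith("."):
        query += "."
    return query + "\n\n"


print(repr(normalize_query(["tell", "me", "something", "edmond"])))
# 'tell me something.\n\n'
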
@@ -17,15 +17,16 @@ class UnknownQuestionPlugin(Plugin):
 
     def on_welcome(self, _):
         self.misc_plugin = self.bot.get_plugin("miscreactions")
-        self.gpt_plugin = self.bot.get_plugin("kagifastgpt")
+        self.gpt3_plugin = self.bot.get_plugin("gpt3")
 
     def on_pubmsg(self, event):
         message = self.should_read_message(event.arguments[0])
         if message is None:
             return False
 
-        if self.gpt_plugin and message.endswith(self.config["question_mark"]):
-            self.gpt_plugin.reply(message, event.target)
+        if self.gpt3_plugin and message.endswith(self.config["question_mark"]):
+            message += "\n\n"
+            self.gpt3_plugin.reply(message, event.target)
         else:
             self.classic_reply(event)
         return True
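
The dispatch condition is the same endswith test in both versions; only the target plugin changes. A tiny standalone illustration, using the "question_mark" value ("?") from the configuration hunks:

# Standalone illustration of the question-detection test used above; the
# "?" value comes from the "question_mark" configuration key.
QUESTION_MARK = "?"


def is_question(message: str) -> bool:
    return message.endswith(QUESTION_MARK)


assert is_question("what time is it?")
assert not is_question("acab")
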
@@ -23,7 +23,7 @@ class WolframAlphaPlugin(Plugin):
 
     def on_welcome(self, _):
         if not self.config["api_key"]:
-            self.bot.log_w("Wolfram API key unavailable.")
+            self.bot.log_w("API key unavailable.")
             self.is_ready = False
 
     def on_pubmsg(self, event):

@@ -40,7 +40,7 @@ class YoutubePlugin(Plugin):
 
     def on_welcome(self, _):
         if not self.config["api_key"]:
-            self.bot.log_w("Youtube API key unavailable.")
+            self.bot.log_w("API key unavailable.")
             self.is_ready = False
 
     def on_pubmsg(self, event):