import datetime
from typing import cast

# BeautifulSoup is optional and only used for scraping journee-mondiale.com,
# which is why we do not mark the dependencies flag here.
try:
    from bs4 import BeautifulSoup
except ImportError:
    BeautifulSoup = None

from edmond.plugin import Plugin
from edmond.plugins.plus import PlusPlugin
from edmond.utils import http_get


class JourneeMondialePlugin(Plugin):
    """This plugin shows today's international observances.

    It used to fetch data from the website journee-mondiale.com, but that
    scraping broke regularly, so the plugin now loads a static list as a
    resource; the website is fetched only if the user sets the "jmcom" config
    value to True.

    The local list uses the format "MM-DD Name", one entry per line, e.g.:

    ```
    01-01 NYE
    08-03 Something the 3rd of August
    12-25 Christmas
    ```

    A list can be found on the UN website but it has to be converted by hand:
    https://www.un.org/en/observances/list-days-weeks
    """

    REQUIRED_CONFIGS = ["commands", "dates", "no_entry_reply"]
    JMCOM_URL = "https://www.journee-mondiale.com"

    def __init__(self, bot):
        super().__init__(bot)

    def on_pubmsg(self, event):
        if not self.should_handle_command(event.arguments[0], no_content=True):
            return False

        # Always reply with the entries from the local list, if any.
        main_reply = self.get_registered_days()
        if main_reply:
            self.bot.say(event.target, main_reply)

        # Optionally scrape journee-mondiale.com as well.
        jmcom_reply = ""
        if self.config.get("jmcom", False) is True:
            jmcom_reply = self.get_jmcom_days(event.target)
            if jmcom_reply:
                self.bot.say(event.target, jmcom_reply)

        if not (main_reply or jmcom_reply):
            self.bot.say(event.target, self.config["no_entry_reply"])

        return True

    def get_registered_days(self) -> str:
        """Get international days from the local list."""
        now = datetime.datetime.now()
        date_tag = f"{now.month:02}-{now.day:02}"
        # Keep only the lines starting with today's "MM-DD" tag and drop the
        # tag itself, leaving the observance names.
        today_obs = map(
            lambda line: line.split(maxsplit=1)[1],
            filter(
                lambda line: line.startswith(date_tag),
                self.config["dates"],
            ),
        )
        return ", ".join(today_obs)

    def get_jmcom_days(self, target) -> str:
        """Get international days from journee-mondiale.com."""
        # bs4 is an optional dependency; bail out silently if it is missing.
        if BeautifulSoup is None:
            return ""

        response = http_get(JourneeMondialePlugin.JMCOM_URL)
        if not response:
            return ""

        soup = BeautifulSoup(response, "html.parser")
        entries = []
        try:
            items = soup.find("div", id="journeesDuJour").find_all("article")
            for item in items:
                if (first_link := item.find("a")) is None:
                    continue
                if (first_title := item.find("h2")) is None:
                    continue
                link = first_link["href"]
                title = first_title.string
                entries.append({"url": link, "title": title})
        except (AttributeError, ValueError, KeyError):
            # AttributeError: the "journeesDuJour" div is missing;
            # KeyError: a link has no href attribute.
            return ""

        # If the "plus" plugin is loaded, register a handler that replies with
        # the URLs of the listed observances.
        if plus_plugin := cast(PlusPlugin, self.bot.get_plugin("plus")):

            def handler(plus_event):
                urls = map(lambda i: i["url"], entries)
                self.bot.say(plus_event.target, " — ".join(urls))

            plus_plugin.add_handler(target, handler)

        return ", ".join(map(lambda i: i["title"], entries))