 sjdbmk/cal.py              | 15
 sjdbmk/common.py           | 30
 sjdbmk/daily.py            | 38
 sjdbmk/grant.py            | 16
 sjdbmk/inspire_dl.py       | 12
 sjdbmk/legacy_wikipedia.py | 63
 sjdbmk/menu.py             | 65
 sjdbmk/pack.py             | 22
 sjdbmk/sendmail.py         | 55
 sjdbmk/serve.py            |  6
 sjdbmk/twa.py              | 48
 sjdbmk/weekly.py           | 39
 12 files changed, 324 insertions(+), 85 deletions(-)
diff --git a/sjdbmk/cal.py b/sjdbmk/cal.py
index eccedc3..6ad0fb5 100644
--- a/sjdbmk/cal.py
+++ b/sjdbmk/cal.py
@@ -23,17 +23,24 @@ import datetime
import requests
-def calfetch(token: str, calendar_address: str, datetime_target: datetime.datetime) -> Any:
+def calfetch(
+ token: str, calendar_address: str, datetime_target: datetime.datetime
+) -> Any:
calendar_response = requests.get(
- "https://graph.microsoft.com/v1.0/users/%s/calendar/calendarView" % calendar_address,
+ "https://graph.microsoft.com/v1.0/users/%s/calendar/calendarView"
+ % calendar_address,
headers={"Authorization": "Bearer " + token},
params={
"startDateTime": datetime_target.replace(microsecond=0).isoformat(),
- "endDateTime": (datetime_target + datetime.timedelta(days=7)).replace(microsecond=0).isoformat(),
+ "endDateTime": (datetime_target + datetime.timedelta(days=7))
+ .replace(microsecond=0)
+ .isoformat(),
},
timeout=15,
)
if calendar_response.status_code != 200:
- raise ValueError("Calendar response status code is not 200", calendar_response.content)
+ raise ValueError(
+ "Calendar response status code is not 200", calendar_response.content
+ )
calendar_object = calendar_response.json()
return calendar_object
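
For orientation, calfetch asks Graph for a seven-day calendarView window starting at the target date. A minimal usage sketch; the token value, calendar address, and date below are placeholders, not values from this repository:

    # Hypothetical usage; token, calendar address, and date are placeholders.
    import datetime
    import zoneinfo

    from sjdbmk import cal

    token = "eyJ..."  # an access token obtained elsewhere, e.g. via common.acquire_token()
    target = datetime.datetime(2025, 3, 3, tzinfo=zoneinfo.ZoneInfo("Asia/Shanghai"))
    week = cal.calfetch(token, "calendars@example.org", target)
    for event in week.get("value", []):  # Graph calendarView returns events under "value"
        print(event.get("subject"))
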
diff --git a/sjdbmk/common.py b/sjdbmk/common.py
index 1d315cc..642b251 100644
--- a/sjdbmk/common.py
+++ b/sjdbmk/common.py
@@ -38,7 +38,9 @@ def acquire_token(
graph_client_id,
authority=graph_authority,
)
- result = app.acquire_token_by_username_password(graph_username, graph_password, scopes=graph_scopes)
+ result = app.acquire_token_by_username_password(
+ graph_username, graph_password, scopes=graph_scopes
+ )
if "access_token" in result:
assert isinstance(result["access_token"], str)
@@ -62,24 +64,25 @@ def search_mail(token: str, query_string: str) -> list[dict[str, Any]]:
]
},
timeout=20,
- ).json()["value"][
- 0
- ]["hitsContainers"][
- 0
- ]["hits"]
+ ).json()["value"][0]["hitsContainers"][0]["hits"]
assert isinstance(hits, list)
assert isinstance(hits[0], dict)
return hits
def encode_sharing_url(url: str) -> str:
- return "u!" + base64.urlsafe_b64encode(url.encode("utf-8")).decode("ascii").rstrip("=")
+ return "u!" + base64.urlsafe_b64encode(url.encode("utf-8")).decode("ascii").rstrip(
+ "="
+ )
-def download_share_url(token: str, url: str, local_filename: str, chunk_size: int = 65536) -> None:
+def download_share_url(
+ token: str, url: str, local_filename: str, chunk_size: int = 65536
+) -> None:
download_direct_url = requests.get(
- "https://graph.microsoft.com/v1.0/shares/%s/driveItem" % encode_sharing_url(url),
+ "https://graph.microsoft.com/v1.0/shares/%s/driveItem"
+ % encode_sharing_url(url),
headers={"Authorization": "Bearer " + token},
timeout=20,
).json()["@microsoft.graph.downloadUrl"]
@@ -98,9 +101,14 @@ def download_share_url(token: str, url: str, local_filename: str, chunk_size: in
fd.flush()
-def filter_mail_results_by_sender(original: Iterable[dict[str, Any]], sender: str) -> Iterator[dict[str, Any]]:
+def filter_mail_results_by_sender(
+ original: Iterable[dict[str, Any]], sender: str
+) -> Iterator[dict[str, Any]]:
for hit in original:
- if hit["resource"]["sender"]["emailAddress"]["address"].lower() == sender.lower():
+ if (
+ hit["resource"]["sender"]["emailAddress"]["address"].lower()
+ == sender.lower()
+ ):
yield hit
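
As a side note on the sharing-URL helpers reformatted above: Graph's shares endpoint takes the target URL base64url-encoded, stripped of "=" padding, and prefixed with "u!", which is exactly what encode_sharing_url builds. A standalone illustration, with a placeholder URL:

    # Illustration of the share-id encoding used by download_share_url; the URL is a placeholder.
    import base64

    url = "https://example.sharepoint.com/:p:/g/some-shared-file"
    share_id = "u!" + base64.urlsafe_b64encode(url.encode("utf-8")).decode("ascii").rstrip("=")
    # The id is then used as https://graph.microsoft.com/v1.0/shares/{share_id}/driveItem
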
diff --git a/sjdbmk/daily.py b/sjdbmk/daily.py
index 4c7ad69..6820648 100644
--- a/sjdbmk/daily.py
+++ b/sjdbmk/daily.py
@@ -57,7 +57,9 @@ def main() -> None:
# TODO: Verify validity of date
# TODO: Verify consistency of date elsewhere
)
- parser.add_argument("--config", default="config.ini", help="path to the configuration file")
+ parser.add_argument(
+ "--config", default="config.ini", help="path to the configuration file"
+ )
args = parser.parse_args()
if args.date:
@@ -113,7 +115,9 @@ def generate(
logger.warning('Cycle day not found, using "SA"')
for days_since_beginning in range(0, 5):
- week_start_date = datetime_target - datetime.timedelta(days=days_since_beginning)
+ week_start_date = datetime_target - datetime.timedelta(
+ days=days_since_beginning
+ )
try:
with open(
"week-%s.json" % week_start_date.strftime("%Y%m%d"),
@@ -126,7 +130,9 @@ def generate(
else:
break
else:
- raise FileNotFoundError("Cannot find a week-{date}.json file without five prior days")
+ raise FileNotFoundError(
+ "Cannot find a week-{date}.json file without five prior days"
+ )
try:
aod = week_data["aods"][days_since_beginning]
@@ -145,15 +151,15 @@ def generate(
except KeyError:
breakfast_tomorrow = None
try:
- snack_morning = week_data["snacks"][0][days_since_beginning]
+ snack_morning = week_data["snacks"]["Morning"][days_since_beginning]
except (KeyError, IndexError):
snack_morning = None
try:
- snack_afternoon = week_data["snacks"][1][days_since_beginning]
+ snack_afternoon = week_data["snacks"]["Afternoon"][days_since_beginning]
except (KeyError, IndexError):
snack_afternoon = None
try:
- snack_evening = week_data["snacks"][2][days_since_beginning]
+ snack_evening = week_data["snacks"]["Evening"][days_since_beginning]
except (KeyError, IndexError):
snack_evening = None
@@ -180,9 +186,13 @@ def generate(
inspiration_image_fn = inspjq["file"]
if inspiration_image_fn:
logger.info("Inspiration has attachment %s" % inspiration_image_fn)
- inspiration_image_mime, inspiration_image_extra_encoding = mimetypes.guess_type(inspiration_image_fn)
+ inspiration_image_mime, inspiration_image_extra_encoding = (
+ mimetypes.guess_type(inspiration_image_fn)
+ )
assert not inspiration_image_extra_encoding
- with open("inspattach-%s" % os.path.basename(inspiration_image_fn), "rb") as ifd:
+ with open(
+ "inspattach-%s" % os.path.basename(inspiration_image_fn), "rb"
+ ) as ifd:
inspiration_image_data = base64.b64encode(ifd.read()).decode("ascii")
else:
inspiration_image_data = None
@@ -237,9 +247,11 @@ def generate(
"today_dinner": dinner_today,
"next_breakfast": breakfast_tomorrow,
"the_week_ahead_url": the_week_ahead_url,
- "snack_morning": snack_morning,
- "snack_afternoon": snack_afternoon,
- "snack_evening": snack_evening,
+ "today_snack": {
+ "Morning": snack_morning,
+ "Afternoon": snack_afternoon,
+ "Evening": snack_evening,
+ },
"inspiration_type": inspiration_type,
"inspiration_shared_by": inspiration_shared_by,
"inspiration_origin": inspiration_origin,
@@ -251,7 +263,9 @@ def generate(
"in_the_news_html_en": in_the_news_html_en,
"in_the_news_html_zh": in_the_news_html_zh,
}
- with open("day-%s.json" % datetime_target.strftime("%Y%m%d"), "w", encoding="utf-8") as fd:
+ with open(
+ "day-%s.json" % datetime_target.strftime("%Y%m%d"), "w", encoding="utf-8"
+ ) as fd:
json.dump(data, fd, ensure_ascii=False, indent="\t")
logger.info(
"Data dumped to " + "day-%s.json" % datetime_target.strftime("%Y%m%d"),
diff --git a/sjdbmk/grant.py b/sjdbmk/grant.py
index 2bb761f..7a5407f 100644
--- a/sjdbmk/grant.py
+++ b/sjdbmk/grant.py
@@ -33,7 +33,9 @@ from . import common
# logging.getLogger("msal").setLevel(logging.INFO)
-def acquire_token_interactive(app: msal.PublicClientApplication, config: ConfigParser) -> str:
+def acquire_token_interactive(
+ app: msal.PublicClientApplication, config: ConfigParser
+) -> str:
result = app.acquire_token_interactive(
config["credentials"]["scope"].split(" "),
login_hint=config["credentials"]["username"],
@@ -42,10 +44,14 @@ def acquire_token_interactive(app: msal.PublicClientApplication, config: ConfigP
if "access_token" in result:
assert isinstance(result["access_token"], str)
return result["access_token"]
- raise ValueError("Authentication error while trying to interactively acquire a token")
+ raise ValueError(
+ "Authentication error while trying to interactively acquire a token"
+ )
-def test_login(app: msal.PublicClientApplication, config: ConfigParser) -> dict[str, Any]:
+def test_login(
+ app: msal.PublicClientApplication, config: ConfigParser
+) -> dict[str, Any]:
result = app.acquire_token_by_username_password(
config["credentials"]["username"],
config["credentials"]["password"],
@@ -69,7 +75,9 @@ def test_login(app: msal.PublicClientApplication, config: ConfigParser) -> dict[
def main() -> None:
config = ConfigParser()
if len(sys.argv) != 2 or not os.path.isfile(sys.argv[1]):
- raise common.DailyBulletinError("You must specify a configuration file as the only argument")
+ raise common.DailyBulletinError(
+ "You must specify a configuration file as the only argument"
+ )
config.read(sys.argv[1])
app = msal.PublicClientApplication(
config["credentials"]["client_id"],
diff --git a/sjdbmk/inspire_dl.py b/sjdbmk/inspire_dl.py
index 94fd994..631ea44 100644
--- a/sjdbmk/inspire_dl.py
+++ b/sjdbmk/inspire_dl.py
@@ -35,7 +35,9 @@ def main() -> None:
logging.basicConfig(level=logging.INFO)
parser = argparse.ArgumentParser(description="Download Daily Inspirations")
# parser.add_argument("--changeme", default=None, help="changeme")
- parser.add_argument("--config", default="config.ini", help="path to the configuration file")
+ parser.add_argument(
+ "--config", default="config.ini", help="path to the configuration file"
+ )
args = parser.parse_args()
config = ConfigParser()
@@ -55,7 +57,9 @@ def main() -> None:
assert isinstance(response_json, list)
remote_submission_list = set(response_json)
- local_submission_list = set([sn.lstrip("inspire-") for sn in os.listdir() if sn.startswith("inspire-")])
+ local_submission_list = set(
+ [sn.lstrip("inspire-") for sn in os.listdir() if sn.startswith("inspire-")]
+ )
to_fetch = remote_submission_list - local_submission_list
if to_fetch:
logger.info("Going to fetch: %s" % ", ".join(to_fetch))
@@ -94,7 +98,9 @@ def main() -> None:
timeout=20,
) as r:
with open("inspattach-%s" % os.path.basename(sub["file"]), "wb") as fd:
- logger.info("Saved to inspattach-%s" % os.path.basename(sub["file"]))
+ logger.info(
+ "Saved to inspattach-%s" % os.path.basename(sub["file"])
+ )
shutil.copyfileobj(r.raw, fd)
fd.flush()
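
One caveat about the reformatted set-comprehension line above: str.lstrip("inspire-") strips any leading run of the characters i, n, s, p, r, e and -, not the literal prefix, so a submission id that itself begins with one of those characters would be over-trimmed. A prefix-safe sketch (not part of this commit) for Python 3.9+ would be:

    # Sketch only: remove the literal "inspire-" prefix rather than a character set.
    import os

    local_submission_list = {
        sn.removeprefix("inspire-") for sn in os.listdir() if sn.startswith("inspire-")
    }
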
diff --git a/sjdbmk/legacy_wikipedia.py b/sjdbmk/legacy_wikipedia.py
index 2d66b20..c2f60a1 100644
--- a/sjdbmk/legacy_wikipedia.py
+++ b/sjdbmk/legacy_wikipedia.py
@@ -62,7 +62,21 @@ def get_on_this_day_zh() -> None:
li_element.append(event)
ul_element.append(li_element)
- result = str(p_element).replace("/wiki", "https://zh.wikipedia.org/zh-cn").replace('<span class="otd-year">', "<b>").replace("</span>:", ":</b>") + str(ul_element).replace("/wiki", "https://zh.wikipedia.org/zh-cn").replace("</dt><dd>", " – ").replace('<div class="event">\n<dt>', "").replace("</dd>\n</div>", "")
+ result = str(p_element).replace(
+ "/wiki", "https://zh.wikipedia.org/zh-cn"
+ ).replace('<span class="otd-year">', "<b>").replace(
+ "</span>:", ":</b>"
+ ) + str(
+ ul_element
+ ).replace(
+ "/wiki", "https://zh.wikipedia.org/zh-cn"
+ ).replace(
+ "</dt><dd>", " – "
+ ).replace(
+ '<div class="event">\n<dt>', ""
+ ).replace(
+ "</dd>\n</div>", ""
+ )
result = re.sub(r"<small>.*?图.*?</small>", "", result)
with open("otd_zh-" + formatted_time_yearless + ".html", "w") as file:
@@ -91,7 +105,9 @@ def get_on_this_day_en() -> None:
month = months[index]
day = 1
- url = "https://en.m.wikipedia.org/wiki/Wikipedia:Selected_anniversaries/" + month
+ url = (
+ "https://en.m.wikipedia.org/wiki/Wikipedia:Selected_anniversaries/" + month
+ )
response = requests.get(url, timeout=15)
html = response.text
soup = bs4.BeautifulSoup(html, "html.parser")
@@ -119,7 +135,16 @@ def get_on_this_day_en() -> None:
for li in li_contents:
p_element_2.append(li)
- result = str(p_element).replace("/wiki", "https://en.wikipedia.org/wiki") + str(ul_element).replace("/wiki", "https://en.wikipedia.org/wiki") + "\n" + str(p_element_2).replace("</li><li>", "; ").replace("<li>", "<b>Births and Deaths: </b>").replace("</li>", "").replace("/wiki", "https://en.wikipedia.org/wiki")
+ result = (
+ str(p_element).replace("/wiki", "https://en.wikipedia.org/wiki")
+ + str(ul_element).replace("/wiki", "https://en.wikipedia.org/wiki")
+ + "\n"
+ + str(p_element_2)
+ .replace("</li><li>", "; ")
+ .replace("<li>", "<b>Births and Deaths: </b>")
+ .replace("</li>", "")
+ .replace("/wiki", "https://en.wikipedia.org/wiki")
+ )
result = re.sub(r" <i>.*?icture.*?</i>", "", result)
with open("otd_en-" + formatted_time_yearless + ".html", "w") as file:
@@ -175,7 +200,23 @@ def get_in_the_news_en() -> str:
else:
p_element_3.append(li)
- result = str(ul_element).replace("/wiki", "https://en.wikipedia.org/wiki") + str(p_element_2).replace("</li><li>", "; ").replace("<li>", "<b>Ongoing: </b>").replace("</li>", "").replace("\n;", ";").replace("/wiki", "https://en.wikipedia.org/wiki").replace("</p>", "<br>") + str(p_element_3).replace("</li><li>", "; ").replace("<li>", "<b>Recent deaths: </b>").replace("</li>", "").replace("\n;", ";").replace("/wiki", "https://en.wikipedia.org/wiki").replace("<p>", "")
+ result = (
+ str(ul_element).replace("/wiki", "https://en.wikipedia.org/wiki")
+ + str(p_element_2)
+ .replace("</li><li>", "; ")
+ .replace("<li>", "<b>Ongoing: </b>")
+ .replace("</li>", "")
+ .replace("\n;", ";")
+ .replace("/wiki", "https://en.wikipedia.org/wiki")
+ .replace("</p>", "<br>")
+ + str(p_element_3)
+ .replace("</li><li>", "; ")
+ .replace("<li>", "<b>Recent deaths: </b>")
+ .replace("</li>", "")
+ .replace("\n;", ";")
+ .replace("/wiki", "https://en.wikipedia.org/wiki")
+ .replace("<p>", "")
+ )
result = re.sub(r" <i>\(.*?\)</i>", "", result)
return result
@@ -214,7 +255,11 @@ def get_in_the_news_zh() -> str:
"",
)
.replace("/wiki", "https://zh.wikipedia.org/zh-cn")
- + str(p_element_3).replace('<span class="hlist inline">', "<b>最近逝世:</b>").replace("</span>", "").replace("-", ";").replace("/wiki", "https://zh.wikipedia.org/zh-cn")
+ + str(p_element_3)
+ .replace('<span class="hlist inline">', "<b>最近逝世:</b>")
+ .replace("</span>", "")
+ .replace("-", ";")
+ .replace("/wiki", "https://zh.wikipedia.org/zh-cn")
).replace("</p><p>", "<br>")
result = re.sub(r"<small.*?>.*?</small>", "", result)
@@ -222,8 +267,12 @@ def get_in_the_news_zh() -> str:
def main() -> None:
- parser = argparse.ArgumentParser(description="Legacy Wikipedia script for the Daily Bulletin")
- parser.add_argument("--config", default="config.ini", help="path to the configuration file")
+ parser = argparse.ArgumentParser(
+ description="Legacy Wikipedia script for the Daily Bulletin"
+ )
+ parser.add_argument(
+ "--config", default="config.ini", help="path to the configuration file"
+ )
args = parser.parse_args()
config = configparser.ConfigParser()
diff --git a/sjdbmk/menu.py b/sjdbmk/menu.py
index f6a26a5..85d8589 100644
--- a/sjdbmk/menu.py
+++ b/sjdbmk/menu.py
@@ -37,10 +37,18 @@ def menu_item_fix(s: str) -> Optional[str]:
return None
if s == "Condiments Selection\n葱,香菜,榨菜丝,老干妈,生抽,醋":
return None
- return s.strip().replace("Biscuit /", "Biscuit/").replace("Juice /", "Juice/").replace(" \n", "\n").replace("\n ", "\n")
+ return (
+ s.strip()
+ .replace("Biscuit /", "Biscuit/")
+ .replace("Juice /", "Juice/")
+ .replace(" \n", "\n")
+ .replace("\n ", "\n")
+ )
-def parse_meal_table(rows: list[Any], initrow: int, t: list[str]) -> dict[str, dict[str, list[str]]]:
+def parse_meal_table(
+ rows: list[Any], initrow: int, t: list[str]
+) -> dict[str, dict[str, list[str]]]:
assert rows[initrow + 1][1].value is None
igroups = []
@@ -77,7 +85,9 @@ def parse_meal_table(rows: list[Any], initrow: int, t: list[str]) -> dict[str, d
return ret
-def parse_menus(datetime_target: datetime.datetime) -> dict[str, dict[str, dict[str, list[str]]]]:
+def parse_menus(
+ datetime_target: datetime.datetime,
+) -> dict[str, dict[str, dict[str, list[str]]]]:
logger.info("Parsing menus")
filename = "menu-%s.xlsx" % datetime_target.strftime("%Y%m%d")
wb = openpyxl.load_workbook(filename=filename)
@@ -139,6 +149,37 @@ def parse_menus(datetime_target: datetime.datetime) -> dict[str, dict[str, dict[
return final
+def parse_snacks(datetime_target: datetime.datetime) -> dict[str, list[str]]:
+ logger.info("Parsing snacks")
+ filename = "menu-%s.xlsx" % datetime_target.strftime("%Y%m%d")
+ wb = openpyxl.load_workbook(filename=filename)
+ ws = wb["菜单"]
+ rows = list(ws.iter_rows())
+
+ final = {}
+
+ i = -1
+ while i < len(rows) - 1:
+ i += 1
+ row = rows[i]
+ if not isinstance(row[1].value, str):
+ continue
+ elif "Students Snack" in row[1].value:
+ break
+ else:
+ raise ValueError("snacks not found")
+ i += 2
+ return {
+ "Morning": row[i][2:7],
+ "Afternoon": row[i + 1][2:7],
+ "Evening": row[i + 2][2:7],
+ }
+
+ # parse_meal_table(rows, i)
+
+ return final
+
+
def download_menu(
token: str,
datetime_target: datetime.datetime,
@@ -156,11 +197,16 @@ def download_menu(
weekly_menu_subject_regex_four_groups,
):
try:
- subject_1st_month = datetime.datetime.strptime(matched_groups[0], "%b").month # issues here are probably locales
+ subject_1st_month = datetime.datetime.strptime(
+ matched_groups[0], "%b"
+ ).month # issues here are probably locales
subject_1st_day = int(matched_groups[1])
except ValueError:
raise ValueError(hit["resource"]["subject"], matched_groups[0])
- if subject_1st_month == datetime_target.month and subject_1st_day == datetime_target.day:
+ if (
+ subject_1st_month == datetime_target.month
+ and subject_1st_day == datetime_target.day
+ ):
break
else:
raise ValueError("No menu email found")
@@ -190,7 +236,14 @@ def download_menu(
raise ValueError("No proper attachment found in email")
-def download_or_report_menu(token: str, datetime_target: datetime.datetime, weekly_menu_query_string: str, weekly_menu_sender: str, weekly_menu_subject_regex: str, weekly_menu_subject_regex_four_groups: tuple[int, int, int, int]) -> None:
+def download_or_report_menu(
+ token: str,
+ datetime_target: datetime.datetime,
+ weekly_menu_query_string: str,
+ weekly_menu_sender: str,
+ weekly_menu_subject_regex: str,
+ weekly_menu_subject_regex_four_groups: tuple[int, int, int, int],
+) -> None:
menu_filename = "menu-%s.xlsx" % datetime_target.strftime("%Y%m%d")
if not (os.path.isfile(menu_filename)):
logger.info("Menu not found, downloading")
diff --git a/sjdbmk/pack.py b/sjdbmk/pack.py
index 192cd09..902e256 100644
--- a/sjdbmk/pack.py
+++ b/sjdbmk/pack.py
@@ -34,10 +34,14 @@ def main(date: str, config: ConfigParser) -> None:
"r",
encoding="utf-8",
) as template_file:
- template = Template(template_file.read(), undefined=StrictUndefined, autoescape=True)
+ template = Template(
+ template_file.read(), undefined=StrictUndefined, autoescape=True
+ )
with open(
- os.path.join(config["general"]["build_path"], "day-" + date.replace("-", "") + ".json"),
+ os.path.join(
+ config["general"]["build_path"], "day-" + date.replace("-", "") + ".json"
+ ),
"r",
encoding="utf-8",
) as fd:
@@ -48,7 +52,11 @@ def main(date: str, config: ConfigParser) -> None:
#
# data = data | extra_data
- template.stream(**data).dump(os.path.join(config["general"]["build_path"], "sjdb-%s.html" % date.replace("-", "")))
+ template.stream(**data).dump(
+ os.path.join(
+ config["general"]["build_path"], "sjdb-%s.html" % date.replace("-", "")
+ )
+ )
# FIXME: Escape the dangerous HTML!
@@ -64,14 +72,18 @@ if __name__ == "__main__":
# TODO: Verify validity of date
# TODO: Verify consistency of date elsewhere
)
- parser.add_argument("--config", default="config.ini", help="path to the configuration file")
+ parser.add_argument(
+ "--config", default="config.ini", help="path to the configuration file"
+ )
args = parser.parse_args()
config = ConfigParser()
config.read(args.config)
if args.date:
date = args.date
else:
- now = datetime.datetime.now(zoneinfo.ZoneInfo(config["general"]["timezone"]))
+ now = datetime.datetime.now(
+ zoneinfo.ZoneInfo(config["general"]["timezone"])
+ )
date = (now + datetime.timedelta(days=1)).strftime("%Y-%m-%d")
logging.info("Generating for day %s" % date)
# main(date, config)
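
For reference, the rendering path reformatted above is plain Jinja2: a Template built with undefined=StrictUndefined (so any key missing from the day JSON fails loudly) and autoescape=True, streamed straight to the output file. A minimal sketch with placeholder paths and context data:

    # Minimal sketch of the Template/stream/dump pattern used in pack.main(); paths are placeholders.
    from jinja2 import StrictUndefined, Template

    with open("templates/sjdb.html", "r", encoding="utf-8") as template_file:
        template = Template(
            template_file.read(), undefined=StrictUndefined, autoescape=True
        )

    data = {"title": "placeholder"}  # real context comes from day-YYYYMMDD.json
    # StrictUndefined makes any variable missing from `data` raise at render time.
    template.stream(**data).dump("build/sjdb-20250303.html")
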
diff --git a/sjdbmk/sendmail.py b/sjdbmk/sendmail.py
index 4b58674..b966e49 100644
--- a/sjdbmk/sendmail.py
+++ b/sjdbmk/sendmail.py
@@ -70,7 +70,9 @@ def sendmail(
raise TypeError("Naive datetimes are no longer supported")
utcwhen = when.astimezone(datetime.timezone.utc)
isoval = utcwhen.isoformat(timespec="seconds").replace("+00:00", "Z")
- data["singleValueExtendedProperties"] = [{"id": "SystemTime 0x3FEF", "value": isoval}]
+ data["singleValueExtendedProperties"] = [
+ {"id": "SystemTime 0x3FEF", "value": isoval}
+ ]
if not reply_to:
response = requests.post(
@@ -110,7 +112,8 @@ def sendmail(
if response2.status_code != 202:
pprint(response2.content.decode("utf-8", "replace"))
raise ValueError(
- "Graph response to messages/%s/send returned something other than 202 Accepted" % response["id"],
+ "Graph response to messages/%s/send returned something other than 202 Accepted"
+ % response["id"],
)
return msgid
@@ -136,14 +139,20 @@ def main() -> None:
action="store_true",
help="Send the bulletin right now, instead of at the right time",
)
- parser.add_argument("--config", default="config.ini", help="path to the configuration file")
+ parser.add_argument(
+ "--config", default="config.ini", help="path to the configuration file"
+ )
args = parser.parse_args()
config = ConfigParser()
config.read(args.config)
if args.date:
- date = datetime.datetime.strptime(args.date, "%Y-%m-%d").replace(tzinfo=zoneinfo.ZoneInfo(config["general"]["timezone"]))
+ date = datetime.datetime.strptime(args.date, "%Y-%m-%d").replace(
+ tzinfo=zoneinfo.ZoneInfo(config["general"]["timezone"])
+ )
else:
- date = datetime.datetime.now(zoneinfo.ZoneInfo(config["general"]["timezone"])) + datetime.timedelta(days=1)
+ date = datetime.datetime.now(
+ zoneinfo.ZoneInfo(config["general"]["timezone"])
+ ) + datetime.timedelta(days=1)
os.chdir(config["general"]["build_path"])
@@ -160,11 +169,16 @@ def main() -> None:
if not args.reply:
a = sendmail(
token,
- subject=config["sendmail"]["subject_format"] % date.strftime(config["sendmail"]["subject_date_format"]),
+ subject=config["sendmail"]["subject_format"]
+ % date.strftime(config["sendmail"]["subject_date_format"]),
body=html,
to=config["sendmail"]["to_1"].split(" "),
cc=config["sendmail"]["cc_1"].split(" "),
- bcc=[w.strip() for w in open(config["sendmail"]["bcc_1_file"], "r").readlines() if w.strip()],
+ bcc=[
+ w.strip()
+ for w in open(config["sendmail"]["bcc_1_file"], "r").readlines()
+ if w.strip()
+ ],
when=date.replace(
hour=int(config["sendmail"]["hour"]),
minute=int(config["sendmail"]["minute"]),
@@ -180,11 +194,16 @@ def main() -> None:
fd.write(a)
b = sendmail(
token,
- subject=config["sendmail"]["subject_format"] % date.strftime(config["sendmail"]["subject_date_format"]),
+ subject=config["sendmail"]["subject_format"]
+ % date.strftime(config["sendmail"]["subject_date_format"]),
body=html,
to=config["sendmail"]["to_2"].split(" "),
cc=config["sendmail"]["cc_2"].split(" "),
- bcc=[w.strip() for w in open(config["sendmail"]["bcc_2_file"], "r").readlines() if w.strip()],
+ bcc=[
+ w.strip()
+ for w in open(config["sendmail"]["bcc_2_file"], "r").readlines()
+ if w.strip()
+ ],
when=date.replace(
hour=int(config["sendmail"]["hour"]),
minute=int(config["sendmail"]["minute"]),
@@ -203,11 +222,16 @@ def main() -> None:
last_a = fd.read().strip()
a = sendmail(
token,
- subject=config["sendmail"]["subject_format"] % date.strftime(config["sendmail"]["subject_date_format"]),
+ subject=config["sendmail"]["subject_format"]
+ % date.strftime(config["sendmail"]["subject_date_format"]),
body=html,
to=config["sendmail"]["to_1"].split(" "),
cc=config["sendmail"]["cc_1"].split(" "),
- bcc=[w.strip() for w in open(config["sendmail"]["bcc_1_file"], "r").readlines() if w.strip()],
+ bcc=[
+ w.strip()
+ for w in open(config["sendmail"]["bcc_1_file"], "r").readlines()
+ if w.strip()
+ ],
when=date.replace(
hour=int(config["sendmail"]["hour"]),
minute=int(config["sendmail"]["minute"]),
@@ -226,11 +250,16 @@ def main() -> None:
last_b = fd.read().strip()
b = sendmail(
token,
- subject=config["sendmail"]["subject_format"] % date.strftime(config["sendmail"]["subject_date_format"]),
+ subject=config["sendmail"]["subject_format"]
+ % date.strftime(config["sendmail"]["subject_date_format"]),
body=html,
to=config["sendmail"]["to_2"].split(" "),
cc=config["sendmail"]["cc_2"].split(" "),
- bcc=[w.strip() for w in open(config["sendmail"]["bcc_2_file"], "r").readlines() if w.strip()],
+ bcc=[
+ w.strip()
+ for w in open(config["sendmail"]["bcc_2_file"], "r").readlines()
+ if w.strip()
+ ],
when=date.replace(
hour=int(config["sendmail"]["hour"]),
minute=int(config["sendmail"]["minute"]),
diff --git a/sjdbmk/serve.py b/sjdbmk/serve.py
index 411a5b2..492e443 100644
--- a/sjdbmk/serve.py
+++ b/sjdbmk/serve.py
@@ -65,7 +65,11 @@ def index() -> ResponseType:
with open(
os.path.join(
config["general"]["build_path"],
- "day-%s.json" % (datetime.datetime.now(tz=zoneinfo.ZoneInfo("Asia/Shanghai")) + datetime.timedelta(days=1)).strftime("%Y%m%d"),
+ "day-%s.json"
+ % (
+ datetime.datetime.now(tz=zoneinfo.ZoneInfo("Asia/Shanghai"))
+ + datetime.timedelta(days=1)
+ ).strftime("%Y%m%d"),
),
"r",
encoding="utf-8",
diff --git a/sjdbmk/twa.py b/sjdbmk/twa.py
index b7bb64e..aa301c6 100644
--- a/sjdbmk/twa.py
+++ b/sjdbmk/twa.py
@@ -28,8 +28,12 @@ from . import common
logger = logging.getLogger(__name__)
-def download_or_report_the_week_ahead(token: str, datetime_target: datetime.datetime, the_week_ahead_url: str) -> None:
- the_week_ahead_filename = "the_week_ahead-%s.pptx" % datetime_target.strftime("%Y%m%d")
+def download_or_report_the_week_ahead(
+ token: str, datetime_target: datetime.datetime, the_week_ahead_url: str
+) -> None:
+ the_week_ahead_filename = "the_week_ahead-%s.pptx" % datetime_target.strftime(
+ "%Y%m%d"
+ )
if not os.path.isfile(the_week_ahead_filename):
logger.info("Downloading The Week Ahead to %s" % the_week_ahead_filename)
common.download_share_url(token, the_week_ahead_url, the_week_ahead_filename)
@@ -38,9 +42,15 @@ def download_or_report_the_week_ahead(token: str, datetime_target: datetime.date
logger.info("The Week Ahead already exists at %s" % the_week_ahead_filename)
-def parse_the_week_ahead(datetime_target: datetime.datetime, the_week_ahead_community_time_page_number: int, the_week_ahead_aod_page_number: int) -> tuple[list[list[str]], list[str]]:
+def parse_the_week_ahead(
+ datetime_target: datetime.datetime,
+ the_week_ahead_community_time_page_number: int,
+ the_week_ahead_aod_page_number: int,
+) -> tuple[list[list[str]], list[str]]:
logger.info("Parsing The Week Ahead")
- the_week_ahead_filename = "the_week_ahead-%s.pptx" % datetime_target.strftime("%Y%m%d")
+ the_week_ahead_filename = "the_week_ahead-%s.pptx" % datetime_target.strftime(
+ "%Y%m%d"
+ )
the_week_ahead_presentation = pptx.Presentation(the_week_ahead_filename)
community_time = extract_community_time(
the_week_ahead_presentation,
@@ -50,7 +60,9 @@ def parse_the_week_ahead(datetime_target: datetime.datetime, the_week_ahead_comm
return community_time, aods
-def extract_community_time(prs: pptx.presentation.Presentation, community_time_page_number: int) -> list[list[str]]:
+def extract_community_time(
+ prs: pptx.presentation.Presentation, community_time_page_number: int
+) -> list[list[str]]:
slide = prs.slides[community_time_page_number]
for shape in slide.shapes:
if not shape.has_table:
@@ -62,9 +74,13 @@ def extract_community_time(prs: pptx.presentation.Presentation, community_time_p
row_count = len(tbl.rows)
col_count = len(tbl.columns)
if col_count not in [4, 5]:
- raise ValueError("Community time parsing: The Week Ahead community time table does not have 4 or 5 columns")
+ raise ValueError(
+ "Community time parsing: The Week Ahead community time table does not have 4 or 5 columns"
+ )
if col_count == 4:
- logger.warning("Community time warning: only four columns found, assuming that Y12 has graduated")
+ logger.warning(
+ "Community time warning: only four columns found, assuming that Y12 has graduated"
+ )
res = [["" for c in range(col_count)] for r in range(row_count)]
@@ -79,7 +95,11 @@ def extract_community_time(prs: pptx.presentation.Presentation, community_time_p
t = t.strip()
if "whole school assembly" in t.lower():
t = "Whole School Assembly"
- elif "tutor group check-in" in t.lower() or "follow up day" in t.lower() or "open session for tutor and tutee" in t.lower():
+ elif (
+ "tutor group check-in" in t.lower()
+ or "follow up day" in t.lower()
+ or "open session for tutor and tutee" in t.lower()
+ ):
t = "Tutor Time"
res[r][c] = t.replace("(", " (").replace(")", ") ").replace(" ", " ")
if cell.is_merge_origin:
@@ -90,7 +110,9 @@ def extract_community_time(prs: pptx.presentation.Presentation, community_time_p
return [x[1:] for x in res[1:]]
-def extract_aods(prs: pptx.presentation.Presentation, aod_page_number: int) -> list[str]:
+def extract_aods(
+ prs: pptx.presentation.Presentation, aod_page_number: int
+) -> list[str]:
slide = prs.slides[aod_page_number]
aods = ["", "", "", ""]
for shape in slide.shapes:
@@ -111,8 +133,12 @@ def extract_aods(prs: pptx.presentation.Presentation, aod_page_number: int) -> l
elif day == "thursday":
aods[3] = aod
if not all(aods):
- raise common.DailyBulletinError("The Week Ahead doesn't include all AOD days, or the formatting is borked")
+ raise common.DailyBulletinError(
+ "The Week Ahead doesn't include all AOD days, or the formatting is borked"
+ )
return aods
- raise common.DailyBulletinError("The Week Ahead's doesn't even include an AOD for Monday")
+ raise common.DailyBulletinError(
+ "The Week Ahead's doesn't even include an AOD for Monday"
+ )
# TODO: this is one of those places where Monday is *expected* to be the first day.
# TODO: revamp this. this is ugly!
diff --git a/sjdbmk/weekly.py b/sjdbmk/weekly.py
index aef2f39..fa73d55 100644
--- a/sjdbmk/weekly.py
+++ b/sjdbmk/weekly.py
@@ -73,10 +73,23 @@ def generate(
output_filename = "week-%s.json" % datetime_target.strftime("%Y%m%d")
logger.info("Output filename: %s" % output_filename)
- token = common.acquire_token(graph_client_id, graph_authority, graph_username, graph_password, graph_scopes)
+ token = common.acquire_token(
+ graph_client_id, graph_authority, graph_username, graph_password, graph_scopes
+ )
twa.download_or_report_the_week_ahead(token, datetime_target, the_week_ahead_url)
- menu.download_or_report_menu(token, datetime_target, weekly_menu_query_string, weekly_menu_sender, weekly_menu_subject_regex, weekly_menu_subject_regex_four_groups)
- community_time, aods = twa.parse_the_week_ahead(datetime_target, the_week_ahead_community_time_page_number, the_week_ahead_aod_page_number)
+ menu.download_or_report_menu(
+ token,
+ datetime_target,
+ weekly_menu_query_string,
+ weekly_menu_sender,
+ weekly_menu_subject_regex,
+ weekly_menu_subject_regex_four_groups,
+ )
+ community_time, aods = twa.parse_the_week_ahead(
+ datetime_target,
+ the_week_ahead_community_time_page_number,
+ the_week_ahead_aod_page_number,
+ )
menu_data = menu.parse_menus(datetime_target)
logger.info("Packing final data")
@@ -101,7 +114,9 @@ def main() -> None:
default=None,
help="the start of the week to generate for, in local time, YYYY-MM-DD; defaults to next Monday",
)
- parser.add_argument("--config", default="config.ini", help="path to the configuration file")
+ parser.add_argument(
+ "--config", default="config.ini", help="path to the configuration file"
+ )
args = parser.parse_args()
if args.date:
@@ -117,7 +132,9 @@ def main() -> None:
datetime_target_aware = datetime_target_naive.replace(tzinfo=tzinfo)
else:
datetime_current_aware = datetime.datetime.now(tz=tzinfo)
- datetime_target_aware = datetime_current_aware + datetime.timedelta(days=((-datetime_current_aware.weekday()) % 7))
+ datetime_target_aware = datetime_current_aware + datetime.timedelta(
+ days=((-datetime_current_aware.weekday()) % 7)
+ )
logger.info("Generating for %s" % datetime_target_aware.strftime("%Y-%m-%d %Z"))
build_path = config["general"]["build_path"]
@@ -125,14 +142,20 @@ def main() -> None:
os.chdir(build_path)
the_week_ahead_url = config["the_week_ahead"]["file_url"]
- the_week_ahead_community_time_page_number = int(config["the_week_ahead"]["community_time_page_number"])
+ the_week_ahead_community_time_page_number = int(
+ config["the_week_ahead"]["community_time_page_number"]
+ )
the_week_ahead_aod_page_number = int(config["the_week_ahead"]["aod_page_number"])
weekly_menu_query_string = config["weekly_menu"]["query_string"]
weekly_menu_sender = config["weekly_menu"]["sender"]
weekly_menu_subject_regex = config["weekly_menu"]["subject_regex"]
- weekly_menu_subject_regex_four_groups_raw = config["weekly_menu"]["subject_regex_four_groups"].split(" ")
- weekly_menu_subject_regex_four_groups = tuple([int(z) for z in weekly_menu_subject_regex_four_groups_raw])
+ weekly_menu_subject_regex_four_groups_raw = config["weekly_menu"][
+ "subject_regex_four_groups"
+ ].split(" ")
+ weekly_menu_subject_regex_four_groups = tuple(
+ [int(z) for z in weekly_menu_subject_regex_four_groups_raw]
+ )
assert len(weekly_menu_subject_regex_four_groups) == 4
# weekly_menu_dessert_page_number = config["weekly_menu"]["dessert_page_number"]
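
Finally, a quick check of the default-date arithmetic in weekly.main() above: (-weekday()) % 7 is the number of days from the current day to the coming Monday, and evaluates to 0 when the script is run on a Monday itself.

    # Days until the target Monday for each weekday (0 = Monday ... 6 = Sunday).
    for weekday in range(7):
        print(weekday, (-weekday) % 7)
    # prints: 0 0, 1 6, 2 5, 3 4, 4 3, 5 2, 6 1
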