diff --git a/config.py b/config.py new file mode 100644 index 0000000..5af306c --- /dev/null +++ b/config.py @@ -0,0 +1,8 @@ +import tomllib +from pprint import pprint + +with open("config.toml", mode="rb") as config_toml: + config = tomllib.load(config_toml) + + +# pprint(config) diff --git a/db.json b/db.json index b54b0fe..0cddaa1 100644 --- a/db.json +++ b/db.json @@ -1,39 +1,27 @@ { "files": { - "hlds_files\\bicyclette.hlds": { - "local_file_path": "hlds_files\\bicyclette.hlds", + "hlds_files/bicyclette.hlds": { + "local_file_path": "hlds_files/bicyclette.hlds", "metadata": { - "sync-to": "menus\\la_bicylette.hlds" + "sync-to": "menus/la_bicylette.hlds" } }, - "hlds_files\\bocca_ovp.hlds": { - "local_file_path": "hlds_files\\bocca_ovp.hlds", + "hlds_files/bocca_ovp.hlds": { + "local_file_path": "hlds_files/bocca_ovp.hlds", "metadata": { - "sync-to": "menus\\bocca_ovp.hlds" + "sync-to": "menus/bocca_ovp.hlds" } }, - "hlds_files\\metropol.hlds": { - "local_file_path": "hlds_files\\metropol.hlds", + "hlds_files/pizza_donna.hlds": { + "local_file_path": "hlds_files/pizza_donna.hlds", "metadata": { - "sync-to": "menus\\pitta_metropol.hlds" + "sync-to": "menus/prima_donna.hlds" } }, - "hlds_files\\pizza_donna.hlds": { - "local_file_path": "hlds_files\\pizza_donna.hlds", + "hlds_files/simpizza.hlds": { + "local_file_path": "hlds_files/simpizza.hlds", "metadata": { - "sync-to": "menus\\prima_donna.hlds" - } - }, - "hlds_files\\s5.hlds": { - "local_file_path": "hlds_files\\s5.hlds", - "metadata": { - "sync-to": "menus\\s5.hlds" - } - }, - "hlds_files\\simpizza.hlds": { - "local_file_path": "hlds_files\\simpizza.hlds", - "metadata": { - "sync-to": "menus\\simpizza.hlds" + "sync-to": "menus/simpizza.hlds" } } } diff --git a/db.py b/db_file.py similarity index 69% rename from db.py rename to db_file.py index 66fb38f..9c4701c 100644 --- a/db.py +++ b/db_file.py @@ -54,6 +54,15 @@ def is_file_different(hlds_file: str, menu_file: str) -> bool: return hlds_hash != menu_hash +def dos2unix(path: str) -> str: + """ + Converts Windows-style backslashes in a file path to Unix-style forward slashes. + :param path: The input file path (string) + :return: The normalized file path with forward slashes + """ + return path.replace("\\", "/") + + def get_manual_file_mapping() -> Dict[str, str]: """ Creates a manual mapping of file names between the hlds_menus directory and the menus directory. 
@@ -64,25 +73,46 @@ def get_manual_file_mapping() -> Dict[str, str]: # Manual mapping of file names file_mapping = { - os.path.join(hlds_dir, "bicyclette.hlds"): os.path.join(menu_dir, "la_bicylette.hlds"), - os.path.join(hlds_dir, "bocca_ovp.hlds"): os.path.join(menu_dir, "bocca_ovp.hlds"), - os.path.join(hlds_dir, "metropol.hlds"): os.path.join(menu_dir, "pitta_metropol.hlds"), - os.path.join(hlds_dir, "pizza_donna.hlds"): os.path.join(menu_dir, "prima_donna.hlds"), - os.path.join(hlds_dir, "s5.hlds"): os.path.join(menu_dir, "s5.hlds"), - os.path.join(hlds_dir, "simpizza.hlds"): os.path.join(menu_dir, "simpizza.hlds"), + dos2unix(os.path.join(hlds_dir, "bicyclette.hlds")): dos2unix(os.path.join(menu_dir, "la_bicylette.hlds")), + dos2unix(os.path.join(hlds_dir, "bocca_ovp.hlds")): dos2unix(os.path.join(menu_dir, "bocca_ovp.hlds")), + dos2unix(os.path.join(hlds_dir, "metropol.hlds")): dos2unix(os.path.join(menu_dir, "pitta_metropol.hlds")), + dos2unix(os.path.join(hlds_dir, "pizza_donna.hlds")): dos2unix(os.path.join(menu_dir, "prima_donna.hlds")), + dos2unix(os.path.join(hlds_dir, "s5.hlds")): dos2unix(os.path.join(menu_dir, "s5.hlds")), + dos2unix(os.path.join(hlds_dir, "simpizza.hlds")): dos2unix(os.path.join(menu_dir, "simpizza.hlds")), # Add more mappings here as needed } return file_mapping +def get_mapped_path(file_path: str) -> str: + """ + Maps a given file path using the manual mapping or returns the same path if not in the mapping. + Ensures the returned path uses forward slashes. + :param file_path: The input file path + :return: The mapped file path or the original path + """ + file_mapping = get_manual_file_mapping() + normalized_path = dos2unix(file_path) + return file_mapping.get(normalized_path, normalized_path) + + def test_file_comparison(): """ Compares all files based on the manual mapping and prints whether they are different or identical. """ - file_mapping = get_manual_file_mapping() + hlds_dir = "hlds_files" + menu_dir = "menus" + + # Get a list of files in the `hlds_files` directory + hlds_files = [ + dos2unix(os.path.join(hlds_dir, f)) + for f in os.listdir(hlds_dir) if os.path.isfile(os.path.join(hlds_dir, f)) + ] + + for hlds_file in hlds_files: + menu_file = get_mapped_path(hlds_file) - for hlds_file, menu_file in file_mapping.items(): if not os.path.exists(hlds_file): print(f"{hlds_file} does not exist. Skipping...") continue diff --git a/hlds_files/simpizza.hlds b/hlds_files/simpizza.hlds index 10bfb48..2b0da21 100644 --- a/hlds_files/simpizza.hlds +++ b/hlds_files/simpizza.hlds @@ -2355,6 +2355,55 @@ dish pasta_chicken_picanto: Pasta Chicken picanto -- Roomsaus, rode pesto, kip, zakje_parmezaanse_kaas: Zakje Parmezaanse kaas € 1.0 bestek: bestek € 0.35 dish pasta_deal: Pasta Deal -- Kleine portie pasta, lookbrood naar keuze, frisdrank € 18.95 + single_choice Spaghetti/penne: Welke Spaghetti/penne + spaghetti: spaghetti € 0.0 + penne: penne € 0.0 + single_choice Drank?: Welke Drank? 
+ cola: Cola € 0.0 + cola_zero: Cola Zero € 0.0 + fanta: Fanta € 0.0 + fanta_exotic: Fanta Exotic € 0.0 + fanta_cassis: Fanta Cassis € 0.0 + fanta_strawberry_&_kiwi: Fanta Strawberry & Kiwi € 0.0 + tropico: Tropico € 0.0 + ice_tea: Ice Tea € 0.0 + ice_tea_peach: Ice Tea Peach € 0.0 + sprite: Sprite € 0.0 + spa_plat: Spa Plat € 0.0 + spa_bruis: Spa Bruis € 0.0 + uludag: Uludag € 0.0 + oasis_tropical: Oasis Tropical € 0.0 + single_choice Garnering: Welke Garnering + scampi: Scampi € 1.5 + kip: kip € 1.5 + ham: Ham € 1.5 + extra_kaas: extra kaas € 1.5 + extra_saus: extra saus € 1.5 + zakje_parmezaanse_kaas: Zakje Parmezaanse kaas € 1.0 + bestek: bestek € 0.35 + single_choice Pasta: Welke Pasta + pasta_bolognese: Pasta Bolognese € 0.0 + pasta_kaassaus: Pasta kaassaus € 0.0 + pasta_kip_en_kaassaus: Pasta kip en kaassaus € 0.0 + pasta_ham_en_kaassaus: Pasta ham en kaassaus € 0.0 + pasta_milano: Pasta Milano € 0.0 + pasta_scampi: Pasta scampi € 0.0 + pasta_multi_cheese: Pasta Multi cheese € 0.0 + pasta_veggie: Pasta Veggie € 0.0 + pasta_pesto_chicken: Pasta Pesto Chicken € 0.0 + pasta_veggie: Pasta Veggie € 0.0 + pasta_exotique: Pasta Exotique € 0.0 + pasta_chicken_pesto_spinazie: Pasta Chicken pesto spinazie € 0.0 + pasta_gambaretti_spinazie: Pasta Gambaretti spinazie € 0.0 + pasta_chicken_arabiata: Pasta Chicken arabiata € 0.0 + pasta_chicken_picanto: Pasta Chicken picanto € 0.0 + single_choice Lookbrood: Welke Lookbrood + lookbrood_natuur: Lookbrood natuur € 0.0 + lookbrood_kaas: Lookbrood kaas € 0.0 + lookbrood_kaas_&_ham: Lookbrood kaas & ham € 0.0 + lookbrood_kaas_&_tomaat: Lookbrood kaas & tomaat € 0.0 + lookbrood_kaas_&_salami: Lookbrood kaas & salami € 0.0 + lookbrood_kaas_&_kip: Lookbrood kaas & kip € 0.0 dish pasta_exotique: Pasta Exotique -- Tomatenroomsaus, ananas, champignons, chili, cherrytomaat, paprika € 13.95 single_choice Spaghetti/penne: Welke Spaghetti/penne spaghetti: spaghetti € 0.0 diff --git a/main.py b/main.py index d5cbe69..5824d56 100644 --- a/main.py +++ b/main.py @@ -149,7 +149,7 @@ def run_scrapers( # Default values default_run_everything: bool = False default_use_parallelism: bool = False - default_restaurant_names = ["bocca_ovp"] + default_restaurant_names = ["s5"] # Parse command-line arguments args = parse_arguments() diff --git a/mattermost_client.py b/mattermost_client.py new file mode 100644 index 0000000..b7c0ad3 --- /dev/null +++ b/mattermost_client.py @@ -0,0 +1,178 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +import pprint as pp +from abc import ABC, abstractmethod +from enum import Enum +from typing import Dict +from config import config + +from colored import Style +from mattermostdriver import Driver + +from mattermost_objects import MMChannelPosts +from utils import timer + +pp = pp.PrettyPrinter(indent=2) + + +class LogLevel(Enum): + INFO = "INFO" + ERROR = "ERROR" + + +class User(ABC): + @abstractmethod + def credentials_dict(self) -> dict: + pass + +class TokenUser(User): + def __init__(self, token): + self.token = token + + def credentials_dict(self) -> dict: + return {"token": self.token} + + def __repr__(self): + return "TokenUser({})".format(self.token) + + +users: Dict[str, User] = {} + + +def loadusers(): + usr = None + for name, data in config["mattermost"]["users"].items(): + if "token" in data: + usr = TokenUser(token=data["token"]) + else: + print("Invalid user '{}' in toml file".format(name)) + exit(1) + users[name] = usr + + +loadusers() + + +def merge_dict(a: dict, b: dict) -> dict: + return {**a, **b} + + +class MMApi(Driver): + def 
__init__(self, user: User = users["tyboro"]): + print(f"Initializing MMApi client for user {user}") + Driver.__init__( + self, + merge_dict( + { + "url": "mattermost.zeus.gent", + "port": 443, + "debug": False, + }, + user.credentials_dict(), + ), + ) + self.login() + self.user_id = self.users.get_user(user_id="me")["id"] + self.team_id = self.teams.get_team_by_name("zeus")["id"] + print(" = Creating mattermost client") + print(f" = - User: {self.user_id}") + print(f" = - Team: {self.team_id}") + + @staticmethod + def print_response(resp, title="Response"): + print("--------") + print(Style.BOLD + title + Style.RESET) + pp.pprint(resp) + + def log(self, text: str, log_level: LogLevel = LogLevel.INFO): + print(f"{Style.BOLD}[{log_level.value}]{Style.RESET} {text}") + + def get_channel_id(self, channel_name): + resp = self.channels.get_channel_by_name(self.team_id, channel_name) + channel_id = resp["id"] + self.log(f"Fetching channel id for {channel_name}: {channel_id}") + return channel_id + + @timer + def get_posts_for_channel(self, channel_id, since): + print(f"Fetching posts for {channel_id} since {since}") + page_size = 200 + page_i = 0 + data = {} + more = True + while more: + resp = self.posts.get_posts_for_channel( + channel_id, + params={"page": page_i, "per_page": page_size, "since": since}, + ) + page_i += 1 + print(f"Fetching page {page_i}") + # print("-", end=" ") + + paged_data = resp["posts"] + paged_count = len(paged_data) + + if since != 0: + # Quirk of the Mattermost API: if the since parameter + # is different from 0, it returns at most 1000 posts + # and does not respect your page_index or page_size. + more = False + else: + if paged_count < page_size: + more = False + + # Transform the data into something more sensible or practical + if type(paged_data) is list: + paged_data = {item["id"]: item for item in paged_data} + + # Append the paged_data to our global data variable + data = {**data, **paged_data} + print() + + self.log(f"Post count: {len(data)}") + return data + + class ChannelApi(MMApi): + def __init__(self, channel_name=None, channel_id=None, user=None): + MMApi.__init__(self, user) + assert channel_name is not None or channel_id is not None + + if channel_name is not None: + self.channel_id = self.get_channel_id(channel_name) + if channel_id is not None: + self.channel_id = channel_id + + def create_post(self, message: str, props: Dict = None) -> None: + resp = self.posts.create_post( + options={"channel_id": self.channel_id, "message": message, "props": props} + ) + self.log(f'Message successfully created: "{message}"') + + def create_threaded_post( + self, post_id: str, message: str, props: Dict = None + ) -> None: + resp = self.posts.create_post( + options={ + "channel_id": self.channel_id, + "message": message, + "root_id": post_id, + "props": props, + } + ) + self.log(f'Message successfully created: "{message}"') + # print_response("Create post", resp) + + +if __name__ == "__main__": + foo = MMApi(user=users["flynn"]) + + # all_posts = foo.get_all_posts() + + channel = foo.channels.get_channel_by_name( + foo.team_id, + "bestuur", + ) + channel_id = channel["id"] + resp = foo.posts.get_posts_for_channel(channel_id, params={"per_page": 200}) + channel_posts: MMChannelPosts = MMChannelPosts.load(resp) diff --git a/mattermost_comunication.py b/mattermost_comunication.py new file mode 100644 index 0000000..f845f56 --- /dev/null +++ b/mattermost_comunication.py @@ -0,0 +1,59 @@ +import mattermostdriver.exceptions + +import mattermost_client +from config import 
config +from mattermost_client import ChannelApi, MMApi + + +def send_message(file_info, message): + channel_id = file_info["originating_mm_post_channel_id"] + post_id = file_info["originating_mm_post_id"] + + # TODO Comment below line, this is for testing purposes + # channel_id = MMApi().get_channel_id("bestuur-dev") + channel = ChannelApi( + channel_id=channel_id, + user=mattermost_client.users[config["mattermost"]["selected_user"]], + ) + + try: + channel.create_threaded_post( + post_id, + f"{message}", + ) + except mattermostdriver.exceptions.InvalidOrMissingParameters as e: + # This will occur when we try to react to a file in a channel that is not the same as the originating channel. + channel.create_post( + f"{message}", + ) + + +def report_newly_found_file(file_info): + git_url = f"https://{config['gitea']['server_url']}/{config['gitea']['remote_org']}/{config['gitea']['remote_repo']}" + message = f"I found a new CodiMD file in this post! Making work of putting it on git :)\n - Requested location in the [drive]({git_url}): {file_info['metadata']['sync-to']}" + send_message(file_info, message) + + +def report_newly_found_but_invalid_file(file_info): + message = """Hi there! :wave: +I'm your friendly neighbourhood document sync bot. +I could synchronize this CodiMD file automatically to our Git DRIVE for safekeeping, but the necessary metadata block is not present. +You can easily add the correct info and I will do the rest of the work for you! + +Just add the following lines to your file, the location in your file is not important but at the top would be my recommendation. + +``` +:::spoiler git drive sync +- sync-to: +::: +```""" + send_message(file_info, message) + + +# send_message( +# { +# "originating_mm_post_channel_id": "dm1abp4wfidezmig1yqyu53mmy", +# "originating_mm_post_id": "dm1abp4wfidezmig1yqyu53mmy" +# }, +# "haldis_sync is started" +# ) diff --git a/mattermost_objects.py b/mattermost_objects.py new file mode 100644 index 0000000..588d1ff --- /dev/null +++ b/mattermost_objects.py @@ -0,0 +1,137 @@ +from typing import Dict, List, NamedTuple + + +class MMUser(NamedTuple): + id: str + create_at: int + update_at: int + delete_at: int + username: str + first_name: str + last_name: str + nickname: str + email: str + auth_data: str + auth_service: str + roles: str + locale: str + timezone: dict + position: any + + is_bot: bool = None + bot_description: str = None + email_verified: bool = None + notify_props: dict = None + last_password_update: int = None + failed_attempts: int = None + mfa_active: bool = False + terms_of_service_id: str = None + terms_of_service_create_at: int = None + props: dict = {} + last_picture_update: int = None + + @staticmethod + def load(data): + try: + return MMUser(**data) + except TypeError as e: + print("[ERROR] Could not load dict into MMUser namedtuple") + print(str(e)) + + +class MMPostProps(NamedTuple): + from_webhook: str = False + override_icon_url: str = None + override_username: str = None + webhook_display_name: str = None + + channel_mentions: Dict = None + matterircd_krcggydky38kdcuubsc7fddc7w: str = None + matterircd_s4ptwhx7wfnx7qwexp1khorh7e: str = None + username: str = None + userId: str = None + old_header: str = None + new_header: str = None + old_purpose: str = None + new_purpose: str = None + old_displayname: str = None + new_displayname: str = None + remove_link_preview: str = None + removedUserId: str = None + addedUserId: str = None + removedUsername: str = None + addedUsername: str = None + message: str = None + 
attachments: str = None + from_bot: str = False + disable_group_highlight: str = None + + +class MMPost(NamedTuple): + channel_id: str + create_at: int + delete_at: int + edit_at: int + hashtags: str + id: str + is_pinned: bool + message: str + metadata: Dict + original_id: str + pending_post_id: str + root_id: str + type: str + update_at: int + user_id: str + parent_id: str = None + message_source: str = None + has_reactions: bool = None + file_ids: List[str] = None + props: MMPostProps = None + reply_count: int = None + last_reply_at: str = None + participants: any = None + + def from_human(self): + return self.props is None or ( + self.props.from_webhook is False and self.props.from_bot is False + ) + + @staticmethod + def load(data): + try: + props = None + if "props" in data: + try: + props: MMPostProps = MMPostProps(**data["props"]) + except TypeError as e: + print("[ERROR] Could not load dict into MMPostProps namedtuple") + print(str(e)) + del data["props"] + return MMPost(props=props, **data) + except TypeError as e: + print("[ERROR] Could not load dict into MMPost namedtuple") + print(str(e)) + + +class MMChannelPosts(NamedTuple): + prev_post_id: str + next_post_id: str + order: List[str] + posts: Dict[str, MMPost] + has_next: any + first_inaccessible_post_time: any + reply_count: any = None + disable_group_highlight: any = None + + @staticmethod + def load(data): + try: + posts: Dict[str, MMPost] = { + k: MMPost.load(v) for (k, v) in data["posts"].items() + } + del data["posts"] + return MMChannelPosts(posts=posts, **data) + except TypeError as e: + print("[ERROR] Could not load dict into MMUser namedtuple") + print(str(e)) diff --git a/requirements.txt b/requirements.txt index 20d86e3..ba9c129 100644 --- a/requirements.txt +++ b/requirements.txt @@ -6,4 +6,6 @@ PyPDF2==3.0.1 pdfplumber==0.11.4 requests==2.32.3 selenium==4.27.1 -seleniumbase==4.33.11 \ No newline at end of file +seleniumbase==4.33.11 +mattermostdriver +colored \ No newline at end of file diff --git a/run_sync.py b/run_sync.py index 2efc6fd..81fae92 100644 --- a/run_sync.py +++ b/run_sync.py @@ -1,6 +1,6 @@ import traceback -import db +import db_file as db # import dir_utils # import mattermost_client # import mattermost_communication @@ -9,7 +9,7 @@ def sync_files_to_gitea(): repo, api_handler = sync.init_sync() - print(db.get_files().items()) + # print(db.get_files().items()) for file_id, file_info in db.get_files().items(): # print(file_id, file_info) try: @@ -19,6 +19,17 @@ def sync_files_to_gitea(): traceback.print_exc() +def sync_gitmate(): + print() + print("================================================") + print("== Syncing files to git ==") + sync_files_to_gitea() + print() + return { + "synced": "success" + } + + if __name__ == "__main__": print() print("================================================") diff --git a/scraper_data.db b/scraper_data.db index 77684b6..a5c0cc3 100644 Binary files a/scraper_data.db and b/scraper_data.db differ diff --git a/sync_gitmate.py b/sync_gitmate.py index 6e85d08..0fa2ef3 100644 --- a/sync_gitmate.py +++ b/sync_gitmate.py @@ -9,6 +9,8 @@ import tomllib from pprint import pprint +from mattermost_comunication import send_message + # import mattermost_communication with open("config.toml", mode="rb") as config_toml: @@ -23,6 +25,11 @@ GIT_ORG = config["gitea"]["remote_org"] GIT_REPO = config["gitea"]["remote_repo"] +conf = { + "originating_mm_post_channel_id": "dm1abp4wfidezmig1yqyu53mmy", + "originating_mm_post_id": "dm1abp4wfidezmig1yqyu53mmy" +} + def init_sync(): 
repo = get_repo() @@ -60,17 +67,55 @@ def clear_repo(repo): repo.git.restore("--", "*") +def prune_remote(repo): + """ + Prunes stale remote branches for the 'origin' remote. + """ + print("Pruning stale remote branches...") + repo.git.remote("prune", "origin") + print("Pruning complete.") + + +def delete_stale_local_branches(repo): + """ + Deletes stale local branches that no longer exist on the remote. + """ + print("Checking for stale local branches...") + remote_refs = [ref.strip() for ref in repo.git.branch("-r").split("\n")] + local_branches = [ref.strip("* ").strip() for ref in repo.git.branch().split("\n")] + + # Identify local branches that are no longer on the remote + for branch in local_branches: + remote_branch_ref = f"origin/{branch}" + if branch != "master" and remote_branch_ref not in remote_refs: + print(f"Deleting stale local branch: {branch}") + repo.git.branch("-D", branch) # Force delete the branch + print("Local cleanup complete.") + + def checkout_branch(repo, branch_name): repo.git.switch("master") - branches = repo.git.branch() - # Print the branches - print("Available branches:\n", branches) - if branch_name in repo.heads: - # repo.git.branch("-D", branch_name) # Force delete the branch - repo.git.switch(branch_name) + prune_remote(repo) + delete_stale_local_branches(repo) + # status = repo.git.status() + # print("\nGit Status:\n", status) + repo.git.fetch("--all") + # Get a list of all remote branches + remote_branches = [ref.strip() for ref in repo.git.branch("-r").split("\n")] + # print(remote_branches) + remote_branch_full = f"origin/{branch_name}" + if remote_branch_full in remote_branches: + # If the branch exists on the remote, check it out and pull changes + print(f"Checking out existing branch: {branch_name}") + repo.git.checkout(branch_name) repo.git.pull("origin", branch_name) else: - repo.git.switch("-c", branch_name) + # If the branch doesn't exist, create it and push to the remote + print(f"Branch {branch_name} does not exist on origin. Creating the branch.") + repo.git.checkout("-b", branch_name) + repo.git.push("-u", "origin", branch_name) + # status = repo.git.status() + # print("\nGit Status:\n", status) if branch_name in repo.remotes.origin.refs: repo.heads[branch_name].set_tracking_branch( repo.remotes.origin.refs[branch_name] @@ -83,11 +128,12 @@ def sync_file(repo, api_instance, file_info): sync_to = file_info["metadata"]["sync-to"] # branch_name = f"hlds-sync_{sync_to}" - branch_name = f"hlds_sync_{os.path.basename(sync_to).replace(".hlds", "")}" + branch_name = f"haldis_sync_{os.path.basename(sync_to).replace(".hlds", "")}" print(f"Starting sync of {path}") clear_repo(repo) print(f" Checking out onto branch: {branch_name}") checkout_branch(repo, branch_name) + # return # barrier to stop PR's while testing TODO remove with open(path) as r: # pathlib.Path(f"{REPO_FOLDER}/{sync_to}").mkdir( # parents=True, exist_ok=True @@ -113,6 +159,10 @@ def sync_file(repo, api_instance, file_info): print( " Creating a new merge request to update the git menu with the new version from the hlds menu." ) + send_message( + conf, + f"[hlds sync] Creating a new merge request to update the git menu of {sync_to} with the new version from the hlds menu." 
+ ) api_instance.repo_create_pull_request( GIT_ORG, GIT_REPO, @@ -124,6 +174,10 @@ def sync_file(repo, api_instance, file_info): ) else: print(" Creating a new merge request to add the Menu to git.") + send_message( + conf, + f"[hlds sync] Creating a new merge request to add the git menu of {sync_to} with the new version from the hlds menu." + ) api_instance.repo_create_pull_request( GIT_ORG, GIT_REPO, @@ -135,5 +189,9 @@ def sync_file(repo, api_instance, file_info): ) else: print(" Merge request was already open.") + send_message( + conf, + f"[hlds sync] Merge request was already open for git menu of {sync_to}" + ) else: print(" Menu has no changes.") diff --git a/utils.py b/utils.py index e296f46..cf4937e 100644 --- a/utils.py +++ b/utils.py @@ -1,3 +1,4 @@ +import functools import re import time @@ -8,6 +9,19 @@ import pdfplumber from selenium.common.exceptions import StaleElementReferenceException +def timer(func): + """Print the runtime of the decorated function""" + + @functools.wraps(func) + def wrapper_timer(*args, **kwargs): + start_time = time.perf_counter() # 1 + value = func(*args, **kwargs) + end_time = time.perf_counter() # 2 + run_time = end_time - start_time # 3 + print(f"Finished {func.__name__!r} in {run_time:.4f} secs") + return value + + return wrapper_timer def comma_float(inp: str) -> float: return float(inp.replace(',', '.')) diff --git a/website/app.py b/website/app.py index d4491a6..11e8ddc 100644 --- a/website/app.py +++ b/website/app.py @@ -10,6 +10,7 @@ # Add the parent directory to the system path to allow imports from the higher-level directory sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))) from main import run_scrapers +from run_sync import sync_gitmate app = Flask(__name__) @@ -198,6 +199,21 @@ def home(): return render_template('index.html', scraper_info=scraper_info) +@app.route("/sync-all", methods=["POST"]) +def sync_all_files(): + """ + Sync all files to GitMate. + """ + try: + # Call the `sync_gitmate` function without arguments to sync all files + print("Syncing all files to GitMate...") + sync_gitmate() + print("Synced all files to GitMate") + return jsonify({"message": "All files synced successfully."}), 200 + except Exception as e: + return jsonify({"error": str(e)}), 500 + + if __name__ == "__main__": # Initialize the database when the app starts init_db() diff --git a/website/static/favicon.ico b/website/static/favicon.ico new file mode 100644 index 0000000..d39f97f Binary files /dev/null and b/website/static/favicon.ico differ diff --git a/website/templates/index.html b/website/templates/index.html index 1076e25..b2e4278 100644 --- a/website/templates/index.html +++ b/website/templates/index.html @@ -4,6 +4,7 @@ Restaurant Scraper +
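For reference: the new config.py module loads config.toml once at import time, and the Mattermost/Gitea helpers in this diff read a handful of keys from the resulting config dict (config["mattermost"]["users"][...]["token"] in loadusers(), config["mattermost"]["selected_user"] in mattermost_comunication.py, and config["gitea"]["server_url"] / ["remote_org"] / ["remote_repo"] in sync_gitmate.py). The sketch below shows the structure those lookups imply, written as the Python dict that tomllib.load would return; the key names come from the code above, while the user names and all values are placeholders, not the project's real configuration.

```python
# Sketch of the config.toml contents implied by the lookups in this diff,
# expressed as the dict that tomllib.load(config_toml) returns in config.py.
# Key names are taken from the code above; every value is a placeholder.
config = {
    "mattermost": {
        # user the sync messages are posted as (used in mattermost_comunication.py)
        "selected_user": "tyboro",
        # each user needs a "token" entry, otherwise loadusers() exits
        "users": {
            "tyboro": {"token": "<personal-access-token>"},
            "flynn": {"token": "<personal-access-token>"},
        },
    },
    "gitea": {
        "server_url": "git.example.org",  # placeholder host
        "remote_org": "<organisation>",
        "remote_repo": "<repository>",
    },
}
```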