From 1ac385ee6c0dc0c4838e892c85384c5ed2e8a465 Mon Sep 17 00:00:00 2001
From: Wang Yihang
Date: Mon, 31 Jan 2022 14:19:32 +0800
Subject: [PATCH] style(__init__.py): use f-strings (Python 3.6+) for string
 formatting to improve readability

---
 GitHacker/__init__.py | 48 ++++++++++++++++++++++--------------------------
 1 file changed, 22 insertions(+), 26 deletions(-)

diff --git a/GitHacker/__init__.py b/GitHacker/__init__.py
index 50a4bfc..122ac58 100644
--- a/GitHacker/__init__.py
+++ b/GitHacker/__init__.py
@@ -37,9 +37,8 @@ def __init__(self, url, dst, threads=0x08, brute=True) -> None:
 
     def start(self):
         # Ensure the target is a git folder via `.git/HEAD`
-        if requests.head("{}{}".format(self.url, ".git/HEAD"), verify=self.verify).status_code != 200:
-            logging.error(
-                "The target url({}) is not a valid git repository, `.git/HEAD` not exists".format(self.url))
+        if requests.head(f"{self.url}.git/HEAD", verify=self.verify).status_code != 200:
+            logging.error(f"The target url({self.url}) is not a valid git repository, `.git/HEAD` not exists")
             return False
 
         for _ in range(self.thread_number):
@@ -51,7 +50,7 @@ def start(self):
             return self.blind()
 
     def directory_listing_enabled(self):
-        response = requests.get("{}{}".format(self.url, ".git/"), verify=self.verify)
+        response = requests.get(f"{self.url}.git/", verify=self.verify)
         keywords = {
             "apache": "Index of",
             "nginx": "<title>Index of",
@@ -59,8 +58,7 @@ def directory_listing_enabled(self):
         if response.status_code == 200:
             for server, keyword in keywords.items():
                 if keyword in response.text:
-                    logging.info(
-                        "Directory listing enable under: {}".format(server))
+                    logging.info(f"Directory listing enable under: {server}")
                     return True
         return False
 
@@ -70,7 +68,7 @@ def sighted(self):
         return self.git_clone()
 
     def add_folder(self, base_url, folder):
-        url = "{}{}".format(base_url, folder)
+        url = f"{base_url}{folder}"
         soup = bs4.BeautifulSoup(requests.get(
             url, verify=self.verify).text, features="html.parser")
         links = soup.find_all("a")
@@ -81,7 +79,7 @@ def add_folder(self, base_url, folder):
             if href.endswith("/"):
                 self.add_folder(url, href)
             else:
-                file_url = "{}{}".format(url, href)
+                file_url = f"{url}{href}"
                 path = file_url.replace(self.url, "").split("/")
                 self.q.put(path)
 
@@ -104,11 +102,11 @@ def blind(self):
 
         logging.info('Running git fsck files...')
         while True:
-            content = "{}".format(subprocess.run(
+            content = subprocess.run(
                 ['git', "fsck"],
                 stdout=subprocess.PIPE,
                 cwd=self.dst,
-            ))
+            ).stdout
             tn = self.add_hashes_parsed(content)
             if tn > 0:
                 self.q.join()
@@ -118,7 +116,7 @@ def blind(self):
         return self.git_clone()
 
     def git_clone(self):
-        logging.info("Cloning downloaded repo from {} to {}".format(self.dst, self.real_dst))
+        logging.info(f"Cloning downloaded repo from {self.dst} to {self.real_dst}")
         result = subprocess.run(
             ["git", "clone", self.dst, self.real_dst],
             stdout=subprocess.PIPE,
@@ -127,19 +125,19 @@ def git_clone(self):
         if result.stdout != b'': logging.info(result.stdout.decode("utf-8").strip())
         if result.stderr != b'': logging.error(result.stderr.decode("utf-8").strip())
         if b"invalid path" in result.stderr:
-            logging.info("Remote repo is downloaded into {}".format(self.real_dst))
+            logging.info(f"Remote repo is downloaded into {self.real_dst}")
             logging.error("Be careful to checkout the source code, cause the target repo may be a honey pot.")
             logging.error("FYI: https://drivertom.blogspot.com/2021/08/git.html")
 
         if result.returncode == 0:
             # return True only when `git clone` successfully executed
-            logging.info("Check it out: {}".format(self.real_dst))
+            logging.info(f"Check it out: {self.real_dst}")
             shutil.rmtree(self.dst)
             return True
         return False
 
     def add_hashes_parsed(self, content):
-        hashes = re.findall(r"([a-f\d]{40})", content)
+        hashes = re.findall(r"([a-f\d]{40})", content.decode("utf-8"))
         n = 0
         for hash in hashes:
             n += 1
@@ -155,7 +153,7 @@ def add_head_file_tasks(self):
             file_path = os.path.join(
                 self.dst, os.path.sep.join([".git", "logs", ref_path]))
             if os.path.exists(file_path):
-                with open(file_path) as ff:
+                with open(file_path, "rb") as ff:
                     data = ff.read()
                     n += self.add_hashes_parsed(data)
         return n
@@ -213,9 +211,9 @@ def add_basic_file_tasks(self):
                 for minor in range(self.max_semanic_version):
                     for patch in range(self.max_semanic_version):
                         files.append(
-                            [".git", "refs", "tags", "v{}.{}.{}".format(major, minor, patch)])
+                            [".git", "refs", "tags", f"v{major}.{minor}.{patch}"])
                         files.append(
-                            [".git", "refs", "tags", "{}.{}.{}".format(major, minor, patch)])
+                            [".git", "refs", "tags", f"{major}.{minor}.{patch}"])
         else:
             files.append([".git", "refs", "tags", "v0.0.1"])
             files.append([".git", "refs", "tags", "0.0.1"])
@@ -261,15 +259,13 @@ def worker(self):
             else:
                 fs_path = os.path.join(self.dst, os.path.sep.join(path))
                 url_path = "/".join(path)
-                url = "{}{}".format(self.url, url_path)
+                url = f"{self.url}{url_path}"
                 if not os.path.exists(fs_path):
                     status_code, length, result = self.wget(url, fs_path)
                     if result:
-                        logging.info('[{:d} bytes] {} {}'.format(
-                            length, status_code, url_path))
+                        logging.info(f'[{length} bytes] {status_code} {url_path}')
                     else:
-                        logging.error('[{:d} bytes] {} {}'.format(
-                            length, status_code, url_path))
+                        logging.error(f'[{length} bytes] {status_code} {url_path}')
             self.q.task_done()
 
     def check_file_content(self, content):
@@ -332,10 +328,10 @@ def main():
                 if url.strip() != "":
                     urls.append(url)
     succeed_urls = []
-    logging.info("{} urls to be exploited".format(len(urls)))
+    logging.info(f"{len(urls)} urls to be exploited")
    for url in urls:
        folder = os.path.sep.join([args.output_folder, md5(url)])
-        logging.info("Exploiting {} into {}".format(url, folder))
+        logging.info(f"Exploiting {url} into {folder}")
        try:
            result = GitHacker(
                url=append_if_not_exists(remove_suffixes(url, ['.git', '.git/']), '/'),
@@ -348,10 +344,10 @@ def main():
        except Exception as e:
            logging.error(repr(e))
 
-    logging.info("{} / {} were exploited successfully".format(len(succeed_urls), len(urls)))
+    logging.info(f"{len(succeed_urls)} / {len(urls)} were exploited successfully")
    for url in succeed_urls:
        folder = os.path.sep.join([args.output_folder, md5(url)])
-        logging.info("{} -> {}".format(url, folder))
+        logging.info(f"{url} -> {folder}")
 
 if __name__ == "__main__":
     main()