From a8ccf081a2ae76da4f4a3c76b65cdefa12a0a6bc Mon Sep 17 00:00:00 2001
From: Karma Riuk
Date: Wed, 21 May 2025 09:18:33 +0200
Subject: [PATCH] formatted file

---
 stats_pull_requests.py | 16 ++++++++++------
 1 file changed, 10 insertions(+), 6 deletions(-)

diff --git a/stats_pull_requests.py b/stats_pull_requests.py
index 555facd..9fa7e44 100644
--- a/stats_pull_requests.py
+++ b/stats_pull_requests.py
@@ -3,17 +3,18 @@
 from datetime import datetime
 import pandas as pd
 from tqdm import tqdm
 from github import Github
-from utils import has_only_1_round_of_comments, has_only_1_comment, move_github_logging_to_file
+from utils import has_only_1_round_of_comments, has_only_1_comment, move_logger_to_file
 
 tqdm.pandas()
 # Initialize GitHub API client
 g = Github(os.environ["GITHUB_AUTH_TOKEN_CRAB"])
 
+
 def process_pull(repo, pull):
     commits = pull.get_commits()
     comments = pull.get_review_comments()
-    
+
     return {
         "repo": repo.full_name,
         "pr_number": pull.number,
@@ -24,24 +25,26 @@ def process_pull(repo, pull):
         "has_only_1_comment": has_only_1_comment(commits, comments),
     }
 
+
 def process_repo(repo_name):
     repo = g.get_repo(repo_name)
     stats = []
-    
+
     with tqdm(list(repo.get_pulls(state="closed")), desc=repo_name, leave=False) as pbar:
         for pull in pbar:
             pbar.set_postfix({"started at": datetime.now().strftime("%d/%m, %H:%M:%S")})
             if not pull.merged_at:
                 continue
-            
+
             stats.append(process_pull(repo, pull))
     return stats
 
+
 def main():
     repos = pd.read_csv("results.csv")
     repos = repos[(repos["good_repo_for_crab"] == True) & (repos["n_tests"] > 0)]
     stats = []
-    
+
     try:
         for _, row in tqdm(repos.iterrows(), total=len(repos)):
             if "name" not in row or not isinstance(row["name"], str):
@@ -51,6 +54,7 @@ def main():
     finally:
         pd.DataFrame(stats).to_csv("pr_stats.csv", index=False)
 
+
 if __name__ == "__main__":
-    move_github_logging_to_file()
+    move_logger_to_file()
     main()