2022-02-26 07:37:47 +08:00
|
|
|
#!/usr/bin/env python3
|
|
|
|
|
|
|
|
# Copyright (c) 2022 Intel Corp.
|
|
|
|
# SPDX-License-Identifier: Apache-2.0
|
|
|
|
|
|
|
|
import argparse
|
|
|
|
import sys
|
|
|
|
import os
|
|
|
|
import time
|
|
|
|
import datetime
|
|
|
|
from github import Github, GithubException
|
2022-07-19 07:37:31 +08:00
|
|
|
from github.GithubException import UnknownObjectException
|
2022-02-26 07:37:47 +08:00
|
|
|
from collections import defaultdict
|
2023-08-15 22:31:32 +08:00
|
|
|
from west.manifest import Manifest
|
|
|
|
from west.manifest import ManifestProject
|
2022-02-26 07:37:47 +08:00
|
|
|
|
|
|
|
TOP_DIR = os.path.join(os.path.dirname(__file__))
|
|
|
|
sys.path.insert(0, os.path.join(TOP_DIR, "scripts"))
|
|
|
|
from get_maintainer import Maintainers
|
|
|
|
|
|
|
|
def log(s):
    """Emit *s* to stdout, but only when -v/--verbose was given.

    Relies on the module-global ``args`` namespace populated by
    parse_args(); ``args.verbose`` is the -v occurrence count.
    """
    if args.verbose:
        print(s, file=sys.stdout)
|
|
|
|
|
|
|
|
def parse_args():
    """Parse the command line into the module-global ``args`` namespace.

    The -P/-I/-s/-m options select mutually exclusive operation modes
    (single PR, single issue, PRs since a date, module repositories).
    """
    global args

    ap = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter,
        allow_abbrev=False,
    )

    ap.add_argument(
        "-M", "--maintainer-file", required=False, default="MAINTAINERS.yml",
        help="Maintainer file to be used.")

    # Exactly one way of selecting what to operate on may be given.
    mode = ap.add_mutually_exclusive_group()
    mode.add_argument(
        "-P", "--pull_request", required=False, default=None, type=int,
        help="Operate on one pull-request only.")
    mode.add_argument(
        "-I", "--issue", required=False, default=None, type=int,
        help="Operate on one issue only.")
    mode.add_argument(
        "-s", "--since", required=False,
        help="Process pull-requests since date.")
    mode.add_argument(
        "-m", "--modules", action="store_true",
        help="Process pull-requests from modules.")

    ap.add_argument(
        "-y", "--dry-run", action="store_true", default=False,
        help="Dry run only.")
    ap.add_argument(
        "-o", "--org", default="zephyrproject-rtos",
        help="Github organisation")
    ap.add_argument(
        "-r", "--repo", default="zephyr",
        help="Github repository")
    ap.add_argument(
        "-v", "--verbose", action="count", default=0,
        help="Verbose Output")

    args = ap.parse_args()
|
|
|
|
|
|
|
|
def process_pr(gh, maintainer_file, number):
    """Label one pull-request and pick its assignees and reviewers.

    Matches every changed file against the MAINTAINERS areas, counts
    per-area and per-maintainer hits, then:
      * applies the union of the matched areas' labels,
      * requests reviews from the matched areas' maintainers and
        collaborators (capped at 15 total reviewers), and
      * assigns the maintainer(s) of the best-matching area, preferring
        an area not maintained by the PR submitter.

    Uses the module globals ``args`` (CLI options) and ``log``. When
    ``args.dry_run`` is set, only logs what it would do.
    """
    gh_repo = gh.get_repo(f"{args.org}/{args.repo}")
    pr = gh_repo.get_pull(number)

    log(f"working on https://github.com/{args.org}/{args.repo}/pull/{pr.number} : {pr.title}")

    labels = set()
    # area -> number of changed files matching it; maintainer -> hit count
    area_counter = defaultdict(int)
    found_maintainers = defaultdict(int)

    num_files = 0
    all_areas = set()
    fn = list(pr.get_files())

    # NOTE(review): this loop has no observable effect -- the `break` sets
    # nothing. It looks like a manifest-change flag assignment was lost;
    # confirm the intent against the upstream history.
    for changed_file in fn:
        if changed_file.filename in ['west.yml','submanifests/optional.yaml']:
            break

    # Single-commit PRs touching at most one added and one deleted line
    # get the smallest size label.
    if pr.commits == 1 and (pr.additions <= 1 and pr.deletions <= 1):
        labels = {'size: XS'}

    # Bail out on huge PRs; area-matching 500+ files is not useful.
    if len(fn) > 500:
        log(f"Too many files changed ({len(fn)}), skipping....")
        return

    for changed_file in fn:
        num_files += 1
        log(f"file: {changed_file.filename}")
        areas = maintainer_file.path2areas(changed_file.filename)

        if not areas:
            continue

        all_areas.update(areas)
        is_instance = False
        # Visit "Platform" areas first; after the first one, the remaining
        # areas for this file are counted with weight 0 (labels still apply).
        sorted_areas = sorted(areas, key=lambda x: 'Platform' in x.name, reverse=True)
        for area in sorted_areas:
            c = 1 if not is_instance else 0

            area_counter[area] += c
            labels.update(area.labels)
            # FIXME: Here we count the same file multiple times if it exists in
            # multiple areas with same maintainer
            for area_maintainer in area.maintainers:
                found_maintainers[area_maintainer] += c

            if 'Platform' in area.name:
                is_instance = True

    # Re-order areas by number of matching files, most hits first.
    area_counter = dict(sorted(area_counter.items(), key=lambda item: item[1], reverse=True))
    log(f"Area matches: {area_counter}")
    log(f"labels: {labels}")

    # Create a list of collaborators ordered by the area match
    collab = list()
    for area in area_counter:
        collab += maintainer_file.areas[area.name].maintainers
        collab += maintainer_file.areas[area.name].collaborators
    # dict.fromkeys() dedupes while preserving the area-match ordering.
    collab = list(dict.fromkeys(collab))
    log(f"collab: {collab}")

    _all_maintainers = dict(sorted(found_maintainers.items(), key=lambda item: item[1], reverse=True))

    log(f"Submitted by: {pr.user.login}")
    log(f"candidate maintainers: {_all_maintainers}")

    assignees = []
    tmp_assignees = []

    # we start with areas with most files changed and pick the maintainer from the first one.
    # if the first area is an implementation, i.e. driver or platform, we
    # continue searching for any other areas involved
    for area, count in area_counter.items():
        if count == 0:
            continue
        if len(area.maintainers) > 0:
            # Remember this area's maintainers as a fallback even when the
            # submitter is among them.
            tmp_assignees = area.maintainers
            if pr.user.login in area.maintainers:
                # submitter = assignee, try to pick next area and
                # assign someone else other than the submitter
                continue
            else:
                assignees = area.maintainers

            if 'Platform' not in area.name:
                break

    # Every candidate area is maintained by the submitter: assign them anyway.
    if tmp_assignees and not assignees:
        assignees = tmp_assignees

    if assignees:
        # Ownership: share of changed files credited to the first assignee.
        prop = (found_maintainers[assignees[0]] / num_files) * 100
        log(f"Picked assignees: {assignees} ({prop:.2f}% ownership)")
        log("+++++++++++++++++++++++++")

    # Set labels
    if labels:
        if len(labels) < 10:
            for l in labels:
                log(f"adding label {l}...")
                if not args.dry_run:
                    pr.add_to_labels(l)
        else:
            log(f"Too many labels to be applied")

    if collab:
        reviewers = []
        existing_reviewers = set()

        # Users that already reviewed the PR...
        revs = pr.get_reviews()
        for review in revs:
            existing_reviewers.add(review.user)

        # ...plus users with a pending review request.
        rl = pr.get_review_requests()
        page = 0
        for r in rl:
            existing_reviewers |= set(r.get_page(page))
            page += 1

        # check for reviewers that remove themselves from list of reviewer and
        # do not attempt to add them again based on MAINTAINERS file.
        self_removal = []
        for event in pr.get_issue_events():
            if event.event == 'review_request_removed' and event.actor == event.requested_reviewer:
                self_removal.append(event.actor)

        for collaborator in collab:
            try:
                gh_user = gh.get_user(collaborator)
                # Skip the submitter and anyone already reviewing/requested.
                if pr.user == gh_user or gh_user in existing_reviewers:
                    continue
                if not gh_repo.has_in_collaborators(gh_user):
                    log(f"Skip '{collaborator}': not in collaborators")
                    continue
                if gh_user in self_removal:
                    log(f"Skip '{collaborator}': self removed")
                    continue
                reviewers.append(collaborator)
            except UnknownObjectException as e:
                log(f"Can't get user '{collaborator}', account does not exist anymore? ({e})")

        # Cap the total reviewer count (existing + newly added) at 15.
        if len(existing_reviewers) < 15:
            reviewer_vacancy = 15 - len(existing_reviewers)
            reviewers = reviewers[:reviewer_vacancy]

            if reviewers:
                try:
                    log(f"adding reviewers {reviewers}...")
                    if not args.dry_run:
                        pr.create_review_request(reviewers=reviewers)
                except GithubException:
                    log("cant add reviewer")
        else:
            log("not adding reviewers because the existing reviewer count is greater than or "
                "equal to 15")

    ms = []
    # assignees
    if assignees and not pr.assignee:
        try:
            for assignee in assignees:
                u = gh.get_user(assignee)
                ms.append(u)
        except GithubException:
            log(f"Error: Unknown user")

        for mm in ms:
            log(f"Adding assignee {mm}...")
            if not args.dry_run:
                pr.add_to_assignees(mm)
    else:
        log("not setting assignee")

    # Throttle so batch runs stay clear of GitHub API rate limits.
    time.sleep(1)
|
|
|
|
|
2023-08-15 22:31:32 +08:00
|
|
|
|
2023-08-23 01:04:31 +08:00
|
|
|
def process_issue(gh, maintainer_file, number):
    """Assign maintainers to one issue based on its labels.

    Builds a mapping from tuples of sorted, lower-cased area labels to
    maintainer sets, then assigns the maintainers whose label tuple
    matches the recognized labels present on the issue. Bails out if the
    issue is already assigned. Honors ``args.dry_run``.
    """
    repo = gh.get_repo(f"{args.org}/{args.repo}")
    issue = repo.get_issue(number)

    log(f"Working on {issue.url}: {issue.title}")

    if issue.assignees:
        print(f"Already assigned {issue.assignees}, bailing out")
        return

    # (sorted lower-cased label tuple) -> set of maintainers
    label_to_maintainer = defaultdict(set)
    for area in maintainer_file.areas.values():
        if not area.labels or not area.maintainers:
            continue
        key = tuple(sorted({lbl.lower() for lbl in area.labels}))
        label_to_maintainer[key].update(area.maintainers)

    # Add extra entries for areas with multiple labels so they match with just
    # one label if it's specific enough.
    for key, maintainers in list(label_to_maintainer.items()):
        for single in key:
            if (single,) not in label_to_maintainer:
                label_to_maintainer[(single,)] = maintainers

    # Keep only the issue labels that some area knows about.
    recognized = set()
    for label in issue.labels:
        label_name = label.name.lower()
        if (label_name,) in label_to_maintainer:
            recognized.add(label_name)
        else:
            print(f"Ignoring label: {label}")
    issue_labels = tuple(sorted(recognized))

    print(f"Using labels: {issue_labels}")

    if issue_labels not in label_to_maintainer:
        print(f"no match for the label set, not assigning")
        return

    for maintainer in label_to_maintainer[issue_labels]:
        log(f"Adding {maintainer} to {issue.html_url}")
        if not args.dry_run:
            issue.add_to_assignees(maintainer)
|
|
|
|
|
|
|
|
|
2023-08-15 22:31:32 +08:00
|
|
|
def process_modules(gh, maintainers_file):
    """Assign maintainers/reviewers on open module (west project) PRs.

    Maps every active west project to its "West project: <name>" area in
    the maintainers file, then walks every open, unassigned, non-draft
    pull-request in those repositories, assigning the area maintainers
    and requesting reviews from maintainers and collaborators. Honors
    ``args.dry_run``.
    """
    manifest = Manifest.from_file()

    # "<org>/<project>" -> Area, for active projects with maintainers.
    area_by_repo = {}
    for project in manifest.get_projects([]):
        if not manifest.is_active(project):
            continue
        if isinstance(project, ManifestProject):
            continue

        area = f"West project: {project.name}"
        if area not in maintainers_file.areas:
            log(f"No area for: {area}")
            continue

        maintainers = maintainers_file.areas[area].maintainers
        if not maintainers:
            log(f"No maintainers for: {area}")
            continue

        collaborators = maintainers_file.areas[area].collaborators
        log(f"Found {area}, maintainers={maintainers}, collaborators={collaborators}")
        area_by_repo[f"{args.org}/{project.name}"] = maintainers_file.areas[area]

    # One search query covering every module repository at once.
    query = " ".join(["is:open is:pr no:assignee"] + [f"repo:{r}" for r in area_by_repo])

    for found in gh.search_issues(query=query):
        pull = found.as_pull_request()

        if pull.draft:
            continue
        if pull.assignees:
            log(f"ERROR: {pull.html_url} should have no assignees, found {pull.assignees}")
            continue

        area = area_by_repo[f"{args.org}/{found.repository.name}"]

        for maintainer in area.maintainers:
            log(f"Assigning {maintainer} to {pull.html_url}")
            if not args.dry_run:
                pull.add_to_assignees(maintainer)
                pull.create_review_request(maintainer)

        for collaborator in area.collaborators:
            log(f"Adding {collaborator} to {pull.html_url}")
            if not args.dry_run:
                pull.create_review_request(collaborator)
|
2023-08-15 22:31:32 +08:00
|
|
|
|
|
|
|
|
2022-02-26 07:37:47 +08:00
|
|
|
def main():
    """Entry point: parse options and dispatch to the right handler.

    Requires a GitHub token in the GITHUB_TOKEN environment variable.
    Without -P/-I/-m, falls back to batch mode over recent open PRs.
    """
    parse_args()

    token = os.environ.get('GITHUB_TOKEN', None)
    if not token:
        sys.exit('Github token not set in environment, please set the '
                 'GITHUB_TOKEN environment variable and retry.')

    gh = Github(token)
    maintainer_file = Maintainers(args.maintainer_file)

    if args.pull_request:
        process_pr(gh, maintainer_file, args.pull_request)
        return
    if args.issue:
        process_issue(gh, maintainer_file, args.issue)
        return
    if args.modules:
        process_modules(gh, maintainer_file)
        return

    # Batch mode: every open, non-draft, unassigned PR against main
    # created since --since (default: yesterday).
    since = args.since if args.since else datetime.date.today() - datetime.timedelta(days=1)

    common_prs = f'repo:{args.org}/{args.repo} is:open is:pr base:main -is:draft no:assignee created:>{since}'
    pulls = gh.search_issues(query=f'{common_prs}')

    for issue in pulls:
        process_pr(gh, maintainer_file, issue.number)
|
|
|
|
|
|
|
|
|
|
|
|
# Run only when executed as a script, not when imported.
if __name__ == "__main__":
    main()
|