-
Notifications
You must be signed in to change notification settings - Fork 249
/
Copy pathgithub_modified_files.py
executable file
·94 lines (79 loc) · 2.82 KB
/
github_modified_files.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
#!/usr/bin/env python3
"""
Gets list of files that will be modified by all PRs for the branch.
Dumps to file to be loaded by other script.
We assume that all PRs we are interested in are made for the master branch.
"""
from github import Github
from github_utils import *
from os.path import expanduser
from repo_config import GH_TOKEN
from argparse import ArgumentParser
import json
import logging
# Module-level logger configuration.
# See https://docs.python.org/3/library/logging.html
# (the original link pointed at the Python 2 docs for a nonexistent
# "logger" module).
FORMAT = "%(levelname)s - %(funcName)s - %(lineno)d: %(message)s"
logging.basicConfig(format=FORMAT)
logger = logging.getLogger(__name__)
def main():
    """Collect the changed-file lists of open PRs and dump them as JSON.

    A previously dumped file (``--cached_pr``) is used as a cache: PRs whose
    ``updated_at`` timestamp is unchanged keep their cached file list instead
    of being re-fetched from the GitHub API. With ``--pull`` only that single
    PR is refreshed and the rest of the cache is carried over.
    """
    parser = ArgumentParser()
    parser.add_argument("-r", "--repo")
    parser.add_argument("-d", "--destination")
    parser.add_argument("-c", "--cached_pr", default=None)
    parser.add_argument("-b", "--branch", default=None)
    parser.add_argument("-p", "--pull", default=None)
    parser.add_argument(
        "-l",
        "--logging",
        default="DEBUG",
        # NOTE(review): logging._nameToLevel is a private CPython detail;
        # works in practice but is unsupported API.
        choices=logging._nameToLevel,
        help="Set level of logging",
    )
    args = parser.parse_args()
    logger.setLevel(args.logging)
    # Use a context manager so the token file handle is not leaked.
    with open(expanduser(GH_TOKEN)) as token_file:
        gh = Github(login_or_token=token_file.read().strip())
    repo = gh.get_repo(args.repo)
    old_prs_dict = {}
    if args.cached_pr:
        try:
            with open(args.cached_pr) as f:
                old_prs_dict = json.load(f)
        except Exception as e:
            # Lazy %-style formatting. The original passed str(e) as an extra
            # positional argument with no placeholder in the format string,
            # which made logging raise internally and the detail was lost.
            logger.warning("Could not load a dumped prs: %s", e)
    rez = {}
    if args.pull:
        import copy

        # Refreshing a single PR: start from the cached state so every other
        # PR survives into the newly dumped file.
        rez = copy.deepcopy(old_prs_dict)
        pr_list = [repo.get_pull(int(args.pull))]
    else:
        pr_list = get_pull_requests(repo, branch=args.branch)
    print("GitHub API rate limit before: {}".format(gh.get_rate_limit()))
    for pr in pr_list:
        nr = str(pr.number)
        if pr.state == "closed":
            # Closed PRs are dropped (they may still exist in the cache).
            if nr in rez:
                del rez[nr]
            continue
        rez[nr] = {
            "number": int(nr),
            "state": pr.state,
            # Use get_unix_time (from github_utils) for both timestamps.
            # The original used the non-portable, local-time-dependent
            # strftime("%s") here while the cache comparison below used
            # get_unix_time, so the two could disagree and defeat the cache.
            # Assumes get_unix_time returns an epoch value — TODO confirm.
            "created_at": int(get_unix_time(pr.created_at)),
            "updated_at": int(get_unix_time(pr.updated_at)),
            "base_branch": pr.base.ref,
        }
        # Reuse the cached file list when the PR was not updated since the
        # cache was written.
        if nr in old_prs_dict:
            pr_old = old_prs_dict[nr]
            if int(get_unix_time(pr.updated_at)) == pr_old["updated_at"]:
                rez[nr]["changed_files_names"] = pr_old["changed_files_names"]
                logger.debug(" Using from cache %s" % nr)
                continue
            logger.debug("!PR was updated %s" % nr)
        rez[nr]["changed_files_names"] = pr_get_changed_files(pr)
    with open(args.destination, "w") as d:
        json.dump(rez, d, sort_keys=True, indent=4)
    print("GitHub API rate limit after: {}".format(gh.get_rate_limit()))
# Run only when executed as a script, not when imported as a module.
if __name__ == "__main__":
    main()