pseusys
2023-02-21 20:59:30 +01:00
parent 0b4f9efb32
commit a97119e612
2 changed files with 8 additions and 10 deletions


@@ -225,9 +225,7 @@ class DownloadManager:
         :param kwargs: Parameters for substitution of variables in dynamic query.
         :return: Response JSON dictionary.
         """
-        res = await DownloadManager._client.post("https://api.github.com/graphql", json={
-            "query": Template(GITHUB_API_QUERIES[query]).substitute(kwargs)
-        })
+        res = await DownloadManager._client.post("https://api.github.com/graphql", json={"query": Template(GITHUB_API_QUERIES[query]).substitute(kwargs)})
         if res.status_code == 200:
             return res.json()
         else:
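The call above stores GraphQL queries as string templates and fills them in at request time. A minimal sketch of that pattern, assuming an httpx AsyncClient and an illustrative QUERY_TEMPLATES dictionary standing in for the repo's GITHUB_API_QUERIES:

from string import Template

import httpx

# Hypothetical stand-in for GITHUB_API_QUERIES; the real templates live elsewhere in the repo.
QUERY_TEMPLATES = {
    "user_repository_list": Template(
        'query { user(login: "$login") { repositories($pagination) { nodes { name } pageInfo { hasNextPage endCursor } } } }'
    )
}

async def fetch_graphql(client: httpx.AsyncClient, query: str, **kwargs) -> dict:
    # Substitute $-placeholders in the stored query and POST it as a single JSON payload.
    res = await client.post(
        "https://api.github.com/graphql",
        json={"query": QUERY_TEMPLATES[query].substitute(kwargs)},
    )
    res.raise_for_status()  # the real method branches on status_code == 200 instead
    return res.json()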
@@ -266,7 +264,7 @@ class DownloadManager:
         :param kwargs: Parameters for substitution of variables in dynamic query.
         :return: Response JSON dictionary.
         """
-        initial_query_response = await DownloadManager._fetch_graphql_query(query, **kwargs, pagination=f"first: 100")
+        initial_query_response = await DownloadManager._fetch_graphql_query(query, **kwargs, pagination="first: 100")
         page_list, page_info = DownloadManager._find_pagination_and_data_list(initial_query_response)
         while page_info["hasNextPage"]:
             query_response = await DownloadManager._fetch_graphql_query(query, **kwargs, pagination=f'first: 100, after: "{page_info["endCursor"]}"')
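This second hunk touches the paginated wrapper around that call: it keeps re-issuing the query with an `after:` cursor while `hasNextPage` is set. A simplified sketch reusing `fetch_graphql` from above, where `find_connection` is an assumed stand-in for `_find_pagination_and_data_list`:

async def fetch_graphql_paginated(client: httpx.AsyncClient, query: str, **kwargs) -> list:
    # First page uses plain "first: 100"; later pages add the cursor returned by the previous one.
    response = await fetch_graphql(client, query, **kwargs, pagination="first: 100")
    nodes, page_info = find_connection(response)
    while page_info["hasNextPage"]:
        cursor = page_info["endCursor"]
        response = await fetch_graphql(client, query, **kwargs, pagination=f'first: 100, after: "{cursor}"')
        new_nodes, page_info = find_connection(response)
        nodes += new_nodes
    return nodes

def find_connection(response: dict) -> tuple:
    # Descend into the response until a connection object (with "nodes" and "pageInfo") is found.
    obj = response["data"]
    while "nodes" not in obj:
        obj = next(value for value in obj.values() if isinstance(value, dict))
    return obj["nodes"], obj["pageInfo"]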


@@ -16,10 +16,10 @@ async def calculate_yearly_commit_data(repositories: Dict) -> Dict:
     :returns: Commit quarter yearly data dictionary.
     """
     yearly_data = dict()
-    total = len(repositories['data']['user']['repositories']['nodes'])
-    for ind, repo in enumerate(repositories['data']['user']['repositories']['nodes']):
-        if repo['name'] not in EM.IGNORED_REPOS:
-            print(f"{ind + 1}/{total}", "Retrieving repo:", repo["owner"]["login"], repo['name'])
+    total = len(repositories["data"]["user"]["repositories"]["nodes"])
+    for ind, repo in enumerate(repositories["data"]["user"]["repositories"]["nodes"]):
+        if repo["name"] not in EM.IGNORED_REPOS:
+            print(f"{ind + 1}/{total}", "Retrieving repo:", repo["owner"]["login"], repo["name"])
             await update_yearly_data_with_commit_stats(repo, yearly_data)
     return yearly_data
@@ -33,13 +33,13 @@ async def update_yearly_data_with_commit_stats(repo_details: Dict, yearly_data:
     :param yearly_data: Yearly data dictionary to update.
     """
     owner = repo_details["owner"]["login"]
-    branch_data = await DM.get_remote_graphql("repo_branch_list", owner=owner, name=repo_details['name'])
+    branch_data = await DM.get_remote_graphql("repo_branch_list", owner=owner, name=repo_details["name"])
     if branch_data["data"]["repository"] is None:
         print(f"\tSkipping repo: {repo_details['name']}")
         return dict()
     for branch in branch_data["data"]["repository"]["refs"]["nodes"]:
-        commit_data = await DM.get_remote_graphql("repo_commit_list", owner=owner, name=repo_details['name'], branch=branch["name"], id=GHM.USER.node_id)
+        commit_data = await DM.get_remote_graphql("repo_commit_list", owner=owner, name=repo_details["name"], branch=branch["name"], id=GHM.USER.node_id)
         for commit in commit_data["data"]["repository"]["ref"]["target"]["history"]["nodes"]:
             date = search(r"\d+-\d+-\d+", commit["committedDate"]).group()
             curr_year = datetime.fromisoformat(date).year
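The last lines of this loop reduce each commit's ISO-8601 `committedDate` to a calendar year (and, per the function's docstring, a quarter). A small illustration of that date handling; the quarter formula is an assumption about how the buckets are derived, not taken from the diff:

from datetime import datetime
from re import search

def commit_year_and_quarter(committed_date: str) -> tuple:
    # Keep only the YYYY-MM-DD prefix so datetime.fromisoformat can parse it regardless of the time suffix.
    date = search(r"\d+-\d+-\d+", committed_date).group()
    parsed = datetime.fromisoformat(date)
    return parsed.year, (parsed.month - 1) // 3 + 1  # assumed quarter bucketing

print(commit_year_and_quarter("2023-02-21T20:59:30Z"))  # -> (2023, 1)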