codestyle applying done

sources/graphics_list_formatter.py
@@ -1,4 +1,5 @@
-from typing import Dict
+from enum import Enum
+from typing import Dict, Tuple, List
 from datetime import datetime
 
 from pytz import timezone, utc
@@ -9,50 +10,46 @@ from manager_github import GitHubManager as GHM
 from manager_localization import LocalizationManager as LM
 
 
+DAY_TIME_EMOJI = ["🌞", "🌆", "🌃", "🌙"]
+DAY_TIME_NAMES = ["Morning", "Daytime", "Evening", "Night"]
+WEEK_DAY_NAMES = ["Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday", "Sunday"]
+
+
+class Symbol(Enum):
+    VERSION_1 = "█", "░"
+    VERSION_2 = "⣿", "⣀"
+    VERSION_3 = "⬛", "⬜"
+
+    @staticmethod
+    def get_symbols(version: int) -> Tuple[str, str]:
+        return Symbol[f"VERSION_{version}"].value
+
+
 def make_graph(percent: float):
-    '''Make progress graph from API graph'''
-    if EM.SYMBOL_VERSION == 1:  # version 1
-        done_block = '█'
-        empty_block = '░'
-    elif EM.SYMBOL_VERSION == 2:  # version 2
-        done_block = '⣿'
-        empty_block = '⣀'
-    elif EM.SYMBOL_VERSION == 3:  # version 3
-        done_block = '⬛'
-        empty_block = '⬜'
-    else:
-        done_block = '█'  # default is version 1
-        empty_block = '░'
-
-    pc_rnd = round(percent)
-    return f"{done_block * int(pc_rnd / 4)}{empty_block * int(25 - int(pc_rnd / 4))}"
+    """
+    Make progress graph from API graph
+    """
+    done_block, empty_block = Symbol.get_symbols(EM.SYMBOL_VERSION)
+    percent_quart = round(percent / 4)
+    return f"{done_block * percent_quart}{empty_block * (25 - percent_quart)}"
 
 
-def make_list(data: list):  # TODO: add arg: sorted
-    '''Make List'''
-    data_list = []
-    for l in data[:5]:
-        ln = len(l['name'])
-        ln_text = len(l['text'])
-        percent = "{:05.2f}".format(float(l['percent']))
-        op = f"{l['name'][:25]}{' ' * (25 - ln)}{l['text']}{' ' * (20 - ln_text)}{make_graph(l['percent'])} {percent} % "
-        data_list.append(op)
+def make_list(data: Dict = None, names: List[str] = None, texts: List[str] = None, percents: List[float] = None, top_num: int = 5, sort: bool = True) -> str:
+    """
+    Make List
+    """
+    if data is not None:
+        names = [value for key, value in data if key == "name"] if names is None else names
+        texts = [value for key, value in data if key == "text"] if texts is None else texts
+        percents = [value for key, value in data if key == "percent"] if percents is None else percents
+
+    data = list(zip(names, texts, percents))
+    top_data = sorted(data[:top_num], key=lambda record: record[2]) if sort else data[:top_num]
+    data_list = [f"{n[:25]}{' ' * (25 - len(n))}{t}{' ' * (20 - len(t))}{make_graph(p)} {p:05.2f} % " for n, t, p in top_data]
     return '\n'.join(data_list)
 
 
-def make_commit_list(data: list):
-    '''Make List'''
-    data_list = []
-    for l in data[:7]:
-        ln = len(l['name'])
-        ln_text = len(l['text'])
-        percent = "{:05.2f}".format(float(l['percent']))
-        op = f"{l['name']}{' ' * ((15 - ln) + (11 - ln_text))}{l['text']}{' ' * (7)}{make_graph(l['percent'])} {percent} % "
-        data_list.append(op)
-    return '\n'.join(data_list)
-
-
-async def generate_commit_list(time_zone: str) -> str:
+async def make_commit_day_time_list(time_zone: str) -> str:
     stats = str()
 
     result = await DM.get_remote_graphql("repos_contributed_to", username=GHM.USER.login)
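
Note: the new Symbol enum plus the slimmed-down make_graph in the hunk above can be exercised on their own. The sketch below copies that logic but takes the symbol version as a plain argument instead of reading EM.SYMBOL_VERSION, so it is an illustration of the change, not the module itself.

```python
from enum import Enum
from typing import Tuple


class Symbol(Enum):
    """(filled, empty) block pair used to draw the 25-block progress bars."""

    VERSION_1 = "█", "░"
    VERSION_2 = "⣿", "⣀"
    VERSION_3 = "⬛", "⬜"

    @staticmethod
    def get_symbols(version: int) -> Tuple[str, str]:
        # Member lookup by name: Symbol["VERSION_2"].value == ("⣿", "⣀")
        return Symbol[f"VERSION_{version}"].value


def make_graph(percent: float, symbol_version: int = 1) -> str:
    # Same arithmetic as the committed make_graph: 25 blocks wide, one block per 4 %.
    done_block, empty_block = Symbol.get_symbols(symbol_version)
    percent_quart = round(percent / 4)
    return f"{done_block * percent_quart}{empty_block * (25 - percent_quart)}"


print(make_graph(37.5))     # 9 filled blocks, 16 empty
print(make_graph(80.0, 2))  # 20 filled blocks in the braille style
```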
@@ -75,28 +72,19 @@ async def generate_commit_list(time_zone: str) -> str:
     sum_day = sum(day_times)
     sum_week = sum(week_days)
     day_times = day_times[1:] + day_times[:1]
-    time_of_day_data = [
-        {"name": f"🌞 {LM.t('Morning')}", "text": f"{day_times[0]} commits", "percent": round((day_times[0] / sum_day) * 100, 2)},
-        {"name": f"🌆 {LM.t('Daytime')}", "text": f"{day_times[1]} commits", "percent": round((day_times[1] / sum_day) * 100, 2)},
-        {"name": f"🌃 {LM.t('Evening')}", "text": f"{day_times[2]} commits", "percent": round((day_times[2] / sum_day) * 100, 2)},
-        {"name": f"🌙 {LM.t('Night')}", "text": f"{day_times[3]} commits", "percent": round((day_times[3] / sum_day) * 100, 2)},
-    ]
-    day_of_week_data = [
-        {"name": LM.t("Monday"), "text": f"{week_days[0]} commits", "percent": round((week_days[0] / sum_week) * 100, 2)},
-        {"name": LM.t("Tuesday"), "text": f"{week_days[1]} commits", "percent": round((week_days[1] / sum_week) * 100, 2)},
-        {"name": LM.t("Wednesday"), "text": f"{week_days[2]} commits", "percent": round((week_days[2] / sum_week) * 100, 2)},
-        {"name": LM.t("Thursday"), "text": f"{week_days[3]} commits", "percent": round((week_days[3] / sum_week) * 100, 2)},
-        {"name": LM.t("Friday"), "text": f"{week_days[4]} commits", "percent": round((week_days[4] / sum_week) * 100, 2)},
-        {"name": LM.t("Saturday"), "text": f"{week_days[5]} commits", "percent": round((week_days[5] / sum_week) * 100, 2)},
-        {"name": LM.t("Sunday"), "text": f"{week_days[6]} commits", "percent": round((week_days[6] / sum_week) * 100, 2)},
-    ]
-
+
+    day_time_names = [f"{DAY_TIME_EMOJI[i]} {LM.t(DAY_TIME_NAMES[i])}" for i in range(len(day_times))]
+    day_time_texts = [f'{day_time} commits' for day_time in day_times]
+    day_time_percents = [round((day_time / sum_day) * 100, 2) for day_time in day_times]
     title = LM.t("I am an Early") if sum(day_times[0:2]) >= sum(day_times[2:4]) else LM.t("I am a Night")
-    stats += f"**{title}** \n\n```text\n{make_commit_list(time_of_day_data)}\n\n```\n"
+    stats += f"**{title}** \n\n```text\n{make_list(names=day_time_names, texts=day_time_texts, percents=day_time_percents, top_num=7)}\n\n```\n"
 
     if EM.SHOW_DAYS_OF_WEEK:
-        most_productive = max(day_of_week_data, key=lambda d: d["percent"])
-        stats += f"📅 **{LM.t('I am Most Productive on') % most_productive['name']}** \n\n```text\n{make_commit_list(day_of_week_data)}\n\n```\n"
+        week_day_names = [LM.t(week_day) for week_day in WEEK_DAY_NAMES]
+        week_day_texts = [f'{week_day} commits' for week_day in week_days]
+        week_day_percents = [round((week_day / sum_week) * 100, 2) for week_day in week_days]
+        title = LM.t("I am Most Productive on") % week_day_names[week_day_percents.index(max(week_day_percents))]
+        stats += f"📅 **{title}** \n\n```text\n{make_list(names=week_day_names, texts=week_day_texts, percents=week_day_percents, top_num=7)}\n\n```\n"
 
     return stats
 
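
Note: a standalone sketch of what the day-time section now hands to make_list(names=..., texts=..., percents=..., top_num=7), using invented commit counts and skipping the LM.t translation; the column widths and the early-bird/night-owl check mirror the lines in the hunk above.

```python
# Invented commit counts per day-time bucket: Morning, Daytime, Evening, Night.
day_times = [14, 48, 26, 12]
sum_day = sum(day_times)

DAY_TIME_EMOJI = ["🌞", "🌆", "🌃", "🌙"]
DAY_TIME_NAMES = ["Morning", "Daytime", "Evening", "Night"]

# The three parallel lists handed to make_list; labels stay untranslated here.
day_time_names = [f"{DAY_TIME_EMOJI[i]} {DAY_TIME_NAMES[i]}" for i in range(len(day_times))]
day_time_texts = [f"{day_time} commits" for day_time in day_times]
day_time_percents = [round((day_time / sum_day) * 100, 2) for day_time in day_times]

# Same row layout as make_list: 25-char name column, 20-char text column, bar, percent.
for name, text, percent in zip(day_time_names, day_time_texts, day_time_percents):
    filled = round(percent / 4)
    bar = "█" * filled + "░" * (25 - filled)
    print(f"{name[:25]}{' ' * (25 - len(name))}{text}{' ' * (20 - len(text))}{bar} {percent:05.2f} % ")

# Headline choice: morning + daytime commits vs evening + night commits.
title = "I am an Early" if sum(day_times[0:2]) >= sum(day_times[2:4]) else "I am a Night"
print(title)  # untranslated LM.t keys from the hunk above
```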
@@ -109,14 +97,10 @@ def make_language_per_repo_list(result: Dict) -> str:
         language_count[language] = language_count.get(language, {"count": 0})
         language_count[language]["count"] += 1
 
-    data = list()
-    for language in language_count.keys():
-        data.append({
-            "name": language,
-            "text": f"{language_count[language]['count']} {'repo' if language_count[language]['count'] == 1 else 'repos'}",
-            "percent": round(language_count[language]["count"] / len(repos_with_language) * 100, 2)
-        })
+    names = list(language_count.keys())
+    texts = [f"{language_count[lang]['count']} {'repo' if language_count[lang]['count'] == 1 else 'repos'}" for lang in names]
+    percents = [round(language_count[lang]["count"] / len(repos_with_language) * 100, 2) for lang in names]
 
     top_language = max(list(language_count.keys()), key=lambda x: language_count[x]["count"])
     title = f"**{LM.t('I Mostly Code in') % top_language}** \n\n" if len(repos_with_language) > 0 else ""
-    return f"{title}```text\n{make_list(data)}\n```\n\n"
+    return f"{title}```text\n{make_list(names=names, texts=texts, percents=percents)}\n```\n\n"

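
Note: the same name/text/percent derivation for the language-per-repo section, sketched with a made-up language_count and a stand-in repository list so the singular/plural 'repo'/'repos' handling from the committed comprehensions is visible.

```python
# Made-up language -> repository count mapping; only the length of the repo list matters here.
language_count = {"Python": {"count": 7}, "TypeScript": {"count": 1}, "Go": {"count": 2}}
repos_with_language = list(range(10))

# The three comprehensions from the hunk above.
names = list(language_count.keys())
texts = [f"{language_count[lang]['count']} {'repo' if language_count[lang]['count'] == 1 else 'repos'}" for lang in names]
percents = [round(language_count[lang]["count"] / len(repos_with_language) * 100, 2) for lang in names]

print(names)     # ['Python', 'TypeScript', 'Go']
print(texts)     # ['7 repos', '1 repo', '2 repos']
print(percents)  # [70.0, 10.0, 20.0]
```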
sources/loc.py (deleted file)
@@ -1,77 +0,0 @@
-import re
-from asyncio import sleep
-
-from github import Github, InputGitAuthor, AuthenticatedUser
-import datetime
-
-from manager_download import DownloadManager as DM
-from graphics_chart_drawer import create_loc_graph
-
-
-class LinesOfCode:
-    GRAPH_PATH = "assets/bar_graph.png"
-
-    def __init__(self, user: AuthenticatedUser, ghtoken, repositoryData, ignored_repos):
-        self.g = Github(ghtoken)
-        self.user = user
-        self.repositoryData = repositoryData
-        self.ignored_repos = ignored_repos
-
-    async def calculateLoc(self):
-        result = self.repositoryData
-        yearly_data = {}
-        total = len(result['data']['user']['repositories']['edges'])
-        for ind, repo in enumerate(result['data']['user']['repositories']['edges']):
-            if repo['node']['name'] not in self.ignored_repos:
-                print(f"{ind}/{total}", "Retrieving repo:", repo['node']["owner"]["login"], repo['node']['name'])
-                await self.getCommitStat(repo['node'], yearly_data)
-                await sleep(0.7)
-        return yearly_data
-
-    async def plotLoc(self, yearly_data):
-        await create_loc_graph(yearly_data, LinesOfCode.GRAPH_PATH)
-        self.pushChart()
-
-    def getQuarter(self, timeStamp):
-        month = datetime.datetime.fromisoformat(timeStamp).month
-        if month >= 1 and month <= 3:
-            return 1
-        elif month >= 4 and month <= 6:
-            return 2
-        elif month >= 7 and month <= 9:
-            return 3
-        elif month >= 10 and month <= 12:
-            return 4
-
-    async def getCommitStat(self, repoDetails, yearly_data):
-        commit_data = await DM.get_remote_graphql("repo_commit_list", owner=repoDetails["owner"]["login"], name=repoDetails['name'], id=self.user.node_id)
-
-        if commit_data["data"]["repository"] is None:
-            print("\tSkipping:", repoDetails['name'])
-            return
-
-        for commit in [commit["node"] for branch in commit_data["data"]["repository"]["refs"]["edges"] for commit in branch["node"]["target"]["history"]["edges"]]:
-            date = re.search(r'\d+-\d+-\d+', commit["committedDate"]).group(0)
-            curr_year = datetime.datetime.fromisoformat(date).year
-            quarter = self.getQuarter(date)
-
-            if repoDetails['primaryLanguage'] is not None:
-                if curr_year not in yearly_data:
-                    yearly_data[curr_year] = {}
-                if quarter not in yearly_data[curr_year]:
-                    yearly_data[curr_year][quarter] = {}
-                if repoDetails['primaryLanguage']['name'] not in yearly_data[curr_year][quarter]:
-                    yearly_data[curr_year][quarter][repoDetails['primaryLanguage']['name']] = 0
-                yearly_data[curr_year][quarter][repoDetails['primaryLanguage']['name']] += (commit["additions"] - commit["deletions"])
-
-
-    def pushChart(self):
-        repo = self.g.get_repo(f"{self.user.login}/{self.user.login}")
-        committer = InputGitAuthor('readme-bot', '41898282+github-actions[bot]@users.noreply.github.com')
-        with open(LinesOfCode.GRAPH_PATH, 'rb') as input_file:
-            data = input_file.read()
-        try:
-            contents = repo.get_contents(LinesOfCode.GRAPH_PATH)
-            repo.update_file(contents.path, "Charts Updated", data, contents.sha, committer=committer)
-        except Exception as e:
-            repo.create_file(LinesOfCode.GRAPH_PATH, "Charts Added", data, committer=committer)

sources/main.py
@@ -2,7 +2,6 @@
 Readme Development Metrics With waka time progress
 """
 from asyncio import run
-from typing import Dict, Tuple
 from datetime import datetime
 from urllib.parse import quote
 
@@ -12,8 +11,9 @@ from manager_download import init_download_manager, DownloadManager as DM
 from manager_environment import EnvironmentManager as EM
 from manager_github import init_github_manager, GitHubManager as GHM
 from manager_localization import init_localization_manager, LocalizationManager as LM
-from loc import LinesOfCode  # TODO: refactor
-from graphics_list_formatter import make_list, generate_commit_list, make_language_per_repo_list
+from graphics_chart_drawer import create_loc_graph
+from yearly_commit_calculator import GRAPH_PATH, calculate_yearly_commit_data
+from graphics_list_formatter import make_list, make_commit_day_time_list, make_language_per_repo_list
 
 
 async def get_waka_time_stats() -> str:
@@ -21,7 +21,7 @@ async def get_waka_time_stats() -> str:
 
     data = await DM.get_remote_json("waka_latest")
     if EM.SHOW_COMMIT:
-        stats += f"{await generate_commit_list(data['data']['timezone'])}\n\n"
+        stats += f"{await make_commit_day_time_list(data['data']['timezone'])}\n\n"
 
     if EM.SHOW_TIMEZONE or EM.SHOW_LANGUAGE or EM.SHOW_EDITORS or EM.SHOW_PROJECTS or EM.SHOW_OS:
         no_activity = LM.t("No Activity Tracked This Week")
@@ -52,11 +52,6 @@ async def get_waka_time_stats() -> str:
     return stats
 
 
-async def get_yearly_data(repository_list) -> Tuple[LinesOfCode, Dict]:  # TODO: refactor!
-    loc = LinesOfCode(GHM.USER, EM.GH_TOKEN, repository_list, EM.IGNORED_REPOS)
-    return loc, await loc.calculateLoc()
-
-
 async def get_short_github_info():
     stats = f"**🐱 {LM.t('My GitHub Data')}** \n\n"
 
@@ -101,9 +96,9 @@ async def get_stats() -> str:
     repositories = await DM.get_remote_graphql("user_repository_list", username=GHM.USER.login, id=GHM.USER.node_id)
 
     if EM.SHOW_LINES_OF_CODE or EM.SHOW_LOC_CHART:
-        loc, yearly_data = await get_yearly_data(repositories)
+        yearly_data = await calculate_yearly_commit_data(repositories)
     else:
-        loc, yearly_data = (None, dict())
+        yearly_data = dict()
 
     if EM.SHOW_TOTAL_CODE_TIME:
         data = await DM.get_remote_json("waka_all")
@@ -127,8 +122,9 @@ async def get_stats() -> str:
     stats += f"{make_language_per_repo_list(repositories)}\n\n"
 
     if EM.SHOW_LOC_CHART:
-        await loc.plotLoc(yearly_data)
-        chart_path = f"{GHM.USER.login}/{GHM.USER.login}/{GHM.branch()}/{LinesOfCode.GRAPH_PATH}"
+        await create_loc_graph(yearly_data, GRAPH_PATH)
+        GHM.update_chart(GRAPH_PATH)
+        chart_path = f"{GHM.USER.login}/{GHM.USER.login}/{GHM.branch()}/{GRAPH_PATH}"
         stats += '**' + LM.t('Timeline') + '**\n\n'
         stats += f"\n\n"
 
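
Note: in the new flow, create_loc_graph draws the PNG, GHM.update_chart commits it to the README repository, and chart_path only builds the owner/repo/branch/file string that gets embedded afterwards. A tiny sketch with placeholder values (standing in for GHM.USER.login, GHM.branch() and GRAPH_PATH) shows what that string looks like.

```python
# Placeholder values standing in for GHM.USER.login, GHM.branch() and the module-level GRAPH_PATH.
user_login = "octocat"
branch = "master"
GRAPH_PATH = "assets/bar_graph.png"

# Same f-string as the committed line: <owner>/<repository>/<branch>/<path within the repository>.
chart_path = f"{user_login}/{user_login}/{branch}/{GRAPH_PATH}"
print(chart_path)  # octocat/octocat/master/assets/bar_graph.png
```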
@@ -156,5 +152,4 @@ if __name__ == '__main__':
 # TODO: check function and variable naming
 # TODO: check type hints
 # TODO: sorted to max / min
-# TODO: add 1 to repo count
 # TODO: drop not awaited coroutines

sources/manager_environment.py
@@ -33,4 +33,4 @@ class EnvironmentManager:
     LOCALE = getenv('INPUT_LOCALE', "en")
     UPDATED_DATE_FORMAT = getenv('INPUT_UPDATED_DATE_FORMAT', "%d/%m/%Y %H:%M:%S")
     IGNORED_REPOS = getenv('INPUT_IGNORED_REPOS', "").replace(' ', '').split(',')
-    SYMBOL_VERSION = int(getenv('INPUT_SYMBOL_VERSION'))  # TODO: enum?
+    SYMBOL_VERSION = int(getenv('INPUT_SYMBOL_VERSION'))

sources/manager_github.py
@@ -1,7 +1,7 @@
 from base64 import b64decode
 from re import sub
 
-from github import Github, AuthenticatedUser, Repository, ContentFile, InputGitAuthor
+from github import Github, AuthenticatedUser, Repository, ContentFile, InputGitAuthor, UnknownObjectException
 
 from manager_environment import EnvironmentManager as EM
 
@@ -75,3 +75,13 @@ class GitHubManager:
             return True
         else:
             return False
+
+    @staticmethod
+    def update_chart(chart_path: str):
+        with open(chart_path, "rb") as input_file:
+            data = input_file.read()
+        try:
+            contents = GitHubManager.REPO.get_contents(chart_path)
+            GitHubManager.REPO.update_file(contents.path, "Charts Updated", data, contents.sha, committer=GitHubManager._get_author())
+        except UnknownObjectException:
+            GitHubManager.REPO.create_file(chart_path, "Charts Added", data, committer=GitHubManager._get_author())

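
Note: update_chart relies on PyGithub's behaviour that Repository.get_contents raises UnknownObjectException when the file does not yet exist, so the except branch creates the chart on the first run and updates it in place afterwards. Below is a generic sketch of that create-or-update pattern with placeholder repository and committer values, not the project's own manager.

```python
from github import Github, InputGitAuthor, UnknownObjectException


def push_file(repo, path: str, data: bytes, committer: InputGitAuthor) -> None:
    """Create the file if it is missing, otherwise update it in place."""
    try:
        contents = repo.get_contents(path)  # raises UnknownObjectException on a 404
        repo.update_file(contents.path, "Charts Updated", data, contents.sha, committer=committer)
    except UnknownObjectException:
        repo.create_file(path, "Charts Added", data, committer=committer)


# Hypothetical usage -- the token and repository name are placeholders.
# gh = Github("<token>")
# repo = gh.get_repo("octocat/octocat")
# with open("assets/bar_graph.png", "rb") as f:
#     push_file(repo, "assets/bar_graph.png", f.read(), InputGitAuthor("readme-bot", "bot@example.com"))
```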
sources/yearly_commit_calculator.py (new file, 42 lines added)
@@ -0,0 +1,42 @@
+from re import search
+from datetime import datetime
+from typing import Dict
+
+from manager_download import DownloadManager as DM
+from manager_environment import EnvironmentManager as EM
+from manager_github import GitHubManager as GHM
+
+
+GRAPH_PATH = "assets/bar_graph.png"
+
+
+async def calculate_yearly_commit_data(repository_data: Dict) -> Dict:
+    yearly_data = dict()
+    total = len(repository_data["data"]["user"]["repositories"]["edges"])
+    for ind, repo in enumerate(repository_data["data"]["user"]["repositories"]["edges"]):
+        if repo["node"]["name"] not in EM.IGNORED_REPOS:
+            print(f"{ind + 1}/{total}", "Retrieving repo:", repo["node"]["owner"]["login"], repo["node"]["name"])
+            await update_yearly_data_with_commit_stats(repo["node"], yearly_data)
+    return yearly_data
+
+
+async def update_yearly_data_with_commit_stats(repo_details: Dict, yearly_data: Dict) -> Dict:
+    commit_data = await DM.get_remote_graphql("repo_commit_list", owner=repo_details["owner"]["login"], name=repo_details["name"], id=GHM.USER.node_id)
+
+    if commit_data["data"]["repository"] is None:
+        print(f"\tSkipping repo: {repo_details['name']}")
+        return dict()
+
+    for commit in [commit["node"] for branch in commit_data["data"]["repository"]["refs"]["edges"] for commit in branch["node"]["target"]["history"]["edges"]]:
+        date = search(r"\d+-\d+-\d+", commit["committedDate"]).group()
+        curr_year = datetime.fromisoformat(date).year
+        quarter = (datetime.fromisoformat(date).month - 1) // 3 + 1
+
+        if repo_details["primaryLanguage"] is not None:
+            if curr_year not in yearly_data:
+                yearly_data[curr_year] = dict()
+            if quarter not in yearly_data[curr_year]:
+                yearly_data[curr_year][quarter] = dict()
+            if repo_details["primaryLanguage"]["name"] not in yearly_data[curr_year][quarter]:
+                yearly_data[curr_year][quarter][repo_details["primaryLanguage"]["name"]] = 0
+            yearly_data[curr_year][quarter][repo_details["primaryLanguage"]["name"]] += (commit["additions"] - commit["deletions"])
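
Note: the getQuarter if/elif ladder from the deleted loc.py is replaced here by a single expression; a quick check that (month - 1) // 3 + 1 yields the same month-to-quarter mapping.

```python
def quarter_of(month: int) -> int:
    # Same expression as update_yearly_data_with_commit_stats:
    # months 1-3 -> 1, 4-6 -> 2, 7-9 -> 3, 10-12 -> 4.
    return (month - 1) // 3 + 1


print([quarter_of(m) for m in range(1, 13)])
# [1, 1, 1, 2, 2, 2, 3, 3, 3, 4, 4, 4]
```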