From 243e0488132a4ab60d99beb710cd51f1b465453f Mon Sep 17 00:00:00 2001 From: pseusys Date: Thu, 16 Feb 2023 23:47:11 +0100 Subject: [PATCH 01/23] empty colors file removed --- sources/colors.json | 0 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 sources/colors.json diff --git a/sources/colors.json b/sources/colors.json deleted file mode 100644 index e69de29..0000000 From 3e7044d5d9d9788f8cd5e54d75008e0f12ced829 Mon Sep 17 00:00:00 2001 From: pseusys Date: Thu, 16 Feb 2023 23:50:26 +0100 Subject: [PATCH 02/23] NodeJS mentions removed --- Makefile | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/Makefile b/Makefile index fcec175..0d47e76 100644 --- a/Makefile +++ b/Makefile @@ -9,7 +9,7 @@ help: @ # Print help commands echo "Welcome to 'waka-readme-stats' GitHub Actions!" echo "The action can be tested locally with: 'make run'." - echo "NB! For local testing Python version 3.6+ and NodeJS version between 14 and 16 are required." + echo "NB! For local testing Python version 3.6+ is required." echo "The action image can be built locally with: 'make container'." echo "NB! For local container building Docker version 20+ is required." echo "The action directory and image can be cleaned with: 'make clean'." 
@@ -21,13 +21,8 @@ venv: pip install --upgrade pip pip install -r requirements.txt -node_modules: - @ # Install NodeJS dependencies - npm i npm@next-8 - npm i vega vega-lite vega-cli canvas - -run-locally: venv node_modules +run-locally: venv @ # Run action locally source <(cat .env.example | sed 's/=/=/' | sed 's/^/export /') && python3 ./sources/main.py .PHONY: run-locally @@ -42,7 +37,6 @@ run-container: clean: @ # Clean all build files, including: libraries, package manager configs, docker images and containers rm -rf venv - rm -rf node_modules rm -f package*.json docker rm -f waka-readme-stats 2>/dev/null || true docker rmi $(docker images | grep "waka-readme-stats") 2> /dev/null || true From ee1b2c0aca78f1ff31ef77d5a85ea118d1fd3ee2 Mon Sep 17 00:00:00 2001 From: pseusys Date: Thu, 16 Feb 2023 23:51:41 +0100 Subject: [PATCH 03/23] .env file read with Make --- Makefile | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/Makefile b/Makefile index 0d47e76..a93a6e9 100644 --- a/Makefile +++ b/Makefile @@ -2,7 +2,8 @@ .DEFAULT_GOAL = help SHELL = /bin/bash -PATH := venv/bin:node_modules/.bin:$(PATH) +ENV = .env.example +include $(ENV) help: @@ -24,13 +25,13 @@ venv: run-locally: venv @ # Run action locally - source <(cat .env.example | sed 's/=/=/' | sed 's/^/export /') && python3 ./sources/main.py + python3 ./sources/main.py .PHONY: run-locally run-container: @ # Run action in container docker build -t waka-readme-stats -f Dockerfile . 
- docker run --env-file .env.example waka-readme-stats + docker run --env-file $(ENV) waka-readme-stats .PHONY: run-container From 904cb7bd91e3bfb24bab5f972a2a489c5ab2f7d0 Mon Sep 17 00:00:00 2001 From: pseusys Date: Thu, 16 Feb 2023 23:56:24 +0100 Subject: [PATCH 04/23] .env.example tokens combined --- .env.example | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.env.example b/.env.example index c273a36..5200ba7 100644 --- a/.env.example +++ b/.env.example @@ -1,4 +1,5 @@ INPUT_WAKATIME_API_KEY=YOUR_WAKATIME_API_KEY +INPUT_GH_TOKEN=YOUR_GITHUB_TOKEN_KEY INPUT_PUSH_BRANCH_NAME=main INPUT_SECTION_NAME=waka INPUT_SHOW_TIMEZONE=True @@ -6,7 +7,6 @@ INPUT_SHOW_PROJECTS=True INPUT_SHOW_EDITORS=True INPUT_SHOW_OS=True INPUT_SHOW_LANGUAGE=True -INPUT_GH_TOKEN=YOUR_GITHUB_TOKEN_KEY INPUT_SYMBOL_VERSION=1 INPUT_SHOW_LINES_OF_CODE=True INPUT_SHOW_LOC_CHART=True From c9905b51b1eb9424f0a518db022b32153e8faf6c Mon Sep 17 00:00:00 2001 From: pseusys Date: Fri, 17 Feb 2023 00:26:44 +0100 Subject: [PATCH 05/23] dockerfile build environment clarified --- Dockerfile | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Dockerfile b/Dockerfile index 5fa5d87..7a2f03f 100644 --- a/Dockerfile +++ b/Dockerfile @@ -3,10 +3,10 @@ FROM python:3.9-alpine ENV PYTHONUNBUFFERED 1 ENV PYTHONDONTWRITEBYTECODE 1 -WORKDIR /waka-readme-stats +RUN mkdir -p /waka-readme-stats -ADD requirements.txt ./requirements.txt -RUN apk add --no-cache g++ jpeg-dev zlib-dev libjpeg make && pip3 install -r requirements.txt +ADD requirements.txt /waka-readme-stats/requirements.txt +RUN apk add --no-cache g++ jpeg-dev zlib-dev libjpeg make && pip3 install -r /waka-readme-stats/requirements.txt -ADD sources/* ./ +ADD sources/* /waka-readme-stats/ ENTRYPOINT python3 /waka-readme-stats/main.py From 52318d9afeb5798ec14e712dc047dbdb96d41db8 Mon Sep 17 00:00:00 2001 From: pseusys Date: Fri, 17 Feb 2023 00:38:26 +0100 Subject: [PATCH 06/23] generated files moved to 'assets' dir, graph 
drawing fixed --- .gitignore | 7 +- Makefile | 3 +- .../{make_bar_graph.py => graph_drawer.py} | 138 +++++++++--------- sources/loc.py | 11 +- sources/main.py | 2 +- 5 files changed, 79 insertions(+), 82 deletions(-) rename sources/{make_bar_graph.py => graph_drawer.py} (64%) diff --git a/.gitignore b/.gitignore index 082302f..53aab97 100644 --- a/.gitignore +++ b/.gitignore @@ -2,19 +2,14 @@ *.env # Generated graph images: -*.png +assets/ # Library roots: -node_modules/ venv/ # Python caches: __pycache__/ -# Package manager configuration files: -package.json -package-lock.json - # IDE configuration files: .vscode .idea diff --git a/Makefile b/Makefile index a93a6e9..8f0d266 100644 --- a/Makefile +++ b/Makefile @@ -31,13 +31,14 @@ run-locally: venv run-container: @ # Run action in container docker build -t waka-readme-stats -f Dockerfile . - docker run --env-file $(ENV) waka-readme-stats + docker run --env-file $(ENV) -v ./assets/:/waka-readme-stats/assets/ waka-readme-stats .PHONY: run-container clean: @ # Clean all build files, including: libraries, package manager configs, docker images and containers rm -rf venv + rm -rf assets rm -f package*.json docker rm -f waka-readme-stats 2>/dev/null || true docker rmi $(docker images | grep "waka-readme-stats") 2> /dev/null || true diff --git a/sources/make_bar_graph.py b/sources/graph_drawer.py similarity index 64% rename from sources/make_bar_graph.py rename to sources/graph_drawer.py index 1081102..5c7dfc4 100644 --- a/sources/make_bar_graph.py +++ b/sources/graph_drawer.py @@ -1,69 +1,69 @@ -from typing import Dict -from os.path import join, dirname -from json import load - -import numpy as np -import matplotlib.patches as mpatches -import matplotlib.pyplot as plt - -from download_manager import DownloadManager - - -MAX_LANGUAGES = 5 - - -async def build_graph(yearly_data: Dict) -> str: - """ - Draws graph of lines of code written by user by quarters of years. 
- Picks top `MAX_LANGUAGES` languages from each quarter only. - - :param yearly_data: GitHub user yearly data. - :return: String, path to graph file. - """ - colors = await DownloadManager.get_remote_yaml("linguist") - - languages_all_loc = dict() - years = len(yearly_data.keys()) - year_indexes = np.arange(years) - - for i, y in enumerate(sorted(yearly_data.keys())): - for q in yearly_data[y].keys(): - langs = sorted(yearly_data[y][q].keys(), key=lambda l: yearly_data[y][q][l], reverse=True)[0:MAX_LANGUAGES] - - for lang in langs: - if lang not in languages_all_loc: - languages_all_loc[lang] = np.array([[0] * years] * 4) - languages_all_loc[lang][q - 1][i] = yearly_data[y][q][lang] - - fig = plt.figure() - ax = fig.add_axes([0, 0, 1.5, 1]) - - language_handles = [] - cumulative = np.array([[0] * years] * 4) - - for key, value in languages_all_loc.items(): - color = colors[key]["color"] if colors[key]["color"] is not None else "w" - language_handles += [mpatches.Patch(color=color, label=key)] - - for quarter in range(4): - ax.bar(year_indexes + quarter * 0.21, value[quarter], 0.2, bottom=cumulative[quarter], color=color) - cumulative[quarter] = np.add(cumulative[quarter], value[quarter]) - - ax.set_ylabel("LOC added", fontdict=dict(weight="bold")) - ax.set_xticks(np.array([np.arange(i, i + 0.84, step=0.21) for i in year_indexes]).flatten(), labels=["Q1", "Q2", "Q3", "Q4"] * years) - - sax = ax.secondary_xaxis("top") - sax.set_xticks(year_indexes + 0.42, labels=sorted(yearly_data.keys())) - sax.spines["top"].set_visible(False) - - ax.legend(title="Language", handles=language_handles, loc="upper left", bbox_to_anchor=(1, 1), framealpha=0, title_fontproperties=dict(weight="bold")) - - sax.tick_params(axis="both", length=0) - sax.spines["top"].set_visible(False) - ax.spines["top"].set_visible(False) - ax.spines["right"].set_visible(False) - - plt.ylim(0, 1.05 * np.amax(cumulative)) - plt.savefig("bar_graph.png", bbox_inches="tight") - plt.close(fig) - return 
"bar_graph.png" +from typing import Dict + +import numpy as np +import matplotlib.patches as mpatches +import matplotlib.pyplot as plt + +from download_manager import DownloadManager + + +MAX_LANGUAGES = 5 + + +async def create_loc_graph(yearly_data: Dict, save_path: str): + """ + Draws graph of lines of code written by user by quarters of years. + Picks top `MAX_LANGUAGES` languages from each quarter only. + + :param yearly_data: GitHub user yearly data. + :param save_path: Path to save the graph file. + """ + colors = await DownloadManager.get_remote_yaml("linguist") + + years = len(yearly_data.keys()) + year_indexes = np.arange(years) + + all_languages = dict() + for year in yearly_data.values(): + for quarter in year.values(): + for language, loc in quarter.items(): + all_languages[language] = all_languages.get(language, 0) + loc + + top_languages_names = sorted(all_languages.keys(), key=lambda l: all_languages[l], reverse=True)[0:MAX_LANGUAGES] + top_languages = {language: np.array([[0] * years] * 4) for language in top_languages_names} + for index, year in enumerate(sorted(yearly_data.keys())): + for quarter, languages in yearly_data[year].items(): + for language, loc in {(lang, loc) for lang, loc in languages.items() if lang in top_languages}: + top_languages[language][quarter - 1][index] = yearly_data[year][quarter][language] + + fig = plt.figure() + ax = fig.add_axes([0, 0, 1.5, 1]) + + language_handles = [] + cumulative = np.array([[0] * years] * 4) + + for key, value in top_languages.items(): + color = colors[key]["color"] if colors[key]["color"] is not None else "w" + language_handles += [mpatches.Patch(color=color, label=key)] + + for quarter in range(4): + ax.bar(year_indexes + quarter * 0.21, value[quarter], 0.2, bottom=cumulative[quarter], color=color) + cumulative[quarter] = np.add(cumulative[quarter], value[quarter]) + + ax.set_ylabel("LOC added", fontdict=dict(weight="bold")) + ax.set_xticks(np.array([np.arange(i, i + 0.84, step=0.21) for i in 
year_indexes]).flatten(), labels=["Q1", "Q2", "Q3", "Q4"] * years) + + sax = ax.secondary_xaxis("top") + sax.set_xticks(year_indexes + 0.42, labels=sorted(yearly_data.keys())) + sax.spines["top"].set_visible(False) + + ax.legend(title="Language", handles=language_handles, loc="upper left", bbox_to_anchor=(1, 1), framealpha=0, title_fontproperties=dict(weight="bold")) + + sax.tick_params(axis="both", length=0) + sax.spines["top"].set_visible(False) + ax.spines["top"].set_visible(False) + ax.spines["right"].set_visible(False) + + plt.ylim(0, 1.05 * np.amax(cumulative)) + plt.savefig(save_path, bbox_inches="tight") + plt.close(fig) diff --git a/sources/loc.py b/sources/loc.py index 7b5cfb1..e1eca1e 100644 --- a/sources/loc.py +++ b/sources/loc.py @@ -5,10 +5,11 @@ from github import Github, InputGitAuthor, AuthenticatedUser import datetime from download_manager import DownloadManager -from make_bar_graph import build_graph +from graph_drawer import create_loc_graph class LinesOfCode: + GRAPH_PATH = "assets/bar_graph.png" def __init__(self, user: AuthenticatedUser, ghtoken, repositoryData, ignored_repos): self.g = Github(ghtoken) @@ -28,7 +29,7 @@ class LinesOfCode: return yearly_data async def plotLoc(self, yearly_data): - await build_graph(yearly_data) + await create_loc_graph(yearly_data, LinesOfCode.GRAPH_PATH) self.pushChart() def getQuarter(self, timeStamp): @@ -67,10 +68,10 @@ class LinesOfCode: def pushChart(self): repo = self.g.get_repo(f"{self.user.login}/{self.user.login}") committer = InputGitAuthor('readme-bot', '41898282+github-actions[bot]@users.noreply.github.com') - with open('bar_graph.png', 'rb') as input_file: + with open(LinesOfCode.GRAPH_PATH, 'rb') as input_file: data = input_file.read() try: - contents = repo.get_contents("charts/bar_graph.png") + contents = repo.get_contents(LinesOfCode.GRAPH_PATH) repo.update_file(contents.path, "Charts Updated", data, contents.sha, committer=committer) except Exception as e: - 
repo.create_file("charts/bar_graph.png", "Charts Added", data, committer=committer) + repo.create_file(LinesOfCode.GRAPH_PATH, "Charts Added", data, committer=committer) diff --git a/sources/main.py b/sources/main.py index a8f78f3..841d5e1 100644 --- a/sources/main.py +++ b/sources/main.py @@ -383,7 +383,7 @@ async def get_stats(github) -> str: if showLocChart.lower() in truthy: stats += '**' + translate['Timeline'] + '**\n\n' branch_name = github.get_repo(f'{user.login}/{user.login}').default_branch - stats = stats + '![Chart not found](https://raw.githubusercontent.com/' + user.login + '/' + user.login + '/' + branch_name + '/charts/bar_graph.png) \n\n' + stats = stats + '![Chart not found](https://raw.githubusercontent.com/' + user.login + '/' + user.login + '/' + branch_name + '/' + LinesOfCode.GRAPH_PATH + ') \n\n' if show_updated_date.lower() in truthy: now = datetime.datetime.utcnow() From e8a1770feb2e2a3c223ac982c25b0f596845f7e2 Mon Sep 17 00:00:00 2001 From: pseusys Date: Fri, 17 Feb 2023 00:44:46 +0100 Subject: [PATCH 07/23] generated files moved to 'assets' dir --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 7a2f03f..eebe2f4 100644 --- a/Dockerfile +++ b/Dockerfile @@ -3,7 +3,7 @@ FROM python:3.9-alpine ENV PYTHONUNBUFFERED 1 ENV PYTHONDONTWRITEBYTECODE 1 -RUN mkdir -p /waka-readme-stats +RUN mkdir -p /waka-readme-stats/assets ADD requirements.txt /waka-readme-stats/requirements.txt RUN apk add --no-cache g++ jpeg-dev zlib-dev libjpeg make && pip3 install -r /waka-readme-stats/requirements.txt From 8e675eaafd1c06edc2850d58834ef1e714d338a2 Mon Sep 17 00:00:00 2001 From: pseusys Date: Fri, 17 Feb 2023 15:34:26 +0100 Subject: [PATCH 08/23] code style applied to main --- Dockerfile | 2 +- Makefile | 5 +- action.yml | 28 +- requirements.txt | 22 +- ...aph_drawer.py => graphics_chart_drawer.py} | 37 +- sources/graphics_list_formatter.py | 122 ++++ sources/loc.py | 6 +- sources/main.py | 532 
++++-------------- ...ownload_manager.py => manager_download.py} | 23 +- sources/manager_environment.py | 36 ++ sources/manager_github.py | 77 +++ sources/manager_localization.py | 25 + 12 files changed, 442 insertions(+), 473 deletions(-) rename sources/{graph_drawer.py => graphics_chart_drawer.py} (53%) create mode 100644 sources/graphics_list_formatter.py rename sources/{download_manager.py => manager_download.py} (93%) create mode 100644 sources/manager_environment.py create mode 100644 sources/manager_github.py create mode 100644 sources/manager_localization.py diff --git a/Dockerfile b/Dockerfile index eebe2f4..06a4645 100644 --- a/Dockerfile +++ b/Dockerfile @@ -9,4 +9,4 @@ ADD requirements.txt /waka-readme-stats/requirements.txt RUN apk add --no-cache g++ jpeg-dev zlib-dev libjpeg make && pip3 install -r /waka-readme-stats/requirements.txt ADD sources/* /waka-readme-stats/ -ENTRYPOINT python3 /waka-readme-stats/main.py +ENTRYPOINT cd /waka-readme-stats/ && python3 main.py diff --git a/Makefile b/Makefile index 8f0d266..f07347f 100644 --- a/Makefile +++ b/Makefile @@ -1,6 +1,8 @@ .ONESHELL: .DEFAULT_GOAL = help -SHELL = /bin/bash +.EXPORT_ALL_VARIABLES: + +PATH := venv/bin:$(PATH) ENV = .env.example include $(ENV) @@ -25,6 +27,7 @@ venv: run-locally: venv @ # Run action locally + mkdir ./assets/ 2>/dev/null || true python3 ./sources/main.py .PHONY: run-locally diff --git a/action.yml b/action.yml index 2e42fc5..4a34935 100644 --- a/action.yml +++ b/action.yml @@ -82,20 +82,20 @@ inputs: description: "Shows the short facts" default: "True" - LOCALE: + SHOW_UPDATED_DATE: required: false - description: "Show stats in your own language" - default: "en" + description: "Show updated date" + default: "True" + + SHOW_TOTAL_CODE_TIME: + required: false + description: "Show Total Time you have coded" + default: "True" COMMIT_BY_ME: required: false description: "Git commit with your own name and email" default: "False" - - IGNORED_REPOS: - required: false - 
description: "Repos you don't want to be counted" - default: "" COMMIT_MESSAGE: required: false @@ -112,20 +112,20 @@ inputs: description: "Git commit custom email" default: "" - SHOW_UPDATED_DATE: + LOCALE: required: false - description: "Show updated date" - default: "True" + description: "Show stats in your own language" + default: "en" UPDATED_DATE_FORMAT: required: false description: "Updated date format" default: "%d/%m/%Y %H:%M:%S" - SHOW_TOTAL_CODE_TIME: + IGNORED_REPOS: required: false - description: "Show Total Time you have coded" - default: "True" + description: "Repos you don't want to be counted" + default: "" SYMBOL_VERSION: required: false diff --git a/requirements.txt b/requirements.txt index b987f54..d9cebe4 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,8 +1,14 @@ -PyGithub==1.54.1 -matplotlib==3.6.3 -numpy==1.24.2 -python-dotenv==0.17.0 -pytz==2021.1 -humanize==3.3.0 -httpx==0.23.3 -PyYAML==6.0 +# GitHub integration modules: +PyGithub~=1.57 + +# Markdown visualization modules: +pytz~=2022.7 +humanize~=4.6 + +# Graphs drawing modules: +matplotlib~=3.7 +numpy~=1.24 + +# Request making and response parsing modules: +httpx~=0.23 +PyYAML~=6.0 diff --git a/sources/graph_drawer.py b/sources/graphics_chart_drawer.py similarity index 53% rename from sources/graph_drawer.py rename to sources/graphics_chart_drawer.py index 5c7dfc4..312b42c 100644 --- a/sources/graph_drawer.py +++ b/sources/graphics_chart_drawer.py @@ -1,10 +1,10 @@ from typing import Dict -import numpy as np +from numpy import arange, array, add, amax import matplotlib.patches as mpatches import matplotlib.pyplot as plt -from download_manager import DownloadManager +from manager_download import DownloadManager as DM MAX_LANGUAGES = 5 @@ -18,40 +18,37 @@ async def create_loc_graph(yearly_data: Dict, save_path: str): :param yearly_data: GitHub user yearly data. :param save_path: Path to save the graph file. 
""" - colors = await DownloadManager.get_remote_yaml("linguist") + colors = await DM.get_remote_yaml("linguist") years = len(yearly_data.keys()) - year_indexes = np.arange(years) + year_indexes = arange(years) - all_languages = dict() - for year in yearly_data.values(): - for quarter in year.values(): - for language, loc in quarter.items(): - all_languages[language] = all_languages.get(language, 0) + loc + languages_all_loc = dict() + for i, y in enumerate(sorted(yearly_data.keys())): + for q in yearly_data[y].keys(): + langs = sorted(yearly_data[y][q].keys(), key=lambda l: yearly_data[y][q][l], reverse=True)[0:MAX_LANGUAGES] - top_languages_names = sorted(all_languages.keys(), key=lambda l: all_languages[l], reverse=True)[0:MAX_LANGUAGES] - top_languages = {language: np.array([[0] * years] * 4) for language in top_languages_names} - for index, year in enumerate(sorted(yearly_data.keys())): - for quarter, languages in yearly_data[year].items(): - for language, loc in {(lang, loc) for lang, loc in languages.items() if lang in top_languages}: - top_languages[language][quarter - 1][index] = yearly_data[year][quarter][language] + for lang in langs: + if lang not in languages_all_loc: + languages_all_loc[lang] = array([[0] * years] * 4) + languages_all_loc[lang][q - 1][i] = yearly_data[y][q][lang] fig = plt.figure() ax = fig.add_axes([0, 0, 1.5, 1]) language_handles = [] - cumulative = np.array([[0] * years] * 4) + cumulative = array([[0] * years] * 4) - for key, value in top_languages.items(): + for key, value in languages_all_loc.items(): color = colors[key]["color"] if colors[key]["color"] is not None else "w" language_handles += [mpatches.Patch(color=color, label=key)] for quarter in range(4): ax.bar(year_indexes + quarter * 0.21, value[quarter], 0.2, bottom=cumulative[quarter], color=color) - cumulative[quarter] = np.add(cumulative[quarter], value[quarter]) + cumulative[quarter] = add(cumulative[quarter], value[quarter]) ax.set_ylabel("LOC added", 
fontdict=dict(weight="bold")) - ax.set_xticks(np.array([np.arange(i, i + 0.84, step=0.21) for i in year_indexes]).flatten(), labels=["Q1", "Q2", "Q3", "Q4"] * years) + ax.set_xticks(array([arange(i, i + 0.84, step=0.21) for i in year_indexes]).flatten(), labels=["Q1", "Q2", "Q3", "Q4"] * years) sax = ax.secondary_xaxis("top") sax.set_xticks(year_indexes + 0.42, labels=sorted(yearly_data.keys())) @@ -64,6 +61,6 @@ async def create_loc_graph(yearly_data: Dict, save_path: str): ax.spines["top"].set_visible(False) ax.spines["right"].set_visible(False) - plt.ylim(0, 1.05 * np.amax(cumulative)) + plt.ylim(0, 1.05 * amax(cumulative)) plt.savefig(save_path, bbox_inches="tight") plt.close(fig) diff --git a/sources/graphics_list_formatter.py b/sources/graphics_list_formatter.py new file mode 100644 index 0000000..0a430af --- /dev/null +++ b/sources/graphics_list_formatter.py @@ -0,0 +1,122 @@ +from typing import Dict +from datetime import datetime + +from pytz import timezone, utc + +from manager_download import DownloadManager as DM +from manager_environment import EnvironmentManager as EM +from manager_github import GitHubManager as GHM +from manager_localization import LocalizationManager as LM + + +def make_graph(percent: float): + '''Make progress graph from API graph''' + if EM.SYMBOL_VERSION == 1: # version 1 + done_block = '█' + empty_block = '░' + elif EM.SYMBOL_VERSION == 2: # version 2 + done_block = '⣿' + empty_block = '⣀' + elif EM.SYMBOL_VERSION == 3: # version 3 + done_block = '⬛' + empty_block = '⬜' + else: + done_block = '█' # default is version 1 + empty_block = '░' + + pc_rnd = round(percent) + return f"{done_block * int(pc_rnd / 4)}{empty_block * int(25 - int(pc_rnd / 4))}" + + +def make_list(data: list): # TODO: add arg: sorted + '''Make List''' + data_list = [] + for l in data[:5]: + ln = len(l['name']) + ln_text = len(l['text']) + percent = "{:05.2f}".format(float(l['percent'])) + op = f"{l['name'][:25]}{' ' * (25 - ln)}{l['text']}{' ' * (20 - 
ln_text)}{make_graph(l['percent'])} {percent} % " + data_list.append(op) + return '\n'.join(data_list) + + +def make_commit_list(data: list): + '''Make List''' + data_list = [] + for l in data[:7]: + ln = len(l['name']) + ln_text = len(l['text']) + percent = "{:05.2f}".format(float(l['percent'])) + op = f"{l['name']}{' ' * ((15 - ln) + (11 - ln_text))}{l['text']}{' ' * (7)}{make_graph(l['percent'])} {percent} % " + data_list.append(op) + return '\n'.join(data_list) + + +async def generate_commit_list(time_zone: str) -> str: + stats = str() + + result = await DM.get_remote_graphql("repos_contributed_to", username=GHM.USER.login) + repos = [d for d in result["data"]["user"]["repositoriesContributedTo"]["nodes"] if d["isFork"] is False] + + day_times = [0] * 4 # 0 - 6, 6 - 12, 12 - 18, 18 - 24 + week_days = [0] * 7 # Monday, Tuesday, Wednesday, Thursday, Friday, Saturday, Sunday + + for repository in repos: + result = await DM.get_remote_graphql("repo_committed_dates", owner=repository["owner"]["login"], name=repository["name"], id=GHM.USER.node_id) + committed_dates = result["data"]["repository"]["defaultBranchRef"]["target"]["history"]["edges"] + + for committedDate in committed_dates: + local_date = datetime.strptime(committedDate["node"]["committedDate"], "%Y-%m-%dT%H:%M:%SZ") + date = local_date.replace(tzinfo=utc).astimezone(timezone(time_zone)) + + day_times[date.hour // 6] += 1 + week_days[date.isoweekday() - 1] += 1 + + sum_day = sum(day_times) + sum_week = sum(week_days) + day_times = day_times[1:] + day_times[:1] + time_of_day_data = [ + {"name": f"🌞 {LM.t('Morning')}", "text": f"{day_times[0]} commits", "percent": round((day_times[0] / sum_day) * 100, 2)}, + {"name": f"🌆 {LM.t('Daytime')}", "text": f"{day_times[1]} commits", "percent": round((day_times[1] / sum_day) * 100, 2)}, + {"name": f"🌃 {LM.t('Evening')}", "text": f"{day_times[2]} commits", "percent": round((day_times[2] / sum_day) * 100, 2)}, + {"name": f"🌙 {LM.t('Night')}", "text": f"{day_times[3]} 
commits", "percent": round((day_times[3] / sum_day) * 100, 2)}, + ] + day_of_week_data = [ + {"name": LM.t("Monday"), "text": f"{week_days[0]} commits", "percent": round((week_days[0] / sum_week) * 100, 2)}, + {"name": LM.t("Tuesday"), "text": f"{week_days[1]} commits", "percent": round((week_days[1] / sum_week) * 100, 2)}, + {"name": LM.t("Wednesday"), "text": f"{week_days[2]} commits", "percent": round((week_days[2] / sum_week) * 100, 2)}, + {"name": LM.t("Thursday"), "text": f"{week_days[3]} commits", "percent": round((week_days[3] / sum_week) * 100, 2)}, + {"name": LM.t("Friday"), "text": f"{week_days[4]} commits", "percent": round((week_days[4] / sum_week) * 100, 2)}, + {"name": LM.t("Saturday"), "text": f"{week_days[5]} commits", "percent": round((week_days[5] / sum_week) * 100, 2)}, + {"name": LM.t("Sunday"), "text": f"{week_days[6]} commits", "percent": round((week_days[6] / sum_week) * 100, 2)}, + ] + + title = LM.t("I am an Early") if sum(day_times[0:2]) >= sum(day_times[2:4]) else LM.t("I am a Night") + stats += f"**{title}** \n\n```text\n{make_commit_list(time_of_day_data)}\n\n```\n" + + if EM.SHOW_DAYS_OF_WEEK: + most_productive = max(day_of_week_data, key=lambda d: d["percent"]) + stats += f"📅 **{LM.t('I am Most Productive on') % most_productive['name']}** \n\n```text\n{make_commit_list(day_of_week_data)}\n\n```\n" + + return stats + + +def make_language_per_repo_list(result: Dict) -> str: + language_count = dict() + repos_with_language = [repo for repo in result["data"]["user"]["repositories"]["edges"] if repo["node"]["primaryLanguage"] is not None] + for repo in repos_with_language: + language = repo["node"]["primaryLanguage"]["name"] + language_count[language] = language_count.get(language, {"count": 0}) + language_count[language]["count"] += 1 + + data = list() + for language in language_count.keys(): + data.append({ + "name": language, + "text": f"{language_count[language]['count']} {'repo' if language_count[language]['count'] == 1 else 
'repos'}", + "percent": round(language_count[language]["count"] / len(repos_with_language) * 100, 2) + }) + + top_language = max(list(language_count.keys()), key=lambda x: language_count[x]["count"]) + title = f"**{LM.t('I Mostly Code in') % top_language}** \n\n" if len(repos_with_language) > 0 else "" + return f"{title}```text\n{make_list(data)}\n```\n\n" diff --git a/sources/loc.py b/sources/loc.py index e1eca1e..66c17d7 100644 --- a/sources/loc.py +++ b/sources/loc.py @@ -4,8 +4,8 @@ from asyncio import sleep from github import Github, InputGitAuthor, AuthenticatedUser import datetime -from download_manager import DownloadManager -from graph_drawer import create_loc_graph +from manager_download import DownloadManager as DM +from graphics_chart_drawer import create_loc_graph class LinesOfCode: @@ -44,7 +44,7 @@ class LinesOfCode: return 4 async def getCommitStat(self, repoDetails, yearly_data): - commit_data = await DownloadManager.get_remote_graphql("repository_commit_list", owner=repoDetails["owner"]["login"], name=repoDetails['name'], id=self.user.node_id) + commit_data = await DM.get_remote_graphql("repo_commit_list", owner=repoDetails["owner"]["login"], name=repoDetails['name'], id=self.user.node_id) if commit_data["data"]["repository"] is None: print("\tSkipping:", repoDetails['name']) diff --git a/sources/main.py b/sources/main.py index 841d5e1..4cd97da 100644 --- a/sources/main.py +++ b/sources/main.py @@ -1,456 +1,160 @@ -''' +""" Readme Development Metrics With waka time progress -''' -import re -import os -import base64 +""" from asyncio import run -from typing import Dict - -from pytz import timezone -import pytz -from github import Github, InputGitAuthor, AuthenticatedUser -import datetime - -from download_manager import init_download_manager, DownloadManager -from loc import LinesOfCode -import humanize +from typing import Dict, Tuple +from datetime import datetime from urllib.parse import quote -import json -import math -from dotenv import 
load_dotenv +from humanize import intword, naturalsize, intcomma, precisedelta -load_dotenv() - -START_COMMENT = f'' -END_COMMENT = f'' -listReg = f"{START_COMMENT}[\\s\\S]+{END_COMMENT}" - -waka_key = os.getenv('INPUT_WAKATIME_API_KEY') -ghtoken = os.getenv('INPUT_GH_TOKEN') -branchName = os.getenv('INPUT_PUSH_BRANCH_NAME') -showTimeZone = os.getenv('INPUT_SHOW_TIMEZONE') -showProjects = os.getenv('INPUT_SHOW_PROJECTS') -showEditors = os.getenv('INPUT_SHOW_EDITORS') -showOs = os.getenv('INPUT_SHOW_OS') -showCommit = os.getenv('INPUT_SHOW_COMMIT') -showLanguage = os.getenv('INPUT_SHOW_LANGUAGE') -show_loc = os.getenv('INPUT_SHOW_LINES_OF_CODE') -show_days_of_week = os.getenv('INPUT_SHOW_DAYS_OF_WEEK') -showLanguagePerRepo = os.getenv('INPUT_SHOW_LANGUAGE_PER_REPO') -showLocChart = os.getenv('INPUT_SHOW_LOC_CHART') -show_profile_view = os.getenv('INPUT_SHOW_PROFILE_VIEWS') -show_short_info = os.getenv('INPUT_SHOW_SHORT_INFO') -locale = os.getenv('INPUT_LOCALE') -commit_by_me = os.getenv('INPUT_COMMIT_BY_ME') -ignored_repos_name = str(os.getenv('INPUT_IGNORED_REPOS') or '').replace(' ', '').split(',') -show_updated_date = os.getenv('INPUT_SHOW_UPDATED_DATE') -updated_date_format = os.getenv('INPUT_UPDATED_DATE_FORMAT') -commit_message = os.getenv('INPUT_COMMIT_MESSAGE') -commit_username = os.getenv('INPUT_COMMIT_USERNAME') -commit_email = os.getenv('INPUT_COMMIT_EMAIL') -show_total_code_time = os.getenv('INPUT_SHOW_TOTAL_CODE_TIME') -symbol_version = os.getenv('INPUT_SYMBOL_VERSION').strip() -show_waka_stats = 'y' - -truthy = ['true', '1', 't', 'y', 'yes'] - -translate: Dict[str, str] -user: AuthenticatedUser +from manager_download import init_download_manager, DownloadManager as DM +from manager_environment import EnvironmentManager as EM +from manager_github import init_github_manager, GitHubManager as GHM +from manager_localization import init_localization_manager, LocalizationManager as LM +from loc import LinesOfCode # TODO: refactor +from 
graphics_list_formatter import make_list, generate_commit_list, make_language_per_repo_list -def millify(n): - millnames = ['', ' Thousand', ' Million', ' Billion', ' Trillion'] - n = float(n) - millidx = max(0, min(len(millnames) - 1, - int(math.floor(0 - if n == 0 - else math.log10(abs(n)) / 3)))) +async def get_waka_time_stats() -> str: + stats = str() - return '{:.0f}{}'.format(n / 10 ** (3 * millidx), millnames[millidx]) + data = await DM.get_remote_json("waka_latest") + if EM.SHOW_COMMIT: + stats += f"{await generate_commit_list(data['data']['timezone'])}\n\n" + if EM.SHOW_TIMEZONE or EM.SHOW_LANGUAGE or EM.SHOW_EDITORS or EM.SHOW_PROJECTS or EM.SHOW_OS: + no_activity = LM.t("No Activity Tracked This Week") + stats += f"📊 **{LM.t('This Week I Spend My Time On')}** \n\n```text\n" -def make_graph(percent: float): - '''Make progress graph from API graph''' - if (symbol_version == '1'): # version 1 - done_block = '█' - empty_block = '░' - elif (symbol_version == '2'): # version 2 - done_block = '⣿' - empty_block = '⣀' - elif (symbol_version == '3'): # version 3 - done_block = '⬛' - empty_block = '⬜' - else: - done_block = '█' # default is version 1 - empty_block = '░' + if EM.SHOW_TIMEZONE: + time_zone = data["data"]["timezone"] + stats += f"⌚︎ {LM.t('Timezone')}: {time_zone}\n\n" - pc_rnd = round(percent) - return f"{done_block * int(pc_rnd / 4)}{empty_block * int(25 - int(pc_rnd / 4))}" + if EM.SHOW_LANGUAGE: + lang_list = no_activity if len(data["data"]["languages"]) == 0 else make_list(data["data"]["languages"]) + stats += f"💬 {LM.t('Languages')}: \n{lang_list}\n\n" + if EM.SHOW_EDITORS: + edit_list = no_activity if len(data["data"]["editors"]) == 0 else make_list(data["data"]["editors"]) + stats += f"🔥 {LM.t('Editors')}: \n{edit_list}\n\n" -def make_list(data: list): - '''Make List''' - data_list = [] - for l in data[:5]: - ln = len(l['name']) - ln_text = len(l['text']) - percent = "{:05.2f}".format(float(l['percent'])) - op = f"{l['name'][:25]}{' ' * (25 - 
ln)}{l['text']}{' ' * (20 - ln_text)}{make_graph(l['percent'])} {percent} % " - data_list.append(op) - return '\n'.join(data_list) + if EM.SHOW_PROJECTS: + project_list = no_activity if len(data["data"]["projects"]) == 0 else make_list(data["data"]["projects"]) + stats += f"🐱‍💻 {LM.t('Projects')}: \n{project_list}\n\n" - -def make_commit_list(data: list): - '''Make List''' - data_list = [] - for l in data[:7]: - ln = len(l['name']) - ln_text = len(l['text']) - percent = "{:05.2f}".format(float(l['percent'])) - op = f"{l['name']}{' ' * ((15 - ln) + (11 - ln_text))}{l['text']}{' ' * (7)}{make_graph(l['percent'])} {percent} % " - data_list.append(op) - return '\n'.join(data_list) - - -async def generate_commit_list(tz): - string = '' - - result = await DownloadManager.get_remote_graphql("repositories_contributed_to", username=user.login) - nodes = result["data"]["user"]["repositoriesContributedTo"]["nodes"] - repos = [d for d in nodes if d['isFork'] is False] - - morning = 0 # 6 - 12 - daytime = 0 # 12 - 18 - evening = 0 # 18 - 24 - night = 0 # 0 - 6 - - Monday = 0 - Tuesday = 0 - Wednesday = 0 - Thursday = 0 - Friday = 0 - Saturday = 0 - Sunday = 0 - - for repository in repos: - result = await DownloadManager.get_remote_graphql("repository_committed_dates", owner=repository["owner"]["login"], name=repository["name"], id=user.node_id) - committed_dates = result["data"]["repository"]["defaultBranchRef"]["target"]["history"]["edges"] - for committedDate in committed_dates: - date = datetime.datetime.strptime(committedDate["node"]["committedDate"], "%Y-%m-%dT%H:%M:%SZ").replace(tzinfo=pytz.utc).astimezone(timezone(tz)) - hour = date.hour - weekday = date.strftime('%A') - if 6 <= hour < 12: - morning += 1 - if 12 <= hour < 18: - daytime += 1 - if 18 <= hour < 24: - evening += 1 - if 0 <= hour < 6: - night += 1 - - if weekday == "Monday": - Monday += 1 - if weekday == "Tuesday": - Tuesday += 1 - if weekday == "Wednesday": - Wednesday += 1 - if weekday == "Thursday": - 
Thursday += 1 - if weekday == "Friday": - Friday += 1 - if weekday == "Saturday": - Saturday += 1 - if weekday == "Sunday": - Sunday += 1 - - sumAll = morning + daytime + evening + night - sum_week = Sunday + Monday + Tuesday + Friday + Saturday + Wednesday + Thursday - title = translate['I am an Early'] if morning + daytime >= evening + night else translate['I am a Night'] - one_day = [ - {"name": "🌞 " + translate['Morning'], "text": str(morning) + " commits", - "percent": round((morning / sumAll) * 100, 2)}, - {"name": "🌆 " + translate['Daytime'], "text": str(daytime) + " commits", - "percent": round((daytime / sumAll) * 100, 2)}, - {"name": "🌃 " + translate['Evening'], "text": str(evening) + " commits", - "percent": round((evening / sumAll) * 100, 2)}, - {"name": "🌙 " + translate['Night'], "text": str(night) + " commits", - "percent": round((night / sumAll) * 100, 2)}, - ] - dayOfWeek = [ - {"name": translate['Monday'], "text": str(Monday) + " commits", "percent": round((Monday / sum_week) * 100, 2)}, - {"name": translate['Tuesday'], "text": str(Tuesday) + " commits", - "percent": round((Tuesday / sum_week) * 100, 2)}, - {"name": translate['Wednesday'], "text": str(Wednesday) + " commits", - "percent": round((Wednesday / sum_week) * 100, 2)}, - {"name": translate['Thursday'], "text": str(Thursday) + " commits", - "percent": round((Thursday / sum_week) * 100, 2)}, - {"name": translate['Friday'], "text": str(Friday) + " commits", "percent": round((Friday / sum_week) * 100, 2)}, - {"name": translate['Saturday'], "text": str(Saturday) + " commits", - "percent": round((Saturday / sum_week) * 100, 2)}, - {"name": translate['Sunday'], "text": str(Sunday) + " commits", "percent": round((Sunday / sum_week) * 100, 2)}, - ] - - string = string + '**' + title + '** \n\n' + '```text\n' + make_commit_list(one_day) + '\n\n```\n' - - if show_days_of_week.lower() in truthy: - max_element = { - 'percent': 0 - } - - for day in dayOfWeek: - if day['percent'] > 
max_element['percent']: - max_element = day - days_title = translate['I am Most Productive on'] % max_element['name'] - string = string + '📅 **' + days_title + '** \n\n' + '```text\n' + make_commit_list(dayOfWeek) + '\n\n```\n' - - return string - - -async def get_waka_time_stats(): - stats = '' - no_activity = translate["No Activity Tracked This Week"] - - data = await DownloadManager.get_remote_json("waka_latest") - if showCommit.lower() in truthy: - stats = stats + await generate_commit_list(data['data']['timezone']) + '\n\n' - - if showTimeZone.lower() in truthy or showLanguage.lower() in truthy or showEditors.lower() in truthy or showProjects.lower() in truthy or showOs.lower() in truthy: - stats += '📊 **' + translate['This Week I Spend My Time On'] + '** \n\n' - stats += '```text\n' - - if showTimeZone.lower() in truthy: - tzone = data['data']['timezone'] - stats = stats + '⌚︎ ' + translate['Timezone'] + ': ' + tzone + '\n\n' - - if showLanguage.lower() in truthy: - if len(data['data']['languages']) == 0: - lang_list = no_activity - else: - lang_list = make_list(data['data']['languages']) - stats = stats + '💬 ' + translate['Languages'] + ': \n' + lang_list + '\n\n' - - if showEditors.lower() in truthy: - if len(data['data']['editors']) == 0: - edit_list = no_activity - else: - edit_list = make_list(data['data']['editors']) - stats = stats + '🔥 ' + translate['Editors'] + ': \n' + edit_list + '\n\n' - - if showProjects.lower() in truthy: - if len(data['data']['projects']) == 0: - project_list = no_activity - else: - # Re-order the project list by percentage - data['data']['projects'] = sorted(data['data']['projects'], key=lambda x: x["percent"], - reverse=True) - project_list = make_list(data['data']['projects']) - stats = stats + '🐱‍💻 ' + translate['Projects'] + ': \n' + project_list + '\n\n' - - if showOs.lower() in truthy: - if len(data['data']['operating_systems']) == 0: - os_list = no_activity - else: - os_list = 
make_list(data['data']['operating_systems']) - stats = stats + '💻 ' + translate['operating system'] + ': \n' + os_list + '\n\n' + if EM.SHOW_OS: + os_list = no_activity if len(data["data"]["operating_systems"]) == 0 else make_list(data["data"]["operating_systems"]) + stats += f"💻 {LM.t('operating system')}: \n{os_list}\n\n" stats += '```\n\n' return stats -def generate_language_per_repo(result): - language_count = {} - total = 0 - for repo in result['data']['user']['repositories']['edges']: - if repo['node']['primaryLanguage'] is None: - continue - language = repo['node']['primaryLanguage']['name'] - total += 1 - if language not in language_count.keys(): - language_count[language] = {} - language_count[language]['count'] = 1 - else: - language_count[language]['count'] = language_count[language]['count'] + 1 - data = [] - sorted_labels = list(language_count.keys()) - sorted_labels.sort(key=lambda x: language_count[x]['count'], reverse=True) - for label in sorted_labels: - percent = round(language_count[label]['count'] / total * 100, 2) - extension = " repos" - if language_count[label]['count'] == 1: - extension = " repo" - data.append({ - "name": label, - "text": str(language_count[label]['count']) + extension, - "percent": percent - }) - - title = '**' + translate['I Mostly Code in'] % sorted_labels[0] + '** \n\n' if len(sorted_labels) > 0 else '' - return title + '```text\n' + make_list(data) + '\n\n```\n' +async def get_yearly_data(repository_list) -> Tuple[LinesOfCode, Dict]: # TODO: refactor! 
+ loc = LinesOfCode(GHM.USER, EM.GH_TOKEN, repository_list, EM.IGNORED_REPOS) + return loc, await loc.calculateLoc() -async def get_yearly_data(): - repository_list = await DownloadManager.get_remote_graphql("user_repository_list", username=user.login, id=user.node_id) - loc = LinesOfCode(user, ghtoken, repository_list, ignored_repos_name) - yearly_data = await loc.calculateLoc() - if showLocChart.lower() in truthy: - await loc.plotLoc(yearly_data) - return yearly_data +async def get_short_github_info(): + stats = f"**🐱 {LM.t('My GitHub Data')}** \n\n" - -async def get_line_of_code() -> str: - repositoryList = await DownloadManager.get_remote_graphql("user_repository_list", username=user.login, id=user.node_id) - loc = LinesOfCode(user, ghtoken, repositoryList, ignored_repos_name) - yearly_data = await loc.calculateLoc() - total_loc = sum( - [yearly_data[year][quarter][lang] for year in yearly_data for quarter in yearly_data[year] for lang in - yearly_data[year][quarter]]) - return millify(int(total_loc)) - - -async def get_short_info(): - string = '**🐱 ' + translate['My GitHub Data'] + '** \n\n' - if user.disk_usage is None: - disk_usage = humanize.naturalsize(0) - print("Please add new github personal access token with user permission") + if GHM.USER.disk_usage is None: + disk_usage = LM.t("Used in GitHub's Storage") % "?" 
+ print("Please add new github personal access token with user permission!") else: - disk_usage = humanize.naturalsize(user.disk_usage) - data = await DownloadManager.get_remote_json("github_stats") - if len(data['years']) > 0: - this_year_data = data['years'][0] - total = this_year_data['total'] - year = this_year_data['year'] - string += '> 🏆 ' + translate['Contributions in the year'] % (humanize.intcomma(total), year) + '\n > \n' + disk_usage = LM.t("Used in GitHub's Storage") % naturalsize(GHM.USER.disk_usage) + stats += f"> 📦 {disk_usage} \n > \n" - string += '> 📦 ' + translate["Used in GitHub's Storage"] % disk_usage + ' \n > \n' - is_hireable = user.hireable - public_repo = user.public_repos - private_repo = user.owned_private_repos - if private_repo is None: - private_repo = 0 - if is_hireable: - string += "> 💼 " + translate["Opted to Hire"] + "\n > \n" + data = await DM.get_remote_json("github_stats") + if len(data["years"]) > 0: + contributions = LM.t('Contributions in the year') % (intcomma(data["years"][0]['total']), data["years"][0]['year']) + stats += f"> 🏆 {contributions}\n > \n" + + opted_to_hire = GHM.USER.hireable + if opted_to_hire: + stats += f"> 💼 {LM.t('Opted to Hire')}\n > \n" else: - string += "> 🚫 " + translate["Not Opted to Hire"] + "\n > \n" + stats += f"> 🚫 {LM.t('Not Opted to Hire')}\n > \n" - string += '> 📜 ' - string += translate['public repositories'] % public_repo + " " + '\n > \n' if public_repo != 1 else translate[ - 'public repository'] % public_repo + " " + '\n > \n' - string += '> 🔑 ' - string += translate['private repositories'] % private_repo + " " + ' \n > \n' if private_repo != 1 else translate[ - 'private repository'] % private_repo + " " + '\n > \n' + public_repo = GHM.USER.public_repos + if public_repo != 1: + stats += f"> 📜 {LM.t('public repositories') % public_repo} \n > \n" + else: + stats += f"> 📜 {LM.t('public repository') % public_repo} \n > \n" - return string - - -async def get_stats(github) -> str: - '''Gets API 
data and returns markdown progress''' - - stats = '' - repositoryList = await DownloadManager.get_remote_graphql("user_repository_list", username=user.login, id=user.node_id) - - if show_loc.lower() in truthy or showLocChart.lower() in truthy: - # This condition is written to calculate the lines of code because it is heavy process soo needs to be calculate once this will reduce the execution time - await get_yearly_data() - - if show_total_code_time.lower() in truthy: - data = await DownloadManager.get_remote_json("waka_all") - stats += '![Code Time](http://img.shields.io/badge/' + quote( - str("Code Time")) + '-' + quote(str( - data['data']['text'])) + '-blue)\n\n' - - if show_profile_view.lower() in truthy: - data = github.get_repo(f"{user.login}/{user.login}").get_views_traffic(per="week") - stats += '![Profile Views](http://img.shields.io/badge/' + quote(str(translate['Profile Views'])) + '-' + str( - data['count']) + '-blue)\n\n' - - if show_loc.lower() in truthy: - stats += '![Lines of code](https://img.shields.io/badge/' + quote( - str(translate['From Hello World I have written'])) + '-' + quote( - str(await get_line_of_code())) + '%20' + quote(str(translate['Lines of code'])) + '-blue)\n\n' - - if show_short_info.lower() in truthy: - stats += await get_short_info() - - if show_waka_stats.lower() in truthy: - stats += await get_waka_time_stats() - - if showLanguagePerRepo.lower() in truthy: - stats = stats + generate_language_per_repo(repositoryList) + '\n\n' - - if showLocChart.lower() in truthy: - stats += '**' + translate['Timeline'] + '**\n\n' - branch_name = github.get_repo(f'{user.login}/{user.login}').default_branch - stats = stats + '![Chart not found](https://raw.githubusercontent.com/' + user.login + '/' + user.login + '/' + branch_name + '/' + LinesOfCode.GRAPH_PATH + ') \n\n' - - if show_updated_date.lower() in truthy: - now = datetime.datetime.utcnow() - d1 = now.strftime(updated_date_format) - stats = stats + "\n Last Updated on " + d1 + " UTC" 
+ private_repo = GHM.USER.owned_private_repos if GHM.USER.owned_private_repos is not None else 0 + if public_repo != 1: + stats += f"> 🔑 {LM.t('private repositories') % private_repo} \n > \n" + else: + stats += f"> 🔑 {LM.t('private repository') % private_repo} \n > \n" return stats -def decode_readme(data: str): - '''Decode the contents of old readme''' - decoded_bytes = base64.b64decode(data) - return str(decoded_bytes, 'utf-8') +async def get_stats() -> str: + """ + Gets API data and returns markdown progress + """ + stats = str() + repositories = await DM.get_remote_graphql("user_repository_list", username=GHM.USER.login, id=GHM.USER.node_id) + if EM.SHOW_LINES_OF_CODE or EM.SHOW_LOC_CHART: + loc, yearly_data = await get_yearly_data(repositories) + else: + loc, yearly_data = (None, dict()) -def generate_new_readme(stats: str, readme: str): - '''Generate a new Readme.md''' - stats_in_readme = f"{START_COMMENT}\n{stats}\n{END_COMMENT}" - return re.sub(listReg, stats_in_readme, readme) + if EM.SHOW_TOTAL_CODE_TIME: + data = await DM.get_remote_json("waka_all") + stats += f"![Code Time](http://img.shields.io/badge/{quote('Code Time')}-{quote(str(data['data']['text']))}-blue)\n\n" + + if EM.SHOW_PROFILE_VIEWS: + data = GHM.REPO.get_views_traffic(per="week") + stats += f"![Profile Views](http://img.shields.io/badge/{quote(LM.t('Profile Views'))}-{data['count']}-blue)\n\n" + + if EM.SHOW_LINES_OF_CODE: + total_loc = sum([yearly_data[y][q][d] for y in yearly_data.keys() for q in yearly_data[y].keys() for d in yearly_data[y][q].keys()]) + data = f"{intword(total_loc)} {LM.t('Lines of code')}" + stats += f"![Lines of code](https://img.shields.io/badge/{quote(LM.t('From Hello World I have written'))}-{quote(data)}-blue)\n\n" + + if EM.SHOW_SHORT_INFO: + stats += await get_short_github_info() + + stats += await get_waka_time_stats() + + if EM.SHOW_LANGUAGE_PER_REPO: + stats += f"{make_language_per_repo_list(repositories)}\n\n" + + if EM.SHOW_LOC_CHART: + await 
loc.plotLoc(yearly_data) + chart_path = f"{GHM.USER.login}/{GHM.USER.login}/{GHM.branch()}/{LinesOfCode.GRAPH_PATH}" + stats += '**' + LM.t('Timeline') + '**\n\n' + stats += f"![Lines of Code chart](https://raw.githubusercontent.com/{chart_path})\n\n" + + if EM.SHOW_UPDATED_DATE: + stats += f"\n Last Updated on {datetime.now().strftime(EM.UPDATED_DATE_FORMAT)} UTC" + + return stats async def main(): - global translate, user + init_github_manager() + await init_download_manager() + init_localization_manager() - if ghtoken is None: - raise Exception('Token not available') - user = Github(ghtoken).get_user() - print(f"Current user: {user.login}") - await init_download_manager(waka_key, ghtoken, user) - - try: - with open(os.path.join(os.path.dirname(__file__), 'translation.json'), encoding='utf-8') as config_file: - data = json.load(config_file) - translate = data[locale] - except Exception as e: - print("Cannot find the Locale choosing default to english") - translate = data['en'] - - g = Github(ghtoken) - waka_stats = await get_stats(g) - - repo = g.get_repo(f"{user.login}/{user.login}") - contents = repo.get_readme() - rdmd = decode_readme(contents.content) - new_readme = generate_new_readme(stats=waka_stats, readme=rdmd) - - if commit_by_me.lower() in truthy: - committer = InputGitAuthor(user.login or commit_username, user.email or commit_email) - else: - committer = InputGitAuthor( - commit_username or 'readme-bot', - commit_email or '41898282+github-actions[bot]@users.noreply.github.com' - ) - if new_readme != rdmd: - try: - repo.update_file(path=contents.path, message=commit_message, - content=new_readme, sha=contents.sha, branch=branchName, - committer=committer) - except: - repo.update_file(path=contents.path, message=commit_message, - content=new_readme, sha=contents.sha, branch='main', - committer=committer) - print("Readme updated") + if GHM.update_readme(await get_stats()): + print("Readme updated!") if __name__ == '__main__': - start_time = 
datetime.datetime.now().timestamp() * 1000 + start_time = datetime.now() run(main()) - end_time = datetime.datetime.now().timestamp() * 1000 - print(f"Program processed in {round(end_time - start_time, 0)} miliseconds.") + run_delta = datetime.now() - start_time + print(f"Program processed in {precisedelta(run_delta, minimum_unit='microseconds')}.") + +# TODO: check function and variable naming +# TODO: check type hints +# TODO: sorted to max / min +# TODO: add 1 to repo count +# TODO: drop not awaited coroutines diff --git a/sources/download_manager.py b/sources/manager_download.py similarity index 93% rename from sources/download_manager.py rename to sources/manager_download.py index cdd7280..2fcb3e0 100644 --- a/sources/download_manager.py +++ b/sources/manager_download.py @@ -5,11 +5,13 @@ from typing import Awaitable, Dict, Callable, Optional from httpx import AsyncClient from yaml import safe_load -from github import AuthenticatedUser + +from manager_environment import EnvironmentManager as EM +from manager_github import GitHubManager as GHM GITHUB_API_QUERIES = { - "repositories_contributed_to": """ + "repos_contributed_to": """ { user(login: "$username") { repositoriesContributedTo(last: 100, includeUserRepositories: true) { @@ -23,7 +25,7 @@ GITHUB_API_QUERIES = { } } }""", - "repository_committed_dates": """ + "repo_committed_dates": """ { repository(owner: "$owner", name: "$name") { defaultBranchRef { @@ -60,7 +62,7 @@ GITHUB_API_QUERIES = { } } """, - "repository_commit_list": """ + "repo_commit_list": """ { repository(owner: "$owner", name: "$name") { refs(refPrefix: "refs/heads/", orderBy: {direction: DESC, field: TAG_COMMIT_DATE}, first: 100) { @@ -92,22 +94,19 @@ GITHUB_API_QUERIES = { } -async def init_download_manager(waka_key: str, github_key: str, user: AuthenticatedUser): +async def init_download_manager(): """ Initialize download manager: - Setup headers for GitHub GraphQL requests. - Launch static queries in background. 
- :param waka_key: WakaTime API token. - :param github_key: GitHub API token. - :param user: GitHub current user info. """ await DownloadManager.load_remote_resources({ "linguist": "https://cdn.jsdelivr.net/gh/github/linguist@master/lib/linguist/languages.yml", - "waka_latest": f"https://wakatime.com/api/v1/users/current/stats/last_7_days?api_key={waka_key}", - "waka_all": f"https://wakatime.com/api/v1/users/current/all_time_since_today?api_key={waka_key}", - "github_stats": f"https://github-contributions.vercel.app/api/v1/{user.login}" + "waka_latest": f"https://wakatime.com/api/v1/users/current/stats/last_7_days?api_key={EM.WAKATIME_API_KEY}", + "waka_all": f"https://wakatime.com/api/v1/users/current/all_time_since_today?api_key={EM.WAKATIME_API_KEY}", + "github_stats": f"https://github-contributions.vercel.app/api/v1/{GHM.USER.login}" }, { - "Authorization": f"Bearer {github_key}" + "Authorization": f"Bearer {EM.GH_TOKEN}" }) diff --git a/sources/manager_environment.py b/sources/manager_environment.py new file mode 100644 index 0000000..f3b3793 --- /dev/null +++ b/sources/manager_environment.py @@ -0,0 +1,36 @@ +from os import getenv, environ + + +class EnvironmentManager: + _TRUTHY = ['true', '1', 't', 'y', 'yes'] + + GH_TOKEN = environ['INPUT_GH_TOKEN'] + WAKATIME_API_KEY = environ['INPUT_WAKATIME_API_KEY'] + + SECTION_NAME = getenv("INPUT_SECTION_NAME", "waka") + BRANCH_NAME = getenv('INPUT_PUSH_BRANCH_NAME', "") + + SHOW_OS = getenv('INPUT_SHOW_OS', "False").lower() in _TRUTHY + SHOW_PROJECTS = getenv('INPUT_SHOW_PROJECTS', "True").lower() in _TRUTHY + SHOW_EDITORS = getenv('INPUT_SHOW_EDITORS', "True").lower() in _TRUTHY + SHOW_TIMEZONE = getenv('INPUT_SHOW_TIMEZONE', "True").lower() in _TRUTHY + SHOW_COMMIT = getenv('INPUT_SHOW_COMMIT', "True").lower() in _TRUTHY + SHOW_LANGUAGE = getenv('INPUT_SHOW_LANGUAGE', "True").lower() in _TRUTHY + SHOW_LINES_OF_CODE = getenv('INPUT_SHOW_LINES_OF_CODE', "False").lower() in _TRUTHY + SHOW_LANGUAGE_PER_REPO = 
getenv('INPUT_SHOW_LANGUAGE_PER_REPO', "True").lower() in _TRUTHY + SHOW_LOC_CHART = getenv('INPUT_SHOW_LOC_CHART', "True").lower() in _TRUTHY + SHOW_DAYS_OF_WEEK = getenv('INPUT_SHOW_DAYS_OF_WEEK', "True").lower() in _TRUTHY + SHOW_PROFILE_VIEWS = getenv('INPUT_SHOW_PROFILE_VIEWS', "True").lower() in _TRUTHY + SHOW_SHORT_INFO = getenv('INPUT_SHOW_SHORT_INFO', "True").lower() in _TRUTHY + SHOW_UPDATED_DATE = getenv('INPUT_SHOW_UPDATED_DATE', "True").lower() in _TRUTHY + SHOW_TOTAL_CODE_TIME = getenv('INPUT_SHOW_TOTAL_CODE_TIME', "True").lower() in _TRUTHY + + COMMIT_BY_ME = getenv('INPUT_COMMIT_BY_ME', "False").lower() in _TRUTHY + COMMIT_MESSAGE = getenv('INPUT_COMMIT_MESSAGE', "Updated with Dev Metrics") + COMMIT_USERNAME = getenv('INPUT_COMMIT_USERNAME', "") + COMMIT_EMAIL = getenv('INPUT_COMMIT_EMAIL', "") + + LOCALE = getenv('INPUT_LOCALE', "en") + UPDATED_DATE_FORMAT = getenv('INPUT_UPDATED_DATE_FORMAT', "%d/%m/%Y %H:%M:%S") + IGNORED_REPOS = getenv('INPUT_IGNORED_REPOS', "").replace(' ', '').split(',') + SYMBOL_VERSION = int(getenv('INPUT_SYMBOL_VERSION')) # TODO: enum? 
diff --git a/sources/manager_github.py b/sources/manager_github.py new file mode 100644 index 0000000..3a85bf3 --- /dev/null +++ b/sources/manager_github.py @@ -0,0 +1,77 @@ +from base64 import b64decode +from re import sub + +from github import Github, AuthenticatedUser, Repository, ContentFile, InputGitAuthor + +from manager_environment import EnvironmentManager as EM + + +def init_github_manager(): + """ + """ + GitHubManager.prepare_github_env() + print(f"Current user: {GitHubManager.USER.login}") + + +class GitHubManager: + USER: AuthenticatedUser + REPO: Repository + README: ContentFile + README_CONTENTS: str + + _START_COMMENT = f'' + _END_COMMENT = f'' + _README_REGEX = f"{_START_COMMENT}[\\s\\S]+{_END_COMMENT}" + + @staticmethod + def prepare_github_env(): + """ + """ + github = Github(EM.GH_TOKEN) + GitHubManager.USER = github.get_user() + GitHubManager.REPO = github.get_repo(f"{GitHubManager.USER.login}/{GitHubManager.USER.login}") + GitHubManager.README = GitHubManager.REPO.get_readme() + GitHubManager.README_CONTENTS = str(b64decode(GitHubManager.README.content), 'utf-8') + + @staticmethod + def _generate_new_readme(stats: str): + """ + Generate a new Readme.md + """ + readme_stats = f"{GitHubManager._START_COMMENT}\n{stats}\n{GitHubManager._END_COMMENT}" + return sub(GitHubManager._README_REGEX, readme_stats, GitHubManager.README_CONTENTS) + + @staticmethod + def _get_author(): + """ + """ + if EM.COMMIT_BY_ME: + return InputGitAuthor( + GitHubManager.USER.login or EM.COMMIT_USERNAME, + GitHubManager.USER.email or EM.COMMIT_EMAIL + ) + else: + return InputGitAuthor( + EM.COMMIT_USERNAME or 'readme-bot', + EM.COMMIT_EMAIL or '41898282+github-actions[bot]@users.noreply.github.com' + ) + + @staticmethod + def branch() -> str: + return GitHubManager.REPO.default_branch if EM.BRANCH_NAME == "" else EM.BRANCH_NAME + + @staticmethod + def update_readme(stats: str) -> bool: + new_readme = GitHubManager._generate_new_readme(stats) + if new_readme != 
GitHubManager.README_CONTENTS: + GitHubManager.REPO.update_file( + path=GitHubManager.README.path, + message=EM.COMMIT_MESSAGE, + content=new_readme, + sha=GitHubManager.README.sha, + branch=GitHubManager.branch(), + committer=GitHubManager._get_author() + ) + return True + else: + return False diff --git a/sources/manager_localization.py b/sources/manager_localization.py new file mode 100644 index 0000000..3946869 --- /dev/null +++ b/sources/manager_localization.py @@ -0,0 +1,25 @@ +from json import load +from os.path import join, dirname +from typing import Dict + +from manager_environment import EnvironmentManager as EM + + +def init_localization_manager(): + """ + """ + LocalizationManager.load_localization("translation.json") + + +class LocalizationManager: + _LOCALIZATION: Dict[str, str] = dict() + + @staticmethod + def load_localization(file: str): + with open(join(dirname(__file__), file), encoding='utf-8') as config_file: + data = load(config_file) + LocalizationManager._LOCALIZATION = data[EM.LOCALE] + + @staticmethod + def t(key: str) -> str: + return LocalizationManager._LOCALIZATION[key] From c378e689d0c61bd67ad70725a7a65a62e31abf9e Mon Sep 17 00:00:00 2001 From: pseusys Date: Fri, 17 Feb 2023 17:25:32 +0100 Subject: [PATCH 09/23] codestyle applying done --- sources/graphics_list_formatter.py | 114 ++++++++++++---------------- sources/loc.py | 77 ------------------- sources/main.py | 23 +++--- sources/manager_environment.py | 2 +- sources/manager_github.py | 12 ++- sources/yearly_commit_calculator.py | 42 ++++++++++ 6 files changed, 112 insertions(+), 158 deletions(-) delete mode 100644 sources/loc.py create mode 100644 sources/yearly_commit_calculator.py diff --git a/sources/graphics_list_formatter.py b/sources/graphics_list_formatter.py index 0a430af..95f12f1 100644 --- a/sources/graphics_list_formatter.py +++ b/sources/graphics_list_formatter.py @@ -1,4 +1,5 @@ -from typing import Dict +from enum import Enum +from typing import Dict, Tuple, List 
from datetime import datetime from pytz import timezone, utc @@ -9,50 +10,46 @@ from manager_github import GitHubManager as GHM from manager_localization import LocalizationManager as LM +DAY_TIME_EMOJI = ["🌞", "🌆", "🌃", "🌙"] +DAY_TIME_NAMES = ["Morning", "Daytime", "Evening", "Night"] +WEEK_DAY_NAMES = ["Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday", "Sunday"] + + +class Symbol(Enum): + VERSION_1 = "█", "░" + VERSION_2 = "⣿", "⣀" + VERSION_3 = "⬛", "⬜" + + @staticmethod + def get_symbols(version: int) -> Tuple[str, str]: + return Symbol[f"VERSION_{version}"].value + + def make_graph(percent: float): - '''Make progress graph from API graph''' - if EM.SYMBOL_VERSION == 1: # version 1 - done_block = '█' - empty_block = '░' - elif EM.SYMBOL_VERSION == 2: # version 2 - done_block = '⣿' - empty_block = '⣀' - elif EM.SYMBOL_VERSION == 3: # version 3 - done_block = '⬛' - empty_block = '⬜' - else: - done_block = '█' # default is version 1 - empty_block = '░' - - pc_rnd = round(percent) - return f"{done_block * int(pc_rnd / 4)}{empty_block * int(25 - int(pc_rnd / 4))}" + """ + Make progress graph from API graph + """ + done_block, empty_block = Symbol.get_symbols(EM.SYMBOL_VERSION) + percent_quart = round(percent / 4) + return f"{done_block * percent_quart}{empty_block * (25 - percent_quart)}" -def make_list(data: list): # TODO: add arg: sorted - '''Make List''' - data_list = [] - for l in data[:5]: - ln = len(l['name']) - ln_text = len(l['text']) - percent = "{:05.2f}".format(float(l['percent'])) - op = f"{l['name'][:25]}{' ' * (25 - ln)}{l['text']}{' ' * (20 - ln_text)}{make_graph(l['percent'])} {percent} % " - data_list.append(op) +def make_list(data: Dict = None, names: List[str] = None, texts: List[str] = None, percents: List[float] = None, top_num: int = 5, sort: bool = True) -> str: + """ + Make List + """ + if data is not None: + names = [value for key, value in data if key == "name"] if names is None else names + texts = [value for key, value in 
data if key == "text"] if texts is None else texts + percents = [value for key, value in data if key == "percent"] if percents is None else percents + + data = list(zip(names, texts, percents)) + top_data = sorted(data[:top_num], key=lambda _, __, p: p) if sort else data[:top_num] + data_list = [f"{n:25]}{' ' * (25 - len(n))}{t}{' ' * (20 - len(t))}{make_graph(p)} {p:05.2f} % " for n, t, p in top_data] return '\n'.join(data_list) -def make_commit_list(data: list): - '''Make List''' - data_list = [] - for l in data[:7]: - ln = len(l['name']) - ln_text = len(l['text']) - percent = "{:05.2f}".format(float(l['percent'])) - op = f"{l['name']}{' ' * ((15 - ln) + (11 - ln_text))}{l['text']}{' ' * (7)}{make_graph(l['percent'])} {percent} % " - data_list.append(op) - return '\n'.join(data_list) - - -async def generate_commit_list(time_zone: str) -> str: +async def make_commit_day_time_list(time_zone: str) -> str: stats = str() result = await DM.get_remote_graphql("repos_contributed_to", username=GHM.USER.login) @@ -75,28 +72,19 @@ async def generate_commit_list(time_zone: str) -> str: sum_day = sum(day_times) sum_week = sum(week_days) day_times = day_times[1:] + day_times[:1] - time_of_day_data = [ - {"name": f"🌞 {LM.t('Morning')}", "text": f"{day_times[0]} commits", "percent": round((day_times[0] / sum_day) * 100, 2)}, - {"name": f"🌆 {LM.t('Daytime')}", "text": f"{day_times[1]} commits", "percent": round((day_times[1] / sum_day) * 100, 2)}, - {"name": f"🌃 {LM.t('Evening')}", "text": f"{day_times[2]} commits", "percent": round((day_times[2] / sum_day) * 100, 2)}, - {"name": f"🌙 {LM.t('Night')}", "text": f"{day_times[3]} commits", "percent": round((day_times[3] / sum_day) * 100, 2)}, - ] - day_of_week_data = [ - {"name": LM.t("Monday"), "text": f"{week_days[0]} commits", "percent": round((week_days[0] / sum_week) * 100, 2)}, - {"name": LM.t("Tuesday"), "text": f"{week_days[1]} commits", "percent": round((week_days[1] / sum_week) * 100, 2)}, - {"name": LM.t("Wednesday"), 
"text": f"{week_days[2]} commits", "percent": round((week_days[2] / sum_week) * 100, 2)}, - {"name": LM.t("Thursday"), "text": f"{week_days[3]} commits", "percent": round((week_days[3] / sum_week) * 100, 2)}, - {"name": LM.t("Friday"), "text": f"{week_days[4]} commits", "percent": round((week_days[4] / sum_week) * 100, 2)}, - {"name": LM.t("Saturday"), "text": f"{week_days[5]} commits", "percent": round((week_days[5] / sum_week) * 100, 2)}, - {"name": LM.t("Sunday"), "text": f"{week_days[6]} commits", "percent": round((week_days[6] / sum_week) * 100, 2)}, - ] + day_time_names = [f"{DAY_TIME_EMOJI[i]} {LM.t(DAY_TIME_NAMES[i])}" for i in range(len(day_times))] + day_time_texts = [f'{day_time} commits' for day_time in day_times] + day_time_percents = [round((day_time / sum_day) * 100, 2) for day_time in day_times] title = LM.t("I am an Early") if sum(day_times[0:2]) >= sum(day_times[2:4]) else LM.t("I am a Night") - stats += f"**{title}** \n\n```text\n{make_commit_list(time_of_day_data)}\n\n```\n" + stats += f"**{title}** \n\n```text\n{make_list(names=day_time_names, texts=day_time_texts, percents=day_time_percents, top_num=7)}\n\n```\n" if EM.SHOW_DAYS_OF_WEEK: - most_productive = max(day_of_week_data, key=lambda d: d["percent"]) - stats += f"📅 **{LM.t('I am Most Productive on') % most_productive['name']}** \n\n```text\n{make_commit_list(day_of_week_data)}\n\n```\n" + week_day_names = [LM.t(week_day) for week_day in WEEK_DAY_NAMES] + week_day_texts = [f'{week_day} commits' for week_day in week_days] + week_day_percents = [round((week_day / sum_week) * 100, 2) for week_day in week_days] + title = LM.t("I am Most Productive on") % week_day_names[week_day_percents.index(max(week_day_percents))] + stats += f"📅 **{title}** \n\n```text\n{make_list(names=week_day_names, texts=week_day_texts, percents=week_day_percents, top_num=7)}\n\n```\n" return stats @@ -109,14 +97,10 @@ def make_language_per_repo_list(result: Dict) -> str: language_count[language] = 
language_count.get(language, {"count": 0}) language_count[language]["count"] += 1 - data = list() - for language in language_count.keys(): - data.append({ - "name": language, - "text": f"{language_count[language]['count']} {'repo' if language_count[language]['count'] == 1 else 'repos'}", - "percent": round(language_count[language]["count"] / len(repos_with_language) * 100, 2) - }) + names = list(language_count.keys()) + texts = [f"{language_count[lang]['count']} {'repo' if language_count[lang]['count'] == 1 else 'repos'}" for lang in names] + percents = [round(language_count[lang]["count"] / len(repos_with_language) * 100, 2) for lang in names] top_language = max(list(language_count.keys()), key=lambda x: language_count[x]["count"]) title = f"**{LM.t('I Mostly Code in') % top_language}** \n\n" if len(repos_with_language) > 0 else "" - return f"{title}```text\n{make_list(data)}\n```\n\n" + return f"{title}```text\n{make_list(names=names, texts=texts, percents=percents)}\n```\n\n" diff --git a/sources/loc.py b/sources/loc.py deleted file mode 100644 index 66c17d7..0000000 --- a/sources/loc.py +++ /dev/null @@ -1,77 +0,0 @@ -import re -from asyncio import sleep - -from github import Github, InputGitAuthor, AuthenticatedUser -import datetime - -from manager_download import DownloadManager as DM -from graphics_chart_drawer import create_loc_graph - - -class LinesOfCode: - GRAPH_PATH = "assets/bar_graph.png" - - def __init__(self, user: AuthenticatedUser, ghtoken, repositoryData, ignored_repos): - self.g = Github(ghtoken) - self.user = user - self.repositoryData = repositoryData - self.ignored_repos = ignored_repos - - async def calculateLoc(self): - result = self.repositoryData - yearly_data = {} - total = len(result['data']['user']['repositories']['edges']) - for ind, repo in enumerate(result['data']['user']['repositories']['edges']): - if repo['node']['name'] not in self.ignored_repos: - print(f"{ind}/{total}", "Retrieving repo:", repo['node']["owner"]["login"], 
repo['node']['name']) - await self.getCommitStat(repo['node'], yearly_data) - await sleep(0.7) - return yearly_data - - async def plotLoc(self, yearly_data): - await create_loc_graph(yearly_data, LinesOfCode.GRAPH_PATH) - self.pushChart() - - def getQuarter(self, timeStamp): - month = datetime.datetime.fromisoformat(timeStamp).month - if month >= 1 and month <= 3: - return 1 - elif month >= 4 and month <= 6: - return 2 - elif month >= 7 and month <= 9: - return 3 - elif month >= 10 and month <= 12: - return 4 - - async def getCommitStat(self, repoDetails, yearly_data): - commit_data = await DM.get_remote_graphql("repo_commit_list", owner=repoDetails["owner"]["login"], name=repoDetails['name'], id=self.user.node_id) - - if commit_data["data"]["repository"] is None: - print("\tSkipping:", repoDetails['name']) - return - - for commit in [commit["node"] for branch in commit_data["data"]["repository"]["refs"]["edges"] for commit in branch["node"]["target"]["history"]["edges"]]: - date = re.search(r'\d+-\d+-\d+', commit["committedDate"]).group(0) - curr_year = datetime.datetime.fromisoformat(date).year - quarter = self.getQuarter(date) - - if repoDetails['primaryLanguage'] is not None: - if curr_year not in yearly_data: - yearly_data[curr_year] = {} - if quarter not in yearly_data[curr_year]: - yearly_data[curr_year][quarter] = {} - if repoDetails['primaryLanguage']['name'] not in yearly_data[curr_year][quarter]: - yearly_data[curr_year][quarter][repoDetails['primaryLanguage']['name']] = 0 - yearly_data[curr_year][quarter][repoDetails['primaryLanguage']['name']] += (commit["additions"] - commit["deletions"]) - - - def pushChart(self): - repo = self.g.get_repo(f"{self.user.login}/{self.user.login}") - committer = InputGitAuthor('readme-bot', '41898282+github-actions[bot]@users.noreply.github.com') - with open(LinesOfCode.GRAPH_PATH, 'rb') as input_file: - data = input_file.read() - try: - contents = repo.get_contents(LinesOfCode.GRAPH_PATH) - 
repo.update_file(contents.path, "Charts Updated", data, contents.sha, committer=committer) - except Exception as e: - repo.create_file(LinesOfCode.GRAPH_PATH, "Charts Added", data, committer=committer) diff --git a/sources/main.py b/sources/main.py index 4cd97da..ad2724d 100644 --- a/sources/main.py +++ b/sources/main.py @@ -2,7 +2,6 @@ Readme Development Metrics With waka time progress """ from asyncio import run -from typing import Dict, Tuple from datetime import datetime from urllib.parse import quote @@ -12,8 +11,9 @@ from manager_download import init_download_manager, DownloadManager as DM from manager_environment import EnvironmentManager as EM from manager_github import init_github_manager, GitHubManager as GHM from manager_localization import init_localization_manager, LocalizationManager as LM -from loc import LinesOfCode # TODO: refactor -from graphics_list_formatter import make_list, generate_commit_list, make_language_per_repo_list +from graphics_chart_drawer import create_loc_graph +from yearly_commit_calculator import GRAPH_PATH, calculate_yearly_commit_data +from graphics_list_formatter import make_list, make_commit_day_time_list, make_language_per_repo_list async def get_waka_time_stats() -> str: @@ -21,7 +21,7 @@ async def get_waka_time_stats() -> str: data = await DM.get_remote_json("waka_latest") if EM.SHOW_COMMIT: - stats += f"{await generate_commit_list(data['data']['timezone'])}\n\n" + stats += f"{await make_commit_day_time_list(data['data']['timezone'])}\n\n" if EM.SHOW_TIMEZONE or EM.SHOW_LANGUAGE or EM.SHOW_EDITORS or EM.SHOW_PROJECTS or EM.SHOW_OS: no_activity = LM.t("No Activity Tracked This Week") @@ -52,11 +52,6 @@ async def get_waka_time_stats() -> str: return stats -async def get_yearly_data(repository_list) -> Tuple[LinesOfCode, Dict]: # TODO: refactor! 
- loc = LinesOfCode(GHM.USER, EM.GH_TOKEN, repository_list, EM.IGNORED_REPOS) - return loc, await loc.calculateLoc() - - async def get_short_github_info(): stats = f"**🐱 {LM.t('My GitHub Data')}** \n\n" @@ -101,9 +96,9 @@ async def get_stats() -> str: repositories = await DM.get_remote_graphql("user_repository_list", username=GHM.USER.login, id=GHM.USER.node_id) if EM.SHOW_LINES_OF_CODE or EM.SHOW_LOC_CHART: - loc, yearly_data = await get_yearly_data(repositories) + yearly_data = await calculate_yearly_commit_data(repositories) else: - loc, yearly_data = (None, dict()) + yearly_data = (None, dict()) if EM.SHOW_TOTAL_CODE_TIME: data = await DM.get_remote_json("waka_all") @@ -127,8 +122,9 @@ async def get_stats() -> str: stats += f"{make_language_per_repo_list(repositories)}\n\n" if EM.SHOW_LOC_CHART: - await loc.plotLoc(yearly_data) - chart_path = f"{GHM.USER.login}/{GHM.USER.login}/{GHM.branch()}/{LinesOfCode.GRAPH_PATH}" + await create_loc_graph(yearly_data, GRAPH_PATH) + GHM.update_chart(GRAPH_PATH) + chart_path = f"{GHM.USER.login}/{GHM.USER.login}/{GHM.branch()}/{GRAPH_PATH}" stats += '**' + LM.t('Timeline') + '**\n\n' stats += f"![Lines of Code chart](https://raw.githubusercontent.com/{chart_path})\n\n" @@ -156,5 +152,4 @@ if __name__ == '__main__': # TODO: check function and variable naming # TODO: check type hints # TODO: sorted to max / min -# TODO: add 1 to repo count # TODO: drop not awaited coroutines diff --git a/sources/manager_environment.py b/sources/manager_environment.py index f3b3793..85f38d5 100644 --- a/sources/manager_environment.py +++ b/sources/manager_environment.py @@ -33,4 +33,4 @@ class EnvironmentManager: LOCALE = getenv('INPUT_LOCALE', "en") UPDATED_DATE_FORMAT = getenv('INPUT_UPDATED_DATE_FORMAT', "%d/%m/%Y %H:%M:%S") IGNORED_REPOS = getenv('INPUT_IGNORED_REPOS', "").replace(' ', '').split(',') - SYMBOL_VERSION = int(getenv('INPUT_SYMBOL_VERSION')) # TODO: enum? 
+ SYMBOL_VERSION = int(getenv('INPUT_SYMBOL_VERSION')) diff --git a/sources/manager_github.py b/sources/manager_github.py index 3a85bf3..654dac2 100644 --- a/sources/manager_github.py +++ b/sources/manager_github.py @@ -1,7 +1,7 @@ from base64 import b64decode from re import sub -from github import Github, AuthenticatedUser, Repository, ContentFile, InputGitAuthor +from github import Github, AuthenticatedUser, Repository, ContentFile, InputGitAuthor, UnknownObjectException from manager_environment import EnvironmentManager as EM @@ -75,3 +75,13 @@ class GitHubManager: return True else: return False + + @staticmethod + def update_chart(chart_path: str): + with open(chart_path, "rb") as input_file: + data = input_file.read() + try: + contents = GitHubManager.REPO.get_contents(chart_path) + GitHubManager.REPO.update_file(contents.path, "Charts Updated", data, contents.sha, committer=GitHubManager._get_author()) + except UnknownObjectException: + GitHubManager.REPO.create_file(chart_path, "Charts Added", data, committer=GitHubManager._get_author()) diff --git a/sources/yearly_commit_calculator.py b/sources/yearly_commit_calculator.py new file mode 100644 index 0000000..284e651 --- /dev/null +++ b/sources/yearly_commit_calculator.py @@ -0,0 +1,42 @@ +from re import search +from datetime import datetime +from typing import Dict + +from manager_download import DownloadManager as DM +from manager_environment import EnvironmentManager as EM +from manager_github import GitHubManager as GHM + + +GRAPH_PATH = "assets/bar_graph.png" + + +async def calculate_yearly_commit_data(repository_data: Dict) -> Dict: + yearly_data = dict() + total = len(repository_data["data"]["user"]["repositories"]["edges"]) + for ind, repo in enumerate(repository_data["data"]["user"]["repositories"]["edges"]): + if repo["node"]["name"] not in EM.IGNORED_REPOS: + print(f"{ind + 1}/{total}", "Retrieving repo:", repo["node"]["owner"]["login"], repo["node"]["name"]) + await 
update_yearly_data_with_commit_stats(repo["node"], yearly_data) + return yearly_data + + +async def update_yearly_data_with_commit_stats(repo_details: Dict, yearly_data: Dict) -> Dict: + commit_data = await DM.get_remote_graphql("repo_commit_list", owner=repo_details["owner"]["login"], name=repo_details["name"], id=GHM.USER.node_id) + + if commit_data["data"]["repository"] is None: + print(f"\tSkipping repo: {repo_details['name']}") + return dict() + + for commit in [commit["node"] for branch in commit_data["data"]["repository"]["refs"]["edges"] for commit in branch["node"]["target"]["history"]["edges"]]: + date = search(r"\d+-\d+-\d+", commit["committedDate"]).group() + curr_year = datetime.fromisoformat(date).year + quarter = (datetime.fromisoformat(date).month - 1) // 3 + 1 + + if repo_details["primaryLanguage"] is not None: + if curr_year not in yearly_data: + yearly_data[curr_year] = dict() + if quarter not in yearly_data[curr_year]: + yearly_data[curr_year][quarter] = dict() + if repo_details["primaryLanguage"]["name"] not in yearly_data[curr_year][quarter]: + yearly_data[curr_year][quarter][repo_details["primaryLanguage"]["name"]] = 0 + yearly_data[curr_year][quarter][repo_details["primaryLanguage"]["name"]] += (commit["additions"] - commit["deletions"]) From df515b2378f1de1b5cbb9c021160a86966f5f934 Mon Sep 17 00:00:00 2001 From: pseusys Date: Fri, 17 Feb 2023 17:57:26 +0100 Subject: [PATCH 10/23] coroutine cancellation added --- sources/graphics_chart_drawer.py | 1 + sources/graphics_list_formatter.py | 34 ++++++++++++++--------------- sources/main.py | 14 +++++------- sources/manager_download.py | 19 ++++++++++++++++ sources/yearly_commit_calculator.py | 9 +++----- 5 files changed, 45 insertions(+), 32 deletions(-) diff --git a/sources/graphics_chart_drawer.py b/sources/graphics_chart_drawer.py index 312b42c..4aae8cd 100644 --- a/sources/graphics_chart_drawer.py +++ b/sources/graphics_chart_drawer.py @@ -8,6 +8,7 @@ from manager_download import 
DownloadManager as DM MAX_LANGUAGES = 5 +GRAPH_PATH = "assets/bar_graph.png" async def create_loc_graph(yearly_data: Dict, save_path: str): diff --git a/sources/graphics_list_formatter.py b/sources/graphics_list_formatter.py index 95f12f1..d5b926c 100644 --- a/sources/graphics_list_formatter.py +++ b/sources/graphics_list_formatter.py @@ -34,18 +34,18 @@ def make_graph(percent: float): return f"{done_block * percent_quart}{empty_block * (25 - percent_quart)}" -def make_list(data: Dict = None, names: List[str] = None, texts: List[str] = None, percents: List[float] = None, top_num: int = 5, sort: bool = True) -> str: +def make_list(data: List = None, names: List[str] = None, texts: List[str] = None, percents: List[float] = None, top_num: int = 5, sort: bool = True) -> str: """ Make List """ if data is not None: - names = [value for key, value in data if key == "name"] if names is None else names - texts = [value for key, value in data if key == "text"] if texts is None else texts - percents = [value for key, value in data if key == "percent"] if percents is None else percents + names = [value for item in data for key, value in item.items() if key == "name"] if names is None else names + texts = [value for item in data for key, value in item.items() if key == "text"] if texts is None else texts + percents = [value for item in data for key, value in item.items() if key == "percent"] if percents is None else percents data = list(zip(names, texts, percents)) - top_data = sorted(data[:top_num], key=lambda _, __, p: p) if sort else data[:top_num] - data_list = [f"{n:25]}{' ' * (25 - len(n))}{t}{' ' * (20 - len(t))}{make_graph(p)} {p:05.2f} % " for n, t, p in top_data] + top_data = sorted(data[:top_num], key=lambda record: record[2]) if sort else data[:top_num] + data_list = [f"{n[:25]}{' ' * (25 - len(n))}{t}{' ' * (20 - len(t))}{make_graph(p)} {p:05.2f} % " for n, t, p in top_data] return '\n'.join(data_list) @@ -73,25 +73,25 @@ async def 
make_commit_day_time_list(time_zone: str) -> str: sum_week = sum(week_days) day_times = day_times[1:] + day_times[:1] - day_time_names = [f"{DAY_TIME_EMOJI[i]} {LM.t(DAY_TIME_NAMES[i])}" for i in range(len(day_times))] - day_time_texts = [f'{day_time} commits' for day_time in day_times] - day_time_percents = [round((day_time / sum_day) * 100, 2) for day_time in day_times] + dt_names = [f"{DAY_TIME_EMOJI[i]} {LM.t(DAY_TIME_NAMES[i])}" for i in range(len(day_times))] + dt_texts = [f'{day_time} commits' for day_time in day_times] + dt_percents = [round((day_time / sum_day) * 100, 2) for day_time in day_times] title = LM.t("I am an Early") if sum(day_times[0:2]) >= sum(day_times[2:4]) else LM.t("I am a Night") - stats += f"**{title}** \n\n```text\n{make_list(names=day_time_names, texts=day_time_texts, percents=day_time_percents, top_num=7)}\n\n```\n" + stats += f"**{title}** \n\n```text\n{make_list(names=dt_names, texts=dt_texts, percents=dt_percents, top_num=7, sort=False)}\n```\n" if EM.SHOW_DAYS_OF_WEEK: - week_day_names = [LM.t(week_day) for week_day in WEEK_DAY_NAMES] - week_day_texts = [f'{week_day} commits' for week_day in week_days] - week_day_percents = [round((week_day / sum_week) * 100, 2) for week_day in week_days] - title = LM.t("I am Most Productive on") % week_day_names[week_day_percents.index(max(week_day_percents))] - stats += f"📅 **{title}** \n\n```text\n{make_list(names=week_day_names, texts=week_day_texts, percents=week_day_percents, top_num=7)}\n\n```\n" + wd_names = [LM.t(week_day) for week_day in WEEK_DAY_NAMES] + wd_texts = [f'{week_day} commits' for week_day in week_days] + wd_percents = [round((week_day / sum_week) * 100, 2) for week_day in week_days] + title = LM.t("I am Most Productive on") % wd_names[wd_percents.index(max(wd_percents))] + stats += f"📅 **{title}** \n\n```text\n{make_list(names=wd_names, texts=wd_texts, percents=wd_percents, top_num=7, sort=False)}\n```\n" return stats -def make_language_per_repo_list(result: Dict) -> str: 
+def make_language_per_repo_list(repositories: Dict) -> str: language_count = dict() - repos_with_language = [repo for repo in result["data"]["user"]["repositories"]["edges"] if repo["node"]["primaryLanguage"] is not None] + repos_with_language = [repo for repo in repositories["data"]["user"]["repositories"]["edges"] if repo["node"]["primaryLanguage"] is not None] for repo in repos_with_language: language = repo["node"]["primaryLanguage"]["name"] language_count[language] = language_count.get(language, {"count": 0}) diff --git a/sources/main.py b/sources/main.py index ad2724d..027e032 100644 --- a/sources/main.py +++ b/sources/main.py @@ -7,12 +7,12 @@ from urllib.parse import quote from humanize import intword, naturalsize, intcomma, precisedelta -from manager_download import init_download_manager, DownloadManager as DM +from manager_download import init_download_manager, DownloadManager as DM, close_download_manager from manager_environment import EnvironmentManager as EM from manager_github import init_github_manager, GitHubManager as GHM from manager_localization import init_localization_manager, LocalizationManager as LM -from graphics_chart_drawer import create_loc_graph -from yearly_commit_calculator import GRAPH_PATH, calculate_yearly_commit_data +from graphics_chart_drawer import create_loc_graph, GRAPH_PATH +from yearly_commit_calculator import calculate_yearly_commit_data from graphics_list_formatter import make_list, make_commit_day_time_list, make_language_per_repo_list @@ -47,7 +47,7 @@ async def get_waka_time_stats() -> str: os_list = no_activity if len(data["data"]["operating_systems"]) == 0 else make_list(data["data"]["operating_systems"]) stats += f"💻 {LM.t('operating system')}: \n{os_list}\n\n" - stats += '```\n\n' + stats = f"{stats[:-1]}```\n\n" return stats @@ -141,6 +141,7 @@ async def main(): if GHM.update_readme(await get_stats()): print("Readme updated!") + await close_download_manager() if __name__ == '__main__': @@ -148,8 +149,3 @@ if 
__name__ == '__main__': run(main()) run_delta = datetime.now() - start_time print(f"Program processed in {precisedelta(run_delta, minimum_unit='microseconds')}.") - -# TODO: check function and variable naming -# TODO: check type hints -# TODO: sorted to max / min -# TODO: drop not awaited coroutines diff --git a/sources/manager_download.py b/sources/manager_download.py index 2fcb3e0..97e1c7c 100644 --- a/sources/manager_download.py +++ b/sources/manager_download.py @@ -110,6 +110,15 @@ async def init_download_manager(): }) +async def close_download_manager(): + """ + Initialize download manager: + - Setup headers for GitHub GraphQL requests. + - Launch static queries in background. + """ + await DownloadManager.close_remote_resources("linguist", "waka_latest", "waka_all", "github_stats") + + class DownloadManager: """ Class for handling and caching all kinds of requests. @@ -135,6 +144,16 @@ class DownloadManager: DownloadManager._REMOTE_RESOURCES_CACHE[resource] = DownloadManager._client.get(url) DownloadManager._client.headers = github_headers + @staticmethod + async def close_remote_resources(*resource: str): + """ + Prepare DownloadManager to launch GitHub API queries and launch all static queries. + :param resources: Dictionary of static queries, "IDENTIFIER": "URL". + :param github_headers: Dictionary of headers for GitHub API queries. 
+ """ + for resource in [DownloadManager._REMOTE_RESOURCES_CACHE[r] for r in resource if isinstance(DownloadManager._REMOTE_RESOURCES_CACHE[r], Awaitable)]: + resource.cancel() + @staticmethod async def _get_remote_resource(resource: str, convertor: Optional[Callable[[bytes], Dict]]) -> Dict: """ diff --git a/sources/yearly_commit_calculator.py b/sources/yearly_commit_calculator.py index 284e651..f72d3ac 100644 --- a/sources/yearly_commit_calculator.py +++ b/sources/yearly_commit_calculator.py @@ -7,13 +7,10 @@ from manager_environment import EnvironmentManager as EM from manager_github import GitHubManager as GHM -GRAPH_PATH = "assets/bar_graph.png" - - -async def calculate_yearly_commit_data(repository_data: Dict) -> Dict: +async def calculate_yearly_commit_data(repositories: Dict) -> Dict: yearly_data = dict() - total = len(repository_data["data"]["user"]["repositories"]["edges"]) - for ind, repo in enumerate(repository_data["data"]["user"]["repositories"]["edges"]): + total = len(repositories["data"]["user"]["repositories"]["edges"]) + for ind, repo in enumerate(repositories["data"]["user"]["repositories"]["edges"]): if repo["node"]["name"] not in EM.IGNORED_REPOS: print(f"{ind + 1}/{total}", "Retrieving repo:", repo["node"]["owner"]["login"], repo["node"]["name"]) await update_yearly_data_with_commit_stats(repo["node"], yearly_data) From 28c4c662729afd12ef967a0b5f98d6d836f668c4 Mon Sep 17 00:00:00 2001 From: pseusys Date: Fri, 17 Feb 2023 18:04:37 +0100 Subject: [PATCH 11/23] codestyle added --- .github/workflows/codestyle.yml | 24 ++++++++++++++++++++++++ Makefile | 5 +++++ 2 files changed, 29 insertions(+) create mode 100644 .github/workflows/codestyle.yml diff --git a/.github/workflows/codestyle.yml b/.github/workflows/codestyle.yml new file mode 100644 index 0000000..eb29b76 --- /dev/null +++ b/.github/workflows/codestyle.yml @@ -0,0 +1,24 @@ +name: CODESTYLE + +on: + push: + +jobs: + lint: + name: Run codestyle check + runs-on: ubuntu-latest + + steps: + 
- name: Checkout 🛎️ + uses: actions/checkout@v3 + + - name: Setup Python 3.6 🐍 + uses: actions/setup-python@v3 + with: + python-version: 3.6 + + - name: Install Dependencies 📥 + run: pip install -r requirements.txt + + - name: Run Codestyle ✔️ + run: flake8 --max-line-length=160 --exclude venv,assets . \ No newline at end of file diff --git a/Makefile b/Makefile index f07347f..9c92749 100644 --- a/Makefile +++ b/Makefile @@ -38,6 +38,11 @@ run-container: .PHONY: run-container +lint: + @ # Run flake8 linter + flake8 --max-line-length=160 --exclude venv,assets . +.PHONY: lint + clean: @ # Clean all build files, including: libraries, package manager configs, docker images and containers rm -rf venv From 94da25c639a233df0524dca8933edfe49e1a1ef5 Mon Sep 17 00:00:00 2001 From: pseusys Date: Fri, 17 Feb 2023 18:17:23 +0100 Subject: [PATCH 12/23] python version updated --- .github/workflows/codestyle.yml | 2 +- requirements.txt | 3 +++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/.github/workflows/codestyle.yml b/.github/workflows/codestyle.yml index eb29b76..aa66ef2 100644 --- a/.github/workflows/codestyle.yml +++ b/.github/workflows/codestyle.yml @@ -15,7 +15,7 @@ jobs: - name: Setup Python 3.6 🐍 uses: actions/setup-python@v3 with: - python-version: 3.6 + python-version: 3.6.7 - name: Install Dependencies 📥 run: pip install -r requirements.txt diff --git a/requirements.txt b/requirements.txt index d9cebe4..8d33d56 100644 --- a/requirements.txt +++ b/requirements.txt @@ -12,3 +12,6 @@ numpy~=1.24 # Request making and response parsing modules: httpx~=0.23 PyYAML~=6.0 + +# Codestyle checking module: +flake8~=6.0 From b36e374320ab7204e08cc00db924f60a4d9ca630 Mon Sep 17 00:00:00 2001 From: pseusys Date: Fri, 17 Feb 2023 18:33:29 +0100 Subject: [PATCH 13/23] ubuntu version changed to compatible with python 3.6 --- .github/workflows/build_image.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build_image.yml 
b/.github/workflows/build_image.yml index 4ae01df..c9a0e34 100644 --- a/.github/workflows/build_image.yml +++ b/.github/workflows/build_image.yml @@ -6,7 +6,7 @@ on: jobs: publish-server-image: name: Publish 'waka-readme-stats' image - runs-on: ubuntu-latest + runs-on: ubuntu-20.04 steps: - name: Checkout 🛎️ From 2e494aa87bc956b5b6f43ba9749804e088e472eb Mon Sep 17 00:00:00 2001 From: pseusys Date: Fri, 17 Feb 2023 22:41:08 +0100 Subject: [PATCH 14/23] documentation added, `black` linter added --- .github/workflows/build_image.yml | 2 +- .github/workflows/codestyle.yml | 4 +- Makefile | 3 +- requirements.txt | 3 +- sources/graphics_chart_drawer.py | 6 +-- sources/graphics_list_formatter.py | 60 ++++++++++++++++++++---- sources/main.py | 34 ++++++++++---- sources/manager_download.py | 52 ++++++++++----------- sources/manager_environment.py | 61 +++++++++++++----------- sources/manager_github.py | 72 ++++++++++++++++++++--------- sources/manager_localization.py | 20 +++++++- sources/yearly_commit_calculator.py | 18 +++++++- 12 files changed, 230 insertions(+), 105 deletions(-) diff --git a/.github/workflows/build_image.yml b/.github/workflows/build_image.yml index c9a0e34..4ae01df 100644 --- a/.github/workflows/build_image.yml +++ b/.github/workflows/build_image.yml @@ -6,7 +6,7 @@ on: jobs: publish-server-image: name: Publish 'waka-readme-stats' image - runs-on: ubuntu-20.04 + runs-on: ubuntu-latest steps: - name: Checkout 🛎️ diff --git a/.github/workflows/codestyle.yml b/.github/workflows/codestyle.yml index aa66ef2..bc13463 100644 --- a/.github/workflows/codestyle.yml +++ b/.github/workflows/codestyle.yml @@ -6,7 +6,7 @@ on: jobs: lint: name: Run codestyle check - runs-on: ubuntu-latest + runs-on: ubuntu-20.04 steps: - name: Checkout 🛎️ @@ -21,4 +21,4 @@ jobs: run: pip install -r requirements.txt - name: Run Codestyle ✔️ - run: flake8 --max-line-length=160 --exclude venv,assets . 
\ No newline at end of file + run: flake8 --max-line-length=160 --exclude venv,assets . && black --line-length=160 --check --exclude='/venv/|/assets/' . \ No newline at end of file diff --git a/Makefile b/Makefile index 9c92749..88ec394 100644 --- a/Makefile +++ b/Makefile @@ -38,9 +38,10 @@ run-container: .PHONY: run-container -lint: +lint: venv @ # Run flake8 linter flake8 --max-line-length=160 --exclude venv,assets . + black --line-length=160 --check --exclude='/venv/|/assets/' . .PHONY: lint clean: diff --git a/requirements.txt b/requirements.txt index 8d33d56..c191e13 100644 --- a/requirements.txt +++ b/requirements.txt @@ -13,5 +13,6 @@ numpy~=1.24 httpx~=0.23 PyYAML~=6.0 -# Codestyle checking module: +# Codestyle checking modules: flake8~=6.0 +black~=23.1 diff --git a/sources/graphics_chart_drawer.py b/sources/graphics_chart_drawer.py index 4aae8cd..786091e 100644 --- a/sources/graphics_chart_drawer.py +++ b/sources/graphics_chart_drawer.py @@ -7,8 +7,8 @@ import matplotlib.pyplot as plt from manager_download import DownloadManager as DM -MAX_LANGUAGES = 5 -GRAPH_PATH = "assets/bar_graph.png" +MAX_LANGUAGES = 5 # Number of top languages to add to chart, for each year quarter +GRAPH_PATH = "assets/bar_graph.png" # Chart saving path. 
async def create_loc_graph(yearly_data: Dict, save_path: str): @@ -27,7 +27,7 @@ async def create_loc_graph(yearly_data: Dict, save_path: str): languages_all_loc = dict() for i, y in enumerate(sorted(yearly_data.keys())): for q in yearly_data[y].keys(): - langs = sorted(yearly_data[y][q].keys(), key=lambda l: yearly_data[y][q][l], reverse=True)[0:MAX_LANGUAGES] + langs = sorted(yearly_data[y][q].keys(), key=lambda n: yearly_data[y][q][n], reverse=True)[0:MAX_LANGUAGES] for lang in langs: if lang not in languages_all_loc: diff --git a/sources/graphics_list_formatter.py b/sources/graphics_list_formatter.py index d5b926c..ddb949e 100644 --- a/sources/graphics_list_formatter.py +++ b/sources/graphics_list_formatter.py @@ -10,24 +10,39 @@ from manager_github import GitHubManager as GHM from manager_localization import LocalizationManager as LM -DAY_TIME_EMOJI = ["🌞", "🌆", "🌃", "🌙"] -DAY_TIME_NAMES = ["Morning", "Daytime", "Evening", "Night"] -WEEK_DAY_NAMES = ["Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday", "Sunday"] +DAY_TIME_EMOJI = ["🌞", "🌆", "🌃", "🌙"] # Emojis, representing different times of day. +DAY_TIME_NAMES = ["Morning", "Daytime", "Evening", "Night"] # Localization identifiers for different times of day. +WEEK_DAY_NAMES = ["Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday", "Sunday"] # Localization identifiers for different days of week. class Symbol(Enum): + """ + Symbol version enum. + Allows to retrieve symbols pairs by calling `Symbol.get_symbols(version)`. + """ + VERSION_1 = "█", "░" VERSION_2 = "⣿", "⣀" VERSION_3 = "⬛", "⬜" @staticmethod def get_symbols(version: int) -> Tuple[str, str]: + """ + Retrieves symbols pair for specified version. + + :param version: Required symbols version. + :returns: Two strings for filled and empty symbol value in a tuple. + """ return Symbol[f"VERSION_{version}"].value def make_graph(percent: float): """ - Make progress graph from API graph + Make text progress bar. 
+ Length of the progress bar is 25 characters. + + :param percent: Completion percent of the progress bar. + :return: The string progress bar representation. """ done_block, empty_block = Symbol.get_symbols(EM.SYMBOL_VERSION) percent_quart = round(percent / 4) @@ -36,7 +51,20 @@ def make_graph(percent: float): def make_list(data: List = None, names: List[str] = None, texts: List[str] = None, percents: List[float] = None, top_num: int = 5, sort: bool = True) -> str: """ - Make List + Make list of text progress bars with supportive info. + Each row has the following structure: [name of the measure] [quantity description (with words)] [progress bar] [total percentage]. + Name of the measure: up to 25 characters. + Quantity description: how many _things_ were found, up to 20 characters. + Progress bar: measure percentage, 25 characters. + Total percentage: floating point percentage. + + :param data: list of dictionaries, each of them containing a measure (name, text and percent). + :param names: list of names (names of measure), overloads data if defined. + :param texts: list of texts (quantity descriptions), overloads data if defined. + :param percents: list of percents (total percentages), overloads data if defined. + :param top_num: how many measures to display, default: 5. + :param sort: if measures should be sorted by total percentage, default: True. + :returns: The string representation of the list. 
""" if data is not None: names = [value for item in data for key, value in item.items() if key == "name"] if names is None else names @@ -46,10 +74,16 @@ def make_list(data: List = None, names: List[str] = None, texts: List[str] = Non data = list(zip(names, texts, percents)) top_data = sorted(data[:top_num], key=lambda record: record[2]) if sort else data[:top_num] data_list = [f"{n[:25]}{' ' * (25 - len(n))}{t}{' ' * (20 - len(t))}{make_graph(p)} {p:05.2f} % " for n, t, p in top_data] - return '\n'.join(data_list) + return "\n".join(data_list) async def make_commit_day_time_list(time_zone: str) -> str: + """ + Calculate commit-related info, how many commits were made, and at what time of day and day of week. + + :param time_zone: User time zone. + :returns: string representation of statistics. + """ stats = str() result = await DM.get_remote_graphql("repos_contributed_to", username=GHM.USER.login) @@ -62,8 +96,8 @@ async def make_commit_day_time_list(time_zone: str) -> str: result = await DM.get_remote_graphql("repo_committed_dates", owner=repository["owner"]["login"], name=repository["name"], id=GHM.USER.node_id) committed_dates = result["data"]["repository"]["defaultBranchRef"]["target"]["history"]["edges"] - for committedDate in committed_dates: - local_date = datetime.strptime(committedDate["node"]["committedDate"], "%Y-%m-%dT%H:%M:%SZ") + for committed_date in committed_dates: + local_date = datetime.strptime(committed_date["node"]["committedDate"], "%Y-%m-%dT%H:%M:%SZ") date = local_date.replace(tzinfo=utc).astimezone(timezone(time_zone)) day_times[date.hour // 6] += 1 @@ -74,14 +108,14 @@ async def make_commit_day_time_list(time_zone: str) -> str: day_times = day_times[1:] + day_times[:1] dt_names = [f"{DAY_TIME_EMOJI[i]} {LM.t(DAY_TIME_NAMES[i])}" for i in range(len(day_times))] - dt_texts = [f'{day_time} commits' for day_time in day_times] + dt_texts = [f"{day_time} commits" for day_time in day_times] dt_percents = [round((day_time / sum_day) * 100, 2) 
for day_time in day_times] title = LM.t("I am an Early") if sum(day_times[0:2]) >= sum(day_times[2:4]) else LM.t("I am a Night") stats += f"**{title}** \n\n```text\n{make_list(names=dt_names, texts=dt_texts, percents=dt_percents, top_num=7, sort=False)}\n```\n" if EM.SHOW_DAYS_OF_WEEK: wd_names = [LM.t(week_day) for week_day in WEEK_DAY_NAMES] - wd_texts = [f'{week_day} commits' for week_day in week_days] + wd_texts = [f"{week_day} commits" for week_day in week_days] wd_percents = [round((week_day / sum_week) * 100, 2) for week_day in week_days] title = LM.t("I am Most Productive on") % wd_names[wd_percents.index(max(wd_percents))] stats += f"📅 **{title}** \n\n```text\n{make_list(names=wd_names, texts=wd_texts, percents=wd_percents, top_num=7, sort=False)}\n```\n" @@ -90,6 +124,12 @@ async def make_commit_day_time_list(time_zone: str) -> str: def make_language_per_repo_list(repositories: Dict) -> str: + """ + Calculate language-related info, how many repositories in what language user has. + + :param repositories: User repositories. + :returns: string representation of statistics. 
+ """ language_count = dict() repos_with_language = [repo for repo in repositories["data"]["user"]["repositories"]["edges"] if repo["node"]["primaryLanguage"] is not None] for repo in repos_with_language: diff --git a/sources/main.py b/sources/main.py index 027e032..9bb43e8 100644 --- a/sources/main.py +++ b/sources/main.py @@ -7,7 +7,7 @@ from urllib.parse import quote from humanize import intword, naturalsize, intcomma, precisedelta -from manager_download import init_download_manager, DownloadManager as DM, close_download_manager +from manager_download import init_download_manager, DownloadManager as DM from manager_environment import EnvironmentManager as EM from manager_github import init_github_manager, GitHubManager as GHM from manager_localization import init_localization_manager, LocalizationManager as LM @@ -17,6 +17,12 @@ from graphics_list_formatter import make_list, make_commit_day_time_list, make_l async def get_waka_time_stats() -> str: + """ + Collects user info from wakatime. + Info includes most common commit time, timezone, language, editors, projects and OSs. + + :returns: String representation of the info. + """ stats = str() data = await DM.get_remote_json("waka_latest") @@ -52,7 +58,13 @@ async def get_waka_time_stats() -> str: return stats -async def get_short_github_info(): +async def get_short_github_info() -> str: + """ + Collects user info from GitHub public profile. + The stats include: disk usage, contributions number, whether the user has opted to hire, public and private repositories number. + + :returns: String representation of the info. 
+ """ stats = f"**🐱 {LM.t('My GitHub Data')}** \n\n" if GHM.USER.disk_usage is None: @@ -64,7 +76,7 @@ async def get_short_github_info(): data = await DM.get_remote_json("github_stats") if len(data["years"]) > 0: - contributions = LM.t('Contributions in the year') % (intcomma(data["years"][0]['total']), data["years"][0]['year']) + contributions = LM.t("Contributions in the year") % (intcomma(data["years"][0]["total"]), data["years"][0]["year"]) stats += f"> 🏆 {contributions}\n > \n" opted_to_hire = GHM.USER.hireable @@ -90,7 +102,10 @@ async def get_short_github_info(): async def get_stats() -> str: """ - Gets API data and returns markdown progress + Creates new README.md content from all the acquired statistics from all places. + The readme includes data from wakatime, contributed lines of code number, GitHub profile info and last updated date. + + :returns: String representation of README.md contents. """ stats = str() repositories = await DM.get_remote_graphql("user_repository_list", username=GHM.USER.login, id=GHM.USER.node_id) @@ -125,8 +140,7 @@ async def get_stats() -> str: await create_loc_graph(yearly_data, GRAPH_PATH) GHM.update_chart(GRAPH_PATH) chart_path = f"{GHM.USER.login}/{GHM.USER.login}/{GHM.branch()}/{GRAPH_PATH}" - stats += '**' + LM.t('Timeline') + '**\n\n' - stats += f"![Lines of Code chart](https://raw.githubusercontent.com/{chart_path})\n\n" + stats += f"**{LM.t('Timeline')}**\n\n![Lines of Code chart](https://raw.githubusercontent.com/{chart_path})\n\n" if EM.SHOW_UPDATED_DATE: stats += f"\n Last Updated on {datetime.now().strftime(EM.UPDATED_DATE_FORMAT)} UTC" @@ -135,16 +149,20 @@ async def get_stats() -> str: async def main(): + """ + Application main function. + Initializes all managers, collects user info and updates README.md if necessary. 
+ """ init_github_manager() await init_download_manager() init_localization_manager() if GHM.update_readme(await get_stats()): print("Readme updated!") - await close_download_manager() + await DM.close_remote_resources() -if __name__ == '__main__': +if __name__ == "__main__": start_time = datetime.now() run(main()) run_delta = datetime.now() - start_time diff --git a/sources/manager_download.py b/sources/manager_download.py index 97e1c7c..538d0a3 100644 --- a/sources/manager_download.py +++ b/sources/manager_download.py @@ -1,3 +1,4 @@ +from asyncio import Task from hashlib import md5 from json import dumps from string import Template @@ -11,6 +12,7 @@ from manager_github import GitHubManager as GHM GITHUB_API_QUERIES = { + # Query to collect info about all user repositories, including: is it a fork, name and owner login. "repos_contributed_to": """ { user(login: "$username") { @@ -25,6 +27,7 @@ GITHUB_API_QUERIES = { } } }""", + # Query to collect info about all commits in user repositories, including: commit date. "repo_committed_dates": """ { repository(owner: "$owner", name: "$name") { @@ -43,6 +46,7 @@ GITHUB_API_QUERIES = { } } }""", + # Query to collect info about all repositories user created or collaborated on, including: name, primary language and owner login. "user_repository_list": """ { user(login: "$username") { @@ -62,6 +66,7 @@ GITHUB_API_QUERIES = { } } """, + # Query to collect info about user commits to given repository, including: commit date, additions and deletions numbers. "repo_commit_list": """ { repository(owner: "$owner", name: "$name") { @@ -90,7 +95,7 @@ GITHUB_API_QUERIES = { } } } -""" +""", } @@ -100,23 +105,15 @@ async def init_download_manager(): - Setup headers for GitHub GraphQL requests. - Launch static queries in background. 
""" - await DownloadManager.load_remote_resources({ - "linguist": "https://cdn.jsdelivr.net/gh/github/linguist@master/lib/linguist/languages.yml", - "waka_latest": f"https://wakatime.com/api/v1/users/current/stats/last_7_days?api_key={EM.WAKATIME_API_KEY}", - "waka_all": f"https://wakatime.com/api/v1/users/current/all_time_since_today?api_key={EM.WAKATIME_API_KEY}", - "github_stats": f"https://github-contributions.vercel.app/api/v1/{GHM.USER.login}" - }, { - "Authorization": f"Bearer {EM.GH_TOKEN}" - }) - - -async def close_download_manager(): - """ - Initialize download manager: - - Setup headers for GitHub GraphQL requests. - - Launch static queries in background. - """ - await DownloadManager.close_remote_resources("linguist", "waka_latest", "waka_all", "github_stats") + await DownloadManager.load_remote_resources( + { + "linguist": "https://cdn.jsdelivr.net/gh/github/linguist@master/lib/linguist/languages.yml", + "waka_latest": f"https://wakatime.com/api/v1/users/current/stats/last_7_days?api_key={EM.WAKATIME_API_KEY}", + "waka_all": f"https://wakatime.com/api/v1/users/current/all_time_since_today?api_key={EM.WAKATIME_API_KEY}", + "github_stats": f"https://github-contributions.vercel.app/api/v1/{GHM.USER.login}", + }, + {"Authorization": f"Bearer {EM.GH_TOKEN}"}, + ) class DownloadManager: @@ -130,6 +127,7 @@ class DownloadManager: DownloadManager launches all static queries asynchronously upon initialization and caches their results. It also executes dynamic queries upon request and caches result. """ + _client = AsyncClient(timeout=60.0) _REMOTE_RESOURCES_CACHE = dict() @@ -145,14 +143,16 @@ class DownloadManager: DownloadManager._client.headers = github_headers @staticmethod - async def close_remote_resources(*resource: str): + async def close_remote_resources(): """ - Prepare DownloadManager to launch GitHub API queries and launch all static queries. - :param resources: Dictionary of static queries, "IDENTIFIER": "URL". 
- :param github_headers: Dictionary of headers for GitHub API queries. + Close DownloadManager and cancel all un-awaited static web queries. + Await all queries that could not be cancelled. """ - for resource in [DownloadManager._REMOTE_RESOURCES_CACHE[r] for r in resource if isinstance(DownloadManager._REMOTE_RESOURCES_CACHE[r], Awaitable)]: - resource.cancel() + for resource in DownloadManager._REMOTE_RESOURCES_CACHE.values(): + if isinstance(resource, Task): + resource.cancel() + elif isinstance(resource, Awaitable): + await resource @staticmethod async def _get_remote_resource(resource: str, convertor: Optional[Callable[[bytes], Dict]]) -> Dict: @@ -208,9 +208,7 @@ class DownloadManager: """ key = f"{query}_{md5(dumps(kwargs, sort_keys=True).encode('utf-8')).digest()}" if key not in DownloadManager._REMOTE_RESOURCES_CACHE: - res = await DownloadManager._client.post("https://api.github.com/graphql", json={ - "query": Template(GITHUB_API_QUERIES[query]).substitute(kwargs) - }) + res = await DownloadManager._client.post("https://api.github.com/graphql", json={"query": Template(GITHUB_API_QUERIES[query]).substitute(kwargs)}) DownloadManager._REMOTE_RESOURCES_CACHE[key] = res else: res = DownloadManager._REMOTE_RESOURCES_CACHE[key] diff --git a/sources/manager_environment.py b/sources/manager_environment.py index 85f38d5..85b9ae5 100644 --- a/sources/manager_environment.py +++ b/sources/manager_environment.py @@ -2,35 +2,44 @@ from os import getenv, environ class EnvironmentManager: - _TRUTHY = ['true', '1', 't', 'y', 'yes'] + """ + Class for handling all environmental variables used by the action. + There are only two required variables: `INPUT_GH_TOKEN` and `INPUT_WAKATIME_API_KEY`. + The others have a provided default value. + For all boolean variables a 'truthy'-list is checked (not only true/false, but also 1, t, y and yes are accepted). + List variable `IGNORED_REPOS` is split and parsed. + Integer variable `SYMBOL_VERSION` is parsed. 
+ """ - GH_TOKEN = environ['INPUT_GH_TOKEN'] - WAKATIME_API_KEY = environ['INPUT_WAKATIME_API_KEY'] + _TRUTHY = ["true", "1", "t", "y", "yes"] + + GH_TOKEN = environ["INPUT_GH_TOKEN"] + WAKATIME_API_KEY = environ["INPUT_WAKATIME_API_KEY"] SECTION_NAME = getenv("INPUT_SECTION_NAME", "waka") - BRANCH_NAME = getenv('INPUT_PUSH_BRANCH_NAME', "") + BRANCH_NAME = getenv("INPUT_PUSH_BRANCH_NAME", "") - SHOW_OS = getenv('INPUT_SHOW_OS', "False").lower() in _TRUTHY - SHOW_PROJECTS = getenv('INPUT_SHOW_PROJECTS', "True").lower() in _TRUTHY - SHOW_EDITORS = getenv('INPUT_SHOW_EDITORS', "True").lower() in _TRUTHY - SHOW_TIMEZONE = getenv('INPUT_SHOW_TIMEZONE', "True").lower() in _TRUTHY - SHOW_COMMIT = getenv('INPUT_SHOW_COMMIT', "True").lower() in _TRUTHY - SHOW_LANGUAGE = getenv('INPUT_SHOW_LANGUAGE', "True").lower() in _TRUTHY - SHOW_LINES_OF_CODE = getenv('INPUT_SHOW_LINES_OF_CODE', "False").lower() in _TRUTHY - SHOW_LANGUAGE_PER_REPO = getenv('INPUT_SHOW_LANGUAGE_PER_REPO', "True").lower() in _TRUTHY - SHOW_LOC_CHART = getenv('INPUT_SHOW_LOC_CHART', "True").lower() in _TRUTHY - SHOW_DAYS_OF_WEEK = getenv('INPUT_SHOW_DAYS_OF_WEEK', "True").lower() in _TRUTHY - SHOW_PROFILE_VIEWS = getenv('INPUT_SHOW_PROFILE_VIEWS', "True").lower() in _TRUTHY - SHOW_SHORT_INFO = getenv('INPUT_SHOW_SHORT_INFO', "True").lower() in _TRUTHY - SHOW_UPDATED_DATE = getenv('INPUT_SHOW_UPDATED_DATE', "True").lower() in _TRUTHY - SHOW_TOTAL_CODE_TIME = getenv('INPUT_SHOW_TOTAL_CODE_TIME', "True").lower() in _TRUTHY + SHOW_OS = getenv("INPUT_SHOW_OS", "False").lower() in _TRUTHY + SHOW_PROJECTS = getenv("INPUT_SHOW_PROJECTS", "True").lower() in _TRUTHY + SHOW_EDITORS = getenv("INPUT_SHOW_EDITORS", "True").lower() in _TRUTHY + SHOW_TIMEZONE = getenv("INPUT_SHOW_TIMEZONE", "True").lower() in _TRUTHY + SHOW_COMMIT = getenv("INPUT_SHOW_COMMIT", "True").lower() in _TRUTHY + SHOW_LANGUAGE = getenv("INPUT_SHOW_LANGUAGE", "True").lower() in _TRUTHY + SHOW_LINES_OF_CODE = getenv("INPUT_SHOW_LINES_OF_CODE", 
"False").lower() in _TRUTHY + SHOW_LANGUAGE_PER_REPO = getenv("INPUT_SHOW_LANGUAGE_PER_REPO", "True").lower() in _TRUTHY + SHOW_LOC_CHART = getenv("INPUT_SHOW_LOC_CHART", "True").lower() in _TRUTHY + SHOW_DAYS_OF_WEEK = getenv("INPUT_SHOW_DAYS_OF_WEEK", "True").lower() in _TRUTHY + SHOW_PROFILE_VIEWS = getenv("INPUT_SHOW_PROFILE_VIEWS", "True").lower() in _TRUTHY + SHOW_SHORT_INFO = getenv("INPUT_SHOW_SHORT_INFO", "True").lower() in _TRUTHY + SHOW_UPDATED_DATE = getenv("INPUT_SHOW_UPDATED_DATE", "True").lower() in _TRUTHY + SHOW_TOTAL_CODE_TIME = getenv("INPUT_SHOW_TOTAL_CODE_TIME", "True").lower() in _TRUTHY - COMMIT_BY_ME = getenv('INPUT_COMMIT_BY_ME', "False").lower() in _TRUTHY - COMMIT_MESSAGE = getenv('INPUT_COMMIT_MESSAGE', "Updated with Dev Metrics") - COMMIT_USERNAME = getenv('INPUT_COMMIT_USERNAME', "") - COMMIT_EMAIL = getenv('INPUT_COMMIT_EMAIL', "") + COMMIT_BY_ME = getenv("INPUT_COMMIT_BY_ME", "False").lower() in _TRUTHY + COMMIT_MESSAGE = getenv("INPUT_COMMIT_MESSAGE", "Updated with Dev Metrics") + COMMIT_USERNAME = getenv("INPUT_COMMIT_USERNAME", "") + COMMIT_EMAIL = getenv("INPUT_COMMIT_EMAIL", "") - LOCALE = getenv('INPUT_LOCALE', "en") - UPDATED_DATE_FORMAT = getenv('INPUT_UPDATED_DATE_FORMAT', "%d/%m/%Y %H:%M:%S") - IGNORED_REPOS = getenv('INPUT_IGNORED_REPOS', "").replace(' ', '').split(',') - SYMBOL_VERSION = int(getenv('INPUT_SYMBOL_VERSION')) + LOCALE = getenv("INPUT_LOCALE", "en") + UPDATED_DATE_FORMAT = getenv("INPUT_UPDATED_DATE_FORMAT", "%d/%m/%Y %H:%M:%S") + IGNORED_REPOS = getenv("INPUT_IGNORED_REPOS", "").replace(" ", "").split(",") + SYMBOL_VERSION = int(getenv("INPUT_SYMBOL_VERSION")) diff --git a/sources/manager_github.py b/sources/manager_github.py index 654dac2..45ce2e3 100644 --- a/sources/manager_github.py +++ b/sources/manager_github.py @@ -8,6 +8,8 @@ from manager_environment import EnvironmentManager as EM def init_github_manager(): """ + Initialize GitHub manager. 
+ Current user, user readme repo and readme file are downloaded. """ GitHubManager.prepare_github_env() print(f"Current user: {GitHubManager.USER.login}") @@ -16,61 +18,79 @@ def init_github_manager(): class GitHubManager: USER: AuthenticatedUser REPO: Repository - README: ContentFile - README_CONTENTS: str + _README: ContentFile + _README_CONTENTS: str - _START_COMMENT = f'' - _END_COMMENT = f'' + _START_COMMENT = f"" + _END_COMMENT = f"" _README_REGEX = f"{_START_COMMENT}[\\s\\S]+{_END_COMMENT}" @staticmethod def prepare_github_env(): """ + Download and store for future use: + - Current GitHub user. + - Named repo of the user [username]/[username]. + - README.md file of this repo. + - Parsed contents of the file. """ github = Github(EM.GH_TOKEN) GitHubManager.USER = github.get_user() - GitHubManager.REPO = github.get_repo(f"{GitHubManager.USER.login}/{GitHubManager.USER.login}") - GitHubManager.README = GitHubManager.REPO.get_readme() - GitHubManager.README_CONTENTS = str(b64decode(GitHubManager.README.content), 'utf-8') + GitHubManager._REPO = github.get_repo(f"{GitHubManager.USER.login}/{GitHubManager.USER.login}") + GitHubManager._README = GitHubManager._REPO.get_readme() + GitHubManager._README_CONTENTS = str(b64decode(GitHubManager._README.content), "utf-8") @staticmethod - def _generate_new_readme(stats: str): + def _generate_new_readme(stats: str) -> str: """ - Generate a new Readme.md + Generates new README.md file, inserts its contents between start and end tags. + + :param stats: contents to insert. + :returns: new README.md string. """ readme_stats = f"{GitHubManager._START_COMMENT}\n{stats}\n{GitHubManager._END_COMMENT}" - return sub(GitHubManager._README_REGEX, readme_stats, GitHubManager.README_CONTENTS) + return sub(GitHubManager._README_REGEX, readme_stats, GitHubManager._README_CONTENTS) @staticmethod - def _get_author(): + def _get_author() -> InputGitAuthor: """ + Gets GitHub commit author specified by environmental variables. 
+        It is the user himself or a 'readme-bot'.
+
+        :returns: Commit author.
+        """
         if EM.COMMIT_BY_ME:
-            return InputGitAuthor(
-                GitHubManager.USER.login or EM.COMMIT_USERNAME,
-                GitHubManager.USER.email or EM.COMMIT_EMAIL
-            )
+            return InputGitAuthor(GitHubManager.USER.login or EM.COMMIT_USERNAME, GitHubManager.USER.email or EM.COMMIT_EMAIL)
         else:
-            return InputGitAuthor(
-                EM.COMMIT_USERNAME or 'readme-bot',
-                EM.COMMIT_EMAIL or '41898282+github-actions[bot]@users.noreply.github.com'
-            )
+            return InputGitAuthor(EM.COMMIT_USERNAME or "readme-bot", EM.COMMIT_EMAIL or "41898282+github-actions[bot]@users.noreply.github.com")
 
     @staticmethod
     def branch() -> str:
+        """
+        Gets name of branch to commit to specified by environmental variables.
+        It is the default branch (regularly, 'main' or 'master') or a branch specified by user.
+
+        :returns: Branch name.
+        """
         return GitHubManager.REPO.default_branch if EM.BRANCH_NAME == "" else EM.BRANCH_NAME
 
     @staticmethod
     def update_readme(stats: str) -> bool:
+        """
+        Updates readme with given data if necessary.
+        Uses commit author, commit message and branch name specified by environmental variables.
+
+        :returns: whether the README.md file was updated or not.
+        """
         new_readme = GitHubManager._generate_new_readme(stats)
-        if new_readme != GitHubManager.README_CONTENTS:
+        if new_readme != GitHubManager._README_CONTENTS:
             GitHubManager.REPO.update_file(
-                path=GitHubManager.README.path,
+                path=GitHubManager._README.path,
                 message=EM.COMMIT_MESSAGE,
                 content=new_readme,
-                sha=GitHubManager.README.sha,
+                sha=GitHubManager._README.sha,
                 branch=GitHubManager.branch(),
-                committer=GitHubManager._get_author()
+                committer=GitHubManager._get_author(),
             )
             return True
         else:
@@ -78,6 +98,12 @@ class GitHubManager:
 
     @staticmethod
     def update_chart(chart_path: str):
+        """
+        Updates lines of code chart.
+        Uses commit author, commit message and branch name specified by environmental variables.
+
+        :param chart_path: path to saved lines of code chart.
+ """ with open(chart_path, "rb") as input_file: data = input_file.read() try: diff --git a/sources/manager_localization.py b/sources/manager_localization.py index 3946869..497ec2e 100644 --- a/sources/manager_localization.py +++ b/sources/manager_localization.py @@ -7,19 +7,37 @@ from manager_environment import EnvironmentManager as EM def init_localization_manager(): """ + Initialize localization manager. + Load GUI translations JSON file. """ LocalizationManager.load_localization("translation.json") class LocalizationManager: + """ + Class for handling localization (and maybe other file IO in future). + Stores localization in dictionary. + """ + _LOCALIZATION: Dict[str, str] = dict() @staticmethod def load_localization(file: str): - with open(join(dirname(__file__), file), encoding='utf-8') as config_file: + """ + Read localization file and store locale defined with environmental variable. + + :param file: Localization file path, related to current file (in sources root). + """ + with open(join(dirname(__file__), file), encoding="utf-8") as config_file: data = load(config_file) LocalizationManager._LOCALIZATION = data[EM.LOCALE] @staticmethod def t(key: str) -> str: + """ + Translate string to current localization. + + :param key: Localization key. + :returns: Translation string. + """ return LocalizationManager._LOCALIZATION[key] diff --git a/sources/yearly_commit_calculator.py b/sources/yearly_commit_calculator.py index f72d3ac..891025a 100644 --- a/sources/yearly_commit_calculator.py +++ b/sources/yearly_commit_calculator.py @@ -8,6 +8,13 @@ from manager_github import GitHubManager as GHM async def calculate_yearly_commit_data(repositories: Dict) -> Dict: + """ + Calculate commit data by years. + Commit data includes difference between contribution additions and deletions in each quarter of each recorded year. + + :param repositories: user repositories info dictionary. + :returns: Commit quarter yearly data dictionary. 
+ """ yearly_data = dict() total = len(repositories["data"]["user"]["repositories"]["edges"]) for ind, repo in enumerate(repositories["data"]["user"]["repositories"]["edges"]): @@ -17,7 +24,14 @@ async def calculate_yearly_commit_data(repositories: Dict) -> Dict: return yearly_data -async def update_yearly_data_with_commit_stats(repo_details: Dict, yearly_data: Dict) -> Dict: +async def update_yearly_data_with_commit_stats(repo_details: Dict, yearly_data: Dict): + """ + Updates yearly commit data with commits from given repository. + Skips update if the commit isn't related to any repository. + + :param repo_details: Dictionary with information about the given repository. + :param yearly_data: Yearly data dictionary to update. + """ commit_data = await DM.get_remote_graphql("repo_commit_list", owner=repo_details["owner"]["login"], name=repo_details["name"], id=GHM.USER.node_id) if commit_data["data"]["repository"] is None: @@ -36,4 +50,4 @@ async def update_yearly_data_with_commit_stats(repo_details: Dict, yearly_data: yearly_data[curr_year][quarter] = dict() if repo_details["primaryLanguage"]["name"] not in yearly_data[curr_year][quarter]: yearly_data[curr_year][quarter][repo_details["primaryLanguage"]["name"]] = 0 - yearly_data[curr_year][quarter][repo_details["primaryLanguage"]["name"]] += (commit["additions"] - commit["deletions"]) + yearly_data[curr_year][quarter][repo_details["primaryLanguage"]["name"]] += commit["additions"] - commit["deletions"] From a43205757b07254a2dd0a5150e9104e756625ec4 Mon Sep 17 00:00:00 2001 From: pseusys Date: Fri, 17 Feb 2023 22:42:44 +0100 Subject: [PATCH 15/23] py3.6 pygithub version compatibility --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index c191e13..1ce01ef 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,5 @@ # GitHub integration modules: -PyGithub~=1.57 +PyGithub~=1.56 # Markdown visualization modules: pytz~=2022.7 From 
4afa664de2b67eb0cf24d78b0dc64893a7d5dbfa Mon Sep 17 00:00:00 2001 From: pseusys Date: Fri, 17 Feb 2023 22:45:25 +0100 Subject: [PATCH 16/23] required python version updated to 3.8 (3.6 is pretty old though) --- .github/workflows/codestyle.yml | 6 +++--- CONTRIBUTING.md | 2 +- Makefile | 2 +- requirements.txt | 2 +- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/codestyle.yml b/.github/workflows/codestyle.yml index bc13463..174ca6c 100644 --- a/.github/workflows/codestyle.yml +++ b/.github/workflows/codestyle.yml @@ -6,16 +6,16 @@ on: jobs: lint: name: Run codestyle check - runs-on: ubuntu-20.04 + runs-on: ubuntu-latest steps: - name: Checkout 🛎️ uses: actions/checkout@v3 - - name: Setup Python 3.6 🐍 + - name: Setup Python 3.8 🐍 uses: actions/setup-python@v3 with: - python-version: 3.6.7 + python-version: 3.8 - name: Install Dependencies 📥 run: pip install -r requirements.txt diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 76ea883..4489c75 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -21,7 +21,7 @@ Once you've worked on your feature/bugfix etc, you can open a pull request using ### Setting up development environment -This project is written in Python, requires **Python 3.6 or higher**, and uses `pip` . +This project is written in Python, requires **Python 3.8 or higher**, and uses `pip` . To set it up, just fork + clone it, install all the dependencies: diff --git a/Makefile b/Makefile index 88ec394..baa64a9 100644 --- a/Makefile +++ b/Makefile @@ -12,7 +12,7 @@ help: @ # Print help commands echo "Welcome to 'waka-readme-stats' GitHub Actions!" echo "The action can be tested locally with: 'make run'." - echo "NB! For local testing Python version 3.6+ is required." + echo "NB! For local testing Python version 3.8+ is required." echo "The action image can be built locally with: 'make container'." echo "NB! For local container building Docker version 20+ is required." 
echo "The action directory and image can be cleaned with: 'make clean'." diff --git a/requirements.txt b/requirements.txt index 1ce01ef..c191e13 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,5 @@ # GitHub integration modules: -PyGithub~=1.56 +PyGithub~=1.57 # Markdown visualization modules: pytz~=2022.7 From 1778166ac71934bfd44d1de9bef29d89d4f5b2f0 Mon Sep 17 00:00:00 2001 From: pseusys Date: Fri, 17 Feb 2023 22:48:00 +0100 Subject: [PATCH 17/23] actions version updated --- .github/workflows/build_image.yml | 2 +- .github/workflows/codestyle.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build_image.yml b/.github/workflows/build_image.yml index 4ae01df..6a27942 100644 --- a/.github/workflows/build_image.yml +++ b/.github/workflows/build_image.yml @@ -30,7 +30,7 @@ jobs: type=semver,pattern={{major}}.{{minor}} - name: Build and push Docker image 🏗️ - uses: docker/build-push-action@v3 + uses: docker/build-push-action@v4 with: push: ${{ github.ref == 'refs/heads/master' || startsWith(github.ref, 'refs/heads/releases') }} tags: ${{ steps.meta.outputs.tags }} diff --git a/.github/workflows/codestyle.yml b/.github/workflows/codestyle.yml index 174ca6c..3d8c1ac 100644 --- a/.github/workflows/codestyle.yml +++ b/.github/workflows/codestyle.yml @@ -13,7 +13,7 @@ jobs: uses: actions/checkout@v3 - name: Setup Python 3.8 🐍 - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 with: python-version: 3.8 From e66fc6776e7a9f694004a9e0e3aa0af2e0257992 Mon Sep 17 00:00:00 2001 From: pseusys Date: Fri, 17 Feb 2023 22:54:06 +0100 Subject: [PATCH 18/23] small fixes --- .github/workflows/codestyle.yml | 2 +- Makefile | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/codestyle.yml b/.github/workflows/codestyle.yml index 3d8c1ac..5ac6780 100644 --- a/.github/workflows/codestyle.yml +++ b/.github/workflows/codestyle.yml @@ -21,4 +21,4 @@ jobs: run: pip install -r requirements.txt - 
name: Run Codestyle ✔️ - run: flake8 --max-line-length=160 --exclude venv,assets . && black --line-length=160 --check --exclude='/venv/|/assets/' . \ No newline at end of file + run: flake8 --max-line-length=160 --exclude venv,assets . && black --line-length=160 --check --exclude='/venv/|/assets/' . diff --git a/Makefile b/Makefile index baa64a9..d7c3ed0 100644 --- a/Makefile +++ b/Makefile @@ -39,7 +39,7 @@ run-container: lint: venv - @ # Run flake8 linter + @ # Run flake8 and black linters flake8 --max-line-length=160 --exclude venv,assets . black --line-length=160 --check --exclude='/venv/|/assets/' . .PHONY: lint From 99f85cb7f93d359a3e1fb25e6e80ab163a4bd471 Mon Sep 17 00:00:00 2001 From: pseusys Date: Fri, 17 Feb 2023 23:06:36 +0100 Subject: [PATCH 19/23] sorting, reversed --- sources/graphics_list_formatter.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sources/graphics_list_formatter.py b/sources/graphics_list_formatter.py index ddb949e..f1f24d4 100644 --- a/sources/graphics_list_formatter.py +++ b/sources/graphics_list_formatter.py @@ -72,7 +72,7 @@ def make_list(data: List = None, names: List[str] = None, texts: List[str] = Non percents = [value for item in data for key, value in item.items() if key == "percent"] if percents is None else percents data = list(zip(names, texts, percents)) - top_data = sorted(data[:top_num], key=lambda record: record[2]) if sort else data[:top_num] + top_data = sorted(data[:top_num], key=lambda record: record[2], reverse=True) if sort else data[:top_num] data_list = [f"{n[:25]}{' ' * (25 - len(n))}{t}{' ' * (20 - len(t))}{make_graph(p)} {p:05.2f} % " for n, t, p in top_data] return "\n".join(data_list) From 028bf2328ef245f00f1067ea785d35877665c3eb Mon Sep 17 00:00:00 2001 From: pseusys Date: Fri, 17 Feb 2023 23:44:35 +0100 Subject: [PATCH 20/23] github manager fix --- sources/manager_github.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sources/manager_github.py 
b/sources/manager_github.py index 45ce2e3..931d8ca 100644 --- a/sources/manager_github.py +++ b/sources/manager_github.py @@ -36,8 +36,8 @@ class GitHubManager: """ github = Github(EM.GH_TOKEN) GitHubManager.USER = github.get_user() - GitHubManager._REPO = github.get_repo(f"{GitHubManager.USER.login}/{GitHubManager.USER.login}") - GitHubManager._README = GitHubManager._REPO.get_readme() + GitHubManager.REPO = github.get_repo(f"{GitHubManager.USER.login}/{GitHubManager.USER.login}") + GitHubManager._README = GitHubManager.REPO.get_readme() GitHubManager._README_CONTENTS = str(b64decode(GitHubManager._README.content), "utf-8") @staticmethod From 36fafbd2633c34782bb0c8e9dc778647c3ff6ff1 Mon Sep 17 00:00:00 2001 From: pseusys Date: Sat, 18 Feb 2023 17:53:14 +0100 Subject: [PATCH 21/23] query limitations info added --- sources/manager_download.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/sources/manager_download.py b/sources/manager_download.py index 512ea24..8ae1a0a 100644 --- a/sources/manager_download.py +++ b/sources/manager_download.py @@ -13,6 +13,7 @@ from manager_github import GitHubManager as GHM GITHUB_API_QUERIES = { # Query to collect info about all user repositories, including: is it a fork, name and owner login. + # NB! Query includes information about recent repositories only (apparently, contributed within a year). "repos_contributed_to": """ { user(login: "$username") { @@ -28,6 +29,7 @@ GITHUB_API_QUERIES = { } }""", # Query to collect info about all commits in user repositories, including: commit date. + # NB! Query includes information about repositories owned by user only. "repo_committed_dates": """ { repository(owner: "$owner", name: "$name") { @@ -47,6 +49,7 @@ GITHUB_API_QUERIES = { } }""", # Query to collect info about all repositories user created or collaborated on, including: name, primary language and owner login. + # NB! Query doesn't include information about repositories user contributed to via pull requests. 
"user_repository_list": """ { user(login: "$username") { From 1dc8b309c8d1161e9a30fbe90a1cd51b578f8127 Mon Sep 17 00:00:00 2001 From: pseusys Date: Sat, 18 Feb 2023 19:13:25 +0100 Subject: [PATCH 22/23] timezone emoji updated --- sources/main.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sources/main.py b/sources/main.py index 9bb43e8..fc131b4 100644 --- a/sources/main.py +++ b/sources/main.py @@ -35,7 +35,7 @@ async def get_waka_time_stats() -> str: if EM.SHOW_TIMEZONE: time_zone = data["data"]["timezone"] - stats += f"⌚︎ {LM.t('Timezone')}: {time_zone}\n\n" + stats += f"🕑︎ {LM.t('Timezone')}: {time_zone}\n\n" if EM.SHOW_LANGUAGE: lang_list = no_activity if len(data["data"]["languages"]) == 0 else make_list(data["data"]["languages"]) From a97119e612028509af0791342f05b528e06a035d Mon Sep 17 00:00:00 2001 From: pseusys Date: Tue, 21 Feb 2023 20:59:30 +0100 Subject: [PATCH 23/23] linted --- sources/manager_download.py | 6 ++---- sources/yearly_commit_calculator.py | 12 ++++++------ 2 files changed, 8 insertions(+), 10 deletions(-) diff --git a/sources/manager_download.py b/sources/manager_download.py index 37e2592..140130a 100644 --- a/sources/manager_download.py +++ b/sources/manager_download.py @@ -225,9 +225,7 @@ class DownloadManager: :param kwargs: Parameters for substitution of variables in dynamic query. :return: Response JSON dictionary. """ - res = await DownloadManager._client.post("https://api.github.com/graphql", json={ - "query": Template(GITHUB_API_QUERIES[query]).substitute(kwargs) - }) + res = await DownloadManager._client.post("https://api.github.com/graphql", json={"query": Template(GITHUB_API_QUERIES[query]).substitute(kwargs)}) if res.status_code == 200: return res.json() else: @@ -266,7 +264,7 @@ class DownloadManager: :param kwargs: Parameters for substitution of variables in dynamic query. :return: Response JSON dictionary. 
""" - initial_query_response = await DownloadManager._fetch_graphql_query(query, **kwargs, pagination=f"first: 100") + initial_query_response = await DownloadManager._fetch_graphql_query(query, **kwargs, pagination="first: 100") page_list, page_info = DownloadManager._find_pagination_and_data_list(initial_query_response) while page_info["hasNextPage"]: query_response = await DownloadManager._fetch_graphql_query(query, **kwargs, pagination=f'first: 100, after: "{page_info["endCursor"]}"') diff --git a/sources/yearly_commit_calculator.py b/sources/yearly_commit_calculator.py index 8170c7d..458c433 100644 --- a/sources/yearly_commit_calculator.py +++ b/sources/yearly_commit_calculator.py @@ -16,10 +16,10 @@ async def calculate_yearly_commit_data(repositories: Dict) -> Dict: :returns: Commit quarter yearly data dictionary. """ yearly_data = dict() - total = len(repositories['data']['user']['repositories']['nodes']) - for ind, repo in enumerate(repositories['data']['user']['repositories']['nodes']): - if repo['name'] not in EM.IGNORED_REPOS: - print(f"{ind + 1}/{total}", "Retrieving repo:", repo["owner"]["login"], repo['name']) + total = len(repositories["data"]["user"]["repositories"]["nodes"]) + for ind, repo in enumerate(repositories["data"]["user"]["repositories"]["nodes"]): + if repo["name"] not in EM.IGNORED_REPOS: + print(f"{ind + 1}/{total}", "Retrieving repo:", repo["owner"]["login"], repo["name"]) await update_yearly_data_with_commit_stats(repo, yearly_data) return yearly_data @@ -33,13 +33,13 @@ async def update_yearly_data_with_commit_stats(repo_details: Dict, yearly_data: :param yearly_data: Yearly data dictionary to update. 
""" owner = repo_details["owner"]["login"] - branch_data = await DM.get_remote_graphql("repo_branch_list", owner=owner, name=repo_details['name']) + branch_data = await DM.get_remote_graphql("repo_branch_list", owner=owner, name=repo_details["name"]) if branch_data["data"]["repository"] is None: print(f"\tSkipping repo: {repo_details['name']}") return dict() for branch in branch_data["data"]["repository"]["refs"]["nodes"]: - commit_data = await DM.get_remote_graphql("repo_commit_list", owner=owner, name=repo_details['name'], branch=branch["name"], id=GHM.USER.node_id) + commit_data = await DM.get_remote_graphql("repo_commit_list", owner=owner, name=repo_details["name"], branch=branch["name"], id=GHM.USER.node_id) for commit in commit_data["data"]["repository"]["ref"]["target"]["history"]["nodes"]: date = search(r"\d+-\d+-\d+", commit["committedDate"]).group() curr_year = datetime.fromisoformat(date).year