code style applied to main

This commit is contained in:
pseusys
2023-02-17 15:34:26 +01:00
parent e8a1770feb
commit 8e675eaafd
12 changed files with 442 additions and 473 deletions

View File

@@ -9,4 +9,4 @@ ADD requirements.txt /waka-readme-stats/requirements.txt
RUN apk add --no-cache g++ jpeg-dev zlib-dev libjpeg make && pip3 install -r /waka-readme-stats/requirements.txt RUN apk add --no-cache g++ jpeg-dev zlib-dev libjpeg make && pip3 install -r /waka-readme-stats/requirements.txt
ADD sources/* /waka-readme-stats/ ADD sources/* /waka-readme-stats/
ENTRYPOINT python3 /waka-readme-stats/main.py ENTRYPOINT cd /waka-readme-stats/ && python3 main.py

View File

@@ -1,6 +1,8 @@
.ONESHELL: .ONESHELL:
.DEFAULT_GOAL = help .DEFAULT_GOAL = help
SHELL = /bin/bash .EXPORT_ALL_VARIABLES:
PATH := venv/bin:$(PATH)
ENV = .env.example ENV = .env.example
include $(ENV) include $(ENV)
@@ -25,6 +27,7 @@ venv:
run-locally: venv run-locally: venv
@ # Run action locally @ # Run action locally
mkdir ./assets/ 2>/dev/null || true
python3 ./sources/main.py python3 ./sources/main.py
.PHONY: run-locally .PHONY: run-locally

View File

@@ -82,20 +82,20 @@ inputs:
description: "Shows the short facts" description: "Shows the short facts"
default: "True" default: "True"
LOCALE: SHOW_UPDATED_DATE:
required: false required: false
description: "Show stats in your own language" description: "Show updated date"
default: "en" default: "True"
SHOW_TOTAL_CODE_TIME:
required: false
description: "Show Total Time you have coded"
default: "True"
COMMIT_BY_ME: COMMIT_BY_ME:
required: false required: false
description: "Git commit with your own name and email" description: "Git commit with your own name and email"
default: "False" default: "False"
IGNORED_REPOS:
required: false
description: "Repos you don't want to be counted"
default: ""
COMMIT_MESSAGE: COMMIT_MESSAGE:
required: false required: false
@@ -112,20 +112,20 @@ inputs:
description: "Git commit custom email" description: "Git commit custom email"
default: "" default: ""
SHOW_UPDATED_DATE: LOCALE:
required: false required: false
description: "Show updated date" description: "Show stats in your own language"
default: "True" default: "en"
UPDATED_DATE_FORMAT: UPDATED_DATE_FORMAT:
required: false required: false
description: "Updated date format" description: "Updated date format"
default: "%d/%m/%Y %H:%M:%S" default: "%d/%m/%Y %H:%M:%S"
SHOW_TOTAL_CODE_TIME: IGNORED_REPOS:
required: false required: false
description: "Show Total Time you have coded" description: "Repos you don't want to be counted"
default: "True" default: ""
SYMBOL_VERSION: SYMBOL_VERSION:
required: false required: false

View File

@@ -1,8 +1,14 @@
PyGithub==1.54.1 # GitHub integration modules:
matplotlib==3.6.3 PyGithub~=1.57
numpy==1.24.2
python-dotenv==0.17.0 # Markdown visualization modules:
pytz==2021.1 pytz~=2022.7
humanize==3.3.0 humanize~=4.6
httpx==0.23.3
PyYAML==6.0 # Graphs drawing modules:
matplotlib~=3.7
numpy~=1.24
# Request making and response parsing modules:
httpx~=0.23
PyYAML~=6.0

View File

@@ -1,10 +1,10 @@
from typing import Dict from typing import Dict
import numpy as np from numpy import arange, array, add, amax
import matplotlib.patches as mpatches import matplotlib.patches as mpatches
import matplotlib.pyplot as plt import matplotlib.pyplot as plt
from download_manager import DownloadManager from manager_download import DownloadManager as DM
MAX_LANGUAGES = 5 MAX_LANGUAGES = 5
@@ -18,40 +18,37 @@ async def create_loc_graph(yearly_data: Dict, save_path: str):
:param yearly_data: GitHub user yearly data. :param yearly_data: GitHub user yearly data.
:param save_path: Path to save the graph file. :param save_path: Path to save the graph file.
""" """
colors = await DownloadManager.get_remote_yaml("linguist") colors = await DM.get_remote_yaml("linguist")
years = len(yearly_data.keys()) years = len(yearly_data.keys())
year_indexes = np.arange(years) year_indexes = arange(years)
all_languages = dict() languages_all_loc = dict()
for year in yearly_data.values(): for i, y in enumerate(sorted(yearly_data.keys())):
for quarter in year.values(): for q in yearly_data[y].keys():
for language, loc in quarter.items(): langs = sorted(yearly_data[y][q].keys(), key=lambda l: yearly_data[y][q][l], reverse=True)[0:MAX_LANGUAGES]
all_languages[language] = all_languages.get(language, 0) + loc
top_languages_names = sorted(all_languages.keys(), key=lambda l: all_languages[l], reverse=True)[0:MAX_LANGUAGES] for lang in langs:
top_languages = {language: np.array([[0] * years] * 4) for language in top_languages_names} if lang not in languages_all_loc:
for index, year in enumerate(sorted(yearly_data.keys())): languages_all_loc[lang] = array([[0] * years] * 4)
for quarter, languages in yearly_data[year].items(): languages_all_loc[lang][q - 1][i] = yearly_data[y][q][lang]
for language, loc in {(lang, loc) for lang, loc in languages.items() if lang in top_languages}:
top_languages[language][quarter - 1][index] = yearly_data[year][quarter][language]
fig = plt.figure() fig = plt.figure()
ax = fig.add_axes([0, 0, 1.5, 1]) ax = fig.add_axes([0, 0, 1.5, 1])
language_handles = [] language_handles = []
cumulative = np.array([[0] * years] * 4) cumulative = array([[0] * years] * 4)
for key, value in top_languages.items(): for key, value in languages_all_loc.items():
color = colors[key]["color"] if colors[key]["color"] is not None else "w" color = colors[key]["color"] if colors[key]["color"] is not None else "w"
language_handles += [mpatches.Patch(color=color, label=key)] language_handles += [mpatches.Patch(color=color, label=key)]
for quarter in range(4): for quarter in range(4):
ax.bar(year_indexes + quarter * 0.21, value[quarter], 0.2, bottom=cumulative[quarter], color=color) ax.bar(year_indexes + quarter * 0.21, value[quarter], 0.2, bottom=cumulative[quarter], color=color)
cumulative[quarter] = np.add(cumulative[quarter], value[quarter]) cumulative[quarter] = add(cumulative[quarter], value[quarter])
ax.set_ylabel("LOC added", fontdict=dict(weight="bold")) ax.set_ylabel("LOC added", fontdict=dict(weight="bold"))
ax.set_xticks(np.array([np.arange(i, i + 0.84, step=0.21) for i in year_indexes]).flatten(), labels=["Q1", "Q2", "Q3", "Q4"] * years) ax.set_xticks(array([arange(i, i + 0.84, step=0.21) for i in year_indexes]).flatten(), labels=["Q1", "Q2", "Q3", "Q4"] * years)
sax = ax.secondary_xaxis("top") sax = ax.secondary_xaxis("top")
sax.set_xticks(year_indexes + 0.42, labels=sorted(yearly_data.keys())) sax.set_xticks(year_indexes + 0.42, labels=sorted(yearly_data.keys()))
@@ -64,6 +61,6 @@ async def create_loc_graph(yearly_data: Dict, save_path: str):
ax.spines["top"].set_visible(False) ax.spines["top"].set_visible(False)
ax.spines["right"].set_visible(False) ax.spines["right"].set_visible(False)
plt.ylim(0, 1.05 * np.amax(cumulative)) plt.ylim(0, 1.05 * amax(cumulative))
plt.savefig(save_path, bbox_inches="tight") plt.savefig(save_path, bbox_inches="tight")
plt.close(fig) plt.close(fig)

View File

@@ -0,0 +1,122 @@
from typing import Dict
from datetime import datetime
from pytz import timezone, utc
from manager_download import DownloadManager as DM
from manager_environment import EnvironmentManager as EM
from manager_github import GitHubManager as GHM
from manager_localization import LocalizationManager as LM
def make_graph(percent: float) -> str:
    """Translate a percentage into a 25-character progress-bar string.

    The "done"/"empty" bar symbols are chosen by ``EM.SYMBOL_VERSION``
    (1, 2 or 3); any other value falls back to the version-1 symbols.

    :param percent: Completion percentage, expected in the 0-100 range.
    :returns: Bar of done-blocks followed by empty-blocks, 25 cells total.
    """
    # NOTE(review): the symbol literals appear empty in this copy of the
    # source - presumably non-ASCII block characters were lost; confirm.
    symbol_versions = {
        1: ('', ''),
        2: ('', ''),
        3: ('', ''),
    }
    done_block, empty_block = symbol_versions.get(EM.SYMBOL_VERSION, ('', ''))
    filled = int(round(percent) / 4)
    return f"{done_block * filled}{empty_block * int(25 - filled)}"
def make_list(data: list) -> str:  # TODO: add arg: sorted
    """Render up to five ``{name, text, percent}`` entries as aligned rows.

    Each row contains the name (truncated to 25 chars), the text column,
    the progress bar from :func:`make_graph`, and a zero-padded percentage.

    :param data: List of dicts with ``name``, ``text`` and ``percent`` keys.
    :returns: Newline-joined table rows.
    """
    rows = []
    for entry in data[:5]:
        name, text = entry['name'], entry['text']
        name_pad = ' ' * (25 - len(name))
        text_pad = ' ' * (20 - len(text))
        bar = make_graph(entry['percent'])
        rows.append(f"{name[:25]}{name_pad}{text}{text_pad}{bar} {float(entry['percent']):05.2f} % ")
    return '\n'.join(rows)
def make_commit_list(data: list) -> str:
    """Render up to seven commit-stat entries as aligned rows.

    Like :func:`make_list` but with commit-list column widths and a
    seven-entry limit (one row per weekday / time-of-day bucket).

    :param data: List of dicts with ``name``, ``text`` and ``percent`` keys.
    :returns: Newline-joined table rows.
    """
    rows = [
        f"{entry['name']}{' ' * ((15 - len(entry['name'])) + (11 - len(entry['text'])))}"
        f"{entry['text']}{' ' * (7)}"
        f"{make_graph(entry['percent'])} {float(entry['percent']):05.2f} % "
        for entry in data[:7]
    ]
    return '\n'.join(rows)
async def generate_commit_list(time_zone: str) -> str:
    """Build the markdown section describing *when* the user commits.

    Fetches commit timestamps from every non-fork repository the user has
    contributed to, localizes them to ``time_zone``, and aggregates them
    into time-of-day and day-of-week histograms.

    :param time_zone: Timezone name (as accepted by ``pytz.timezone``)
        that commit times are converted into.
    :returns: Markdown string with the "early bird / night owl" table and,
        when ``EM.SHOW_DAYS_OF_WEEK`` is set, a per-weekday table.
    """
    stats = str()

    result = await DM.get_remote_graphql("repos_contributed_to", username=GHM.USER.login)
    repos = [d for d in result["data"]["user"]["repositoriesContributedTo"]["nodes"] if d["isFork"] is False]

    day_times = [0] * 4  # 0 - 6, 6 - 12, 12 - 18, 18 - 24
    week_days = [0] * 7  # Monday, Tuesday, Wednesday, Thursday, Friday, Saturday, Sunday

    for repository in repos:
        result = await DM.get_remote_graphql("repo_committed_dates", owner=repository["owner"]["login"], name=repository["name"], id=GHM.USER.node_id)
        # Empty or inaccessible repositories have no default branch ref:
        # skip them instead of crashing on a None subscript.
        repo_node = result["data"]["repository"]
        if repo_node is None or repo_node["defaultBranchRef"] is None:
            continue
        committed_dates = repo_node["defaultBranchRef"]["target"]["history"]["edges"]
        for committedDate in committed_dates:
            local_date = datetime.strptime(committedDate["node"]["committedDate"], "%Y-%m-%dT%H:%M:%SZ")
            date = local_date.replace(tzinfo=utc).astimezone(timezone(time_zone))
            day_times[date.hour // 6] += 1
            week_days[date.isoweekday() - 1] += 1

    # Guard against users with zero tracked commits (would otherwise raise
    # ZeroDivisionError below); with 0 commits every percentage is 0.0.
    sum_day = sum(day_times) or 1
    sum_week = sum(week_days) or 1
    day_times = day_times[1:] + day_times[:1]  # rotate so index 0 is the 6-12 "morning" bucket

    time_of_day_data = [
        {"name": f"🌞 {LM.t('Morning')}", "text": f"{day_times[0]} commits", "percent": round((day_times[0] / sum_day) * 100, 2)},
        {"name": f"🌆 {LM.t('Daytime')}", "text": f"{day_times[1]} commits", "percent": round((day_times[1] / sum_day) * 100, 2)},
        {"name": f"🌃 {LM.t('Evening')}", "text": f"{day_times[2]} commits", "percent": round((day_times[2] / sum_day) * 100, 2)},
        {"name": f"🌙 {LM.t('Night')}", "text": f"{day_times[3]} commits", "percent": round((day_times[3] / sum_day) * 100, 2)},
    ]
    day_of_week_data = [
        {"name": LM.t("Monday"), "text": f"{week_days[0]} commits", "percent": round((week_days[0] / sum_week) * 100, 2)},
        {"name": LM.t("Tuesday"), "text": f"{week_days[1]} commits", "percent": round((week_days[1] / sum_week) * 100, 2)},
        {"name": LM.t("Wednesday"), "text": f"{week_days[2]} commits", "percent": round((week_days[2] / sum_week) * 100, 2)},
        {"name": LM.t("Thursday"), "text": f"{week_days[3]} commits", "percent": round((week_days[3] / sum_week) * 100, 2)},
        {"name": LM.t("Friday"), "text": f"{week_days[4]} commits", "percent": round((week_days[4] / sum_week) * 100, 2)},
        {"name": LM.t("Saturday"), "text": f"{week_days[5]} commits", "percent": round((week_days[5] / sum_week) * 100, 2)},
        {"name": LM.t("Sunday"), "text": f"{week_days[6]} commits", "percent": round((week_days[6] / sum_week) * 100, 2)},
    ]

    title = LM.t("I am an Early") if sum(day_times[0:2]) >= sum(day_times[2:4]) else LM.t("I am a Night")
    stats += f"**{title}** \n\n```text\n{make_commit_list(time_of_day_data)}\n\n```\n"

    if EM.SHOW_DAYS_OF_WEEK:
        most_productive = max(day_of_week_data, key=lambda d: d["percent"])
        stats += f"📅 **{LM.t('I am Most Productive on') % most_productive['name']}** \n\n```text\n{make_commit_list(day_of_week_data)}\n\n```\n"
    return stats
def make_language_per_repo_list(result: Dict) -> str:
    """Build the markdown list of primary languages across the user's repos.

    :param result: GraphQL response dict containing the user's repository
        edges (each edge's node may carry a ``primaryLanguage``).
    :returns: Markdown string: an optional "I Mostly Code in <top language>"
        title followed by a text block of per-language repo counts and
        percentages.
    """
    language_count = dict()
    repos_with_language = [repo for repo in result["data"]["user"]["repositories"]["edges"] if repo["node"]["primaryLanguage"] is not None]
    for repo in repos_with_language:
        language = repo["node"]["primaryLanguage"]["name"]
        language_count[language] = language_count.get(language, {"count": 0})
        language_count[language]["count"] += 1

    data = list()
    for language in language_count.keys():
        data.append({
            "name": language,
            "text": f"{language_count[language]['count']} {'repo' if language_count[language]['count'] == 1 else 'repos'}",
            "percent": round(language_count[language]["count"] / len(repos_with_language) * 100, 2)
        })

    if len(repos_with_language) > 0:
        # max() raises ValueError on an empty mapping, so only pick the top
        # language when at least one repository actually has one.
        top_language = max(language_count.keys(), key=lambda x: language_count[x]["count"])
        title = f"**{LM.t('I Mostly Code in') % top_language}** \n\n"
    else:
        title = ""
    return f"{title}```text\n{make_list(data)}\n```\n\n"

View File

@@ -4,8 +4,8 @@ from asyncio import sleep
from github import Github, InputGitAuthor, AuthenticatedUser from github import Github, InputGitAuthor, AuthenticatedUser
import datetime import datetime
from download_manager import DownloadManager from manager_download import DownloadManager as DM
from graph_drawer import create_loc_graph from graphics_chart_drawer import create_loc_graph
class LinesOfCode: class LinesOfCode:
@@ -44,7 +44,7 @@ class LinesOfCode:
return 4 return 4
async def getCommitStat(self, repoDetails, yearly_data): async def getCommitStat(self, repoDetails, yearly_data):
commit_data = await DownloadManager.get_remote_graphql("repository_commit_list", owner=repoDetails["owner"]["login"], name=repoDetails['name'], id=self.user.node_id) commit_data = await DM.get_remote_graphql("repo_commit_list", owner=repoDetails["owner"]["login"], name=repoDetails['name'], id=self.user.node_id)
if commit_data["data"]["repository"] is None: if commit_data["data"]["repository"] is None:
print("\tSkipping:", repoDetails['name']) print("\tSkipping:", repoDetails['name'])

View File

@@ -1,456 +1,160 @@
''' """
Readme Development Metrics With waka time progress Readme Development Metrics With waka time progress
''' """
import re
import os
import base64
from asyncio import run from asyncio import run
from typing import Dict from typing import Dict, Tuple
from datetime import datetime
from pytz import timezone
import pytz
from github import Github, InputGitAuthor, AuthenticatedUser
import datetime
from download_manager import init_download_manager, DownloadManager
from loc import LinesOfCode
import humanize
from urllib.parse import quote from urllib.parse import quote
import json
import math
from dotenv import load_dotenv from humanize import intword, naturalsize, intcomma, precisedelta
load_dotenv() from manager_download import init_download_manager, DownloadManager as DM
from manager_environment import EnvironmentManager as EM
START_COMMENT = f'<!--START_SECTION:{os.getenv("INPUT_SECTION_NAME")}-->' from manager_github import init_github_manager, GitHubManager as GHM
END_COMMENT = f'<!--END_SECTION:{os.getenv("INPUT_SECTION_NAME")}-->' from manager_localization import init_localization_manager, LocalizationManager as LM
listReg = f"{START_COMMENT}[\\s\\S]+{END_COMMENT}" from loc import LinesOfCode # TODO: refactor
from graphics_list_formatter import make_list, generate_commit_list, make_language_per_repo_list
waka_key = os.getenv('INPUT_WAKATIME_API_KEY')
ghtoken = os.getenv('INPUT_GH_TOKEN')
branchName = os.getenv('INPUT_PUSH_BRANCH_NAME')
showTimeZone = os.getenv('INPUT_SHOW_TIMEZONE')
showProjects = os.getenv('INPUT_SHOW_PROJECTS')
showEditors = os.getenv('INPUT_SHOW_EDITORS')
showOs = os.getenv('INPUT_SHOW_OS')
showCommit = os.getenv('INPUT_SHOW_COMMIT')
showLanguage = os.getenv('INPUT_SHOW_LANGUAGE')
show_loc = os.getenv('INPUT_SHOW_LINES_OF_CODE')
show_days_of_week = os.getenv('INPUT_SHOW_DAYS_OF_WEEK')
showLanguagePerRepo = os.getenv('INPUT_SHOW_LANGUAGE_PER_REPO')
showLocChart = os.getenv('INPUT_SHOW_LOC_CHART')
show_profile_view = os.getenv('INPUT_SHOW_PROFILE_VIEWS')
show_short_info = os.getenv('INPUT_SHOW_SHORT_INFO')
locale = os.getenv('INPUT_LOCALE')
commit_by_me = os.getenv('INPUT_COMMIT_BY_ME')
ignored_repos_name = str(os.getenv('INPUT_IGNORED_REPOS') or '').replace(' ', '').split(',')
show_updated_date = os.getenv('INPUT_SHOW_UPDATED_DATE')
updated_date_format = os.getenv('INPUT_UPDATED_DATE_FORMAT')
commit_message = os.getenv('INPUT_COMMIT_MESSAGE')
commit_username = os.getenv('INPUT_COMMIT_USERNAME')
commit_email = os.getenv('INPUT_COMMIT_EMAIL')
show_total_code_time = os.getenv('INPUT_SHOW_TOTAL_CODE_TIME')
symbol_version = os.getenv('INPUT_SYMBOL_VERSION').strip()
show_waka_stats = 'y'
truthy = ['true', '1', 't', 'y', 'yes']
translate: Dict[str, str]
user: AuthenticatedUser
def millify(n): async def get_waka_time_stats() -> str:
millnames = ['', ' Thousand', ' Million', ' Billion', ' Trillion'] stats = str()
n = float(n)
millidx = max(0, min(len(millnames) - 1,
int(math.floor(0
if n == 0
else math.log10(abs(n)) / 3))))
return '{:.0f}{}'.format(n / 10 ** (3 * millidx), millnames[millidx]) data = await DM.get_remote_json("waka_latest")
if EM.SHOW_COMMIT:
stats += f"{await generate_commit_list(data['data']['timezone'])}\n\n"
if EM.SHOW_TIMEZONE or EM.SHOW_LANGUAGE or EM.SHOW_EDITORS or EM.SHOW_PROJECTS or EM.SHOW_OS:
no_activity = LM.t("No Activity Tracked This Week")
stats += f"📊 **{LM.t('This Week I Spend My Time On')}** \n\n```text\n"
def make_graph(percent: float): if EM.SHOW_TIMEZONE:
'''Make progress graph from API graph''' time_zone = data["data"]["timezone"]
if (symbol_version == '1'): # version 1 stats += f"⌚︎ {LM.t('Timezone')}: {time_zone}\n\n"
done_block = ''
empty_block = ''
elif (symbol_version == '2'): # version 2
done_block = ''
empty_block = ''
elif (symbol_version == '3'): # version 3
done_block = ''
empty_block = ''
else:
done_block = '' # default is version 1
empty_block = ''
pc_rnd = round(percent) if EM.SHOW_LANGUAGE:
return f"{done_block * int(pc_rnd / 4)}{empty_block * int(25 - int(pc_rnd / 4))}" lang_list = no_activity if len(data["data"]["languages"]) == 0 else make_list(data["data"]["languages"])
stats += f"💬 {LM.t('Languages')}: \n{lang_list}\n\n"
if EM.SHOW_EDITORS:
edit_list = no_activity if len(data["data"]["editors"]) == 0 else make_list(data["data"]["editors"])
stats += f"🔥 {LM.t('Editors')}: \n{edit_list}\n\n"
def make_list(data: list): if EM.SHOW_PROJECTS:
'''Make List''' project_list = no_activity if len(data["data"]["projects"]) == 0 else make_list(data["data"]["projects"])
data_list = [] stats += f"🐱‍💻 {LM.t('Projects')}: \n{project_list}\n\n"
for l in data[:5]:
ln = len(l['name'])
ln_text = len(l['text'])
percent = "{:05.2f}".format(float(l['percent']))
op = f"{l['name'][:25]}{' ' * (25 - ln)}{l['text']}{' ' * (20 - ln_text)}{make_graph(l['percent'])} {percent} % "
data_list.append(op)
return '\n'.join(data_list)
if EM.SHOW_OS:
def make_commit_list(data: list): os_list = no_activity if len(data["data"]["operating_systems"]) == 0 else make_list(data["data"]["operating_systems"])
'''Make List''' stats += f"💻 {LM.t('operating system')}: \n{os_list}\n\n"
data_list = []
for l in data[:7]:
ln = len(l['name'])
ln_text = len(l['text'])
percent = "{:05.2f}".format(float(l['percent']))
op = f"{l['name']}{' ' * ((15 - ln) + (11 - ln_text))}{l['text']}{' ' * (7)}{make_graph(l['percent'])} {percent} % "
data_list.append(op)
return '\n'.join(data_list)
async def generate_commit_list(tz):
string = ''
result = await DownloadManager.get_remote_graphql("repositories_contributed_to", username=user.login)
nodes = result["data"]["user"]["repositoriesContributedTo"]["nodes"]
repos = [d for d in nodes if d['isFork'] is False]
morning = 0 # 6 - 12
daytime = 0 # 12 - 18
evening = 0 # 18 - 24
night = 0 # 0 - 6
Monday = 0
Tuesday = 0
Wednesday = 0
Thursday = 0
Friday = 0
Saturday = 0
Sunday = 0
for repository in repos:
result = await DownloadManager.get_remote_graphql("repository_committed_dates", owner=repository["owner"]["login"], name=repository["name"], id=user.node_id)
committed_dates = result["data"]["repository"]["defaultBranchRef"]["target"]["history"]["edges"]
for committedDate in committed_dates:
date = datetime.datetime.strptime(committedDate["node"]["committedDate"], "%Y-%m-%dT%H:%M:%SZ").replace(tzinfo=pytz.utc).astimezone(timezone(tz))
hour = date.hour
weekday = date.strftime('%A')
if 6 <= hour < 12:
morning += 1
if 12 <= hour < 18:
daytime += 1
if 18 <= hour < 24:
evening += 1
if 0 <= hour < 6:
night += 1
if weekday == "Monday":
Monday += 1
if weekday == "Tuesday":
Tuesday += 1
if weekday == "Wednesday":
Wednesday += 1
if weekday == "Thursday":
Thursday += 1
if weekday == "Friday":
Friday += 1
if weekday == "Saturday":
Saturday += 1
if weekday == "Sunday":
Sunday += 1
sumAll = morning + daytime + evening + night
sum_week = Sunday + Monday + Tuesday + Friday + Saturday + Wednesday + Thursday
title = translate['I am an Early'] if morning + daytime >= evening + night else translate['I am a Night']
one_day = [
{"name": "🌞 " + translate['Morning'], "text": str(morning) + " commits",
"percent": round((morning / sumAll) * 100, 2)},
{"name": "🌆 " + translate['Daytime'], "text": str(daytime) + " commits",
"percent": round((daytime / sumAll) * 100, 2)},
{"name": "🌃 " + translate['Evening'], "text": str(evening) + " commits",
"percent": round((evening / sumAll) * 100, 2)},
{"name": "🌙 " + translate['Night'], "text": str(night) + " commits",
"percent": round((night / sumAll) * 100, 2)},
]
dayOfWeek = [
{"name": translate['Monday'], "text": str(Monday) + " commits", "percent": round((Monday / sum_week) * 100, 2)},
{"name": translate['Tuesday'], "text": str(Tuesday) + " commits",
"percent": round((Tuesday / sum_week) * 100, 2)},
{"name": translate['Wednesday'], "text": str(Wednesday) + " commits",
"percent": round((Wednesday / sum_week) * 100, 2)},
{"name": translate['Thursday'], "text": str(Thursday) + " commits",
"percent": round((Thursday / sum_week) * 100, 2)},
{"name": translate['Friday'], "text": str(Friday) + " commits", "percent": round((Friday / sum_week) * 100, 2)},
{"name": translate['Saturday'], "text": str(Saturday) + " commits",
"percent": round((Saturday / sum_week) * 100, 2)},
{"name": translate['Sunday'], "text": str(Sunday) + " commits", "percent": round((Sunday / sum_week) * 100, 2)},
]
string = string + '**' + title + '** \n\n' + '```text\n' + make_commit_list(one_day) + '\n\n```\n'
if show_days_of_week.lower() in truthy:
max_element = {
'percent': 0
}
for day in dayOfWeek:
if day['percent'] > max_element['percent']:
max_element = day
days_title = translate['I am Most Productive on'] % max_element['name']
string = string + '📅 **' + days_title + '** \n\n' + '```text\n' + make_commit_list(dayOfWeek) + '\n\n```\n'
return string
async def get_waka_time_stats():
stats = ''
no_activity = translate["No Activity Tracked This Week"]
data = await DownloadManager.get_remote_json("waka_latest")
if showCommit.lower() in truthy:
stats = stats + await generate_commit_list(data['data']['timezone']) + '\n\n'
if showTimeZone.lower() in truthy or showLanguage.lower() in truthy or showEditors.lower() in truthy or showProjects.lower() in truthy or showOs.lower() in truthy:
stats += '📊 **' + translate['This Week I Spend My Time On'] + '** \n\n'
stats += '```text\n'
if showTimeZone.lower() in truthy:
tzone = data['data']['timezone']
stats = stats + '⌚︎ ' + translate['Timezone'] + ': ' + tzone + '\n\n'
if showLanguage.lower() in truthy:
if len(data['data']['languages']) == 0:
lang_list = no_activity
else:
lang_list = make_list(data['data']['languages'])
stats = stats + '💬 ' + translate['Languages'] + ': \n' + lang_list + '\n\n'
if showEditors.lower() in truthy:
if len(data['data']['editors']) == 0:
edit_list = no_activity
else:
edit_list = make_list(data['data']['editors'])
stats = stats + '🔥 ' + translate['Editors'] + ': \n' + edit_list + '\n\n'
if showProjects.lower() in truthy:
if len(data['data']['projects']) == 0:
project_list = no_activity
else:
# Re-order the project list by percentage
data['data']['projects'] = sorted(data['data']['projects'], key=lambda x: x["percent"],
reverse=True)
project_list = make_list(data['data']['projects'])
stats = stats + '🐱‍💻 ' + translate['Projects'] + ': \n' + project_list + '\n\n'
if showOs.lower() in truthy:
if len(data['data']['operating_systems']) == 0:
os_list = no_activity
else:
os_list = make_list(data['data']['operating_systems'])
stats = stats + '💻 ' + translate['operating system'] + ': \n' + os_list + '\n\n'
stats += '```\n\n' stats += '```\n\n'
return stats return stats
def generate_language_per_repo(result): async def get_yearly_data(repository_list) -> Tuple[LinesOfCode, Dict]: # TODO: refactor!
language_count = {} loc = LinesOfCode(GHM.USER, EM.GH_TOKEN, repository_list, EM.IGNORED_REPOS)
total = 0 return loc, await loc.calculateLoc()
for repo in result['data']['user']['repositories']['edges']:
if repo['node']['primaryLanguage'] is None:
continue
language = repo['node']['primaryLanguage']['name']
total += 1
if language not in language_count.keys():
language_count[language] = {}
language_count[language]['count'] = 1
else:
language_count[language]['count'] = language_count[language]['count'] + 1
data = []
sorted_labels = list(language_count.keys())
sorted_labels.sort(key=lambda x: language_count[x]['count'], reverse=True)
for label in sorted_labels:
percent = round(language_count[label]['count'] / total * 100, 2)
extension = " repos"
if language_count[label]['count'] == 1:
extension = " repo"
data.append({
"name": label,
"text": str(language_count[label]['count']) + extension,
"percent": percent
})
title = '**' + translate['I Mostly Code in'] % sorted_labels[0] + '** \n\n' if len(sorted_labels) > 0 else ''
return title + '```text\n' + make_list(data) + '\n\n```\n'
async def get_yearly_data(): async def get_short_github_info():
repository_list = await DownloadManager.get_remote_graphql("user_repository_list", username=user.login, id=user.node_id) stats = f"**🐱 {LM.t('My GitHub Data')}** \n\n"
loc = LinesOfCode(user, ghtoken, repository_list, ignored_repos_name)
yearly_data = await loc.calculateLoc()
if showLocChart.lower() in truthy:
await loc.plotLoc(yearly_data)
return yearly_data
if GHM.USER.disk_usage is None:
async def get_line_of_code() -> str: disk_usage = LM.t("Used in GitHub's Storage") % "?"
repositoryList = await DownloadManager.get_remote_graphql("user_repository_list", username=user.login, id=user.node_id) print("Please add new github personal access token with user permission!")
loc = LinesOfCode(user, ghtoken, repositoryList, ignored_repos_name)
yearly_data = await loc.calculateLoc()
total_loc = sum(
[yearly_data[year][quarter][lang] for year in yearly_data for quarter in yearly_data[year] for lang in
yearly_data[year][quarter]])
return millify(int(total_loc))
async def get_short_info():
string = '**🐱 ' + translate['My GitHub Data'] + '** \n\n'
if user.disk_usage is None:
disk_usage = humanize.naturalsize(0)
print("Please add new github personal access token with user permission")
else: else:
disk_usage = humanize.naturalsize(user.disk_usage) disk_usage = LM.t("Used in GitHub's Storage") % naturalsize(GHM.USER.disk_usage)
data = await DownloadManager.get_remote_json("github_stats") stats += f"> 📦 {disk_usage} \n > \n"
if len(data['years']) > 0:
this_year_data = data['years'][0]
total = this_year_data['total']
year = this_year_data['year']
string += '> 🏆 ' + translate['Contributions in the year'] % (humanize.intcomma(total), year) + '\n > \n'
string += '> 📦 ' + translate["Used in GitHub's Storage"] % disk_usage + ' \n > \n' data = await DM.get_remote_json("github_stats")
is_hireable = user.hireable if len(data["years"]) > 0:
public_repo = user.public_repos contributions = LM.t('Contributions in the year') % (intcomma(data["years"][0]['total']), data["years"][0]['year'])
private_repo = user.owned_private_repos stats += f"> 🏆 {contributions}\n > \n"
if private_repo is None:
private_repo = 0 opted_to_hire = GHM.USER.hireable
if is_hireable: if opted_to_hire:
string += "> 💼 " + translate["Opted to Hire"] + "\n > \n" stats += f"> 💼 {LM.t('Opted to Hire')}\n > \n"
else: else:
string += "> 🚫 " + translate["Not Opted to Hire"] + "\n > \n" stats += f"> 🚫 {LM.t('Not Opted to Hire')}\n > \n"
string += '> 📜 ' public_repo = GHM.USER.public_repos
string += translate['public repositories'] % public_repo + " " + '\n > \n' if public_repo != 1 else translate[ if public_repo != 1:
'public repository'] % public_repo + " " + '\n > \n' stats += f"> 📜 {LM.t('public repositories') % public_repo} \n > \n"
string += '> 🔑 ' else:
string += translate['private repositories'] % private_repo + " " + ' \n > \n' if private_repo != 1 else translate[ stats += f"> 📜 {LM.t('public repository') % public_repo} \n > \n"
'private repository'] % private_repo + " " + '\n > \n'
return string private_repo = GHM.USER.owned_private_repos if GHM.USER.owned_private_repos is not None else 0
if public_repo != 1:
stats += f"> 🔑 {LM.t('private repositories') % private_repo} \n > \n"
async def get_stats(github) -> str: else:
'''Gets API data and returns markdown progress''' stats += f"> 🔑 {LM.t('private repository') % private_repo} \n > \n"
stats = ''
repositoryList = await DownloadManager.get_remote_graphql("user_repository_list", username=user.login, id=user.node_id)
if show_loc.lower() in truthy or showLocChart.lower() in truthy:
# This condition is written to calculate the lines of code because it is heavy process soo needs to be calculate once this will reduce the execution time
await get_yearly_data()
if show_total_code_time.lower() in truthy:
data = await DownloadManager.get_remote_json("waka_all")
stats += '![Code Time](http://img.shields.io/badge/' + quote(
str("Code Time")) + '-' + quote(str(
data['data']['text'])) + '-blue)\n\n'
if show_profile_view.lower() in truthy:
data = github.get_repo(f"{user.login}/{user.login}").get_views_traffic(per="week")
stats += '![Profile Views](http://img.shields.io/badge/' + quote(str(translate['Profile Views'])) + '-' + str(
data['count']) + '-blue)\n\n'
if show_loc.lower() in truthy:
stats += '![Lines of code](https://img.shields.io/badge/' + quote(
str(translate['From Hello World I have written'])) + '-' + quote(
str(await get_line_of_code())) + '%20' + quote(str(translate['Lines of code'])) + '-blue)\n\n'
if show_short_info.lower() in truthy:
stats += await get_short_info()
if show_waka_stats.lower() in truthy:
stats += await get_waka_time_stats()
if showLanguagePerRepo.lower() in truthy:
stats = stats + generate_language_per_repo(repositoryList) + '\n\n'
if showLocChart.lower() in truthy:
stats += '**' + translate['Timeline'] + '**\n\n'
branch_name = github.get_repo(f'{user.login}/{user.login}').default_branch
stats = stats + '![Chart not found](https://raw.githubusercontent.com/' + user.login + '/' + user.login + '/' + branch_name + '/' + LinesOfCode.GRAPH_PATH + ') \n\n'
if show_updated_date.lower() in truthy:
now = datetime.datetime.utcnow()
d1 = now.strftime(updated_date_format)
stats = stats + "\n Last Updated on " + d1 + " UTC"
return stats return stats
def decode_readme(data: str): async def get_stats() -> str:
'''Decode the contents of old readme''' """
decoded_bytes = base64.b64decode(data) Gets API data and returns markdown progress
return str(decoded_bytes, 'utf-8') """
stats = str()
repositories = await DM.get_remote_graphql("user_repository_list", username=GHM.USER.login, id=GHM.USER.node_id)
if EM.SHOW_LINES_OF_CODE or EM.SHOW_LOC_CHART:
loc, yearly_data = await get_yearly_data(repositories)
else:
loc, yearly_data = (None, dict())
def generate_new_readme(stats: str, readme: str): if EM.SHOW_TOTAL_CODE_TIME:
'''Generate a new Readme.md''' data = await DM.get_remote_json("waka_all")
stats_in_readme = f"{START_COMMENT}\n{stats}\n{END_COMMENT}" stats += f"![Code Time](http://img.shields.io/badge/{quote('Code Time')}-{quote(str(data['data']['text']))}-blue)\n\n"
return re.sub(listReg, stats_in_readme, readme)
if EM.SHOW_PROFILE_VIEWS:
data = GHM.REPO.get_views_traffic(per="week")
stats += f"![Profile Views](http://img.shields.io/badge/{quote(LM.t('Profile Views'))}-{data['count']}-blue)\n\n"
if EM.SHOW_LINES_OF_CODE:
total_loc = sum([yearly_data[y][q][d] for y in yearly_data.keys() for q in yearly_data[y].keys() for d in yearly_data[y][q].keys()])
data = f"{intword(total_loc)} {LM.t('Lines of code')}"
stats += f"![Lines of code](https://img.shields.io/badge/{quote(LM.t('From Hello World I have written'))}-{quote(data)}-blue)\n\n"
if EM.SHOW_SHORT_INFO:
stats += await get_short_github_info()
stats += await get_waka_time_stats()
if EM.SHOW_LANGUAGE_PER_REPO:
stats += f"{make_language_per_repo_list(repositories)}\n\n"
if EM.SHOW_LOC_CHART:
await loc.plotLoc(yearly_data)
chart_path = f"{GHM.USER.login}/{GHM.USER.login}/{GHM.branch()}/{LinesOfCode.GRAPH_PATH}"
stats += '**' + LM.t('Timeline') + '**\n\n'
stats += f"![Lines of Code chart](https://raw.githubusercontent.com/{chart_path})\n\n"
if EM.SHOW_UPDATED_DATE:
stats += f"\n Last Updated on {datetime.now().strftime(EM.UPDATED_DATE_FORMAT)} UTC"
return stats
async def main(): async def main():
global translate, user init_github_manager()
await init_download_manager()
init_localization_manager()
if ghtoken is None: if GHM.update_readme(await get_stats()):
raise Exception('Token not available') print("Readme updated!")
user = Github(ghtoken).get_user()
print(f"Current user: {user.login}")
await init_download_manager(waka_key, ghtoken, user)
try:
with open(os.path.join(os.path.dirname(__file__), 'translation.json'), encoding='utf-8') as config_file:
data = json.load(config_file)
translate = data[locale]
except Exception as e:
print("Cannot find the Locale choosing default to english")
translate = data['en']
g = Github(ghtoken)
waka_stats = await get_stats(g)
repo = g.get_repo(f"{user.login}/{user.login}")
contents = repo.get_readme()
rdmd = decode_readme(contents.content)
new_readme = generate_new_readme(stats=waka_stats, readme=rdmd)
if commit_by_me.lower() in truthy:
committer = InputGitAuthor(user.login or commit_username, user.email or commit_email)
else:
committer = InputGitAuthor(
commit_username or 'readme-bot',
commit_email or '41898282+github-actions[bot]@users.noreply.github.com'
)
if new_readme != rdmd:
try:
repo.update_file(path=contents.path, message=commit_message,
content=new_readme, sha=contents.sha, branch=branchName,
committer=committer)
except:
repo.update_file(path=contents.path, message=commit_message,
content=new_readme, sha=contents.sha, branch='main',
committer=committer)
print("Readme updated")
if __name__ == '__main__': if __name__ == '__main__':
start_time = datetime.datetime.now().timestamp() * 1000 start_time = datetime.now()
run(main()) run(main())
end_time = datetime.datetime.now().timestamp() * 1000 run_delta = datetime.now() - start_time
print(f"Program processed in {round(end_time - start_time, 0)} miliseconds.") print(f"Program processed in {precisedelta(run_delta, minimum_unit='microseconds')}.")
# TODO: check function and variable naming
# TODO: check type hints
# TODO: sorted to max / min
# TODO: add 1 to repo count
# TODO: drop not awaited coroutines

View File

@@ -5,11 +5,13 @@ from typing import Awaitable, Dict, Callable, Optional
from httpx import AsyncClient from httpx import AsyncClient
from yaml import safe_load from yaml import safe_load
from github import AuthenticatedUser
from manager_environment import EnvironmentManager as EM
from manager_github import GitHubManager as GHM
GITHUB_API_QUERIES = { GITHUB_API_QUERIES = {
"repositories_contributed_to": """ "repos_contributed_to": """
{ {
user(login: "$username") { user(login: "$username") {
repositoriesContributedTo(last: 100, includeUserRepositories: true) { repositoriesContributedTo(last: 100, includeUserRepositories: true) {
@@ -23,7 +25,7 @@ GITHUB_API_QUERIES = {
} }
} }
}""", }""",
"repository_committed_dates": """ "repo_committed_dates": """
{ {
repository(owner: "$owner", name: "$name") { repository(owner: "$owner", name: "$name") {
defaultBranchRef { defaultBranchRef {
@@ -60,7 +62,7 @@ GITHUB_API_QUERIES = {
} }
} }
""", """,
"repository_commit_list": """ "repo_commit_list": """
{ {
repository(owner: "$owner", name: "$name") { repository(owner: "$owner", name: "$name") {
refs(refPrefix: "refs/heads/", orderBy: {direction: DESC, field: TAG_COMMIT_DATE}, first: 100) { refs(refPrefix: "refs/heads/", orderBy: {direction: DESC, field: TAG_COMMIT_DATE}, first: 100) {
@@ -92,22 +94,19 @@ GITHUB_API_QUERIES = {
} }
async def init_download_manager(waka_key: str, github_key: str, user: AuthenticatedUser): async def init_download_manager():
""" """
Initialize download manager: Initialize download manager:
- Setup headers for GitHub GraphQL requests. - Setup headers for GitHub GraphQL requests.
- Launch static queries in background. - Launch static queries in background.
:param waka_key: WakaTime API token.
:param github_key: GitHub API token.
:param user: GitHub current user info.
""" """
await DownloadManager.load_remote_resources({ await DownloadManager.load_remote_resources({
"linguist": "https://cdn.jsdelivr.net/gh/github/linguist@master/lib/linguist/languages.yml", "linguist": "https://cdn.jsdelivr.net/gh/github/linguist@master/lib/linguist/languages.yml",
"waka_latest": f"https://wakatime.com/api/v1/users/current/stats/last_7_days?api_key={waka_key}", "waka_latest": f"https://wakatime.com/api/v1/users/current/stats/last_7_days?api_key={EM.WAKATIME_API_KEY}",
"waka_all": f"https://wakatime.com/api/v1/users/current/all_time_since_today?api_key={waka_key}", "waka_all": f"https://wakatime.com/api/v1/users/current/all_time_since_today?api_key={EM.WAKATIME_API_KEY}",
"github_stats": f"https://github-contributions.vercel.app/api/v1/{user.login}" "github_stats": f"https://github-contributions.vercel.app/api/v1/{GHM.USER.login}"
}, { }, {
"Authorization": f"Bearer {github_key}" "Authorization": f"Bearer {EM.GH_TOKEN}"
}) })

View File

@@ -0,0 +1,36 @@
from os import getenv, environ
class EnvironmentManager:
    """
    Reads every action input from its `INPUT_*` environment variable once, at import
    time, and exposes the parsed values as class attributes.
    `GH_TOKEN` and `WAKATIME_API_KEY` are mandatory and raise `KeyError` when missing;
    all other inputs fall back to a default.
    """

    # String values (case-insensitive) treated as "true" for boolean inputs.
    _TRUTHY = ['true', '1', 't', 'y', 'yes']

    GH_TOKEN: str = environ['INPUT_GH_TOKEN']
    WAKATIME_API_KEY: str = environ['INPUT_WAKATIME_API_KEY']

    SECTION_NAME: str = getenv("INPUT_SECTION_NAME", "waka")
    BRANCH_NAME: str = getenv('INPUT_PUSH_BRANCH_NAME', "")

    SHOW_OS: bool = getenv('INPUT_SHOW_OS', "False").lower() in _TRUTHY
    SHOW_PROJECTS: bool = getenv('INPUT_SHOW_PROJECTS', "True").lower() in _TRUTHY
    SHOW_EDITORS: bool = getenv('INPUT_SHOW_EDITORS', "True").lower() in _TRUTHY
    SHOW_TIMEZONE: bool = getenv('INPUT_SHOW_TIMEZONE', "True").lower() in _TRUTHY
    SHOW_COMMIT: bool = getenv('INPUT_SHOW_COMMIT', "True").lower() in _TRUTHY
    SHOW_LANGUAGE: bool = getenv('INPUT_SHOW_LANGUAGE', "True").lower() in _TRUTHY
    SHOW_LINES_OF_CODE: bool = getenv('INPUT_SHOW_LINES_OF_CODE', "False").lower() in _TRUTHY
    SHOW_LANGUAGE_PER_REPO: bool = getenv('INPUT_SHOW_LANGUAGE_PER_REPO', "True").lower() in _TRUTHY
    SHOW_LOC_CHART: bool = getenv('INPUT_SHOW_LOC_CHART', "True").lower() in _TRUTHY
    SHOW_DAYS_OF_WEEK: bool = getenv('INPUT_SHOW_DAYS_OF_WEEK', "True").lower() in _TRUTHY
    SHOW_PROFILE_VIEWS: bool = getenv('INPUT_SHOW_PROFILE_VIEWS', "True").lower() in _TRUTHY
    SHOW_SHORT_INFO: bool = getenv('INPUT_SHOW_SHORT_INFO', "True").lower() in _TRUTHY
    SHOW_UPDATED_DATE: bool = getenv('INPUT_SHOW_UPDATED_DATE', "True").lower() in _TRUTHY
    SHOW_TOTAL_CODE_TIME: bool = getenv('INPUT_SHOW_TOTAL_CODE_TIME', "True").lower() in _TRUTHY

    COMMIT_BY_ME: bool = getenv('INPUT_COMMIT_BY_ME', "False").lower() in _TRUTHY
    COMMIT_MESSAGE: str = getenv('INPUT_COMMIT_MESSAGE', "Updated with Dev Metrics")
    COMMIT_USERNAME: str = getenv('INPUT_COMMIT_USERNAME', "")
    COMMIT_EMAIL: str = getenv('INPUT_COMMIT_EMAIL', "")

    LOCALE: str = getenv('INPUT_LOCALE', "en")
    UPDATED_DATE_FORMAT: str = getenv('INPUT_UPDATED_DATE_FORMAT', "%d/%m/%Y %H:%M:%S")
    # Drop empty entries so an unset input yields [] instead of [''].
    IGNORED_REPOS: list = [repo for repo in getenv('INPUT_IGNORED_REPOS', "").replace(' ', '').split(',') if repo]
    # Fallback "1" avoids int(None) crashing when the input is unset (presumed action.yml
    # default — TODO confirm).  TODO: enum?
    SYMBOL_VERSION: int = int(getenv('INPUT_SYMBOL_VERSION', "1"))

77
sources/manager_github.py Normal file
View File

@@ -0,0 +1,77 @@
from base64 import b64decode
from re import sub
from github import Github, AuthenticatedUser, Repository, ContentFile, InputGitAuthor
from manager_environment import EnvironmentManager as EM
def init_github_manager():
    """
    Prepare the GitHub manager for use: authenticate, cache the user's profile
    repository and README, then report which account is being processed.
    """
    GitHubManager.prepare_github_env()
    current_login = GitHubManager.USER.login
    print(f"Current user: {current_login}")
class GitHubManager:
    """
    Holds the authenticated GitHub session state (user, profile repository, README)
    and performs the README update commit.
    All state is class-level: `prepare_github_env` must be called once before any
    other method is used.
    """

    USER: AuthenticatedUser  # authenticated user the action runs for
    REPO: Repository  # the user's profile repository ("<login>/<login>")
    README: ContentFile  # README file object (provides path/sha for the update commit)
    README_CONTENTS: str  # decoded README text, kept for change detection

    _START_COMMENT = f'<!--START_SECTION:{EM.SECTION_NAME}-->'
    _END_COMMENT = f'<!--END_SECTION:{EM.SECTION_NAME}-->'
    # Matches the whole stats section, markers included.
    _README_REGEX = f"{_START_COMMENT}[\\s\\S]+{_END_COMMENT}"

    @staticmethod
    def prepare_github_env():
        """
        Authenticate with the GitHub API and cache the current user,
        their profile repository and the decoded README contents.
        """
        github = Github(EM.GH_TOKEN)
        GitHubManager.USER = github.get_user()
        GitHubManager.REPO = github.get_repo(f"{GitHubManager.USER.login}/{GitHubManager.USER.login}")
        GitHubManager.README = GitHubManager.REPO.get_readme()
        GitHubManager.README_CONTENTS = str(b64decode(GitHubManager.README.content), 'utf-8')

    @staticmethod
    def _generate_new_readme(stats: str) -> str:
        """
        Return the README text with the stats section replaced by `stats`.
        The replacement is passed as a callable so that backslashes or group
        references inside `stats` are inserted literally instead of being
        interpreted as `re.sub` replacement-template escapes.
        :param stats: markdown stats to embed between the section markers.
        """
        readme_stats = f"{GitHubManager._START_COMMENT}\n{stats}\n{GitHubManager._END_COMMENT}"
        return sub(GitHubManager._README_REGEX, lambda _: readme_stats, GitHubManager.README_CONTENTS)

    @staticmethod
    def _get_author() -> InputGitAuthor:
        """
        Build the commit author: the authenticated user when COMMIT_BY_ME is set
        (falling back to the configured username/email), otherwise the configured
        identity or the default GitHub Actions bot.
        """
        if EM.COMMIT_BY_ME:
            return InputGitAuthor(
                GitHubManager.USER.login or EM.COMMIT_USERNAME,
                GitHubManager.USER.email or EM.COMMIT_EMAIL
            )
        else:
            return InputGitAuthor(
                EM.COMMIT_USERNAME or 'readme-bot',
                EM.COMMIT_EMAIL or '41898282+github-actions[bot]@users.noreply.github.com'
            )

    @staticmethod
    def branch() -> str:
        """Return the configured push branch, or the repository default branch when unset."""
        return GitHubManager.REPO.default_branch if EM.BRANCH_NAME == "" else EM.BRANCH_NAME

    @staticmethod
    def update_readme(stats: str) -> bool:
        """
        Commit the regenerated README if it differs from the current contents.
        :param stats: markdown stats section to embed between the section markers.
        :return: True when a commit was made, False when the README was up to date.
        """
        new_readme = GitHubManager._generate_new_readme(stats)
        if new_readme == GitHubManager.README_CONTENTS:
            return False
        GitHubManager.REPO.update_file(
            path=GitHubManager.README.path,
            message=EM.COMMIT_MESSAGE,
            content=new_readme,
            sha=GitHubManager.README.sha,
            branch=GitHubManager.branch(),
            committer=GitHubManager._get_author()
        )
        return True

View File

@@ -0,0 +1,25 @@
from json import load
from os.path import join, dirname
from typing import Dict
from manager_environment import EnvironmentManager as EM
def init_localization_manager():
    """
    Initialize localization manager:
    load the translation table ("translation.json") for the configured locale.
    """
    LocalizationManager.load_localization("translation.json")
class LocalizationManager:
    """
    Stores the translation table for the configured locale and resolves
    localized strings by key.
    `load_localization` must be called once (see `init_localization_manager`)
    before `t` is used.
    """

    # Maps localization keys to translated strings for the active locale.
    _LOCALIZATION: Dict[str, str] = dict()

    @staticmethod
    def load_localization(file: str):
        """
        Read the translation file (JSON, located next to this module) and select
        the table for `EM.LOCALE`, falling back to English when the configured
        locale is not present in the file (instead of raising KeyError).
        :param file: translation file name.
        """
        with open(join(dirname(__file__), file), encoding='utf-8') as config_file:
            data = load(config_file)
        if EM.LOCALE in data:
            LocalizationManager._LOCALIZATION = data[EM.LOCALE]
        else:
            print(f"Unknown locale '{EM.LOCALE}', defaulting to English!")
            LocalizationManager._LOCALIZATION = data['en']

    @staticmethod
    def t(key: str) -> str:
        """
        Translate a key.
        :param key: localization key.
        :return: localized string for the active locale.
        :raises KeyError: when the key is missing from the translation table.
        """
        return LocalizationManager._LOCALIZATION[key]