.env.example
@@ -1,4 +1,5 @@
INPUT_WAKATIME_API_KEY=YOUR_WAKATIME_API_KEY
INPUT_GH_TOKEN=YOUR_GITHUB_TOKEN_KEY
INPUT_PUSH_BRANCH_NAME=main
INPUT_SECTION_NAME=waka
INPUT_SHOW_TIMEZONE=True
@@ -6,7 +7,6 @@ INPUT_SHOW_PROJECTS=True
INPUT_SHOW_EDITORS=True
INPUT_SHOW_OS=True
INPUT_SHOW_LANGUAGE=True
INPUT_GH_TOKEN=YOUR_GITHUB_TOKEN_KEY
INPUT_SYMBOL_VERSION=1
INPUT_SHOW_LINES_OF_CODE=True
INPUT_SHOW_LOC_CHART=True

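For a local run, the same variables have to be present in the process environment. A minimal sketch of how this can be done with python-dotenv, which the pre-rewrite main.py below uses via load_dotenv() (the file path is only an assumption — a local copy with real tokens filled in):

```python
from os import getenv
from dotenv import load_dotenv  # python-dotenv, a dependency of the old main.py only

load_dotenv(".env.example")  # hypothetical local copy with real tokens filled in
print(getenv("INPUT_SECTION_NAME"))  # waka
```
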
.github/workflows/build_image.yml
@@ -30,7 +30,7 @@ jobs:
type=semver,pattern={{major}}.{{minor}}

- name: Build and push Docker image 🏗️
uses: docker/build-push-action@v3
uses: docker/build-push-action@v4
with:
push: ${{ github.ref == 'refs/heads/master' || startsWith(github.ref, 'refs/heads/releases') }}
tags: ${{ steps.meta.outputs.tags }}

.github/workflows/codestyle.yml (new file)
@@ -0,0 +1,24 @@
name: CODESTYLE

on:
  push:

jobs:
  lint:
    name: Run codestyle check
    runs-on: ubuntu-latest

    steps:
      - name: Checkout 🛎️
        uses: actions/checkout@v3

      - name: Setup Python 3.8 🐍
        uses: actions/setup-python@v4
        with:
          python-version: 3.8

      - name: Install Dependencies 📥
        run: pip install -r requirements.txt

      - name: Run Codestyle ✔️
        run: flake8 --max-line-length=160 --exclude venv,assets . && black --line-length=160 --check --exclude='/venv/|/assets/' .

.gitignore
@@ -2,19 +2,14 @@
*.env

# Generated graph images:
*.png
assets/

# Library roots:
node_modules/
venv/

# Python caches:
__pycache__/

# Package manager configuration files:
package.json
package-lock.json

# IDE configuration files:
.vscode
.idea

@@ -21,7 +21,7 @@ Once you've worked on your feature/bugfix etc, you can open a pull request using

### Setting up development environment

This project is written in Python, requires **Python 3.6 or higher**, and uses `pip` .
This project is written in Python, requires **Python 3.8 or higher**, and uses `pip` .

To set it up, just fork + clone it, install all the dependencies:

Dockerfile
@@ -3,10 +3,10 @@ FROM python:3.9-alpine
ENV PYTHONUNBUFFERED 1
ENV PYTHONDONTWRITEBYTECODE 1

WORKDIR /waka-readme-stats
RUN mkdir -p /waka-readme-stats/assets

ADD requirements.txt ./requirements.txt
RUN apk add --no-cache g++ jpeg-dev zlib-dev libjpeg make && pip3 install -r requirements.txt
ADD requirements.txt /waka-readme-stats/requirements.txt
RUN apk add --no-cache g++ jpeg-dev zlib-dev libjpeg make && pip3 install -r /waka-readme-stats/requirements.txt

ADD sources/* ./
ENTRYPOINT python3 /waka-readme-stats/main.py
ADD sources/* /waka-readme-stats/
ENTRYPOINT cd /waka-readme-stats/ && python3 main.py

Makefile
@@ -1,15 +1,18 @@
.ONESHELL:
.DEFAULT_GOAL = help
SHELL = /bin/bash
.EXPORT_ALL_VARIABLES:

PATH := venv/bin:node_modules/.bin:$(PATH)
PATH := venv/bin:$(PATH)

ENV = .env.example
include $(ENV)

help:
@ # Print help commands
echo "Welcome to 'waka-readme-stats' GitHub Actions!"
echo "The action can be tested locally with: 'make run'."
echo "NB! For local testing Python version 3.6+ and NodeJS version between 14 and 16 are required."
echo "NB! For local testing Python version 3.8+ is required."
echo "The action image can be built locally with: 'make container'."
echo "NB! For local container building Docker version 20+ is required."
echo "The action directory and image can be cleaned with: 'make clean'."
@@ -21,28 +24,30 @@ venv:
pip install --upgrade pip
pip install -r requirements.txt

node_modules:
@ # Install NodeJS dependencies
npm i npm@next-8
npm i vega vega-lite vega-cli canvas

run-locally: venv node_modules
run-locally: venv
@ # Run action locally
source <(cat .env.example | sed 's/=/=/' | sed 's/^/export /') && python3 ./sources/main.py
mkdir ./assets/ 2>/dev/null || true
python3 ./sources/main.py
.PHONY: run-locally

run-container:
@ # Run action in container
docker build -t waka-readme-stats -f Dockerfile .
docker run --env-file .env.example waka-readme-stats
docker run --env-file $(ENV) -v ./assets/:/waka-readme-stats/assets/ waka-readme-stats
.PHONY: run-container

lint: venv
@ # Run flake8 and black linters
flake8 --max-line-length=160 --exclude venv,assets .
black --line-length=160 --exclude='/venv/|/assets/' .
.PHONY: lint

clean:
@ # Clean all build files, including: libraries, package manager configs, docker images and containers
rm -rf venv
rm -rf node_modules
rm -rf assets
rm -f package*.json
docker rm -f waka-readme-stats 2>/dev/null || true
docker rmi $(docker images | grep "waka-readme-stats") 2> /dev/null || true

action.yml
@@ -82,21 +82,21 @@ inputs:
description: "Shows the short facts"
default: "True"

LOCALE:
SHOW_UPDATED_DATE:
required: false
description: "Show stats in your own language"
default: "en"
description: "Show updated date"
default: "True"

SHOW_TOTAL_CODE_TIME:
required: false
description: "Show Total Time you have coded"
default: "True"

COMMIT_BY_ME:
required: false
description: "Git commit with your own name and email"
default: "False"

IGNORED_REPOS:
required: false
description: "Repos you don't want to be counted"
default: ""

COMMIT_MESSAGE:
required: false
description: "Git commit message"
@@ -112,20 +112,20 @@ inputs:
description: "Git commit custom email"
default: ""

SHOW_UPDATED_DATE:
LOCALE:
required: false
description: "Show updated date"
default: "True"
description: "Show stats in your own language"
default: "en"

UPDATED_DATE_FORMAT:
required: false
description: "Updated date format"
default: "%d/%m/%Y %H:%M:%S"

SHOW_TOTAL_CODE_TIME:
IGNORED_REPOS:
required: false
description: "Show Total Time you have coded"
default: "True"
description: "Repos you don't want to be counted"
default: ""

SYMBOL_VERSION:
required: false

requirements.txt
@@ -1,8 +1,18 @@
PyGithub==1.54.1
matplotlib==3.6.3
numpy==1.24.2
python-dotenv==0.17.0
pytz==2021.1
humanize==3.3.0
httpx==0.23.3
PyYAML==6.0
# GitHub integration modules:
PyGithub~=1.57

# Markdown visualization modules:
pytz~=2022.7
humanize~=4.6

# Graphs drawing modules:
matplotlib~=3.7
numpy~=1.24

# Request making and response parsing modules:
httpx~=0.23
PyYAML~=6.0

# Codestyle checking modules:
flake8~=6.0
black~=23.1

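The switch from exact `==` pins to `~=` compatible-release specifiers accepts any later release that keeps the stated prefix. A small illustration using the packaging library, which is not a dependency of this project, only an example:

```python
from packaging.specifiers import SpecifierSet

spec = SpecifierSet("~=1.57")              # the new PyGithub requirement above
print("1.58.0" in spec, "2.0.0" in spec)   # True False
```
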
sources/graphics_chart_drawer.py
@@ -1,45 +1,44 @@
from typing import Dict
from os.path import join, dirname
from json import load

import numpy as np
from numpy import arange, array, add, amax
import matplotlib.patches as mpatches
import matplotlib.pyplot as plt

from download_manager import DownloadManager
from manager_download import DownloadManager as DM

MAX_LANGUAGES = 5
MAX_LANGUAGES = 5  # Number of top languages to add to chart, for each year quarter
GRAPH_PATH = "assets/bar_graph.png"  # Chart saving path.

async def build_graph(yearly_data: Dict) -> str:
async def create_loc_graph(yearly_data: Dict, save_path: str):
"""
Draws graph of lines of code written by user by quarters of years.
Picks top `MAX_LANGUAGES` languages from each quarter only.

:param yearly_data: GitHub user yearly data.
:return: String, path to graph file.
:param save_path: Path to save the graph file.
"""
colors = await DownloadManager.get_remote_yaml("linguist")
colors = await DM.get_remote_yaml("linguist")

years = len(yearly_data.keys())
year_indexes = arange(years)

languages_all_loc = dict()
years = len(yearly_data.keys())
year_indexes = np.arange(years)

for i, y in enumerate(sorted(yearly_data.keys())):
for q in yearly_data[y].keys():
langs = sorted(yearly_data[y][q].keys(), key=lambda l: yearly_data[y][q][l], reverse=True)[0:MAX_LANGUAGES]
langs = sorted(yearly_data[y][q].keys(), key=lambda n: yearly_data[y][q][n], reverse=True)[0:MAX_LANGUAGES]

for lang in langs:
if lang not in languages_all_loc:
languages_all_loc[lang] = np.array([[0] * years] * 4)
languages_all_loc[lang] = array([[0] * years] * 4)
languages_all_loc[lang][q - 1][i] = yearly_data[y][q][lang]

fig = plt.figure()
ax = fig.add_axes([0, 0, 1.5, 1])

language_handles = []
cumulative = np.array([[0] * years] * 4)
cumulative = array([[0] * years] * 4)

for key, value in languages_all_loc.items():
color = colors[key]["color"] if colors[key]["color"] is not None else "w"
@@ -47,10 +46,10 @@ async def build_graph(yearly_data: Dict) -> str:

for quarter in range(4):
ax.bar(year_indexes + quarter * 0.21, value[quarter], 0.2, bottom=cumulative[quarter], color=color)
cumulative[quarter] = np.add(cumulative[quarter], value[quarter])
cumulative[quarter] = add(cumulative[quarter], value[quarter])

ax.set_ylabel("LOC added", fontdict=dict(weight="bold"))
ax.set_xticks(np.array([np.arange(i, i + 0.84, step=0.21) for i in year_indexes]).flatten(), labels=["Q1", "Q2", "Q3", "Q4"] * years)
ax.set_xticks(array([arange(i, i + 0.84, step=0.21) for i in year_indexes]).flatten(), labels=["Q1", "Q2", "Q3", "Q4"] * years)

sax = ax.secondary_xaxis("top")
sax.set_xticks(year_indexes + 0.42, labels=sorted(yearly_data.keys()))
@@ -63,7 +62,6 @@ async def build_graph(yearly_data: Dict) -> str:
ax.spines["top"].set_visible(False)
ax.spines["right"].set_visible(False)

plt.ylim(0, 1.05 * np.amax(cumulative))
plt.savefig("bar_graph.png", bbox_inches="tight")
plt.ylim(0, 1.05 * amax(cumulative))
plt.savefig(save_path, bbox_inches="tight")
plt.close(fig)
return "bar_graph.png"

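For reference, a minimal sketch of the data shape create_loc_graph consumes: a mapping of year -> quarter (1-4) -> language -> lines of code added. The sample values are invented, and the call is left commented out because the function is async and fetches the linguist colors through the download manager:

```python
# Hypothetical yearly_data sample: year -> quarter (1..4) -> language -> LOC added.
sample_yearly_data = {
    2022: {1: {"Python": 1200, "Shell": 150}, 3: {"Python": 800}},
    2023: {2: {"Python": 2400, "YAML": 90}},
}

# await create_loc_graph(sample_yearly_data, "assets/bar_graph.png") would keep the
# top MAX_LANGUAGES languages per quarter and save a stacked bar chart to that path.
```
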
sources/graphics_list_formatter.py (new file)
@@ -0,0 +1,148 @@
from enum import Enum
from typing import Dict, Tuple, List
from datetime import datetime

from pytz import timezone, utc

from manager_download import DownloadManager as DM
from manager_environment import EnvironmentManager as EM
from manager_github import GitHubManager as GHM
from manager_localization import LocalizationManager as LM


DAY_TIME_EMOJI = ["🌞", "🌆", "🌃", "🌙"]  # Emojis, representing different times of day.
DAY_TIME_NAMES = ["Morning", "Daytime", "Evening", "Night"]  # Localization identifiers for different times of day.
WEEK_DAY_NAMES = ["Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday", "Sunday"]  # Localization identifiers for different days of week.


class Symbol(Enum):
    """
    Symbol version enum.
    Allows to retrieve symbols pairs by calling `Symbol.get_symbols(version)`.
    """

    VERSION_1 = "█", "░"
    VERSION_2 = "⣿", "⣀"
    VERSION_3 = "⬛", "⬜"

    @staticmethod
    def get_symbols(version: int) -> Tuple[str, str]:
        """
        Retrieves symbols pair for specified version.

        :param version: Required symbols version.
        :returns: Two strings for filled and empty symbol value in a tuple.
        """
        return Symbol[f"VERSION_{version}"].value


def make_graph(percent: float):
    """
    Make text progress bar.
    Length of the progress bar is 25 characters.

    :param percent: Completion percent of the progress bar.
    :return: The string progress bar representation.
    """
    done_block, empty_block = Symbol.get_symbols(EM.SYMBOL_VERSION)
    percent_quart = round(percent / 4)
    return f"{done_block * percent_quart}{empty_block * (25 - percent_quart)}"


def make_list(data: List = None, names: List[str] = None, texts: List[str] = None, percents: List[float] = None, top_num: int = 5, sort: bool = True) -> str:
    """
    Make list of text progress bars with supportive info.
    Each row has the following structure: [name of the measure] [quantity description (with words)] [progress bar] [total percentage].
    Name of the measure: up to 25 characters.
    Quantity description: how many _things_ were found, up to 20 characters.
    Progress bar: measure percentage, 25 characters.
    Total percentage: floating point percentage.

    :param data: list of dictionaries, each of them containing a measure (name, text and percent).
    :param names: list of names (names of measure), overloads data if defined.
    :param texts: list of texts (quantity descriptions), overloads data if defined.
    :param percents: list of percents (total percentages), overloads data if defined.
    :param top_num: how many measures to display, default: 5.
    :param sort: if measures should be sorted by total percentage, default: True.
    :returns: The string representation of the list.
    """
    if data is not None:
        names = [value for item in data for key, value in item.items() if key == "name"] if names is None else names
        texts = [value for item in data for key, value in item.items() if key == "text"] if texts is None else texts
        percents = [value for item in data for key, value in item.items() if key == "percent"] if percents is None else percents

    data = list(zip(names, texts, percents))
    top_data = sorted(data[:top_num], key=lambda record: record[2], reverse=True) if sort else data[:top_num]
    data_list = [f"{n[:25]}{' ' * (25 - len(n))}{t}{' ' * (20 - len(t))}{make_graph(p)} {p:05.2f} % " for n, t, p in top_data]
    return "\n".join(data_list)


async def make_commit_day_time_list(time_zone: str) -> str:
    """
    Calculate commit-related info, how many commits were made, and at what time of day and day of week.

    :param time_zone: User time zone.
    :returns: string representation of statistics.
    """
    stats = str()

    result = await DM.get_remote_graphql("repos_contributed_to", username=GHM.USER.login)
    repos = [d for d in result["data"]["user"]["repositoriesContributedTo"]["nodes"] if d["isFork"] is False]

    day_times = [0] * 4  # 0 - 6, 6 - 12, 12 - 18, 18 - 24
    week_days = [0] * 7  # Monday, Tuesday, Wednesday, Thursday, Friday, Saturday, Sunday

    for repository in repos:
        result = await DM.get_remote_graphql("repo_committed_dates", owner=repository["owner"]["login"], name=repository["name"], id=GHM.USER.node_id)
        if result["data"]["repository"] is None or result["data"]["repository"]["defaultBranchRef"] is None:
            continue

        committed_dates = result["data"]["repository"]["defaultBranchRef"]["target"]["history"]["nodes"]
        for committed_date in committed_dates:
            local_date = datetime.strptime(committed_date["committedDate"], "%Y-%m-%dT%H:%M:%SZ")
            date = local_date.replace(tzinfo=utc).astimezone(timezone(time_zone))

            day_times[date.hour // 6] += 1
            week_days[date.isoweekday() - 1] += 1

    sum_day = sum(day_times)
    sum_week = sum(week_days)
    day_times = day_times[1:] + day_times[:1]

    dt_names = [f"{DAY_TIME_EMOJI[i]} {LM.t(DAY_TIME_NAMES[i])}" for i in range(len(day_times))]
    dt_texts = [f"{day_time} commits" for day_time in day_times]
    dt_percents = [round((day_time / sum_day) * 100, 2) for day_time in day_times]
    title = LM.t("I am an Early") if sum(day_times[0:2]) >= sum(day_times[2:4]) else LM.t("I am a Night")
    stats += f"**{title}** \n\n```text\n{make_list(names=dt_names, texts=dt_texts, percents=dt_percents, top_num=7, sort=False)}\n```\n"

    if EM.SHOW_DAYS_OF_WEEK:
        wd_names = [LM.t(week_day) for week_day in WEEK_DAY_NAMES]
        wd_texts = [f"{week_day} commits" for week_day in week_days]
        wd_percents = [round((week_day / sum_week) * 100, 2) for week_day in week_days]
        title = LM.t("I am Most Productive on") % wd_names[wd_percents.index(max(wd_percents))]
        stats += f"📅 **{title}** \n\n```text\n{make_list(names=wd_names, texts=wd_texts, percents=wd_percents, top_num=7, sort=False)}\n```\n"

    return stats


def make_language_per_repo_list(repositories: Dict) -> str:
    """
    Calculate language-related info, how many repositories in what language user has.

    :param repositories: User repositories.
    :returns: string representation of statistics.
    """
    language_count = dict()
    repos_with_language = [repo for repo in repositories["data"]["user"]["repositories"]["nodes"] if repo["primaryLanguage"] is not None]
    for repo in repos_with_language:
        language = repo["primaryLanguage"]["name"]
        language_count[language] = language_count.get(language, {"count": 0})
        language_count[language]["count"] += 1

    names = list(language_count.keys())
    texts = [f"{language_count[lang]['count']} {'repo' if language_count[lang]['count'] == 1 else 'repos'}" for lang in names]
    percents = [round(language_count[lang]["count"] / len(repos_with_language) * 100, 2) for lang in names]

    top_language = max(list(language_count.keys()), key=lambda x: language_count[x]["count"])
    title = f"**{LM.t('I Mostly Code in') % top_language}** \n\n" if len(repos_with_language) > 0 else ""
    return f"{title}```text\n{make_list(names=names, texts=texts, percents=percents)}\n```\n\n"

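A standalone sketch of the progress-bar arithmetic used by make_graph and make_list above, with the VERSION_1 symbols assumed so it does not need the environment manager:

```python
# 25-character bar; each filled block stands for roughly 4 %.
DONE_BLOCK, EMPTY_BLOCK = "█", "░"

def sketch_graph(percent: float) -> str:
    filled = round(percent / 4)
    return f"{DONE_BLOCK * filled}{EMPTY_BLOCK * (25 - filled)}"

print(sketch_graph(42.5))  # 11 filled blocks followed by 14 empty ones
```
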
sources/loc.py (deleted file)
@@ -1,77 +0,0 @@
import re
from asyncio import sleep

from github import Github, InputGitAuthor, AuthenticatedUser
import datetime

from download_manager import DownloadManager
from make_bar_graph import build_graph

class LinesOfCode:

def __init__(self, user: AuthenticatedUser, ghtoken, repositoryData, ignored_repos):
self.g = Github(ghtoken)
self.user = user
self.repositoryData = repositoryData
self.ignored_repos = ignored_repos

async def calculateLoc(self):
result = self.repositoryData
yearly_data = {}
total = len(result['data']['user']['repositories']['nodes'])
for ind, repo in enumerate(result['data']['user']['repositories']['nodes']):
if repo['name'] not in self.ignored_repos:
print(f"{ind}/{total}", "Retrieving repo:", repo["owner"]["login"], repo['name'])
await self.getCommitStat(repo, yearly_data)
await sleep(0.7)
return yearly_data

async def plotLoc(self, yearly_data):
await build_graph(yearly_data)
self.pushChart()

def getQuarter(self, timeStamp):
month = datetime.datetime.fromisoformat(timeStamp).month
if month >= 1 and month <= 3:
return 1
elif month >= 4 and month <= 6:
return 2
elif month >= 7 and month <= 9:
return 3
elif month >= 10 and month <= 12:
return 4

async def getCommitStat(self, repoDetails, yearly_data):
branch_data = await DownloadManager.get_remote_graphql("repository_branches_list", owner=repoDetails["owner"]["login"], name=repoDetails['name'])
if branch_data["data"]["repository"] is None:
print("\tSkipping:", repoDetails['name'])
return

for branch in branch_data["data"]["repository"]["refs"]["nodes"]:
commit_data = await DownloadManager.get_remote_graphql("repository_branch_commit_list", owner=repoDetails["owner"]["login"], name=repoDetails['name'], branch=branch["name"], id=self.user.node_id)

for commit in commit_data["data"]["repository"]["ref"]["target"]["history"]["nodes"]:
date = re.search(r'\d+-\d+-\d+', commit["committedDate"]).group(0)
curr_year = datetime.datetime.fromisoformat(date).year
quarter = self.getQuarter(date)

if repoDetails['primaryLanguage'] is not None:
if curr_year not in yearly_data:
yearly_data[curr_year] = {}
if quarter not in yearly_data[curr_year]:
yearly_data[curr_year][quarter] = {}
if repoDetails['primaryLanguage']['name'] not in yearly_data[curr_year][quarter]:
yearly_data[curr_year][quarter][repoDetails['primaryLanguage']['name']] = 0
yearly_data[curr_year][quarter][repoDetails['primaryLanguage']['name']] += (commit["additions"] - commit["deletions"])

def pushChart(self):
repo = self.g.get_repo(f"{self.user.login}/{self.user.login}")
committer = InputGitAuthor('readme-bot', '41898282+github-actions[bot]@users.noreply.github.com')
with open('bar_graph.png', 'rb') as input_file:
data = input_file.read()
try:
contents = repo.get_contents("charts/bar_graph.png")
repo.update_file(contents.path, "Charts Updated", data, contents.sha, committer=committer)
except Exception as e:
repo.create_file("charts/bar_graph.png", "Charts Added", data, committer=committer)

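The deleted getQuarter method maps a month to its calendar quarter with an if/elif chain; the equivalent arithmetic, shown here purely as an illustration, is:

```python
from datetime import datetime

def quarter_of(timestamp: str) -> int:
    # Same result as the removed LinesOfCode.getQuarter for months 1-12.
    return (datetime.fromisoformat(timestamp).month - 1) // 3 + 1

print(quarter_of("2023-05-14"))  # 2
```
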
sources/main.py
@@ -1,459 +1,169 @@
'''
"""
Readme Development Metrics With waka time progress
'''
import re
import os
import base64
"""
from asyncio import run
from typing import Dict

from pytz import timezone
import pytz
from github import Github, InputGitAuthor, AuthenticatedUser
import datetime

from download_manager import init_download_manager, DownloadManager
from loc import LinesOfCode
import humanize
from datetime import datetime
from urllib.parse import quote
import json
import math

from dotenv import load_dotenv
from humanize import intword, naturalsize, intcomma, precisedelta

load_dotenv()

START_COMMENT = f'<!--START_SECTION:{os.getenv("INPUT_SECTION_NAME")}-->'
END_COMMENT = f'<!--END_SECTION:{os.getenv("INPUT_SECTION_NAME")}-->'
listReg = f"{START_COMMENT}[\\s\\S]+{END_COMMENT}"

waka_key = os.getenv('INPUT_WAKATIME_API_KEY')
ghtoken = os.getenv('INPUT_GH_TOKEN')
branchName = os.getenv('INPUT_PUSH_BRANCH_NAME')
showTimeZone = os.getenv('INPUT_SHOW_TIMEZONE')
showProjects = os.getenv('INPUT_SHOW_PROJECTS')
showEditors = os.getenv('INPUT_SHOW_EDITORS')
showOs = os.getenv('INPUT_SHOW_OS')
showCommit = os.getenv('INPUT_SHOW_COMMIT')
showLanguage = os.getenv('INPUT_SHOW_LANGUAGE')
show_loc = os.getenv('INPUT_SHOW_LINES_OF_CODE')
show_days_of_week = os.getenv('INPUT_SHOW_DAYS_OF_WEEK')
showLanguagePerRepo = os.getenv('INPUT_SHOW_LANGUAGE_PER_REPO')
showLocChart = os.getenv('INPUT_SHOW_LOC_CHART')
show_profile_view = os.getenv('INPUT_SHOW_PROFILE_VIEWS')
show_short_info = os.getenv('INPUT_SHOW_SHORT_INFO')
locale = os.getenv('INPUT_LOCALE')
commit_by_me = os.getenv('INPUT_COMMIT_BY_ME')
ignored_repos_name = str(os.getenv('INPUT_IGNORED_REPOS') or '').replace(' ', '').split(',')
show_updated_date = os.getenv('INPUT_SHOW_UPDATED_DATE')
updated_date_format = os.getenv('INPUT_UPDATED_DATE_FORMAT')
commit_message = os.getenv('INPUT_COMMIT_MESSAGE')
commit_username = os.getenv('INPUT_COMMIT_USERNAME')
commit_email = os.getenv('INPUT_COMMIT_EMAIL')
show_total_code_time = os.getenv('INPUT_SHOW_TOTAL_CODE_TIME')
symbol_version = os.getenv('INPUT_SYMBOL_VERSION').strip()
show_waka_stats = 'y'

truthy = ['true', '1', 't', 'y', 'yes']

translate: Dict[str, str]
user: AuthenticatedUser
from manager_download import init_download_manager, DownloadManager as DM
from manager_environment import EnvironmentManager as EM
from manager_github import init_github_manager, GitHubManager as GHM
from manager_localization import init_localization_manager, LocalizationManager as LM
from graphics_chart_drawer import create_loc_graph, GRAPH_PATH
from yearly_commit_calculator import calculate_yearly_commit_data
from graphics_list_formatter import make_list, make_commit_day_time_list, make_language_per_repo_list

def millify(n):
millnames = ['', ' Thousand', ' Million', ' Billion', ' Trillion']
n = float(n)
millidx = max(0, min(len(millnames) - 1,
int(math.floor(0
if n == 0
else math.log10(abs(n)) / 3))))
async def get_waka_time_stats() -> str:
"""
Collects user info from wakatime.
Info includes most common commit time, timezone, language, editors, projects and OSs.

return '{:.0f}{}'.format(n / 10 ** (3 * millidx), millnames[millidx])
:returns: String representation of the info.
"""
stats = str()

data = await DM.get_remote_json("waka_latest")
if EM.SHOW_COMMIT:
stats += f"{await make_commit_day_time_list(data['data']['timezone'])}\n\n"

def make_graph(percent: float):
'''Make progress graph from API graph'''
if (symbol_version == '1'):  # version 1
done_block = '█'
empty_block = '░'
elif (symbol_version == '2'):  # version 2
done_block = '⣿'
empty_block = '⣀'
elif (symbol_version == '3'):  # version 3
done_block = '⬛'
empty_block = '⬜'
else:
done_block = '█'  # default is version 1
empty_block = '░'
if EM.SHOW_TIMEZONE or EM.SHOW_LANGUAGE or EM.SHOW_EDITORS or EM.SHOW_PROJECTS or EM.SHOW_OS:
no_activity = LM.t("No Activity Tracked This Week")
stats += f"📊 **{LM.t('This Week I Spend My Time On')}** \n\n```text\n"

pc_rnd = round(percent)
return f"{done_block * int(pc_rnd / 4)}{empty_block * int(25 - int(pc_rnd / 4))}"
if EM.SHOW_TIMEZONE:
time_zone = data["data"]["timezone"]
stats += f"🕑︎ {LM.t('Timezone')}: {time_zone}\n\n"

if EM.SHOW_LANGUAGE:
lang_list = no_activity if len(data["data"]["languages"]) == 0 else make_list(data["data"]["languages"])
stats += f"💬 {LM.t('Languages')}: \n{lang_list}\n\n"

def make_list(data: list):
'''Make List'''
data_list = []
for l in data[:5]:
ln = len(l['name'])
ln_text = len(l['text'])
percent = "{:05.2f}".format(float(l['percent']))
op = f"{l['name'][:25]}{' ' * (25 - ln)}{l['text']}{' ' * (20 - ln_text)}{make_graph(l['percent'])} {percent} % "
data_list.append(op)
return '\n'.join(data_list)
if EM.SHOW_EDITORS:
edit_list = no_activity if len(data["data"]["editors"]) == 0 else make_list(data["data"]["editors"])
stats += f"🔥 {LM.t('Editors')}: \n{edit_list}\n\n"

if EM.SHOW_PROJECTS:
project_list = no_activity if len(data["data"]["projects"]) == 0 else make_list(data["data"]["projects"])
stats += f"🐱💻 {LM.t('Projects')}: \n{project_list}\n\n"

def make_commit_list(data: list):
'''Make List'''
data_list = []
for l in data[:7]:
ln = len(l['name'])
ln_text = len(l['text'])
percent = "{:05.2f}".format(float(l['percent']))
op = f"{l['name']}{' ' * ((15 - ln) + (11 - ln_text))}{l['text']}{' ' * (7)}{make_graph(l['percent'])} {percent} % "
data_list.append(op)
return '\n'.join(data_list)
if EM.SHOW_OS:
os_list = no_activity if len(data["data"]["operating_systems"]) == 0 else make_list(data["data"]["operating_systems"])
stats += f"💻 {LM.t('operating system')}: \n{os_list}\n\n"

async def generate_commit_list(tz):
string = ''

result = await DownloadManager.get_remote_graphql("repositories_contributed_to", username=user.login)
nodes = result["data"]["user"]["repositoriesContributedTo"]["nodes"]
repos = [d for d in nodes if d['isFork'] is False]

morning = 0  # 6 - 12
daytime = 0  # 12 - 18
evening = 0  # 18 - 24
night = 0  # 0 - 6

Monday = 0
Tuesday = 0
Wednesday = 0
Thursday = 0
Friday = 0
Saturday = 0
Sunday = 0

for repository in repos:
result = await DownloadManager.get_remote_graphql("repository_committed_dates", owner=repository["owner"]["login"], name=repository["name"], id=user.node_id)
if result["data"]["repository"] is None or result["data"]["repository"]["defaultBranchRef"] is None:
continue

committed_dates = result["data"]["repository"]["defaultBranchRef"]["target"]["history"]["nodes"]
for committedDate in committed_dates:
date = datetime.datetime.strptime(committedDate["committedDate"], "%Y-%m-%dT%H:%M:%SZ").replace(tzinfo=pytz.utc).astimezone(timezone(tz))
hour = date.hour
weekday = date.strftime('%A')
if 6 <= hour < 12:
morning += 1
if 12 <= hour < 18:
daytime += 1
if 18 <= hour < 24:
evening += 1
if 0 <= hour < 6:
night += 1

if weekday == "Monday":
Monday += 1
if weekday == "Tuesday":
Tuesday += 1
if weekday == "Wednesday":
Wednesday += 1
if weekday == "Thursday":
Thursday += 1
if weekday == "Friday":
Friday += 1
if weekday == "Saturday":
Saturday += 1
if weekday == "Sunday":
Sunday += 1

sumAll = morning + daytime + evening + night
sum_week = Sunday + Monday + Tuesday + Friday + Saturday + Wednesday + Thursday
title = translate['I am an Early'] if morning + daytime >= evening + night else translate['I am a Night']
one_day = [
{"name": "🌞 " + translate['Morning'], "text": str(morning) + " commits",
"percent": round((morning / sumAll) * 100, 2)},
{"name": "🌆 " + translate['Daytime'], "text": str(daytime) + " commits",
"percent": round((daytime / sumAll) * 100, 2)},
{"name": "🌃 " + translate['Evening'], "text": str(evening) + " commits",
"percent": round((evening / sumAll) * 100, 2)},
{"name": "🌙 " + translate['Night'], "text": str(night) + " commits",
"percent": round((night / sumAll) * 100, 2)},
]
dayOfWeek = [
{"name": translate['Monday'], "text": str(Monday) + " commits", "percent": round((Monday / sum_week) * 100, 2)},
{"name": translate['Tuesday'], "text": str(Tuesday) + " commits",
"percent": round((Tuesday / sum_week) * 100, 2)},
{"name": translate['Wednesday'], "text": str(Wednesday) + " commits",
"percent": round((Wednesday / sum_week) * 100, 2)},
{"name": translate['Thursday'], "text": str(Thursday) + " commits",
"percent": round((Thursday / sum_week) * 100, 2)},
{"name": translate['Friday'], "text": str(Friday) + " commits", "percent": round((Friday / sum_week) * 100, 2)},
{"name": translate['Saturday'], "text": str(Saturday) + " commits",
"percent": round((Saturday / sum_week) * 100, 2)},
{"name": translate['Sunday'], "text": str(Sunday) + " commits", "percent": round((Sunday / sum_week) * 100, 2)},
]

string = string + '**' + title + '** \n\n' + '```text\n' + make_commit_list(one_day) + '\n\n```\n'

if show_days_of_week.lower() in truthy:
max_element = {
'percent': 0
}

for day in dayOfWeek:
if day['percent'] > max_element['percent']:
max_element = day
days_title = translate['I am Most Productive on'] % max_element['name']
string = string + '📅 **' + days_title + '** \n\n' + '```text\n' + make_commit_list(dayOfWeek) + '\n\n```\n'

return string

async def get_waka_time_stats():
stats = ''
no_activity = translate["No Activity Tracked This Week"]

data = await DownloadManager.get_remote_json("waka_latest")
if showCommit.lower() in truthy:
stats = stats + await generate_commit_list(data['data']['timezone']) + '\n\n'

if showTimeZone.lower() in truthy or showLanguage.lower() in truthy or showEditors.lower() in truthy or showProjects.lower() in truthy or showOs.lower() in truthy:
stats += '📊 **' + translate['This Week I Spend My Time On'] + '** \n\n'
stats += '```text\n'

if showTimeZone.lower() in truthy:
tzone = data['data']['timezone']
stats = stats + '⌚︎ ' + translate['Timezone'] + ': ' + tzone + '\n\n'

if showLanguage.lower() in truthy:
if len(data['data']['languages']) == 0:
lang_list = no_activity
else:
lang_list = make_list(data['data']['languages'])
stats = stats + '💬 ' + translate['Languages'] + ': \n' + lang_list + '\n\n'

if showEditors.lower() in truthy:
if len(data['data']['editors']) == 0:
edit_list = no_activity
else:
edit_list = make_list(data['data']['editors'])
stats = stats + '🔥 ' + translate['Editors'] + ': \n' + edit_list + '\n\n'

if showProjects.lower() in truthy:
if len(data['data']['projects']) == 0:
project_list = no_activity
else:
# Re-order the project list by percentage
data['data']['projects'] = sorted(data['data']['projects'], key=lambda x: x["percent"],
reverse=True)
project_list = make_list(data['data']['projects'])
stats = stats + '🐱💻 ' + translate['Projects'] + ': \n' + project_list + '\n\n'

if showOs.lower() in truthy:
if len(data['data']['operating_systems']) == 0:
os_list = no_activity
else:
os_list = make_list(data['data']['operating_systems'])
stats = stats + '💻 ' + translate['operating system'] + ': \n' + os_list + '\n\n'

stats += '```\n\n'
stats = f"{stats[:-1]}```\n\n"

return stats

def generate_language_per_repo(result):
language_count = {}
total = 0
for repo in result['data']['user']['repositories']['nodes']:
if repo['primaryLanguage'] is None:
continue
language = repo['primaryLanguage']['name']
total += 1
if language not in language_count.keys():
language_count[language] = {}
language_count[language]['count'] = 1
async def get_short_github_info() -> str:
"""
Collects user info from GitHub public profile.
The stats include: disk usage, contributions number, whether the user has opted to hire, public and private repositories number.

:returns: String representation of the info.
"""
stats = f"**🐱 {LM.t('My GitHub Data')}** \n\n"

if GHM.USER.disk_usage is None:
disk_usage = LM.t("Used in GitHub's Storage") % "?"
print("Please add new github personal access token with user permission!")
else:
language_count[language]['count'] = language_count[language]['count'] + 1
data = []
sorted_labels = list(language_count.keys())
sorted_labels.sort(key=lambda x: language_count[x]['count'], reverse=True)
for label in sorted_labels:
percent = round(language_count[label]['count'] / total * 100, 2)
extension = " repos"
if language_count[label]['count'] == 1:
extension = " repo"
data.append({
"name": label,
"text": str(language_count[label]['count']) + extension,
"percent": percent
})
disk_usage = LM.t("Used in GitHub's Storage") % naturalsize(GHM.USER.disk_usage)
stats += f"> 📦 {disk_usage} \n > \n"

title = '**' + translate['I Mostly Code in'] % sorted_labels[0] + '** \n\n' if len(sorted_labels) > 0 else ''
return title + '```text\n' + make_list(data) + '\n\n```\n'
data = await DM.get_remote_json("github_stats")
if len(data["years"]) > 0:
contributions = LM.t("Contributions in the year") % (intcomma(data["years"][0]["total"]), data["years"][0]["year"])
stats += f"> 🏆 {contributions}\n > \n"

async def get_yearly_data():
repository_list = await DownloadManager.get_remote_graphql("user_repository_list", username=user.login, id=user.node_id)
loc = LinesOfCode(user, ghtoken, repository_list, ignored_repos_name)
yearly_data = await loc.calculateLoc()
if showLocChart.lower() in truthy:
await loc.plotLoc(yearly_data)
return yearly_data

async def get_line_of_code() -> str:
repositoryList = await DownloadManager.get_remote_graphql("user_repository_list", username=user.login, id=user.node_id)
loc = LinesOfCode(user, ghtoken, repositoryList, ignored_repos_name)
yearly_data = await loc.calculateLoc()
total_loc = sum(
[yearly_data[year][quarter][lang] for year in yearly_data for quarter in yearly_data[year] for lang in
yearly_data[year][quarter]])
return millify(int(total_loc))

async def get_short_info():
string = '**🐱 ' + translate['My GitHub Data'] + '** \n\n'
if user.disk_usage is None:
disk_usage = humanize.naturalsize(0)
print("Please add new github personal access token with user permission")
opted_to_hire = GHM.USER.hireable
if opted_to_hire:
stats += f"> 💼 {LM.t('Opted to Hire')}\n > \n"
else:
disk_usage = humanize.naturalsize(user.disk_usage)
data = await DownloadManager.get_remote_json("github_stats")
if len(data['years']) > 0:
this_year_data = data['years'][0]
total = this_year_data['total']
year = this_year_data['year']
string += '> 🏆 ' + translate['Contributions in the year'] % (humanize.intcomma(total), year) + '\n > \n'
stats += f"> 🚫 {LM.t('Not Opted to Hire')}\n > \n"

string += '> 📦 ' + translate["Used in GitHub's Storage"] % disk_usage + ' \n > \n'
is_hireable = user.hireable
public_repo = user.public_repos
private_repo = user.owned_private_repos
if private_repo is None:
private_repo = 0
if is_hireable:
string += "> 💼 " + translate["Opted to Hire"] + "\n > \n"
public_repo = GHM.USER.public_repos
if public_repo != 1:
stats += f"> 📜 {LM.t('public repositories') % public_repo} \n > \n"
else:
string += "> 🚫 " + translate["Not Opted to Hire"] + "\n > \n"
stats += f"> 📜 {LM.t('public repository') % public_repo} \n > \n"

string += '> 📜 '
string += translate['public repositories'] % public_repo + " " + '\n > \n' if public_repo != 1 else translate[
'public repository'] % public_repo + " " + '\n > \n'
string += '> 🔑 '
string += translate['private repositories'] % private_repo + " " + ' \n > \n' if private_repo != 1 else translate[
'private repository'] % private_repo + " " + '\n > \n'
private_repo = GHM.USER.owned_private_repos if GHM.USER.owned_private_repos is not None else 0
if public_repo != 1:
stats += f"> 🔑 {LM.t('private repositories') % private_repo} \n > \n"
else:
stats += f"> 🔑 {LM.t('private repository') % private_repo} \n > \n"

return string
return stats

async def get_stats(github) -> str:
'''Gets API data and returns markdown progress'''
async def get_stats() -> str:
"""
Creates new README.md content from all the acquired statistics from all places.
The readme includes data from wakatime, contributed lines of code number, GitHub profile info and last updated date.

stats = ''
repositoryList = await DownloadManager.get_remote_graphql("user_repository_list", username=user.login, id=user.node_id)
:returns: String representation of README.md contents.
"""
stats = str()
repositories = await DM.get_remote_graphql("user_repository_list", username=GHM.USER.login, id=GHM.USER.node_id)

if show_loc.lower() in truthy or showLocChart.lower() in truthy:
# This condition is written to calculate the lines of code because it is heavy process soo needs to be calculate once this will reduce the execution time
await get_yearly_data()
if EM.SHOW_LINES_OF_CODE or EM.SHOW_LOC_CHART:
yearly_data = await calculate_yearly_commit_data(repositories)
else:
yearly_data = (None, dict())

if show_total_code_time.lower() in truthy:
data = await DownloadManager.get_remote_json("waka_all")
stats += ') + '-' + quote(str(
data['data']['text'])) + '-blue)\n\n'
if EM.SHOW_TOTAL_CODE_TIME:
data = await DM.get_remote_json("waka_all")
stats += f"}-{quote(str(data['data']['text']))}-blue)\n\n"

if show_profile_view.lower() in truthy:
data = github.get_repo(f"{user.login}/{user.login}").get_views_traffic(per="week")
stats += ') + '-' + str(
data['count']) + '-blue)\n\n'
if EM.SHOW_PROFILE_VIEWS:
data = GHM.REPO.get_views_traffic(per="week")
stats += f")}-{data['count']}-blue)\n\n"

if show_loc.lower() in truthy:
stats += ') + '-' + quote(
str(await get_line_of_code())) + '%20' + quote(str(translate['Lines of code'])) + '-blue)\n\n'
if EM.SHOW_LINES_OF_CODE:
total_loc = sum([yearly_data[y][q][d] for y in yearly_data.keys() for q in yearly_data[y].keys() for d in yearly_data[y][q].keys()])
data = f"{intword(total_loc)} {LM.t('Lines of code')}"
stats += f")}-{quote(data)}-blue)\n\n"

if show_short_info.lower() in truthy:
stats += await get_short_info()
if EM.SHOW_SHORT_INFO:
stats += await get_short_github_info()

if show_waka_stats.lower() in truthy:
stats += await get_waka_time_stats()

if showLanguagePerRepo.lower() in truthy:
stats = stats + generate_language_per_repo(repositoryList) + '\n\n'
if EM.SHOW_LANGUAGE_PER_REPO:
stats += f"{make_language_per_repo_list(repositories)}\n\n"

if showLocChart.lower() in truthy:
stats += '**' + translate['Timeline'] + '**\n\n'
branch_name = github.get_repo(f'{user.login}/{user.login}').default_branch
stats = stats + ' \n\n'
if EM.SHOW_LOC_CHART:
await create_loc_graph(yearly_data, GRAPH_PATH)
GHM.update_chart(GRAPH_PATH)
chart_path = f"{GHM.USER.login}/{GHM.USER.login}/{GHM.branch()}/{GRAPH_PATH}"
stats += f"**{LM.t('Timeline')}**\n\n\n\n"

if show_updated_date.lower() in truthy:
now = datetime.datetime.utcnow()
d1 = now.strftime(updated_date_format)
stats = stats + "\n Last Updated on " + d1 + " UTC"
if EM.SHOW_UPDATED_DATE:
stats += f"\n Last Updated on {datetime.now().strftime(EM.UPDATED_DATE_FORMAT)} UTC"

return stats

def decode_readme(data: str):
'''Decode the contents of old readme'''
decoded_bytes = base64.b64decode(data)
return str(decoded_bytes, 'utf-8')

def generate_new_readme(stats: str, readme: str):
'''Generate a new Readme.md'''
stats_in_readme = f"{START_COMMENT}\n{stats}\n{END_COMMENT}"
return re.sub(listReg, stats_in_readme, readme)

async def main():
global translate, user
"""
Application main function.
Initializes all managers, collects user info and updates README.md if necessary.
"""
init_github_manager()
await init_download_manager()
init_localization_manager()

if ghtoken is None:
raise Exception('Token not available')
user = Github(ghtoken).get_user()
print(f"Current user: {user.login}")
await init_download_manager(waka_key, ghtoken, user)

try:
with open(os.path.join(os.path.dirname(__file__), 'translation.json'), encoding='utf-8') as config_file:
data = json.load(config_file)
translate = data[locale]
except Exception as e:
print("Cannot find the Locale choosing default to english")
translate = data['en']

g = Github(ghtoken)
waka_stats = await get_stats(g)

repo = g.get_repo(f"{user.login}/{user.login}")
contents = repo.get_readme()
rdmd = decode_readme(contents.content)
new_readme = generate_new_readme(stats=waka_stats, readme=rdmd)

if commit_by_me.lower() in truthy:
committer = InputGitAuthor(user.login or commit_username, user.email or commit_email)
else:
committer = InputGitAuthor(
commit_username or 'readme-bot',
commit_email or '41898282+github-actions[bot]@users.noreply.github.com'
)
if new_readme != rdmd:
try:
repo.update_file(path=contents.path, message=commit_message,
content=new_readme, sha=contents.sha, branch=branchName,
committer=committer)
except:
repo.update_file(path=contents.path, message=commit_message,
content=new_readme, sha=contents.sha, branch='main',
committer=committer)
print("Readme updated")
if GHM.update_readme(await get_stats()):
print("Readme updated!")
await DM.close_remote_resources()

if __name__ == '__main__':
start_time = datetime.datetime.now().timestamp() * 1000
if __name__ == "__main__":
start_time = datetime.now()
run(main())
end_time = datetime.datetime.now().timestamp() * 1000
print(f"Program processed in {round(end_time - start_time, 0)} miliseconds.")
run_delta = datetime.now() - start_time
print(f"Program processed in {precisedelta(run_delta, minimum_unit='microseconds')}.")

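Both the removed generate_new_readme in main.py and the new GitHubManager._generate_new_readme replace everything between the section markers with a regular expression. A minimal sketch with a hypothetical README string and the default section name:

```python
import re

section = "waka"  # INPUT_SECTION_NAME
start, end = f"<!--START_SECTION:{section}-->", f"<!--END_SECTION:{section}-->"
readme = f"# Profile\n{start}\nold stats\n{end}\n"

# Mirrors listReg / _README_REGEX: swap whatever sits between the markers.
print(re.sub(f"{start}[\\s\\S]+{end}", f"{start}\nnew stats\n{end}", readme))
```
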
sources/manager_download.py
@@ -1,3 +1,4 @@
from asyncio import Task
from hashlib import md5
from json import dumps
from string import Template
@@ -5,11 +6,15 @@ from typing import Awaitable, Dict, Callable, Optional, List, Tuple

from httpx import AsyncClient
from yaml import safe_load
from github import AuthenticatedUser

from manager_environment import EnvironmentManager as EM
from manager_github import GitHubManager as GHM

GITHUB_API_QUERIES = {
"repositories_contributed_to": """
# Query to collect info about all user repositories, including: is it a fork, name and owner login.
# NB! Query includes information about recent repositories only (apparently, contributed within a year).
"repos_contributed_to": """
{
user(login: "$username") {
repositoriesContributedTo(orderBy: {field: CREATED_AT, direction: DESC}, $pagination, includeUserRepositories: true) {
@@ -27,7 +32,9 @@ GITHUB_API_QUERIES = {
}
}
}""",
"repository_committed_dates": """
# Query to collect info about all commits in user repositories, including: commit date.
# NB! Query includes information about repositories owned by user only.
"repo_committed_dates": """
{
repository(owner: "$owner", name: "$name") {
defaultBranchRef {
@@ -47,6 +54,8 @@ GITHUB_API_QUERIES = {
}
}
}""",
# Query to collect info about all repositories user created or collaborated on, including: name, primary language and owner login.
# NB! Query doesn't include information about repositories user contributed to via pull requests.
"user_repository_list": """
{
user(login: "$username") {
@@ -68,7 +77,8 @@ GITHUB_API_QUERIES = {
}
}
""",
"repository_branches_list": """
# Query to collect info about branches in the given repository, including: names.
"repo_branch_list": """
{
repository(owner: "$owner", name: "$name") {
refs(refPrefix: "refs/heads/", orderBy: {direction: DESC, field: TAG_COMMIT_DATE}, $pagination) {
@@ -83,7 +93,8 @@ GITHUB_API_QUERIES = {
}
}
""",
"repository_branch_commit_list": """
# Query to collect info about user commits to given repository, including: commit date, additions and deletions numbers.
"repo_commit_list": """
{
repository(owner: "$owner", name: "$name") {
ref(qualifiedName: "refs/heads/$branch") {
@@ -107,27 +118,25 @@ GITHUB_API_QUERIES = {
}
}
}
"""
""",
}

async def init_download_manager(waka_key: str, github_key: str, user: AuthenticatedUser):
async def init_download_manager():
"""
Initialize download manager:
- Setup headers for GitHub GraphQL requests.
- Launch static queries in background.
:param waka_key: WakaTime API token.
:param github_key: GitHub API token.
:param user: GitHub current user info.
"""
await DownloadManager.load_remote_resources({
await DownloadManager.load_remote_resources(
{
"linguist": "https://cdn.jsdelivr.net/gh/github/linguist@master/lib/linguist/languages.yml",
"waka_latest": f"https://wakatime.com/api/v1/users/current/stats/last_7_days?api_key={waka_key}",
"waka_all": f"https://wakatime.com/api/v1/users/current/all_time_since_today?api_key={waka_key}",
"github_stats": f"https://github-contributions.vercel.app/api/v1/{user.login}"
}, {
"Authorization": f"Bearer {github_key}"
})
"waka_latest": f"https://wakatime.com/api/v1/users/current/stats/last_7_days?api_key={EM.WAKATIME_API_KEY}",
"waka_all": f"https://wakatime.com/api/v1/users/current/all_time_since_today?api_key={EM.WAKATIME_API_KEY}",
"github_stats": f"https://github-contributions.vercel.app/api/v1/{GHM.USER.login}",
},
{"Authorization": f"Bearer {EM.GH_TOKEN}"},
)

class DownloadManager:
@@ -141,6 +150,7 @@ class DownloadManager:
DownloadManager launches all static queries asynchronously upon initialization and caches their results.
It also executes dynamic queries upon request and caches result.
"""

_client = AsyncClient(timeout=60.0)
_REMOTE_RESOURCES_CACHE = dict()

@@ -155,6 +165,18 @@ class DownloadManager:
DownloadManager._REMOTE_RESOURCES_CACHE[resource] = DownloadManager._client.get(url)
DownloadManager._client.headers = github_headers

@staticmethod
async def close_remote_resources():
"""
Close DownloadManager and cancel all un-awaited static web queries.
Await all queries that could not be cancelled.
"""
for resource in DownloadManager._REMOTE_RESOURCES_CACHE.values():
if isinstance(resource, Task):
resource.cancel()
elif isinstance(resource, Awaitable):
await resource

@staticmethod
async def _get_remote_resource(resource: str, convertor: Optional[Callable[[bytes], Dict]]) -> Dict:
"""
@@ -203,9 +225,7 @@ class DownloadManager:
:param kwargs: Parameters for substitution of variables in dynamic query.
:return: Response JSON dictionary.
"""
res = await DownloadManager._client.post("https://api.github.com/graphql", json={
"query": Template(GITHUB_API_QUERIES[query]).substitute(kwargs)
})
res = await DownloadManager._client.post("https://api.github.com/graphql", json={"query": Template(GITHUB_API_QUERIES[query]).substitute(kwargs)})
if res.status_code == 200:
return res.json()
else:
@@ -244,7 +264,7 @@ class DownloadManager:
:param kwargs: Parameters for substitution of variables in dynamic query.
:return: Response JSON dictionary.
"""
initial_query_response = await DownloadManager._fetch_graphql_query(query, **kwargs, pagination=f"first: 100")
initial_query_response = await DownloadManager._fetch_graphql_query(query, **kwargs, pagination="first: 100")
page_list, page_info = DownloadManager._find_pagination_and_data_list(initial_query_response)
while page_info["hasNextPage"]:
query_response = await DownloadManager._fetch_graphql_query(query, **kwargs, pagination=f'first: 100, after: "{page_info["endCursor"]}"')

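A sketch of how DownloadManager turns one of the GITHUB_API_QUERIES templates into a request body; the query text is abbreviated here, not the full query from the file:

```python
from string import Template

# Abbreviated stand-in for GITHUB_API_QUERIES["repos_contributed_to"].
query = '{ user(login: "$username") { repositoriesContributedTo($pagination) { nodes { name } } } }'

body = {"query": Template(query).substitute(username="octocat", pagination="first: 100")}
# _fetch_graphql_query posts such a body to https://api.github.com/graphql and, while
# hasNextPage is true, repeats with pagination='first: 100, after: "<endCursor>"'.
print(body["query"])
```
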
45
sources/manager_environment.py
Normal file
45
sources/manager_environment.py
Normal file
@@ -0,0 +1,45 @@
from os import getenv, environ


class EnvironmentManager:
    """
    Class for handling all environmental variables used by the action.
    There are only two required variables: `INPUT_GH_TOKEN` and `INPUT_WAKATIME_API_KEY`.
    The others have a provided default value.
    For all boolean variables a 'truthy'-list is checked (not only true/false, but also 1, t, y and yes are accepted).
    List variable `IGNORED_REPOS` is split and parsed.
    Integer variable `SYMBOL_VERSION` is parsed.
    """

    _TRUTHY = ["true", "1", "t", "y", "yes"]

    GH_TOKEN = environ["INPUT_GH_TOKEN"]
    WAKATIME_API_KEY = environ["INPUT_WAKATIME_API_KEY"]

    SECTION_NAME = getenv("INPUT_SECTION_NAME", "waka")
    BRANCH_NAME = getenv("INPUT_PUSH_BRANCH_NAME", "")

    SHOW_OS = getenv("INPUT_SHOW_OS", "False").lower() in _TRUTHY
    SHOW_PROJECTS = getenv("INPUT_SHOW_PROJECTS", "True").lower() in _TRUTHY
    SHOW_EDITORS = getenv("INPUT_SHOW_EDITORS", "True").lower() in _TRUTHY
    SHOW_TIMEZONE = getenv("INPUT_SHOW_TIMEZONE", "True").lower() in _TRUTHY
    SHOW_COMMIT = getenv("INPUT_SHOW_COMMIT", "True").lower() in _TRUTHY
    SHOW_LANGUAGE = getenv("INPUT_SHOW_LANGUAGE", "True").lower() in _TRUTHY
    SHOW_LINES_OF_CODE = getenv("INPUT_SHOW_LINES_OF_CODE", "False").lower() in _TRUTHY
    SHOW_LANGUAGE_PER_REPO = getenv("INPUT_SHOW_LANGUAGE_PER_REPO", "True").lower() in _TRUTHY
    SHOW_LOC_CHART = getenv("INPUT_SHOW_LOC_CHART", "True").lower() in _TRUTHY
    SHOW_DAYS_OF_WEEK = getenv("INPUT_SHOW_DAYS_OF_WEEK", "True").lower() in _TRUTHY
    SHOW_PROFILE_VIEWS = getenv("INPUT_SHOW_PROFILE_VIEWS", "True").lower() in _TRUTHY
    SHOW_SHORT_INFO = getenv("INPUT_SHOW_SHORT_INFO", "True").lower() in _TRUTHY
    SHOW_UPDATED_DATE = getenv("INPUT_SHOW_UPDATED_DATE", "True").lower() in _TRUTHY
    SHOW_TOTAL_CODE_TIME = getenv("INPUT_SHOW_TOTAL_CODE_TIME", "True").lower() in _TRUTHY

    COMMIT_BY_ME = getenv("INPUT_COMMIT_BY_ME", "False").lower() in _TRUTHY
    COMMIT_MESSAGE = getenv("INPUT_COMMIT_MESSAGE", "Updated with Dev Metrics")
    COMMIT_USERNAME = getenv("INPUT_COMMIT_USERNAME", "")
    COMMIT_EMAIL = getenv("INPUT_COMMIT_EMAIL", "")

    LOCALE = getenv("INPUT_LOCALE", "en")
    UPDATED_DATE_FORMAT = getenv("INPUT_UPDATED_DATE_FORMAT", "%d/%m/%Y %H:%M:%S")
    IGNORED_REPOS = getenv("INPUT_IGNORED_REPOS", "").replace(" ", "").split(",")
    SYMBOL_VERSION = int(getenv("INPUT_SYMBOL_VERSION", "1"))
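A rough sketch of how these parsing rules behave when the module is imported for a local run. The token values are placeholders and the os.environ assignments merely stand in for the .env file; since the class attributes are evaluated at import time, the variables must be set before the import (assumes the script runs from the sources directory).

# Illustrative only: the environment must exist before manager_environment is imported.
import os

os.environ["INPUT_GH_TOKEN"] = "ghp_example"            # required, placeholder value
os.environ["INPUT_WAKATIME_API_KEY"] = "waka_example"   # required, placeholder value
os.environ["INPUT_SHOW_OS"] = "Yes"                     # any of true/1/t/y/yes (case-insensitive) counts as True
os.environ["INPUT_IGNORED_REPOS"] = "dotfiles, sandbox" # spaces stripped, then split on commas
os.environ["INPUT_SYMBOL_VERSION"] = "1"                # parsed to int

from manager_environment import EnvironmentManager as EM

print(EM.SHOW_OS)         # True
print(EM.IGNORED_REPOS)   # ['dotfiles', 'sandbox']
print(EM.SYMBOL_VERSION)  # 1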
113
sources/manager_github.py
Normal file
@@ -0,0 +1,113 @@
from base64 import b64decode
from re import sub

from github import Github, AuthenticatedUser, Repository, ContentFile, InputGitAuthor, UnknownObjectException

from manager_environment import EnvironmentManager as EM


def init_github_manager():
    """
    Initialize GitHub manager.
    Current user, user readme repo and readme file are downloaded.
    """
    GitHubManager.prepare_github_env()
    print(f"Current user: {GitHubManager.USER.login}")


class GitHubManager:
    USER: AuthenticatedUser
    REPO: Repository
    _README: ContentFile
    _README_CONTENTS: str

    _START_COMMENT = f"<!--START_SECTION:{EM.SECTION_NAME}-->"
    _END_COMMENT = f"<!--END_SECTION:{EM.SECTION_NAME}-->"
    _README_REGEX = f"{_START_COMMENT}[\\s\\S]+{_END_COMMENT}"

    @staticmethod
    def prepare_github_env():
        """
        Download and store for future use:
        - Current GitHub user.
        - Named repo of the user [username]/[username].
        - README.md file of this repo.
        - Parsed contents of the file.
        """
        github = Github(EM.GH_TOKEN)
        GitHubManager.USER = github.get_user()
        GitHubManager.REPO = github.get_repo(f"{GitHubManager.USER.login}/{GitHubManager.USER.login}")
        GitHubManager._README = GitHubManager.REPO.get_readme()
        GitHubManager._README_CONTENTS = str(b64decode(GitHubManager._README.content), "utf-8")

    @staticmethod
    def _generate_new_readme(stats: str) -> str:
        """
        Generates new README.md file, inserts its contents between start and end tags.

        :param stats: contents to insert.
        :returns: new README.md string.
        """
        readme_stats = f"{GitHubManager._START_COMMENT}\n{stats}\n{GitHubManager._END_COMMENT}"
        return sub(GitHubManager._README_REGEX, readme_stats, GitHubManager._README_CONTENTS)

    @staticmethod
    def _get_author() -> InputGitAuthor:
        """
        Gets GitHub commit author specified by environmental variables.
        It is the user himself or a 'readme-bot'.

        :returns: Commit author.
        """
        if EM.COMMIT_BY_ME:
            return InputGitAuthor(GitHubManager.USER.login or EM.COMMIT_USERNAME, GitHubManager.USER.email or EM.COMMIT_EMAIL)
        else:
            return InputGitAuthor(EM.COMMIT_USERNAME or "readme-bot", EM.COMMIT_EMAIL or "41898282+github-actions[bot]@users.noreply.github.com")

    @staticmethod
    def branch() -> str:
        """
        Gets the name of the branch to commit to, as specified by environmental variables.
        It is either the default branch (usually 'main' or 'master') or a branch set by the user.

        :returns: Branch name.
"""
|
||||
return GitHubManager.REPO.default_branch if EM.BRANCH_NAME == "" else EM.BRANCH_NAME
|
||||
|
||||
@staticmethod
|
||||
def update_readme(stats: str) -> bool:
|
||||
"""
|
||||
Updates readme with given data if necessary.
|
||||
Uses commit author, commit message and branch name specified by environmental variables.
|
||||
|
||||
:returns: whether the README.md file was updated or not.
|
||||
"""
|
||||
new_readme = GitHubManager._generate_new_readme(stats)
|
||||
if new_readme != GitHubManager._README_CONTENTS:
|
||||
GitHubManager.REPO.update_file(
|
||||
path=GitHubManager._README.path,
|
||||
message=EM.COMMIT_MESSAGE,
|
||||
content=new_readme,
|
||||
sha=GitHubManager._README.sha,
|
||||
branch=GitHubManager.branch(),
|
||||
committer=GitHubManager._get_author(),
|
||||
)
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
@staticmethod
|
||||
def update_chart(chart_path: str):
|
||||
"""
|
||||
Updates lines of code chart.
|
||||
Uses commit author, commit message and branch name specified by environmental variables.
|
||||
|
||||
:param chart_path: path to saved lines of code chart.
|
||||
"""
|
||||
with open(chart_path, "rb") as input_file:
|
||||
data = input_file.read()
|
||||
try:
|
||||
contents = GitHubManager.REPO.get_contents(chart_path)
|
||||
GitHubManager.REPO.update_file(contents.path, "Charts Updated", data, contents.sha, committer=GitHubManager._get_author())
|
||||
except UnknownObjectException:
|
||||
GitHubManager.REPO.create_file(chart_path, "Charts Added", data, committer=GitHubManager._get_author())
|
||||
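To make the marker-based substitution concrete, a small self-contained sketch of the same idea _generate_new_readme uses: everything between the START/END comments is swapped out, the rest of the README is untouched. The README text and stats string below are invented.

# Rough illustration of the section-replacement pattern (made-up README content).
from re import sub

section = "waka"
start, end = f"<!--START_SECTION:{section}-->", f"<!--END_SECTION:{section}-->"
readme = f"# Hi!\n{start}\nold stats\n{end}\nFooter"

stats = "Total coding time: 42 hrs"
new_readme = sub(f"{start}[\\s\\S]+{end}", f"{start}\n{stats}\n{end}", readme)
print(new_readme)  # the block between the markers now holds the new stats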
43
sources/manager_localization.py
Normal file
@@ -0,0 +1,43 @@
from json import load
from os.path import join, dirname
from typing import Dict

from manager_environment import EnvironmentManager as EM


def init_localization_manager():
    """
    Initialize localization manager.
    Load GUI translations JSON file.
    """
    LocalizationManager.load_localization("translation.json")


class LocalizationManager:
    """
    Class for handling localization (and maybe other file IO in future).
    Stores localization in dictionary.
    """

    _LOCALIZATION: Dict[str, str] = dict()

    @staticmethod
    def load_localization(file: str):
        """
        Read localization file and store locale defined with environmental variable.

        :param file: Localization file path, relative to the current file (in sources root).
"""
|
||||
with open(join(dirname(__file__), file), encoding="utf-8") as config_file:
|
||||
data = load(config_file)
|
||||
LocalizationManager._LOCALIZATION = data[EM.LOCALE]
|
||||
|
||||
@staticmethod
|
||||
def t(key: str) -> str:
|
||||
"""
|
||||
Translate string to current localization.
|
||||
|
||||
:param key: Localization key.
|
||||
:returns: Translation string.
|
||||
"""
|
||||
return LocalizationManager._LOCALIZATION[key]
|
||||
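A hypothetical sketch of the locale-keyed shape that data[EM.LOCALE] implies for translation.json, and of the dictionary lookup that t() performs. The keys and translation strings below are invented; only the layout is implied by the code above.

# Self-contained illustration; the JSON keys and values are made up.
from json import loads

translations = loads('{"en": {"night_owl": "I am a night owl"}, "de": {"night_owl": "Ich bin eine Nachteule"}}')

locale = "en"  # would come from INPUT_LOCALE in the action
localization = translations[locale]
print(localization["night_owl"])  # t() above performs exactly this dictionary lookup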
55
sources/yearly_commit_calculator.py
Normal file
@@ -0,0 +1,55 @@
from re import search
from datetime import datetime
from typing import Dict

from manager_download import DownloadManager as DM
from manager_environment import EnvironmentManager as EM
from manager_github import GitHubManager as GHM


async def calculate_yearly_commit_data(repositories: Dict) -> Dict:
    """
    Calculate commit data by years.
    Commit data includes difference between contribution additions and deletions in each quarter of each recorded year.

    :param repositories: user repositories info dictionary.
    :returns: Commit quarter yearly data dictionary.
    """
    yearly_data = dict()
    total = len(repositories["data"]["user"]["repositories"]["nodes"])
    for ind, repo in enumerate(repositories["data"]["user"]["repositories"]["nodes"]):
        if repo["name"] not in EM.IGNORED_REPOS:
            print(f"{ind + 1}/{total}", "Retrieving repo:", repo["owner"]["login"], repo["name"])
            await update_yearly_data_with_commit_stats(repo, yearly_data)
    return yearly_data


async def update_yearly_data_with_commit_stats(repo_details: Dict, yearly_data: Dict):
    """
    Updates yearly commit data with commits from given repository.
    Skips the update if the repository data cannot be retrieved.

    :param repo_details: Dictionary with information about the given repository.
    :param yearly_data: Yearly data dictionary to update.
    """
    owner = repo_details["owner"]["login"]
    branch_data = await DM.get_remote_graphql("repo_branch_list", owner=owner, name=repo_details["name"])
    if branch_data["data"]["repository"] is None:
        print(f"\tSkipping repo: {repo_details['name']}")
        return dict()

    for branch in branch_data["data"]["repository"]["refs"]["nodes"]:
        commit_data = await DM.get_remote_graphql("repo_commit_list", owner=owner, name=repo_details["name"], branch=branch["name"], id=GHM.USER.node_id)
        for commit in commit_data["data"]["repository"]["ref"]["target"]["history"]["nodes"]:
            date = search(r"\d+-\d+-\d+", commit["committedDate"]).group()
            curr_year = datetime.fromisoformat(date).year
            quarter = (datetime.fromisoformat(date).month - 1) // 3 + 1

            if repo_details["primaryLanguage"] is not None:
                if curr_year not in yearly_data:
                    yearly_data[curr_year] = dict()
                if quarter not in yearly_data[curr_year]:
                    yearly_data[curr_year][quarter] = dict()
                if repo_details["primaryLanguage"]["name"] not in yearly_data[curr_year][quarter]:
                    yearly_data[curr_year][quarter][repo_details["primaryLanguage"]["name"]] = 0
                yearly_data[curr_year][quarter][repo_details["primaryLanguage"]["name"]] += commit["additions"] - commit["deletions"]
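As a concrete illustration of the year/quarter/language bucketing performed above, a short worked example with invented commit data; setdefault stands in for the explicit membership checks, and the resulting nesting mirrors the yearly_data structure built by the calculator.

# Worked example of the quarter bucketing (dates, languages and counts are made up).
from datetime import datetime

commits = [
    ("2023-02-11", "Python", 120, 30),   # (2 - 1) // 3 + 1 == 1 -> Q1
    ("2023-08-02", "Python", 50, 5),     # Q3
    ("2023-08-20", "Kotlin", 300, 100),  # Q3, different primary language
]

yearly_data = {}
for date, language, additions, deletions in commits:
    year = datetime.fromisoformat(date).year
    quarter = (datetime.fromisoformat(date).month - 1) // 3 + 1
    yearly_data.setdefault(year, {}).setdefault(quarter, {}).setdefault(language, 0)
    yearly_data[year][quarter][language] += additions - deletions

print(yearly_data)  # {2023: {1: {'Python': 90}, 3: {'Python': 45, 'Kotlin': 200}}}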