documentation added, black linter added

.github/workflows/build_image.yml
@@ -6,7 +6,7 @@ on:
 jobs:
   publish-server-image:
     name: Publish 'waka-readme-stats' image
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-latest
 
     steps:
       - name: Checkout 🛎️

.github/workflows/codestyle.yml
@@ -6,7 +6,7 @@ on:
 jobs:
   lint:
     name: Run codestyle check
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-20.04
 
     steps:
       - name: Checkout 🛎️
@@ -21,4 +21,4 @@ jobs:
         run: pip install -r requirements.txt
 
       - name: Run Codestyle ✔️
-        run: flake8 --max-line-length=160 --exclude venv,assets .
+        run: flake8 --max-line-length=160 --exclude venv,assets . && black --line-length=160 --check --exclude='/venv/|/assets/' .

Makefile
@@ -38,9 +38,10 @@ run-container:
 .PHONY: run-container
 
 
-lint:
+lint: venv
 	@ # Run flake8 linter
 	flake8 --max-line-length=160 --exclude venv,assets .
+	black --line-length=160 --check --exclude='/venv/|/assets/' .
 .PHONY: lint
 
 clean:

requirements.txt
@@ -13,5 +13,6 @@ numpy~=1.24
 httpx~=0.23
 PyYAML~=6.0
 
-# Codestyle checking module:
+# Codestyle checking modules:
 flake8~=6.0
+black~=23.1
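
With this commit, black (pinned as black~=23.1, line length 160) runs as a second codestyle gate next to flake8, both in the CI workflow and in the Makefile lint target. flake8 only reports rule violations, while `black --check` fails whenever the formatter would rewrite a file. A minimal illustration with a hypothetical snippet, not taken from the repository:

    # Accepted by flake8 (no rule is violated), but rejected by `black --check`:
    stats = {'name': 'Python', 'percent': 87.5}

    # What `black --line-length=160` writes instead (string quotes are normalized to double quotes):
    stats = {"name": "Python", "percent": 87.5}

Running `make lint` therefore now reports both kinds of findings in one pass.
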
@@ -7,8 +7,8 @@ import matplotlib.pyplot as plt
 from manager_download import DownloadManager as DM
 
 
-MAX_LANGUAGES = 5
-GRAPH_PATH = "assets/bar_graph.png"
+MAX_LANGUAGES = 5  # Number of top languages to add to chart, for each year quarter
+GRAPH_PATH = "assets/bar_graph.png"  # Chart saving path.
 
 
 async def create_loc_graph(yearly_data: Dict, save_path: str):
@@ -27,7 +27,7 @@ async def create_loc_graph(yearly_data: Dict, save_path: str):
     languages_all_loc = dict()
     for i, y in enumerate(sorted(yearly_data.keys())):
         for q in yearly_data[y].keys():
-            langs = sorted(yearly_data[y][q].keys(), key=lambda l: yearly_data[y][q][l], reverse=True)[0:MAX_LANGUAGES]
+            langs = sorted(yearly_data[y][q].keys(), key=lambda n: yearly_data[y][q][n], reverse=True)[0:MAX_LANGUAGES]
 
             for lang in langs:
                 if lang not in languages_all_loc:
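
Renaming the sort key's lambda argument from `l` to `n` is a flake8 fix: rule E741 flags `l`, `O` and `I` as ambiguous variable names. A standalone sketch of the same top-languages selection, with invented quarter data:

    # Hypothetical data: language -> lines of code contributed in one quarter.
    quarter = {"Python": 1200, "Go": 300, "Rust": 4500, "Shell": 90, "C": 700, "Lua": 10}
    MAX_LANGUAGES = 5

    # `lambda l: ...` would trigger flake8 E741; `n` carries the same meaning without the warning.
    langs = sorted(quarter.keys(), key=lambda n: quarter[n], reverse=True)[0:MAX_LANGUAGES]
    print(langs)  # ['Rust', 'Python', 'C', 'Go', 'Shell']
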
graphics_list_formatter.py
@@ -10,24 +10,39 @@ from manager_github import GitHubManager as GHM
 from manager_localization import LocalizationManager as LM
 
 
-DAY_TIME_EMOJI = ["🌞", "🌆", "🌃", "🌙"]
-DAY_TIME_NAMES = ["Morning", "Daytime", "Evening", "Night"]
-WEEK_DAY_NAMES = ["Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday", "Sunday"]
+DAY_TIME_EMOJI = ["🌞", "🌆", "🌃", "🌙"]  # Emojis, representing different times of day.
+DAY_TIME_NAMES = ["Morning", "Daytime", "Evening", "Night"]  # Localization identifiers for different times of day.
+WEEK_DAY_NAMES = ["Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday", "Sunday"]  # Localization identifiers for different days of week.
 
 
 class Symbol(Enum):
+    """
+    Symbol version enum.
+    Allows to retrieve symbols pairs by calling `Symbol.get_symbols(version)`.
+    """
 
     VERSION_1 = "█", "░"
     VERSION_2 = "⣿", "⣀"
     VERSION_3 = "⬛", "⬜"
 
     @staticmethod
     def get_symbols(version: int) -> Tuple[str, str]:
+        """
+        Retrieves symbols pair for specified version.
+
+        :param version: Required symbols version.
+        :returns: Two strings for filled and empty symbol value in a tuple.
+        """
         return Symbol[f"VERSION_{version}"].value
 
 
 def make_graph(percent: float):
     """
-    Make progress graph from API graph
+    Make text progress bar.
+    Length of the progress bar is 25 characters.
+
+    :param percent: Completion percent of the progress bar.
+    :return: The string progress bar representation.
     """
     done_block, empty_block = Symbol.get_symbols(EM.SYMBOL_VERSION)
     percent_quart = round(percent / 4)
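
The new docstrings describe behaviour that is easy to check interactively. A self-contained sketch of how the two pieces fit together; the bar-building body below is a simplified assumption (one block per 4%, version hardcoded instead of EM.SYMBOL_VERSION), only the 25-character length and the symbol pairs come from the documented code:

    from enum import Enum
    from typing import Tuple

    class Symbol(Enum):
        VERSION_1 = "█", "░"
        VERSION_2 = "⣿", "⣀"
        VERSION_3 = "⬛", "⬜"

        @staticmethod
        def get_symbols(version: int) -> Tuple[str, str]:
            return Symbol[f"VERSION_{version}"].value

    def make_graph(percent: float) -> str:
        # Simplified assumption of the bar body: 25 blocks total, one filled block per 4%.
        done_block, empty_block = Symbol.get_symbols(1)
        percent_quart = round(percent / 4)
        return done_block * percent_quart + empty_block * (25 - percent_quart)

    print(Symbol.get_symbols(2))  # ('⣿', '⣀')
    print(make_graph(42.0))       # ██████████░░░░░░░░░░░░░░░  (10 of 25 blocks filled)
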
@@ -36,7 +51,20 @@ def make_graph(percent: float):
 
 def make_list(data: List = None, names: List[str] = None, texts: List[str] = None, percents: List[float] = None, top_num: int = 5, sort: bool = True) -> str:
     """
-    Make List
+    Make list of text progress bars with supportive info.
+    Each row has the following structure: [name of the measure] [quantity description (with words)] [progress bar] [total percentage].
+    Name of the measure: up to 25 characters.
+    Quantity description: how many _things_ were found, up to 20 characters.
+    Progress bar: measure percentage, 25 characters.
+    Total percentage: floating point percentage.
+
+    :param data: list of dictionaries, each of them containing a measure (name, text and percent).
+    :param names: list of names (names of measure), overloads data if defined.
+    :param texts: list of texts (quantity descriptions), overloads data if defined.
+    :param percents: list of percents (total percentages), overloads data if defined.
+    :param top_num: how many measures to display, default: 5.
+    :param sort: if measures should be sorted by total percentage, default: True.
+    :returns: The string representation of the list.
     """
     if data is not None:
         names = [value for item in data for key, value in item.items() if key == "name"] if names is None else names
@@ -46,10 +74,16 @@ def make_list(data: List = None, names: List[str] = None, texts: List[str] = None, percents: List[float] = None, top_num: int = 5, sort: bool = True) -> str:
     data = list(zip(names, texts, percents))
     top_data = sorted(data[:top_num], key=lambda record: record[2]) if sort else data[:top_num]
     data_list = [f"{n[:25]}{' ' * (25 - len(n))}{t}{' ' * (20 - len(t))}{make_graph(p)} {p:05.2f} % " for n, t, p in top_data]
-    return '\n'.join(data_list)
+    return "\n".join(data_list)
 
 
 async def make_commit_day_time_list(time_zone: str) -> str:
+    """
+    Calculate commit-related info, how many commits were made, and at what time of day and day of week.
+
+    :param time_zone: User time zone.
+    :returns: string representation of statistics.
+    """
     stats = str()
 
     result = await DM.get_remote_graphql("repos_contributed_to", username=GHM.USER.login)
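
The documented row layout (25-character name column, 20-character text column, 25-character bar, then the percentage) can be reproduced with plain string formatting. A self-contained sketch with hypothetical data, using a simplified bar in place of make_graph:

    def make_bar(percent: float) -> str:
        # Simplified stand-in for make_graph: 25 blocks, one per 4%.
        filled = round(percent / 4)
        return "█" * filled + "░" * (25 - filled)

    names = ["Python", "Markdown"]
    texts = ["20 hrs 10 mins", "1 hr 5 mins"]
    percents = [91.45, 8.55]

    rows = [f"{n[:25]}{' ' * (25 - len(n))}{t}{' ' * (20 - len(t))}{make_bar(p)} {p:05.2f} % " for n, t, p in zip(names, texts, percents)]
    print("\n".join(rows))
    # Python                   20 hrs 10 mins      ███████████████████████░░ 91.45 %
    # Markdown                 1 hr 5 mins         ██░░░░░░░░░░░░░░░░░░░░░░░ 08.55 %
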
@@ -62,8 +96,8 @@ async def make_commit_day_time_list(time_zone: str) -> str:
         result = await DM.get_remote_graphql("repo_committed_dates", owner=repository["owner"]["login"], name=repository["name"], id=GHM.USER.node_id)
         committed_dates = result["data"]["repository"]["defaultBranchRef"]["target"]["history"]["edges"]
 
-        for committedDate in committed_dates:
-            local_date = datetime.strptime(committedDate["node"]["committedDate"], "%Y-%m-%dT%H:%M:%SZ")
+        for committed_date in committed_dates:
+            local_date = datetime.strptime(committed_date["node"]["committedDate"], "%Y-%m-%dT%H:%M:%SZ")
             date = local_date.replace(tzinfo=utc).astimezone(timezone(time_zone))
 
             day_times[date.hour // 6] += 1
@@ -74,14 +108,14 @@ async def make_commit_day_time_list(time_zone: str) -> str:
     day_times = day_times[1:] + day_times[:1]
 
     dt_names = [f"{DAY_TIME_EMOJI[i]} {LM.t(DAY_TIME_NAMES[i])}" for i in range(len(day_times))]
-    dt_texts = [f'{day_time} commits' for day_time in day_times]
+    dt_texts = [f"{day_time} commits" for day_time in day_times]
     dt_percents = [round((day_time / sum_day) * 100, 2) for day_time in day_times]
     title = LM.t("I am an Early") if sum(day_times[0:2]) >= sum(day_times[2:4]) else LM.t("I am a Night")
     stats += f"**{title}** \n\n```text\n{make_list(names=dt_names, texts=dt_texts, percents=dt_percents, top_num=7, sort=False)}\n```\n"
 
     if EM.SHOW_DAYS_OF_WEEK:
         wd_names = [LM.t(week_day) for week_day in WEEK_DAY_NAMES]
-        wd_texts = [f'{week_day} commits' for week_day in week_days]
+        wd_texts = [f"{week_day} commits" for week_day in week_days]
         wd_percents = [round((week_day / sum_week) * 100, 2) for week_day in week_days]
         title = LM.t("I am Most Productive on") % wd_names[wd_percents.index(max(wd_percents))]
         stats += f"📅 **{title}** \n\n```text\n{make_list(names=wd_names, texts=wd_texts, percents=wd_percents, top_num=7, sort=False)}\n```\n"
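
Each commit timestamp is converted to the user's timezone and sorted into one of four six-hour buckets by `date.hour // 6`; the later rotation `day_times[1:] + day_times[:1]` just moves the morning bucket to the front before rendering. A runnable sketch of one commit going through that pipeline, assuming (as the call pattern suggests) that `timezone` and `utc` come from pytz:

    from datetime import datetime
    from pytz import timezone, utc

    day_times = [0] * 4  # buckets for hours 0-5, 6-11, 12-17, 18-23

    committed = datetime.strptime("2023-02-18T02:30:00Z", "%Y-%m-%dT%H:%M:%SZ")
    local = committed.replace(tzinfo=utc).astimezone(timezone("Asia/Dubai"))  # 06:30 local time
    day_times[local.hour // 6] += 1  # lands in the 6-11 (morning) bucket

    # Rotation used before rendering, so the list starts with Morning instead of Night:
    day_times = day_times[1:] + day_times[:1]
    print(day_times)  # [1, 0, 0, 0]
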
@@ -90,6 +124,12 @@ async def make_commit_day_time_list(time_zone: str) -> str:
 
 
 def make_language_per_repo_list(repositories: Dict) -> str:
+    """
+    Calculate language-related info, how many repositories in what language user has.
+
+    :param repositories: User repositories.
+    :returns: string representation of statistics.
+    """
     language_count = dict()
     repos_with_language = [repo for repo in repositories["data"]["user"]["repositories"]["edges"] if repo["node"]["primaryLanguage"] is not None]
     for repo in repos_with_language:

@@ -7,7 +7,7 @@ from urllib.parse import quote
 
 from humanize import intword, naturalsize, intcomma, precisedelta
 
-from manager_download import init_download_manager, DownloadManager as DM, close_download_manager
+from manager_download import init_download_manager, DownloadManager as DM
 from manager_environment import EnvironmentManager as EM
 from manager_github import init_github_manager, GitHubManager as GHM
 from manager_localization import init_localization_manager, LocalizationManager as LM
@@ -17,6 +17,12 @@ from graphics_list_formatter import make_list, make_commit_day_time_list, make_language_per_repo_list
 
 
 async def get_waka_time_stats() -> str:
+    """
+    Collects user info from wakatime.
+    Info includes most common commit time, timezone, language, editors, projects and OSs.
+
+    :returns: String representation of the info.
+    """
     stats = str()
 
     data = await DM.get_remote_json("waka_latest")
@@ -52,7 +58,13 @@ async def get_waka_time_stats() -> str:
     return stats
 
 
-async def get_short_github_info():
+async def get_short_github_info() -> str:
+    """
+    Collects user info from GitHub public profile.
+    The stats include: disk usage, contributions number, whether the user has opted to hire, public and private repositories number.
+
+    :returns: String representation of the info.
+    """
     stats = f"**🐱 {LM.t('My GitHub Data')}** \n\n"
 
     if GHM.USER.disk_usage is None:
@@ -64,7 +76,7 @@
 
     data = await DM.get_remote_json("github_stats")
     if len(data["years"]) > 0:
-        contributions = LM.t('Contributions in the year') % (intcomma(data["years"][0]['total']), data["years"][0]['year'])
+        contributions = LM.t("Contributions in the year") % (intcomma(data["years"][0]["total"]), data["years"][0]["year"])
         stats += f"> 🏆 {contributions}\n > \n"
 
     opted_to_hire = GHM.USER.hireable
@@ -90,7 +102,10 @@
 
 async def get_stats() -> str:
     """
-    Gets API data and returns markdown progress
+    Creates new README.md content from all the acquired statistics from all places.
+    The readme includes data from wakatime, contributed lines of code number, GitHub profile info and last updated date.
+
+    :returns: String representation of README.md contents.
     """
     stats = str()
     repositories = await DM.get_remote_graphql("user_repository_list", username=GHM.USER.login, id=GHM.USER.node_id)
@@ -125,8 +140,7 @@
         await create_loc_graph(yearly_data, GRAPH_PATH)
         GHM.update_chart(GRAPH_PATH)
         chart_path = f"{GHM.USER.login}/{GHM.USER.login}/{GHM.branch()}/{GRAPH_PATH}"
-        stats += '**' + LM.t('Timeline') + '**\n\n'
-        stats += f"\n\n"
+        stats += f"**{LM.t('Timeline')}**\n\n\n\n"
 
     if EM.SHOW_UPDATED_DATE:
         stats += f"\n Last Updated on {datetime.now().strftime(EM.UPDATED_DATE_FORMAT)} UTC"
@@ -135,16 +149,20 @@
 
 
 async def main():
+    """
+    Application main function.
+    Initializes all managers, collects user info and updates README.md if necessary.
+    """
     init_github_manager()
     await init_download_manager()
     init_localization_manager()
 
     if GHM.update_readme(await get_stats()):
         print("Readme updated!")
-    await close_download_manager()
+    await DM.close_remote_resources()
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     start_time = datetime.now()
     run(main())
     run_delta = datetime.now() - start_time
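
The module-level helper close_download_manager is dropped in favour of calling DM.close_remote_resources() directly, so shutdown now goes through DownloadManager itself. For reference, the entry-point pattern at the bottom of the file (asyncio run plus a wall-clock measurement) and the default date stamp format can be reproduced standalone; the sleep below merely stands in for the real work, and the final print is this sketch's own, not the program's output:

    from asyncio import run, sleep
    from datetime import datetime

    UPDATED_DATE_FORMAT = "%d/%m/%Y %H:%M:%S"  # default of INPUT_UPDATED_DATE_FORMAT

    async def main():
        await sleep(0.1)  # stand-in for collecting stats and updating the readme
        print(f"Last Updated on {datetime.now().strftime(UPDATED_DATE_FORMAT)} UTC")

    start_time = datetime.now()
    run(main())
    run_delta = datetime.now() - start_time
    print(f"Completed in {run_delta}")
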

manager_download.py
@@ -1,3 +1,4 @@
+from asyncio import Task
 from hashlib import md5
 from json import dumps
 from string import Template
@@ -11,6 +12,7 @@ from manager_github import GitHubManager as GHM
 
 
 GITHUB_API_QUERIES = {
+    # Query to collect info about all user repositories, including: is it a fork, name and owner login.
     "repos_contributed_to": """
     {
         user(login: "$username") {
@@ -25,6 +27,7 @@ GITHUB_API_QUERIES = {
             }
         }
     }""",
+    # Query to collect info about all commits in user repositories, including: commit date.
     "repo_committed_dates": """
     {
         repository(owner: "$owner", name: "$name") {
@@ -43,6 +46,7 @@ GITHUB_API_QUERIES = {
             }
         }
     }""",
+    # Query to collect info about all repositories user created or collaborated on, including: name, primary language and owner login.
     "user_repository_list": """
     {
         user(login: "$username") {
@@ -62,6 +66,7 @@ GITHUB_API_QUERIES = {
             }
         }
     """,
+    # Query to collect info about user commits to given repository, including: commit date, additions and deletions numbers.
     "repo_commit_list": """
     {
         repository(owner: "$owner", name: "$name") {
@@ -90,7 +95,7 @@ GITHUB_API_QUERIES = {
                 }
             }
         }
-    """
+    """,
 }
 
 
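
Each entry in GITHUB_API_QUERIES is a string.Template body; before posting, the dynamic query helper substitutes the $-placeholders with keyword arguments. A reduced sketch of that mechanism (the query text here is shortened, not the repository's full query):

    from string import Template

    GITHUB_API_QUERIES = {
        # Shortened stand-in for the real "repos_contributed_to" query body.
        "repos_contributed_to": """
        {
            user(login: "$username") {
                repositoriesContributedTo(first: 100) { nodes { name } }
            }
        }""",
    }

    query = Template(GITHUB_API_QUERIES["repos_contributed_to"]).substitute(username="octocat")
    print(query)  # the $username placeholder is now the literal login
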
@@ -100,23 +105,15 @@ async def init_download_manager():
     - Setup headers for GitHub GraphQL requests.
     - Launch static queries in background.
     """
-    await DownloadManager.load_remote_resources({
-        "linguist": "https://cdn.jsdelivr.net/gh/github/linguist@master/lib/linguist/languages.yml",
-        "waka_latest": f"https://wakatime.com/api/v1/users/current/stats/last_7_days?api_key={EM.WAKATIME_API_KEY}",
-        "waka_all": f"https://wakatime.com/api/v1/users/current/all_time_since_today?api_key={EM.WAKATIME_API_KEY}",
-        "github_stats": f"https://github-contributions.vercel.app/api/v1/{GHM.USER.login}"
-    }, {
-        "Authorization": f"Bearer {EM.GH_TOKEN}"
-    })
+    await DownloadManager.load_remote_resources(
+        {
+            "linguist": "https://cdn.jsdelivr.net/gh/github/linguist@master/lib/linguist/languages.yml",
+            "waka_latest": f"https://wakatime.com/api/v1/users/current/stats/last_7_days?api_key={EM.WAKATIME_API_KEY}",
+            "waka_all": f"https://wakatime.com/api/v1/users/current/all_time_since_today?api_key={EM.WAKATIME_API_KEY}",
+            "github_stats": f"https://github-contributions.vercel.app/api/v1/{GHM.USER.login}",
+        },
+        {"Authorization": f"Bearer {EM.GH_TOKEN}"},
+    )
 
 
-async def close_download_manager():
-    """
-    Initialize download manager:
-    - Setup headers for GitHub GraphQL requests.
-    - Launch static queries in background.
-    """
-    await DownloadManager.close_remote_resources("linguist", "waka_latest", "waka_all", "github_stats")
-
-
 class DownloadManager:
@@ -130,6 +127,7 @@ class DownloadManager:
     DownloadManager launches all static queries asynchronously upon initialization and caches their results.
     It also executes dynamic queries upon request and caches result.
     """
 
     _client = AsyncClient(timeout=60.0)
     _REMOTE_RESOURCES_CACHE = dict()
 
@@ -145,14 +143,16 @@
         DownloadManager._client.headers = github_headers
 
     @staticmethod
-    async def close_remote_resources(*resource: str):
+    async def close_remote_resources():
         """
-        Prepare DownloadManager to launch GitHub API queries and launch all static queries.
-        :param resources: Dictionary of static queries, "IDENTIFIER": "URL".
-        :param github_headers: Dictionary of headers for GitHub API queries.
+        Close DownloadManager and cancel all un-awaited static web queries.
+        Await all queries that could not be cancelled.
         """
-        for resource in [DownloadManager._REMOTE_RESOURCES_CACHE[r] for r in resource if isinstance(DownloadManager._REMOTE_RESOURCES_CACHE[r], Awaitable)]:
-            resource.cancel()
+        for resource in DownloadManager._REMOTE_RESOURCES_CACHE.values():
+            if isinstance(resource, Task):
+                resource.cancel()
+            elif isinstance(resource, Awaitable):
+                await resource
 
     @staticmethod
     async def _get_remote_resource(resource: str, convertor: Optional[Callable[[bytes], Dict]]) -> Dict:
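
close_remote_resources now walks the whole cache: anything already scheduled as an asyncio Task is cancelled, while plain awaitables (coroutines that were never scheduled) are awaited so they do not leak "never awaited" warnings. The same pattern in isolation, with dummy coroutines standing in for the cached downloads:

    import asyncio
    from typing import Awaitable

    async def download(name: str) -> str:
        await asyncio.sleep(0.1)  # pretend this is a slow HTTP request
        return name

    async def main():
        cache = {
            "scheduled": asyncio.create_task(download("scheduled")),  # already running -> cancel it
            "pending": download("pending"),                           # bare coroutine -> await it
        }
        for resource in cache.values():
            if isinstance(resource, asyncio.Task):  # Tasks are also Awaitable, so check them first
                resource.cancel()
            elif isinstance(resource, Awaitable):
                await resource

    asyncio.run(main())
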
@@ -208,9 +208,7 @@
         """
         key = f"{query}_{md5(dumps(kwargs, sort_keys=True).encode('utf-8')).digest()}"
         if key not in DownloadManager._REMOTE_RESOURCES_CACHE:
-            res = await DownloadManager._client.post("https://api.github.com/graphql", json={
-                "query": Template(GITHUB_API_QUERIES[query]).substitute(kwargs)
-            })
+            res = await DownloadManager._client.post("https://api.github.com/graphql", json={"query": Template(GITHUB_API_QUERIES[query]).substitute(kwargs)})
             DownloadManager._REMOTE_RESOURCES_CACHE[key] = res
         else:
             res = DownloadManager._REMOTE_RESOURCES_CACHE[key]

manager_environment.py
@@ -2,35 +2,44 @@ from os import getenv, environ
 
 
 class EnvironmentManager:
-    _TRUTHY = ['true', '1', 't', 'y', 'yes']
-    GH_TOKEN = environ['INPUT_GH_TOKEN']
-    WAKATIME_API_KEY = environ['INPUT_WAKATIME_API_KEY']
+    """
+    Class for handling all environmental variables used by the action.
+    There are only two required variables: `INPUT_GH_TOKEN` and `INPUT_WAKATIME_API_KEY`.
+    The others have a provided default value.
+    For all boolean variables a 'truthy'-list is checked (not only true/false, but also 1, t, y and yes are accepted).
+    List variable `IGNORED_REPOS` is split and parsed.
+    Integer variable `SYMBOL_VERSION` is parsed.
+    """
+
+    _TRUTHY = ["true", "1", "t", "y", "yes"]
+
+    GH_TOKEN = environ["INPUT_GH_TOKEN"]
+    WAKATIME_API_KEY = environ["INPUT_WAKATIME_API_KEY"]
 
     SECTION_NAME = getenv("INPUT_SECTION_NAME", "waka")
-    BRANCH_NAME = getenv('INPUT_PUSH_BRANCH_NAME', "")
+    BRANCH_NAME = getenv("INPUT_PUSH_BRANCH_NAME", "")
 
-    SHOW_OS = getenv('INPUT_SHOW_OS', "False").lower() in _TRUTHY
-    SHOW_PROJECTS = getenv('INPUT_SHOW_PROJECTS', "True").lower() in _TRUTHY
-    SHOW_EDITORS = getenv('INPUT_SHOW_EDITORS', "True").lower() in _TRUTHY
-    SHOW_TIMEZONE = getenv('INPUT_SHOW_TIMEZONE', "True").lower() in _TRUTHY
-    SHOW_COMMIT = getenv('INPUT_SHOW_COMMIT', "True").lower() in _TRUTHY
-    SHOW_LANGUAGE = getenv('INPUT_SHOW_LANGUAGE', "True").lower() in _TRUTHY
-    SHOW_LINES_OF_CODE = getenv('INPUT_SHOW_LINES_OF_CODE', "False").lower() in _TRUTHY
-    SHOW_LANGUAGE_PER_REPO = getenv('INPUT_SHOW_LANGUAGE_PER_REPO', "True").lower() in _TRUTHY
-    SHOW_LOC_CHART = getenv('INPUT_SHOW_LOC_CHART', "True").lower() in _TRUTHY
-    SHOW_DAYS_OF_WEEK = getenv('INPUT_SHOW_DAYS_OF_WEEK', "True").lower() in _TRUTHY
-    SHOW_PROFILE_VIEWS = getenv('INPUT_SHOW_PROFILE_VIEWS', "True").lower() in _TRUTHY
-    SHOW_SHORT_INFO = getenv('INPUT_SHOW_SHORT_INFO', "True").lower() in _TRUTHY
-    SHOW_UPDATED_DATE = getenv('INPUT_SHOW_UPDATED_DATE', "True").lower() in _TRUTHY
-    SHOW_TOTAL_CODE_TIME = getenv('INPUT_SHOW_TOTAL_CODE_TIME', "True").lower() in _TRUTHY
+    SHOW_OS = getenv("INPUT_SHOW_OS", "False").lower() in _TRUTHY
+    SHOW_PROJECTS = getenv("INPUT_SHOW_PROJECTS", "True").lower() in _TRUTHY
+    SHOW_EDITORS = getenv("INPUT_SHOW_EDITORS", "True").lower() in _TRUTHY
+    SHOW_TIMEZONE = getenv("INPUT_SHOW_TIMEZONE", "True").lower() in _TRUTHY
+    SHOW_COMMIT = getenv("INPUT_SHOW_COMMIT", "True").lower() in _TRUTHY
+    SHOW_LANGUAGE = getenv("INPUT_SHOW_LANGUAGE", "True").lower() in _TRUTHY
+    SHOW_LINES_OF_CODE = getenv("INPUT_SHOW_LINES_OF_CODE", "False").lower() in _TRUTHY
+    SHOW_LANGUAGE_PER_REPO = getenv("INPUT_SHOW_LANGUAGE_PER_REPO", "True").lower() in _TRUTHY
+    SHOW_LOC_CHART = getenv("INPUT_SHOW_LOC_CHART", "True").lower() in _TRUTHY
+    SHOW_DAYS_OF_WEEK = getenv("INPUT_SHOW_DAYS_OF_WEEK", "True").lower() in _TRUTHY
+    SHOW_PROFILE_VIEWS = getenv("INPUT_SHOW_PROFILE_VIEWS", "True").lower() in _TRUTHY
+    SHOW_SHORT_INFO = getenv("INPUT_SHOW_SHORT_INFO", "True").lower() in _TRUTHY
+    SHOW_UPDATED_DATE = getenv("INPUT_SHOW_UPDATED_DATE", "True").lower() in _TRUTHY
+    SHOW_TOTAL_CODE_TIME = getenv("INPUT_SHOW_TOTAL_CODE_TIME", "True").lower() in _TRUTHY
 
-    COMMIT_BY_ME = getenv('INPUT_COMMIT_BY_ME', "False").lower() in _TRUTHY
-    COMMIT_MESSAGE = getenv('INPUT_COMMIT_MESSAGE', "Updated with Dev Metrics")
-    COMMIT_USERNAME = getenv('INPUT_COMMIT_USERNAME', "")
-    COMMIT_EMAIL = getenv('INPUT_COMMIT_EMAIL', "")
+    COMMIT_BY_ME = getenv("INPUT_COMMIT_BY_ME", "False").lower() in _TRUTHY
+    COMMIT_MESSAGE = getenv("INPUT_COMMIT_MESSAGE", "Updated with Dev Metrics")
+    COMMIT_USERNAME = getenv("INPUT_COMMIT_USERNAME", "")
+    COMMIT_EMAIL = getenv("INPUT_COMMIT_EMAIL", "")
 
-    LOCALE = getenv('INPUT_LOCALE', "en")
-    UPDATED_DATE_FORMAT = getenv('INPUT_UPDATED_DATE_FORMAT', "%d/%m/%Y %H:%M:%S")
-    IGNORED_REPOS = getenv('INPUT_IGNORED_REPOS', "").replace(' ', '').split(',')
-    SYMBOL_VERSION = int(getenv('INPUT_SYMBOL_VERSION'))
+    LOCALE = getenv("INPUT_LOCALE", "en")
+    UPDATED_DATE_FORMAT = getenv("INPUT_UPDATED_DATE_FORMAT", "%d/%m/%Y %H:%M:%S")
+    IGNORED_REPOS = getenv("INPUT_IGNORED_REPOS", "").replace(" ", "").split(",")
+    SYMBOL_VERSION = int(getenv("INPUT_SYMBOL_VERSION"))
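
All boolean inputs go through the same pattern: read the variable with a default, lowercase it, and test membership in the _TRUTHY list, so "1", "T" or "Yes" all enable a flag. A standalone check of that behaviour, reusing two of the class's own input names:

    from os import environ, getenv

    _TRUTHY = ["true", "1", "t", "y", "yes"]

    environ["INPUT_SHOW_OS"] = "Yes"  # simulate an action input being set
    SHOW_OS = getenv("INPUT_SHOW_OS", "False").lower() in _TRUTHY
    SHOW_COMMIT = getenv("INPUT_SHOW_COMMIT", "True").lower() in _TRUTHY  # unset -> default "True"

    print(SHOW_OS, SHOW_COMMIT)  # True True
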

manager_github.py
@@ -8,6 +8,8 @@ from manager_environment import EnvironmentManager as EM
 
 def init_github_manager():
     """
+    Initialize GitHub manager.
+    Current user, user readme repo and readme file are downloaded.
     """
     GitHubManager.prepare_github_env()
     print(f"Current user: {GitHubManager.USER.login}")
@@ -16,61 +18,79 @@ def init_github_manager():
 class GitHubManager:
     USER: AuthenticatedUser
     REPO: Repository
-    README: ContentFile
-    README_CONTENTS: str
+    _README: ContentFile
+    _README_CONTENTS: str
 
-    _START_COMMENT = f'<!--START_SECTION:{EM.SECTION_NAME}-->'
-    _END_COMMENT = f'<!--END_SECTION:{EM.SECTION_NAME}-->'
+    _START_COMMENT = f"<!--START_SECTION:{EM.SECTION_NAME}-->"
+    _END_COMMENT = f"<!--END_SECTION:{EM.SECTION_NAME}-->"
     _README_REGEX = f"{_START_COMMENT}[\\s\\S]+{_END_COMMENT}"
 
     @staticmethod
     def prepare_github_env():
         """
+        Download and store for future use:
+        - Current GitHub user.
+        - Named repo of the user [username]/[username].
+        - README.md file of this repo.
+        - Parsed contents of the file.
         """
         github = Github(EM.GH_TOKEN)
         GitHubManager.USER = github.get_user()
-        GitHubManager.REPO = github.get_repo(f"{GitHubManager.USER.login}/{GitHubManager.USER.login}")
-        GitHubManager.README = GitHubManager.REPO.get_readme()
-        GitHubManager.README_CONTENTS = str(b64decode(GitHubManager.README.content), 'utf-8')
+        GitHubManager._REPO = github.get_repo(f"{GitHubManager.USER.login}/{GitHubManager.USER.login}")
+        GitHubManager._README = GitHubManager._REPO.get_readme()
+        GitHubManager._README_CONTENTS = str(b64decode(GitHubManager._README.content), "utf-8")
 
     @staticmethod
-    def _generate_new_readme(stats: str):
+    def _generate_new_readme(stats: str) -> str:
         """
-        Generate a new Readme.md
+        Generates new README.md file, inserts its contents between start and end tags.
+
+        :param stats: contents to insert.
+        :returns: new README.md string.
         """
         readme_stats = f"{GitHubManager._START_COMMENT}\n{stats}\n{GitHubManager._END_COMMENT}"
-        return sub(GitHubManager._README_REGEX, readme_stats, GitHubManager.README_CONTENTS)
+        return sub(GitHubManager._README_REGEX, readme_stats, GitHubManager._README_CONTENTS)
 
     @staticmethod
-    def _get_author():
+    def _get_author() -> InputGitAuthor:
         """
+        Gets GitHub commit author specified by environmental variables.
+        It is the user himself or a 'readme-bot'.
+
+        :returns: Commit author.
         """
         if EM.COMMIT_BY_ME:
-            return InputGitAuthor(
-                GitHubManager.USER.login or EM.COMMIT_USERNAME,
-                GitHubManager.USER.email or EM.COMMIT_EMAIL
-            )
+            return InputGitAuthor(GitHubManager.USER.login or EM.COMMIT_USERNAME, GitHubManager.USER.email or EM.COMMIT_EMAIL)
         else:
-            return InputGitAuthor(
-                EM.COMMIT_USERNAME or 'readme-bot',
-                EM.COMMIT_EMAIL or '41898282+github-actions[bot]@users.noreply.github.com'
-            )
+            return InputGitAuthor(EM.COMMIT_USERNAME or "readme-bot", EM.COMMIT_EMAIL or "41898282+github-actions[bot]@users.noreply.github.com")
 
     @staticmethod
     def branch() -> str:
+        """
+        Gets name of branch to commit to specified by environmental variables.
+        It is the default branch (regularly, 'main' or 'master') or a branch specified by user.
+
+        :returns: Commit author.
+        """
         return GitHubManager.REPO.default_branch if EM.BRANCH_NAME == "" else EM.BRANCH_NAME
 
     @staticmethod
     def update_readme(stats: str) -> bool:
+        """
+        Updates readme with given data if necessary.
+        Uses commit author, commit message and branch name specified by environmental variables.
+
+        :returns: whether the README.md file was updated or not.
+        """
         new_readme = GitHubManager._generate_new_readme(stats)
-        if new_readme != GitHubManager.README_CONTENTS:
+        if new_readme != GitHubManager._README_CONTENTS:
             GitHubManager.REPO.update_file(
-                path=GitHubManager.README.path,
+                path=GitHubManager._README.path,
                 message=EM.COMMIT_MESSAGE,
                 content=new_readme,
-                sha=GitHubManager.README.sha,
+                sha=GitHubManager._README.sha,
                 branch=GitHubManager.branch(),
-                committer=GitHubManager._get_author()
+                committer=GitHubManager._get_author(),
             )
             return True
         else:
@@ -78,6 +98,12 @@ class GitHubManager:
 
     @staticmethod
     def update_chart(chart_path: str):
+        """
+        Updates lines of code chart.
+        Uses commit author, commit message and branch name specified by environmental variables.
+
+        :param chart_path: path to saved lines of code chart.
+        """
         with open(chart_path, "rb") as input_file:
             data = input_file.read()
         try:
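
update_readme only commits when the regenerated text differs from the cached contents; the statistics block is spliced between the section markers with a regular expression. A self-contained sketch of that splice (the README text below is invented):

    from re import sub

    SECTION_NAME = "waka"  # default of INPUT_SECTION_NAME
    START = f"<!--START_SECTION:{SECTION_NAME}-->"
    END = f"<!--END_SECTION:{SECTION_NAME}-->"
    README_REGEX = f"{START}[\\s\\S]+{END}"

    readme = f"# Hi there\n{START}\nold stats\n{END}\n"
    stats = "**Timeline**\n\nnew stats"

    new_readme = sub(README_REGEX, f"{START}\n{stats}\n{END}", readme)
    print(new_readme != readme)  # True -> the file would be committed
    print(new_readme)
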

manager_localization.py
@@ -7,19 +7,37 @@ from manager_environment import EnvironmentManager as EM
 
 def init_localization_manager():
     """
+    Initialize localization manager.
+    Load GUI translations JSON file.
     """
     LocalizationManager.load_localization("translation.json")
 
 
 class LocalizationManager:
+    """
+    Class for handling localization (and maybe other file IO in future).
+    Stores localization in dictionary.
+    """
+
     _LOCALIZATION: Dict[str, str] = dict()
 
     @staticmethod
     def load_localization(file: str):
-        with open(join(dirname(__file__), file), encoding='utf-8') as config_file:
+        """
+        Read localization file and store locale defined with environmental variable.
+
+        :param file: Localization file path, related to current file (in sources root).
+        """
+        with open(join(dirname(__file__), file), encoding="utf-8") as config_file:
             data = load(config_file)
             LocalizationManager._LOCALIZATION = data[EM.LOCALE]
 
     @staticmethod
     def t(key: str) -> str:
+        """
+        Translate string to current localization.
+
+        :param key: Localization key.
+        :returns: Translation string.
+        """
         return LocalizationManager._LOCALIZATION[key]
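
The localization flow is: load translation.json once, keep only the section for the configured locale, then look up keys through t(). A self-contained imitation of that flow; the JSON content below is invented, not the project's real translation file:

    from json import loads

    LOCALE = "en"  # default of INPUT_LOCALE

    # Invented stand-in for translation.json: one dictionary per locale.
    data = loads('{"en": {"My GitHub Data": "My GitHub Data"}, "de": {"My GitHub Data": "Meine GitHub-Daten"}}')

    _LOCALIZATION = data[LOCALE]

    def t(key: str) -> str:
        return _LOCALIZATION[key]

    print(t("My GitHub Data"))
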
@@ -8,6 +8,13 @@ from manager_github import GitHubManager as GHM
 
 
 async def calculate_yearly_commit_data(repositories: Dict) -> Dict:
+    """
+    Calculate commit data by years.
+    Commit data includes difference between contribution additions and deletions in each quarter of each recorded year.
+
+    :param repositories: user repositories info dictionary.
+    :returns: Commit quarter yearly data dictionary.
+    """
     yearly_data = dict()
     total = len(repositories["data"]["user"]["repositories"]["edges"])
     for ind, repo in enumerate(repositories["data"]["user"]["repositories"]["edges"]):
@@ -17,7 +24,14 @@ async def calculate_yearly_commit_data(repositories: Dict) -> Dict:
     return yearly_data
 
 
-async def update_yearly_data_with_commit_stats(repo_details: Dict, yearly_data: Dict) -> Dict:
+async def update_yearly_data_with_commit_stats(repo_details: Dict, yearly_data: Dict):
+    """
+    Updates yearly commit data with commits from given repository.
+    Skips update if the commit isn't related to any repository.
+
+    :param repo_details: Dictionary with information about the given repository.
+    :param yearly_data: Yearly data dictionary to update.
+    """
     commit_data = await DM.get_remote_graphql("repo_commit_list", owner=repo_details["owner"]["login"], name=repo_details["name"], id=GHM.USER.node_id)
 
     if commit_data["data"]["repository"] is None:
@@ -36,4 +50,4 @@ async def update_yearly_data_with_commit_stats(repo_details: Dict, yearly_data: Dict) -> Dict:
             yearly_data[curr_year][quarter] = dict()
         if repo_details["primaryLanguage"]["name"] not in yearly_data[curr_year][quarter]:
             yearly_data[curr_year][quarter][repo_details["primaryLanguage"]["name"]] = 0
-        yearly_data[curr_year][quarter][repo_details["primaryLanguage"]["name"]] += (commit["additions"] - commit["deletions"])
+        yearly_data[curr_year][quarter][repo_details["primaryLanguage"]["name"]] += commit["additions"] - commit["deletions"]
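
The yearly data structure is a nested dictionary: year -> quarter -> primary language -> net contributed lines (additions minus deletions). A small sketch of one update step with invented commit numbers:

    yearly_data = {}

    # Invented inputs: one commit from April 2022 in a Python repository.
    curr_year, quarter, language = 2022, 2, "Python"
    commit = {"additions": 120, "deletions": 45}

    if curr_year not in yearly_data:
        yearly_data[curr_year] = {}
    if quarter not in yearly_data[curr_year]:
        yearly_data[curr_year][quarter] = {}
    if language not in yearly_data[curr_year][quarter]:
        yearly_data[curr_year][quarter][language] = 0
    yearly_data[curr_year][quarter][language] += commit["additions"] - commit["deletions"]

    print(yearly_data)  # {2022: {2: {'Python': 75}}}
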