Keeping updated

This commit is contained in:
pseusys
2023-03-11 18:13:19 +01:00
10 changed files with 188 additions and 25 deletions

17
.github/dependabot.yml vendored Normal file
View File

@@ -0,0 +1,17 @@
# Dependabot configuration: daily dependency-update checks for the three
# ecosystems this repository uses (Docker base images, GitHub Actions, pip).
# NOTE: the rendered diff had lost all YAML indentation; structure restored
# per the dependabot.yml schema (updates is a list of per-ecosystem entries).
version: 2
updates:
  - package-ecosystem: "docker"
    directory: "/"
    schedule:
      interval: "daily"
    open-pull-requests-limit: 99
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "daily"
    open-pull-requests-limit: 99
  - package-ecosystem: "pip"
    directory: "/"
    schedule:
      interval: "daily"
    open-pull-requests-limit: 99

43
.github/workflows/codeql.yml vendored Normal file
View File

@@ -0,0 +1,43 @@
name: CodeQL
on:
push:
branches: master
pull_request:
workflow_dispatch:
schedule:
- cron: '30 13 * * 6'
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: ${{ github.ref != 'refs/heads/master' }}
jobs:
analyze:
name: Analyze
runs-on: ubuntu-latest
permissions:
actions: read
contents: read
security-events: write
strategy:
fail-fast: false
matrix:
language: [ 'python' ]
steps:
- name: Checkout 🛎️
uses: actions/checkout@v3
- name: Initialize CodeQL 🧑‍💻
uses: github/codeql-action/init@v2
with:
languages: ${{ matrix.language }}
queries: security-and-quality
- name: Perform CodeQL Analysis 📈
uses: github/codeql-action/analyze@v2
with:
category: "/language:${{matrix.language}}"

19
.github/workflows/dependency-review.yml vendored Normal file
View File

@@ -0,0 +1,19 @@
name: Dependency review
on: pull_request
permissions:
contents: read
pull-requests: write
jobs:
dependency-review:
runs-on: ubuntu-latest
steps:
- name: Checkout repository 🛎️
uses: actions/checkout@v3
- name: Dependency review 👀
uses: actions/dependency-review-action@v3
with:
comment-summary-in-pr: true

View File

@@ -1,6 +1,6 @@
from typing import Dict
from numpy import arange, array, add, amax
from numpy import arange, array, add, amax, zeros
import matplotlib.patches as mpatches
import matplotlib.pyplot as plt
@@ -27,26 +27,28 @@ async def create_loc_graph(yearly_data: Dict, save_path: str):
languages_all_loc = dict()
for i, y in enumerate(sorted(yearly_data.keys())):
for q in yearly_data[y].keys():
langs = sorted(yearly_data[y][q].keys(), key=lambda n: yearly_data[y][q][n], reverse=True)[0:MAX_LANGUAGES]
langs = sorted(yearly_data[y][q].keys(), key=lambda n: yearly_data[y][q][n]["add"] + yearly_data[y][q][n]["del"], reverse=True)[0:MAX_LANGUAGES]
for lang in langs:
if lang not in languages_all_loc:
languages_all_loc[lang] = array([[0] * years] * 4)
languages_all_loc[lang][q - 1][i] = yearly_data[y][q][lang]
languages_all_loc[lang] = zeros((years, 4, 2), dtype=int)
languages_all_loc[lang][i][q - 1] = array([yearly_data[y][q][lang]["add"], yearly_data[y][q][lang]["del"]])
fig = plt.figure()
ax = fig.add_axes([0, 0, 1.5, 1])
language_handles = []
cumulative = array([[0] * years] * 4)
cumulative = zeros((years, 4, 2), dtype=int)
for key, value in languages_all_loc.items():
color = colors[key]["color"] if colors[key]["color"] is not None else "w"
language_handles += [mpatches.Patch(color=color, label=key)]
for quarter in range(4):
ax.bar(year_indexes + quarter * 0.21, value[quarter], 0.2, bottom=cumulative[quarter], color=color)
cumulative[quarter] = add(cumulative[quarter], value[quarter])
ax.bar(year_indexes + quarter * 0.21, value[:, quarter][:, 0], 0.2, bottom=cumulative[:, quarter][:, 0], color=color)
ax.bar(year_indexes + quarter * 0.21, -value[:, quarter][:, 1], 0.2, bottom=-cumulative[:, quarter][:, 1], color=color)
cumulative[:, quarter] = add(cumulative[:, quarter], value[:, quarter])
ax.axhline(y=0.5, lw=0.5, snap=True, color="k")
ax.set_ylabel("LOC added", fontdict=dict(weight="bold"))
ax.set_xticks(array([arange(i, i + 0.84, step=0.21) for i in year_indexes]).flatten(), labels=["Q1", "Q2", "Q3", "Q4"] * years)
@@ -62,6 +64,11 @@ async def create_loc_graph(yearly_data: Dict, save_path: str):
ax.spines["top"].set_visible(False)
ax.spines["right"].set_visible(False)
plt.ylim(0, 1.05 * amax(cumulative))
max_offset = 0.05 * amax(cumulative.flatten())
joined = cumulative.reshape(-1, cumulative.shape[-1])
max_additions = amax(joined[:, 0])
max_deletions = amax(joined[:, 1])
plt.ylim(top=max_additions + max_offset, bottom=-max_deletions - max_offset)
plt.savefig(save_path, bbox_inches="tight")
plt.close(fig)

View File

@@ -148,7 +148,7 @@ async def get_stats() -> str:
if EM.SHOW_LINES_OF_CODE:
DBM.i("Adding lines of code info...")
total_loc = sum([yearly_data[y][q][d] for y in yearly_data.keys() for q in yearly_data[y].keys() for d in yearly_data[y][q].keys()])
total_loc = sum([yearly_data[y][q][d]["add"] for y in yearly_data.keys() for q in yearly_data[y].keys() for d in yearly_data[y][q].keys()])
data = f"{intword(total_loc)} {FM.t('Lines of code')}"
stats += f"![Lines of code](https://img.shields.io/badge/{quote(FM.t('From Hello World I have written'))}-{quote(data)}-blue)\n\n"
@@ -188,8 +188,8 @@ async def main():
if GHM.update_readme(stats):
DBM.g("Readme updated!")
else:
GHM.set_github_output(stats)
DBM.g("Debug run, readme not updated. Check the latest comment for the generated stats.")
if GHM.set_github_output(stats):
DBM.g("Debug run, readme not updated. Check the latest comment for the generated stats.")
await DM.close_remote_resources()

View File

@@ -121,7 +121,7 @@ GITHUB_API_QUERIES = {
""",
"hide_outdated_comment": """
mutation {
minimizeComment(input: {classifier:OUTDATED, subjectId: "$id"}) {
minimizeComment(input: {classifier: OUTDATED, subjectId: "$id"}) {
clientMutationId
}
}
@@ -227,7 +227,7 @@ class DownloadManager:
return await DownloadManager._get_remote_resource(resource, safe_load)
@staticmethod
async def _fetch_graphql_query(query: str, **kwargs) -> Dict:
async def _fetch_graphql_query(query: str, retries_count: int = 10, **kwargs) -> Dict:
"""
Execute GitHub GraphQL API simple query.
:param query: Dynamic query identifier.
@@ -241,6 +241,8 @@ class DownloadManager:
)
if res.status_code == 200:
return res.json()
elif res.status_code == 502 and retries_count > 0:
return await DownloadManager._fetch_graphql_query(query, retries_count - 1, **kwargs)
else:
raise Exception(f"Query '{query}' failed to run by returning code of {res.status_code}: {res.json()}")

View File

@@ -1,6 +1,7 @@
from json import load
from os.path import join, dirname
from typing import Dict
from os.path import join, isfile, dirname
from pickle import load as load_pickle, dump as dump_pickle
from json import load as load_json
from typing import Dict, Optional
from manager_environment import EnvironmentManager as EM
@@ -29,7 +30,7 @@ class FileManager:
:param file: Localization file path, related to current file (in sources root).
"""
with open(join(dirname(__file__), file), encoding="utf-8") as config_file:
data = load(config_file)
data = load_json(config_file)
FileManager._LOCALIZATION = data[EM.LOCALE]
@staticmethod
@@ -55,3 +56,27 @@ class FileManager:
name = join("assets", name) if assets else name
with open(name, "a" if append else "w", encoding="utf-8") as file:
file.write(content)
@staticmethod
def cache_binary(name: str, content: Optional[Dict] = None, assets: bool = False) -> Optional[Dict]:
    """
    Write a dictionary to a binary (pickle) cache file, or read it back.

    If `content` is provided it is pickled into the file; if `content` is
    None the file is unpickled and returned instead.
    :param name: Cache file name.
    :param content: Dictionary to pickle and store, or None to read the cached value.
    :param assets: True for reading/writing in the 'assets' directory, false otherwise.
    :returns: Unpickled cache contents when content is None (or None if the
        file is missing or cannot be unpickled); None when writing.
    """
    name = join("assets", name) if assets else name
    # Read mode with no cache file present: report a cache miss immediately.
    if content is None and not isfile(name):
        return None
    with open(name, "rb" if content is None else "wb") as file:
        if content is None:
            try:
                return load_pickle(file)
            except Exception:
                # Corrupt or stale cache — treat as a cache miss rather than failing.
                return None
        else:
            dump_pickle(content, file)
            return None

View File

@@ -109,7 +109,7 @@ class GitHubManager:
return False
@staticmethod
def set_github_output(stats: str):
def set_github_output(stats: str) -> bool:
"""
Outputs readme data as current action output instead of committing it.
@@ -117,13 +117,15 @@ class GitHubManager:
"""
DBM.i("Setting README contents as action output...")
if "GITHUB_OUTPUT" not in environ.keys():
raise Exception("Not in GitHub environment ('GITHUB_OUTPUT' not defined)!")
DBM.p("Not in GitHub environment, not setting action output!")
return False
prefix = "README stats current output:"
eol = "".join(choice(ascii_letters) for _ in range(10))
FM.write_file(environ["GITHUB_OUTPUT"], f"README_CONTENT<<{eol}\n{prefix}\n\n{stats}\n{eol}\n", append=True)
DBM.g("Action output set!")
return True
@staticmethod
def update_chart(chart_path: str) -> str:
@@ -141,7 +143,6 @@ class GitHubManager:
if not EM.DEBUG_RUN:
DBM.i("Pushing chart to repo...")
chart_path = f"https://raw.githubusercontent.com/{GitHubManager.USER.login}/{GitHubManager.USER.login}/{GitHubManager.branch()}/{chart_path}"
try:
contents = GitHubManager.REMOTE.get_contents(chart_path)
@@ -150,6 +151,8 @@ class GitHubManager:
except UnknownObjectException:
GitHubManager.REMOTE.create_file(chart_path, "Charts Added", data, committer=GitHubManager._get_author())
DBM.g("Lines of code chart created!")
chart_path = f"https://raw.githubusercontent.com/{GitHubManager.USER.login}/{GitHubManager.USER.login}/{GitHubManager.branch()}/{chart_path}"
return f"**{FM.t('Timeline')}**\n\n![Lines of Code chart]({chart_path})\n\n"
else:

View File

@@ -701,5 +701,42 @@
"private repositories": "%d Приватных репозиторіїв",
"I am an Early": "Я рання 🐤",
"I am a Night": "Я нічна 🦉"
},
"fa": {
"Monday": "دوشنبه",
"Tuesday": "سه‌شنبه",
"Wednesday": "چهارشنبه",
"Thursday": "پنج‌شنبه",
"Friday": "جمعه",
"Saturday": "شنبه",
"Sunday": "یک‌شنبه",
"Morning": "صبح",
"Daytime": "طول روز",
"Evening": "عصر",
"Night": "شب",
"Languages": "زبان‌ها",
"Editors": "ادیتورها",
"operating system": "سیستم‌عامل‌ها",
"Projects": "پروژه‌ها",
"Timezone": "منطقه‌ی زمانی",
"Contributions in the year": "%s مشارکت‌ها در سال جاری %s",
"Used in GitHub's Storage": "%s مصرف فضای گیت‌هاب",
"Opted to Hire": "جویای‌کار",
"Not Opted to Hire": "دنبال‌کار نیست",
"Profile Views": "بازدید‌های پروفایل",
"From Hello World I have written": "از اولین کدم تا کنون %s کد نوشته‌ام.",
"I am Most Productive on": "بیشتر در %s فعالیت دارم",
"This Week I Spend My Time On": "این هفته بیشتر روی این موضوعات کار کردم",
"I Mostly Code in": "من بیشتر کدهام %s هست.",
"Timeline": "زمان‌بندی",
"No Activity Tracked This Week": "این هفته فعالیتی نبوده",
"My GitHub Data": "اطلاعات گیت‌هاب من",
"Lines of code": "خط‌های کد",
"public repository": "%d رپیوزیتوری‌ عمومی",
"public repositories": "%d ریپوزیتوری‌های عمومی",
"private repository": "%d ریپوزیتوری‌ شخصی",
"private repositories": "%d ریپوزیتوری‌های شخصی",
"I am an Early": "من یک 🐤 سحر‌خیزم",
"I am a Night": "من یک 🦉 شبم"
}
}

View File

@@ -13,24 +13,33 @@ from manager_debug import DebugManager as DBM
async def calculate_yearly_commit_data(repositories: Dict) -> Dict:
"""
Calculate commit data by years.
Commit data includes difference between contribution additions and deletions in each quarter of each recorded year.
Commit data includes contribution additions and deletions in each quarter of each recorded year.
:param repositories: user repositories info dictionary.
:returns: Commit quarter yearly data dictionary.
"""
DBM.i("Calculating yearly commit data...")
if EM.DEBUG_RUN:
content = FM.cache_binary("yearly_data.pick", assets=True)
if content is not None:
DBM.g("Yearly data restored from cache!")
return content
else:
DBM.w("No cached yearly data found, recalculating...")
yearly_data = dict()
total = len(repositories["data"]["user"]["repositories"]["nodes"])
for ind, repo in enumerate(repositories["data"]["user"]["repositories"]["nodes"]):
if repo["name"] not in EM.IGNORED_REPOS:
repo_name = "private" if repo["isPrivate"] else f"{repo['owner']['login']}/{repo['name']}"
repo_name = "[private]" if repo["isPrivate"] else f"{repo['owner']['login']}/{repo['name']}"
DBM.i(f"\t{ind + 1}/{total} Retrieving repo: {repo_name}")
await update_yearly_data_with_commit_stats(repo, yearly_data)
DBM.g("Yearly commit data calculated!")
if EM.DEBUG_RUN:
FM.cache_binary("yearly_data.pick", yearly_data, assets=True)
FM.write_file("yearly_data.json", dumps(yearly_data), assets=True)
DBM.g("Yearly data saved to cache!")
return yearly_data
@@ -61,5 +70,6 @@ async def update_yearly_data_with_commit_stats(repo_details: Dict, yearly_data:
if quarter not in yearly_data[curr_year]:
yearly_data[curr_year][quarter] = dict()
if repo_details["primaryLanguage"]["name"] not in yearly_data[curr_year][quarter]:
yearly_data[curr_year][quarter][repo_details["primaryLanguage"]["name"]] = 0
yearly_data[curr_year][quarter][repo_details["primaryLanguage"]["name"]] += commit["additions"] - commit["deletions"]
yearly_data[curr_year][quarter][repo_details["primaryLanguage"]["name"]] = {"add": 0, "del": 0}
yearly_data[curr_year][quarter][repo_details["primaryLanguage"]["name"]]["add"] += commit["additions"]
yearly_data[curr_year][quarter][repo_details["primaryLanguage"]["name"]]["del"] += commit["deletions"]