Merge branch 'master' into dependabot/pip/pygithub-approx-eq-1.58
.github/workflows/ci.yml
@@ -21,6 +21,7 @@ jobs:
         uses: actions/setup-python@v4
         with:
           python-version: 3.8
+          cache: 'pip'

       - name: Install Dependencies 📥
         run: pip install -r requirements.txt
.github/workflows/codeql.yml (new file)
@@ -0,0 +1,56 @@
+name: CodeQL
+
+on:
+  push:
+    branches: master
+  pull_request:
+  workflow_dispatch:
+  schedule:
+    - cron: '30 13 * * 6'
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: ${{ github.ref != 'refs/heads/master' }}
+
+jobs:
+  analyze:
+    name: Analyze
+    runs-on: ubuntu-latest
+
+    permissions:
+      actions: read
+      contents: read
+      security-events: write
+
+    strategy:
+      fail-fast: false
+      matrix:
+        language: [ 'python' ]
+
+    steps:
+      - name: Checkout 🛎️
+        uses: actions/checkout@v3
+
+      - name: Setup Python 3.8 🐍
+        uses: actions/setup-python@v4
+        with:
+          python-version: 3.8
+          cache: 'pip'
+
+      - name: Install dependencies 📥
+        run: |
+          python -m pip install --upgrade pip
+          pip install -r requirements.txt
+          echo "CODEQL_PYTHON=$(which python)" >> $GITHUB_ENV
+
+      - name: Initialize CodeQL 🧑‍💻
+        uses: github/codeql-action/init@v2
+        with:
+          languages: ${{ matrix.language }}
+          queries: security-and-quality
+          setup-python-dependencies: false
+
+      - name: Perform CodeQL Analysis 📈
+        uses: github/codeql-action/analyze@v2
+        with:
+          category: "/language:${{matrix.language}}"
.github/workflows/dependency-review.yml (new file)
@@ -0,0 +1,19 @@
+name: Dependency review
+on: pull_request
+
+permissions:
+  contents: read
+  pull-requests: write
+
+jobs:
+  dependency-review:
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Checkout repository 🛎️
+        uses: actions/checkout@v3
+
+      - name: Dependency review 👀
+        uses: actions/dependency-review-action@v3
+        with:
+          comment-summary-in-pr: true
@@ -1,6 +1,6 @@
 from typing import Dict

-from numpy import arange, array, add, amax
+from numpy import arange, array, add, amax, zeros
 import matplotlib.patches as mpatches
 import matplotlib.pyplot as plt

@@ -27,26 +27,28 @@ async def create_loc_graph(yearly_data: Dict, save_path: str):
     languages_all_loc = dict()
     for i, y in enumerate(sorted(yearly_data.keys())):
         for q in yearly_data[y].keys():
-            langs = sorted(yearly_data[y][q].keys(), key=lambda n: yearly_data[y][q][n], reverse=True)[0:MAX_LANGUAGES]
+            langs = sorted(yearly_data[y][q].keys(), key=lambda n: yearly_data[y][q][n]["add"] + yearly_data[y][q][n]["del"], reverse=True)[0:MAX_LANGUAGES]

             for lang in langs:
                 if lang not in languages_all_loc:
-                    languages_all_loc[lang] = array([[0] * years] * 4)
-                languages_all_loc[lang][q - 1][i] = yearly_data[y][q][lang]
+                    languages_all_loc[lang] = zeros((years, 4, 2), dtype=int)
+                languages_all_loc[lang][i][q - 1] = array([yearly_data[y][q][lang]["add"], yearly_data[y][q][lang]["del"]])

     fig = plt.figure()
     ax = fig.add_axes([0, 0, 1.5, 1])

     language_handles = []
-    cumulative = array([[0] * years] * 4)
+    cumulative = zeros((years, 4, 2), dtype=int)

     for key, value in languages_all_loc.items():
         color = colors[key]["color"] if colors[key]["color"] is not None else "w"
         language_handles += [mpatches.Patch(color=color, label=key)]

         for quarter in range(4):
-            ax.bar(year_indexes + quarter * 0.21, value[quarter], 0.2, bottom=cumulative[quarter], color=color)
-            cumulative[quarter] = add(cumulative[quarter], value[quarter])
+            ax.bar(year_indexes + quarter * 0.21, value[:, quarter][:, 0], 0.2, bottom=cumulative[:, quarter][:, 0], color=color)
+            ax.bar(year_indexes + quarter * 0.21, -value[:, quarter][:, 1], 0.2, bottom=-cumulative[:, quarter][:, 1], color=color)
+            cumulative[:, quarter] = add(cumulative[:, quarter], value[:, quarter])
+            ax.axhline(y=0.5, lw=0.5, snap=True, color="k")

     ax.set_ylabel("LOC added", fontdict=dict(weight="bold"))
     ax.set_xticks(array([arange(i, i + 0.84, step=0.21) for i in year_indexes]).flatten(), labels=["Q1", "Q2", "Q3", "Q4"] * years)
@@ -62,6 +64,11 @@ async def create_loc_graph(yearly_data: Dict, save_path: str):
     ax.spines["top"].set_visible(False)
     ax.spines["right"].set_visible(False)

-    plt.ylim(0, 1.05 * amax(cumulative))
+    max_offset = 0.05 * amax(cumulative.flatten())
+    joined = cumulative.reshape(-1, cumulative.shape[-1])
+    max_additions = amax(joined[:, 0])
+    max_deletions = amax(joined[:, 1])
+    plt.ylim(top=max_additions + max_offset, bottom=-max_deletions - max_offset)

     plt.savefig(save_path, bbox_inches="tight")
     plt.close(fig)
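The two hunks above move the chart data from per-quarter totals to a (years, 4, 2) array that holds added and deleted lines separately, drawing additions above the x-axis and deletions mirrored below it. A minimal standalone sketch of that layout, with made-up languages and counts (not the project's code):

from numpy import add, arange, array, zeros
import matplotlib.pyplot as plt

years = 2
year_indexes = arange(years)

# One (years, quarters, [added, deleted]) array per language, filled with fake numbers.
loc = {"Python": zeros((years, 4, 2), dtype=int), "Go": zeros((years, 4, 2), dtype=int)}
loc["Python"][0][0] = array([120, 40])  # year 0, Q1: 120 LOC added, 40 deleted
loc["Go"][1][2] = array([300, 10])      # year 1, Q3: 300 added, 10 deleted

fig, ax = plt.subplots()
cumulative = zeros((years, 4, 2), dtype=int)
for lang, value in loc.items():
    for quarter in range(4):
        # Additions stack upward, deletions mirror downward from the same baseline.
        ax.bar(year_indexes + quarter * 0.21, value[:, quarter, 0], 0.2, bottom=cumulative[:, quarter, 0])
        ax.bar(year_indexes + quarter * 0.21, -value[:, quarter, 1], 0.2, bottom=-cumulative[:, quarter, 1])
        cumulative[:, quarter] = add(cumulative[:, quarter], value[:, quarter])
ax.axhline(y=0, lw=0.5, color="k")  # separator between added (top) and deleted (bottom)
fig.savefig("loc_sketch.png", bbox_inches="tight")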
@@ -148,7 +148,7 @@ async def get_stats() -> str:

     if EM.SHOW_LINES_OF_CODE:
         DBM.i("Adding lines of code info...")
-        total_loc = sum([yearly_data[y][q][d] for y in yearly_data.keys() for q in yearly_data[y].keys() for d in yearly_data[y][q].keys()])
+        total_loc = sum([yearly_data[y][q][d]["add"] for y in yearly_data.keys() for q in yearly_data[y].keys() for d in yearly_data[y][q].keys()])
         data = f"{intword(total_loc)} {FM.t('Lines of code')}"
         stats += f")}-{quote(data)}-blue)\n\n"
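With the per-language counters now split into "add" and "del", the badge total counts only added lines. A toy example of the same comprehension over a hand-written yearly_data dict (fake numbers):

# Fake yearly_data in the new {year: {quarter: {language: {"add": int, "del": int}}}} shape.
yearly_data = {
    2022: {1: {"Python": {"add": 1200, "del": 300}}},
    2023: {3: {"Python": {"add": 800, "del": 100}, "Go": {"add": 50, "del": 5}}},
}
total_loc = sum([yearly_data[y][q][d]["add"] for y in yearly_data.keys() for q in yearly_data[y].keys() for d in yearly_data[y][q].keys()])
print(total_loc)  # 2050, deletions are ignored for the badge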
@@ -188,7 +188,7 @@ async def main():
     if GHM.update_readme(stats):
         DBM.g("Readme updated!")
     else:
-        GHM.set_github_output(stats)
+        if GHM.set_github_output(stats):
+            DBM.g("Debug run, readme not updated. Check the latest comment for the generated stats.")
     await DM.close_remote_resources()
@@ -121,7 +121,7 @@ GITHUB_API_QUERIES = {
     """,
     "hide_outdated_comment": """
 mutation {
-    minimizeComment(input: {classifier:OUTDATED, subjectId: "$id"}) {
+    minimizeComment(input: {classifier: OUTDATED, subjectId: "$id"}) {
         clientMutationId
     }
 }
@@ -227,7 +227,7 @@ class DownloadManager:
         return await DownloadManager._get_remote_resource(resource, safe_load)

     @staticmethod
-    async def _fetch_graphql_query(query: str, **kwargs) -> Dict:
+    async def _fetch_graphql_query(query: str, retries_count: int = 10, **kwargs) -> Dict:
         """
         Execute GitHub GraphQL API simple query.
         :param query: Dynamic query identifier.
@@ -241,6 +241,8 @@ class DownloadManager:
         )
         if res.status_code == 200:
             return res.json()
+        elif res.status_code == 502 and retries_count > 0:
+            return await DownloadManager._fetch_graphql_query(query, retries_count - 1, **kwargs)
         else:
             raise Exception(f"Query '{query}' failed to run by returning code of {res.status_code}: {res.json()}")
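The new retries_count parameter makes the query call itself again whenever GitHub answers 502, up to ten times. A self-contained sketch of the same recursion, with a stubbed request function standing in for the real GraphQL call:

import asyncio
from itertools import count

_calls = count()

async def flaky_request() -> tuple:
    # Stand-in for the real API call: returns 502 twice, then succeeds.
    return (200, {"ok": True}) if next(_calls) >= 2 else (502, None)

async def fetch_with_retries(retries_count: int = 10) -> dict:
    status, body = await flaky_request()
    if status == 200:
        return body
    elif status == 502 and retries_count > 0:
        # Same shape as the change above: recurse with one retry fewer.
        return await fetch_with_retries(retries_count - 1)
    else:
        raise Exception(f"Request failed with code {status}")

print(asyncio.run(fetch_with_retries()))  # {'ok': True} after two retried 502s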
@@ -1,6 +1,7 @@
-from json import load
-from os.path import join, dirname
-from typing import Dict
+from os.path import join, isfile, dirname
+from pickle import load as load_pickle, dump as dump_pickle
+from json import load as load_json
+from typing import Dict, Optional

 from manager_environment import EnvironmentManager as EM

@@ -29,7 +30,7 @@ class FileManager:
         :param file: Localization file path, related to current file (in sources root).
         """
         with open(join(dirname(__file__), file), encoding="utf-8") as config_file:
-            data = load(config_file)
+            data = load_json(config_file)
         FileManager._LOCALIZATION = data[EM.LOCALE]

     @staticmethod
@@ -55,3 +56,27 @@ class FileManager:
         name = join("assets", name) if assets else name
         with open(name, "a" if append else "w", encoding="utf-8") as file:
             file.write(content)
+
+    @staticmethod
+    def cache_binary(name: str, content: Optional[Dict] = None, assets: bool = False) -> Optional[Dict]:
+        """
+        Save binary output file if provided or read if content is None.
+
+        :param name: File name.
+        :param content: File content (utf-8 string) or None.
+        :param assets: True for saving to 'assets' directory, false otherwise.
+        :returns: File cache contents if content is None, None otherwise.
+        """
+        name = join("assets", name) if assets else name
+        if content is None and not isfile(name):
+            return None
+
+        with open(name, "rb" if content is None else "wb") as file:
+            if content is None:
+                try:
+                    return load_pickle(file)
+                except Exception:
+                    return None
+            else:
+                dump_pickle(content, file)
+                return None
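cache_binary acts as both reader and writer: pass a dict to pickle it to disk, pass nothing to read it back (None on a missing or unreadable file). A standalone round-trip in the same spirit (hypothetical helper, not the FileManager API):

from os.path import isfile
from pickle import load as load_pickle, dump as dump_pickle
from typing import Dict, Optional

def cache_binary(name: str, content: Optional[Dict] = None) -> Optional[Dict]:
    if content is None and not isfile(name):
        return None  # nothing cached yet
    with open(name, "rb" if content is None else "wb") as file:
        if content is None:
            try:
                return load_pickle(file)  # read mode: return the cached dict
            except Exception:
                return None               # a corrupted cache counts as a miss
        dump_pickle(content, file)        # write mode: store and return None

cache_binary("demo.pick", {2023: {1: {"Python": {"add": 10, "del": 2}}}})  # write
print(cache_binary("demo.pick"))                                           # read back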
@@ -106,7 +106,7 @@ class GitHubManager:
         return False

     @staticmethod
-    def set_github_output(stats: str):
+    def set_github_output(stats: str) -> bool:
         """
         Outputs readme data as current action output instead of committing it.

@@ -114,13 +114,15 @@ class GitHubManager:
         """
         DBM.i("Setting README contents as action output...")
         if "GITHUB_OUTPUT" not in environ.keys():
-            raise Exception("Not in GitHub environment ('GITHUB_OUTPUT' not defined)!")
+            DBM.p("Not in GitHub environment, not setting action output!")
+            return False

         prefix = "README stats current output:"
         eol = "".join(choice(ascii_letters) for _ in range(10))
         FM.write_file(environ["GITHUB_OUTPUT"], f"README_CONTENT<<{eol}\n{prefix}\n\n{stats}\n{eol}\n", append=True)

         DBM.g("Action output set!")
+        return True

     @staticmethod
     def update_chart(chart_path: str) -> str:
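The write to GITHUB_OUTPUT uses GitHub Actions' multiline-output syntax (NAME<<DELIMITER ... DELIMITER) with a random delimiter so the README content cannot terminate the block early. A small sketch of producing that block, with a local file standing in for the real $GITHUB_OUTPUT path:

from random import choice
from string import ascii_letters

stats = "total: 1.2M lines of code"                       # made-up README content
eol = "".join(choice(ascii_letters) for _ in range(10))   # random heredoc-style delimiter
with open("out.txt", "a", encoding="utf-8") as file:      # stands in for environ["GITHUB_OUTPUT"]
    file.write(f"README_CONTENT<<{eol}\n{stats}\n{eol}\n")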
@@ -701,5 +701,42 @@
         "private repositories": "%d Приватних репозиторіїв",
         "I am an Early": "Я рання 🐤",
         "I am a Night": "Я нічна 🦉"
-    }
+    },
+    "fa": {
+        "Monday": "دوشنبه",
+        "Tuesday": "سهشنبه",
+        "Wednesday": "چهارشنبه",
+        "Thursday": "پنجشنبه",
+        "Friday": "جمعه",
+        "Saturday": "شنبه",
+        "Sunday": "یکشنبه",
+        "Morning": "صبح",
+        "Daytime": "طول روز",
+        "Evening": "عصر",
+        "Night": "شب",
+        "Languages": "زبانها",
+        "Editors": "ادیتورها",
+        "operating system": "سیستمعاملها",
+        "Projects": "پروژهها",
+        "Timezone": "منطقهی زمانی",
+        "Contributions in the year": "%s مشارکتها در سال جاری %s",
+        "Used in GitHub's Storage": "%s مصرف فضای گیتهاب",
+        "Opted to Hire": "جویایکار",
+        "Not Opted to Hire": "دنبالکار نیست",
+        "Profile Views": "بازدیدهای پروفایل",
+        "From Hello World I have written": "از اولین کدم تا کنون %s کد نوشتهام.",
+        "I am Most Productive on": "بیشتر در %s فعالیت دارم",
+        "This Week I Spend My Time On": "این هفته بیشتر روی این موضوعات کار کردم",
+        "I Mostly Code in": "من بیشتر کدهام %s هست.",
+        "Timeline": "زمانبندی",
+        "No Activity Tracked This Week": "این هفته فعالیتی نبوده",
+        "My GitHub Data": "اطلاعات گیتهاب من",
+        "Lines of code": "خطهای کد",
+        "public repository": "%d رپیوزیتوری عمومی",
+        "public repositories": "%d ریپوزیتوریهای عمومی",
+        "private repository": "%d ریپوزیتوری شخصی",
+        "private repositories": "%d ریپوزیتوریهای شخصی",
+        "I am an Early": "من یک 🐤 سحرخیزم",
+        "I am a Night": "من یک 🦉 شبم"
+    }
 }
@@ -13,24 +13,33 @@ from manager_debug import DebugManager as DBM
 async def calculate_yearly_commit_data(repositories: Dict) -> Dict:
     """
     Calculate commit data by years.
-    Commit data includes difference between contribution additions and deletions in each quarter of each recorded year.
+    Commit data includes contribution additions and deletions in each quarter of each recorded year.

     :param repositories: user repositories info dictionary.
     :returns: Commit quarter yearly data dictionary.
     """
     DBM.i("Calculating yearly commit data...")
+    if EM.DEBUG_RUN:
+        content = FM.cache_binary("yearly_data.pick", assets=True)
+        if content is not None:
+            DBM.g("Yearly data restored from cache!")
+            return content
+        else:
+            DBM.w("No cached yearly data found, recalculating...")

     yearly_data = dict()
     total = len(repositories["data"]["user"]["repositories"]["nodes"])

     for ind, repo in enumerate(repositories["data"]["user"]["repositories"]["nodes"]):
         if repo["name"] not in EM.IGNORED_REPOS:
-            repo_name = "private" if repo["isPrivate"] else f"{repo['owner']['login']}/{repo['name']}"
+            repo_name = "[private]" if repo["isPrivate"] else f"{repo['owner']['login']}/{repo['name']}"
             DBM.i(f"\t{ind + 1}/{total} Retrieving repo: {repo_name}")
             await update_yearly_data_with_commit_stats(repo, yearly_data)
     DBM.g("Yearly commit data calculated!")

     if EM.DEBUG_RUN:
+        FM.cache_binary("yearly_data.pick", yearly_data, assets=True)
         FM.write_file("yearly_data.json", dumps(yearly_data), assets=True)
+        DBM.g("Yearly data saved to cache!")
     return yearly_data
@@ -61,5 +70,6 @@ async def update_yearly_data_with_commit_stats(repo_details: Dict, yearly_data:
             if quarter not in yearly_data[curr_year]:
                 yearly_data[curr_year][quarter] = dict()
             if repo_details["primaryLanguage"]["name"] not in yearly_data[curr_year][quarter]:
-                yearly_data[curr_year][quarter][repo_details["primaryLanguage"]["name"]] = 0
-            yearly_data[curr_year][quarter][repo_details["primaryLanguage"]["name"]] += commit["additions"] - commit["deletions"]
+                yearly_data[curr_year][quarter][repo_details["primaryLanguage"]["name"]] = {"add": 0, "del": 0}
+            yearly_data[curr_year][quarter][repo_details["primaryLanguage"]["name"]]["add"] += commit["additions"]
+            yearly_data[curr_year][quarter][repo_details["primaryLanguage"]["name"]]["del"] += commit["deletions"]
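To see the structure this accumulation produces, here is a tiny standalone run over made-up commit stats (fake data, not the GraphQL payload):

# Fake commits for two languages; mirrors the add/del accumulation above.
fake_commits = [
    {"year": 2023, "quarter": 1, "lang": "Python", "additions": 120, "deletions": 30},
    {"year": 2023, "quarter": 1, "lang": "Python", "additions": 15, "deletions": 5},
    {"year": 2023, "quarter": 2, "lang": "Go", "additions": 40, "deletions": 0},
]

yearly_data = {}
for c in fake_commits:
    quarter_langs = yearly_data.setdefault(c["year"], {}).setdefault(c["quarter"], {})
    counters = quarter_langs.setdefault(c["lang"], {"add": 0, "del": 0})
    counters["add"] += c["additions"]
    counters["del"] += c["deletions"]

print(yearly_data)
# {2023: {1: {'Python': {'add': 135, 'del': 35}}, 2: {'Go': {'add': 40, 'del': 0}}}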