merged with master

This commit is contained in:
pseusys
2023-02-16 22:37:23 +01:00
16 changed files with 875 additions and 3119 deletions

View File

@@ -1,22 +1,22 @@
INPUT_WAKATIME_API_KEY=""
INPUT_PUSH_BRANCH_NAME="main"
INPUT_SECTION_NAME="waka"
INPUT_SHOW_TIMEZONE="True"
INPUT_SHOW_PROJECTS="False"
INPUT_SHOW_EDITORS="False"
INPUT_SHOW_OS="False"
INPUT_SHOW_LANGUAGE="False"
INPUT_GH_TOKEN=""
INPUT_SYMBOL_VERSION="1"
INPUT_SHOW_LINES_OF_CODE="False"
INPUT_SHOW_LOC_CHART="False"
INPUT_SHOW_PROFILE_VIEWS="False"
INPUT_SHOW_TOTAL_CODE_TIME="True"
INPUT_SHOW_SHORT_INFO="False"
INPUT_SHOW_COMMIT="False"
INPUT_SHOW_DAYS_OF_WEEK="True"
INPUT_SHOW_LANGUAGE_PER_REPO="True"
INPUT_SHOW_UPDATED_DATE="True"
INPUT_UPDATED_DATE_FORMAT="%d/%m/%Y %H:%M:%S"
INPUT_COMMIT_BY_ME="False"
INPUT_COMMIT_MESSAGE="Updated with Dev Metrics"
INPUT_WAKATIME_API_KEY=YOUR_WAKATIME_API_KEY
INPUT_PUSH_BRANCH_NAME=main
INPUT_SECTION_NAME=waka
INPUT_SHOW_TIMEZONE=True
INPUT_SHOW_PROJECTS=True
INPUT_SHOW_EDITORS=True
INPUT_SHOW_OS=True
INPUT_SHOW_LANGUAGE=True
INPUT_GH_TOKEN=YOUR_GITHUB_TOKEN_KEY
INPUT_SYMBOL_VERSION=1
INPUT_SHOW_LINES_OF_CODE=True
INPUT_SHOW_LOC_CHART=True
INPUT_SHOW_PROFILE_VIEWS=True
INPUT_SHOW_TOTAL_CODE_TIME=True
INPUT_SHOW_SHORT_INFO=True
INPUT_SHOW_COMMIT=True
INPUT_SHOW_DAYS_OF_WEEK=True
INPUT_SHOW_LANGUAGE_PER_REPO=True
INPUT_SHOW_UPDATED_DATE=True
INPUT_UPDATED_DATE_FORMAT=%d/%m/%Y %H:%M:%S
INPUT_COMMIT_BY_ME=False
INPUT_COMMIT_MESSAGE=Updated with Dev Metrics

37
.github/workflows/build_image.yml vendored Normal file
View File

@@ -0,0 +1,37 @@
# Builds the action's Docker image and publishes it to Docker Hub.
# Tags/labels are derived by docker/metadata-action; the push only happens
# on 'master' or 'releases/*' branches (pull requests just build).
name: PUBLISH_IMAGE
on:
  push:
jobs:
  publish-server-image:
    name: Publish 'waka-readme-stats' image
    runs-on: ubuntu-latest
    steps:
      - name: Checkout 🛎️
        uses: actions/checkout@v3
      - name: Log in to the container registry 🚪
        uses: docker/login-action@v2
        with:
          username: wakareadmestats
          # Token must be provided as a repository secret.
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Extract metadata (tags, labels) for Docker 🏋️
        id: meta
        uses: docker/metadata-action@v4
        with:
          images: wakareadmestats/waka-readme-stats
          tags: |
            type=ref,event=branch
            type=ref,event=pr
            type=semver,pattern={{version}}
            type=semver,pattern={{major}}.{{minor}}
      - name: Build and push Docker image 🏗️
        uses: docker/build-push-action@v3
        with:
          # Only publish from master or release branches; other refs build only.
          push: ${{ github.ref == 'refs/heads/master' || startsWith(github.ref, 'refs/heads/releases') }}
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}

14
.gitignore vendored
View File

@@ -1,6 +1,20 @@
# Environment files:
*.env
# Generated graph images:
*.png
# Library roots:
node_modules/
venv/
# Python caches:
__pycache__/
# Package manager configuration files:
package.json
package-lock.json
# IDE configuration files:
.vscode
.idea

View File

@@ -1,12 +1,12 @@
FROM python:3.9-alpine
ADD requirements.txt /requirements.txt
ENV PYTHONUNBUFFERED 1
ENV PYTHONDONTWRITEBYTECODE 1
WORKDIR /waka-readme-stats
ADD requirements.txt ./requirements.txt
RUN apk add --no-cache g++ jpeg-dev zlib-dev libjpeg make && pip3 install -r requirements.txt
ADD main.py /main.py
ADD loc.py /loc.py
ADD make_bar_graph.py /make_bar_graph.py
ADD colors.json /colors.json
ADD translation.json /translation.json
ENTRYPOINT ["python", "/main.py"]
ADD sources/* ./
ENTRYPOINT python3 /waka-readme-stats/main.py

49
Makefile Normal file
View File

@@ -0,0 +1,49 @@
.ONESHELL:
.DEFAULT_GOAL = help
SHELL = /bin/bash
PATH := venv/bin:node_modules/.bin:$(PATH)
help:
@ # Print help commands
echo "Welcome to 'waka-readme-stats' GitHub Actions!"
echo "The action can be tested locally with: 'make run'."
echo "NB! For local testing Python version 3.6+ and NodeJS version between 14 and 16 are required."
echo "The action image can be built locally with: 'make container'."
echo "NB! For local container building Docker version 20+ is required."
echo "The action directory and image can be cleaned with: 'make clean'."
.PHONY: help
venv:
@ # Install Python virtual environment and dependencies
python3 -m venv venv
pip install --upgrade pip
pip install -r requirements.txt
node_modules:
@ # Install NodeJS dependencies
npm i npm@next-8
npm i vega vega-lite vega-cli canvas
run-locally: venv node_modules
@ # Run action locally
source <(cat .env.example | sed 's/=/=/' | sed 's/^/export /') && python3 ./sources/main.py
.PHONY: run-locally
run-container:
@ # Run action in container
docker build -t waka-readme-stats -f Dockerfile .
docker run --env-file .env.example waka-readme-stats
.PHONY: run-container
clean:
@ # Clean all build files, including: libraries, package manager configs, docker images and containers
rm -rf venv
rm -rf node_modules
rm -f package*.json
docker rm -f waka-readme-stats 2>/dev/null || true
docker rmi $(docker images | grep "waka-readme-stats") 2> /dev/null || true
.PHONY: clean

View File

@@ -11,7 +11,7 @@ inputs:
WAKATIME_API_KEY:
description: 'Your Wakatime API Key'
required: true
SECTION_NAME:
description: 'Name used in readme to scope the updated section'
required: false
@@ -134,7 +134,7 @@ inputs:
runs:
using: 'docker'
image: 'Dockerfile'
image: 'docker://wakareadmestats/waka-readme-stats:master'
branding:
icon: 'activity'

File diff suppressed because it is too large Load Diff

107
loc.py
View File

@@ -1,107 +0,0 @@
import re
import os
import base64
import requests
from github import Github, InputGitAuthor
import datetime
from string import Template
import matplotlib.pyplot as plt
from io import StringIO, BytesIO
from dotenv import load_dotenv
import time
from make_bar_graph import build_graph
class LinesOfCode:
    """Accumulates per-year/per-quarter net lines-of-code statistics for a
    user's repositories via the GitHub REST API, and can plot the result as a
    bar chart and push it to the user's profile repository."""

    def __init__(self, id, username, ghtoken, repositoryData, ignored_repos):
        # repositoryData: GraphQL repository-list response (see main.py).
        # ignored_repos: repository names excluded from the statistics.
        self.id = id
        self.username = username
        self.g = Github(ghtoken)
        self.headers = {"Authorization": "Bearer " + ghtoken}
        self.repositoryData = repositoryData
        self.ignored_repos = ignored_repos

    def calculateLoc(self):
        """Walk every non-ignored repository and accumulate commit stats.

        Returns a nested dict: {year: {quarter: {language: net_lines}}}.
        """
        result = self.repositoryData
        yearly_data = {}
        for repo in result['data']['user']['repositories']['edges']:
            if repo['node']['name'] not in self.ignored_repos:
                self.getCommitStat(repo['node'], yearly_data)
                # Throttle requests to stay under GitHub API rate limits.
                time.sleep(0.7)
        return yearly_data

    def plotLoc(self, yearly_data):
        """Render the bar-chart image and push it to the profile repository."""
        build_graph(yearly_data)
        self.pushChart()

    def run_query_v3(self, endPoint):
        """GET a REST (v3) endpoint; returns [] on 204 No Content, parsed JSON
        otherwise; raises on 401 (invalid token)."""
        # print(endPoint)
        request = requests.get(endPoint, headers=self.headers)
        if request.status_code == 401:
            raise Exception("Invalid token {}.".format(request.status_code))
        elif request.status_code == 204:
            return []
        else:
            return request.json()

    def getQuarter(self, timeStamp):
        # Map an ISO timestamp to its calendar quarter (1-4).
        month = datetime.datetime.fromisoformat(timeStamp).month
        if month >= 1 and month <= 3:
            return 1
        elif month >= 4 and month <= 6:
            return 2
        elif month >= 7 and month <= 9:
            return 3
        elif month >= 10 and month <= 12:
            return 4

    def getCommitStat(self, repoDetails, yearly_data):
        """Add the user's net line changes (additions - deletions) for one
        repository into `yearly_data`, keyed year -> quarter -> primary
        language. Repositories with no primary language are skipped."""
        commitsURL = 'https://api.github.com/repos/' + repoDetails['nameWithOwner'] + '/commits'
        filteredCommitsEndPoint = commitsURL + '?author=' + self.username
        filteredCommitsResult = self.run_query_v3(filteredCommitsEndPoint)
        # This ignores the error message you get when you try to list commits for an empty repository
        if not type(filteredCommitsResult) == list:
            return
        this_year = datetime.datetime.utcnow().year
        for i in range(len(filteredCommitsResult)):
            iso_date = filteredCommitsResult[i]["commit"]["author"]["date"]
            date = re.search(r'\d+-\d+-\d+', iso_date).group(0)
            curr_year = datetime.datetime.fromisoformat(date).year
            # if curr_year != this_year:
            # One extra request per commit to fetch its addition/deletion stats.
            individualCommitEndPoint = commitsURL + '/' + filteredCommitsResult[i]["sha"]
            individualCommitResult = self.run_query_v3(individualCommitEndPoint)
            quarter = self.getQuarter(date)
            if repoDetails['primaryLanguage'] is not None:
                if curr_year not in yearly_data:
                    yearly_data[curr_year] = {}
                if quarter not in yearly_data[curr_year]:
                    yearly_data[curr_year][quarter] = {}
                if repoDetails['primaryLanguage']['name'] not in yearly_data[curr_year][quarter]:
                    yearly_data[curr_year][quarter][repoDetails['primaryLanguage']['name']] = 0
                yearly_data[curr_year][quarter][repoDetails['primaryLanguage']['name']] += (individualCommitResult["stats"]["additions"] - individualCommitResult["stats"]['deletions'])
        # to find total
        # if 'total' not in yearly_data[curr_year]:
        # yearly_data[curr_year]['total']={}
        # if repoDetails['primaryLanguage']['name'] not in yearly_data[curr_year]['total']:
        # yearly_data[curr_year]['total'][repoDetails['primaryLanguage']['name']]=0
        # yearly_data[curr_year]['total'][repoDetails['primaryLanguage']['name']]+=(result[i][1]+result[i][2])

    def pushChart(self):
        """Commit the generated bar_graph.png into the <username>/<username>
        profile repository (create on first run, update afterwards)."""
        repo = self.g.get_repo(f"{self.username}/{self.username}")
        committer = InputGitAuthor('readme-bot', '41898282+github-actions[bot]@users.noreply.github.com')
        with open('bar_graph.png', 'rb') as input_file:
            data = input_file.read()
        try:
            contents = repo.get_contents("charts/bar_graph.png")
            repo.update_file(contents.path, "Charts Updated", data, contents.sha, committer=committer)
        except Exception as e:
            # File does not exist yet - create it instead.
            repo.create_file("charts/bar_graph.png", "Charts Added", data, committer=committer)

593
main.py
View File

@@ -1,593 +0,0 @@
'''
Readme Development Metrics With waka time progress
'''
import re
import os
import base64
from pytz import timezone
import pytz
import requests
from github import Github, GithubException, InputGitAuthor
import datetime
from string import Template
from loc import LinesOfCode
import time
import traceback
import humanize
from urllib.parse import quote
import json
import sys
from datetime import date
import math
from dotenv import load_dotenv
load_dotenv()
# --- Action configuration, read from `INPUT_*` environment variables --------
# Markers delimiting the README section that this action rewrites.
START_COMMENT = f'<!--START_SECTION:{os.getenv("INPUT_SECTION_NAME")}-->'
END_COMMENT = f'<!--END_SECTION:{os.getenv("INPUT_SECTION_NAME")}-->'
# Regex matching everything between the two markers (inclusive).
listReg = f"{START_COMMENT}[\\s\\S]+{END_COMMENT}"
waka_key = os.getenv('INPUT_WAKATIME_API_KEY')
ghtoken = os.getenv('INPUT_GH_TOKEN')
branchName = os.getenv('INPUT_PUSH_BRANCH_NAME')
showTimeZone = os.getenv('INPUT_SHOW_TIMEZONE')
showProjects = os.getenv('INPUT_SHOW_PROJECTS')
showEditors = os.getenv('INPUT_SHOW_EDITORS')
showOs = os.getenv('INPUT_SHOW_OS')
showCommit = os.getenv('INPUT_SHOW_COMMIT')
showLanguage = os.getenv('INPUT_SHOW_LANGUAGE')
show_loc = os.getenv('INPUT_SHOW_LINES_OF_CODE')
show_days_of_week = os.getenv('INPUT_SHOW_DAYS_OF_WEEK')
showLanguagePerRepo = os.getenv('INPUT_SHOW_LANGUAGE_PER_REPO')
showLocChart = os.getenv('INPUT_SHOW_LOC_CHART')
show_profile_view = os.getenv('INPUT_SHOW_PROFILE_VIEWS')
show_short_info = os.getenv('INPUT_SHOW_SHORT_INFO')
locale = os.getenv('INPUT_LOCALE')
commit_by_me = os.getenv('INPUT_COMMIT_BY_ME')
# Comma-separated list of repository names to exclude from LOC statistics.
ignored_repos_name = str(os.getenv('INPUT_IGNORED_REPOS') or '').replace(' ', '').split(',')
show_updated_date = os.getenv('INPUT_SHOW_UPDATED_DATE')
updated_date_format = os.getenv('INPUT_UPDATED_DATE_FORMAT')
commit_message = os.getenv('INPUT_COMMIT_MESSAGE')
commit_username = os.getenv('INPUT_COMMIT_USERNAME')
commit_email = os.getenv('INPUT_COMMIT_EMAIL')
show_total_code_time = os.getenv('INPUT_SHOW_TOTAL_CODE_TIME')
# FIX: the original called `.strip()` directly on `os.getenv(...)`, which
# raises AttributeError at import time when INPUT_SYMBOL_VERSION is unset;
# fall back to symbol version "1" (the documented default) instead.
symbol_version = (os.getenv('INPUT_SYMBOL_VERSION') or '1').strip()
# WakaTime stats are always rendered; kept as a truthy flag for symmetry.
show_waka_stats = 'y'
# The GraphQL query to get commit data.
# Returns the login, email and node id of the authenticated user.
userInfoQuery = """
{
viewer {
login
email
id
}
}
"""
# Repositories the user has contributed to (including own, up to 100).
createContributedRepoQuery = Template("""query {
user(login: "$username") {
repositoriesContributedTo(last: 100, includeUserRepositories: true) {
nodes {
isFork
name
owner {
login
}
}
}
}
}
""")
# Commit dates authored by the user ($id) on a repository's default branch.
createCommittedDateQuery = Template("""
query {
repository(owner: "$owner", name: "$name") {
defaultBranchRef {
target {
... on Commit {
history(first: 100, author: { id: "$id" }) {
edges {
node {
committedDate
}
}
}
}
}
}
}
}
""")
# REST (v3) endpoint path templates, used with run_v3_api().
get_loc_url = Template("""/repos/$owner/$repo/stats/code_frequency""")
get_profile_view = Template("""/repos/$owner/$repo/traffic/views?per=week""")
get_profile_traffic = Template("""/repos/$owner/$repo/traffic/popular/referrers""")
# Lower-cased string values accepted as "true" for the INPUT_* flags.
truthy = ['true', '1', 't', 'y', 'yes']
def run_v3_api(query):
    """GET a GitHub REST (v3) API path and return the parsed JSON.

    Uses the module-level `headers` dict (set in __main__) for authentication.
    Raises Exception on any non-200 status, embedding the response body.
    """
    request = requests.get('https://api.github.com' + query, headers=headers)
    if request.status_code == 200:
        return request.json()
    else:
        raise Exception(
            "Query failed to run by returning code of {}. {},... {}".format(request.status_code, query,
                                                                            str(request.json())))
# Last 100 non-fork repositories the user owns / collaborates on, with the
# user's commit count on "master", primary language, stars and metadata.
# NOTE(review): the commit history is read from the hard-coded "master"
# expression - repositories with another default branch return no object.
repositoryListQuery = Template("""
{
user(login: "$username") {
repositories(orderBy: {field: CREATED_AT, direction: ASC}, last: 100, affiliations: [OWNER, COLLABORATOR, ORGANIZATION_MEMBER], isFork: false) {
totalCount
edges {
node {
object(expression:"master") {
... on Commit {
history (author: { id: "$id" }){
totalCount
}
}
}
primaryLanguage {
color
name
id
}
stargazers {
totalCount
}
collaborators {
totalCount
}
createdAt
name
owner {
id
login
}
nameWithOwner
}
}
}
location
createdAt
name
}
}
""")
def millify(n):
    """Format a number as a short human-readable string, e.g. 1500 -> '2 Thousand'.

    The magnitude suffix is chosen per power of 1000, capped at 'Trillion'.
    """
    suffixes = ['', ' Thousand', ' Million', ' Billion', ' Trillion']
    value = float(n)
    # Pick the 1000-power tier; zero has no defined log, so force tier 0.
    tier = 0 if value == 0 else int(math.floor(math.log10(abs(value)) / 3))
    tier = min(max(tier, 0), len(suffixes) - 1)
    return '{:.0f}{}'.format(value / 10 ** (3 * tier), suffixes[tier])
def run_query(query):
    """POST a GraphQL query to the GitHub API and return the parsed JSON.

    Uses the module-level `headers` dict (set in __main__) for authentication.
    Raises Exception on any non-200 status.
    """
    request = requests.post('https://api.github.com/graphql', json={'query': query}, headers=headers)
    if request.status_code == 200:
        return request.json()
    else:
        raise Exception("Query failed to run by returning code of {}. {}".format(request.status_code, query))
def make_graph(percent: float):
    '''Make progress graph from API graph'''
    # Choose the bar symbols according to the configured symbol version.
    # NOTE(review): all symbol strings appear as empty literals here -
    # presumably the original unicode block characters were lost in transit;
    # confirm against version history before relying on this output.
    if (symbol_version == '1'):  # version 1
        done_block = ''
        empty_block = ''
    elif (symbol_version == '2'):  # version 2
        done_block = ''
        empty_block = ''
    elif (symbol_version == '3'):  # version 3
        done_block = ''
        empty_block = ''
    else:
        done_block = ''  # default is version 1
        empty_block = ''
    # 25-slot bar: one "done" block per 4 percent, remainder filled empty.
    pc_rnd = round(percent)
    return f"{done_block * int(pc_rnd / 4)}{empty_block * int(25 - int(pc_rnd / 4))}"
def make_list(data: list):
    """Render the top five entries as fixed-width text rows.

    Each entry dict must provide 'name', 'text' and 'percent'; the row pads
    name to 25 and text to 20 columns, then appends a progress bar and the
    percentage.
    """
    rows = []
    for entry in data[:5]:
        name_pad = ' ' * (25 - len(entry['name']))
        text_pad = ' ' * (20 - len(entry['text']))
        pct = "{:05.2f}".format(float(entry['percent']))
        bar = make_graph(entry['percent'])
        rows.append(f"{entry['name'][:25]}{name_pad}{entry['text']}{text_pad}{bar} {pct} % ")
    return '\n'.join(rows)
def make_commit_list(data: list):
    """Render the top seven entries as aligned text rows with progress bars.

    Same entry format as make_list, but with commit-list column widths
    (name padded to 15, text to 11, seven spaces before the bar).
    """
    lines = []
    for entry in data[:7]:
        padding = ' ' * ((15 - len(entry['name'])) + (11 - len(entry['text'])))
        pct = "{:05.2f}".format(float(entry['percent']))
        bar = make_graph(entry['percent'])
        lines.append(f"{entry['name']}{padding}{entry['text']}{' ' * 7}{bar} {pct} % ")
    return '\n'.join(lines)
def generate_commit_list(tz):
    """Build the markdown 'commit habits' section: the user's commits bucketed
    by time of day and (optionally) by weekday, in timezone `tz`.

    Queries all non-fork repositories the user contributed to and reads up to
    100 commits from each default branch.
    """
    string = ''
    result = run_query(userInfoQuery)  # Execute the query
    username = result["data"]["viewer"]["login"]
    id = result["data"]["viewer"]["id"]
    # print("user {}".format(username))
    result = run_query(createContributedRepoQuery.substitute(username=username))
    nodes = result["data"]["user"]["repositoriesContributedTo"]["nodes"]
    repos = [d for d in nodes if d['isFork'] is False]
    morning = 0  # 6 - 12
    daytime = 0  # 12 - 18
    evening = 0  # 18 - 24
    night = 0  # 0 - 6
    Monday = 0
    Tuesday = 0
    Wednesday = 0
    Thursday = 0
    Friday = 0
    Saturday = 0
    Sunday = 0
    for repository in repos:
        result = run_query(
            createCommittedDateQuery.substitute(owner=repository["owner"]["login"], name=repository["name"], id=id))
        try:
            committed_dates = result["data"]["repository"]["defaultBranchRef"]["target"]["history"]["edges"]
            for committedDate in committed_dates:
                # Convert the UTC commit timestamp into the user's timezone.
                date = datetime.datetime.strptime(committedDate["node"]["committedDate"],
                                                  "%Y-%m-%dT%H:%M:%SZ").replace(tzinfo=pytz.utc).astimezone(
                    timezone(tz))
                hour = date.hour
                weekday = date.strftime('%A')
                if 6 <= hour < 12:
                    morning += 1
                if 12 <= hour < 18:
                    daytime += 1
                if 18 <= hour < 24:
                    evening += 1
                if 0 <= hour < 6:
                    night += 1
                if weekday == "Monday":
                    Monday += 1
                if weekday == "Tuesday":
                    Tuesday += 1
                if weekday == "Wednesday":
                    Wednesday += 1
                if weekday == "Thursday":
                    Thursday += 1
                if weekday == "Friday":
                    Friday += 1
                if weekday == "Saturday":
                    Saturday += 1
                if weekday == "Sunday":
                    Sunday += 1
        except Exception as ex:
            # Repositories without a default branch make the nested lookup hit
            # None; that specific failure is expected and silently skipped.
            if str(ex) != "'NoneType' object is not subscriptable":
                print("Exception occurred " + str(ex))
    sumAll = morning + daytime + evening + night
    sum_week = Sunday + Monday + Tuesday + Friday + Saturday + Wednesday + Thursday
    title = translate['I am an Early'] if morning + daytime >= evening + night else translate['I am a Night']
    one_day = [
        {"name": "🌞 " + translate['Morning'], "text": str(morning) + " commits",
         "percent": round((morning / sumAll) * 100, 2)},
        {"name": "🌆 " + translate['Daytime'], "text": str(daytime) + " commits",
         "percent": round((daytime / sumAll) * 100, 2)},
        {"name": "🌃 " + translate['Evening'], "text": str(evening) + " commits",
         "percent": round((evening / sumAll) * 100, 2)},
        {"name": "🌙 " + translate['Night'], "text": str(night) + " commits",
         "percent": round((night / sumAll) * 100, 2)},
    ]
    dayOfWeek = [
        {"name": translate['Monday'], "text": str(Monday) + " commits", "percent": round((Monday / sum_week) * 100, 2)},
        {"name": translate['Tuesday'], "text": str(Tuesday) + " commits",
         "percent": round((Tuesday / sum_week) * 100, 2)},
        {"name": translate['Wednesday'], "text": str(Wednesday) + " commits",
         "percent": round((Wednesday / sum_week) * 100, 2)},
        {"name": translate['Thursday'], "text": str(Thursday) + " commits",
         "percent": round((Thursday / sum_week) * 100, 2)},
        {"name": translate['Friday'], "text": str(Friday) + " commits", "percent": round((Friday / sum_week) * 100, 2)},
        {"name": translate['Saturday'], "text": str(Saturday) + " commits",
         "percent": round((Saturday / sum_week) * 100, 2)},
        {"name": translate['Sunday'], "text": str(Sunday) + " commits", "percent": round((Sunday / sum_week) * 100, 2)},
    ]
    string = string + '**' + title + '** \n\n' + '```text\n' + make_commit_list(one_day) + '\n\n```\n'
    if show_days_of_week.lower() in truthy:
        # Find the weekday with the largest share of commits for the headline.
        max_element = {
            'percent': 0
        }
        for day in dayOfWeek:
            if day['percent'] > max_element['percent']:
                max_element = day
        days_title = translate['I am Most Productive on'] % max_element['name']
        string = string + '📅 **' + days_title + '** \n\n' + '```text\n' + make_commit_list(dayOfWeek) + '\n\n```\n'
    return string
def get_waka_time_stats():
    """Build the WakaTime markdown block: commit habits plus the last-7-days
    time breakdown (timezone / languages / editors / projects / OS),
    depending on the configured INPUT_SHOW_* flags."""
    stats = ''
    request = requests.get(
        f"https://wakatime.com/api/v1/users/current/stats/last_7_days?api_key={waka_key}")
    no_activity = translate["No Activity Tracked This Week"]
    # NOTE(review): the 401 test is redundant - `!= 200` already covers it.
    if request.status_code == 401 or request.status_code != 200:
        print("Error With WAKA time API returned " + str(request.status_code) + " Response " + str(request.json()))
    else:
        data = request.json()
        if showCommit.lower() in truthy:
            stats = stats + generate_commit_list(tz=data['data']['timezone']) + '\n\n'
        if showTimeZone.lower() in truthy or showLanguage.lower() in truthy or showEditors.lower() in truthy or \
                showProjects.lower() in truthy or showOs.lower() in truthy:
            # Open the fenced text block only when some section will render.
            stats += '📊 **' + translate['This Week I Spend My Time On'] + '** \n\n'
            stats += '```text\n'
        if showTimeZone.lower() in truthy:
            tzone = data['data']['timezone']
            stats = stats + '⌚︎ ' + translate['Timezone'] + ': ' + tzone + '\n\n'
        if showLanguage.lower() in truthy:
            if len(data['data']['languages']) == 0:
                lang_list = no_activity
            else:
                lang_list = make_list(data['data']['languages'])
            stats = stats + '💬 ' + translate['Languages'] + ': \n' + lang_list + '\n\n'
        if showEditors.lower() in truthy:
            if len(data['data']['editors']) == 0:
                edit_list = no_activity
            else:
                edit_list = make_list(data['data']['editors'])
            stats = stats + '🔥 ' + translate['Editors'] + ': \n' + edit_list + '\n\n'
        if showProjects.lower() in truthy:
            if len(data['data']['projects']) == 0:
                project_list = no_activity
            else:
                # Re-order the project list by percentage
                data['data']['projects'] = sorted(data['data']['projects'], key=lambda x: x["percent"],
                                                  reverse=True)
                project_list = make_list(data['data']['projects'])
            stats = stats + '🐱‍💻 ' + translate['Projects'] + ': \n' + project_list + '\n\n'
        if showOs.lower() in truthy:
            if len(data['data']['operating_systems']) == 0:
                os_list = no_activity
            else:
                os_list = make_list(data['data']['operating_systems'])
            stats = stats + '💻 ' + translate['operating system'] + ': \n' + os_list + '\n\n'
        # NOTE(review): the closing fence is appended even when no opening
        # fence was emitted above - presumably harmless; confirm intended.
        stats += '```\n\n'
    return stats
def generate_language_per_repo(result):
    """Build the 'I Mostly Code in ...' markdown list: how many repositories
    use each primary language, from the GraphQL repository-list `result`.
    Repositories without a primary language are skipped."""
    language_count = {}
    total = 0
    for repo in result['data']['user']['repositories']['edges']:
        if repo['node']['primaryLanguage'] is None:
            continue
        language = repo['node']['primaryLanguage']['name']
        color_code = repo['node']['primaryLanguage']['color']
        total += 1
        if language not in language_count.keys():
            language_count[language] = {}
            language_count[language]['count'] = 1
        else:
            language_count[language]['count'] = language_count[language]['count'] + 1
        language_count[language]['color'] = color_code
    data = []
    # Most-used language first; the top one becomes the section title.
    sorted_labels = list(language_count.keys())
    sorted_labels.sort(key=lambda x: language_count[x]['count'], reverse=True)
    most_language_repo = sorted_labels[0]
    for label in sorted_labels:
        percent = round(language_count[label]['count'] / total * 100, 2)
        extension = " repos"
        if language_count[label]['count'] == 1:
            extension = " repo"
        data.append({
            "name": label,
            "text": str(language_count[label]['count']) + extension,
            "percent": percent
        })
    title = translate['I Mostly Code in'] % most_language_repo
    return '**' + title + '** \n\n' + '```text\n' + make_list(data) + '\n\n```\n'
def get_yearly_data():
    """Compute the per-year LOC statistics and, when INPUT_SHOW_LOC_CHART is
    truthy, plot and push the chart.

    Uses module globals `username`, `id` and `ghtoken` set in __main__.
    """
    repository_list = run_query(repositoryListQuery.substitute(username=username, id=id))
    loc = LinesOfCode(id, username, ghtoken, repository_list, ignored_repos_name)
    yearly_data = loc.calculateLoc()
    if showLocChart.lower() in truthy:
        loc.plotLoc(yearly_data)
    return yearly_data
def get_line_of_code():
    """Return the user's total net lines of code across all years/quarters as
    a short human-readable string (see millify), e.g. '2 Million'."""
    repositoryList = run_query(repositoryListQuery.substitute(username=username, id=id))
    loc = LinesOfCode(id, username, ghtoken, repositoryList, ignored_repos_name)
    yearly_data = loc.calculateLoc()
    total_loc = sum(
        [yearly_data[year][quarter][lang] for year in yearly_data for quarter in yearly_data[year] for lang in
         yearly_data[year][quarter]])
    return millify(int(total_loc))
def get_short_info(github):
    """Build the '🐱 My GitHub Data' markdown block: yearly contributions,
    storage use, hireable flag and public/private repo counts.

    :param github: authenticated `Github` client instance.
    """
    string = '**🐱 ' + translate['My GitHub Data'] + '** \n\n'
    user_info = github.get_user()
    # disk_usage is only populated when the token has the `user` scope.
    if user_info.disk_usage is None:
        disk_usage = humanize.naturalsize(0)
        print("Please add new github personal access token with user permission")
    else:
        disk_usage = humanize.naturalsize(user_info.disk_usage)
    # Third-party service summarising the user's yearly contribution counts.
    request = requests.get('https://github-contributions.vercel.app/api/v1/' + user_info.login)
    if request.status_code == 200 and len(request.json()['years']) > 0:
        this_year_data = request.json()['years'][0]
        total = this_year_data['total']
        year = this_year_data['year']
        string += '> 🏆 ' + translate['Contributions in the year'] % (humanize.intcomma(total), year) + '\n > \n'
    string += '> 📦 ' + translate["Used in GitHub's Storage"] % disk_usage + ' \n > \n'
    is_hireable = user_info.hireable
    public_repo = user_info.public_repos
    private_repo = user_info.owned_private_repos
    if private_repo is None:
        private_repo = 0
    if is_hireable:
        string += "> 💼 " + translate["Opted to Hire"] + "\n > \n"
    else:
        string += "> 🚫 " + translate["Not Opted to Hire"] + "\n > \n"
    string += '> 📜 '
    # Singular/plural translation template is picked by the repo count.
    string += translate['public repositories'] % public_repo + " " + '\n > \n' if public_repo != 1 else translate[
        'public repository'] % public_repo + " " + '\n > \n'
    string += '> 🔑 '
    string += translate['private repositories'] % private_repo + " " + ' \n > \n' if private_repo != 1 else translate[
        'private repository'] % private_repo + " " + '\n > \n'
    return string
def get_stats(github):
    '''Gets API data and returns markdown progress'''
    # Assembles every enabled section, in order: total code time badge,
    # profile views, LOC badge, short info, WakaTime stats, language-per-repo,
    # LOC chart and the last-updated stamp.
    stats = ''
    repositoryList = run_query(repositoryListQuery.substitute(username=username, id=id))
    if show_loc.lower() in truthy or showLocChart.lower() in truthy:
        # The lines-of-code calculation is a heavy process, so it is performed
        # only once here and reused below; this reduces total execution time.
        yearly_data = get_yearly_data()
    if show_total_code_time.lower() in truthy:
        request = requests.get(
            f"https://wakatime.com/api/v1/users/current/all_time_since_today?api_key={waka_key}")
        if request.status_code == 401:
            print("Error With WAKA time API returned " + str(request.status_code) + " Response " + str(request.json()))
        elif "text" not in request.json()["data"]:
            print("User stats are calculating. Try again later.")
        else:
            data = request.json()
            stats += '![Code Time](http://img.shields.io/badge/' + quote(
                str("Code Time")) + '-' + quote(str(
                data['data']['text'])) + '-blue)\n\n'
    if show_profile_view.lower() in truthy:
        data = run_v3_api(get_profile_view.substitute(owner=username, repo=username))
        stats += '![Profile Views](http://img.shields.io/badge/' + quote(str(translate['Profile Views'])) + '-' + str(
            data['count']) + '-blue)\n\n'
    if show_loc.lower() in truthy:
        stats += '![Lines of code](https://img.shields.io/badge/' + quote(
            str(translate['From Hello World I have written'])) + '-' + quote(
            str(get_line_of_code())) + '%20' + quote(str(translate['Lines of code'])) + '-blue)\n\n'
    if show_short_info.lower() in truthy:
        stats += get_short_info(github)
    if show_waka_stats.lower() in truthy:
        stats += get_waka_time_stats()
    if showLanguagePerRepo.lower() in truthy:
        stats = stats + generate_language_per_repo(repositoryList) + '\n\n'
    if showLocChart.lower() in truthy:
        stats += '**' + translate['Timeline'] + '**\n\n'
        branch_name = github.get_repo(f'{username}/{username}').default_branch
        stats = stats + '![Chart not found](https://raw.githubusercontent.com/' + username + '/' + username + '/' + branch_name + '/charts/bar_graph.png) \n\n'
    if show_updated_date.lower() in truthy:
        now = datetime.datetime.utcnow()
        d1 = now.strftime(updated_date_format)
        stats = stats + "\n Last Updated on " + d1 + " UTC"
    return stats
# def star_me():
# requests.put("https://api.github.com/user/starred/anmol098/waka-readme-stats", headers=headers)
def decode_readme(data: str):
    '''Decode the contents of old readme'''
    # GitHub's contents API returns the README body base64-encoded.
    return base64.b64decode(data).decode('utf-8')
def generate_new_readme(stats: str, readme: str):
    '''Generate a new Readme.md'''
    # Splice the freshly rendered stats between the section markers.
    replacement = '\n'.join([START_COMMENT, stats, END_COMMENT])
    return re.sub(listReg, replacement, readme)
if __name__ == '__main__':
    # Entry point: fetch the profile README, render all enabled stats
    # sections and commit the updated README back if it changed.
    try:
        start_time = datetime.datetime.now().timestamp() * 1000  # wall-clock start, ms
        if ghtoken is None:
            raise Exception('Token not available')
        g = Github(ghtoken)
        # Auth header shared by the raw REST/GraphQL helpers in this module.
        headers = {"Authorization": "Bearer " + ghtoken}
        user_data = run_query(userInfoQuery)  # Execute the query
        if "errors" in user_data:
            raise Exception(user_data)
        username = user_data["data"]["viewer"]["login"]
        id = user_data["data"]["viewer"]["id"]
        email = user_data["data"]["viewer"]["email"]
        print("Username " + username)
        repo = g.get_repo(f"{username}/{username}")
        contents = repo.get_readme()
        # FIX: the translation file used to be loaded inside the same try
        # block whose handler referenced `data`; when loading itself failed,
        # the handler crashed with NameError. Load first, then fall back to
        # English only on an unknown locale key.
        with open(os.path.join(os.path.dirname(__file__), 'translation.json'), encoding='utf-8') as config_file:
            data = json.load(config_file)
        try:
            translate = data[locale]
        except KeyError:
            print("Cannot find the Locale choosing default to english")
            translate = data['en']
        waka_stats = get_stats(g)
        # star_me()
        rdmd = decode_readme(contents.content)
        new_readme = generate_new_readme(stats=waka_stats, readme=rdmd)
        if commit_by_me.lower() in truthy:
            committer = InputGitAuthor(username or commit_username, email or commit_email)
        else:
            committer = InputGitAuthor(
                commit_username or 'readme-bot',
                commit_email or '41898282+github-actions[bot]@users.noreply.github.com'
            )
        if new_readme != rdmd:
            try:
                repo.update_file(path=contents.path, message=commit_message,
                                 content=new_readme, sha=contents.sha, branch=branchName,
                                 committer=committer)
            # FIX: was a bare `except:`, which also swallowed SystemExit and
            # KeyboardInterrupt; retry against 'main' in case the configured
            # branch does not exist.
            except Exception:
                repo.update_file(path=contents.path, message=commit_message,
                                 content=new_readme, sha=contents.sha, branch='main',
                                 committer=committer)
            print("Readme updated")
        end_time = datetime.datetime.now().timestamp() * 1000
        # FIX: corrected "miliseconds" typo in the log message.
        print("Program processed in {} milliseconds.".format(round(end_time - start_time, 0)))
    except Exception as e:
        traceback.print_exc()
        print("Exception Occurred " + str(e))

View File

@@ -1,30 +1,8 @@
attrs==20.3.0
certifi==2020.12.5
chardet==4.0.0
cycler==0.10.0
Deprecated==1.2.12
entrypoints==0.3
humanize==3.3.0
idna==2.10
Jinja2==2.11.3
jsonschema==3.2.0
kiwisolver==1.3.1
MarkupSafe==1.1.1
PyGithub==1.54.1
matplotlib==3.6.3
numpy==1.24.2
Pillow==8.2.0
portpicker==1.3.1
PyGithub==1.54.1
PyJWT==1.7.1
pyparsing==2.4.7
pyrsistent==0.17.3
python-dateutil==2.8.1
python-dotenv==0.17.0
pytz==2021.1
requests==2.25.1
selenium==3.141.0
six==1.15.0
toolz==0.11.1
tornado==6.1
urllib3==1.26.5
wrapt==1.12.1
humanize==3.3.0
httpx==0.23.3
PyYAML==6.0

0
sources/colors.json Normal file
View File

202
sources/download_manager.py Normal file
View File

@@ -0,0 +1,202 @@
from hashlib import md5
from json import dumps
from string import Template
from typing import Awaitable, Dict, Callable, Optional
from httpx import AsyncClient
from yaml import safe_load
from github import AuthenticatedUser
# GraphQL queries used against the GitHub API, keyed by identifier.
# Each value is a `string.Template` source; the `$variables` are substituted
# by `DownloadManager.get_remote_graphql` from its keyword arguments.
GITHUB_API_QUERIES = {
    "repositories_contributed_to": """
{
user(login: "$username") {
repositoriesContributedTo(last: 100, includeUserRepositories: true) {
nodes {
isFork
name
owner {
login
}
}
}
}
}""",
    "repository_committed_dates": """
{
repository(owner: "$owner", name: "$name") {
defaultBranchRef {
target {
... on Commit {
history(first: 100, author: { id: "$id" }) {
edges {
node {
committedDate
}
}
}
}
}
}
}
}""",
    "user_repository_list": """
{
user(login: "$username") {
repositories(orderBy: {field: CREATED_AT, direction: ASC}, last: 100, affiliations: [OWNER, COLLABORATOR], isFork: false) {
edges {
node {
primaryLanguage {
name
}
name
owner {
login
}
}
}
}
}
}
""",
    "repository_commit_list": """
{
repository(owner: "$owner", name: "$name") {
refs(refPrefix: "refs/heads/", orderBy: {direction: DESC, field: TAG_COMMIT_DATE}, first: 100) {
edges {
node {
... on Ref {
target {
... on Commit {
history(first: 100, author: { id: "$id" }) {
edges {
node {
... on Commit {
additions
deletions
committedDate
}
}
}
}
}
}
}
}
}
}
}
}
"""
}
async def init_download_manager(waka_key: str, github_key: str, user: AuthenticatedUser):
    """
    Initialize download manager:
    - Setup headers for GitHub GraphQL requests.
    - Launch static queries in background.
    :param waka_key: WakaTime API token.
    :param github_key: GitHub API token.
    :param user: GitHub current user info.
    """
    # NOTE(review): the request coroutines are created here but only awaited
    # on first access by DownloadManager - confirm before changing semantics.
    await DownloadManager.load_remote_resources({
        "linguist": "https://cdn.jsdelivr.net/gh/github/linguist@master/lib/linguist/languages.yml",
        "waka_latest": f"https://wakatime.com/api/v1/users/current/stats/last_7_days?api_key={waka_key}",
        "waka_all": f"https://wakatime.com/api/v1/users/current/all_time_since_today?api_key={waka_key}",
        "github_stats": f"https://github-contributions.vercel.app/api/v1/{user.login}"
    }, {
        "Authorization": f"Bearer {github_key}"
    })
class DownloadManager:
"""
Class for handling and caching all kinds of requests.
There considered to be two types of queries:
- Static queries: queries that don't require many arguments that should be executed once
Example: queries to WakaTime API or to GitHub linguist
- Dynamic queries: queries that require many arguments and should be executed multiple times
Example: GraphQL queries to GitHub API
DownloadManager launches all static queries asynchronously upon initialization and caches their results.
It also executes dynamic queries upon request and caches result.
"""
_client = AsyncClient(timeout=60.0)
_REMOTE_RESOURCES_CACHE = dict()
@staticmethod
async def load_remote_resources(resources: Dict[str, str], github_headers: Dict[str, str]):
"""
Prepare DownloadManager to launch GitHub API queries and launch all static queries.
:param resources: Dictionary of static queries, "IDENTIFIER": "URL".
:param github_headers: Dictionary of headers for GitHub API queries.
"""
for resource, url in resources.items():
DownloadManager._REMOTE_RESOURCES_CACHE[resource] = DownloadManager._client.get(url)
DownloadManager._client.headers = github_headers
@staticmethod
async def _get_remote_resource(resource: str, convertor: Optional[Callable[[bytes], Dict]]) -> Dict:
"""
Receive execution result of static query, wait for it if necessary.
If the query wasn't cached previously, cache it.
NB! Caching is done before response parsing - to throw exception on accessing cached erroneous response.
:param resource: Static query identifier.
:param convertor: Optional function to convert `response.contents` to dict.
By default `response.json()` is used.
:return: Response dictionary.
"""
if isinstance(DownloadManager._REMOTE_RESOURCES_CACHE[resource], Awaitable):
res = await DownloadManager._REMOTE_RESOURCES_CACHE[resource]
DownloadManager._REMOTE_RESOURCES_CACHE[resource] = res
if res.status_code == 200:
if convertor is None:
return res.json()
else:
return convertor(res.content)
else:
raise Exception(f"Query '{res.url}' failed to run by returning code of {res.status_code}: {res.json()}")
@staticmethod
async def get_remote_json(resource: str) -> Dict:
    """
    Shortcut for `_get_remote_resource` parsing the response body as JSON.
    :param resource: Static query identifier.
    :return: Response JSON dictionary.
    """
    return await DownloadManager._get_remote_resource(resource, convertor=None)
@staticmethod
async def get_remote_yaml(resource: str) -> Dict:
    """
    Shortcut for `_get_remote_resource` parsing the response body as YAML.
    :param resource: Static query identifier.
    :return: Response YAML dictionary.
    """
    return await DownloadManager._get_remote_resource(resource, convertor=safe_load)
@staticmethod
async def get_remote_graphql(query: str, **kwargs) -> Dict:
    """
    Execute a GitHub GraphQL API query defined in `GITHUB_API_QUERIES`.

    Results are cached per query identifier + hash of the substituted
    parameters, so identical calls hit the network only once.
    NB! Caching happens before response parsing, so a cached erroneous
    response re-raises on every access.
    :param query: Dynamic query identifier.
    :param kwargs: Parameters for substitution of variables in dynamic query.
    :return: Response JSON dictionary.
    :raises Exception: If the response has a non-200 status code.
    """
    cache_key = f"{query}_{md5(dumps(kwargs, sort_keys=True).encode('utf-8')).digest()}"
    cached = DownloadManager._REMOTE_RESOURCES_CACHE.get(cache_key)
    if cached is None:
        payload = {"query": Template(GITHUB_API_QUERIES[query]).substitute(kwargs)}
        cached = await DownloadManager._client.post("https://api.github.com/graphql", json=payload)
        DownloadManager._REMOTE_RESOURCES_CACHE[cache_key] = cached
    if cached.status_code != 200:
        raise Exception(f"Query '{query}' failed to run by returning code of {cached.status_code}: {cached.json()}")
    return cached.json()

76
sources/loc.py Normal file
View File

@@ -0,0 +1,76 @@
import re
from asyncio import sleep
from github import Github, InputGitAuthor, AuthenticatedUser
import datetime
from download_manager import DownloadManager
from make_bar_graph import build_graph
class LinesOfCode:
    """
    Computes per-language lines-of-code statistics from a user's repositories
    (grouped by year and quarter) and pushes the rendered bar chart to the
    user's profile repository.
    """

    def __init__(self, user: AuthenticatedUser, ghtoken, repositoryData, ignored_repos):
        """
        :param user: Authenticated GitHub user the stats are computed for.
        :param ghtoken: GitHub access token used for pushing the chart.
        :param repositoryData: GraphQL response dict with the user repository list.
        :param ignored_repos: Iterable of repository names to skip.
        """
        self.g = Github(ghtoken)
        self.user = user
        self.repositoryData = repositoryData
        self.ignored_repos = ignored_repos

    async def calculateLoc(self):
        """
        Walk every non-ignored repository and accumulate LOC deltas.
        :return: Nested dict: {year: {quarter: {language: loc_delta}}}.
        """
        result = self.repositoryData
        yearly_data = {}
        edges = result['data']['user']['repositories']['edges']
        total = len(edges)
        for ind, repo in enumerate(edges):
            if repo['node']['name'] not in self.ignored_repos:
                print(f"{ind}/{total}", "Retrieving repo:", repo['node']["owner"]["login"], repo['node']['name'])
                await self.getCommitStat(repo['node'], yearly_data)
                # Throttle requests to stay within GitHub API rate limits.
                await sleep(0.7)
        return yearly_data

    async def plotLoc(self, yearly_data):
        """
        Render the yearly LOC chart and push it to the profile repository.
        :param yearly_data: Output of `calculateLoc`.
        """
        await build_graph(yearly_data)
        self.pushChart()

    def getQuarter(self, timeStamp):
        """
        Map an ISO date/timestamp string to its calendar quarter.
        :param timeStamp: ISO-format date string (e.g. "2023-02-16").
        :return: Quarter number, 1-4.
        """
        month = datetime.datetime.fromisoformat(timeStamp).month
        # Fix: replaces a chained if/elif ladder that silently returned None
        # for any value outside 1-12; month is always 1-12, so plain
        # arithmetic is both equivalent and total.
        return (month - 1) // 3 + 1

    async def getCommitStat(self, repoDetails, yearly_data):
        """
        Accumulate additions minus deletions of the user's commits in one
        repository into `yearly_data` (mutated in place).
        :param repoDetails: Repository node dict from the GraphQL repo list.
        :param yearly_data: Nested dict being filled: {year: {quarter: {language: loc}}}.
        """
        commit_data = await DownloadManager.get_remote_graphql("repository_commit_list", owner=repoDetails["owner"]["login"], name=repoDetails['name'], id=self.user.node_id)
        if commit_data["data"]["repository"] is None:
            # Repository not accessible (e.g. deleted or permissions changed).
            print("\tSkipping:", repoDetails['name'])
            return
        commits = [commit["node"] for branch in commit_data["data"]["repository"]["refs"]["edges"] for commit in branch["node"]["target"]["history"]["edges"]]
        for commit in commits:
            date = re.search(r'\d+-\d+-\d+', commit["committedDate"]).group(0)
            curr_year = datetime.datetime.fromisoformat(date).year
            quarter = self.getQuarter(date)
            # Repositories without a primary language are not counted.
            if repoDetails['primaryLanguage'] is not None:
                language = repoDetails['primaryLanguage']['name']
                quarter_data = yearly_data.setdefault(curr_year, {}).setdefault(quarter, {})
                quarter_data[language] = quarter_data.get(language, 0) + (commit["additions"] - commit["deletions"])

    def pushChart(self):
        """
        Commit 'bar_graph.png' to the 'charts/' folder of the user's profile
        repository, creating the file if it does not exist yet.
        """
        repo = self.g.get_repo(f"{self.user.login}/{self.user.login}")
        committer = InputGitAuthor('readme-bot', '41898282+github-actions[bot]@users.noreply.github.com')
        with open('bar_graph.png', 'rb') as input_file:
            data = input_file.read()
        try:
            contents = repo.get_contents("charts/bar_graph.png")
            repo.update_file(contents.path, "Charts Updated", data, contents.sha, committer=committer)
        except Exception:
            # Fix: dropped the unused 'as e' binding. The chart file does not
            # exist yet on first run - create it instead.
            repo.create_file("charts/bar_graph.png", "Charts Added", data, committer=committer)

457
sources/main.py Normal file
View File

@@ -0,0 +1,457 @@
'''
Readme Development Metrics With waka time progress
'''
import re
import os
import base64
from asyncio import run
from typing import Dict
from pytz import timezone
import pytz
from github import Github, InputGitAuthor, AuthenticatedUser
import datetime
from download_manager import init_download_manager, DownloadManager
from loc import LinesOfCode
import humanize
from urllib.parse import quote
import json
import math
from dotenv import load_dotenv

# Pull INPUT_* configuration from a local .env file when running outside CI.
load_dotenv()

# Markers delimiting the auto-generated section inside the README.
START_COMMENT = f'<!--START_SECTION:{os.getenv("INPUT_SECTION_NAME")}-->'
END_COMMENT = f'<!--END_SECTION:{os.getenv("INPUT_SECTION_NAME")}-->'
listReg = f"{START_COMMENT}[\\s\\S]+{END_COMMENT}"

# Action inputs, all provided through INPUT_* environment variables.
waka_key = os.getenv('INPUT_WAKATIME_API_KEY')
ghtoken = os.getenv('INPUT_GH_TOKEN')
branchName = os.getenv('INPUT_PUSH_BRANCH_NAME')
showTimeZone = os.getenv('INPUT_SHOW_TIMEZONE')
showProjects = os.getenv('INPUT_SHOW_PROJECTS')
showEditors = os.getenv('INPUT_SHOW_EDITORS')
showOs = os.getenv('INPUT_SHOW_OS')
showCommit = os.getenv('INPUT_SHOW_COMMIT')
showLanguage = os.getenv('INPUT_SHOW_LANGUAGE')
show_loc = os.getenv('INPUT_SHOW_LINES_OF_CODE')
show_days_of_week = os.getenv('INPUT_SHOW_DAYS_OF_WEEK')
showLanguagePerRepo = os.getenv('INPUT_SHOW_LANGUAGE_PER_REPO')
showLocChart = os.getenv('INPUT_SHOW_LOC_CHART')
show_profile_view = os.getenv('INPUT_SHOW_PROFILE_VIEWS')
show_short_info = os.getenv('INPUT_SHOW_SHORT_INFO')
locale = os.getenv('INPUT_LOCALE')
commit_by_me = os.getenv('INPUT_COMMIT_BY_ME')
ignored_repos_name = str(os.getenv('INPUT_IGNORED_REPOS') or '').replace(' ', '').split(',')
show_updated_date = os.getenv('INPUT_SHOW_UPDATED_DATE')
updated_date_format = os.getenv('INPUT_UPDATED_DATE_FORMAT')
commit_message = os.getenv('INPUT_COMMIT_MESSAGE')
commit_username = os.getenv('INPUT_COMMIT_USERNAME')
commit_email = os.getenv('INPUT_COMMIT_EMAIL')
show_total_code_time = os.getenv('INPUT_SHOW_TOTAL_CODE_TIME')
# Fix: default to '1' so a missing INPUT_SYMBOL_VERSION no longer crashes with
# AttributeError on None; '1' matches make_graph's own fallback symbol set.
symbol_version = (os.getenv('INPUT_SYMBOL_VERSION') or '1').strip()
show_waka_stats = 'y'  # WakaTime stats are always rendered.
truthy = ['true', '1', 't', 'y', 'yes']  # accepted truthy values (compared lowercased)
translate: Dict[str, str]  # active locale translation table, assigned in main()
user: AuthenticatedUser  # authenticated GitHub user, assigned in main()
def millify(n):
    """
    Convert a number into a short human-readable form, e.g. 1500 -> '2 Thousand'.
    :param n: Numeric value (anything float() accepts).
    :return: Rounded value with an English magnitude suffix.
    """
    suffixes = ['', ' Thousand', ' Million', ' Billion', ' Trillion']
    value = float(n)
    # Pick the magnitude tier from the decimal exponent, clamped to the
    # available suffixes (zero is a special case: log10 is undefined).
    tier = 0 if value == 0 else int(math.floor(math.log10(abs(value)) / 3))
    tier = max(0, min(tier, len(suffixes) - 1))
    return f"{value / 10 ** (3 * tier):.0f}{suffixes[tier]}"
def make_graph(percent: float):
    '''
    Make a 25-character progress bar for the given percentage.

    :param percent: Percentage of the bar to fill (0-100).
    :return: String of 25 "done"/"empty" symbols, one symbol per 4 percent.
    '''
    # NOTE(review): the done/empty symbols below appear as empty strings in this
    # copy - they look like stripped unicode block characters (the three symbol
    # versions select different bar glyphs); confirm against VCS history.
    if (symbol_version == '1'):  # version 1
        done_block = ''
        empty_block = ''
    elif (symbol_version == '2'):  # version 2
        done_block = ''
        empty_block = ''
    elif (symbol_version == '3'):  # version 3
        done_block = ''
        empty_block = ''
    else:
        done_block = ''  # default is version 1
        empty_block = ''
    # Round to the nearest percent, then scale to 25 slots (4% per slot).
    pc_rnd = round(percent)
    return f"{done_block * int(pc_rnd / 4)}{empty_block * int(25 - int(pc_rnd / 4))}"
def make_list(data: list):
    '''
    Render the top five entries as aligned text rows with progress bars.
    :param data: List of dicts with 'name', 'text' and 'percent' keys.
    :return: Newline-joined rows: name (25 cols), text (20 cols), bar, percent.
    '''
    rows = []
    for entry in data[:5]:
        name_pad = ' ' * (25 - len(entry['name']))
        text_pad = ' ' * (20 - len(entry['text']))
        percent = "{:05.2f}".format(float(entry['percent']))
        rows.append(f"{entry['name'][:25]}{name_pad}{entry['text']}{text_pad}{make_graph(entry['percent'])} {percent} % ")
    return '\n'.join(rows)
def make_commit_list(data: list):
    '''
    Render the top seven entries as aligned text rows with progress bars.
    :param data: List of dicts with 'name', 'text' and 'percent' keys.
    :return: Newline-joined rows with commit-chart column widths.
    '''
    rows = []
    for entry in data[:7]:
        # (15 - len(name)) + (11 - len(text)) spaces keep the columns aligned.
        padding = ' ' * ((15 - len(entry['name'])) + (11 - len(entry['text'])))
        percent = "{:05.2f}".format(float(entry['percent']))
        rows.append(f"{entry['name']}{padding}{entry['text']}{' ' * 7}{make_graph(entry['percent'])} {percent} % ")
    return '\n'.join(rows)
async def generate_commit_list(tz):
    """
    Build the "commit habits" markdown section: commit counts per part of day
    and, when enabled, per day of week.

    :param tz: Timezone name (taken from WakaTime data) used to localize
        commit timestamps before bucketing.
    :return: Markdown string containing one or two text charts.
    """
    string = ''
    result = await DownloadManager.get_remote_graphql("repositories_contributed_to", username=user.login)
    nodes = result["data"]["user"]["repositoriesContributedTo"]["nodes"]
    # Forked repositories are excluded from the commit statistics.
    repos = [d for d in nodes if d['isFork'] is False]
    morning = 0  # 6 - 12
    daytime = 0  # 12 - 18
    evening = 0  # 18 - 24
    night = 0  # 0 - 6
    Monday = 0
    Tuesday = 0
    Wednesday = 0
    Thursday = 0
    Friday = 0
    Saturday = 0
    Sunday = 0
    for repository in repos:
        # One GraphQL query per repository: the user's commit dates on the
        # default branch.
        result = await DownloadManager.get_remote_graphql("repository_committed_dates", owner=repository["owner"]["login"], name=repository["name"], id=user.node_id)
        committed_dates = result["data"]["repository"]["defaultBranchRef"]["target"]["history"]["edges"]
        for committedDate in committed_dates:
            # Timestamps arrive in UTC; convert to the user's timezone before
            # bucketing by hour and weekday.
            date = datetime.datetime.strptime(committedDate["node"]["committedDate"], "%Y-%m-%dT%H:%M:%SZ").replace(tzinfo=pytz.utc).astimezone(timezone(tz))
            hour = date.hour
            weekday = date.strftime('%A')
            if 6 <= hour < 12:
                morning += 1
            if 12 <= hour < 18:
                daytime += 1
            if 18 <= hour < 24:
                evening += 1
            if 0 <= hour < 6:
                night += 1
            if weekday == "Monday":
                Monday += 1
            if weekday == "Tuesday":
                Tuesday += 1
            if weekday == "Wednesday":
                Wednesday += 1
            if weekday == "Thursday":
                Thursday += 1
            if weekday == "Friday":
                Friday += 1
            if weekday == "Saturday":
                Saturday += 1
            if weekday == "Sunday":
                Sunday += 1
    # NOTE(review): if the user has no commits at all, sumAll/sum_week are 0 and
    # the percentage computations below raise ZeroDivisionError - confirm whether
    # callers guarantee at least one commit.
    sumAll = morning + daytime + evening + night
    sum_week = Sunday + Monday + Tuesday + Friday + Saturday + Wednesday + Thursday
    title = translate['I am an Early'] if morning + daytime >= evening + night else translate['I am a Night']
    one_day = [
        {"name": "🌞 " + translate['Morning'], "text": str(morning) + " commits",
         "percent": round((morning / sumAll) * 100, 2)},
        {"name": "🌆 " + translate['Daytime'], "text": str(daytime) + " commits",
         "percent": round((daytime / sumAll) * 100, 2)},
        {"name": "🌃 " + translate['Evening'], "text": str(evening) + " commits",
         "percent": round((evening / sumAll) * 100, 2)},
        {"name": "🌙 " + translate['Night'], "text": str(night) + " commits",
         "percent": round((night / sumAll) * 100, 2)},
    ]
    dayOfWeek = [
        {"name": translate['Monday'], "text": str(Monday) + " commits", "percent": round((Monday / sum_week) * 100, 2)},
        {"name": translate['Tuesday'], "text": str(Tuesday) + " commits",
         "percent": round((Tuesday / sum_week) * 100, 2)},
        {"name": translate['Wednesday'], "text": str(Wednesday) + " commits",
         "percent": round((Wednesday / sum_week) * 100, 2)},
        {"name": translate['Thursday'], "text": str(Thursday) + " commits",
         "percent": round((Thursday / sum_week) * 100, 2)},
        {"name": translate['Friday'], "text": str(Friday) + " commits", "percent": round((Friday / sum_week) * 100, 2)},
        {"name": translate['Saturday'], "text": str(Saturday) + " commits",
         "percent": round((Saturday / sum_week) * 100, 2)},
        {"name": translate['Sunday'], "text": str(Sunday) + " commits", "percent": round((Sunday / sum_week) * 100, 2)},
    ]
    string = string + '**' + title + '** \n\n' + '```text\n' + make_commit_list(one_day) + '\n\n```\n'
    if show_days_of_week.lower() in truthy:
        # Find the weekday with the highest share of commits for the title.
        max_element = {
            'percent': 0
        }
        for day in dayOfWeek:
            if day['percent'] > max_element['percent']:
                max_element = day
        days_title = translate['I am Most Productive on'] % max_element['name']
        string = string + '📅 **' + days_title + '** \n\n' + '```text\n' + make_commit_list(dayOfWeek) + '\n\n```\n'
    return string
async def get_waka_time_stats():
    """
    Build the WakaTime section of the README: commit habits plus the weekly
    timezone / language / editor / project / OS breakdown, each gated by its
    own INPUT_SHOW_* option.

    :return: Markdown string (possibly empty pieces joined together).
    """
    stats = ''
    no_activity = translate["No Activity Tracked This Week"]
    data = await DownloadManager.get_remote_json("waka_latest")
    if showCommit.lower() in truthy:
        stats = stats + await generate_commit_list(data['data']['timezone']) + '\n\n'
    # Open the shared code fence only if at least one weekly breakdown is shown.
    if showTimeZone.lower() in truthy or showLanguage.lower() in truthy or showEditors.lower() in truthy or showProjects.lower() in truthy or showOs.lower() in truthy:
        stats += '📊 **' + translate['This Week I Spend My Time On'] + '** \n\n'
        stats += '```text\n'
    if showTimeZone.lower() in truthy:
        tzone = data['data']['timezone']
        stats = stats + '⌚︎ ' + translate['Timezone'] + ': ' + tzone + '\n\n'
    if showLanguage.lower() in truthy:
        if len(data['data']['languages']) == 0:
            lang_list = no_activity
        else:
            lang_list = make_list(data['data']['languages'])
        stats = stats + '💬 ' + translate['Languages'] + ': \n' + lang_list + '\n\n'
    if showEditors.lower() in truthy:
        if len(data['data']['editors']) == 0:
            edit_list = no_activity
        else:
            edit_list = make_list(data['data']['editors'])
        stats = stats + '🔥 ' + translate['Editors'] + ': \n' + edit_list + '\n\n'
    if showProjects.lower() in truthy:
        if len(data['data']['projects']) == 0:
            project_list = no_activity
        else:
            # Re-order the project list by percentage
            data['data']['projects'] = sorted(data['data']['projects'], key=lambda x: x["percent"],
                                              reverse=True)
            project_list = make_list(data['data']['projects'])
        stats = stats + '🐱‍💻 ' + translate['Projects'] + ': \n' + project_list + '\n\n'
    if showOs.lower() in truthy:
        if len(data['data']['operating_systems']) == 0:
            os_list = no_activity
        else:
            os_list = make_list(data['data']['operating_systems'])
        stats = stats + '💻 ' + translate['operating system'] + ': \n' + os_list + '\n\n'
    # NOTE(review): this closing fence is appended unconditionally, even when the
    # opening '```text' above was skipped - confirm whether all-off option
    # combinations are expected to produce a stray fence.
    stats += '```\n\n'
    return stats
def generate_language_per_repo(result):
    """
    Build the "I Mostly Code in X" markdown section from the primary language
    of each repository.

    :param result: GraphQL response dict with the user repository list.
    :return: Markdown string with a per-language repo-count chart; empty
        string when no repository has a primary language.
    """
    # Count repositories per primary language; repos without one are skipped.
    language_count = {}
    total = 0
    for repo in result['data']['user']['repositories']['edges']:
        language_node = repo['node']['primaryLanguage']
        if language_node is None:
            continue
        language = language_node['name']
        total += 1
        language_count[language] = language_count.get(language, 0) + 1
    # Fix: previously sorted_labels[0] raised IndexError when no repository
    # had a primary language (e.g. a fresh account or docs-only repos).
    if total == 0:
        return ''
    sorted_labels = sorted(language_count, key=language_count.get, reverse=True)
    most_language_repo = sorted_labels[0]
    data = []
    for label in sorted_labels:
        count = language_count[label]
        percent = round(count / total * 100, 2)
        extension = " repo" if count == 1 else " repos"
        data.append({
            "name": label,
            "text": str(count) + extension,
            "percent": percent
        })
    title = translate['I Mostly Code in'] % most_language_repo
    return '**' + title + '** \n\n' + '```text\n' + make_list(data) + '\n\n```\n'
async def get_yearly_data():
    """
    Fetch the user's repository list and compute yearly LOC data, plotting the
    chart when INPUT_SHOW_LOC_CHART is enabled.
    :return: Nested dict: {year: {quarter: {language: loc}}}.
    """
    repository_list = await DownloadManager.get_remote_graphql("user_repository_list", username=user.login, id=user.node_id)
    calculator = LinesOfCode(user, ghtoken, repository_list, ignored_repos_name)
    yearly_data = await calculator.calculateLoc()
    if showLocChart.lower() in truthy:
        await calculator.plotLoc(yearly_data)
    return yearly_data
async def get_line_of_code() -> str:
    """
    Compute the user's total lines of code across all years, quarters and
    languages, formatted in short human-readable form.
    :return: Millified LOC total, e.g. '2 Million'.
    """
    repository_list = await DownloadManager.get_remote_graphql("user_repository_list", username=user.login, id=user.node_id)
    loc_counter = LinesOfCode(user, ghtoken, repository_list, ignored_repos_name)
    yearly_data = await loc_counter.calculateLoc()
    total_loc = 0
    for quarters in yearly_data.values():
        for languages in quarters.values():
            total_loc += sum(languages.values())
    return millify(int(total_loc))
async def get_short_info():
    """
    Build the "My GitHub Data" markdown section: yearly contributions, storage
    usage, hireability and public/private repository counts.

    :return: Markdown string of blockquote lines.
    """
    string = '**🐱 ' + translate['My GitHub Data'] + '** \n\n'
    if user.disk_usage is None:
        # disk_usage is only populated when the token carries user permission.
        disk_usage = humanize.naturalsize(0)
        print("Please add new github personal access token with user permission")
    else:
        disk_usage = humanize.naturalsize(user.disk_usage)
    data = await DownloadManager.get_remote_json("github_stats")
    if len(data['years']) > 0:
        # First entry is the most recent year of contribution data.
        this_year_data = data['years'][0]
        total = this_year_data['total']
        year = this_year_data['year']
        string += '> 🏆 ' + translate['Contributions in the year'] % (humanize.intcomma(total), year) + '\n > \n'
    string += '> 📦 ' + translate["Used in GitHub's Storage"] % disk_usage + ' \n > \n'
    is_hireable = user.hireable
    public_repo = user.public_repos
    private_repo = user.owned_private_repos
    if private_repo is None:
        private_repo = 0
    if is_hireable:
        string += "> 💼 " + translate["Opted to Hire"] + "\n > \n"
    else:
        string += "> 🚫 " + translate["Not Opted to Hire"] + "\n > \n"
    string += '> 📜 '
    # Singular/plural translation template chosen by repository count.
    string += translate['public repositories'] % public_repo + " " + '\n > \n' if public_repo != 1 else translate[
        'public repository'] % public_repo + " " + '\n > \n'
    string += '> 🔑 '
    string += translate['private repositories'] % private_repo + " " + ' \n > \n' if private_repo != 1 else translate[
        'private repository'] % private_repo + " " + '\n > \n'
    return string
async def get_stats(github) -> str:
    """
    Assemble the full README stats section from all enabled sub-sections.

    :param github: Authenticated PyGithub client (used for profile views and
        default-branch lookups).
    :return: Complete markdown string to place between the section markers.
    """
    stats = ''
    # NOTE(review): repositoryList is fetched unconditionally but only consumed
    # by the language-per-repo branch below - the GraphQL result is cached by
    # DownloadManager, so this is presumably cheap; confirm.
    repositoryList = await DownloadManager.get_remote_graphql("user_repository_list", username=user.login, id=user.node_id)
    if show_loc.lower() in truthy or showLocChart.lower() in truthy:
        # Lines-of-code calculation is expensive, so it is triggered once here
        # (the per-query cache makes later calls in get_line_of_code cheap).
        await get_yearly_data()
    if show_total_code_time.lower() in truthy:
        data = await DownloadManager.get_remote_json("waka_all")
        stats += '![Code Time](http://img.shields.io/badge/' + quote(
            str("Code Time")) + '-' + quote(str(
            data['data']['text'])) + '-blue)\n\n'
    if show_profile_view.lower() in truthy:
        data = github.get_repo(f"{user.login}/{user.login}").get_views_traffic(per="week")
        stats += '![Profile Views](http://img.shields.io/badge/' + quote(str(translate['Profile Views'])) + '-' + str(
            data['count']) + '-blue)\n\n'
    if show_loc.lower() in truthy:
        stats += '![Lines of code](https://img.shields.io/badge/' + quote(
            str(translate['From Hello World I have written'])) + '-' + quote(
            str(await get_line_of_code())) + '%20' + quote(str(translate['Lines of code'])) + '-blue)\n\n'
    if show_short_info.lower() in truthy:
        stats += await get_short_info()
    # show_waka_stats is the constant 'y', so this branch always runs.
    if show_waka_stats.lower() in truthy:
        stats += await get_waka_time_stats()
    if showLanguagePerRepo.lower() in truthy:
        stats = stats + generate_language_per_repo(repositoryList) + '\n\n'
    if showLocChart.lower() in truthy:
        stats += '**' + translate['Timeline'] + '**\n\n'
        # Link the chart that plotLoc pushed to the profile repo's default branch.
        branch_name = github.get_repo(f'{user.login}/{user.login}').default_branch
        stats = stats + '![Chart not found](https://raw.githubusercontent.com/' + user.login + '/' + user.login + '/' + branch_name + '/charts/bar_graph.png) \n\n'
    if show_updated_date.lower() in truthy:
        now = datetime.datetime.utcnow()
        d1 = now.strftime(updated_date_format)
        stats = stats + "\n Last Updated on " + d1 + " UTC"
    return stats
def decode_readme(data: str):
    """
    Decode the base64-encoded contents of the existing README.
    :param data: Base64 payload as returned by the GitHub contents API.
    :return: Decoded README text (UTF-8).
    """
    return base64.b64decode(data).decode('utf-8')
def generate_new_readme(stats: str, readme: str):
    """
    Generate a new README by replacing everything between the start/end
    section markers with freshly generated stats.

    :param stats: Generated stats markdown to insert.
    :param readme: Current README contents.
    :return: Updated README text.
    """
    stats_in_readme = f"{START_COMMENT}\n{stats}\n{END_COMMENT}"
    # Fix: use a callable replacement so that backslashes or group references
    # (e.g. "\g<...>", "\1") occurring in the generated stats are inserted
    # literally instead of being interpreted by re.sub's replacement-string
    # escape processing (which could raise or corrupt the output).
    return re.sub(listReg, lambda _: stats_in_readme, readme)
async def main():
    """
    Entry point: authenticate, collect all stats, regenerate the README
    section and commit it back to the profile repository.

    :raises Exception: If the GitHub token is not provided.
    """
    global translate, user
    if ghtoken is None:
        raise Exception('Token not available')
    user = Github(ghtoken).get_user()
    print(f"Current user: {user.login}")
    await init_download_manager(waka_key, ghtoken, user)
    # Load translations once; fall back to English when the requested locale is
    # absent. Fix: the file is now read outside the try block - previously a
    # failed load raised NameError inside the handler ('data' was undefined).
    with open(os.path.join(os.path.dirname(__file__), 'translation.json'), encoding='utf-8') as config_file:
        data = json.load(config_file)
    try:
        translate = data[locale]
    except KeyError:
        print("Cannot find the Locale choosing default to english")
        translate = data['en']
    g = Github(ghtoken)
    waka_stats = await get_stats(g)
    repo = g.get_repo(f"{user.login}/{user.login}")
    contents = repo.get_readme()
    rdmd = decode_readme(contents.content)
    new_readme = generate_new_readme(stats=waka_stats, readme=rdmd)
    # Commit either as the user themselves or as the github-actions bot.
    if commit_by_me.lower() in truthy:
        committer = InputGitAuthor(user.login or commit_username, user.email or commit_email)
    else:
        committer = InputGitAuthor(
            commit_username or 'readme-bot',
            commit_email or '41898282+github-actions[bot]@users.noreply.github.com'
        )
    if new_readme != rdmd:
        try:
            repo.update_file(path=contents.path, message=commit_message,
                             content=new_readme, sha=contents.sha, branch=branchName,
                             committer=committer)
        except Exception:
            # Fix: narrowed from a bare 'except:' (which also swallowed
            # SystemExit/KeyboardInterrupt); retry on the 'main' branch as a
            # fallback when the configured branch does not exist.
            repo.update_file(path=contents.path, message=commit_message,
                             content=new_readme, sha=contents.sha, branch='main',
                             committer=committer)
    print("Readme updated")
if __name__ == '__main__':
    # Time the whole run in milliseconds for the final log line.
    start_time = datetime.datetime.now().timestamp() * 1000
    run(main())
    end_time = datetime.datetime.now().timestamp() * 1000
    # Fix: corrected "miliseconds" typo in the user-facing message.
    print(f"Program processed in {round(end_time - start_time, 0)} milliseconds.")

View File

@@ -6,11 +6,13 @@ import numpy as np
import matplotlib.patches as mpatches
import matplotlib.pyplot as plt
from download_manager import DownloadManager
MAX_LANGUAGES = 5
def build_graph(yearly_data: Dict) -> str:
async def build_graph(yearly_data: Dict) -> str:
"""
Draws graph of lines of code written by user by quarters of years.
Picks top `MAX_LANGUAGES` languages from each quarter only.
@@ -18,8 +20,7 @@ def build_graph(yearly_data: Dict) -> str:
:param yearly_data: GitHub user yearly data.
:return: String, path to graph file.
"""
with open(join(dirname(__file__), "colors.json")) as f:
colors = load(f)
colors = await DownloadManager.get_remote_yaml("linguist")
languages_all_loc = dict()
years = len(yearly_data.keys())