You've already forked wakapi-readme-stats
Merge remote-tracking branch 'origin/Alpha' into dev
# Conflicts: # main.py
This commit is contained in:
5
.gitignore
vendored
Normal file
5
.gitignore
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
*.env
|
||||
*.png
|
||||
node_modules/
|
||||
__pycache__/
|
||||
|
||||
14
Dockerfile
14
Dockerfile
@@ -1,8 +1,18 @@
|
||||
FROM python:latest
|
||||
FROM nikolaik/python-nodejs:latest
|
||||
|
||||
# Install dependencies.
|
||||
ADD requirements.txt /requirements.txt
|
||||
ADD main.py /main.py
|
||||
ADD loc.py /loc.py
|
||||
ADD make_bar_graph.py /make_bar_graph.py
|
||||
ADD colors.json /colors.json
|
||||
RUN pip install -r requirements.txt
|
||||
|
||||
CMD ["python", "/main.py"]
|
||||
ENV NPM_CONFIG_PREFIX=/home/node/.npm-global
|
||||
|
||||
RUN npm -g config set user root
|
||||
|
||||
RUN npm install -g vega-lite vega-cli canvas
|
||||
|
||||
ENTRYPOINT ["python", "/main.py"]
|
||||
|
||||
|
||||
21
README.md
21
README.md
@@ -183,6 +183,27 @@ PhpStorm 1 hr 35 mins ████░░░░░░░
|
||||
PyCharm 23 mins █░░░░░░░░░░░░░░░░░░░░░░░░ 4.49%
|
||||
```
|
||||
|
||||
## :sparkling_heart: Support the project
|
||||
|
||||
I open-source almost everything I can, and I try to reply to everyone needing help using these projects. Obviously,
|
||||
this takes time. You can use this service for free.
|
||||
|
||||
However, if you are using this project and happy with it or just want to encourage me to continue creating stuff, there are a few ways you can do it :-
|
||||
|
||||
- Giving proper credit when you use this action on your readme, linking back to it :D
|
||||
- Starring and sharing the project :rocket:
|
||||
- [](https://www.paypal.me/aapreneur) - You can make one-time donations via PayPal. I'll probably buy a ~~beer~~ wine 🍷
|
||||
|
||||
Thanks! :heart:
|
||||
|
||||
---
|
||||
|
||||
Contributions are welcomed! ♥
|
||||
|
||||
Made with :heart: and Python 🐍.
|
||||
|
||||
|
||||
|
||||
> This project is inspired by an awesome pinned-gist project [Awesome Pinned Gists](https://github.com/matchai/awesome-pinned-gists) <br/>
|
||||
> This project is inspired by [athul/waka-readme](https://github.com/athul/waka-readme)
|
||||
|
||||
|
||||
@@ -52,6 +52,15 @@ inputs:
|
||||
description: "Show the Total Lines of code written Badge till date"
|
||||
default: "False"
|
||||
|
||||
SHOW_LANGUAGE_PER_REPO:
|
||||
required: false
|
||||
description: ""
|
||||
default: "True"
|
||||
|
||||
SHOW_LOC_CHART:
|
||||
required: false
|
||||
description: ""
|
||||
default: "True"
|
||||
|
||||
|
||||
|
||||
|
||||
1618
colors.json
Normal file
1618
colors.json
Normal file
File diff suppressed because it is too large
Load Diff
0
geckodriver.log
Normal file
0
geckodriver.log
Normal file
94
loc.py
Normal file
94
loc.py
Normal file
@@ -0,0 +1,94 @@
|
||||
import re
|
||||
import os
|
||||
import base64
|
||||
import requests
|
||||
from github import Github
|
||||
import datetime
|
||||
from string import Template
|
||||
import matplotlib.pyplot as plt
|
||||
from io import StringIO, BytesIO
|
||||
from dotenv import load_dotenv
|
||||
import time
|
||||
|
||||
from make_bar_graph import BarGraph
|
||||
|
||||
|
||||
class LinesOfCode:
    """Aggregates lines-of-code statistics for a user's repositories,
    renders them as a bar chart, and pushes the chart to the user's
    profile repository (<username>/<username>)."""

    def __init__(self, id, username, ghtoken, repositoryData):
        # id: GraphQL node id of the authenticated user.
        self.id = id
        self.username = username

        self.g = Github(ghtoken)
        # REST (v3) requests authenticate with a bearer-token header.
        self.headers = {"Authorization": "Bearer " + ghtoken}
        # repositoryData: GraphQL repository-list query result
        # (result['data']['user']['repositories']['edges']).
        self.repositoryData = repositoryData

    def calculateLoc(self):
        """Accumulate per-language LOC per year/quarter over every repository,
        build the bar graph image, and push it to the profile repo."""
        result = self.repositoryData
        yearly_data = {}
        for repo in result['data']['user']['repositories']['edges']:
            print(repo)
            self.getCommitStat(repo['node'], yearly_data)
            # Pause between stats calls to stay under GitHub's rate limits.
            time.sleep(0.7)
        graph = BarGraph(yearly_data)
        graph.build_graph()  # writes bar_graph.png to the working directory
        self.pushChart()

    def run_query_v3(self, nameWithOwner):
        """Fetch weekly code-frequency stats for one repository via the REST
        v3 API. On success the payload is a list of
        [week_unix_ts, additions, deletions] rows."""
        endPoint = 'https://api.github.com/repos/' + nameWithOwner + '/stats/code_frequency'
        request = requests.get(endPoint, headers=self.headers)
        if request.status_code == 401:
            raise Exception("Invalid token {}. {}".format(request.status_code, nameWithOwner))
        return request.json()

    def getQuarter(self, timeStamp):
        """Map a unix timestamp to a 1-based third of the year:
        Jan-Apr -> 1, May-Aug -> 2, Sep-Dec -> 3."""
        month = datetime.datetime.fromtimestamp(timeStamp).month
        if 1 <= month <= 4:
            return 1
        elif 5 <= month <= 8:
            return 2
        elif 9 <= month <= 12:
            return 3

    def getCommitStat(self, repoDetails, yearly_data):
        """Fold one repository's weekly stats into yearly_data, shaped as
        {year: {quarter: {language: loc, ...}}}."""
        result = self.run_query_v3(repoDetails['nameWithOwner'])
        # GitHub answers 202 with a non-list body while the stats are still
        # being computed server-side; skip the repo instead of crashing.
        if not isinstance(result, list):
            return
        # Repositories without a detected primary language cannot be
        # attributed to any language bucket.
        if repoDetails['primaryLanguage'] is None:
            return
        language = repoDetails['primaryLanguage']['name']
        for week in result:
            # week = [timestamp, additions, deletions]; deletions are
            # negative, so adding both nets the week's LOC change.
            curr_year = datetime.datetime.fromtimestamp(week[0]).year
            quarter = self.getQuarter(week[0])
            quarter_bucket = yearly_data.setdefault(curr_year, {}).setdefault(quarter, {})
            quarter_bucket[language] = quarter_bucket.get(language, 0) + (week[1] + week[2])

    def pushChart(self):
        """Commit bar_graph.png to charts/ in the <username>/<username> repo."""
        repo = self.g.get_repo(f"{self.username}/{self.username}")
        with open('bar_graph.png', 'rb') as input_file:
            data = input_file.read()
        try:
            contents = repo.get_contents("charts/bar_graph.png")
            repo.update_file(contents.path, "Charts Added", data, contents.sha)
        except Exception:
            # First run: the chart file does not exist yet, so create it.
            repo.create_file("charts/bar_graph.png", "Initial Commit", data)
|
||||
|
||||
|
||||
140
main.py
140
main.py
@@ -1,19 +1,17 @@
|
||||
'''
|
||||
Readme Development Metrics With waka time progress
|
||||
'''
|
||||
|
||||
import locale
|
||||
import re
|
||||
import os
|
||||
import base64
|
||||
import sys
|
||||
from pytz import timezone
|
||||
import pytz
|
||||
import locale
|
||||
import requests
|
||||
from github import Github
|
||||
from github import Github, GithubException
|
||||
import datetime
|
||||
from string import Template
|
||||
import time
|
||||
|
||||
START_COMMENT = '<!--START_SECTION:waka-->'
|
||||
END_COMMENT = '<!--END_SECTION:waka-->'
|
||||
@@ -30,6 +28,9 @@ showCommit = os.getenv('INPUT_SHOW_COMMIT')
|
||||
showLanguage = os.getenv('INPUT_SHOW_LANGUAGE')
|
||||
show_loc = os.getenv('INPUT_SHOW_LINES_OF_CODE')
|
||||
|
||||
showLanguagePerRepo = 'y' if os.getenv('INPUT_SHOW_LANGUAGE_PER_REPO') is None else os.getenv('INPUT_SHOW_LANGUAGE_PER_REPO')
|
||||
showLocChart = 'y' if os.getenv('INPUT_SHOW_LOC_CHART') is None else os.getenv('INPUT_SHOW_LOC_CHART')
|
||||
show_waka_stats = 'y'
|
||||
# The GraphQL query to get commit data.
|
||||
userInfoQuery = """
|
||||
{
|
||||
@@ -90,12 +91,54 @@ def run_v3_api(query):
|
||||
if request.status_code == 200:
|
||||
return request.json()
|
||||
else:
|
||||
print(request.json())
|
||||
raise Exception(
|
||||
"Query failed to run by returning code of {}. {},... {}".format(request.status_code, query,
|
||||
str(request.json())))
|
||||
|
||||
|
||||
repositoryListQuery = Template("""
|
||||
{
|
||||
user(login: "$username") {
|
||||
repositories(orderBy: {field: CREATED_AT, direction: ASC}, last: 100, affiliations: [OWNER, COLLABORATOR, ORGANIZATION_MEMBER], isFork: false) {
|
||||
totalCount
|
||||
edges {
|
||||
node {
|
||||
object(expression:"master") {
|
||||
... on Commit {
|
||||
history (author: { id: "$id" }){
|
||||
totalCount
|
||||
}
|
||||
}
|
||||
}
|
||||
primaryLanguage {
|
||||
color
|
||||
name
|
||||
id
|
||||
}
|
||||
stargazers {
|
||||
totalCount
|
||||
}
|
||||
collaborators {
|
||||
totalCount
|
||||
}
|
||||
createdAt
|
||||
name
|
||||
owner {
|
||||
id
|
||||
login
|
||||
}
|
||||
nameWithOwner
|
||||
}
|
||||
}
|
||||
}
|
||||
location
|
||||
createdAt
|
||||
name
|
||||
}
|
||||
}
|
||||
""")
|
||||
|
||||
|
||||
def run_query(query):
|
||||
request = requests.post('https://api.github.com/graphql', json={'query': query}, headers=headers)
|
||||
if request.status_code == 200:
|
||||
@@ -139,7 +182,7 @@ def generate_commit_list(tz):
|
||||
result = run_query(userInfoQuery) # Execute the query
|
||||
username = result["data"]["viewer"]["login"]
|
||||
id = result["data"]["viewer"]["id"]
|
||||
print("user {}".format(username))
|
||||
# print("user {}".format(username))
|
||||
|
||||
result = run_query(createContributedRepoQuery.substitute(username=username))
|
||||
nodes = result["data"]["user"]["repositoriesContributedTo"]["nodes"]
|
||||
@@ -245,11 +288,11 @@ def generate_commit_list(tz):
|
||||
return string
|
||||
|
||||
|
||||
def get_stats():
|
||||
'''Gets API data and returns markdown progress'''
|
||||
stats = ''
|
||||
|
||||
request = requests.get(f"https://wakatime.com/api/v1/users/current/stats/last_7_days?api_key={waka_key}")
|
||||
def get_waka_time_stats():
|
||||
stats = ''
|
||||
request = requests.get(
|
||||
f"https://wakatime.com/api/v1/users/current/stats/last_7_days?api_key={waka_key}")
|
||||
|
||||
if request.status_code != 401:
|
||||
data = request.json()
|
||||
@@ -296,6 +339,64 @@ def get_stats():
|
||||
return stats
|
||||
|
||||
|
||||
def generate_language_per_repo(result):
    '''Build the "I mostly code in ..." markdown chart from the repository
    list GraphQL result: one entry per primary language, ordered by how
    many repositories use it.'''
    language_count = {}
    total = 0
    for repo in result['data']['user']['repositories']['edges']:
        # Repositories without a detected primary language are skipped.
        if repo['node']['primaryLanguage'] is None:
            continue
        language = repo['node']['primaryLanguage']['name']
        color_code = repo['node']['primaryLanguage']['color']
        total += 1
        entry = language_count.setdefault(language, {'count': 0})
        entry['count'] += 1
        entry['color'] = color_code

    # No repository has a primary language: nothing to chart. (The original
    # indexed sorted_labels[0] here and raised IndexError.)
    if not language_count:
        return ''

    data = []
    sorted_labels = sorted(language_count, key=lambda x: language_count[x]['count'], reverse=True)
    most_language_repo = sorted_labels[0]
    for label in sorted_labels:
        percent = round(language_count[label]['count'] / total * 100, 2)
        data.append({
            "name": label,
            "text": str(language_count[label]['count']) + " repos",
            "percent": percent
        })

    title = 'I mostly code in ' + most_language_repo
    return '**' + title + '** \n\n' + '```text\n' + make_commit_list(data) + '\n\n```\n'
|
||||
|
||||
|
||||
def get_stats():
    '''Gets API data and returns markdown progress'''
    stats = ''
    # Fetch the user's repositories once; the result is shared by the
    # per-repo language chart and the lines-of-code chart below.
    repositoryList = run_query(repositoryListQuery.substitute(username=username, id=id))

    # show_waka_stats is hard-coded to 'y' at module level, so the WakaTime
    # section is always included.
    if show_waka_stats.lower() in ['true', '1', 't', 'y', 'yes']:
        stats = stats + get_waka_time_stats()

    if showLanguagePerRepo.lower() in ['true', '1', 't', 'y', 'yes']:
        stats = stats + generate_language_per_repo(repositoryList) + '\n\n'

    if showLocChart.lower() in ['true', '1', 't', 'y', 'yes']:
        # Builds bar_graph.png and commits it to the profile repository.
        loc = LinesOfCode(id, username, ghtoken, repositoryList)
        loc.calculateLoc()
        stats = stats + '**Timeline**\n\n'
        # NOTE(review): the literal below looks truncated — presumably it
        # embedded the chart image markdown (e.g. an ![...](...) link to
        # charts/bar_graph.png); confirm against the repository source.
        stats = stats + ' \n\n'
        # stats = stats + generate_language_per_repo(repositoryList) + '\n\n'

    return stats
|
||||
|
||||
|
||||
def decode_readme(data: str):
|
||||
'''Decode the contets of old readme'''
|
||||
decoded_bytes = base64.b64decode(data)
|
||||
@@ -309,17 +410,24 @@ def generate_new_readme(stats: str, readme: str):
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
g = Github(ghtoken)
|
||||
try:
|
||||
repo = g.get_repo(f"{user}/{user}")
|
||||
except GithubException:
|
||||
print("Authentication Error. Try saving a GitHub Personal Access Token in your Repo Secrets")
|
||||
sys.exit(1)
|
||||
contents = repo.get_readme()
|
||||
if ghtoken is None:
|
||||
raise Exception('Token not available')
|
||||
g = Github(ghtoken)
|
||||
headers = {"Authorization": "Bearer " + ghtoken}
|
||||
user_data = run_query(userInfoQuery) # Execute the query
|
||||
username = user_data["data"]["viewer"]["login"]
|
||||
id = user_data["data"]["viewer"]["id"]
|
||||
# print("user {} id {}".format(username, id))
|
||||
repo = g.get_repo(f"{username}/{username}")
|
||||
contents = repo.get_readme()
|
||||
waka_stats = get_stats()
|
||||
rdmd = decode_readme(contents.content)
|
||||
new_readme = generate_new_readme(stats=waka_stats, readme=rdmd)
|
||||
# print(new_readme)
|
||||
if new_readme != rdmd:
|
||||
repo.update_file(path=contents.path, message='Updated with Dev Metrics',
|
||||
content=new_readme, sha=contents.sha, branch='master')
|
||||
print("Readme updated")
|
||||
except Exception as e:
|
||||
print("Exception Occurred" + str(e))
|
||||
|
||||
107
make_bar_graph.py
Normal file
107
make_bar_graph.py
Normal file
@@ -0,0 +1,107 @@
|
||||
import os
|
||||
import pandas as pd
|
||||
import numpy as np
|
||||
import altair as alt
|
||||
import json
|
||||
import os
|
||||
# npm install vega-lite vega-cli canvas
|
||||
|
||||
|
||||
class BarGraph:
    """Renders the per-quarter lines-of-code history as a grouped bar chart
    saved to bar_graph.png, using Altair (vega-lite/vega-cli must be
    installed for PNG export).

    yearly_data has the shape {year: {quarter: {language: loc, ...}}} with
    quarter in 1..3 (thirds of the year).
    """

    # Fallback bar color for languages missing from colors.json or whose
    # color entry is null; keeps the Altair color scale's domain and range
    # the same length.
    DEFAULT_COLOR = '#333333'

    def __init__(self, yearly_data):
        self.yearly_data = yearly_data

    def build_graph(self):
        """Build and save the chart; returns the output file name."""
        with open(os.path.join(os.path.dirname(__file__), 'colors.json')) as f:
            colors = json.load(f)

        # Pass 1: keep only the top languages per quarter (by LOC) under a
        # 'top' sub-dict, and collect every language that made any quarter's
        # top list.
        max_languages = 5
        top_languages = {}
        for year in self.yearly_data.keys():
            for quarter in self.yearly_data[year].keys():
                quarter_data = self.yearly_data[year][quarter]
                # Exclude a pre-existing 'top' key so repeated calls do not
                # try to sort a dict value against ints.
                ranked = sorted((lang for lang in quarter_data.keys() if lang != 'top'),
                                key=lambda lang: quarter_data[lang], reverse=True)
                quarter_data['top'] = {}
                for language in ranked[0:max_languages]:
                    if quarter_data[language] != 0:
                        quarter_data['top'][language] = quarter_data[language]
                        top_languages[language] = top_languages.get(language, 0) + 1

        all_languages = list(top_languages.keys())

        # One color per language, falling back instead of dropping entries:
        # the original skipped null colors, misaligning the Altair scale's
        # domain (languages) with its range (colors).
        allColorsValues = [
            ((colors.get(language) or {}).get('color')) or self.DEFAULT_COLOR
            for language in all_languages
        ]

        # Pass 2: languages_all_loc[language] -> one [q1, q2, q3] row per year.
        years = list(self.yearly_data.keys())
        languages_all_loc = {}
        for language in all_languages:
            language_year = []
            for year in years:
                language_quarter = [0, 0, 0]
                for quarter in self.yearly_data[year].keys():
                    language_quarter[quarter - 1] = self.yearly_data[year][quarter]['top'].get(language, 0)
                language_year.append(language_quarter)
            languages_all_loc[language] = language_year

        def prep_df(df, name):
            # Reshape wide (year x quarter) into long rows: c1=year,
            # c2=quarter label, values=LOC, plus the language name.
            df = df.stack().reset_index()
            df.columns = ['c1', 'c2', 'values']
            df['Language'] = name
            return df

        frames = []
        for language, rows in languages_all_loc.items():
            frame = pd.DataFrame(rows, index=years, columns=["Q1", "Q2", "Q3"])
            frames.append(prep_df(frame, language))
        # NOTE: pd.concat raises on an empty list, i.e. when yearly_data has
        # no language at all — same failure mode as the original.
        df = pd.concat(frames)

        chart = alt.Chart(df).mark_bar().encode(
            # group columns on the quarter labels within each year cluster
            x=alt.X('c2:N', title=None),
            # stack LOC per language within each bar
            y=alt.Y('sum(values):Q',
                    axis=alt.Axis(grid=False, title='LOC added')),
            # one cluster of bars per year
            column=alt.Column('c1:N', title=None),
            # segment bar colors by language using the GitHub language colors
            color=alt.Color('Language:N',
                            scale=alt.Scale(
                                domain=all_languages,
                                range=allColorsValues,
                            ),
                            )) \
            .configure_view(
            # remove grid lines around column clusters
            strokeOpacity=0
        )
        chart.save('bar_graph.png')
        return 'bar_graph.png'
|
||||
1305
package-lock.json
generated
Normal file
1305
package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
@@ -1,10 +1,62 @@
|
||||
altair==4.1.0
|
||||
altair-data-server==0.4.1
|
||||
altair-saver==0.5.0
|
||||
altair-viewer==0.3.0
|
||||
appdirs==1.4.3
|
||||
attrs==19.3.0
|
||||
bcrypt==3.1.7
|
||||
certifi==2020.6.20
|
||||
cffi==1.14.0
|
||||
chardet==3.0.4
|
||||
cryptography==2.9.2
|
||||
cycler==0.10.0
|
||||
Deprecated==1.2.10
|
||||
distlib==0.3.0
|
||||
entrypoints==0.3
|
||||
filelock==3.0.12
|
||||
gitdb==4.0.5
|
||||
GitPython==3.1.7
|
||||
idna==2.10
|
||||
Jinja2==2.11.2
|
||||
jsonschema==3.2.0
|
||||
kiwisolver==1.2.0
|
||||
lizard==1.17.4
|
||||
MarkupSafe==1.1.1
|
||||
matplotlib==3.3.0
|
||||
MouseInfo==0.1.2
|
||||
numpy==1.19.1
|
||||
opencv-python==4.2.0.34
|
||||
pandas==1.0.5
|
||||
paramiko==2.7.1
|
||||
Pillow==7.2.0
|
||||
pipenv==2018.11.26
|
||||
portpicker==1.3.1
|
||||
PyAutoGUI==0.9.48
|
||||
pycparser==2.20
|
||||
PyDriller==1.15.2
|
||||
PyGetWindow==0.0.8
|
||||
PyGithub==1.51
|
||||
PyJWT==1.7.1
|
||||
requests==2.24.0
|
||||
urllib3==1.25.9
|
||||
wrapt==1.12.1
|
||||
PyMsgBox==1.0.7
|
||||
PyNaCl==1.3.0
|
||||
pyparsing==2.4.7
|
||||
pyperclip==1.7.0
|
||||
PyRect==0.1.4
|
||||
pyrsistent==0.16.0
|
||||
PyScreeze==0.1.25
|
||||
python-dateutil==2.8.1
|
||||
python-dotenv==0.14.0
|
||||
PyTweening==1.0.3
|
||||
pytz==2020.1
|
||||
requests==2.24.0
|
||||
selenium==3.141.0
|
||||
six==1.15.0
|
||||
smmap==3.0.4
|
||||
toolz==0.10.0
|
||||
tornado==6.0.4
|
||||
urllib3==1.25.9
|
||||
virtualenv==20.0.18
|
||||
virtualenv-clone==0.5.4
|
||||
wrapt==1.12.1
|
||||
xlrd==1.2.0
|
||||
XlsxWriter==1.2.8
|
||||
|
||||
Reference in New Issue
Block a user