Makefile added; targets for local action run and image building added; workflow for image building and publishing added; image building in action replaced with container pulling

This commit is contained in:
pseusys
2023-01-13 05:05:13 +04:00
parent a21c4d3fcd
commit d6560d8594
10 changed files with 3258 additions and 3178 deletions

View File

@@ -18,4 +18,4 @@ INPUT_SHOW_LANGUAGE_PER_REPO="True"
INPUT_SHOW_UPDATED_DATE="True"
INPUT_UPDATED_DATE_FORMAT="%d/%m/%Y %H:%M:%S"
INPUT_COMMIT_BY_ME="False"
INPUT_COMMIT_MESSAGE="Updated with Dev Metrics"
INPUT_COMMIT_MESSAGE="Updated with Dev Metrics"

41
.github/workflows/build_image.yml vendored Normal file
View File

@@ -0,0 +1,41 @@
---
name: PUBLISH_IMAGE

# Build the `waka-readme-stats` container on every push; the image is only
# pushed to GHCR from master or releases/* branches (see the `push:` input
# of the build step below).
on:
  push:

jobs:
  publish-server-image:
    name: Publish `waka-readme-stats` Container
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write  # required to push images to ghcr.io
    steps:
      - name: Checkout 🛎️
        uses: actions/checkout@v3

      # login-action v1 runs on the deprecated Node 12 runtime; v2 is the
      # contemporary of checkout@v3.
      - name: Log in to the Container registry 🚪
        uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Extract metadata (tags, labels) for Docker 🏋️
        id: meta
        uses: docker/metadata-action@v4  # v3 runs on deprecated Node 12
        with:
          images: ghcr.io/${{ github.repository }}
          tags: |
            type=ref,event=branch
            type=ref,event=pr
            type=semver,pattern={{version}}
            type=semver,pattern={{major}}.{{minor}}

      - name: Build and push Docker image 🏗️
        uses: docker/build-push-action@v3  # v2 runs on deprecated Node 12
        with:
          push: ${{ github.ref == 'refs/heads/master' || startsWith(github.ref, 'refs/heads/releases') }}
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}

View File

@@ -1,18 +1,14 @@
# NOTE(review): this span is a rendered unified diff WITHOUT +/- markers, so
# removed (old) and added (new) Dockerfile lines are interleaved below. It is
# not a buildable Dockerfile as-is (it contains two ENTRYPOINT lines and two
# `ADD requirements.txt` lines); comments below describe the apparent intent
# of each line -- confirm against the actual repository history.
# Base image shipping both Python 3.9 and NodeJS 16.
FROM nikolaik/python-nodejs:python3.9-nodejs16
# Presumably the OLD layout: each source file copied into the image root.
ADD requirements.txt /requirements.txt
ADD main.py /main.py
ADD loc.py /loc.py
ADD make_bar_graph.py /make_bar_graph.py
ADD colors.json /colors.json
ADD translation.json /translation.json
# Unbuffered stdout and no .pyc files inside the container.
ENV PYTHONUNBUFFERED 1
ENV PYTHONDONTWRITEBYTECODE 1
# Expose globally installed npm binaries (vega toolchain) on PATH.
ENV PATH "$PATH:/home/root/.npm-global/bin"
WORKDIR ./waka-readme-stats
# Presumably the OLD layout: dependencies installed by explicit RUN steps.
RUN python -m pip install --upgrade pip wheel setuptools
RUN pip install -r requirements.txt
RUN npm -g config set user root
RUN npm i -g npm@next-8
RUN npm i -g vega vega-lite vega-cli canvas
# Presumably the NEW layout: copy requirements, Makefile and sources/, then
# delegate dependency installation to `make dependencies`.
ADD requirements.txt ./requirements.txt
ADD Makefile ./Makefile
ADD sources/* ./
# OLD entrypoint (exec form, absolute path).
ENTRYPOINT ["python", "/main.py"]
RUN make dependencies
# NEW entrypoint (shell form, relative to WORKDIR).
ENTRYPOINT python3 ./main.py

43
Makefile Normal file
View File

@@ -0,0 +1,43 @@
# Helper targets for developing, testing and containerizing the
# `waka-readme-stats` GitHub action locally.
.DEFAULT_GOAL = run
SHELL = /bin/bash
# Prefer the project-local virtualenv and node binaries over system ones.
PATH := venv/bin:node_modules/.bin:$(PATH)


# Print a short usage summary.
# NB: single quotes keep the backticks literal -- inside double quotes the
# shell would try to COMMAND-SUBSTITUTE `waka-readme-stats`, `make run` etc.
help:
	@echo 'Welcome to `waka-readme-stats` github action!'
	@echo 'The action can be tested locally with: `make run`'
	@echo 'NB! For local testing Python version between ??? and ??? and NodeJS version between ??? and ??? are required'
	@echo 'The action image can be built locally with: `make container`'
	@echo 'NB! For local container building Docker version between ??? and ??? is required'
	@echo 'The action directory and image can be cleaned with: `make clean`'
.PHONY: help


# Create the virtualenv and install Python dependencies into it
# (pip resolves to venv/bin/pip thanks to the PATH override above).
venv:
	python3 -m venv venv
	pip install --upgrade pip
	pip install -r requirements.txt

# Install the node toolchain used for chart rendering.
node_modules:
	npm i npm@next-8
	npm i vega vega-lite vega-cli canvas

dependencies: venv node_modules
.PHONY: dependencies


# Export every variable from .env.example into the environment, then run the
# action entry point.
run: dependencies
	source <(sed 's/^/export /' .env.example) && python3 ./sources/main.py
.PHONY: run

# Build the action container image locally.
container:
	docker build -t waka-readme-stats -f Dockerfile .
.PHONY: container

# Remove the virtualenv, node modules and the local image; the leading '-'
# keeps make going when the image does not exist.
clean:
	rm -rf venv
	rm -rf node_modules
	-docker rmi -f waka-readme-stats 2>/dev/null
.PHONY: clean

View File

@@ -129,7 +129,7 @@ inputs:
runs:
using: 'docker'
image: 'Dockerfile'
image: 'ghcr.io/anmol098/waka-readme-stats:master'
branding:
icon: 'activity'

File diff suppressed because it is too large Load Diff

View File

@@ -1,108 +1,108 @@
import re
import os
import base64
import requests
from github import Github, InputGitAuthor
import datetime
from string import Template
import matplotlib.pyplot as plt
from io import StringIO, BytesIO
from dotenv import load_dotenv
import time
from make_bar_graph import BarGraph
class LinesOfCode:
    """Collects lines-of-code statistics (additions minus deletions) per year,
    quarter and primary language for a user's repositories via the GitHub
    REST v3 API, and pushes a rendered bar chart to the user's profile repo.
    """

    def __init__(self, id, username, ghtoken, repositoryData, ignored_repos):
        self.id = id
        self.username = username
        self.g = Github(ghtoken)  # PyGithub client, used only by pushChart()
        self.headers = {"Authorization": "Bearer " + ghtoken}
        # GraphQL response; calculateLoc reads data.user.repositories.edges.
        self.repositoryData = repositoryData
        self.ignored_repos = ignored_repos  # repo names to exclude

    def calculateLoc(self):
        """Build and return {year: {quarter: {language: loc_delta}}} over all
        non-ignored repositories."""
        result = self.repositoryData
        yearly_data = {}
        for repo in result['data']['user']['repositories']['edges']:
            if repo['node']['name'] not in self.ignored_repos:
                self.getCommitStat(repo['node'], yearly_data)
            time.sleep(0.7)  # crude rate limiting between repositories
        return yearly_data

    def plotLoc(self, yearly_data):
        """Render yearly_data as a bar graph and commit it to the profile repo."""
        graph = BarGraph(yearly_data)
        graph.build_graph()
        self.pushChart()

    def run_query_v3(self, endPoint):
        """GET a REST v3 endpoint.

        Returns parsed JSON, or [] for 204 No Content; raises on 401."""
        request = requests.get(endPoint, headers=self.headers)
        if request.status_code == 401:
            raise Exception("Invalid token {}.".format(request.status_code))
        elif request.status_code == 204:
            return []
        else:
            return request.json()

    def getQuarter(self, timeStamp):
        """Return the calendar quarter (1-4) for an ISO date string."""
        month = datetime.datetime.fromisoformat(timeStamp).month
        # Equivalent to the explicit 1-3 / 4-6 / 7-9 / 10-12 range checks.
        return (month - 1) // 3 + 1

    def getCommitStat(self, repoDetails, yearly_data):
        """Accumulate per-quarter LOC deltas of this user's commits in one repo
        into yearly_data (mutated in place)."""
        commitsURL = 'https://api.github.com/repos/' + repoDetails['nameWithOwner'] + '/commits'
        filteredCommitsResult = self.run_query_v3(commitsURL + '?author=' + self.username)
        # An empty repository yields an error object instead of a list; skip it.
        if not isinstance(filteredCommitsResult, list):
            return
        for commit in filteredCommitsResult:
            iso_date = commit["commit"]["author"]["date"]
            date = re.search(r'\d+-\d+-\d+', iso_date).group(0)
            curr_year = datetime.datetime.fromisoformat(date).year
            # One extra request per commit: the list endpoint has no stats.
            individualCommitResult = self.run_query_v3(commitsURL + '/' + commit["sha"])
            quarter = self.getQuarter(date)
            if repoDetails['primaryLanguage'] is not None:
                lang = repoDetails['primaryLanguage']['name']
                quarter_data = yearly_data.setdefault(curr_year, {}).setdefault(quarter, {})
                quarter_data[lang] = quarter_data.get(lang, 0) + (
                    individualCommitResult["stats"]["additions"]
                    - individualCommitResult["stats"]['deletions'])

    def pushChart(self):
        """Create or update charts/bar_graph.png in <username>/<username>."""
        repo = self.g.get_repo(f"{self.username}/{self.username}")
        committer = InputGitAuthor('readme-bot', '41898282+github-actions[bot]@users.noreply.github.com')
        with open('bar_graph.png', 'rb') as input_file:
            data = input_file.read()
        try:
            contents = repo.get_contents("charts/bar_graph.png")
            repo.update_file(contents.path, "Charts Updated", data, contents.sha, committer=committer)
        except Exception:
            # File does not exist yet -- create it instead.
            repo.create_file("charts/bar_graph.png", "Charts Added", data, committer=committer)
import re
import os
import base64
import requests
from github import Github, InputGitAuthor
import datetime
from string import Template
import matplotlib.pyplot as plt
from io import StringIO, BytesIO
from dotenv import load_dotenv
import time
from make_bar_graph import BarGraph
class LinesOfCode:
    """Computes per-quarter LOC additions for a user's repositories through
    the GitHub REST v3 API and publishes the resulting chart to the
    user's profile repository."""

    def __init__(self, id, username, ghtoken, repositoryData, ignored_repos):
        self.id = id
        self.username = username
        self.g = Github(ghtoken)
        self.headers = {"Authorization": "Bearer " + ghtoken}
        self.repositoryData = repositoryData
        self.ignored_repos = ignored_repos

    def calculateLoc(self):
        # Walk every repository edge of the GraphQL payload, skipping ignored
        # repositories, folding commit stats into a single nested dict.
        payload = self.repositoryData
        collected = {}
        for edge in payload['data']['user']['repositories']['edges']:
            node = edge['node']
            if node['name'] not in self.ignored_repos:
                self.getCommitStat(node, collected)
            time.sleep(0.7)
        return collected

    def plotLoc(self, yearly_data):
        # Render the chart, then push it to the profile repository.
        chart = BarGraph(yearly_data)
        chart.build_graph()
        self.pushChart()

    def run_query_v3(self, endPoint):
        # Plain GET against the REST v3 API using the stored bearer token.
        response = requests.get(endPoint, headers=self.headers)
        if response.status_code == 401:
            raise Exception("Invalid token {}.".format(response.status_code))
        if response.status_code == 204:
            return []
        return response.json()

    def getQuarter(self, timeStamp):
        # Map month 1-12 of an ISO date string onto quarters 1-4.
        month = datetime.datetime.fromisoformat(timeStamp).month
        if 1 <= month <= 3:
            return 1
        if 4 <= month <= 6:
            return 2
        if 7 <= month <= 9:
            return 3
        if 10 <= month <= 12:
            return 4

    def getCommitStat(self, repoDetails, yearly_data):
        # List this user's commits for one repo, then fetch per-commit stats.
        base = 'https://api.github.com/repos/' + repoDetails['nameWithOwner'] + '/commits'
        commits = self.run_query_v3(base + '?author=' + self.username)
        # Empty repositories answer with an error object rather than a list.
        if not type(commits) == list:
            return
        this_year = datetime.datetime.utcnow().year  # kept from original; currently unused
        for entry in commits:
            iso_date = entry["commit"]["author"]["date"]
            day = re.search(r'\d+-\d+-\d+', iso_date).group(0)
            year = datetime.datetime.fromisoformat(day).year
            detail = self.run_query_v3(base + '/' + entry["sha"])
            q = self.getQuarter(day)
            if repoDetails['primaryLanguage'] is not None:
                name = repoDetails['primaryLanguage']['name']
                if year not in yearly_data:
                    yearly_data[year] = {}
                if q not in yearly_data[year]:
                    yearly_data[year][q] = {}
                if name not in yearly_data[year][q]:
                    yearly_data[year][q][name] = 0
                yearly_data[year][q][name] += detail["stats"]["additions"] - detail["stats"]['deletions']

    def pushChart(self):
        # Commit bar_graph.png into <username>/<username> under charts/.
        profile = self.g.get_repo(f"{self.username}/{self.username}")
        committer = InputGitAuthor('readme-bot', '41898282+github-actions[bot]@users.noreply.github.com')
        with open('bar_graph.png', 'rb') as chart_file:
            payload = chart_file.read()
        try:
            existing = profile.get_contents("charts/bar_graph.png")
            profile.update_file(existing.path, "Charts Updated", payload, existing.sha, committer=committer)
        except Exception as e:
            profile.create_file("charts/bar_graph.png", "Charts Added", payload, committer=committer)

File diff suppressed because it is too large Load Diff

View File

@@ -1,106 +1,106 @@
import os
import pandas as pd
import numpy as np
import altair as alt
import json
import os
# npm install vega-lite vega-cli canvas
class BarGraph:
    """Renders a grouped bar chart (LOC added per quarter, grouped by year,
    colored by language) from nested {year: {quarter: {language: loc}}} data
    and saves it as bar_graph.png via Altair (vega-cli backend)."""

    def __init__(self, yearly_data):
        # {year: {quarter: {language: loc}}}; mutated by build_graph, which
        # adds a synthetic 'top' key inside each quarter dict.
        self.yearly_data = yearly_data

    def build_graph(self):
        """Build and save the chart; returns the output file name."""
        # Language -> color mapping shipped next to this module.
        with open(os.path.join(os.path.dirname(__file__), 'colors.json')) as f:
            colors = json.load(f)

        # Keep at most `max_languages` languages per quarter (largest LOC
        # first) under each quarter's synthetic 'top' key, and remember every
        # language encountered (dict keys preserve insertion order).
        # NOTE(review): the source rendering lost indentation; this assumes
        # only the 'top' assignment was guarded by the != 0 check -- confirm
        # against repository history.
        max_languages = 5
        top_languages = {}
        for year in self.yearly_data:
            for quarter in self.yearly_data[year]:
                quarter_stats = self.yearly_data[year][quarter]
                ranked = sorted(quarter_stats,
                                key=lambda lang: quarter_stats[lang],
                                reverse=True)
                for language in ranked[:max_languages]:
                    if 'top' not in quarter_stats:
                        quarter_stats['top'] = {}
                    if quarter_stats[language] != 0:
                        quarter_stats['top'][language] = quarter_stats[language]
                    if language not in top_languages:
                        top_languages[language] = 1
                    top_languages[language] += 1
        all_languages = list(top_languages.keys())

        # NOTE(review): languages whose color is null are skipped here, which
        # can leave `range` shorter than `domain` in the scale below -- confirm
        # colors.json always has a non-null color for every tracked language.
        allColorsValues = []
        for language in all_languages:
            if colors[language]['color'] is not None:
                allColorsValues.append(colors[language]['color'])

        # Reshape into language -> [[q1..q4] per year] LOC matrices.
        languages_all_loc = {}
        for language in all_languages:
            language_year = []
            for year in self.yearly_data:
                language_quarter = [0, 0, 0, 0]
                for quarter in self.yearly_data[year]:
                    language_quarter[quarter - 1] = \
                        self.yearly_data[year][quarter]['top'].get(language, 0)
                language_year.append(language_quarter)
            languages_all_loc[language] = language_year

        def prep_df(df, name):
            # Flatten the year x quarter frame into long format for Altair.
            df = df.stack().reset_index()
            df.columns = ['c1', 'c2', 'values']
            df['Language'] = name
            return df

        frames = []
        for language, year_rows in languages_all_loc.items():
            frame = pd.DataFrame(year_rows, index=list(self.yearly_data.keys()),
                                 columns=["Q1", "Q2", "Q3", "Q4"])
            frames.append(prep_df(frame, language))
        df = pd.concat(frames)

        chart = alt.Chart(df).mark_bar().encode(
            x=alt.X('c2:N', title=None),  # quarter within each year cluster
            y=alt.Y('sum(values):Q', axis=alt.Axis(grid=False, title='LOC added')),
            column=alt.Column('c1:N', title=None),  # one column cluster per year
            color=alt.Color('Language:N',
                            scale=alt.Scale(domain=all_languages,
                                            range=allColorsValues)),
        ).configure_view(
            strokeOpacity=0  # no grid lines around column clusters
        )
        chart.save('bar_graph.png')
        return 'bar_graph.png'
import os
import pandas as pd
import numpy as np
import altair as alt
import json
import os
# npm install vega-lite vega-cli canvas
class BarGraph:
    """Draws the per-quarter LOC bar chart (grouped by year, one color per
    language) from {year: {quarter: {language: loc}}} data."""

    def __init__(self, yearly_data):
        self.yearly_data = yearly_data

    def build_graph(self):
        # Language -> color mapping shipped next to this module.
        with open(os.path.join(os.path.dirname(__file__), 'colors.json')) as palette_file:
            palette = json.load(palette_file)
        chart_colors = []

        # Step 1: per quarter keep the five largest languages under a
        # synthetic 'top' key, remembering every language encountered.
        limit = 5
        seen_languages = {}
        for year in self.yearly_data.keys():
            for quarter in self.yearly_data[year].keys():
                stats = self.yearly_data[year][quarter]
                ranked = sorted(list(stats.keys()), key=lambda lang: stats[lang], reverse=True)
                for language in ranked[0:limit]:
                    if 'top' not in stats:
                        stats['top'] = {}
                    if stats[language] != 0:
                        stats['top'][language] = stats[language]
                    if language not in seen_languages:
                        seen_languages[language] = 1
                    seen_languages[language] += 1
        labels = list(seen_languages.keys())

        # Colors for every tracked language (null colors are skipped, as in
        # the original implementation).
        for language in labels:
            if palette[language]['color'] is not None:
                chart_colors.append(palette[language]['color'])

        # Step 2: reshape into language -> [[q1..q4] for each year].
        loc_by_language = {}
        for language in labels:
            rows = []
            for year in self.yearly_data.keys():
                cells = [0, 0, 0, 0]
                for quarter in self.yearly_data[year].keys():
                    top = self.yearly_data[year][quarter]['top']
                    cells[quarter - 1] = top[language] if language in top else 0
                rows.append(cells)
            loc_by_language[language] = rows

        # Step 3: long-format dataframe for Altair.
        def prep_df(df, name):
            df = df.stack().reset_index()
            df.columns = ['c1', 'c2', 'values']
            df['Language'] = name
            return df

        pieces = {}
        for language in loc_by_language.keys():
            pieces[language] = pd.DataFrame(loc_by_language[language],
                                            index=list(self.yearly_data.keys()),
                                            columns=["Q1", "Q2", "Q3", "Q4"])
        for language in pieces.keys():
            pieces[language] = prep_df(pieces[language], language)
        df = pd.concat(pieces.values())

        # Step 4: grouped bar chart, one column cluster per year.
        chart = alt.Chart(df).mark_bar().encode(
            x=alt.X('c2:N', title=None),
            y=alt.Y('sum(values):Q', axis=alt.Axis(grid=False, title='LOC added')),
            column=alt.Column('c1:N', title=None),
            color=alt.Color('Language:N',
                            scale=alt.Scale(domain=labels, range=chart_colors)),
        ).configure_view(strokeOpacity=0)
        chart.save('bar_graph.png')
        return 'bar_graph.png'