Merge pull request #352 from anmol098/feat/image_building_and_publishing

Feature: image building and publishing
Authored by Alexander Sergeev on 2023-02-16 21:27:09 +01:00, committed via GitHub.
12 changed files with 3303 additions and 3233 deletions

.env.example

@@ -1,22 +1,22 @@
-INPUT_WAKATIME_API_KEY=""
-INPUT_PUSH_BRANCH_NAME="main"
-INPUT_SECTION_NAME="waka"
-INPUT_SHOW_TIMEZONE="True"
-INPUT_SHOW_PROJECTS="False"
-INPUT_SHOW_EDITORS="False"
-INPUT_SHOW_OS="False"
-INPUT_SHOW_LANGUAGE="False"
-INPUT_GH_TOKEN=""
-INPUT_SYMBOL_VERSION="1"
-INPUT_SHOW_LINES_OF_CODE="False"
-INPUT_SHOW_LOC_CHART="False"
-INPUT_SHOW_PROFILE_VIEWS="False"
-INPUT_SHOW_TOTAL_CODE_TIME="True"
-INPUT_SHOW_SHORT_INFO="False"
-INPUT_SHOW_COMMIT="False"
-INPUT_SHOW_DAYS_OF_WEEK="True"
-INPUT_SHOW_LANGUAGE_PER_REPO="True"
-INPUT_SHOW_UPDATED_DATE="True"
-INPUT_UPDATED_DATE_FORMAT="%d/%m/%Y %H:%M:%S"
-INPUT_COMMIT_BY_ME="False"
-INPUT_COMMIT_MESSAGE="Updated with Dev Metrics"
+INPUT_WAKATIME_API_KEY=YOUR_WAKATIME_API_KEY
+INPUT_PUSH_BRANCH_NAME=main
+INPUT_SECTION_NAME=waka
+INPUT_SHOW_TIMEZONE=True
+INPUT_SHOW_PROJECTS=True
+INPUT_SHOW_EDITORS=True
+INPUT_SHOW_OS=True
+INPUT_SHOW_LANGUAGE=True
+INPUT_GH_TOKEN=YOUR_GITHUB_TOKEN_KEY
+INPUT_SYMBOL_VERSION=1
+INPUT_SHOW_LINES_OF_CODE=True
+INPUT_SHOW_LOC_CHART=True
+INPUT_SHOW_PROFILE_VIEWS=True
+INPUT_SHOW_TOTAL_CODE_TIME=True
+INPUT_SHOW_SHORT_INFO=True
+INPUT_SHOW_COMMIT=True
+INPUT_SHOW_DAYS_OF_WEEK=True
+INPUT_SHOW_LANGUAGE_PER_REPO=True
+INPUT_SHOW_UPDATED_DATE=True
+INPUT_UPDATED_DATE_FORMAT=%d/%m/%Y %H:%M:%S
+INPUT_COMMIT_BY_ME=False
+INPUT_COMMIT_MESSAGE=Updated with Dev Metrics
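Since the new format drops the quotes, every value is read back as a plain string. A minimal sketch (not part of this PR, assuming python-dotenv as used elsewhere in the project and a working directory at the repository root) of how the INPUT_* variables are consumed and why the True/False flags need explicit parsing:

import os

from dotenv import load_dotenv  # python-dotenv, pinned in requirements.txt

# Assumption: run from the repository root, next to .env.example.
load_dotenv(".env.example")

# Every value arrives as a string, so boolean flags must be parsed explicitly.
show_projects = os.getenv("INPUT_SHOW_PROJECTS", "False").lower() == "true"
wakatime_key = os.getenv("INPUT_WAKATIME_API_KEY", "")

print(f"projects section enabled: {show_projects}, key set: {bool(wakatime_key)}")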

.github/workflows/build_image.yml (new file, +37 lines)

@@ -0,0 +1,37 @@
name: PUBLISH_IMAGE

on:
  push:

jobs:
  publish-server-image:
    name: Publish 'waka-readme-stats' image
    runs-on: ubuntu-latest
    steps:
      - name: Checkout 🛎️
        uses: actions/checkout@v3

      - name: Log in to the container registry 🚪
        uses: docker/login-action@v2
        with:
          username: wakareadmestats
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Extract metadata (tags, labels) for Docker 🏋️
        id: meta
        uses: docker/metadata-action@v4
        with:
          images: waka-readme-stats
          tags: |
            type=ref,event=branch
            type=ref,event=pr
            type=semver,pattern={{version}}
            type=semver,pattern={{major}}.{{minor}}

      - name: Build and push Docker image 🏗️
        uses: docker/build-push-action@v3
        with:
          push: ${{ github.ref == 'refs/heads/master' || startsWith(github.ref, 'refs/heads/releases') }}
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
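The workflow builds on every push but only pushes the image for master and releases/* branches. A small illustrative sketch (not part of the PR) of the gating expression in the push: field:

def should_push(github_ref: str) -> bool:
    """Mirrors: github.ref == 'refs/heads/master' || startsWith(github.ref, 'refs/heads/releases')."""
    return github_ref == "refs/heads/master" or github_ref.startswith("refs/heads/releases")

# Pushes to master and release branches publish; PR merge refs only build.
assert should_push("refs/heads/master")
assert should_push("refs/heads/releases/v3")
assert not should_push("refs/pull/352/merge")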

.gitignore (+14 lines)

@@ -1,6 +1,20 @@
+# Environment files:
 *.env
+# Generated graph images:
 *.png
+# Library roots:
 node_modules/
+venv/
+# Python caches:
 __pycache__/
+# Package manager configuration files:
+package.json
+package-lock.json
+# IDE configuration files:
 .vscode
+.idea

Dockerfile

@@ -1,18 +1,14 @@
 FROM nikolaik/python-nodejs:python3.9-nodejs16
-ADD requirements.txt /requirements.txt
-ADD main.py /main.py
-ADD loc.py /loc.py
-ADD make_bar_graph.py /make_bar_graph.py
-ADD colors.json /colors.json
-ADD translation.json /translation.json
-ENV PATH "$PATH:/home/root/.npm-global/bin"
-RUN python -m pip install --upgrade pip wheel setuptools
-RUN pip install -r requirements.txt
-RUN npm -g config set user root
-RUN npm i -g npm@next-8
-RUN npm i -g vega vega-lite vega-cli canvas
-ENTRYPOINT ["python", "/main.py"]
+ENV PYTHONUNBUFFERED 1
+ENV PYTHONDONTWRITEBYTECODE 1
+WORKDIR /waka-readme-stats
+ADD requirements.txt ./requirements.txt
+RUN pip install --upgrade pip && pip install -r requirements.txt
+RUN npm i npm@next-8 && npm i vega vega-lite vega-cli canvas
+ADD sources/* ./
+ENTRYPOINT python3 ./main.py
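The two new ENV flags change Python's runtime behavior inside the container: PYTHONUNBUFFERED=1 streams stdout/stderr immediately (useful for live Actions logs), and PYTHONDONTWRITEBYTECODE=1 keeps __pycache__/ out of the image. A quick hypothetical check that could be run inside the built container:

import os
import sys

# PYTHONUNBUFFERED is only visible through the environment...
assert os.environ.get("PYTHONUNBUFFERED") == "1"
# ...while PYTHONDONTWRITEBYTECODE is mirrored by a sys attribute at startup.
assert sys.dont_write_bytecode
print("container Python env configured as expected")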

Makefile (new file, +49 lines)

@@ -0,0 +1,49 @@
.ONESHELL:
.DEFAULT_GOAL = help
SHELL = /bin/bash
PATH := venv/bin:node_modules/.bin:$(PATH)

help:
	@ # Print help commands
	echo "Welcome to 'waka-readme-stats' GitHub Actions!"
	echo "The action can be tested locally with: 'make run-locally'."
	echo "NB! For local testing Python version 3.6+ and NodeJS version between 14 and 16 are required."
	echo "The action image can be built locally with: 'make run-container'."
	echo "NB! For local container building Docker version 20+ is required."
	echo "The action directory and image can be cleaned with: 'make clean'."
.PHONY: help

venv:
	@ # Install Python virtual environment and dependencies
	python3 -m venv venv
	pip install --upgrade pip
	pip install -r requirements.txt

node_modules:
	@ # Install NodeJS dependencies
	npm i npm@next-8
	npm i vega vega-lite vega-cli canvas

run-locally: venv node_modules
	@ # Run action locally
	source <(cat .env.example | sed 's/^/export /') && python3 ./sources/main.py
.PHONY: run-locally

run-container:
	@ # Run action in container
	docker build -t waka-readme-stats -f Dockerfile .
	docker run --env-file .env.example waka-readme-stats
.PHONY: run-container

clean:
	@ # Clean all build files, including: libraries, package manager configs, docker images and containers
	rm -rf venv
	rm -rf node_modules
	rm -f package*.json
	docker rm -f waka-readme-stats 2> /dev/null || true
	docker rmi $$(docker images -q waka-readme-stats) 2> /dev/null || true
.PHONY: clean
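The run-locally target exports every line of .env.example into the shell before starting the entry point. A rough Python equivalent (an illustration only, assuming python-dotenv is installed and the repository root is the working directory):

import runpy

from dotenv import load_dotenv

# Equivalent of: source <(cat .env.example | sed 's/^/export /')
load_dotenv(".env.example", override=True)

# Equivalent of: python3 ./sources/main.py
runpy.run_path("sources/main.py", run_name="__main__")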

action.yml

@@ -134,7 +134,7 @@ inputs:
 runs:
   using: 'docker'
-  image: 'Dockerfile'
+  image: 'waka-readme-stats:master'
 branding:
   icon: 'activity'

requirements.txt

@@ -1,35 +1,9 @@
-altair==4.1.0
-altair-data-server==0.4.1
-altair-saver==0.5.0
-altair-viewer==0.3.0
-attrs==20.3.0
-certifi==2020.12.5
-chardet==4.0.0
-cycler==0.10.0
-Deprecated==1.2.12
-entrypoints==0.3
-humanize==3.3.0
-idna==2.10
-Jinja2==2.11.3
-jsonschema==3.2.0
-kiwisolver==1.3.1
-MarkupSafe==1.1.1
-matplotlib==3.4.1
-numpy==1.20.2
-pandas==1.2.3
-Pillow==8.2.0
-portpicker==1.3.1
 PyGithub==1.54.1
-PyJWT==1.7.1
-pyparsing==2.4.7
-pyrsistent==0.17.3
-python-dateutil==2.8.1
+matplotlib==3.4.1
 python-dotenv==0.17.0
+numpy==1.24.1
+pandas==1.2.3
+altair==4.1.0
+altair-saver==0.5.0
 pytz==2021.1
-requests==2.25.1
-selenium==3.141.0
-six==1.15.0
-toolz==0.11.1
-tornado==6.1
-urllib3==1.26.5
-wrapt==1.12.1
+humanize==3.3.0
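The pin list shrinks from 35 packages to 9: the dropped entries appear to be either unused or pulled in transitively by the nine top-level pins that remain. A hypothetical smoke test that each remaining pin still imports (note that two PyPI names differ from their import names):

import importlib

# PyGithub imports as 'github'; python-dotenv imports as 'dotenv'.
for module in ("github", "matplotlib", "dotenv", "numpy", "pandas",
               "altair", "altair_saver", "pytz", "humanize"):
    importlib.import_module(module)
print("all pinned dependencies import cleanly")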

(File diff suppressed because it is too large.)

loc.py (all lines unchanged)

@@ -1,108 +1,108 @@
import re
import os
import base64
import requests
from github import Github, InputGitAuthor
import datetime
from string import Template
import matplotlib.pyplot as plt
from io import StringIO, BytesIO
from dotenv import load_dotenv
import time

from make_bar_graph import BarGraph


class LinesOfCode:

    def __init__(self, id, username, ghtoken, repositoryData, ignored_repos):
        self.id = id
        self.username = username
        self.g = Github(ghtoken)
        self.headers = {"Authorization": "Bearer " + ghtoken}
        self.repositoryData = repositoryData
        self.ignored_repos = ignored_repos

    def calculateLoc(self):
        result = self.repositoryData
        yearly_data = {}
        for repo in result['data']['user']['repositories']['edges']:
            if repo['node']['name'] not in self.ignored_repos:
                self.getCommitStat(repo['node'], yearly_data)
                time.sleep(0.7)
        return yearly_data

    def plotLoc(self, yearly_data):
        graph = BarGraph(yearly_data)
        graph.build_graph()
        self.pushChart()

    def run_query_v3(self, endPoint):
        # print(endPoint)
        request = requests.get(endPoint, headers=self.headers)
        if request.status_code == 401:
            raise Exception("Invalid token {}.".format(request.status_code))
        elif request.status_code == 204:
            return []
        else:
            return request.json()

    def getQuarter(self, timeStamp):
        month = datetime.datetime.fromisoformat(timeStamp).month
        if month >= 1 and month <= 3:
            return 1
        elif month >= 4 and month <= 6:
            return 2
        elif month >= 7 and month <= 9:
            return 3
        elif month >= 10 and month <= 12:
            return 4

    def getCommitStat(self, repoDetails, yearly_data):
        commitsURL = 'https://api.github.com/repos/' + repoDetails['nameWithOwner'] + '/commits'
        filteredCommitsEndPoint = commitsURL + '?author=' + self.username
        filteredCommitsResult = self.run_query_v3(filteredCommitsEndPoint)
        # This ignores the error message you get when you try to list commits for an empty repository
        if not type(filteredCommitsResult) == list:
            return
        this_year = datetime.datetime.utcnow().year
        for i in range(len(filteredCommitsResult)):
            iso_date = filteredCommitsResult[i]["commit"]["author"]["date"]
            date = re.search(r'\d+-\d+-\d+', iso_date).group(0)
            curr_year = datetime.datetime.fromisoformat(date).year
            # if curr_year != this_year:
            individualCommitEndPoint = commitsURL + '/' + filteredCommitsResult[i]["sha"]
            individualCommitResult = self.run_query_v3(individualCommitEndPoint)
            quarter = self.getQuarter(date)
            if repoDetails['primaryLanguage'] is not None:
                if curr_year not in yearly_data:
                    yearly_data[curr_year] = {}
                if quarter not in yearly_data[curr_year]:
                    yearly_data[curr_year][quarter] = {}
                if repoDetails['primaryLanguage']['name'] not in yearly_data[curr_year][quarter]:
                    yearly_data[curr_year][quarter][repoDetails['primaryLanguage']['name']] = 0
                yearly_data[curr_year][quarter][repoDetails['primaryLanguage']['name']] += (individualCommitResult["stats"]["additions"] - individualCommitResult["stats"]['deletions'])
        # to find total
        # if 'total' not in yearly_data[curr_year]:
        #     yearly_data[curr_year]['total'] = {}
        # if repoDetails['primaryLanguage']['name'] not in yearly_data[curr_year]['total']:
        #     yearly_data[curr_year]['total'][repoDetails['primaryLanguage']['name']] = 0
        # yearly_data[curr_year]['total'][repoDetails['primaryLanguage']['name']] += (result[i][1] + result[i][2])

    def pushChart(self):
        repo = self.g.get_repo(f"{self.username}/{self.username}")
        committer = InputGitAuthor('readme-bot', '41898282+github-actions[bot]@users.noreply.github.com')
        with open('bar_graph.png', 'rb') as input_file:
            data = input_file.read()
        try:
            contents = repo.get_contents("charts/bar_graph.png")
            repo.update_file(contents.path, "Charts Updated", data, contents.sha, committer=committer)
        except Exception as e:
            repo.create_file("charts/bar_graph.png", "Charts Added", data, committer=committer)
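For context, a hypothetical driver for the class above (not part of the PR). The repositoryData argument is assumed to be the GraphQL response dict the project fetches elsewhere, shaped like {'data': {'user': {'repositories': {'edges': [...]}}}}:

import os

# Stub with the expected GraphQL response shape (empty here, so no API calls are made).
repository_data = {"data": {"user": {"repositories": {"edges": []}}}}

loc = LinesOfCode(
    id=None,                               # unused by the methods shown above
    username=os.getenv("GITHUB_ACTOR", "octocat"),
    ghtoken=os.getenv("INPUT_GH_TOKEN", ""),
    repositoryData=repository_data,
    ignored_repos=["dotfiles"],            # hypothetical ignore list
)
yearly_data = loc.calculateLoc()           # one REST call per commit, throttled by time.sleep(0.7)
# loc.plotLoc(yearly_data)                 # would render bar_graph.png and push it to <user>/<user>
print(yearly_data)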

(File diff suppressed because it is too large.)

make_bar_graph.py (all lines unchanged)

@@ -1,106 +1,106 @@
import os
import pandas as pd
import numpy as np
import altair as alt
import json

# npm install vega-lite vega-cli canvas


class BarGraph:

    def __init__(self, yearly_data):
        self.yearly_data = yearly_data

    def build_graph(self):
        with open(os.path.join(os.path.dirname(__file__), 'colors.json')) as f:
            colors = json.load(f)
        allColorsValues = []
        # filter data
        max_languages = 5
        top_languages = {}
        for year in self.yearly_data.keys():
            for quarter in self.yearly_data[year].keys():
                for language in sorted(list(self.yearly_data[year][quarter].keys()),
                                       key=lambda lang: self.yearly_data[year][quarter][lang],
                                       reverse=True)[0:max_languages]:
                    if 'top' not in self.yearly_data[year][quarter]:
                        self.yearly_data[year][quarter]['top'] = {}
                    if self.yearly_data[year][quarter][language] != 0:
                        self.yearly_data[year][quarter]['top'][language] = self.yearly_data[year][quarter][language]
                        if language not in top_languages:
                            top_languages[language] = 1
                        top_languages[language] += 1
        # print(self.yearly_data)
        all_languages = list(top_languages.keys())
        for language in all_languages:
            if colors[language]['color'] is not None:
                allColorsValues.append(colors[language]['color'])
        languages_all_loc = {}
        for language in all_languages:
            language_year = []
            for year in self.yearly_data.keys():
                language_quarter = [0, 0, 0, 0]
                for quarter in self.yearly_data[year].keys():
                    if language in self.yearly_data[year][quarter]['top']:
                        language_quarter[quarter - 1] = self.yearly_data[year][quarter]['top'][language]
                    else:
                        language_quarter[quarter - 1] = 0
                language_year.append(language_quarter)
            languages_all_loc[language] = language_year
        # print(languages_all_loc)
        language_df = {}

        def prep_df(df, name):
            df = df.stack().reset_index()
            df.columns = ['c1', 'c2', 'values']
            df['Language'] = name
            return df

        for language in languages_all_loc.keys():
            language_df[language] = pd.DataFrame(languages_all_loc[language], index=list(self.yearly_data.keys()),
                                                 columns=["Q1", "Q2", "Q3", "Q4"])
        for language in language_df.keys():
            language_df[language] = prep_df(language_df[language], language)
        df = pd.concat(language_df.values())
        chart = alt.Chart(df).mark_bar().encode(
            # tell Altair which field to group columns on
            x=alt.X('c2:N', title=None),
            # tell Altair which field to use as Y values and how to calculate
            y=alt.Y('sum(values):Q',
                    axis=alt.Axis(
                        grid=False,
                        title='LOC added')),
            # tell Altair which field to use as the set of columns to be represented in each group
            column=alt.Column('c1:N', title=None),
            # tell Altair which field to use for color segmentation
            color=alt.Color('Language:N',
                            scale=alt.Scale(
                                domain=all_languages,
                                # make it look pretty with an enjoyable color palette
                                range=allColorsValues,
                            ),
                            )) \
            .configure_view(
                # remove grid lines around column clusters
                strokeOpacity=0
            )
        chart.save('bar_graph.png')
        return 'bar_graph.png'
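A hypothetical smoke test for BarGraph (not part of the PR): yearly_data maps year → quarter → language → LOC delta, matching what LinesOfCode.calculateLoc() returns. Actually rendering the PNG assumes colors.json sits next to the module and the npm packages from the header comment (vega-lite, vega-cli, canvas) are installed:

sample = {
    2022: {1: {"Python": 1200, "TypeScript": 300}, 3: {"Python": 800}},
    2023: {1: {"Python": 500}},
}
graph = BarGraph(sample)
print(graph.build_graph())  # writes and returns 'bar_graph.png'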