// Require dependencies
const path = require('path');
const cheerio = require('cheerio');
const df = require('date-format');
const {Feed} = require('feed');
const fs = require('fs-extra');
const GitLab = require('gitlab/dist/es5').default;
const gulp = require('gulp');
const htmlclean = require('gulp-htmlclean');
const klaw = require('klaw-sync');
const log = require('fancy-log');
const merge2 = require('merge2');
const scss = require('gulp-sass');
const sync = require('browser-sync');

// Require statistic functions
const {avgTime, freqUsers, labelsAlphabet, changedLines, uniqueContributors} = require('./statistics');

// Define paths that are gonna be used commonly
const paths = {
  data: {
    commits: path.join(__dirname, 'data/commits/'),
    issues: {
      open: path.join(__dirname, 'data/issues/open/'),
      closed: path.join(__dirname, 'data/issues/closed/'),
      out: path.join(__dirname, 'data/issues/out/')
    }
  },
  extra: path.join(__dirname, 'src/favicons/**'),
  html: {
    index: path.join(__dirname, 'src/index.html'),
    posts: path.join(__dirname, 'src/posts/*.html')
  },
  out: path.join(__dirname, 'public/'),
  scss: path.join(__dirname, 'src/scss/*.scss')
};

// Define options for Node Sass and Browser Sync
const opts = {
  scss: {
    outputStyle: 'compressed'
  },
  sync: {
    server: {
      baseDir: paths.out
    }
  }
};

// The month to download data from; months are zero-based so January would be 0
// Make sure both of these are **numbers**; if they are strings it won't work properly!
const wantedMonth = new Date().getMonth();
// Since we've passed from 2018 into 2019 we also have to start checking for the year now
const wantedYear = new Date().getFullYear();

// Init the months array, probably a way to do this with Dates but this works too
const months = [
  'January',
  'February',
  'March',
  'April',
  'May',
  'June',
  'July',
  'August',
  'September',
  'October',
  'November',
  'December'
];

// Add the year and month to the open/closed/out path so they're easy to identify
const commitsPath = `${paths.data.commits}${wantedYear}/${months[wantedMonth]}/`;
const openIssuesPath = `${paths.data.issues.open}${wantedYear}/${months[wantedMonth]}/`; // Folder
const closedIssuesPath = `${paths.data.issues.closed}${wantedYear}/${months[wantedMonth]}/`; // Folder
const outIssuesPath = `${paths.data.issues.out}${months[wantedMonth]}${wantedYear}`; // Will become table and statistics files

// Make the directories using fs-extra's "mkdir -p" equivalent
// It will make any directory that doesn't yet exist in the path
fs.mkdirpSync(commitsPath);
fs.mkdirpSync(openIssuesPath);
fs.mkdirpSync(closedIssuesPath);
fs.mkdirpSync(paths.data.issues.out);

// Create the Browser Sync server; it only starts when using `gulp watch`, however
const server = sync.create();

// Copy over the HTML, using merge2 to combine multiple sources while still using Gulp's async completion
function buildHTML() {
  return merge2([
    gulp.src(paths.html.index).pipe(htmlclean()).pipe(gulp.dest(paths.out)),
    gulp.src(paths.html.posts).pipe(htmlclean()).pipe(gulp.dest(paths.out + 'posts/'))
  ]);
}

// Build the CSS
function buildCSS() {
  return gulp
    .src(paths.scss)
    .pipe(scss(opts.scss))
    .pipe(gulp.dest(paths.out + 'css/'));
}

// Build the extra stuff, for now only the favicons
function buildExtra() {
  return gulp
    .src(paths.extra)
    .pipe(gulp.dest(paths.out));
}

// Start the Browser Sync server and watch individual file types with appropriate build functions
function watch() {
  server.init(opts.sync);
  gulp.watch([paths.html.index, paths.html.posts], gulp.series(buildHTML, createFeeds, reload));
  gulp.watch(paths.scss, gulp.series(buildCSS, reload));
  gulp.watch(paths.extra, gulp.series(buildExtra, reload));
}

// To use Gulp's async completion system this has to be done; it's ugly, but we can't do without it
function reload(callback) {
  server.reload();
  callback();
}

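// Download the project's issues and commits from the Tildes GitLab repository and
// save the ones from the wanted month as JSON files in the data directories made above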
function download() {
  // Create the API with the token
  const api = new GitLab({token: require('./config.json').token});

  // Return a new Promise so we can take advantage of Gulp's async completion system
  // We'll reject whenever there is an error and resolve when everything is completed
  return new Promise((resolve, reject) => {
    // The Node GitLab API is a bit weird: first we have to find the project Tildes/Tildes
    api.Projects
      .show('tildes/tildes')
      .catch(error => reject(new Error(`There was an error fetching the project: ${error}`)))
      .then(project => {
        log('Found project, downloading issues...');
        // Then once we find the project we can use it and its ID to download the issues
        api.Issues
          .all({projectId: project.id})
          .catch(error => reject(new Error(`There was an error downloading the issues: ${error}`)))
          .then(issues => {
            // And then once we've downloaded all the issues we can write them to file appropriately
            log(`Downloaded issues, saving opened and closed issues from ${months[wantedMonth]} ${wantedYear} to file...`);
            for (const issue of issues) {
              const createdDate = new Date(issue.created_at);
              if (createdDate.getFullYear() === wantedYear &&
                createdDate.getMonth() === wantedMonth) {
                fs.writeFileSync(openIssuesPath + `${issue.iid}.json`, JSON.stringify(issue, null, 2));
              }

              const closedDate = new Date(issue.closed_at);
              if (issue.closed_at !== null &&
                closedDate.getFullYear() === wantedYear &&
                closedDate.getMonth() === wantedMonth) {
                fs.writeFileSync(closedIssuesPath + `${issue.iid}.json`, JSON.stringify(issue, null, 2));
              }
            }

            log('Finished writing issues to file.');
            log('Downloading commits...');
          })
          .then(() => {
            api.Commits.all(project.id, {ref_name: 'master', with_stats: true})
              .catch(error => reject(new Error(`There was an error downloading the commits: ${error}`)))
              .then(commits => {
                log(`Downloaded commits, saving commits from ${months[wantedMonth]} ${wantedYear} to file...`);
                for (const commit of commits) {
                  const authoredDate = new Date(commit.authored_date);
                  if (authoredDate.getFullYear() === wantedYear &&
                    authoredDate.getMonth() === wantedMonth) {
                    fs.writeFileSync(commitsPath + `${commit.short_id}.json`, JSON.stringify(commit, null, 2));
                  }
                }

                log('Finished writing commits to file.');
                resolve();
              });
          });
      });
  });
}

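// Generate the HTML table of issues opened and closed in the wanted month and
// write it to the "<Month><Year>_table.html" file in the issues out directory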
function createIssueTable() {
  // Using a Promise again for Gulp's async completion
  return new Promise(resolve => {
    // Klaw returns all files in a directory recursively so we're getting all opened and closed issue files
    const opened = klaw(openIssuesPath);
    const closed = klaw(closedIssuesPath);

    // Then we want to sort all of these issue files in their arrays
    opened.sort((a, b) => {
      const aFile = require(a.path);
      const bFile = require(b.path);
      return (aFile.iid > bFile.iid) ? 1 : ((bFile.iid > aFile.iid) ? -1 : 0);
    });

    closed.sort((a, b) => {
      const aFile = require(a.path);
      const bFile = require(b.path);
      return (aFile.iid > bFile.iid) ? 1 : ((bFile.iid > aFile.iid) ? -1 : 0);
    });

    // And then generate the Issue Table HTML, which is kind of a mess to do
    let table = '<article id="issue-table">\n';
    table += '  <h2>Issue Table</h2>\n';

    table += '  <h3 id="opened">Opened</h3>\n';
    table += '  <table>\n';
    table += '    <thead>\n';
    table += '      <tr>\n';
    table += '        <td>Issue</td>\n';
    table += '        <td>Title</td>\n';
    table += '        <td>Author</td>\n';
    table += '        <td>Opened</td>\n';
    table += '        <td>Closed</td>\n';
    table += '      </tr>\n';
    table += '    </thead>\n';
    table += '    <tbody>\n';

    for (const file of opened) {
      const issue = require(file.path);
      table += '      <tr>\n';
      table += `        <td><a href="${issue.web_url}">${issue.iid}</a></td>\n`;

      let title;
      if (issue.title.length >= 50) {
        // We're going to be replacing all instances of <> signs to make sure nobody can add
        // <script></script> in their issue title and run JS on the site or mess up the layout or something
        // I do check myself before I commit and push anything but I'd rather be completely sure.
        title = issue.title.substring(0, 47).replace(/[<>]/g, '') + '...';
      } else {
        title = issue.title.replace(/[<>]/g, '');
      }

      table += `        <td>${title}</td>\n`;
      table += `        <td><a href="${issue.author.web_url}">${issue.author.username}</a></td>\n`;
      table += `        <td>${df.asString('yyyy/MM/dd hh:mm:ss', new Date(issue.created_at))}</td>\n`;

      let closedAt;
      if (issue.closed_at === null) {
        closedAt = '';
      } else {
        closedAt = df.asString('yyyy/MM/dd hh:mm:ss', new Date(issue.closed_at));
      }

      table += `        <td>${closedAt}</td>\n`;
      table += '      </tr>\n';
    }

    table += '    </tbody>\n';
    table += '  </table>\n\n';

    table += '  <h3 id="closed">Closed</h3>\n';
    table += '  <table>\n';
    table += '    <thead>\n';
    table += '      <tr>\n';
    table += '        <td>Issue</td>\n';
    table += '        <td>Title</td>\n';
    table += '        <td>Author</td>\n';
    table += '        <td>Opened</td>\n';
    table += '        <td>Closed</td>\n';
    table += '      </tr>\n';
    table += '    </thead>\n';
    table += '    <tbody>\n';

    for (const file of closed) {
      const issue = require(file.path);
      table += '      <tr>\n';
      table += `        <td><a href="${issue.web_url}">${issue.iid}</a></td>\n`;

      let title;
      if (issue.title.length >= 50) {
        title = issue.title.substring(0, 47).replace(/[<>]/g, '') + '...';
      } else {
        title = issue.title.replace(/[<>]/g, '');
      }

      table += `        <td>${title}</td>\n`;
      table += `        <td><a href="${issue.author.web_url}">${issue.author.username}</a></td>\n`;
      table += `        <td>${df.asString('yyyy/MM/dd hh:mm:ss', new Date(issue.created_at))}</td>\n`;

      let closedAt;
      if (issue.closed_at === null) {
        closedAt = '';
      } else {
        closedAt = df.asString('yyyy/MM/dd hh:mm:ss', new Date(issue.closed_at));
      }

      table += `        <td>${closedAt}</td>\n`;
      table += '      </tr>\n';
    }

    table += '    </tbody>\n';
    table += '  </table>\n';
    table += '</article>\n';

    // And finally when the HTML is done generating we can write it and resolve that Promise we made
    fs.writeFileSync(outIssuesPath + '_table.html', table, {encoding: 'UTF-8'});
    resolve();
  });
}

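// Generate the statistics HTML for the wanted month using the helper functions from
// statistics.js and write it next to the issue table in the issues out directory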
function createStatistics() {
  return new Promise(resolve => {
    // Same process as the Issue Table generation
    const commits = klaw(commitsPath);
    const opened = klaw(openIssuesPath);
    const closed = klaw(closedIssuesPath);

    let statistics = '<article id="statistics">\n';
    statistics += '  <h2>Statistics</h2>\n';

    const commitStats = changedLines(commits);
    const contributors = uniqueContributors(commits);

    statistics += `  <p>In the month of ${months[wantedMonth]}, `;
    statistics += `${commits.length} commits were made by ${contributors.length} contributors, `;
    statistics += `changing a total of ${Math.abs(commitStats.total)} (+${commitStats.added}|-${commitStats.deleted}) lines. `;
    statistics += `${opened.length} issues were opened and `;
    statistics += `${closed.length} issues were closed.</p>\n`;

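    // Note: the daily averages below assume a 30-day month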
    statistics += `  <p>An average of ${(opened.length / 30).toFixed(2)} issues were opened `;
    statistics += `and ${(closed.length / 30).toFixed(2)} issues were closed each day.</p>\n`;

    statistics += `  <p>The average time to close issues was ${avgTime(closed, 'days')} days `;
    statistics += `or ${avgTime(closed, 'hours')} hours.</p>\n`;

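    // List the three users who created the most issues this month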
    const topUsers = freqUsers(opened, 3);
    statistics += '  <p>Top 3 issue creators:</p>\n';
    statistics += '  <ol>\n';
    for (const user in topUsers) {
      statistics += '    <li>\n';
      statistics += `      <a href="https://gitlab.com/${user}">${user}</a>`;
      statistics += ' with ';
      statistics += `<a href="https://gitlab.com/tildes/tildes/issues?state=all&author_username=${user}">${topUsers[user]} issues created</a>.\n`;
      statistics += '    </li>\n';
    }

    statistics += '  </ol>\n';

    let labels = labelsAlphabet(opened, true);
    statistics += '  <p>Amount of labels assigned to currently open issues:</p>\n';
    statistics += '  <ul>\n';
    for (const label in labels) {
      statistics += '    <li>\n';
      statistics += `      <a href="https://gitlab.com/tildes/tildes/issues?state=opened&label_name%5B%5D=${label.replace(/ /g, '+')}">${label}</a>: `;
      statistics += `${labels[label]} `;
      if (labels[label] === 1) {
        statistics += 'time.\n';
      } else {
        statistics += 'times.\n';
      }

      statistics += '    </li>\n';
    }

    statistics += '  </ul>\n';

    labels = labelsAlphabet(closed, false);
    statistics += '  <p>Amount of labels assigned to closed issues:</p>\n';
    statistics += '  <ul>\n';
    for (const label in labels) {
      statistics += '    <li>\n';
      statistics += `      <a href="https://gitlab.com/tildes/tildes/issues?state=closed&label_name%5B%5D=${label.replace(/ /g, '+')}">${label}</a>: `;
      statistics += `${labels[label]} `;
      if (labels[label] === 1) {
        statistics += 'time.\n';
      } else {
        statistics += 'times.\n';
      }

      statistics += '    </li>\n';
    }

    statistics += '  </ul>\n';
    statistics += '</article>\n';

    fs.writeFileSync(outIssuesPath + '_statistics.html', statistics, {encoding: 'UTF-8'});
    resolve();
  });
}

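// Create the Atom, JSON and RSS feeds from the built posts using the feed package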
function createFeeds() {
  const feed = new Feed({
    title: 'Tildes Issue Log',
    description: 'Monthly blog highlighting the changes of Tildes.net',
    id: 'https://til.bauke.xyz',
    link: 'https://til.bauke.xyz',
    language: 'en',
    image: 'https://til.bauke.xyz/android-chrome-192x192.png',
    favicon: 'https://til.bauke.xyz/favicon.ico',
    copyright: 'AGPL-3.0-or-later Tildes Issue Log Contributors https://gitlab.com/Bauke/tildes-issue-log',
    generator: 'https://github.com/jpmonette/feed',
    feedLinks: {
      atom: 'https://til.bauke.xyz/feed.atom',
      json: 'https://til.bauke.xyz/feed.json',
      rss: 'https://til.bauke.xyz/feed.rss'
    },
    author: {
      name: 'Bauke',
      email: 'me@bauke.xyz',
      link: 'https://bauke.xyz'
    }
  });
  const posts = fs.readdirSync(path.join(paths.out, 'posts'));
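  // Post filenames are assumed to look like "<month>-<year>.html" (for example "january-2019.html"):
  // the month name is taken from before the dash and the year from the digits in the name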
  // Sort the posts by descending year and month
  posts.sort((a, b) => {
    const yearA = Number(a.replace(/\D/g, ''));
    const yearB = Number(b.replace(/\D/g, ''));
    if (yearA === yearB) {
      const monthA = months.join(',').toLowerCase().split(',').indexOf(a.substring(0, a.indexOf('-'))) + 1;
      const monthB = months.join(',').toLowerCase().split(',').indexOf(b.substring(0, b.indexOf('-'))) + 1;
      return monthB - monthA;
    }

    return yearB - yearA;
  });
  for (const post of posts) {
    // Skip the template since it doesn't need to be included
    if (post.includes('template')) {
      continue;
    }

    const html = fs.readFileSync(path.join(paths.out, 'posts', post), 'UTF8');
    const $ = cheerio.load(html);
    const title = $('#wrapper>h1').text();
    const id = `https://til.bauke.xyz/posts/${post}`;
    const date = new Date(Date.UTC(
      Number(post.replace(/\D/g, '')),
      // Add one to the month since UTC months are 0 based and since we set the
      // day as 0 we'll get the Date back as the last day of the previous month
      months.join(',').toLowerCase().split(',').indexOf(post.substring(0, post.indexOf('-'))) + 1,
      0, 23, 59, 59
    ));
    const content = $('#post')
      .html()
      .replace(/<article id="toc">.+?<\/article>/g, ''); // Remove the TOC
    feed.addItem({
      title,
      id,
      link: id,
      date,
      published: date,
      description: `${title}'s Issue Log`,
      content,
      image: 'https://til.bauke.xyz/android-chrome-192x192.png'
    });
  }

  fs.writeFileSync(path.join(paths.out, 'feed.atom'), feed.atom1());
  fs.writeFileSync(path.join(paths.out, 'feed.json'), feed.json1());
  fs.writeFileSync(path.join(paths.out, 'feed.rss'), feed.rss2());
  return Promise.resolve();
}

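// The exported Gulp tasks; run them with, for example, "npx gulp build" or "npx gulp watch"
// (the download task needs a config.json containing a GitLab token, see the download function above)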
exports.build = gulp.series(gulp.parallel(buildHTML, buildCSS, buildExtra), createFeeds);
exports.download = gulp.series(download, gulp.parallel(createIssueTable, createStatistics));
exports.no_download = gulp.parallel(createIssueTable, createStatistics);
exports.watch = gulp.series(gulp.parallel(buildHTML, buildCSS, buildExtra), createFeeds, watch);