// Require dependencies
const path = require('path');
const cheerio = require('cheerio');
const df = require('date-format');
const {Feed} = require('feed');
const fs = require('fs-extra');
const GitLab = require('gitlab').Gitlab;
const gulp = require('gulp');
const htmlclean = require('gulp-htmlclean');
const klaw = require('klaw-sync');
const log = require('fancy-log');
const merge2 = require('merge2');
const scss = require('gulp-sass');
const sync = require('browser-sync');

// Require statistic functions
const {avgTime, freqUsers, labelsAlphabet, changedLines, uniqueContributors} = require('./statistics');

// Define commonly used paths
const paths = {
  data: {
    commits: path.join(__dirname, 'data/commits/'),
    issues: {
      open: path.join(__dirname, 'data/issues/open/'),
      closed: path.join(__dirname, 'data/issues/closed/'),
      out: path.join(__dirname, 'data/issues/out/')
    }
  },
  extra: path.join(__dirname, 'src/favicons/**'),
  html: {
    index: path.join(__dirname, 'src/index.html'),
    posts: path.join(__dirname, 'src/posts/*.html')
  },
  out: path.join(__dirname, 'public/'),
  scss: path.join(__dirname, 'src/scss/*.scss')
};

// Define options for Node Sass and Browser Sync
const opts = {
  scss: {
    outputStyle: 'compressed'
  },
  sync: {
    server: {
      baseDir: paths.out
    }
  }
};

// The month to download data for; months are zero-based, so January is 0
// Make sure both of these are **numbers**; if they are strings it won't work properly!
const wantedMonth = new Date().getMonth();
// Since we've crossed from 2018 into 2019 we also have to check the year now
const wantedYear = new Date().getFullYear();

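// For example, to generate the log for a specific month instead of the current
// one, the two constants above could be hard-coded (illustrative values only):
//   const wantedMonth = 0; // January
//   const wantedYear = 2019;
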
// Init the months array; there's probably a way to do this with Dates but this works too
const months = [
  'January',
  'February',
  'March',
  'April',
  'May',
  'June',
  'July',
  'August',
  'September',
  'October',
  'November',
  'December'
];

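// (A Date-based alternative, if the array is ever dropped, could be something
// like: new Date(2000, index).toLocaleString('en', {month: 'long'}).)
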
// Add the year and month to the open/closed/out path so they're easy to identify
const commitsPath = `${paths.data.commits}${wantedYear}/${months[wantedMonth]}/`;
const openIssuesPath = `${paths.data.issues.open}${wantedYear}/${months[wantedMonth]}/`; // Folder
const closedIssuesPath = `${paths.data.issues.closed}${wantedYear}/${months[wantedMonth]}/`; // Folder
const outIssuesPath = `${paths.data.issues.out}${months[wantedMonth]}${wantedYear}`; // Will become table and statistics files

// Make the directories using fs-extra's "mkdir -p" equivalent
// It will make any directory that doesn't yet exist in the path
fs.mkdirpSync(commitsPath);
fs.mkdirpSync(openIssuesPath);
fs.mkdirpSync(closedIssuesPath);
fs.mkdirpSync(paths.data.issues.out);

// Create the Browser Sync server; it only starts when running `gulp watch`
const server = sync.create();

// Copy over the HTML, using merge2 for Gulp's async completion with multiple sources
function buildHTML() {
  return merge2([
    gulp.src(paths.html.index).pipe(htmlclean()).pipe(gulp.dest(paths.out)),
    gulp.src(paths.html.posts).pipe(htmlclean()).pipe(gulp.dest(paths.out + 'posts/'))
  ]);
}

// Build the CSS
function buildCSS() {
  return gulp
    .src(paths.scss)
    .pipe(scss(opts.scss))
    .pipe(gulp.dest(paths.out + 'css/'));
}

// Build the extra stuff, for now only the favicons
function buildExtra() {
  return gulp
    .src(paths.extra)
    .pipe(gulp.dest(paths.out));
}

// Start the Browser Sync server and watch individual file types with appropriate build functions
function watch() {
  server.init(opts.sync);
  gulp.watch([paths.html.index, paths.html.posts], gulp.series(buildHTML, createFeeds, reload));
  gulp.watch(paths.scss, gulp.series(buildCSS, reload));
  gulp.watch(paths.extra, gulp.series(buildExtra, reload));
}

// Gulp's async completion system needs a callback like this; it's ugly but we can't do without it
function reload(callback) {
  server.reload();
  callback();
}

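// Download the project's issues and commits from the GitLab API and save the
// ones from the wanted month to file. The API token is read from config.json,
// which is assumed to look something like: {"token": "<GitLab personal access token>"}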
function download() {
  // Create the API with the token
  const api = new GitLab({token: require('./config.json').token});

  // Return a new Promise so we can take advantage of Gulp's async completion system
  // We'll reject whenever there is an error and resolve when everything is completed
  return new Promise((resolve, reject) => {
    // The Node GitLab API is a bit weird, first we have to find the project Tildes/Tildes
    api.Projects
      .show('tildes/tildes')
      .catch(error => reject(new Error(`There was an error fetching the project: ${error}`)))
      .then(project => {
        log('Found project, downloading issues...');
        // Then once we find the project we can use it and its ID to download the issues
        api.Issues
          .all({projectId: project.id})
          .catch(error => reject(new Error(`There was an error downloading the issues: ${error}`)))
          .then(issues => {
            // And then once we've downloaded all the issues we can write them to file appropriately
            log(`Downloaded issues, saving opened and closed issues from ${months[wantedMonth]} ${wantedYear} to file...`);
            for (const issue of issues) {
              const createdDate = new Date(issue.created_at);
              if (createdDate.getFullYear() === wantedYear &&
                createdDate.getMonth() === wantedMonth) {
                fs.writeFileSync(openIssuesPath + `${issue.iid}.json`, JSON.stringify(issue, null, 2));
              }

              const closedDate = new Date(issue.closed_at);
              if (issue.closed_at !== null &&
                closedDate.getFullYear() === wantedYear &&
                closedDate.getMonth() === wantedMonth) {
                fs.writeFileSync(closedIssuesPath + `${issue.iid}.json`, JSON.stringify(issue, null, 2));
              }
            }

            log('Finished writing issues to file.');
            log('Downloading commits...');
          })
          .then(() => {
            api.Commits.all(project.id, {ref_name: 'master', with_stats: true})
              .catch(error => reject(new Error(`There was an error downloading the commits: ${error}`)))
              .then(commits => {
                log(`Downloaded commits, saving commits from ${months[wantedMonth]} ${wantedYear} to file...`);
                for (const commit of commits) {
                  const authoredDate = new Date(commit.authored_date);
                  if (authoredDate.getFullYear() === wantedYear &&
                    authoredDate.getMonth() === wantedMonth) {
                    fs.writeFileSync(commitsPath + `${commit.short_id}.json`, JSON.stringify(commit, null, 2));
                  }
                }

                log('Finished writing commits to file.');
                resolve();
              });
          });
      });
  });
}

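// Generate the issue table HTML from the downloaded open and closed issue
// files and write it to the data output directory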
function createIssueTable() {
  // Using a Promise again for Gulp's async completion
  return new Promise(resolve => {
    // Klaw returns all files in a directory recursively, so we're getting all opened and closed issue files
    const opened = klaw(openIssuesPath);
    const closed = klaw(closedIssuesPath);

    // Then we want to sort all of these issue files in their arrays
    opened.sort((a, b) => {
      const aFile = require(a.path);
      const bFile = require(b.path);
      return aFile.iid - bFile.iid;
    });

    closed.sort((a, b) => {
      const aFile = require(a.path);
      const bFile = require(b.path);
      return aFile.iid - bFile.iid;
    });

    // And then generate the Issue Table HTML, which is kind of a mess to do
    let table = '<article id="issue-table">\n';
    table += '  <h2>Issue Table</h2>\n';

    table += '  <h3 id="opened">Opened</h3>\n';
    table += '  <table>\n';
    table += '    <thead>\n';
    table += '      <tr>\n';
    table += '        <td>Issue</td>\n';
    table += '        <td>Title</td>\n';
    table += '        <td>Author</td>\n';
    table += '        <td>Opened</td>\n';
    table += '        <td>Closed</td>\n';
    table += '      </tr>\n';
    table += '    </thead>\n';
    table += '    <tbody>\n';

    for (const file of opened) {
      const issue = require(file.path);
      table += '      <tr>\n';
      table += `        <td><a href="${issue.web_url}">${issue.iid}</a></td>\n`;

      let title;
      if (issue.title.length >= 50) {
        // We're going to replace all instances of <> signs to make sure nobody can add
        // <script></script> to their issue title and run JS on the site or mess up the layout.
        // I do check myself before I commit and push anything, but I'd rather be completely sure.
        title = issue.title.slice(0, 47).replace(/[<>]/g, '') + '...';
      } else {
        title = issue.title.replace(/[<>]/g, '');
      }

      table += `        <td>${title}</td>\n`;
      table += `        <td><a href="${issue.author.web_url}">${issue.author.username}</a></td>\n`;
      table += `        <td>${df.asString('yyyy/MM/dd hh:mm:ss', new Date(issue.created_at))}</td>\n`;

      let closedAt;
      if (issue.closed_at === null) {
        closedAt = '';
      } else {
        closedAt = df.asString('yyyy/MM/dd hh:mm:ss', new Date(issue.closed_at));
      }

      table += `        <td>${closedAt}</td>\n`;
      table += '      </tr>\n';
    }

    table += '    </tbody>\n';
    table += '  </table>\n\n';

    table += '  <h3 id="closed">Closed</h3>\n';
    table += '  <table>\n';
    table += '    <thead>\n';
    table += '      <tr>\n';
    table += '        <td>Issue</td>\n';
    table += '        <td>Title</td>\n';
    table += '        <td>Author</td>\n';
    table += '        <td>Opened</td>\n';
    table += '        <td>Closed</td>\n';
    table += '      </tr>\n';
    table += '    </thead>\n';
    table += '    <tbody>\n';

    for (const file of closed) {
      const issue = require(file.path);
      table += '      <tr>\n';
      table += `        <td><a href="${issue.web_url}">${issue.iid}</a></td>\n`;

      let title;
      if (issue.title.length >= 50) {
        title = issue.title.slice(0, 47).replace(/[<>]/g, '') + '...';
      } else {
        title = issue.title.replace(/[<>]/g, '');
      }

      table += `        <td>${title}</td>\n`;
      table += `        <td><a href="${issue.author.web_url}">${issue.author.username}</a></td>\n`;
      table += `        <td>${df.asString('yyyy/MM/dd hh:mm:ss', new Date(issue.created_at))}</td>\n`;

      let closedAt;
      if (issue.closed_at === null) {
        closedAt = '';
      } else {
        closedAt = df.asString('yyyy/MM/dd hh:mm:ss', new Date(issue.closed_at));
      }

      table += `        <td>${closedAt}</td>\n`;
      table += '      </tr>\n';
    }

    table += '    </tbody>\n';
    table += '  </table>\n';
    table += '</article>\n';

    // And finally when the HTML is done generating we can write it and resolve the Promise we made
    fs.writeFileSync(outIssuesPath + '_table.html', table, {encoding: 'UTF-8'});
    resolve();
  });
}

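// Generate the statistics HTML (commits, contributors, issue counts, average
// close times and label counts) and write it to the data output directory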
function createStatistics() {
  return new Promise(resolve => {
    // Same process as the Issue Table generation
    const commits = klaw(commitsPath);
    const opened = klaw(openIssuesPath);
    const closed = klaw(closedIssuesPath);

    let statistics = '<article id="statistics">\n';
    statistics += '  <h2>Statistics</h2>\n';

    const commitStats = changedLines(commits);
    const contributors = uniqueContributors(commits);

    statistics += `  <p>In the month of ${months[wantedMonth]}, `;
    statistics += `${commits.length} commits were made by ${contributors.length} contributors, `;
    statistics += `changing a total of ${Math.abs(commitStats.total)} (+${commitStats.added}|-${commitStats.deleted}) lines. `;
    statistics += `${opened.length} issues were opened and `;
    statistics += `${closed.length} issues were closed.</p>\n`;

    // Note that this approximates every month as 30 days
    statistics += `  <p>An average of ${(opened.length / 30).toFixed(2)} issues were opened `;
    statistics += `and ${(closed.length / 30).toFixed(2)} issues were closed each day.</p>\n`;

    statistics += `  <p>The average time to close issues was ${avgTime(closed, 'days')} days `;
    statistics += `or ${avgTime(closed, 'hours')} hours.</p>\n`;

    const topUsers = freqUsers(opened, 3);
    statistics += '  <p>Top 3 issue creators:</p>\n';
    statistics += '  <ol>\n';
    for (const user in topUsers) {
      statistics += '    <li>\n';
      statistics += `      <a href="https://gitlab.com/${user}">${user}</a>`;
      statistics += ' with ';
      statistics += `<a href="https://gitlab.com/tildes/tildes/issues?state=all&author_username=${user}">${topUsers[user]} issues created</a>.\n`;
      statistics += '    </li>\n';
    }

    statistics += '  </ol>\n';

    let labels = labelsAlphabet(opened, true);
    statistics += '  <p>Amount of labels assigned to currently open issues:</p>\n';
    statistics += '  <ul>\n';
    for (const label in labels) {
      statistics += '    <li>\n';
      statistics += `      <a href="https://gitlab.com/tildes/tildes/issues?state=opened&label_name%5B%5D=${label.replace(/ /g, '+')}">${label}</a>: `;
      statistics += `${labels[label]} `;
      if (labels[label] === 1) {
        statistics += 'time.\n';
      } else {
        statistics += 'times.\n';
      }

      statistics += '    </li>\n';
    }

    statistics += '  </ul>\n';

    labels = labelsAlphabet(closed, false);
    statistics += '  <p>Amount of labels assigned to closed issues:</p>\n';
    statistics += '  <ul>\n';
    for (const label in labels) {
      statistics += '    <li>\n';
      statistics += `      <a href="https://gitlab.com/tildes/tildes/issues?state=opened&label_name%5B%5D=${label.replace(/ /g, '+')}">${label}</a>: `;
      statistics += `${labels[label]} `;
      if (labels[label] === 1) {
        statistics += 'time.\n';
      } else {
        statistics += 'times.\n';
      }

      statistics += '    </li>\n';
    }

    statistics += '  </ul>\n';
    statistics += '</article>\n';

    fs.writeFileSync(outIssuesPath + '_statistics.html', statistics, {encoding: 'UTF-8'});
    resolve();
  });
}

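// Build the Atom, JSON and RSS feeds from the five most recent posts in the
// output posts directory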
function createFeeds() {
  const feed = new Feed({
    title: 'Tildes Issue Log',
    description: 'Monthly blog highlighting the changes of Tildes.net',
    id: 'https://til.bauke.xyz',
    link: 'https://til.bauke.xyz',
    language: 'en',
    image: 'https://til.bauke.xyz/android-chrome-192x192.png',
    favicon: 'https://til.bauke.xyz/favicon.ico',
    copyright: 'AGPL-3.0-or-later Tildes Issue Log Contributors https://gitlab.com/Bauke/tildes-issue-log',
    generator: 'https://github.com/jpmonette/feed',
    feedLinks: {
      atom: 'https://til.bauke.xyz/feed.atom',
      json: 'https://til.bauke.xyz/feed.json',
      rss: 'https://til.bauke.xyz/feed.rss'
    },
    author: {
      name: 'Bauke',
      email: 'me@bauke.xyz',
      link: 'https://bauke.xyz'
    }
  });

  const posts = fs.readdirSync(path.join(paths.out, 'posts'));
  // Remove the template, which doesn't need to be included
  posts.splice(posts.indexOf('template.html'), 1);
  // Sort the posts by descending year and month
  posts.sort((a, b) => {
    const yearA = Number(a.replace(/\D/g, ''));
    const yearB = Number(b.replace(/\D/g, ''));
    if (yearA === yearB) {
      const monthA = months.map(month => month.toLowerCase()).indexOf(a.slice(0, a.indexOf('-'))) + 1;
      const monthB = months.map(month => month.toLowerCase()).indexOf(b.slice(0, b.indexOf('-'))) + 1;
      return monthB - monthA;
    }

    return yearB - yearA;
  });

  // Add the five most recent posts to the feed
  for (let i = 0; i < Math.min(posts.length, 5); i++) {
    const post = posts[i];
    const html = fs.readFileSync(path.join(paths.out, 'posts', post), 'UTF8');
    const $ = cheerio.load(html);
    const title = $('#wrapper>h1').text();
    const id = `https://til.bauke.xyz/posts/${post}`;
    const date = new Date(Date.UTC(
      Number(post.replace(/\D/g, '')),
      // Add one to the month since UTC months are zero-based; since we set the
      // day as 0 we'll get the Date back as the last day of the previous month
      months.map(month => month.toLowerCase()).indexOf(post.slice(0, post.indexOf('-'))) + 1,
      0, 23, 59, 59
    ));
    const content = $('#post')
      .html()
      .replace(/<article id="toc">.+?<\/article>/g, ''); // Remove the TOC
    feed.addItem({
      title,
      id,
      link: id,
      date,
      published: date,
      description: `${title}'s Issue Log`,
      content,
      image: 'https://til.bauke.xyz/android-chrome-192x192.png'
    });
  }

  fs.writeFileSync(path.join(paths.out, 'feed.atom'), feed.atom1());
  fs.writeFileSync(path.join(paths.out, 'feed.json'), feed.json1());
  fs.writeFileSync(path.join(paths.out, 'feed.rss'), feed.rss2());
  return Promise.resolve();
}

exports.build = gulp.series(gulp.parallel(buildHTML, buildCSS, buildExtra), createFeeds);
exports.download = gulp.series(download, gulp.parallel(createIssueTable, createStatistics));
exports.no_download = gulp.parallel(createIssueTable, createStatistics);
exports.watch = gulp.series(gulp.parallel(buildHTML, buildCSS, buildExtra), createFeeds, watch);
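
// The exported tasks can be run with the Gulp CLI (assuming gulp-cli is installed), for example:
//   gulp build        # build the HTML, CSS, extras and feeds
//   gulp download     # download the data, then generate the table and statistics
//   gulp no_download  # regenerate the table and statistics from existing data
//   gulp watch        # build everything, then serve and rebuild on changes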