From 23f0cff306082f2ef536665ce8bbe01476dafa52 Mon Sep 17 00:00:00 2001 From: Bauke Date: Fri, 5 Jul 2019 23:51:11 +0200 Subject: [PATCH] fix: fix all linting errors --- gulpfile.js | 475 ++++++++++++++++++++------------------ package.json | 6 +- src/scss/_anchor.scss | 2 - src/scss/_colors.scss | 2 +- src/scss/_responsive.scss | 2 +- src/scss/common.scss | 19 +- src/scss/index.scss | 2 +- src/scss/post.scss | 22 +- statistics.js | 133 +++++++---- 9 files changed, 362 insertions(+), 301 deletions(-) diff --git a/gulpfile.js b/gulpfile.js index ab93c4e..612c69f 100644 --- a/gulpfile.js +++ b/gulpfile.js @@ -1,21 +1,20 @@ // Require dependencies -const - cheerio = require('cheerio'), - df = require('date-format'), - {Feed} = require('feed'), - fs = require('fs-extra'), - gitlab = require('gitlab/dist/es5').default, - gulp = require('gulp'), - htmlclean = require('gulp-htmlclean'), - klaw = require('klaw-sync'), - log = require('fancy-log'), - merge2 = require('merge2'), - path = require('path'), - scss = require('gulp-sass'), - sync = require('browser-sync') +const path = require('path'); +const cheerio = require('cheerio'); +const df = require('date-format'); +const {Feed} = require('feed'); +const fs = require('fs-extra'); +const GitLab = require('gitlab/dist/es5').default; +const gulp = require('gulp'); +const htmlclean = require('gulp-htmlclean'); +const klaw = require('klaw-sync'); +const log = require('fancy-log'); +const merge2 = require('merge2'); +const scss = require('gulp-sass'); +const sync = require('browser-sync'); // Require statistic functions -const { avgTime, freqUsers, labelsAlphabet, changedLines, uniqueContributors } = require('./statistics') +const {avgTime, freqUsers, labelsAlphabet, changedLines, uniqueContributors} = require('./statistics'); // Define paths that are gonna be used commonly const paths = { @@ -24,35 +23,35 @@ const paths = { issues: { open: path.join(__dirname, 'data/issues/open/'), closed: path.join(__dirname, 'data/issues/closed/'), - out: path.join(__dirname, 'data/issues/out/'), - }, + out: path.join(__dirname, 'data/issues/out/') + } }, extra: path.join(__dirname, 'src/favicons/**'), - html: { + html: { index: path.join(__dirname, 'src/index.html'), - posts: path.join(__dirname, 'src/posts/*.html'), + posts: path.join(__dirname, 'src/posts/*.html') }, out: path.join(__dirname, 'public/'), - scss: path.join(__dirname, 'src/scss/*.scss'), -} + scss: path.join(__dirname, 'src/scss/*.scss') +}; // Define options for Node Sass and Browser Sync const opts = { scss: { - outputStyle: 'compressed', + outputStyle: 'compressed' }, sync: { server: { - baseDir: paths.out, - }, - }, -} + baseDir: paths.out + } + } +}; // The data to download from specified month, months are zero-based zo January would be 0 // Make sure both of these are **numbers**, if they are strings it won't work properly! 
-const wantedMonth = new Date().getMonth() +const wantedMonth = new Date().getMonth(); // Since we've passed from 2018 into 2019 we also have to start checking for year now -const wantedYear = new Date().getFullYear() +const wantedYear = new Date().getFullYear(); // Init the months array, probably a way to do this with Dates but this works too const months = [ @@ -67,31 +66,31 @@ const months = [ 'September', 'October', 'November', - 'December', -] + 'December' +]; // Add the year and month to the open/closed/out path so they're easy to identify -const commitsPath = `${paths.data.commits}${wantedYear}/${months[wantedMonth]}/` -const openIssuesPath = `${paths.data.issues.open}${wantedYear}/${months[wantedMonth]}/` // folder -const closedIssuesPath = `${paths.data.issues.closed}${wantedYear}/${months[wantedMonth]}/` // folder -const outIssuesPath = `${paths.data.issues.out}${months[wantedMonth]}${wantedYear}` // will become table and statistics files +const commitsPath = `${paths.data.commits}${wantedYear}/${months[wantedMonth]}/`; +const openIssuesPath = `${paths.data.issues.open}${wantedYear}/${months[wantedMonth]}/`; // Folder +const closedIssuesPath = `${paths.data.issues.closed}${wantedYear}/${months[wantedMonth]}/`; // Folder +const outIssuesPath = `${paths.data.issues.out}${months[wantedMonth]}${wantedYear}`; // Will become table and statistics files // Make the directories using fs-extra's "mkdir -p" equivalent // It will make any directory that doesn't yet exist in the path -fs.mkdirpSync(commitsPath) -fs.mkdirpSync(openIssuesPath) -fs.mkdirpSync(closedIssuesPath) -fs.mkdirpSync(paths.data.issues.out) +fs.mkdirpSync(commitsPath); +fs.mkdirpSync(openIssuesPath); +fs.mkdirpSync(closedIssuesPath); +fs.mkdirpSync(paths.data.issues.out); // Create the browser sync server, it only starts when using `gulp watch` however -const server = sync.create() +const server = sync.create(); // Copy over the HTML, using merge2 to use Gulp's async completion and multiple src's function buildHTML() { return merge2([ gulp.src(paths.html.index).pipe(htmlclean()).pipe(gulp.dest(paths.out)), - gulp.src(paths.html.posts).pipe(htmlclean()).pipe(gulp.dest(paths.out + 'posts/')), - ]) + gulp.src(paths.html.posts).pipe(htmlclean()).pipe(gulp.dest(paths.out + 'posts/')) + ]); } // Build the CSS @@ -99,33 +98,33 @@ function buildCSS() { return gulp .src(paths.scss) .pipe(scss(opts.scss)) - .pipe(gulp.dest(paths.out + 'css/')) + .pipe(gulp.dest(paths.out + 'css/')); } // Build the extra stuff, for now only the favicons function buildExtra() { return gulp .src(paths.extra) - .pipe(gulp.dest(paths.out)) + .pipe(gulp.dest(paths.out)); } // Start the Browser Sync server and watch individual file types with appropriate build functions function watch() { - server.init(opts.sync) - gulp.watch([ paths.html.index, paths.html.posts ], gulp.series(buildHTML, createFeeds, reload)) - gulp.watch(paths.scss, gulp.series(buildCSS, reload)) - gulp.watch(paths.extra, gulp.series(buildExtra, reload)) + server.init(opts.sync); + gulp.watch([paths.html.index, paths.html.posts], gulp.series(buildHTML, createFeeds, reload)); + gulp.watch(paths.scss, gulp.series(buildCSS, reload)); + gulp.watch(paths.extra, gulp.series(buildExtra, reload)); } // To use Gulp's async completion system this has to be done, it's ugly but can't do without it function reload(callback) { - server.reload() - callback() + server.reload(); + callback(); } function download() { // Create the API with the token - const api = new gitlab({ token: 
require('./config.json').token }) + const api = new GitLab({token: require('./config.json').token}); // Return a new Promise so we can take advantage of Gulp's async completion system // We'll reject whenever there is an error and resolve when everything is completed @@ -133,240 +132,253 @@ function download() { // The Node GitLab API is a bit weird, first we have to find the project Tildes/Tildes api.Projects .show('tildes/tildes') - .catch((error) => reject(new Error('There was an error fetching the project:', error))) - .then((project) => { - log('Found project, downloading issues...') + .catch(error => reject(new Error('There was an error fetching the project:', error))) + .then(project => { + log('Found project, downloading issues...'); // Then once we find the project we can use it and its ID to download the issues api.Issues - .all({ projectId: project.id }) - .catch((error) => reject(new Error('There was an error downloading the issues:', error))) - .then((issues) => { + .all({projectId: project.id}) + .catch(error => reject(new Error('There was an error downloading the issues:', error))) + .then(issues => { // And then once we've downloaded all the issues we can write them to file appropriately - log(`Downloaded issues, saving opened and closed issues from ${months[wantedMonth]} ${wantedYear} to file...`) + log(`Downloaded issues, saving opened and closed issues from ${months[wantedMonth]} ${wantedYear} to file...`); for (const issue of issues) { - const createdDate = new Date(issue.created_at) + const createdDate = new Date(issue.created_at); if (createdDate.getFullYear() === wantedYear && createdDate.getMonth() === wantedMonth) { - fs.writeFileSync(openIssuesPath + `${issue.iid}.json`, JSON.stringify(issue, null, 2)) + fs.writeFileSync(openIssuesPath + `${issue.iid}.json`, JSON.stringify(issue, null, 2)); } - const closedDate = new Date(issue.closed_at) + const closedDate = new Date(issue.closed_at); if (issue.closed_at !== null && closedDate.getFullYear() === wantedYear && closedDate.getMonth() === wantedMonth) { - fs.writeFileSync(closedIssuesPath + `${issue.iid}.json`, JSON.stringify(issue, null, 2)) + fs.writeFileSync(closedIssuesPath + `${issue.iid}.json`, JSON.stringify(issue, null, 2)); } } - log('Finished writing issues to file.') - log('Downloading commits...') + + log('Finished writing issues to file.'); + log('Downloading commits...'); }) .then(() => { - api.Commits.all(project.id, { ref_name: 'master', with_stats: true }) - .catch((error) => reject(new Error('There was an error downloading the commits:', error))) - .then((commits) => { - log(`Downloaded commits, saving commits from ${months[wantedMonth]} ${wantedYear} to file...`) + api.Commits.all(project.id, {ref_name: 'master', with_stats: true}) + .catch(error => reject(new Error('There was an error downloading the commits:', error))) + .then(commits => { + log(`Downloaded commits, saving commits from ${months[wantedMonth]} ${wantedYear} to file...`); for (const commit of commits) { - const authoredDate = new Date(commit.authored_date) + const authoredDate = new Date(commit.authored_date); if (authoredDate.getFullYear() === wantedYear && authoredDate.getMonth() === wantedMonth) { - fs.writeFileSync(commitsPath + `${commit.short_id}.json`, JSON.stringify(commit, null, 2)) + fs.writeFileSync(commitsPath + `${commit.short_id}.json`, JSON.stringify(commit, null, 2)); } } - log('Finished writing commits to file.') - resolve() - }) - }) - }) - }) + + log('Finished writing commits to file.'); + resolve(); + }); + }); + 
}); + }); } function createIssueTable() { // Using a Promise again for Gulp's async completion - return new Promise((resolve) => { + return new Promise(resolve => { // Klaw returns all files in a directory recursively so we're getting all opened and closed issue files - let opened = klaw(openIssuesPath) - let closed = klaw(closedIssuesPath) + const opened = klaw(openIssuesPath); + const closed = klaw(closedIssuesPath); // Then we want to sort all of these issue files in their arrays - opened.sort(function(a, b) { - const aFile = require(a.path) - const bFile = require(b.path) - return (aFile.iid > bFile.iid) ? 1 : ((bFile.iid > aFile.iid) ? -1 : 0) - }) + opened.sort((a, b) => { + const aFile = require(a.path); + const bFile = require(b.path); + return (aFile.iid > bFile.iid) ? 1 : ((bFile.iid > aFile.iid) ? -1 : 0); + }); - closed.sort(function(a, b) { - const aFile = require(a.path) - const bFile = require(b.path) - return (aFile.iid > bFile.iid) ? 1 : ((bFile.iid > aFile.iid) ? -1 : 0) - }) + closed.sort((a, b) => { + const aFile = require(a.path); + const bFile = require(b.path); + return (aFile.iid > bFile.iid) ? 1 : ((bFile.iid > aFile.iid) ? -1 : 0); + }); // And then generate the Issue Table HTML, which is kind of a mess to do - let table = '
<section>\n'
-    table += '  <header>\n    <h2>Issue Table</h2>\n  </header>\n'
+    let table = '<section>\n';
+    table += '  <header>\n    <h2>Issue Table</h2>\n  </header>\n';

-    table += '  <h3>Opened</h3>\n'
-    table += '  <table>\n'
-    table += '    <thead>\n'
-    table += '      <tr>\n'
-    table += '        <th>Issue</th>\n'
-    table += '        <th>Title</th>\n'
-    table += '        <th>Author</th>\n'
-    table += '        <th>Opened</th>\n'
-    table += '        <th>Closed</th>\n'
-    table += '      </tr>\n'
-    table += '    </thead>\n'
-    table += '    <tbody>\n'
+    table += '  <h3>Opened</h3>\n';
+    table += '  <table>\n';
+    table += '    <thead>\n';
+    table += '      <tr>\n';
+    table += '        <th>Issue</th>\n';
+    table += '        <th>Title</th>\n';
+    table += '        <th>Author</th>\n';
+    table += '        <th>Opened</th>\n';
+    table += '        <th>Closed</th>\n';
+    table += '      </tr>\n';
+    table += '    </thead>\n';
+    table += '    <tbody>\n';

     for (const file of opened) {
-      const issue = require(file.path)
-      table += '      <tr>\n'
-      table += `        <td>${issue.iid}</td>\n`
+      const issue = require(file.path);
+      table += '      <tr>\n';
+      table += `        <td>${issue.iid}</td>\n`;

-      let title
+      let title;
       if (issue.title.length >= 50) {
         // We're going to be replacing all instances of <> signs to make sure nobody can add
         // <script> tags in their issue title and run JS on the site or mess up the layout or something
         // I do check myself before I commit and push anything but I'd rather be completely sure.
-        title = issue.title.substring(0, 47).replace(/[<>]/g, '') + '...'
+        title = issue.title.substring(0, 47).replace(/[<>]/g, '') + '...';
       } else {
-        title = issue.title.replace(/[<>]/g, '')
+        title = issue.title.replace(/[<>]/g, '');
       }

-      table += `        <td>${title}</td>\n`
-      table += `        <td>${issue.author.username}</td>\n`
-      table += `        <td>${df.asString('yyyy/MM/dd hh:mm:ss', new Date(issue.created_at))}</td>\n`
+      table += `        <td>${title}</td>\n`;
+      table += `        <td>${issue.author.username}</td>\n`;
+      table += `        <td>${df.asString('yyyy/MM/dd hh:mm:ss', new Date(issue.created_at))}</td>\n`;

-      let closedAt
+      let closedAt;
       if (issue.closed_at === null) {
-        closedAt = ''
+        closedAt = '';
       } else {
-        closedAt = df.asString('yyyy/MM/dd hh:mm:ss', new Date(issue.closed_at))
+        closedAt = df.asString('yyyy/MM/dd hh:mm:ss', new Date(issue.closed_at));
       }

-      table += `        <td>${closedAt}</td>\n`
-      table += '      </tr>\n'
+      table += `        <td>${closedAt}</td>\n`;
+      table += '      </tr>\n';
     }

-    table += '    </tbody>\n'
-    table += '  </table>\n\n'
+    table += '    </tbody>\n';
+    table += '  </table>\n\n';

-    table += '  <h3>Closed</h3>\n'
-    table += '  <table>\n'
-    table += '    <thead>\n'
-    table += '      <tr>\n'
-    table += '        <th>Issue</th>\n'
-    table += '        <th>Title</th>\n'
-    table += '        <th>Author</th>\n'
-    table += '        <th>Opened</th>\n'
-    table += '        <th>Closed</th>\n'
-    table += '      </tr>\n'
-    table += '    </thead>\n'
-    table += '    <tbody>\n'
+    table += '  <h3>Closed</h3>\n';
+    table += '  <table>\n';
+    table += '    <thead>\n';
+    table += '      <tr>\n';
+    table += '        <th>Issue</th>\n';
+    table += '        <th>Title</th>\n';
+    table += '        <th>Author</th>\n';
+    table += '        <th>Opened</th>\n';
+    table += '        <th>Closed</th>\n';
+    table += '      </tr>\n';
+    table += '    </thead>\n';
+    table += '    <tbody>\n';

     for (const file of closed) {
-      const issue = require(file.path)
-      table += '      <tr>\n'
-      table += `        <td>${issue.iid}</td>\n`
+      const issue = require(file.path);
+      table += '      <tr>\n';
+      table += `        <td>${issue.iid}</td>\n`;

-      let title
+      let title;
       if (issue.title.length >= 50) {
-        title = issue.title.substring(0, 47).replace(/[<>]/g, '') + '...'
+        title = issue.title.substring(0, 47).replace(/[<>]/g, '') + '...';
       } else {
-        title = issue.title.replace(/[<>]/g, '')
+        title = issue.title.replace(/[<>]/g, '');
       }

-      table += `        <td>${title}</td>\n`
-      table += `        <td>${issue.author.username}</td>\n`
-      table += `        <td>${df.asString('yyyy/MM/dd hh:mm:ss', new Date(issue.created_at))}</td>\n`
+      table += `        <td>${title}</td>\n`;
+      table += `        <td>${issue.author.username}</td>\n`;
+      table += `        <td>${df.asString('yyyy/MM/dd hh:mm:ss', new Date(issue.created_at))}</td>\n`;

-      let closedAt
+      let closedAt;
       if (issue.closed_at === null) {
-        closedAt = ''
+        closedAt = '';
       } else {
-        closedAt = df.asString('yyyy/MM/dd hh:mm:ss', new Date(issue.closed_at))
+        closedAt = df.asString('yyyy/MM/dd hh:mm:ss', new Date(issue.closed_at));
       }

-      table += `        <td>${closedAt}</td>\n`
-      table += '      </tr>\n'
+      table += `        <td>${closedAt}</td>\n`;
+      table += '      </tr>\n';
     }

-    table += '    </tbody>\n'
-    table += '  </table>\n'
-    table += '</section>\n'
+    table += '    </tbody>\n';
+    table += '  </table>\n';
+    table += '</section>\n';

     // And finally when the HTML is done generating we can write it and resolve that Promise we made
-    fs.writeFileSync(outIssuesPath + '_table.html', table, { encoding: 'UTF-8' })
-    resolve()
-  })
+    fs.writeFileSync(outIssuesPath + '_table.html', table, {encoding: 'UTF-8'});
+    resolve();
+  });
 }

 function createStatistics() {
-  return new Promise((resolve) => {
+  return new Promise(resolve => {
     // Same process as the Issue Table generation
-    let commits = klaw(commitsPath)
-    let opened = klaw(openIssuesPath)
-    let closed = klaw(closedIssuesPath)
+    const commits = klaw(commitsPath);
+    const opened = klaw(openIssuesPath);
+    const closed = klaw(closedIssuesPath);

-    let statistics = '<section>\n'
-    statistics += '  <header>\n    <h2>Statistics</h2>\n  </header>\n'
+    let statistics = '<section>\n';
+    statistics += '  <header>\n    <h2>Statistics</h2>\n  </header>\n';

-    const commitStats = changedLines(commits)
-    const contributors = uniqueContributors(commits)
+    const commitStats = changedLines(commits);
+    const contributors = uniqueContributors(commits);

-    statistics += `  <p>In the month of ${months[wantedMonth]}, `
-    statistics += `${commits.length} commits were made by ${contributors.length} contributors, `
-    statistics += `changing a total of ${Math.abs(commitStats.total)} (+${commitStats.added}|-${commitStats.deleted}) lines. `
-    statistics += `${opened.length} issues were opened and `
-    statistics += `${closed.length} issues were closed.</p>\n`
+    statistics += `  <p>In the month of ${months[wantedMonth]}, `;
+    statistics += `${commits.length} commits were made by ${contributors.length} contributors, `;
+    statistics += `changing a total of ${Math.abs(commitStats.total)} (+${commitStats.added}|-${commitStats.deleted}) lines. `;
+    statistics += `${opened.length} issues were opened and `;
+    statistics += `${closed.length} issues were closed.</p>\n`;

-    statistics += `  <p>An average of ${(opened.length / 30).toFixed(2)} issues were opened `
-    statistics += `and ${(closed.length / 30).toFixed(2)} issues were closed each day.</p>\n`
+    statistics += `  <p>An average of ${(opened.length / 30).toFixed(2)} issues were opened `;
+    statistics += `and ${(closed.length / 30).toFixed(2)} issues were closed each day.</p>\n`;

-    statistics += `  <p>The average time to close issues was ${avgTime(closed, 'days')} days `
-    statistics += `or ${avgTime(closed, 'hours')} hours.</p>\n`
+    statistics += `  <p>The average time to close issues was ${avgTime(closed, 'days')} days `;
+    statistics += `or ${avgTime(closed, 'hours')} hours.</p>\n`;

-    const topUsers = freqUsers(opened, 3)
-    statistics += '  <p>Top 3 issue creators:</p>\n'
-    statistics += '  <ol>\n'
+    const topUsers = freqUsers(opened, 3);
+    statistics += '  <p>Top 3 issue creators:</p>\n';
+    statistics += '  <ol>\n';
     for (const user in topUsers) {
-      statistics += '    <li>\n'
-      statistics += `      ${user}`
-      statistics += ' with '
-      statistics += `${topUsers[user]} issues created.\n`
-      statistics += '    </li>\n'
+      statistics += '    <li>\n';
+      statistics += `      ${user}`;
+      statistics += ' with ';
+      statistics += `${topUsers[user]} issues created.\n`;
+      statistics += '    </li>\n';
     }

-    statistics += '  </ol>\n'
-    let labels = labelsAlphabet(opened, true)
-    statistics += '  <p>Amount of labels assigned to currently open issues:</p>\n'
-    statistics += '  <ul>\n'
+    statistics += '  </ol>\n';
+
+    let labels = labelsAlphabet(opened, true);
+    statistics += '  <p>Amount of labels assigned to currently open issues:</p>\n';
+    statistics += '  <ul>\n';
     for (const label in labels) {
-      statistics += '    <li>\n'
-      statistics += `      ${label}: `
-      statistics += `${labels[label]} `
-      if (labels[label] === 1) statistics += 'time.\n'
-      else statistics += 'times.\n'
-      statistics += '    </li>\n'
+      statistics += '    <li>\n';
+      statistics += `      ${label}: `;
+      statistics += `${labels[label]} `;
+      if (labels[label] === 1) {
+        statistics += 'time.\n';
+      } else {
+        statistics += 'times.\n';
+      }
+
+      statistics += '    </li>\n';
     }

-    statistics += '  </ul>\n'
-    labels = labelsAlphabet(closed, false)
-    statistics += '  <p>Amount of labels assigned to closed issues:</p>\n'
-    statistics += '  <ul>\n'
+
+    statistics += '  </ul>\n';
+
+    labels = labelsAlphabet(closed, false);
+    statistics += '  <p>Amount of labels assigned to closed issues:</p>\n';
+    statistics += '  <ul>\n';
     for (const label in labels) {
-      statistics += '    <li>\n'
-      statistics += `      ${label}: `
-      statistics += `${labels[label]} `
-      if (labels[label] === 1) statistics += 'time.\n'
-      else statistics += 'times.\n'
-      statistics += '    </li>\n'
+      statistics += '    <li>\n';
+      statistics += `      ${label}: `;
+      statistics += `${labels[label]} `;
+      if (labels[label] === 1) {
+        statistics += 'time.\n';
+      } else {
+        statistics += 'times.\n';
+      }
+
+      statistics += '    </li>\n';
     }

-    statistics += '  </ul>\n'
-    statistics += '</section>\n'
-    fs.writeFileSync(outIssuesPath + '_statistics.html', statistics, { encoding: 'UTF-8' })
-    resolve()
-  })
+
+    statistics += '  </ul>\n';
+    statistics += '</section>
    \n'; + + fs.writeFileSync(outIssuesPath + '_statistics.html', statistics, {encoding: 'UTF-8'}); + resolve(); + }); } function createFeeds() { @@ -383,47 +395,47 @@ function createFeeds() { feedLinks: { atom: 'https://til.bauke.xyz/feed.atom', json: 'https://til.bauke.xyz/feed.json', - rss: 'https://til.bauke.xyz/feed.rss', + rss: 'https://til.bauke.xyz/feed.rss' }, author: { name: 'Bauke', email: 'me@bauke.xyz', - link: 'https://bauke.xyz', - }, - }) - const posts = fs.readdirSync(path.join(paths.out, 'posts')) + link: 'https://bauke.xyz' + } + }); + const posts = fs.readdirSync(path.join(paths.out, 'posts')); // Sort the posts descending year and month posts.sort((a, b) => { - const yearA = Number(a.replace(/\D/g, '')) - const yearB = Number(b.replace(/\D/g, '')) + const yearA = Number(a.replace(/\D/g, '')); + const yearB = Number(b.replace(/\D/g, '')); if (yearA === yearB) { - const monthA = months.join(',').toLowerCase().split(',').indexOf(a.substring(0, a.indexOf('-'))) + 1 - const monthB = months.join(',').toLowerCase().split(',').indexOf(b.substring(0, b.indexOf('-'))) + 1 - return monthB - monthA + const monthA = months.join(',').toLowerCase().split(',').indexOf(a.substring(0, a.indexOf('-'))) + 1; + const monthB = months.join(',').toLowerCase().split(',').indexOf(b.substring(0, b.indexOf('-'))) + 1; + return monthB - monthA; } - return yearB - yearA - }) + return yearB - yearA; + }); for (const post of posts) { // Skip the template, that doesn't need to be included if (post.includes('template')) { - continue + continue; } - const html = fs.readFileSync(path.join(paths.out, 'posts', post), 'UTF8') - const $ = cheerio.load(html) - const title = $('#wrapper>h1').text() - const id = `https://til.bauke.xyz/posts/${post}` + const html = fs.readFileSync(path.join(paths.out, 'posts', post), 'UTF8'); + const $ = cheerio.load(html); + const title = $('#wrapper>h1').text(); + const id = `https://til.bauke.xyz/posts/${post}`; const date = new Date(Date.UTC( Number(post.replace(/\D/g, '')), // Add one to the month since UTC months are 0 based and since we set the // day as 0 we'll get the Date back as the last day of the previous month months.join(',').toLowerCase().split(',').indexOf(post.substring(0, post.indexOf('-'))) + 1, 0, 23, 59, 59 - )) + )); const content = $('#post') .html() - .replace(/
<article id="toc">.+?<\/article>/g, '') // Remove the TOC
+        .replace(/<article id="toc">
    .+?<\/article>/g, ''); // Remove the TOC feed.addItem({ title, id, @@ -432,16 +444,17 @@ function createFeeds() { published: date, description: `${title}'s Issue Log`, content, - image: 'https://til.bauke.xyz/android-chrome-192x192.png', - }) + image: 'https://til.bauke.xyz/android-chrome-192x192.png' + }); } - fs.writeFileSync(path.join(paths.out, 'feed.atom'), feed.atom1()) - fs.writeFileSync(path.join(paths.out, 'feed.json'), feed.json1()) - fs.writeFileSync(path.join(paths.out, 'feed.rss'), feed.rss2()) - return Promise.resolve() + + fs.writeFileSync(path.join(paths.out, 'feed.atom'), feed.atom1()); + fs.writeFileSync(path.join(paths.out, 'feed.json'), feed.json1()); + fs.writeFileSync(path.join(paths.out, 'feed.rss'), feed.rss2()); + return Promise.resolve(); } -exports.build = gulp.series(gulp.parallel(buildHTML, buildCSS, buildExtra), createFeeds) -exports.download = gulp.series(download, gulp.parallel(createIssueTable, createStatistics)) -exports.no_download = gulp.parallel(createIssueTable, createStatistics) -exports.watch = gulp.series(gulp.parallel(buildHTML, buildCSS, buildExtra), createFeeds, watch) +exports.build = gulp.series(gulp.parallel(buildHTML, buildCSS, buildExtra), createFeeds); +exports.download = gulp.series(download, gulp.parallel(createIssueTable, createStatistics)); +exports.no_download = gulp.parallel(createIssueTable, createStatistics); +exports.watch = gulp.series(gulp.parallel(buildHTML, buildCSS, buildExtra), createFeeds, watch); diff --git a/package.json b/package.json index f6f666a..113d002 100644 --- a/package.json +++ b/package.json @@ -40,6 +40,10 @@ } }, "xo": { - "space": true + "space": true, + "rules": { + "camelcase": "off", + "guard-for-in": "off" + } } } diff --git a/src/scss/_anchor.scss b/src/scss/_anchor.scss index 78c1bee..eb6f08b 100644 --- a/src/scss/_anchor.scss +++ b/src/scss/_anchor.scss @@ -1,5 +1,3 @@ -@import 'colors'; - a { color: $cyan; text-decoration: none; diff --git a/src/scss/_colors.scss b/src/scss/_colors.scss index d7dca90..5aa651a 100644 --- a/src/scss/_colors.scss +++ b/src/scss/_colors.scss @@ -3,7 +3,7 @@ $background: #282a36; $selection: #44475a; $comment: #6272a4; -$red: #ff5555; +$red: #f55; $orange: #ffb86c; $yellow: #f1fa8c; $green: #50fa7b; diff --git a/src/scss/_responsive.scss b/src/scss/_responsive.scss index 0cd12b6..4fd997d 100644 --- a/src/scss/_responsive.scss +++ b/src/scss/_responsive.scss @@ -3,9 +3,9 @@ width: 95vw; } } - @media screen and (max-width: 500px) { #post > #toc { + // stylelint-disable-next-line property-blacklist float: none; margin: 0; width: auto; diff --git a/src/scss/common.scss b/src/scss/common.scss index b0dad49..bb5f14e 100644 --- a/src/scss/common.scss +++ b/src/scss/common.scss @@ -1,8 +1,17 @@ -@import 'anchor'; @import 'colors'; +@import 'anchor'; -html, body, p, ul, ol, li, -h1, h2, h3, h4, h5 { +html, +body, +p, +ul, +ol, +li, +h1, +h2, +h3, +h4, +h5 { margin: 0; padding: 0; } @@ -30,8 +39,8 @@ body { > h3 { display: inline-block; - margin: 0 10px 4px 10px; - padding: 0 4px 4px 4px; + margin: 0 10px 4px; + padding: 0 4px 4px; border-bottom: 4px solid; &:nth-child(7n + 1) { diff --git a/src/scss/index.scss b/src/scss/index.scss index 6d0f9f9..d416bd1 100644 --- a/src/scss/index.scss +++ b/src/scss/index.scss @@ -1,5 +1,5 @@ -@import 'anchor'; @import 'colors'; +@import 'anchor'; #posts { background-color: rgba(0, 0, 0, 0.25); diff --git a/src/scss/post.scss b/src/scss/post.scss index 3ef7c51..2ff3c79 100644 --- a/src/scss/post.scss +++ b/src/scss/post.scss @@ -1,5 +1,5 @@ 
-@import 'anchor'; @import 'colors'; +@import 'anchor'; #post { > #toc { @@ -8,6 +8,7 @@ width: fit-content; border: 4px solid $yellow; border-top: none; + // stylelint-disable-next-line property-blacklist float: right; background-color: $background; @@ -25,7 +26,7 @@ font-size: 1.25em; } - &:before { + &::before { content: '>'; padding-right: 0.5em; } @@ -40,7 +41,7 @@ font-size: 1.25em; } - &:before { + &::before { content: '>>'; padding-right: 0.5em; padding-left: 0.5em; @@ -52,10 +53,12 @@ } > *:not(#toc) { - padding: 1em 1em; + padding: 1em; background-color: rgba(0, 0, 0, 0.25); - p, ol, ul { + p, + ol, + ul { padding: 0.2em 0; } @@ -80,7 +83,7 @@ h3 { font-size: 1.3em; padding: 2px 0; - margin: 1em 0 0.4em 0; + margin: 1em 0 0.4em; border-bottom: 4px solid $red; width: 25%; } @@ -89,7 +92,8 @@ font-size: 1.1em; } - ol, ul { + ol, + ul { font-size: 1.1em; margin-bottom: 0.2em; @@ -105,7 +109,7 @@ ul { list-style-type: none; - > li:before { + > li::before { content: '>'; padding-left: 0.5em; padding-right: 0.5em; @@ -135,7 +139,7 @@ > td { padding: 0.5em 0.8em; border-left: 2px solid $comment; - + &:last-child { border-right: 2px solid $comment; } diff --git a/statistics.js b/statistics.js index 13827b3..67707d5 100644 --- a/statistics.js +++ b/statistics.js @@ -7,20 +7,28 @@ */ function avgTime(data, time) { - if (time !== 'hours' && time !== 'days') return Error('avgTime(data, time): time should be "hours" or "days"') - let avg - for (const file of data) { - const issue = require(file.path) - const openDate = new Date(issue.created_at) - const closeDate = new Date(issue.closed_at) - let diff - if (time === 'days') diff = (closeDate - openDate) / (1000 * 60 * 60 * 24) - else if (time === 'hours') diff = (closeDate - openDate) / (1000 * 60 * 60) - avg = (typeof avg === 'undefined') - ? avg = diff - : avg += diff + if (time !== 'hours' && time !== 'days') { + return new Error('avgTime(data, time): time should be "hours" or "days"'); } - return (avg / data.length).toFixed(2) + + let avg; + for (const file of data) { + const issue = require(file.path); + const openDate = new Date(issue.created_at); + const closeDate = new Date(issue.closed_at); + let diff; + if (time === 'days') { + diff = (closeDate - openDate) / (1000 * 60 * 60 * 24); + } else if (time === 'hours') { + diff = (closeDate - openDate) / (1000 * 60 * 60); + } + + avg = (typeof avg === 'undefined') ? 
+ avg = diff : + avg += diff; + } + + return (avg / data.length).toFixed(2); } /** @@ -32,20 +40,31 @@ function avgTime(data, time) { */ function freqUsers(data, maxUsers) { - if (typeof maxUsers === 'undefined') maxUsers = 3 - let userCounts = {} + if (typeof maxUsers === 'undefined') { + maxUsers = 3; + } + + const userCounts = {}; for (const file of data) { - const issue = require(file.path) - if (typeof userCounts[issue.author.username] === 'undefined') userCounts[issue.author.username] = 1 - else userCounts[issue.author.username]++ + const issue = require(file.path); + if (typeof userCounts[issue.author.username] === 'undefined') { + userCounts[issue.author.username] = 1; + } else { + userCounts[issue.author.username]++; + } } - const sortedArray = Object.keys(userCounts).sort((a, b) => userCounts[b] - userCounts[a]) - const sortedObject = {} + + const sortedArray = Object.keys(userCounts).sort((a, b) => userCounts[b] - userCounts[a]); + const sortedObject = {}; for (let i = 0; i < maxUsers; i++) { - if (typeof sortedArray[i] === 'undefined') break - sortedObject[sortedArray[i]] = userCounts[sortedArray[i]] + if (typeof sortedArray[i] === 'undefined') { + break; + } + + sortedObject[sortedArray[i]] = userCounts[sortedArray[i]]; } - return sortedObject + + return sortedObject; } /** @@ -57,61 +76,75 @@ function freqUsers(data, maxUsers) { */ function labelsAlphabet(data, checkNull) { - if (typeof checkNull === 'undefined') checkNull = false - const labels = {} + if (typeof checkNull === 'undefined') { + checkNull = false; + } + + const labels = {}; for (const file of data) { - const issue = require(file.path) - if (checkNull && issue.closed_at !== null) continue + const issue = require(file.path); + if (checkNull && issue.closed_at !== null) { + continue; + } + for (const label of issue.labels) { - if (typeof labels[label] === 'undefined') labels[label] = 1 - else labels[label]++ + if (typeof labels[label] === 'undefined') { + labels[label] = 1; + } else { + labels[label]++; + } } } - const labelsOrdered = {} - Object.keys(labels).sort().forEach(label => labelsOrdered[label] = labels[label]) - return labelsOrdered + + const labelsOrdered = {}; + Object.keys(labels).sort().forEach(label => { + labelsOrdered[label] = labels[label]; + }); + return labelsOrdered; } /** * @function changedLines * @description Returns the number of added, deleted and total lines changed * @param {Array} data Array with paths leading to GitLab Commit .json files (with stats) - * @returns {Object} + * @returns {Object} Object with added/deleted/total lines changed */ function changedLines(data) { const stats = { added: 0, deleted: 0, - total: 0, - } + total: 0 + }; for (const file of data) { - const commit = require(file.path) - stats.added += commit.stats.additions - stats.deleted += commit.stats.deletions - stats.total += commit.stats.additions - commit.stats.deletions + const commit = require(file.path); + stats.added += commit.stats.additions; + stats.deleted += commit.stats.deletions; + stats.total += commit.stats.additions - commit.stats.deletions; } - return stats + + return stats; } /** * @function uniqueContributors * @description Returns the names of all contributors * @param {Array} data Array with paths leading to GitLab Commit .json files (with stats) - * @returns {Array} + * @returns {Array} Array with names of all contributors */ function uniqueContributors(data) { - const contributors = [] + const contributors = []; for (const file of data) { - const commit = require(file.path) + const commit = 
require(file.path); if (!contributors.includes(commit.author_name)) { - contributors.push(commit.author_name) + contributors.push(commit.author_name); } } - return contributors + + return contributors; } -exports.avgTime = avgTime -exports.freqUsers = freqUsers -exports.labelsAlphabet = labelsAlphabet -exports.changedLines = changedLines -exports.uniqueContributors = uniqueContributors +exports.avgTime = avgTime; +exports.freqUsers = freqUsers; +exports.labelsAlphabet = labelsAlphabet; +exports.changedLines = changedLines; +exports.uniqueContributors = uniqueContributors;
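
A minimal usage sketch (not part of the patch) of the statistics helpers exported above, for running them outside of Gulp. It mirrors how createStatistics() in gulpfile.js feeds klaw-sync results into the helpers; the directory named below is only an example and should point at any folder of downloaded issue .json files.

// Usage sketch: klaw-sync returns an array of {path, stats} objects for every
// file under the directory, which is the shape every helper in statistics.js expects.
const klaw = require('klaw-sync');
const {avgTime, freqUsers, labelsAlphabet} = require('./statistics');

// Example path only; matches the layout created by the gulpfile's download task.
const closed = klaw('data/issues/closed/2019/June/');

console.log(`${closed.length} closed issues`);
console.log(`Average time to close: ${avgTime(closed, 'days')} days`);
console.log('Top 3 issue creators:', freqUsers(closed, 3));
console.log('Labels on closed issues:', labelsAlphabet(closed, false));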