|
1 | 1 | import Handlebars from "handlebars"; |
2 | 2 | import fs from "fs/promises"; |
3 | 3 | import prettyBytes from 'pretty-bytes'; |
4 | | -import {gzipSize} from 'gzip-size'; |
5 | 4 | import {getBlobHistory} from './repo.js'; |
| 5 | +import pacote from "pacote"; |
| 6 | +import zlib from "zlib"; |
| 7 | +import tar from "tar-stream"; |
| 8 | +import { Readable } from "stream"; |
6 | 9 |
|
// Only flag a size regression when the gzipped delta exceeds this many bytes.
const FILE_SIZE_DIFF_THRESHOLD = 512; // 0.5KB

// Read a file from disk and parse its contents as JSON.
const readJSONFile = async (file) => {
  const raw = await fs.readFile(file);
  return JSON.parse(String(raw));
};
// Current package version, read once at module load (top-level await).
const pkg = await readJSONFile('./package.json');
const {version} = pkg;
/**
 * Parse a semver-like tag ("v1.2.3" or "1.2.3", optional pre-release suffix)
 * into its numeric components.
 *
 * @param {string} tag - version tag, with or without a leading "v".
 * @returns {[number, number, number]|[]} [major, minor, patch] as numbers,
 *   or an empty array when the tag is not a version.
 */
const parseVersion = (tag) => {
  const match = /^v?(\d+)\.(\d+)\.(\d+)/.exec(String(tag));

  if (!match) {
    // The original returned [undefined, undefined, undefined] here, which let
    // two *invalid* tags compare as having "equal" majors; an empty array
    // destructures to the same undefineds without that hazard.
    return [];
  }

  // Convert the captured strings to numbers so callers compare numerically,
  // not lexicographically ("10" < "9" as strings).
  const [, major, minor, patch] = match;
  return [Number(major), Number(minor), Number(patch)];
};
// Major version of the current build; used to restrict the release history
// comparison to tags from the same major line.
const MAJOR_NUMBER = parseVersion(version)[0];
/**
 * Download a published package tarball from npm and collect per-file stats.
 *
 * @param {string} pkg - package spec, e.g. "axios@1.4.0".
 * @returns {Promise<Object<string, object>>} map of file path (leading
 *   "package/" tarball prefix stripped) to its tar header fields plus
 *   `gzip` (gzipped byte size) and `compressed` (gzip/raw ratio).
 */
async function getFilesFromNPM(pkg) {
  const tgzData = await pacote.tarball(pkg); // tarball Buffer fetched from npm
  const files = {};

  return new Promise((resolve, reject) => {
    const extract = tar.extract();

    extract.on("entry", (header, stream, next) => {
      const buffers = [];

      stream.on('data', (buffer) => {
        buffers.push(buffer);
      });

      // BUGFIX: entry-stream errors were previously unhandled and would
      // crash the process with an unhandled 'error' event.
      stream.on('error', reject);

      stream.on("end", () => {
        const content = Buffer.concat(buffers);

        const gzipped = zlib.gzipSync(content);

        files[header.name.replace(/^package\//, '')] = {
          gzip: gzipped.length,
          // gzip/raw ratio; empty entries count as 1 (no compression gain).
          compressed: header.size ? gzipped.length / header.size : 1,
          ...header
        };

        next();
      });
    });

    // BUGFIX: `.pipe()` returns its destination, so the original attached
    // "error" only to `extract`; a corrupt tarball failing in gunzip was
    // unhandled. Reject on errors from both pipeline stages.
    Readable.from(tgzData)
      .pipe(zlib.createGunzip())
      .on("error", reject)
      .pipe(extract)
      .on("error", reject)
      .on('finish', () => resolve(files));
  });
}
| 59 | + |
| 60 | + |
| 61 | + |
| 62 | + |
/**
 * Build a size report for the given build artifacts, comparing each file's
 * gzipped size against the same file in past npm releases of the current
 * major version.
 *
 * @param {Object<string, string>} files - display name -> artifact path.
 * @param {number} [historyCount=3] - how many past releases to compare with.
 * @returns {Promise<{version: string, files: object, warns: Array}>}
 *   per-file stats plus warnings for files whose gzipped size grew by more
 *   than FILE_SIZE_DIFF_THRESHOLD bytes versus the most recent release.
 */
const generateFileReport = async (files, historyCount = 3) => {
  const allFilesStat = {};
  // Only compare against releases from the same major line.
  const commits = (await getBlobHistory('package.json', historyCount)).filter(({tag}) => {
    return MAJOR_NUMBER === parseVersion(tag)[0];
  });
  const warns = [];

  const npmHistory = {};

  // Fetch all historical tarballs in parallel.
  await Promise.all(commits.map(async ({tag}) => {
    npmHistory[tag] = await getFilesFromNPM(`axios@${tag.replace(/^v/, '')}`);
  }));

  for (const [name, filename] of Object.entries(files)) {
    // Best-effort: a missing artifact is logged and reported as null.
    const file = await fs.stat(filename).catch(console.warn);
    const gzip = file ? zlib.gzipSync(await fs.readFile(filename)).length : 0;

    const stat = allFilesStat[filename] = file ? {
      name,
      size: file.size,
      path: filename,
      gzip,
      compressed: file.size ? gzip / file.size : 1,
      history: commits.map(({tag}) => {
        const releaseFiles = npmHistory[tag];
        const releaseFile = (releaseFiles && releaseFiles[filename]) || null;

        return {
          tag,
          ...releaseFile
        };
      })
    } : null;

    // BUGFIX: `stat` is null when fs.stat failed above; the original
    // dereferenced it unconditionally and crashed on any missing artifact.
    // Also skip when the previous release has no gzip figure for this file.
    if (stat?.history[0]?.gzip != null) {
      const diff = stat.gzip - stat.history[0].gzip;

      if (diff > FILE_SIZE_DIFF_THRESHOLD) {
        warns.push({
          filename,
          sizeReport: true,
          diff,
          percent: stat.gzip ? diff / stat.gzip : 0,
        });
      }
    }
  }

  return {
    version,
    files: allFilesStat,
    warns
  };
}
25 | 119 |
|
// Render the PR-comment body for the given artifacts via a Handlebars template.
const generateBody = async ({files, template = './templates/pr.hbs'} = {}) => {
  const data = await generateFileReport(files);

  Handlebars.registerHelper('filesize', (bytes)=> bytes != null ? prettyBytes(bytes) : '<unknown>');
  Handlebars.registerHelper('percent', (value)=> Number.isFinite(value) ? `${(value * 100).toFixed(1)}%` : `---` );

  const source = String(await fs.readFile(template));
  const render = Handlebars.compile(source);

  return render(data);
}
35 | 128 |
|
// Entry point: print the rendered size report for the distributed bundles.
const body = await generateBody({
  files: {
    'Browser build (UMD)' : 'dist/axios.min.js',
    'Browser build (ESM)' : 'dist/esm/axios.min.js',
  }
});

console.log(body);
42 | | - |
|
0 commit comments