Commit 50d14f4

sgomes and jsnajdr authored
Fix totals for areas in comments (#27)
* Fix totals for areas in comments
* Declare loop variables in for loops, use sizeType consistently
* Use sumSizesOf in comments

Co-authored-by: Jarda Snajdr <jsnajdr@gmail.com>
1 parent 113434b commit 50d14f4

2 files changed: +62 -4 lines changed


server/comments.js

Lines changed: 51 additions & 4 deletions
@@ -3,6 +3,7 @@ const { log, getPRNumber } = require('./utils');
 const db = require('./db');
 const gh = require('./github');
 const printDeltaTable = require('./delta-table');
+const { sumSizesOf, ZERO_SIZE } = require('./delta');
 
 const REPO = 'Automattic/wp-calypso';
 const WATERMARK = 'c52822';
@@ -35,6 +36,49 @@ function groupByArea(deltas) {
 	});
 }
 
+function totalDeltasForArea(areaDelta, delta) {
+	if (!areaDelta) {
+		return {...ZERO_SIZE};
+	}
+
+	// Produce an array of arrays:
+	// [ [ chunks in use in first commit ] , [ chunks in use in second commit ] ]
+	// The items will be unique inside each array.
+	const chunksInUse = ['firstChunks', 'secondChunks']
+		.map(chunkType => areaDelta.reduce(
+			(acc, group) => {
+				for (const chunk of group[chunkType]) {
+					acc.add(chunk);
+				}
+				return acc;
+			},
+			new Set()
+		))
+		.map(set => [...set]);
+
+	// Produce an array of size objects, representing the sum of all the chunks for each commit:
+	// [ { stat_size: 0, parsed_size: 0, gzip_size: 0 }, { stat_size: 0, parsed_size: 0, gzip_size: 0 } ]
+	// The first object is for the first commit, and the second object for the second commit.
+	const chunkSizes = ['firstSizes', 'secondSizes']
+		.map((property, index) => chunksInUse[index].reduce(
+			(acc, chunkName) => {
+				const chunk = delta.chunks.find(chunk => chunk.name === chunkName) || {};
+				acc = sumSizesOf(acc, chunk[property]);
+				return acc;
+			},
+			{...ZERO_SIZE}
+		));
+
+	// Produce a single object with the delta between first and second commit:
+	// { stat_size: 0, parsed_size: 0, gzip_size: 0 }
+	let deltaSizes = {};
+	for (const sizeType in chunkSizes[0]) {
+		deltaSizes[sizeType] = chunkSizes[1][sizeType] - chunkSizes[0][sizeType];
+	}
+
+	return deltaSizes;
+}
+
 const AREAS = [
 	{
 		id: 'runtime',
@@ -90,8 +134,11 @@ function watermarkString(watermark) {
 	return `icfy-watermark: ${watermark}`;
 }
 
-async function statsMessage(push) {
-	const delta = await db.getPushDelta(push.ancestor, push.sha, { extractManifestGroup: true });
+async function getDelta(push) {
+	return await db.getPushDelta(push.ancestor, push.sha, { extractManifestGroup: true });
+}
+
+function statsMessage(delta) {
 	const byArea = groupByArea(delta.groups);
 
 	const message = [];
@@ -112,7 +159,7 @@ async function statsMessage(push) {
 			continue;
 		}
 
-		const bytesDelta = _.reduce(areaDelta, (sum, delta) => sum + delta.deltaSizes.gzip_size, 0);
+		const bytesDelta = totalDeltasForArea(areaDelta, delta).gzip_size || 0;
 		const changedBytes = Math.abs(bytesDelta);
 		const suffix = bytesDelta < 0 ? 'removed 📉' : 'added 📈';
 
@@ -187,7 +234,7 @@ module.exports = async function commentOnGithub(sha) {
 
 	const [firstComment, ...otherComments] = await getOurPRCommentIDs(REPO, prNumber);
 
-	const message = await statsMessage(push);
+	const message = statsMessage(await getDelta(push));
 
 	if (!firstComment) {
 		log('Posting first comment on PR', prNumber);
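A hypothetical, self-contained sketch (not part of the commit) of why the area total changes: when two groups in an area share a chunk, summing the per-group gzip deltas counts that chunk more than once, while totalDeltasForArea derives the total from the set of unique chunks in use. The data shapes below follow the fields visible in this diff (firstChunks/secondChunks on groups, firstSizes/secondSizes on delta.chunks entries); the chunk names and numbers are made up.

const delta = {
	chunks: [
		{ name: 'vendor', firstSizes: { gzip_size: 100 }, secondSizes: { gzip_size: 150 } },
		{ name: 'reader', firstSizes: { gzip_size: 40 }, secondSizes: { gzip_size: 60 } },
	],
};

const areaDelta = [
	// group that uses both chunks
	{ firstChunks: ['vendor', 'reader'], secondChunks: ['vendor', 'reader'], deltaSizes: { gzip_size: 70 } },
	// group that uses only the shared 'vendor' chunk
	{ firstChunks: ['vendor'], secondChunks: ['vendor'], deltaSizes: { gzip_size: 50 } },
];

// Old behaviour: add up per-group deltas; 'vendor' is counted twice.
const naiveTotal = areaDelta.reduce((sum, group) => sum + group.deltaSizes.gzip_size, 0); // 120

// New behaviour (what totalDeltasForArea computes, reduced here to gzip_size only):
// sum the unique chunks in use for each commit, then take the difference.
const sumFor = (chunkKey, sizeKey) => {
	const unique = [...new Set(areaDelta.flatMap(group => group[chunkKey]))];
	return unique.reduce((sum, name) => {
		const chunk = delta.chunks.find(c => c.name === name) || {};
		return sum + ((chunk[sizeKey] || {}).gzip_size || 0);
	}, 0);
};
const dedupedTotal = sumFor('secondChunks', 'secondSizes') - sumFor('firstChunks', 'firstSizes');

console.log({ naiveTotal, dedupedTotal }); // { naiveTotal: 120, dedupedTotal: 70 }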

server/delta.js

Lines changed: 11 additions & 0 deletions
@@ -1,6 +1,7 @@
 const _ = require('lodash');
 
 const sizes = ['stat_size', 'parsed_size', 'gzip_size'];
+const ZERO_SIZE = sizes.reduce((acc, size) => ({...acc, [size]: 0}), {});
 
 function sizesOf(stat) {
 	return stat ? _.pick(stat, sizes) : null;
@@ -225,6 +226,8 @@ function deltaFromStatsAndGroups(firstStats, firstGroups, secondStats, secondGro
 		const secondSizes = sizesOfGroup(secondGroup, secondStats);
 		const deltaSizes = deltaSizesOf(firstSizes, secondSizes);
 		const deltaPercents = deltaPercentsOf(firstSizes, deltaSizes);
+		const firstChunks = (firstGroup || {}).chunks || [];
+		const secondChunks = (secondGroup || {}).chunks || [];
 
 		if (isDeltaEligible(deltaSizes)) {
 			deltas.push({
@@ -233,6 +236,8 @@ function deltaFromStatsAndGroups(firstStats, firstGroups, secondStats, secondGro
 				secondSizes,
 				deltaSizes,
 				deltaPercents,
+				firstChunks,
+				secondChunks,
 			});
 		}
 	}
@@ -243,12 +248,16 @@ function deltaFromStatsAndGroups(firstStats, firstGroups, secondStats, secondGro
 		const firstSizes = null;
 		const secondSizes = sizesOfGroup(secondGroup, secondStats);
 		const deltaSizes = deltaSizesOf(firstSizes, secondSizes);
+		const firstChunks = [];
+		const secondChunks = secondGroup.chunks || [];
 
 		deltas.push({
 			name,
 			firstSizes,
 			secondSizes,
 			deltaSizes,
+			firstChunks,
+			secondChunks,
 		});
 	}
 }
@@ -258,3 +267,5 @@ function deltaFromStatsAndGroups(firstStats, firstGroups, secondStats, secondGro
 
 exports.deltaFromStats = deltaFromStats;
 exports.deltaFromStatsAndGroups = deltaFromStatsAndGroups;
+exports.sumSizesOf = sumSizesOf;
+exports.ZERO_SIZE = ZERO_SIZE;
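sumSizesOf is newly exported here, but its implementation is unchanged and not shown in this diff. A minimal sketch, assuming it performs field-wise addition of the three size fields and treats a missing operand as all zeros (the name sumSizesOfSketch below is ours, not the module's), together with what the new ZERO_SIZE constant evaluates to:

const sizes = ['stat_size', 'parsed_size', 'gzip_size'];

// Same construction as in the diff: an object with every size field set to zero.
const ZERO_SIZE = sizes.reduce((acc, size) => ({ ...acc, [size]: 0 }), {});
// → { stat_size: 0, parsed_size: 0, gzip_size: 0 }

// Assumed behaviour of sumSizesOf: add each size field, tolerating undefined operands.
function sumSizesOfSketch(a, b) {
	return sizes.reduce(
		(acc, size) => ({ ...acc, [size]: ((a && a[size]) || 0) + ((b && b[size]) || 0) }),
		{}
	);
}

console.log(sumSizesOfSketch({ ...ZERO_SIZE }, { stat_size: 10, parsed_size: 7, gzip_size: 3 }));
// → { stat_size: 10, parsed_size: 7, gzip_size: 3 }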
