~koehr/the-250kb-club

the-250kb-club/compile-list.mjs -rw-r--r-- 2.5 KiB
d858e7d8 — koehr updates URLs 2 months ago
                                                                                
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
import fs from 'fs'
import chalk from 'chalk'
import phantomas from 'phantomas'
import pageData from './src/components/pages.mjs'

// Where the URL list is read from and where the generated module is written.
const INPUT_FILE = './pages.txt'
const OUTPUT_FILE = './src/components/pages.mjs'
// Re-analyze pages whose last check is older than one week (milliseconds).
const RECHECK_THRESHOLD = 7 * 24 * 60 * 60 * 1000
// 256,000 bytes = 250 KiB — pages above this are flagged as oversized.
const REJECT_THRESHOLD = 256000

// Colored two-letter tags per log level, e.g. info -> "[II]", warn -> "[WW]".
const LEVEL_COLORS = { info: 'white', warn: 'yellow', error: 'red', debug: 'white' }
const LOGGING_PREFIXES = Object.fromEntries(
  Object.entries(LEVEL_COLORS).map(([level, color]) => {
    const tag = level[0].toUpperCase().repeat(2)
    return [level, `[${chalk.bold[color](tag)}]`]
  })
)

/**
 * Print a log line prefixed with a colored level tag.
 * @param {'info'|'warn'|'error'|'debug'} [level='info'] - severity level;
 *   unknown levels fall back to the 'info' prefix
 * @param {...*} args - values forwarded verbatim to console.log
 */
function log (level = 'info', ...args) {
  // Rest params replace the legacy `arguments` object; ?? keeps output
  // readable for unknown levels (previously printed "undefined").
  const prefix = LOGGING_PREFIXES[level] ?? LOGGING_PREFIXES.info
  console.log(prefix, ...args)
}
// Convenience wrappers, one per level.
function info (...args) { log('info', ...args) }
function warn (...args) { log('warn', ...args) }
function error (...args) { log('error', ...args) }
function debug (...args) { log('debug', ...args) }

/**
 * Split phantomas metrics into "content" vs "extra" byte weights for a page.
 * Logs a warning when the page's total content size exceeds REJECT_THRESHOLD.
 * @param {string} url - the page that was analyzed
 * @param {object} m - phantomas metrics (byte counts per asset category)
 * @returns {{url: string, contentWeight: number, extraWeight: number, stamp: number}}
 *   weights plus the timestamp of this measurement
 */
function calcWeights (url, m) {
  const {
    cssSize, jsSize, webfontSize, otherSize,
    htmlSize, jsonSize, imageSize, base64Size, videoSize,
    contentSize,
  } = m
  // "Extra" = resources beyond the actual content (styling, scripts, fonts).
  const extraWeight = cssSize + jsSize + webfontSize + otherSize
  // "Content" = what the visitor came for (markup, data, media).
  const contentWeight = htmlSize + jsonSize + imageSize + base64Size + videoSize

  if (contentSize > REJECT_THRESHOLD) {
    warn(url, 'oversized by', contentSize - REJECT_THRESHOLD)
  }

  return { url, contentWeight, extraWeight, stamp: Date.now() }
}

/**
 * Analyze each URL with phantomas and persist the combined weight metrics.
 * Previously-checked URLs younger than RECHECK_THRESHOLD are reused as-is;
 * everything else is (re-)fetched. All results are written to OUTPUT_FILE
 * as a default-exported JSON array.
 * @param {string[]} urls - page URLs to check
 */
async function generateMetrics (urls) {
  debug('Checking', urls)
  const metricsList = []
  // Index previous results by URL for O(1) recheck decisions
  // (replaces a linear indexOf scan per URL).
  const keyedPageData = new Map(pageData.map((page) => [page.url, page]))
  const now = Date.now()

  for (const url of urls) {
    const known = keyedPageData.get(url)
    if (known !== undefined && now - known.stamp < RECHECK_THRESHOLD) {
      debug('skipping known URL', url)
      metricsList.push(known) // reuse the previous measurement
      continue
    }
    try {
      debug('fetching and analyzing', url)
      // NOTE(review): URLs are analyzed one at a time — presumably to limit
      // load on phantomas/targets; confirm before parallelizing.
      const results = await phantomas(url)
      const weights = calcWeights(url, results.getMetrics())
      metricsList.push(weights) // TODO: what to do with oversized pages?
    } catch (err) {
      error(`failed to analyze ${url}`, err)
    }
  }

  try {
    // TODO: poor mans JSON to JS converter?
    fs.writeFileSync(OUTPUT_FILE, 'export default ' + JSON.stringify(metricsList))
  } catch (err) {
    error(`failed to write results to ${OUTPUT_FILE}`, err)
  }
}

// Entry point: read the newline-separated URL list and kick off the checks.
// Only lines that look like URLs (start with "http") are kept, which also
// skips blanks and comment lines.
try {
  const rawString = fs.readFileSync(INPUT_FILE, 'utf8')
  const urls = rawString.split('\n').filter(line => line.startsWith('http'))
  // generateMetrics is async: a bare call would leave its rejection unhandled
  // (the surrounding try/catch only covers the synchronous part).
  generateMetrics(urls).catch((err) => {
    error('unexpected failure while generating metrics', err)
  })
} catch (err) {
  error(`failed to read page list from ${INPUT_FILE}`, err)
}