Fetch Lighthouse performance data programmatically with Node.js.
// Fetch a single Lighthouse report from the PageSpeed Insights v5 API.
// Requires a PSI API key in the PSI_API_KEY environment variable.
const API_KEY = process.env.PSI_API_KEY
const url = 'https://example.com'

const response = await fetch(
  `https://www.googleapis.com/pagespeedonline/v5/runPagespeed?url=${encodeURIComponent(url)}&key=${API_KEY}`
)
// Without this guard, a failed request (bad key, quota, 5xx) surfaces as a
// confusing TypeError on `data.lighthouseResult` instead of a clear error.
if (!response.ok) {
  throw new Error(`PSI API error: ${response.status}`)
}
const data = await response.json()

// Lighthouse reports category scores as 0-1; scale to the familiar 0-100.
console.log('Performance:', data.lighthouseResult.categories.performance.score * 100)
The score is 0-1, so multiply by 100 for the familiar 0-100 scale.
/**
 * Extract the lab-measured Core Web Vitals from a PSI API response.
 * Values come from the Lighthouse audits keyed by audit id; LCP and TBT
 * are in milliseconds, CLS is unitless.
 */
function getCoreWebVitals(data) {
  const { audits } = data.lighthouseResult
  const numeric = (id) => audits[id].numericValue
  return {
    lcp: numeric('largest-contentful-paint'),
    cls: numeric('cumulative-layout-shift'),
    tbt: numeric('total-blocking-time'),
  }
}
// Print the lab Core Web Vitals extracted from the report above.
const cwv = getCoreWebVitals(data)
console.log(`LCP: ${cwv.lcp}ms, CLS: ${cwv.cls}, TBT: ${cwv.tbt}ms`)
Note: Lighthouse lab runs cannot measure INP directly, since INP requires real user interactions — so TBT (Total Blocking Time) serves as the lab proxy for responsiveness.
Request multiple categories in one call:
// Request several Lighthouse categories in one API call by repeating the
// `category` query parameter.
const categories = ['performance', 'accessibility', 'seo', 'best-practices']
const categoryParam = categories.map((c) => `category=${c}`).join('&')

const response = await fetch(
  `https://www.googleapis.com/pagespeedonline/v5/runPagespeed?url=${encodeURIComponent(url)}&key=${API_KEY}&${categoryParam}`
)
const data = await response.json()

// Collect each returned category's 0-1 score, rounded onto the 0-100 scale.
const scores = Object.fromEntries(
  Object.entries(data.lighthouseResult.categories).map(
    ([name, category]) => [name, Math.round(category.score * 100)]
  )
)
console.log(scores)
// { performance: 87, accessibility: 92, seo: 100, 'best-practices': 95 }
// Typed shape of the PageSpeed Insights v5 response, limited to the fields
// this article reads — the real payload contains much more.
interface PSIResponse {
  lighthouseResult: {
    categories: {
      // 'performance' is treated as always present here; the others appear
      // only when requested via the `category` query parameter.
      'performance': CategoryResult
      'accessibility'?: CategoryResult
      'seo'?: CategoryResult
      'best-practices'?: CategoryResult
    }
    // Keyed by audit id, e.g. 'largest-contentful-paint'.
    audits: Record<string, AuditResult>
  }
  // Real-user (Chrome UX Report) field data; omitted for low-traffic URLs.
  loadingExperience?: LoadingExperience
}

// A Lighthouse category result. `score` is 0-1; multiply by 100 for display.
// NOTE(review): the API can report a null category score when a category
// errors — confirm whether `score` should be `number | null` here.
interface CategoryResult {
  score: number
  title: string
}

// A single Lighthouse audit. `score` is null for informational audits;
// `numericValue` carries the raw metric (e.g. milliseconds for LCP/TBT).
interface AuditResult {
  score: number | null
  numericValue?: number
  displayValue?: string
}

// Field metrics from real Chrome users; each metric may be absent when
// there is insufficient traffic data.
interface LoadingExperience {
  metrics: {
    LARGEST_CONTENTFUL_PAINT_MS?: MetricValue
    CUMULATIVE_LAYOUT_SHIFT_SCORE?: MetricValue
    INTERACTION_TO_NEXT_PAINT?: MetricValue
  }
}

// A field metric: 75th-percentile value plus its rating bucket.
interface MetricValue {
  percentile: number
  category: 'FAST' | 'AVERAGE' | 'SLOW'
}
/**
 * Fetch a PageSpeed Insights report for `url`.
 *
 * The request is built with `URL`/`URLSearchParams` so the target URL is
 * always encoded correctly, and the `key` parameter is omitted when
 * PSI_API_KEY is unset — the original string template would have sent the
 * literal string "key=undefined" to the API.
 *
 * @throws Error when the API responds with a non-2xx status.
 */
async function fetchPSI(url: string): Promise<PSIResponse> {
  const endpoint = new URL('https://www.googleapis.com/pagespeedonline/v5/runPagespeed')
  endpoint.searchParams.set('url', url)
  if (process.env.PSI_API_KEY) {
    endpoint.searchParams.set('key', process.env.PSI_API_KEY)
  }
  const response = await fetch(endpoint)
  if (!response.ok) {
    throw new Error(`PSI API error: ${response.status}`)
  }
  return response.json()
}
The loadingExperience object contains real Chrome user data when available:
/**
 * Pull real-user (CrUX) Core Web Vitals out of a PSI response.
 * Returns null when the API supplied no field data for the URL; individual
 * metrics that are missing come back as undefined.
 */
function getFieldData(data) {
  const metrics = data.loadingExperience?.metrics
  if (!metrics) {
    return null
  }

  const lcpMetric = metrics.LARGEST_CONTENTFUL_PAINT_MS
  const clsMetric = metrics.CUMULATIVE_LAYOUT_SHIFT_SCORE
  const inpMetric = metrics.INTERACTION_TO_NEXT_PAINT

  return {
    lcp: lcpMetric?.percentile,
    cls: clsMetric?.percentile,
    inp: inpMetric?.percentile,
    lcpCategory: lcpMetric?.category,
    clsCategory: clsMetric?.category,
    inpCategory: inpMetric?.category,
  }
}
Field data is only available for URLs with enough real-world Chrome traffic; for low-traffic pages the API omits `loadingExperience.metrics`, so the function returns null.
/**
 * Fetch a PSI report with basic error handling.
 *
 * Returns the parsed response object, or null after logging on rate
 * limiting (429), bad request (400), or any other non-2xx status.
 * Fixes two defects in the original: the `key` parameter no longer becomes
 * the literal "undefined" when PSI_API_KEY is unset, and the 400 path no
 * longer throws when the error body is not JSON or lacks `error.message`.
 */
async function fetchPSI(url) {
  const endpoint = new URL('https://www.googleapis.com/pagespeedonline/v5/runPagespeed')
  endpoint.searchParams.set('url', url)
  if (process.env.PSI_API_KEY) {
    endpoint.searchParams.set('key', process.env.PSI_API_KEY)
  }

  const response = await fetch(endpoint)

  if (response.status === 429) {
    // Retry-After may be absent; fall back to a 60-second default.
    const retryAfter = response.headers.get('Retry-After') || 60
    console.log(`Rate limited. Retry after ${retryAfter}s`)
    return null
  }
  if (response.status === 400) {
    // Guard the body parse so error reporting itself cannot throw.
    let message = 'unknown error'
    try {
      const error = await response.json()
      message = error?.error?.message ?? message
    } catch {
      // Body was not JSON; keep the fallback message.
    }
    console.error('Invalid request:', message)
    return null
  }
  if (!response.ok) {
    console.error(`PSI API error: ${response.status}`)
    return null
  }
  return response.json()
}
import 'dotenv/config'
// PSI API key loaded from .env (via dotenv) or the process environment.
const API_KEY = process.env.PSI_API_KEY
/**
 * Run a mobile-strategy PSI analysis for one URL and return a compact
 * summary: performance/accessibility scores (0-100) plus the lab LCP, CLS,
 * and TBT values. Returns null after logging when the request fails.
 */
async function analyzeUrl(url) {
  const endpoint = new URL('https://www.googleapis.com/pagespeedonline/v5/runPagespeed')
  endpoint.searchParams.set('url', url)
  endpoint.searchParams.set('key', API_KEY)
  endpoint.searchParams.set('strategy', 'mobile')
  for (const category of ['performance', 'accessibility']) {
    endpoint.searchParams.append('category', category)
  }

  const response = await fetch(endpoint)
  if (!response.ok) {
    console.error(`Failed to analyze ${url}: ${response.status}`)
    return null
  }

  const { lighthouseResult } = await response.json()
  // Helpers: category score on the 0-100 scale, and raw audit metric value.
  const score = (name) => Math.round(lighthouseResult.categories[name].score * 100)
  const metric = (id) => lighthouseResult.audits[id].numericValue

  return {
    url,
    performance: score('performance'),
    accessibility: score('accessibility'),
    lcp: metric('largest-contentful-paint'),
    cls: metric('cumulative-layout-shift'),
    tbt: metric('total-blocking-time'),
  }
}
// Example: analyze one URL and print the summary object (or null on failure).
console.log(await analyzeUrl('https://example.com'))
Building API clients, handling rate limits, and parsing responses is time you're not spending on actual performance improvements.
Unlighthouse handles the complexity and crawls your entire site:
npx unlighthouse --site https://your-site.com