Skip to content

Completing reports migration #41

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Draft
wants to merge 18 commits into
base: main
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
150 changes: 73 additions & 77 deletions definitions/output/reports/reports_dynamic.js
Original file line number Diff line number Diff line change
@@ -1,24 +1,38 @@
// Report catalogue: metric definitions and the lens variants to generate.
const configs = new reports.HTTPArchiveReports()
const metrics = configs.listMetrics()
const lenses = configs.lenses

// Destination for exported report JSON files.
const bucket = 'httparchive'
const storagePath = '/reports/dev/' // NOTE(review): dev prefix — confirm before prod rollout

function generateExportQuery (metric, sql, params, ctx) {
// Adjust start and end dates to update reports retrospectively
const startDate = '2024-12-01' // constants.currentMonth;
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

TODO: remove hardcoded dates.

const endDate = '2024-12-01' // constants.currentMonth;

// Build the Cloud Storage object name for one report export.
// Histograms get a per-date folder; timeseries files are flat.
function generateExportPath (ctx, params) {
  switch (params.sql.type) {
    case 'histogram':
      return `${storagePath}${params.date.replaceAll('-', '_')}/${params.metric.id}.json`
    case 'timeseries':
      return `${storagePath}${params.metric.id}.json`
    default:
      throw new Error('Unknown SQL type')
  }
}

function generateExportQuery (ctx, params) {
let query = ''
if (sql.type === 'histogram') {
if (params.sql.type === 'histogram') {
query = `
SELECT
* EXCEPT(date)
FROM ${ctx.self()}
SELECT * EXCEPT(date)
FROM \`reports.${params.sql.type}\`
WHERE date = '${params.date}'
`
} else if (sql.type === 'timeseries') {
} else if (params.sql.type === 'timeseries') {
query = `
SELECT
FORMAT_DATE('%Y_%m_%d', date) AS date,
* EXCEPT(date)
FROM ${ctx.self()}
FROM \`reports.${params.sql.type}\`
`
} else {
throw new Error('Unknown SQL type')
Expand All @@ -28,80 +42,62 @@ FROM ${ctx.self()}
return queryOutput
}

// Resolve the export object path for a metric/SQL-type pair.
// Timeseries files live at the prefix root; histograms under a date folder.
function generateExportPath (metric, sql, params) {
  if (sql.type === 'timeseries') {
    return `${storagePath}${metric.id}.json`
  }
  if (sql.type === 'histogram') {
    return `${storagePath}${params.date.replaceAll('-', '_')}/${metric.id}.json`
  }
  throw new Error('Unknown SQL type')
}

const iterations = []
// dates
for (
let date = constants.currentMonth; date >= constants.currentMonth; date = constants.fnPastMonth(date)) {
iterations.push({
date,
devRankFilter: constants.devRankFilter
})
}

if (iterations.length === 1) {
const params = iterations[0]
let date = endDate;
date >= startDate;
date = constants.fnPastMonth(date)
) {
// metrics
metrics.forEach(metric => {
// timeseries and histograms
metric.SQL.forEach(sql => {
publish(metric.id + '_' + sql.type, {
type: 'incremental',
protected: true,
bigquery: sql.type === 'histogram' ? { partitionBy: 'date', clusterBy: ['client'] } : {},
schema: 'reports'
// tags: ['crawl_complete', 'http_reports']
}).preOps(ctx => `
--DELETE FROM ${ctx.self()}
--WHERE date = '${params.date}';
`).query(
ctx => sql.query(ctx, params)
).postOps(ctx => `
SELECT
reports.run_export_job(
JSON '''{
"destination": "cloud_storage",
"config": {
"bucket": "${bucket}",
"name": "${generateExportPath(metric, sql, params)}"
},
"query": "${generateExportQuery(metric, sql, params, ctx)}"
}'''
);
`)
})
})
} else {
iterations.forEach((params, i) => {
metrics.forEach(metric => {
metric.SQL.forEach(sql => {
operate(metric.id + '_' + sql.type + '_' + params.date, {
// tags: ['crawl_complete', 'http_reports']
}).queries(ctx => `
DELETE FROM reports.${metric.id}_${sql.type}
WHERE date = '${params.date}';

INSERT INTO reports.${metric.id}_${sql.type}` + sql.query(ctx, params)
).postOps(ctx => `
SELECT
reports.run_export_job(
JSON '''{
"destination": "cloud_storage",
"config": {
"bucket": "${bucket}",
"name": "${generateExportPath(metric, sql, params)}"
},
"query": "${generateExportQuery(metric, sql, params, ctx)}"
}'''
);
`)
})
// lenses
for (const [key, value] of Object.entries(lenses)) {
iterations.push({
date,
metric,
sql,
lens: { name: key, sql: value },
devRankFilter: constants.devRankFilter
})
}
})
})
}

// One Dataform operation per (metric, SQL type, lens, month):
// ensure the shared report table exists, replace this slice's rows,
// then trigger the Cloud Storage export job.
// (Removed the unused `i` callback parameter.)
iterations.forEach(params => {
  operate(
    params.metric.id + '_' + params.sql.type + '_' + params.lens.name + '_' + params.date)
    .tags(['crawl_complete', 'reports'])
    .queries(ctx => `
CREATE TABLE IF NOT EXISTS reports.${params.sql.type} (
  date DATE,
  lens STRING,
  metric STRING,
  client STRING,
  data JSON
)
PARTITION BY date
CLUSTER BY metric, lens, client;

DELETE FROM reports.${params.sql.type}
WHERE date = '${params.date}'
  AND metric = '${params.metric.id}'
  AND lens = '${params.lens.name}';

INSERT INTO reports.${params.sql.type} ${params.sql.query(ctx, params)};

SELECT
  reports.run_export_job(
    JSON '''{
      "destination": "cloud_storage",
      "config": {
        "bucket": "${bucket}",
        "name": "${generateExportPath(ctx, params)}"
      },
      "query": "${generateExportQuery(ctx, params)}"
    }'''
  );
`)
})
61 changes: 45 additions & 16 deletions includes/reports.js
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,19 @@ const config = {
{
type: 'histogram',
query: DataformTemplateBuilder.create((ctx, params) => `
WITH pages AS (
SELECT
date,
client,
CAST(FLOOR(INT64(summary.bytesTotal) / 1024 / 100) * 100 AS INT64) AS bin
FROM crawl.pages
WHERE
date = '${params.date}'
${params.lens.sql}
AND is_root_page
AND INT64(summary.bytesTotal) > 0
)

SELECT
*,
SUM(pdf) OVER (PARTITION BY client ORDER BY bin) AS cdf
Expand All @@ -16,25 +29,38 @@ FROM (
volume / SUM(volume) OVER (PARTITION BY client) AS pdf
FROM (
SELECT
date,
client,
CAST(FLOOR(INT64(summary.bytesTotal) / 1024 / 100) * 100 AS INT64) AS bin,
*,
COUNT(0) AS volume
FROM ${ctx.ref('crawl', 'pages')}
WHERE
date = '${params.date}' ${params.devRankFilter}
FROM pages
GROUP BY
date,
client,
bin
HAVING bin IS NOT NULL
)
)
ORDER BY
date,
bin,
client
`)
},
{
type: 'timeseries',
query: DataformTemplateBuilder.create((ctx, params) => `
WITH pages AS (
SELECT
date,
client,
INT64(summary.bytesTotal) AS bytesTotal
FROM crawl.pages
WHERE
date = '${params.date}'
${params.lens.sql}
AND is_root_page
AND INT64(summary.bytesTotal) > 0
)

SELECT
date,
client,
Expand All @@ -44,16 +70,7 @@ SELECT
ROUND(APPROX_QUANTILES(bytesTotal, 1001)[OFFSET(501)] / 1024, 2) AS p50,
ROUND(APPROX_QUANTILES(bytesTotal, 1001)[OFFSET(751)] / 1024, 2) AS p75,
ROUND(APPROX_QUANTILES(bytesTotal, 1001)[OFFSET(901)] / 1024, 2) AS p90
FROM (
SELECT
date,
client,
INT64(summary.bytesTotal) AS bytesTotal
FROM ${ctx.ref('crawl', 'pages')}
WHERE
date = '${params.date}' ${params.devRankFilter} AND
INT64(summary.bytesTotal) > 0
)
FROM pages
GROUP BY
date,
client,
Expand All @@ -65,9 +82,21 @@ GROUP BY
}
}

// Report "lenses": SQL fragments appended to WHERE clauses to scope a report
// to a ranking tier or to pages using a detected technology. Key order is
// significant — it drives the order in which lens variants are generated.
const lenses = {
  all: '',
  top1k: 'AND rank <= 1000',
  top10k: 'AND rank <= 10000',
  top100k: 'AND rank <= 100000',
  top1m: 'AND rank <= 1000000',
  drupal: "AND 'Drupal' IN UNNEST(technologies.technology)",
  magento: "AND 'Magento' IN UNNEST(technologies.technology)",
  wordpress: "AND 'WordPress' IN UNNEST(technologies.technology)"
}

class HTTPArchiveReports {
constructor () {
this.config = config
this.lenses = lenses
}

listReports () {
Expand Down
91 changes: 91 additions & 0 deletions infra/bigquery-export/reports.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,91 @@
import { BigQueryExport } from './bigquery.js'
import { StorageExport } from './storage.js'
import { FirestoreBatch } from './firestore.js'

/**
 * Exports report tables from BigQuery to JSON files in Cloud Storage.
 * Invoked with an export config produced by the Dataform `report_complete`
 * trigger.
 */
export class ReportsExporter {
  constructor () {
    this.bigquery = new BigQueryExport()
    this.storage = new StorageExport()
  }

  /**
   * Export a full timeseries report to `<storagePath><metric>.json`.
   * @param {{name: string}} exportConfig - `name` is the metric id.
   */
  async exportTimeseries (exportConfig) {
    const metric = exportConfig.name
    const query = `
SELECT
  FORMAT_DATE('%Y_%m_%d', date) AS date,
  * EXCEPT(date)
FROM reports.${metric}_timeseries
`
    const rows = await this.bigquery.queryResults(query)
    await this.storage.exportToJson(rows, `${this.storagePath}${metric}.json`)
  }

  /**
   * Export one month of a histogram report to
   * `<storagePath><YYYY_MM_DD>/<metric>.json`.
   * @param {{name: string, date: string}} exportConfig
   */
  async exportHistogram (exportConfig) {
    const metric = exportConfig.name
    const date = exportConfig.date

    const query = `
SELECT * EXCEPT(date)
FROM reports.${metric}_histogram
WHERE date = '${date}'
`
    const rows = await this.bigquery.queryResults(query)
    await this.storage.exportToJson(rows, `${this.storagePath}${date.replaceAll('-', '_')}/${metric}.json`)
  }

  /**
   * Entry point: validate the trigger, derive the storage prefix and
   * dispatch to the histogram or timeseries exporter.
   * @param {object} exportConfig
   */
  async export (exportConfig) {
    if (exportConfig.dataform_trigger !== 'report_complete') {
      console.error('Invalid dataform trigger')
      return
    }

    // BUG FIX: `+` binds tighter than `!==`, so the original
    // `'reports/' + env !== 'prod' ? 'dev/' : ''` compared the concatenated
    // string against 'prod' (always true) and dropped the 'reports/' prefix.
    // Parenthesize the ternary so non-prod paths get 'reports/dev/'.
    this.storagePath = 'reports/' + (exportConfig.environment !== 'prod' ? 'dev/' : '')

    // NOTE(review): 'lense' is the key spelled in the trigger payload — keep
    // it in sync with the Dataform export job config rather than "fixing" it.
    if (exportConfig.lense && exportConfig.lense !== 'all') {
      this.storagePath = this.storagePath + `${exportConfig.lense}/`
    }

    if (exportConfig.type === 'histogram') {
      await this.exportHistogram(exportConfig)
    } else if (exportConfig.type === 'timeseries') {
      await this.exportTimeseries(exportConfig)
    } else {
      console.error('Invalid report type')
    }
  }
}

/**
 * Exports tech report tables from BigQuery into Firestore.
 * Invoked with an export config produced by the Dataform
 * `tech_report_complete` trigger.
 */
export class TechReportsExporter {
  constructor () {
    this.firestore = new FirestoreBatch()
  }

  /**
   * Validate the trigger, build the extraction query for the requested
   * export type and hand it to the Firestore batch writer.
   * @param {object} exportConfig - expects `dataform_trigger`, `type`,
   *   `name` and (for type 'report') `date`.
   */
  async export (exportConfig) {
    if (exportConfig.dataform_trigger !== 'tech_report_complete') {
      console.error('Invalid dataform trigger')
      return
    }

    let query = ''
    if (exportConfig.type === 'report') {
      query = `
SELECT
  STRING(date) AS date,
  * EXCEPT(date)
FROM httparchive.reports.tech_report_${exportConfig.name}
WHERE date = '${exportConfig.date}'
`
    } else if (exportConfig.type === 'dict') {
      query = `
SELECT *
FROM reports.tech_report_${exportConfig.name}
`
    } else {
      // BUG FIX: previously fell through after logging and still ran the
      // Firestore export with an empty query string; bail out instead.
      console.error('Invalid export type')
      return
    }

    await this.firestore.export(exportConfig, query)
  }
}
6 changes: 1 addition & 5 deletions infra/bigquery-export/storage.js
Original file line number Diff line number Diff line change
@@ -1,9 +1,7 @@
import { Storage } from '@google-cloud/storage'
import { BigQueryExport } from './bigquery.js'
import { Readable } from 'stream'
import zlib from 'zlib'

const bigquery = new BigQueryExport()
const storage = new Storage()

export class StorageUpload {
Expand All @@ -15,9 +13,7 @@ export class StorageUpload {
})
}

async exportToJson (query, fileName) {
const data = await bigquery.queryResults(query)

async exportToJson (data, fileName) {
const bucket = storage.bucket(this.bucket)
const file = bucket.file(fileName)

Expand Down
Loading
Loading