import * as glob from "glob"
import * as path from "path"
import * as lodash from "lodash"
import {
BAKED_BASE_URL,
OPTIMIZE_SVG_EXPORTS,
BAKED_SITE_DIR,
} from "../settings/serverSettings"
import * as db from "../db/db"
import { bakeGraphersToSvgs } from "../baker/GrapherImageBaker"
import { warn } from "../serverUtils/slackLog"
import { Chart } from "../db/model/Chart"
import md5 from "md5"
import { Url } from "../clientUtils/urls/Url"
// Metadata for one baked chart SVG export, parsed from its filename
// (`<key>_v<version>_<width>x<height>.svg` — see getGrapherExportsByUrl).
interface ChartExportMeta {
    // Slug plus optional md5 of the query string (see grapherSlugToExportFileKey)
    key: string
    // Public URL of the baked SVG under `${BAKED_BASE_URL}/exports/`
    svgUrl: string
    // Chart config version the export was baked from
    version: number
    // Pixel dimensions parsed from the export filename
    width: number
    height: number
}
// Splits a grapher URL like https://ourworldindata.org/grapher/soil-lifespans?tab=chart
// into its slug (soil-lifespans) and queryStr (?tab=chart)
export const grapherUrlToSlugAndQueryStr = (grapherUrl: string) => {
const url = Url.fromURL(grapherUrl)
const slug = lodash.last(url.pathname?.split("/")) as string
const queryStr = url.queryStr
return { slug, queryStr }
}
// Combines a grapher slug, and potentially its query string, to _part_ of an export file
// name. It's called fileKey and not fileName because the actual export filename also includes
// other parts, like chart version and width/height.
export const grapherSlugToExportFileKey = (
    slug: string,
    queryStr: string | undefined
) => {
    // Hash the query string so distinct tab/param combinations get distinct files
    const querySuffix = queryStr ? `-${md5(queryStr)}` : ""
    return `${slug}${querySuffix}`
}
// Lookup over the baked chart exports on disk: maps a full grapher URL
// (slug + optional query string) to its export metadata, if one was baked.
export interface GrapherExports {
    get: (grapherUrl: string) => ChartExportMeta | undefined
}
/**
 * Bakes SVG exports for the given grapher URLs, skipping any URL whose
 * on-disk export already matches the chart's current config version.
 *
 * @param urls Full grapher URLs (may include query strings).
 */
export const bakeGrapherUrls = async (urls: string[]) => {
    const currentExports = await getGrapherExportsByUrl()
    const slugToId = await Chart.mapSlugsToIds()
    const toBake = []
    // Check that we need to bake this url, and don't already have an export
    for (const url of urls) {
        const current = currentExports.get(url)
        if (!current) {
            // No export exists yet for this URL — always bake it
            toBake.push(url)
            continue
        }
        // The slug is the last path segment of the grapher URL
        const slug = lodash.last(Url.fromURL(url).pathname?.split("/"))
        if (!slug) {
            warn(`Invalid chart url ${url}`)
            continue
        }
        const chartId = slugToId[slug]
        if (chartId === undefined) {
            warn(`Couldn't find chart with slug ${slug}`)
            continue
        }
        // Fetch the chart's current config version to compare against the baked export
        const rows = await db.queryMysql(
            `SELECT charts.config->>"$.version" AS version FROM charts WHERE charts.id=?`,
            [chartId]
        )
        if (!rows.length) {
            warn(`Mysteriously missing chart by id ${chartId}`)
            continue
        }
        // Rebake only if the config version has moved past the baked version
        if (rows[0].version > current.version) toBake.push(url)
    }
    if (toBake.length > 0) {
        // Bake in chunks of 5 — presumably to bound concurrent rendering work;
        // confirm against bakeGraphersToSvgs if tuning this.
        for (const grapherUrls of lodash.chunk(toBake, 5)) {
            await bakeGraphersToSvgs(
                grapherUrls,
                `${BAKED_SITE_DIR}/exports`,
                OPTIMIZE_SVG_EXPORTS
            )
        }
    }
}
/**
 * Scans the baked exports directory and indexes the available chart SVGs.
 * Export filenames follow `<key>_v<version>_<width>x<height>.svg`; when
 * multiple versions exist for a key, only the highest version is kept.
 *
 * @returns A {@link GrapherExports} lookup from grapher URL to export metadata.
 */
export const getGrapherExportsByUrl = async (): Promise<GrapherExports> => {
    // Index the files to see what we have available, using the most recent version
    // if multiple exports exist
    const files = glob.sync(`${BAKED_SITE_DIR}/exports/*.svg`)
    const exportsByKey = new Map<string, ChartExportMeta>()
    for (const filepath of files) {
        const filename = path.basename(filepath)
        const [key, version, dims] = filename.toLowerCase().split("_")
        // Skip any .svg that doesn't match the bakery's filename pattern,
        // rather than throwing on the undefined segments below
        if (key === undefined || version === undefined || dims === undefined)
            continue
        const versionNumber = parseInt(version.split("v")[1])
        const [width, height] = dims.split("x")
        const current = exportsByKey.get(key)
        if (!current || current.version < versionNumber) {
            exportsByKey.set(key, {
                key: key,
                // URL of the baked file itself (was a garbled placeholder)
                svgUrl: `${BAKED_BASE_URL}/exports/${filename}`,
                version: versionNumber,
                width: parseInt(width),
                height: parseInt(height),
            })
        }
    }
    return {
        get(grapherUrl: string) {
            const { slug, queryStr } = grapherUrlToSlugAndQueryStr(grapherUrl)
            // Keys were lower-cased when indexed above, so lower-case the lookup too
            return exportsByKey.get(
                grapherSlugToExportFileKey(slug, queryStr).toLowerCase()
            )
        },
    }
}
|