Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: upload blobs on onDev event #5552

Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
33 changes: 15 additions & 18 deletions packages/build/src/plugins_core/blobs_upload/index.ts
Expand Up @@ -5,10 +5,8 @@ import pMap from 'p-map'
import semver from 'semver'

import { log, logError } from '../../log/logger.js'
import { scanForBlobs } from '../../utils/blobs.js'
import { CoreStep, CoreStepCondition, CoreStepFunction } from '../types.js'

import { getKeysToUpload, getFileWithMetadata } from './utils.js'
import { getFileWithMetadata, getKeysToUpload, scanForBlobs } from '../../utils/blobs.js'
import { type CoreStep, type CoreStepCondition, type CoreStepFunction } from '../types.js'

const coreStep: CoreStepFunction = async function ({
debug,
Expand All @@ -35,11 +33,8 @@ const coreStep: CoreStepFunction = async function ({

// If we don't have native `fetch` in the global scope, add a polyfill.
if (semver.lt(nodeVersion, '18.0.0')) {
const nodeFetch = await import('node-fetch')

// @ts-expect-error The types between `node-fetch` and the native `fetch`
// are not a 100% match, even though the APIs are mostly compatible.
storeOpts.fetch = nodeFetch.default
const nodeFetch = (await import('node-fetch')).default as unknown as typeof fetch
storeOpts.fetch = nodeFetch
}

const blobs = await scanForBlobs(buildDir, packagePath)
Expand Down Expand Up @@ -72,16 +67,18 @@ const coreStep: CoreStepFunction = async function ({
log(logs, `Uploading ${keys.length} blobs to deploy store...`)
}

const uploadBlob = async (key) => {
if (debug && !quiet) {
log(logs, `- Uploading blob ${key}`, { indent: true })
}
const { data, metadata } = await getFileWithMetadata(blobs.directory, key)
await blobStore.set(key, data, { metadata })
}

try {
await pMap(keys, uploadBlob, { concurrency: 10 })
await pMap(
keys,
async (key: string) => {
if (debug && !quiet) {
log(logs, `- Uploading blob ${key}`, { indent: true })
}
const { data, metadata } = await getFileWithMetadata(blobs.directory, key)
await blobStore.set(key, data, { metadata })
},
{ concurrency: 10 },
)
} catch (err) {
logError(logs, `Error uploading blobs to deploy store: ${err.message}`)

Expand Down
56 changes: 0 additions & 56 deletions packages/build/src/plugins_core/blobs_upload/utils.ts

This file was deleted.

109 changes: 109 additions & 0 deletions packages/build/src/plugins_core/dev_blobs_upload/index.ts
@@ -0,0 +1,109 @@
import { version as nodeVersion } from 'node:process'

import { getDeployStore } from '@netlify/blobs'
import pMap from 'p-map'
import semver from 'semver'

import { log, logError } from '../../log/logger.js'
import { getFileWithMetadata, getKeysToUpload, scanForBlobs } from '../../utils/blobs.js'
import { type CoreStep, type CoreStepCondition, type CoreStepFunction } from '../types.js'

/**
 * Uploads the blobs found in the deploy-scoped blob directory to the deploy
 * store so they are available during local development (`onDev` event).
 * Mirrors the production `blobs_upload` core step.
 *
 * Returns an empty step result in every case; failures during the actual
 * upload are surfaced as a thrown error.
 */
const coreStep: CoreStepFunction = async function ({
  debug,
  logs,
  deployId,
  buildDir,
  quiet,
  packagePath,
  constants: { SITE_ID, NETLIFY_API_TOKEN, NETLIFY_API_HOST },
}) {
  // This should never happen due to the condition check
  if (!deployId || !NETLIFY_API_TOKEN) {
    return {}
  }
  // for cli deploys with `netlify deploy --build` the `NETLIFY_API_HOST` is undefined
  const apiHost = NETLIFY_API_HOST || 'api.netlify.com'

  const storeOpts: Parameters<typeof getDeployStore>[0] = {
    siteID: SITE_ID,
    deployID: deployId,
    token: NETLIFY_API_TOKEN,
    apiURL: `https://${apiHost}`,
  }

  // If we don't have native `fetch` in the global scope, add a polyfill.
  if (semver.lt(nodeVersion, '18.0.0')) {
    // The types between `node-fetch` and the native `fetch` are not a 100%
    // match, even though the APIs are mostly compatible.
    storeOpts.fetch = (await import('node-fetch')).default as unknown as typeof fetch
  }

  const blobs = await scanForBlobs(buildDir, packagePath)

  // We checked earlier, but let's be extra safe
  if (blobs === null) {
    if (!quiet) {
      log(logs, 'No blobs to upload to deploy store.')
    }
    return {}
  }

  // If using the deploy config API, configure the store to use the region that
  // was configured for the deploy.
  if (!blobs.isLegacyDirectory) {
    storeOpts.experimentalRegion = 'auto'
  }

  const blobStore = getDeployStore(storeOpts)
  const keys = await getKeysToUpload(blobs.directory)

  if (keys.length === 0) {
    if (!quiet) {
      log(logs, 'No blobs to upload to deploy store.')
    }
    return {}
  }

  if (!quiet) {
    log(logs, `Uploading ${keys.length} blobs to deploy store...`)
  }

  try {
    await pMap(
      keys,
      async (key: string) => {
        if (debug && !quiet) {
          log(logs, `- Uploading blob ${key}`, { indent: true })
        }
        const { data, metadata } = await getFileWithMetadata(blobs.directory, key)
        await blobStore.set(key, data, { metadata })
      },
      { concurrency: 10 },
    )
  } catch (err) {
    // `err` is `unknown` under `useUnknownInCatchVariables` and may not be an
    // Error instance — narrow before reading `.message` so we never log
    // `undefined`.
    const message = err instanceof Error ? err.message : String(err)
    logError(logs, `Error uploading blobs to deploy store: ${message}`)

    throw new Error(`Failed while uploading blobs to deploy store`)
  }

  if (!quiet) {
    log(logs, `Done uploading blobs to deploy store.`)
  }

  return {}
}

/**
 * The step only runs when we have an API token and a deploy ID, and the
 * build directory actually contains blobs to upload.
 */
const deployAndBlobsPresent: CoreStepCondition = async ({
  deployId,
  buildDir,
  packagePath,
  constants: { NETLIFY_API_TOKEN },
}) => {
  if (!NETLIFY_API_TOKEN || !deployId) {
    return false
  }
  return (await scanForBlobs(buildDir, packagePath)) !== null
}

/**
 * Core step that uploads deploy-scoped blobs during local development.
 * Registered on the `onDev` event and gated by `deployAndBlobsPresent`,
 * i.e. it only runs when a deploy ID, an API token, and at least one blob
 * on disk are all present.
 */
export const devUploadBlobs: CoreStep = {
  event: 'onDev',
  coreStep,
  coreStepId: 'dev_blobs_upload',
  coreStepName: 'Uploading blobs',
  coreStepDescription: () => 'Uploading blobs to development deploy store',
  condition: deployAndBlobsPresent,
}
3 changes: 2 additions & 1 deletion packages/build/src/steps/get.ts
Expand Up @@ -4,6 +4,7 @@ import { uploadBlobs } from '../plugins_core/blobs_upload/index.js'
import { buildCommandCore } from '../plugins_core/build_command.js'
import { deploySite } from '../plugins_core/deploy/index.js'
import { applyDeployConfig } from '../plugins_core/deploy_config/index.js'
import { devUploadBlobs } from '../plugins_core/dev_blobs_upload/index.js'
import { bundleEdgeFunctions } from '../plugins_core/edge_functions/index.js'
import { bundleFunctions } from '../plugins_core/functions/index.js'
import { preCleanup } from '../plugins_core/pre_cleanup/index.js'
Expand Down Expand Up @@ -39,7 +40,7 @@ export const getDevSteps = function (command, steps, eventHandlers?: any[]) {

const eventSteps = getEventSteps(eventHandlers)

const sortedSteps = sortSteps([preDevCleanup, ...steps, eventSteps, devCommandStep], DEV_EVENTS)
const sortedSteps = sortSteps([preDevCleanup, ...steps, devUploadBlobs, eventSteps, devCommandStep], DEV_EVENTS)
const events = getEvents(sortedSteps)

return { steps: sortedSteps, events }
Expand Down
63 changes: 58 additions & 5 deletions packages/build/src/utils/blobs.ts
@@ -1,4 +1,5 @@
import { resolve } from 'node:path'
import { readFile } from 'node:fs/promises'
import path from 'node:path'

import { fdir } from 'fdir'

Expand All @@ -7,8 +8,8 @@ const DEPLOY_CONFIG_BLOBS_PATH = '.netlify/deploy/v1/blobs/deploy'

/** Retrieve the absolute path of the deploy scoped internal blob directories */
export const getBlobsDirs = (buildDir: string, packagePath?: string) => [
resolve(buildDir, packagePath || '', DEPLOY_CONFIG_BLOBS_PATH),
resolve(buildDir, packagePath || '', LEGACY_BLOBS_PATH),
path.resolve(buildDir, packagePath || '', DEPLOY_CONFIG_BLOBS_PATH),
path.resolve(buildDir, packagePath || '', LEGACY_BLOBS_PATH),
]

/**
Expand All @@ -21,7 +22,7 @@ export const getBlobsDirs = (buildDir: string, packagePath?: string) => [
* @returns
*/
export const scanForBlobs = async function (buildDir: string, packagePath?: string) {
const blobsDir = resolve(buildDir, packagePath || '', DEPLOY_CONFIG_BLOBS_PATH)
const blobsDir = path.resolve(buildDir, packagePath || '', DEPLOY_CONFIG_BLOBS_PATH)
const blobsDirScan = await new fdir().onlyCounts().crawl(blobsDir).withPromise()

if (blobsDirScan.files > 0) {
Expand All @@ -31,7 +32,7 @@ export const scanForBlobs = async function (buildDir: string, packagePath?: stri
}
}

const legacyBlobsDir = resolve(buildDir, packagePath || '', LEGACY_BLOBS_PATH)
const legacyBlobsDir = path.resolve(buildDir, packagePath || '', LEGACY_BLOBS_PATH)
const legacyBlobsDirScan = await new fdir().onlyCounts().crawl(legacyBlobsDir).withPromise()

if (legacyBlobsDirScan.files > 0) {
Expand All @@ -43,3 +44,55 @@ export const scanForBlobs = async function (buildDir: string, packagePath?: stri

return null
}

// Metadata sidecar files live next to their blob as `$<name>.json`.
const METADATA_PREFIX = '$'
const METADATA_SUFFIX = '.json'

/** Given output directory, find all file paths to upload excluding metadata files */
export const getKeysToUpload = async (blobsDir: string): Promise<string[]> => {
  const isBlobFile = (fpath: string) => !path.basename(fpath).startsWith(METADATA_PREFIX)

  const relativePaths = await new fdir()
    .withRelativePaths() // we want the relative path from the blobsDir
    .filter(isBlobFile)
    .crawl(blobsDir)
    .withPromise()

  // normalize the path separators to all use the forward slash
  return relativePaths.map((relPath) => relPath.split(path.sep).join('/'))
}

/** Read a file and its metadata file from the blobs directory */
export const getFileWithMetadata = async (
  blobsDir: string,
  key: string,
): Promise<{ data: Buffer; metadata: Record<string, string> }> => {
  const metadataFileName = `${METADATA_PREFIX}${path.basename(key)}${METADATA_SUFFIX}`
  const metadataPath = path.join(blobsDir, path.dirname(key), metadataFileName)

  try {
    // Read the blob contents and its (optional) metadata sidecar in parallel.
    const [data, metadata] = await Promise.all([readFile(path.join(blobsDir, key)), readMetadata(metadataPath)])
    return { data, metadata }
  } catch (err) {
    throw new Error(`Failed while reading '${key}' and its metadata: ${err.message}`)
  }
}

/** Read and parse a metadata sidecar; a missing file simply yields `{}`. */
const readMetadata = async (metadataPath: string): Promise<Record<string, string>> => {
  let rawMetadata: string
  try {
    rawMetadata = await readFile(metadataPath, { encoding: 'utf8' })
  } catch (err) {
    // no metadata file found, that's ok
    if (err.code === 'ENOENT') {
      return {}
    }
    throw err
  }

  try {
    return JSON.parse(rawMetadata)
  } catch {
    // Normalize the error message
    throw new Error(`Error parsing metadata file '${metadataPath}'`)
  }
}