diff --git a/packages/build/src/plugins_core/blobs_upload/index.ts b/packages/build/src/plugins_core/blobs_upload/index.ts index f7554e156b..f9fc5e9619 100644 --- a/packages/build/src/plugins_core/blobs_upload/index.ts +++ b/packages/build/src/plugins_core/blobs_upload/index.ts @@ -5,10 +5,8 @@ import pMap from 'p-map' import semver from 'semver' import { log, logError } from '../../log/logger.js' -import { scanForBlobs } from '../../utils/blobs.js' -import { CoreStep, CoreStepCondition, CoreStepFunction } from '../types.js' - -import { getKeysToUpload, getFileWithMetadata } from './utils.js' +import { getFileWithMetadata, getKeysToUpload, scanForBlobs } from '../../utils/blobs.js' +import { type CoreStep, type CoreStepCondition, type CoreStepFunction } from '../types.js' const coreStep: CoreStepFunction = async function ({ debug, @@ -35,11 +33,8 @@ const coreStep: CoreStepFunction = async function ({ // If we don't have native `fetch` in the global scope, add a polyfill. if (semver.lt(nodeVersion, '18.0.0')) { - const nodeFetch = await import('node-fetch') - - // @ts-expect-error The types between `node-fetch` and the native `fetch` - // are not a 100% match, even though the APIs are mostly compatible. 
- storeOpts.fetch = nodeFetch.default + const nodeFetch = (await import('node-fetch')).default as unknown as typeof fetch + storeOpts.fetch = nodeFetch } const blobs = await scanForBlobs(buildDir, packagePath) @@ -72,16 +67,18 @@ const coreStep: CoreStepFunction = async function ({ log(logs, `Uploading ${keys.length} blobs to deploy store...`) } - const uploadBlob = async (key) => { - if (debug && !quiet) { - log(logs, `- Uploading blob ${key}`, { indent: true }) - } - const { data, metadata } = await getFileWithMetadata(blobs.directory, key) - await blobStore.set(key, data, { metadata }) - } - try { - await pMap(keys, uploadBlob, { concurrency: 10 }) + try { + await pMap( + keys, + async (key: string) => { + if (debug && !quiet) { + log(logs, `- Uploading blob ${key}`, { indent: true }) + } + const { data, metadata } = await getFileWithMetadata(blobs.directory, key) + await blobStore.set(key, data, { metadata }) + }, + { concurrency: 10 }, + ) } catch (err) { logError(logs, `Error uploading blobs to deploy store: ${err.message}`) diff --git a/packages/build/src/plugins_core/blobs_upload/utils.ts b/packages/build/src/plugins_core/blobs_upload/utils.ts deleted file mode 100644 index eafb37ae76..0000000000 --- a/packages/build/src/plugins_core/blobs_upload/utils.ts +++ /dev/null @@ -1,56 +0,0 @@ -import { readFile } from 'node:fs/promises' -import path from 'node:path' - -import { fdir } from 'fdir' - -const METADATA_PREFIX = '$' -const METADATA_SUFFIX = '.json' - -/** Given output directory, find all file paths to upload excluding metadata files */ -export async function getKeysToUpload(blobsDir: string): Promise<string[]> { - const files = await new fdir() - .withRelativePaths() // we want the relative path from the blobsDir - .filter((fpath) => !path.basename(fpath).startsWith(METADATA_PREFIX)) - .crawl(blobsDir) - .withPromise() - - // normalize the path separators to all use the forward slash - return files.map((f) => f.split(path.sep).join('/')) -} - -/** Read a file and its 
metadata file from the blobs directory */ -export async function getFileWithMetadata( - blobsDir: string, - key: string, -): Promise<{ data: Buffer; metadata: Record<string, unknown> }> { - const contentPath = path.join(blobsDir, key) - const dirname = path.dirname(key) - const basename = path.basename(key) - const metadataPath = path.join(blobsDir, dirname, `${METADATA_PREFIX}${basename}${METADATA_SUFFIX}`) - - const [data, metadata] = await Promise.all([readFile(contentPath), readMetadata(metadataPath)]).catch((err) => { - throw new Error(`Failed while reading '${key}' and its metadata: ${err.message}`) - }) - - return { data, metadata } -} - -async function readMetadata(metadataPath: string): Promise<Record<string, unknown>> { - let metadataFile - try { - metadataFile = await readFile(metadataPath, { encoding: 'utf8' }) - } catch (err) { - if (err.code === 'ENOENT') { - // no metadata file found, that's ok - return {} - } - throw err - } - - try { - return JSON.parse(metadataFile) - } catch { - // Normalize the error message - throw new Error(`Error parsing metadata file '${metadataPath}'`) - } -} diff --git a/packages/build/src/plugins_core/dev_blobs_upload/index.ts b/packages/build/src/plugins_core/dev_blobs_upload/index.ts new file mode 100644 index 0000000000..dca2863626 --- /dev/null +++ b/packages/build/src/plugins_core/dev_blobs_upload/index.ts @@ -0,0 +1,109 @@ +import { version as nodeVersion } from 'node:process' + +import { getDeployStore } from '@netlify/blobs' +import pMap from 'p-map' +import semver from 'semver' + +import { log, logError } from '../../log/logger.js' +import { getFileWithMetadata, getKeysToUpload, scanForBlobs } from '../../utils/blobs.js' +import { type CoreStep, type CoreStepCondition, type CoreStepFunction } from '../types.js' + +const coreStep: CoreStepFunction = async function ({ + debug, + logs, + deployId, + buildDir, + quiet, + packagePath, + constants: { SITE_ID, NETLIFY_API_TOKEN, NETLIFY_API_HOST }, +}) { + // This should never happen due to the condition check 
+ if (!deployId || !NETLIFY_API_TOKEN) { + return {} + } + // for cli deploys with `netlify deploy --build` the `NETLIFY_API_HOST` is undefined + const apiHost = NETLIFY_API_HOST || 'api.netlify.com' + + const storeOpts: Parameters<typeof getDeployStore>[0] = { + siteID: SITE_ID, + deployID: deployId, + token: NETLIFY_API_TOKEN, + apiURL: `https://${apiHost}`, + } + + // If we don't have native `fetch` in the global scope, add a polyfill. + if (semver.lt(nodeVersion, '18.0.0')) { + const nodeFetch = (await import('node-fetch')).default as unknown as typeof fetch + storeOpts.fetch = nodeFetch + } + + const blobs = await scanForBlobs(buildDir, packagePath) + + // We checked earlier, but let's be extra safe + if (blobs === null) { + if (!quiet) { + log(logs, 'No blobs to upload to deploy store.') + } + return {} + } + + // If using the deploy config API, configure the store to use the region that + // was configured for the deploy. + if (!blobs.isLegacyDirectory) { + storeOpts.experimentalRegion = 'auto' + } + + const blobStore = getDeployStore(storeOpts) + const keys = await getKeysToUpload(blobs.directory) + + if (keys.length === 0) { + if (!quiet) { + log(logs, 'No blobs to upload to deploy store.') + } + return {} + } + + if (!quiet) { + log(logs, `Uploading ${keys.length} blobs to deploy store...`) + } + + try { + await pMap( + keys, + async (key: string) => { + if (debug && !quiet) { + log(logs, `- Uploading blob ${key}`, { indent: true }) + } + const { data, metadata } = await getFileWithMetadata(blobs.directory, key) + await blobStore.set(key, data, { metadata }) + }, + { concurrency: 10 }, + ) + } catch (err) { + logError(logs, `Error uploading blobs to deploy store: ${err.message}`) + + throw new Error(`Failed while uploading blobs to deploy store`) + } + + if (!quiet) { + log(logs, `Done uploading blobs to deploy store.`) + } + + return {} +} + +const deployAndBlobsPresent: CoreStepCondition = async ({ + deployId, + buildDir, + packagePath, + constants: { NETLIFY_API_TOKEN }, +}) 
=> Boolean(NETLIFY_API_TOKEN && deployId && (await scanForBlobs(buildDir, packagePath))) + +export const devUploadBlobs: CoreStep = { + event: 'onDev', + coreStep, + coreStepId: 'dev_blobs_upload', + coreStepName: 'Uploading blobs', + coreStepDescription: () => 'Uploading blobs to development deploy store', + condition: deployAndBlobsPresent, +} diff --git a/packages/build/src/steps/get.ts b/packages/build/src/steps/get.ts index 7451b76c34..01ce46694d 100644 --- a/packages/build/src/steps/get.ts +++ b/packages/build/src/steps/get.ts @@ -4,6 +4,7 @@ import { uploadBlobs } from '../plugins_core/blobs_upload/index.js' import { buildCommandCore } from '../plugins_core/build_command.js' import { deploySite } from '../plugins_core/deploy/index.js' import { applyDeployConfig } from '../plugins_core/deploy_config/index.js' +import { devUploadBlobs } from '../plugins_core/dev_blobs_upload/index.js' import { bundleEdgeFunctions } from '../plugins_core/edge_functions/index.js' import { bundleFunctions } from '../plugins_core/functions/index.js' import { preCleanup } from '../plugins_core/pre_cleanup/index.js' @@ -39,7 +40,7 @@ export const getDevSteps = function (command, steps, eventHandlers?: any[]) { const eventSteps = getEventSteps(eventHandlers) - const sortedSteps = sortSteps([preDevCleanup, ...steps, eventSteps, devCommandStep], DEV_EVENTS) + const sortedSteps = sortSteps([preDevCleanup, ...steps, devUploadBlobs, eventSteps, devCommandStep], DEV_EVENTS) const events = getEvents(sortedSteps) return { steps: sortedSteps, events } diff --git a/packages/build/src/utils/blobs.ts b/packages/build/src/utils/blobs.ts index 56cbd794b6..15bfb6958b 100644 --- a/packages/build/src/utils/blobs.ts +++ b/packages/build/src/utils/blobs.ts @@ -1,4 +1,5 @@ -import { resolve } from 'node:path' +import { readFile } from 'node:fs/promises' +import path from 'node:path' import { fdir } from 'fdir' @@ -7,8 +8,8 @@ const DEPLOY_CONFIG_BLOBS_PATH = '.netlify/deploy/v1/blobs/deploy' /** 
Retrieve the absolute path of the deploy scoped internal blob directories */ export const getBlobsDirs = (buildDir: string, packagePath?: string) => [ - resolve(buildDir, packagePath || '', DEPLOY_CONFIG_BLOBS_PATH), - resolve(buildDir, packagePath || '', LEGACY_BLOBS_PATH), + path.resolve(buildDir, packagePath || '', DEPLOY_CONFIG_BLOBS_PATH), + path.resolve(buildDir, packagePath || '', LEGACY_BLOBS_PATH), ] /** @@ -21,7 +22,7 @@ export const getBlobsDirs = (buildDir: string, packagePath?: string) => [ * @returns */ export const scanForBlobs = async function (buildDir: string, packagePath?: string) { - const blobsDir = resolve(buildDir, packagePath || '', DEPLOY_CONFIG_BLOBS_PATH) + const blobsDir = path.resolve(buildDir, packagePath || '', DEPLOY_CONFIG_BLOBS_PATH) const blobsDirScan = await new fdir().onlyCounts().crawl(blobsDir).withPromise() if (blobsDirScan.files > 0) { @@ -31,7 +32,7 @@ export const scanForBlobs = async function (buildDir: string, packagePath?: stri } } - const legacyBlobsDir = resolve(buildDir, packagePath || '', LEGACY_BLOBS_PATH) + const legacyBlobsDir = path.resolve(buildDir, packagePath || '', LEGACY_BLOBS_PATH) const legacyBlobsDirScan = await new fdir().onlyCounts().crawl(legacyBlobsDir).withPromise() if (legacyBlobsDirScan.files > 0) { @@ -43,3 +44,55 @@ export const scanForBlobs = async function (buildDir: string, packagePath?: stri return null } + +const METADATA_PREFIX = '$' +const METADATA_SUFFIX = '.json' + +/** Given output directory, find all file paths to upload excluding metadata files */ +export const getKeysToUpload = async (blobsDir: string): Promise<string[]> => { + const files = await new fdir() + .withRelativePaths() // we want the relative path from the blobsDir + .filter((fpath) => !path.basename(fpath).startsWith(METADATA_PREFIX)) + .crawl(blobsDir) + .withPromise() + + // normalize the path separators to all use the forward slash + return files.map((f) => f.split(path.sep).join('/')) +} + +/** Read a file and its metadata 
file from the blobs directory */ +export const getFileWithMetadata = async ( + blobsDir: string, + key: string, +): Promise<{ data: Buffer; metadata: Record<string, unknown> }> => { + const contentPath = path.join(blobsDir, key) + const dirname = path.dirname(key) + const basename = path.basename(key) + const metadataPath = path.join(blobsDir, dirname, `${METADATA_PREFIX}${basename}${METADATA_SUFFIX}`) + + const [data, metadata] = await Promise.all([readFile(contentPath), readMetadata(metadataPath)]).catch((err) => { + throw new Error(`Failed while reading '${key}' and its metadata: ${err.message}`) + }) + + return { data, metadata } +} + +const readMetadata = async (metadataPath: string): Promise<Record<string, unknown>> => { + let metadataFile: string + try { + metadataFile = await readFile(metadataPath, { encoding: 'utf8' }) + } catch (err) { + if (err.code === 'ENOENT') { + // no metadata file found, that's ok + return {} + } + throw err + } + + try { + return JSON.parse(metadataFile) + } catch { + // Normalize the error message + throw new Error(`Error parsing metadata file '${metadataPath}'`) + } +}