diff --git a/README.md b/README.md
index acfc43f68..f9741b09e 100644
--- a/README.md
+++ b/README.md
@@ -202,6 +202,7 @@ datadogWebpackPlugin({
     rum?: {
         disabled?: boolean,
         sourcemaps?: {
+            bailOnError?: boolean,
             dryRun?: boolean,
             intakeUrl?: string,
             maxConcurrency?: number,
@@ -257,6 +258,7 @@ datadogWebpackPlugin({
     rum?: {
         disabled?: boolean;
         sourcemaps?: {
+            bailOnError?: boolean;
             dryRun?: boolean;
             intakeUrl?: string;
             maxConcurrency?: number;
diff --git a/packages/core/src/plugins/git/trackedFilesMatcher.ts b/packages/core/src/plugins/git/trackedFilesMatcher.ts
index 1e424cec2..f05def0a1 100644
--- a/packages/core/src/plugins/git/trackedFilesMatcher.ts
+++ b/packages/core/src/plugins/git/trackedFilesMatcher.ts
@@ -26,6 +26,13 @@ export class TrackedFilesMatcher {
         }
     }
 
+    private displaySource(src: string) {
+        if (src.length <= 40) {
+            return src;
+        }
+        return `[...]${src.slice(-35)}`;
+    }
+
     // Looks up the sources declared in the sourcemap and return a list of related tracked files.
     public matchSourcemap(
         srcmapPath: string,
@@ -44,7 +51,9 @@ export class TrackedFilesMatcher {
         }
         const filtered = this.matchSources(sources);
         if (filtered.length === 0) {
-            onSourcesNotFound(`Sources not in the tracked files.`);
+            onSourcesNotFound(
+                `${sources.map(this.displaySource).join(', ')} not in the tracked files.`,
+            );
             return undefined;
         }
 
diff --git a/packages/plugins/rum/README.md b/packages/plugins/rum/README.md
index 4e87e9806..601b50102 100644
--- a/packages/plugins/rum/README.md
+++ b/packages/plugins/rum/README.md
@@ -11,6 +11,7 @@ Interact with our Real User Monitoring product (RUM) in Datadog directly from yo
 
 -   [Configuration](#configuration)
 -   [Sourcemaps Upload](#sourcemaps-upload)
+    -   [`rum.sourcemaps.bailOnError`](#rumsourcemapsbailonerror)
     -   [`rum.sourcemaps.dryRun`](#rumsourcemapsdryrun)
     -   [`rum.sourcemaps.intakeUrl`](#rumsourcemapsintakeurl)
     -   [`rum.sourcemaps.maxConcurrency`](#rumsourcemapsmaxconcurrency)
@@ -25,6 +26,7 @@ Interact with our Real User Monitoring product (RUM) in Datadog directly from yo
 rum?: {
     disabled?: boolean;
     sourcemaps?: {
+        bailOnError?: boolean;
         dryRun?: boolean;
         intakeUrl?: string;
         maxConcurrency?: number;
@@ -43,6 +45,12 @@ Upload JavaScript sourcemaps to Datadog to un-minify your errors.
 > You can override the intake URL by setting the `DATADOG_SOURCEMAP_INTAKE_URL` environment variable (eg. `https://sourcemap-intake.datadoghq.com/v1/input`).
 > Or only the domain with the `DATADOG_SITE` environment variable (eg. `datadoghq.com`).
 
+### `rum.sourcemaps.bailOnError`
+
+> default: `false`
+
+Should the upload of sourcemaps fail the build on the first error?
+
 ### `rum.sourcemaps.dryRun`
 
 > default: `false`
diff --git a/packages/plugins/rum/src/sourcemaps/payload.ts b/packages/plugins/rum/src/sourcemaps/payload.ts
index c765e3822..7c83c5512 100644
--- a/packages/plugins/rum/src/sourcemaps/payload.ts
+++ b/packages/plugins/rum/src/sourcemaps/payload.ts
@@ -4,6 +4,7 @@
 
 import type { RepositoryData } from '@dd/core/types';
 import { promises as fs } from 'fs';
+import path from 'path';
 
 import type { Sourcemap } from '../types';
 
@@ -57,8 +58,8 @@ const SLASH_RX = /[/]+|[\\]+/g;
 const SLASH_TRIM_RX = /^[/]+|^[\\]+|[/]+$|[\\]+$/g;
 
 // Verify any repeated pattern between the path and prefix.
-export const prefixRepeat = (path: string, prefix: string): string => {
-    const pathParts = path.replace(SLASH_TRIM_RX, '').split(SLASH_RX);
+export const prefixRepeat = (filePath: string, prefix: string): string => {
+    const pathParts = filePath.replace(SLASH_TRIM_RX, '').split(SLASH_RX);
     const prefixParts = prefix.replace(SLASH_TRIM_RX, '').split(SLASH_RX);
 
     const normalizedPath = pathParts.join('/');
@@ -75,14 +76,14 @@ export const prefixRepeat = (path: string, prefix: string): string => {
 };
 
 // Verify that every files are available.
-export const checkFile = async (path: string): Promise<FileValidity> => {
+export const checkFile = async (filePath: string): Promise<FileValidity> => {
     const validity: FileValidity = {
         empty: false,
         exists: true,
     };
 
     try {
-        const stats = await fs.stat(path);
+        const stats = await fs.stat(filePath);
         if (stats.size === 0) {
             validity.empty = true;
         }
@@ -172,7 +173,7 @@ export const getPayload = async (
                 sourcemap.sourcemapFilePath,
                 (reason) => {
                     warnings.push(
-                        `No tracked files found for sources contained in ${sourcemap.sourcemapFilePath}: "${reason}"`,
+                        `${path.basename(sourcemap.sourcemapFilePath)}: "${reason}"`,
                     );
                 },
             ),
diff --git a/packages/plugins/rum/src/sourcemaps/sender.ts b/packages/plugins/rum/src/sourcemaps/sender.ts
index a73070a02..95d689bcd 100644
--- a/packages/plugins/rum/src/sourcemaps/sender.ts
+++ b/packages/plugins/rum/src/sourcemaps/sender.ts
@@ -2,6 +2,7 @@
 // This product includes software developed at Datadog (https://www.datadoghq.com/).
 // Copyright 2019-Present Datadog, Inc.
 
+import { formatDuration } from '@dd/core/helpers';
 import type { Logger } from '@dd/core/log';
 import type { GlobalContext } from '@dd/core/types';
 import retry from 'async-retry';
@@ -24,6 +25,13 @@ const nbRetries = 5;
 type DataResponse = { data: Gzip; headers: Record<string, string> };
 
 const green = chalk.green.bold;
+const yellow = chalk.yellow.bold;
+const red = chalk.red.bold;
+
+type FileMetadata = {
+    sourcemap: string;
+    file: string;
+};
 
 export const doRequest = async (
     url: string,
@@ -47,19 +55,19 @@
                 });
             } catch (error: any) {
                 // We don't want to retry if there is a non-fetch related error.
-                bail(new Error(error));
+                bail(error);
                 return;
             }
 
             if (!response.ok) {
                 // Not instantiating the error here, as it will make Jest throw in the tests.
-                const error = `HTTP ${response.status} ${response.statusText}`;
+                const errorMessage = `HTTP ${response.status} ${response.statusText}`;
                 if (errorCodesNoRetry.includes(response.status)) {
-                    bail(new Error(error));
+                    bail(new Error(errorMessage));
                     return;
                 } else {
                     // Trigger the retry.
-                    throw new Error(error);
+                    throw new Error(errorMessage);
                 }
             }
 
@@ -69,7 +77,7 @@
             return result;
         } catch (error: any) {
            // We don't want to retry on parsing errors.
-            bail(new Error(error));
+            bail(error);
         }
     },
     {
@@ -133,13 +141,17 @@
     context: GlobalContext,
     log: Logger,
 ) => {
+    const errors: { metadata?: FileMetadata; error: Error }[] = [];
+    const warnings: string[] = [];
+
     if (!context.auth?.apiKey) {
-        throw new Error('No authentication token provided');
+        errors.push({ error: new Error('No authentication token provided') });
+        return { errors, warnings };
     }
 
     if (payloads.length === 0) {
-        log('No sourcemaps to upload', 'warn');
-        return;
+        warnings.push('No sourcemaps to upload');
+        return { errors, warnings };
     }
 
     const queue = new PQueue({ concurrency: options.maxConcurrency });
@@ -149,6 +161,8 @@
         'DD-EVP-ORIGIN-VERSION': context.version,
     };
 
+    const addPromises = [];
+
     for (const payload of payloads) {
         const metadata = {
             sourcemap: (payload.content.get('source_map') as MultipartFileValue)?.path.replace(
@@ -163,23 +177,34 @@
 
         log(`Queuing ${green(metadata.sourcemap)} | ${green(metadata.file)}`);
 
-        // eslint-disable-next-line no-await-in-loop
-        queue.add(async () => {
-            await doRequest(
-                options.intakeUrl,
-                getData(payload, defaultHeaders),
-                (error: Error, attempt: number) => {
-                    log(
-                        `Failed to upload sourcemaps: ${error.message}\nRetrying ${attempt}/${nbRetries}`,
-                        'warn',
+        addPromises.push(
+            queue.add(async () => {
+                try {
+                    await doRequest(
+                        options.intakeUrl,
+                        getData(payload, defaultHeaders),
+                        // On retry we store the error as a warning.
+                        (error: Error, attempt: number) => {
+                            const warningMessage = `Failed to upload ${yellow(metadata.sourcemap)} | ${yellow(metadata.file)}:\n ${error.message}\nRetrying ${attempt}/${nbRetries}`;
+                            warnings.push(warningMessage);
+                            log(warningMessage, 'warn');
+                        },
                     );
-                },
-            );
-            log(`Sent ${green(metadata.sourcemap)} | ${green(metadata.file)}`);
-        });
+                    log(`Sent ${green(metadata.sourcemap)} | ${green(metadata.file)}`);
+                } catch (e: any) {
+                    errors.push({ metadata, error: e });
+                    // Depending on the configuration we throw or not.
+                    if (options.bailOnError === true) {
+                        throw e;
+                    }
+                }
+            }),
+        );
     }
 
-    return queue.onIdle();
+    await Promise.all(addPromises);
+    await queue.onIdle();
+    return { warnings, errors };
 };
 
 export const sendSourcemaps = async (
@@ -188,6 +213,7 @@
     context: GlobalContext,
     log: Logger,
 ) => {
+    const start = Date.now();
     const prefix = options.minifiedPathPrefix;
 
     const metadata: Metadata = {
@@ -207,20 +233,52 @@
     const errors = payloads.map((payload) => payload.errors).flat();
     const warnings = payloads.map((payload) => payload.warnings).flat();
 
+    if (warnings.length > 0) {
+        log(`Warnings while preparing payloads:\n - ${warnings.join('\n - ')}`, 'warn');
+    }
+
     if (errors.length > 0) {
-        const errorMsg = `Failed to upload sourcemaps:\n - ${errors.join('\n - ')}`;
+        const errorMsg = `Failed to prepare payloads, aborting upload:\n - ${errors.join('\n - ')}`;
         log(errorMsg, 'error');
-        throw new Error(errorMsg);
+        // Depending on the configuration we throw or not.
+        if (options.bailOnError === true) {
+            throw new Error(errorMsg);
+        }
+        return;
     }
 
-    if (warnings.length > 0) {
-        log(`Warnings while uploading sourcemaps:\n - ${warnings.join('\n - ')}`, 'warn');
+    const { errors: uploadErrors, warnings: uploadWarnings } = await upload(
+        payloads,
+        options,
+        context,
+        log,
+    );
+
+    log(
+        `Done uploading ${green(sourcemaps.length.toString())} sourcemaps in ${green(formatDuration(Date.now() - start))}.`,
+        'info',
+    );
+
+    if (uploadErrors.length > 0) {
+        const listOfErrors = ` - ${uploadErrors
+            .map(({ metadata: fileMetadata, error }) => {
+                if (fileMetadata) {
+                    return `${red(fileMetadata.file)} | ${red(fileMetadata.sourcemap)} : ${error.message}`;
+                }
+                return error.message;
+            })
+            .join('\n - ')}`;
+
+        const errorMsg = `Failed to upload some sourcemaps:\n${listOfErrors}`;
+        log(errorMsg, 'error');
+        // Depending on the configuration we throw or not.
+        // This should not be reached as we'd have thrown earlier.
+        if (options.bailOnError === true) {
+            throw new Error(errorMsg);
+        }
     }
 
-    try {
-        await upload(payloads, options, context, log);
-    } catch (error: any) {
-        log(`Failed to upload sourcemaps: ${error.message}`, 'error');
-        throw error;
+    if (uploadWarnings.length > 0) {
+        log(`Warnings while uploading sourcemaps:\n - ${uploadWarnings.join('\n - ')}`, 'warn');
     }
 };
diff --git a/packages/plugins/rum/src/types.ts b/packages/plugins/rum/src/types.ts
index 6e9a331f1..ac70fac15 100644
--- a/packages/plugins/rum/src/types.ts
+++ b/packages/plugins/rum/src/types.ts
@@ -9,6 +9,7 @@ import type { CONFIG_KEY } from './constants';
 export type MinifiedPathPrefix = `http://${string}` | `https://${string}` | `/${string}`;
 
 export type RumSourcemapsOptions = {
+    bailOnError?: boolean;
     dryRun?: boolean;
     intakeUrl?: string;
     maxConcurrency?: number;
diff --git a/packages/plugins/rum/src/validate.ts b/packages/plugins/rum/src/validate.ts
index e47f76752..42d69d909 100644
--- a/packages/plugins/rum/src/validate.ts
+++ b/packages/plugins/rum/src/validate.ts
@@ -99,6 +99,7 @@ export const validateSourcemapsOptions = (
 
         // Add the defaults.
         const sourcemapsWithDefaults: RumSourcemapsOptionsWithDefaults = {
+            bailOnError: false,
            dryRun: false,
            maxConcurrency: 20,
            intakeUrl:
diff --git a/packages/tests/src/plugins/rum/sourcemaps/sender.test.ts b/packages/tests/src/plugins/rum/sourcemaps/sender.test.ts
index cf2ad6166..e7b9d485d 100644
--- a/packages/tests/src/plugins/rum/sourcemaps/sender.test.ts
+++ b/packages/tests/src/plugins/rum/sourcemaps/sender.test.ts
@@ -2,9 +2,9 @@
 // This product includes software developed at Datadog (https://www.datadoghq.com/).
 // Copyright 2019-Present Datadog, Inc.
 
-import type { MultipartValue } from '@dd/rum-plugins/sourcemaps/payload';
-import { doRequest, getData, sendSourcemaps } from '@dd/rum-plugins/sourcemaps/sender';
+import { doRequest, getData, sendSourcemaps, upload } from '@dd/rum-plugins/sourcemaps/sender';
 import { getContextMock } from '@dd/tests/helpers/mocks';
+import retry from 'async-retry';
 import { vol } from 'memfs';
 import nock from 'nock';
 import { Readable, type Stream } from 'stream';
@@ -14,6 +14,7 @@ import {
     API_PATH,
     FAKE_URL,
     INTAKE_URL,
+    getPayloadMock,
     getSourcemapMock,
     getSourcemapsConfiguration,
 } from '../testHelpers';
@@ -32,6 +33,8 @@ jest.mock('async-retry', () => {
     });
 });
 
+const retryMock = jest.mocked(retry);
+
 function readFully(stream: Stream): Promise<Buffer> {
     const chunks: any[] = [];
     return new Promise((resolve, reject) => {
@@ -61,31 +64,7 @@
             __dirname,
         );
 
-        const payload = {
-            content: new Map([
-                [
-                    'source_map',
-                    {
-                        type: 'file',
-                        path: '/path/to/sourcemap.js.map',
-                        options: { filename: 'source_map', contentType: 'application/json' },
-                    },
-                ],
-                [
-                    'minified_file',
-                    {
-                        type: 'file',
-                        path: '/path/to/minified.min.js',
-                        options: {
-                            filename: 'minified_file',
-                            contentType: 'application/javascript',
-                        },
-                    },
-                ],
-            ]),
-            errors: [],
-            warnings: [],
-        };
+        const payload = getPayloadMock();
 
         const { data, headers } = await getData(payload)();
         const zippedData = await readFully(data);
@@ -105,14 +84,7 @@
     describe('sendSourcemaps', () => {
         afterEach(async () => {
            nock.cleanAll();
-
-            // Using a setTimeout because it creates an error on the ReadStreams created for the payloads.
-            await new Promise((resolve) => {
-                setTimeout(() => {
-                    vol.reset();
-                    resolve(true);
-                }, 100);
-            });
+            vol.reset();
        });
 
        test('It should upload sourcemaps.', async () => {
@@ -137,7 +109,35 @@
            expect(scope.isDone()).toBe(true);
        });
 
-        test('It should throw in case of payload issues', async () => {
+        test('It should alert in case of payload issues', async () => {
+            const scope = nock(FAKE_URL).post(API_PATH).reply(200);
+            // Emulate some fixtures.
+            vol.fromJSON(
+                {
+                    // Empty file.
+                    '/path/to/minified.min.js': '',
+                },
+                __dirname,
+            );
+
+            const logMock = jest.fn();
+
+            await sendSourcemaps(
+                [getSourcemapMock()],
+                getSourcemapsConfiguration(),
+                getContextMock(),
+                logMock,
+            );
+
+            expect(logMock).toHaveBeenCalledTimes(1);
+            expect(logMock).toHaveBeenCalledWith(
+                expect.stringMatching('Failed to prepare payloads, aborting upload'),
+                'error',
+            );
+            expect(scope.isDone()).toBe(false);
+        });
+
+        test('It should throw in case of payload issues and bailOnError', async () => {
            const scope = nock(FAKE_URL).post(API_PATH).reply(200);
            // Emulate some fixtures.
            vol.fromJSON(
@@ -151,11 +151,11 @@
            await expect(async () => {
                await sendSourcemaps(
                    [getSourcemapMock()],
-                    getSourcemapsConfiguration(),
+                    getSourcemapsConfiguration({ bailOnError: true }),
                    getContextMock(),
                    () => {},
                );
-            }).rejects.toThrow('Failed to upload sourcemaps:');
+            }).rejects.toThrow('Failed to prepare payloads, aborting upload');
 
            expect(scope.isDone()).toBe(false);
        });
@@ -196,12 +196,12 @@
                .times(2)
                .reply(404)
                .post(API_PATH)
-                .reply(200, {});
+                .reply(200, { data: 'ok' });
 
            const response = await doRequest(INTAKE_URL, getDataMock);
 
            expect(scope.isDone()).toBe(true);
-            expect(response).toEqual({});
+            expect(response).toEqual({ data: 'ok' });
        });
 
        test('It should throw on too many retries', async () => {
@@ -233,8 +233,62 @@
            await expect(async () => {
                await doRequest(INTAKE_URL, () => ({ data, headers: {} }));
-            }).rejects.toThrow('TypeError: Response body object should not be disturbed or locked');
+            }).rejects.toThrow('Response body object should not be disturbed or locked');
 
            expect(scope.isDone()).toBe(true);
        });
    });
+
+    describe('upload', () => {
+        test('It should not throw', async () => {
+            retryMock.mockImplementation(jest.fn());
+
+            const payloads = [getPayloadMock()];
+
+            const { warnings, errors } = await upload(
+                payloads,
+                getSourcemapsConfiguration(),
+                getContextMock(),
+                () => {},
+            );
+
+            expect(warnings).toHaveLength(0);
+            expect(errors).toHaveLength(0);
+        });
+
+        test('It should alert in case of errors', async () => {
+            retryMock.mockRejectedValue(new Error('Fake Error'));
+
+            const payloads = [getPayloadMock()];
+            const { warnings, errors } = await upload(
+                payloads,
+                getSourcemapsConfiguration(),
+                getContextMock(),
+                jest.fn(),
+            );
+
+            expect(errors).toHaveLength(1);
+            expect(errors[0]).toMatchObject({
+                metadata: {
+                    sourcemap: expect.any(String),
+                    file: expect.any(String),
+                },
+                error: new Error('Fake Error'),
+            });
+            expect(warnings).toHaveLength(0);
+        });
+
+        test('It should throw in case of errors with bailOnError', async () => {
+            retryMock.mockRejectedValue(new Error('Fake Error'));
+
+            const payloads = [getPayloadMock()];
+            await expect(async () => {
+                await upload(
+                    payloads,
+                    getSourcemapsConfiguration({ bailOnError: true }),
+                    getContextMock(),
+                    () => {},
+                );
+            }).rejects.toThrow('Fake Error');
+        });
+    });
});
diff --git a/packages/tests/src/plugins/rum/testHelpers.ts b/packages/tests/src/plugins/rum/testHelpers.ts
index c96984abf..dc219fa29 100644
--- a/packages/tests/src/plugins/rum/testHelpers.ts
+++ b/packages/tests/src/plugins/rum/testHelpers.ts
@@ -4,7 +4,7 @@
 
 import { TrackedFilesMatcher } from '@dd/core/plugins/git/trackedFilesMatcher';
 import type { RepositoryData } from '@dd/core/types';
-import type { Metadata } from '@dd/rum-plugins/sourcemaps/payload';
+import type { Metadata, MultipartValue, Payload } from '@dd/rum-plugins/sourcemaps/payload';
 import type {
     RumSourcemapsOptions,
     RumSourcemapsOptionsWithDefaults,
@@ -30,6 +30,7 @@ export const getSourcemapsConfiguration = (
     options: Partial<RumSourcemapsOptions> = {},
 ): RumSourcemapsOptionsWithDefaults => {
     return {
+        bailOnError: false,
         dryRun: false,
         maxConcurrency: 10,
         intakeUrl: INTAKE_URL,
@@ -70,3 +71,36 @@ export const getRepositoryDataMock = (options: Partial<RepositoryData> = {}): Re
         ...options,
     };
 };
+
+export const getPayloadMock = (
+    options: Partial<Payload> = {},
+    content: [string, MultipartValue][] = [],
+): Payload => {
+    return {
+        content: new Map([
+            [
+                'source_map',
+                {
+                    type: 'file',
+                    path: '/path/to/sourcemap.js.map',
+                    options: { filename: 'source_map', contentType: 'application/json' },
+                },
+            ],
+            [
+                'minified_file',
+                {
+                    type: 'file',
+                    path: '/path/to/minified.min.js',
+                    options: {
+                        filename: 'minified_file',
+                        contentType: 'application/javascript',
+                    },
+                },
+            ],
+            ...content,
+        ]),
+        errors: [],
+        warnings: [],
+        ...options,
+    };
+};
diff --git a/packages/tests/src/plugins/rum/validate.test.ts b/packages/tests/src/plugins/rum/validate.test.ts
index 2ad87d389..e1d5b17dd 100644
--- a/packages/tests/src/plugins/rum/validate.test.ts
+++ b/packages/tests/src/plugins/rum/validate.test.ts
@@ -77,6 +77,7 @@ describe('RUM Plugins validate', () => {
 
         expect(errors.length).toBe(0);
         expect(config).toEqual({
+            bailOnError: false,
            dryRun: false,
            maxConcurrency: 20,
            intakeUrl: 'https://sourcemap-intake.datadoghq.com/api/v2/srcmap',
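
For reference, a minimal sketch of how a consumer build could opt into the new option once this change lands. This is illustrative only and not part of the diff: the import path (`@datadog/webpack-plugin`) and the surrounding webpack configuration are assumptions, while the option shape follows the README changes above.

```ts
// Hypothetical webpack.config.ts for a consumer of the plugin.
// With bailOnError enabled, a failed sourcemap upload throws and fails the build
// instead of only being logged as an error.
import { datadogWebpackPlugin } from '@datadog/webpack-plugin';

export default {
    // ...the rest of the webpack configuration.
    plugins: [
        datadogWebpackPlugin({
            rum: {
                sourcemaps: {
                    bailOnError: true,
                    // ...other sourcemaps options (minifiedPathPrefix, maxConcurrency, etc.).
                },
            },
        }),
    ],
};
```

With `bailOnError: true`, `upload()` rethrows the first upload error and `sendSourcemaps()` propagates it; with the default `false`, failures are collected and reported as errors and warnings without interrupting the build.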