diff --git a/.gitignore b/.gitignore index 19503773ce931..842c59fac24ff 100644 --- a/.gitignore +++ b/.gitignore @@ -94,6 +94,9 @@ content/en/real_user_monitoring/ios/crash_reporting.md content/en/real_user_monitoring/ios/* content/en/real_user_monitoring/reactnative.md +# Synthetics +content/en/synthetics/cicd_integrations/github_actions.md + # serverless content/en/serverless/libraries_integrations/forwarder.md content/en/serverless/libraries_integrations/plugin.md diff --git a/Makefile b/Makefile index c60cf0943f766..37f37622b138e 100644 --- a/Makefile +++ b/Makefile @@ -115,6 +115,8 @@ clean-auto-doc: ##Remove all doc automatically created rm -f content/en/serverless/libraries_integrations/macro.md ;fi @if [ content/en/serverless/libraries_integrations/cli.md ]; then \ rm -f content/en/serverless/libraries_integrations/cli.md ;fi + @if [ content/en/synthetics/cicd_integrations/github_actions.md ]; then \ + rm -f content/en/synthetics/cicd_integrations/github_actions.md ;fi @if [ content/en/real_user_monitoring/android/_index.md ]; then \ rm -f content/en/real_user_monitoring/android/_index.md ;fi @if [ content/en/real_user_monitoring/android/data_collected.md ]; then \ diff --git a/content/en/synthetics/ci_results_explorer.md b/content/en/synthetics/ci_results_explorer.md index 179c2c565a5d2..5f37cbaed4e38 100644 --- a/content/en/synthetics/ci_results_explorer.md +++ b/content/en/synthetics/ci_results_explorer.md @@ -8,7 +8,7 @@ further_reading: - link: "https://www.datadoghq.com/blog/datadog-synthetic-ci-cd-testing/" tag: "Blog" text: "Incorporate Datadog Synthetic tests into your CI/CD pipeline" -- link: "https://learn.datadoghq.com/course/view.php?id=37" +- link: "https://learn.datadoghq.com/enrol/index.php?id=37" tag: "Learning Center" text: "Learn how to run Synthetic tests in CI/CD pipelines" - link: "/synthetics/search/" @@ -19,7 +19,9 @@ further_reading: ## Overview -The CI Results Explorer gives you visibility into your CI jobs that are executing 
Synthetic tests. +The CI Results Explorer provides visibility into the CI jobs executing your Synthetic tests. + +{{< img src="synthetics/ci_results_explorer/ci_results_explorer.png" alt="CI Results Explorer" style="width:100%;">}} You can: @@ -28,47 +30,55 @@ You can: * Keep track of the progress of your tests in CI pipelines. * Identify flaky tests to fix. -{{< img src="synthetics/ci_results_explorer/ci_results_explorer.png" alt="CI Results Explorer" style="width:100%;">}} - -## Exploring batches +## Explore test batches -The CI Results Explorer displays batches of tests that were run through [Synthetics and CI/CD][1]. Every batch corresponds to a call to the Datadog API (through the NPM package or directly through the API endpoint) to trigger one or several test executions. +The CI Results Explorer displays batches of tests run by [Synthetics and your CI/CD provider][1]. Every batch corresponds with a call to the Datadog API (through one of your [CI/CD Integrations][1], the [datadog-ci][2] NPM package, or directly through the API endpoint) and triggers one or several test executions. -Click on a batch to open a side panel containing batch CI metadata and batch test results. Explore the test executions performed as part of the batch and pinpoint test failures. Click on a failing test result to see the detailed **Test Result** page and investigate the root cause of the issue. +1. Click on a batch to open a side panel containing batch CI metadata and batch test results. +2. Explore the test executions performed as part of the batch and pinpoint test failures. +3. Click on a failing test result to see the detailed **Test Result** page and investigate the root cause of the issue. ## Search ### Facets and tags -The panel on the left side of the page lists several facets you can use to search through your batches: +The panel on the left side of the page lists several facets you can use to search through your batches. 
-**Batch** facets allow you to filter on attributes of your batches: +**Batch** facets allow you to filter on attributes of your batches. | Facet | Description | |------------------|-------------------------------------------------------------| -| `Summary Status` | The status of the batch: `Passed`, `Failed`, `In Progress`. | +| `Summary Status` | The status of the batch: `Passed`, `Failed`, and `In Progress`. | | `Duration` | The overall duration of the batch. | | `ID` | The batch ID. | -**Git** facets allow you to filter on Git-related attributes of your batches: +**Git** facets allow you to filter on Git-related attributes of your batches. | Facet | Description | |-------------|-------------------------------------------| | `Branch` | The branch associated with the batch. | | `Commit SHA`| The commit SHA associated with the batch. | +| `Repository URL`| The URL of the Git repository associated with the batch. | +| `Tag` | The Git tag associated with the batch. | -**CI** facets allow you to filter on CI-related attributes of your batches: +**CI** facets allow you to filter on CI-related attributes of your batches. | Facet | Description | |----------------|---------------------------------------------| | `CI Provider` | The CI provider associated with the batch. | +| `Job Name` | The job name associated with the batch. | +| `Job URL` | The job URL associated with the batch. | +| `Pipeline ID` | The pipeline ID associated with the batch. | +| `Pipeline Name` | The pipeline or repository name associated with the batch. | +| `Pipeline Number` | The pipeline or build number associated with the batch. | | `Pipeline URL` | The pipeline URL associated with the batch. | +| `Stage Name` | The stage name associated with the batch. | -**Test result** facets allow you to filter on attributes of the test results executed as part of your batches: +**Test result** facets allow you to filter on attributes of the test results executed as part of your batches. 
| Facet | Description | |------------------|---------------------------------------------------------------------------------------------------------| -| `Execution Rule` | The execution rule associated with the test result of the batch: `Blocking`, `Non Blocking`, `Skipped`. | +| `Execution Rule` | The execution rule associated with the test result of the batch: `Blocking`, `Non Blocking`, and `Skipped`. | | `Fast Retries` | The number of fast retries associated with the test result of the batch. | | `Location` | The location associated with the test result of the batch. | | `Test ID` | The test ID associated with the test result of the batch. | @@ -76,11 +86,12 @@ The panel on the left side of the page lists several facets you can use to searc ### Create your query -To query the CI Results Explorer data, use the [same query syntax][2] as on the **Tests** page. +To query the CI Results Explorer data, use the [same query syntax][3] as you do on the **Tests** page. ## Further Reading {{< partial name="whats-next/whats-next.html" >}} [1]: /synthetics/cicd_integrations -[2]: /synthetics/search/ +[2]: https://www.npmjs.com/package/@datadog/datadog-ci +[3]: /synthetics/search/ diff --git a/content/en/synthetics/cicd_integrations/_index.md b/content/en/synthetics/cicd_integrations/_index.md index cff9b60a1b100..bd81ca4dd0666 100644 --- a/content/en/synthetics/cicd_integrations/_index.md +++ b/content/en/synthetics/cicd_integrations/_index.md @@ -1,7 +1,7 @@ --- title: Synthetics and CI/CD kind: documentation -description: Run Synthetic tests on-demand in your CI/CD pipelines. +description: Run Synthetic tests on-demand or at predefined intervals in your CI/CD pipelines. 
aliases: - /synthetics/ci - /synthetics/cicd_testing @@ -9,392 +9,60 @@ further_reading: - link: "https://www.datadoghq.com/blog/datadog-synthetic-ci-cd-testing/" tag: "Blog" text: "Incorporate Datadog Synthetic tests into your CI/CD pipeline" -- link: "https://learn.datadoghq.com/course/view.php?id=37" +- link: "https://www.datadoghq.com/blog/shift-left-testing-best-practices/" + tag: "Blog" + text: "Best practices for shift-left testing" +- link: "https://learn.datadoghq.com/enrol/index.php?id=37" tag: "Learning Center" text: "Learn how to run Synthetic tests in CI/CD pipelines" -- link: "/synthetics/browser_tests/" - tag: "Documentation" - text: "Configure a Browser Test" - link: "/synthetics/api_tests/" tag: "Documentation" text: "Configure an API Test" -- link: "https://www.datadoghq.com/blog/shift-left-testing-best-practices/" - tag: "Blog" - text: "Best practices for shift-left testing" +- link: "/synthetics/multistep" + tag: "Documentation" + text: "Configure a Multistep API Test" +- link: "/synthetics/browser_tests/" + tag: "Documentation" + text: "Configure a Browser Test" --- -In addition to running tests at pre-defined intervals, you can run Datadog Synthetic tests on-demand by using the `@datadog/datadog-ci` package or the API. Run Datadog Synthetic tests in your continuous integration (CI) pipelines to block branches from being deployed and breaking your application in production. +## Overview -Use Synthetic CI/CD testing to also run tests as part of your continuous delivery (CD) process and evaluate the state of your applications and services in production immediately after a deployment finishes. You can detect potential regressions that may impact your users and automatically trigger a rollback when a critical test fails. +In addition to running tests at predefined intervals, you can run Datadog Synthetic tests on-demand by using the `@datadog/datadog-ci` package or the API. 
Run Datadog Synthetic tests in your continuous integration (CI) pipelines to block branches from being deployed and breaking your application in production. + +Use Synthetics and CI/CD to also run tests as part of your continuous delivery (CD) process and evaluate the state of your applications and services in production immediately after a deployment finishes. You can detect potential regressions that may impact your users and automatically trigger a rollback when a critical test fails. This functionality reduces time spent fixing issues in production by proactively catching bugs and regressions earlier in the process, allowing your engineering teams to focus on non-urgent work instead. To get started, see [Integrations](#integrations) and [use the API](#use-the-api) or the [open-source CLI package](#use-the-cli). -## CLI usage - -### Package installation - -The package is published under [@datadog/datadog-ci][1] in the NPM registry. - -{{< tabs >}} -{{% tab "NPM" %}} - -Install the package through NPM: - -```bash -npm install --save-dev @datadog/datadog-ci -``` - -{{% /tab %}} -{{% tab "Yarn" %}} - -Install the package through Yarn: - -```bash -yarn add --dev @datadog/datadog-ci -``` - -{{% /tab %}} -{{< /tabs >}} - -### Setup the client - -To setup your client, Datadog API and application keys need to be configured. These keys can be defined in three different ways: - -1. As environment variables: - - ```bash - export DATADOG_API_KEY="" - export DATADOG_APP_KEY="" - ``` - -2. Passed to the CLI when running your tests: - - ```bash - datadog-ci synthetics run-tests --apiKey "" --appKey "" - ``` - -3. Or defined in a global configuration file: - - The global JSON configuration file can specify additional advanced options. Specify the path to this file using the flag `--config` [when launching your tests](#run-tests). If the name of your global configuration file is set to `datadog-ci.json`, it defaults to it. 
- -In the global configuration file, you can configure the following options: - -`apiKey` -: The API key used to query the Datadog API. - -`appKey` -: The application key used to query the Datadog API. - -`datadogSite` -: The Datadog instance to which request is sent. The default is `datadoghq.com`. Your Datadog site is {{< region-param key="dd_site" code="true" >}}. - -`files` -: Glob pattern to detect synthetic tests config files. - -`global` -: Overrides of synthetic tests applied to all tests ([see below for description of each field](#configure-tests)). +## Integrations -`proxy` -: The proxy to be used for outgoing connections to Datadog. `host` and `port` keys are mandatory arguments, `protocol` key defaults to `http`. Supported values for `protocol` key are `http`, `https`, `socks`, `socks4`, `socks4a`, `socks5`, `socks5h`, `pac+data`, `pac+file`, `pac+ftp`, `pac+http`, `pac+https`. The library used to configure the proxy is [proxy-agent][2] library. - -`subdomain` -: The name of the custom subdomain set to access your Datadog application. If the URL used to access Datadog is `myorg.datadoghq.com` the `subdomain` value then needs to be set to `myorg`. 
- -**Example global configuration file**: - -```json -{ - "apiKey": "", - "appKey": "", - "datadogSite": "datadoghq.com", - "files": "{,!(node_modules)/**/}*.synthetics.json", - "global": { - "allowInsecureCertificates": true, - "basicAuth": { "username": "test", "password": "test" }, - "body": "{\"fakeContent\":true}", - "bodyType": "application/json", - "cookies": "name1=value1;name2=value2;", - "deviceIds": ["laptop_large"], - "followRedirects": true, - "headers": { "": "" }, - "locations": ["aws:us-west-1"], - "retry": { "count": 2, "interval": 300 }, - "executionRule": "blocking", - "startUrl": "{{URL}}?static_hash={{STATIC_HASH}}", - "variables": { "titleVariable": "new value" }, - "pollingTimeout": 180000 - }, - "proxy": { - "auth": { - "username": "login", - "password": "pwd" - }, - "host": "127.0.0.1", - "port": 3128, - "protocol": "http" - }, - "subdomain": "subdomainname" -} -``` - -### Configure tests - -By default, the client automatically discovers and runs all tests specified in `**/*.synthetics.json` files (the path can be configured in the [global configuration file](#setup-the-client). These files have a `tests` key, which contains an array of objects with the IDs of the tests to run and any potential configuration overrides for these tests. - -**Example basic test configuration file**: - -```json -{ - "tests": [ - { - "id": "" - }, - { - "id": "" - } - ] -} -``` - -#### Further configuration - -The default configurations used for the tests are the original tests' configurations (visible in the UI or when [getting your tests' configurations from the API][3]). - -However, in the context of your CI deployment, you can optionally decide to override some (or all) of your tests parameters by using the below overrides. If you want to define overrides for all of your tests, these same parameters can be set at the [global configuration file](#setup-the-client) level. - -`allowInsecureCertificates` -: **Type**: boolean
-Disable certificate checks in HTTP tests. - -`basicAuth` -: **Type**: object
-Credentials to provide in case a basic authentication is encountered in HTTP or browser tests. - - `username`: string. Username to use in basic authentication. - - `password`: string. Password to use in basic authentication. - -`body` -: **Type**: string
-Data to send in HTTP tests. - -`bodyType` -: **Type**: string
-Type of the data sent in HTTP tests. - -`cookies` -: **Type**: string
-Use provided string as cookie header in HTTP or browser tests. - -`deviceIds` -: **Type**: array
-List of devices on which to run the browser test. - -`followRedirects` -: **Type**: boolean
-Indicates whether to follow redirections in HTTP tests. - -`headers` -: **Type**: object
-Headers to replace in the HTTP or browser test. This object should contain, as keys, the name of the header to replace and, as values, the new value of the header. - -`locations` -: **Type**: array
-List of locations from which the test should be run. - -`retry` -: **Type**: object
-Retry policy for the test: - - `count`: integer. Number of attempts to perform in case of test failure. - - `interval`: integer. Interval between the attempts (in milliseconds). - -`executionRule` -: **Type**: string
-Execution rule of the test: defines the behavior of the CLI in case of a failing test: - - `blocking`: The CLI returns an error if the test fails. - - `non_blocking`: The CLI only prints a warning if the test fails. - - `skipped`: The test is not executed at all. - -`startUrl` -: **Type**: string
-New start URL to provide to the HTTP or browser test. - -`variables` -: **Type**: object
-Variables to replace in the test. This object should contain, as keys, the name of the variable to replace and, as values, the new value of the variable. - -`pollingTimeout` -: **Type**: integer
-The duration in milliseconds after which `datadog-ci` stops polling for test results. The default is 120,000 ms. At the CI level, test results completed after this duration are considered failed. - -**Note**: Tests' overrides take precedence over global overrides. - -**Example advanced test configuration file**: - -```json -{ - "tests": [ - { - "id": "", - "config": { - "allowInsecureCertificates": true, - "basicAuth": { "username": "test", "password": "test" }, - "body": "{\"fakeContent\":true}", - "bodyType": "application/json", - "cookies": "name1=value1;name2=value2;", - "deviceIds": ["laptop_large"], - "followRedirects": true, - "headers": { "": "" }, - "locations": ["aws:us-west-1"], - "retry": { "count": 2, "interval": 300 }, - "executionRule": "skipped", - "startUrl": "{{URL}}?static_hash={{STATIC_HASH}}", - "variables": { "titleVariable": "new value" }, - "pollingTimeout": 180000 - } - } - ] -} -``` +{{< whatsnext desc="With Synthetics and CI/CD, you can run Synthetic tests in the CI platform provider of your choice:" >}} + {{< nextlink href="synthetics/cicd_integrations/github_actions" >}}GitHub Actions{{< /nextlink >}} + {{< nextlink href="synthetics/cicd_integrations/jenkins" >}}Jenkins{{< /nextlink >}} +{{< /whatsnext >}} -#### Execution rule +## Use the CLI -The _execution rule_ of each test can also be defined in-app, at the test level. Use the drop-down menu next to **CI Execution**. +The [`@datadog/datadog-ci` package][1] allows you to run Synthetics tests directly within your CI/CD pipeline. -{{< img src="synthetics/cicd_integrations/execution_rule.mp4" alt="CI/CD Integrations Execution Rule" video="true" width="100%">}} +To use the [`@datadog/datadog-ci` NPM package][2], see [Configuration][3]. -The execution rule associated with the test is always the most restrictive one that was set in the configuration file. From the most restrictive to the least restrictive: `skipped`, `non_blocking`, `blocking`. 
For example, if your test is configured to be `skipped` in the UI but to `blocking` in the configuration file, it is `skipped` when running your tests. +You can also trigger tests by searching with tags. For example: `"ci": "datadog-ci synthetics run-tests --config fileconfig.json -s 'tag:staging'"`. -#### Start URL +**Note:** This command works as an argument. Do not use this in your configuration files. -You can configure on which url your test starts by providing a `startUrl` to your test object and build your own starting URL using any part of your test's original starting URL with the following variables: +## Use the API +The Synthetics API endpoints allow you to launch tests at any stage in your staging and deployment lifecycle. For example, after a canary deployment with an automated rollback. -`URL` -: Test's original starting URL
-**Example**: `https://www.example.org:81/path/to/something?abc=123#target` +Use the API endpoints to quickly verify that a new deployment does not introduce any regression. See the [Trigger tests from CI/CD pipelines][4] and [Get details of batch][5] endpoints to use them within your CI through cURL or a supported client. -`DOMAIN` -: Test's domain name
-**Example**: `example.org` +### Trigger tests from CI/CD pipelines -`HASH` -: Test's hash
-**Example**: `#target` - -`HOST` -: Test's host
-**Example**: `www.example.org:81` - -`HOSTNAME` -: Test's hostname
-**Example**: `www.example.org` - -`ORIGIN` -: Test's origin
-**Example**: `https://www.example.org:81` - -`PARAMS` -: Test's query parameters
-**Example**: `?abc=123` - -`PATHNAME` -: Test's URl path
-**Example**: `/path/to/something` - -`PORT` -: Test's host port
-**Example**: `81` - -`PROTOCOL` -: Test's protocol
-**Example**: `https:` - -`SUBDOMAIN` -: Test's sub domain
-**Example**: `www` - -For instance, if your test's starting URL is `https://www.example.org:81/path/to/something?abc=123#target`, it can be written as: - -* `{{PROTOCOL}}//{{SUBDOMAIN}}.{{DOMAIN}}:{{PORT}}{{PATHNAME}}{{PARAMS}}{{HASH}}` -* `{{PROTOCOL}}//{{HOST}}{{PATHNAME}}{{PARAMS}}{{HASH}}` -* `{{URL}}` - -You can also leverage your own environment variables to customize your start URL. - -**Note:** If you have environment variables with names corresponding to one of the above reserved variables, your environment variables will be ignored and replaced by the corresponding component parsed from your test `startUrl`. - -### Running tests - -You can decide to have the CLI autodiscover all your `**/*.synthetics.json` Synthetic tests (or all the tests associated to the path specified in your [global configuration file](#setup-the-client)) or to specify the tests you want to run using the `-p,--public-id` flag. - -Run tests by executing the CLI: - -{{< tabs >}} -{{% tab "Yarn" %}} - -```bash -yarn datadog-ci synthetics run-tests -``` - -**Note**: If you are launching your tests with a custom global configuration file, append your command with `--config }} - -### Use the testing tunnel - -The [@datadog/datadog-ci][1] NPM package also comes with a tunnel functionality allowing you to swiftly trigger Synthetic tests on your internal applications. The testing tunnel creates an end-to-end encrypted HTTP proxy between your infrastructure and Datadog, allowing all test requests sent through the CLI to be automatically routed through the `datadog-ci` client, consequently enabling Datadog to run test on your internal applications. - -To learn how to get started using the testing tunnel, see the [Synthetics testing tunnel documentation][4]. - -### Visualize test results - -#### In your CI - -You can see the outcome of test executions directly in your CI as your tests are being executed. 
- -{{< img src="synthetics/cicd_integrations/successful_test_result.png" alt="Successful Test Result" style="width:100%;">}} - -You can identify what caused a test to fail by looking at the execution logs and searching for causes of the failed assertion: - -{{< img src="synthetics/cicd_integrations/failed_test_result.png" alt="Failed Test Result" style="width:100%;">}} - -#### In Datadog application - -You can also see your CI test results listed in the [CI Results Explorer][5] and on test details pages: - -{{< img src="synthetics/ci_results_explorer/ci_results_explorer.png" alt="CI Results Explorer" style="width:100%;">}} - -## API usage - -The trigger endpoint provides the list of triggered checks alongside their result identifiers. A polling endpoint is available to obtain the full results of the tests if they are available. - -### Trigger tests endpoint - -The test triggering endpoint supports starting up to 50 tests in one request. +The test triggering endpoint supports up to 50 tests in one request. * **Endpoint**: `https://api.{{< region-param key="dd_site" >}}/api/v1/synthetics/tests/trigger/ci` * **Method**: `POST` @@ -408,311 +76,29 @@ The test triggering endpoint supports starting up to 50 tests in one request. } ``` -The `TEST_TO_TRIGGER` objects are composed of the `public_id` (required) of the test to trigger and optional configuration overrides ([see below](#configure-tests) for a description of each field). +The `TEST_TO_TRIGGER` objects compose of the required `public_id` for the test you want to trigger and the optional configuration overrides. For descriptions of each field, see [Configure tests][6]. -The public identifier of a test can be either the identifier of the test found in the URL of a test details page (for `https://app.datadoghq.com/synthetics/details/abc-def-ghi`, it would be `abc-def-ghi`) or the full URL to the details page (that is, `https://app.datadoghq.com/synthetics/details/abc-def-ghi`). 
+A test's public identifier is either the identifier of the test found in the URL of a test's details page (for example: the identifier for `https://app.datadoghq.com/synthetics/details/abc-def-ghi` is `abc-def-ghi`) or the full URL of a test's details page (for example: `https://app.datadoghq.com/synthetics/details/abc-def-ghi`). -#### Example request +For more information, see the [Synthetics API endpoint documentation][4]. -```bash -#!/bin/sh +### Get details of batch -api_key="" -app_key="" - -curl -X POST \ --H 'Content-Type: application/json' \ --H "DD-API-KEY: ${api_key}" \ --H "DD-APPLICATION-KEY: ${app_key}" \ --d '{ - "tests": [ - { - "public_id": "abc-def-ghi", - "allowInsecureCertificates": true, - "basicAuth": { "username": "test", "password": "test" }, - "body": "{\"fakeContent\":true}", - "bodyType": "application/json", - "cookies": "name1=value1;name2=value2;", - "deviceIds": ["laptop_large"], - "followRedirects": true, - "headers": { "NEW_HEADER": "NEW VALUE" }, - "locations": ["aws:us-west-1"], - "retry": { "count": 2, "interval": 300 }, - "startUrl": "http://new.url/", - "variables": { "titleVariable": "new value" } - } - ] -}' "https://api.{{< region-param key="dd_site" >}}/api/v1/synthetics/tests/trigger/ci" -``` +The get batch details endpoint retrieves results for the group of tests triggered in your CI/CD pipeline, otherwise known as a batch. You must provide the `batch_id` for the relevant CI execution. -#### Example response - -```json -{ - "batch_id": null, - "results": [ - { - "result_id": "0123456789012345678", - "public_id": "abc-def-ghi", - "location": 30019 - } - ], - "triggered_check_ids": [ - "abc-def-ghi" - ], - "locations": [ - { - "display_name": "N. 
California (AWS)", - "name": "aws:us-west-1", - "region": "Americas", - "is_active": true, - "is_public": true, - "id": 30019 - } - ] -} -``` - -### Poll results endpoint - -* **Endpoint**: `https://api.{{< region-param key="dd_site" >}}/api/v1/synthetics/tests/poll_results` +* **Endpoint**: `https://api.{{< region-param key="dd_site" >}}/api/v1/synthetics/ci/batch/{batch_id}` * **Method**: `GET` -* **Parameters**: A JSON array containing the list of result identifiers to obtain results from. - -#### Example request - -```bash -#!/bin/sh - -api_key="" -app_key="" - -curl -G \ - "https://api.{{< region-param key="dd_site" >}}/api/v1/synthetics/tests/poll_results" \ - -H "DD-API-KEY: ${api_key}" \ - -H "DD-APPLICATION-KEY: ${app_key}" \ - -d "result_ids=[220123456789012345678]" -``` - -#### Example response - -{{< tabs >}} - -{{% tab "API Test" %}} - -```json -{ - "results": [ - { - "check": { - "config": { - "assertions": [ - { - "operator": "lessThan", - "target": 2000, - "type": "responseTime" - } - ], - "configVariables": [], - "request": { - "basicAuth": { - "password": "test", - "username": "test" - }, - "body": "{\"fakeContent\":true}", - "headers": { - "Content-Type": "application/json", - "Cookie": "name1=value1;name2=value2;", - "NEW_HEADER": "NEW VALUE" - }, - "method": "GET", - "timeout": 30, - "url": "http://new.url/" - } - }, - "locations": [ - 30019 - ], - "options": { - "allow_insecure": true, - "follow_redirects": true, - "min_failure_duration": 0, - "min_location_failed": 1, - "monitor_options": { - "include_tags": true, - "locked": false, - "new_host_delay": 300, - "notify_audit": false, - "notify_no_data": false, - "renotify_interval": 0 - }, - "retry": { - "count": 2, - "interval": 300 - }, - "tick_every": 60 - }, - "subtype": "http", - "type": "api" - }, - "check_id": "7654321", - "check_version": 2, - "config_override": { - "allowInsecureCertificates": true, - "basicAuth": { - "password": "test", - "username": "test" - }, - "body": 
"{\"fakeContent\":true}", - "bodyType": "application/json", - "cookies": "name1=value1;name2=value2;", - "deviceIds": [ - "laptop_large" - ], - "followRedirects": true, - "headers": { - "Content-Type": "application/json", - "Cookie": "name1=value1;name2=value2;", - "NEW_HEADER": "NEW VALUE" - }, - "locations": [ - "aws:us-west-1" - ], - "public_id": "abc-def-ghi", - "retry": { - "count": 2, - "interval": 300 - }, - "startUrl": "http://example.org/", - "variables": { - "titleVariable": "new value" - } - }, - "dc_id": 30019, - "orgID": 2, - "result": { - "assertionResults": [ - { - "actual": 27.92, - "valid": true - } - ], - "dnsServer": "8.8.8.8", - "eventType": "finished", - "healthCheckRatio": 1, - "httpStatusCode": 400, - "mainDC": "us1.prod", - "passed": true, - "resolvedIp": "93.184.216.34", - "responseSize": 349, - "runType": 2, - "subtype": "http", - "timings": { - "dns": 24.6, - "download": 0.1, - "firstByte": 1.4, - "tcp": 1.8, - "total": 27.9 - }, - "unhealthy": false - }, - "resultID": "220123456789012345678", - "timestamp": 1612404331304 - } - ] -} -``` - -{{% /tab %}} - -{{% tab "Browser Test" %}} - -```json -{ - "results": [ - { - "check_id": "123456", - "timestamp": 1601639904704, - "orgID": 2, - "result": { - "runType": 2, - "artifactsBucketKey": "2/e2e-tests/abc-def-ghi/results/17221670732431167/chrome.laptop_large/artifacts__1601639913277.json", - "browserType": "chrome", - "eventType": "finished", - "stepDetails": [ - { - "browserErrors": [], - "skipped": false, - "description": "Navigate to start URL", - "warnings": [], - "url": "about:blank", - "value": "https://example.com", - "duration": 1002, - "allowFailure": false, - "screenshotBucketKey": "2/e2e-tests/abc-def-ghi/results/17221670732431167/chrome.laptop_large/step-0__1601639913294.jpeg", - "type": "goToUrlAndMeasureTti", - "stepId": -1 - }, - { - "browserErrors": [], - "stepElementUpdates": { - "version": 1, - "multiLocator": { - "ab": 
"/*[local-name()=\"html\"][1]/*[local-name()=\"body\"][1]/*[local-name()=\"div\"][1]/*[local-name()=\"h1\"][1]", - "co": "[{\"text\":\"example domain\",\"textType\":\"directText\"}]", - "cl": "/*[local-name()=\"html\"]/*[local-name()=\"body\"]/*[local-name()=\"div\"][1]/*[local-name()=\"h1\"][1]", - "at": "/*[local-name()=\"html\"]/*[local-name()=\"body\"]/*[local-name()=\"div\"][1]/*[local-name()=\"h1\"][1]", - "clt": "/descendant::*[text()[normalize-space(translate(., 'ABCDEFGHIJKLMNOPQRSTUVWXYZÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞŸŽŠŒ', 'abcdefghijklmnopqrstuvwxyzàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿžšœ')) = \"example domain\"]]", - "ro": "//*[local-name()=\"h1\"]" - } - }, - "skipped": false, - "description": "Test heading \"Example Domain\" content", - "url": "https://www.example.com/", - "checkType": "contains", - "value": "Example Domain", - "duration": 204, - "allowFailure": false, - "screenshotBucketKey": "2/e2e-tests/abc-def-ghi/results/17221670732431167/chrome.laptop_large/step-1__1601639913410.jpeg", - "type": "assertElementContent", - "stepId": 2275176 - } - ], - "browserVersion": "84.0.4147.135", - "mainDC": "us1.prod", - "timeToInteractive": 269, - "device": { - "name": "Laptop Large", - "height": 1100, - "width": 1440, - "userAgent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.135 Safari/537.36", - "id": "chrome.laptop_large", - "isMobile": false, - "browser": "chrome" - }, - "passed": true, - "duration": 1206, - "startUrl": "https://www.example.com", - "metadata": {} - }, - "dc_id": 30005, - "resultID": "17221670732431167", - "metadata": {} - } - ] -} -``` - -{{% /tab %}} +* **Parameters**: The `batch_id` for the batch of test results you want to inspect. -{{< /tabs >}} +For more information, see the [Synthetics API endpoint documentation][5]. 
## Further Reading {{< partial name="whats-next/whats-next.html" >}} -[1]: https://www.npmjs.com/package/@datadog/datadog-ci -[2]: https://github.com/TooTallNate/node-proxy-agent -[3]: /api/v1/synthetics/#get-test -[4]: /synthetics/testing_tunnel/ -[5]: /synthetics/ci_results_explorer +[1]: https://github.com/DataDog/datadog-ci +[2]: https://www.npmjs.com/package/@datadog/datadog-ci +[3]: /synthetics/cicd_integrations/configuration +[4]: /api/latest/synthetics/#trigger-tests-from-cicd-pipelines +[5]: /api/latest/synthetics/#get-details-of-batch +[6]: /synthetics/cicd_integrations/configuration#configure-tests diff --git a/content/en/synthetics/cicd_integrations/configuration.md b/content/en/synthetics/cicd_integrations/configuration.md index e69de29bb2d1d..e9b1809fb03d8 100644 --- a/content/en/synthetics/cicd_integrations/configuration.md +++ b/content/en/synthetics/cicd_integrations/configuration.md @@ -0,0 +1,394 @@ +--- +title: CI/CD Integrations Configuration +kind: documentation +description: Configure Synthetics to run test in your CI/CD pipelines +further_reading: +- link: "/synthetics/cicd_integrations" + tag: "Documentation" + text: "Learn about Synthetics and CI/CD" +- link: "/synthetics/ci_results_explorer" + tag: "Documentation" + text: "Learn about the CI Results Explorer" +- link: "/synthetics/testing_tunnel" + tag: "Documentation" + text: "Learn about the Testing Tunnel" +--- + +## Overview + +Use the `@datadog-ci` NPM package to run Synthetic tests directly within your CI/CD pipeline. You can automatically halt a build, block a deployment, and roll back a deployment when a Synthetics test detects a regression. + +To configure which URL your test starts on, provide a `startUrl` to your test object. Build your own starting URL with any part of your test's original starting URL and the following environment variables: + +### Install a package + +The package is published under [@datadog/datadog-ci][1] in the NPM registry. 
+ +{{< tabs >}} +{{% tab "NPM" %}} + +Install the package through NPM: + +```bash +npm install --save-dev @datadog/datadog-ci +``` + +{{% /tab %}} +{{% tab "Yarn" %}} + +Install the package through Yarn: + +```bash +yarn add --dev @datadog/datadog-ci +``` + +{{% /tab %}} +{{< /tabs >}} + +### Setup a client + +To setup your client, Datadog API and application keys need to be configured. These keys can be defined in three different ways: + +1. As environment variables: + + ```bash + export DATADOG_API_KEY="" + export DATADOG_APP_KEY="" + ``` + +2. Passed to the CLI when running your tests: + + ```bash + datadog-ci synthetics run-tests --apiKey "" --appKey "" + ``` + +3. Or defined in a global configuration file: + + The global JSON configuration file can specify additional advanced options. Specify the path to this file using the flag `--config` [when launching your tests](#run-tests). If you set the name of your global configuration file to `datadog-ci.json`, that name is the default. + +In the global configuration file, you can configure the following options: + +`apiKey` +: The API key used to query the Datadog API. + +`appKey` +: The application key used to query the Datadog API. + +`datadogSite` +: The Datadog instance to which request is sent. The default is `datadoghq.com`. Your Datadog site is {{< region-param key="dd_site" code="true" >}}. + +`files` +: Glob pattern to detect Synthetic tests config files. + +`global` +: Overrides of Synthetic tests applied to all tests ([see below for descriptions of each field](#configure-tests)). + +`proxy` +: The proxy to be used for outgoing connections to Datadog. `host` and `port` keys are mandatory arguments, `protocol` key defaults to `http`. Supported values for `protocol` key are `http`, `https`, `socks`, `socks4`, `socks4a`, `socks5`, `socks5h`, `pac+data`, `pac+file`, `pac+ftp`, `pac+http`, `pac+https`. The library used to configure the proxy is the [proxy-agent][2] library. 
+ +`subdomain` +: The name of the custom subdomain set to access your Datadog application. If the URL used to access Datadog is `myorg.datadoghq.com`, the `subdomain` value then needs to be set to `myorg`. + +`tunnel` +: Use the [secure tunnel][3] to execute your test batch. + +`testSearchQuery` +: Pass a query to select which Synthetic tests to run. If you are running tests in the CLI, use the `-s` flag. + +For example: + +{{< code-block lang="json" filename="Global Configuration File" disable_copy="false" collapsible="true" >}} +{ + "apiKey": "", + "appKey": "", + "datadogSite": "datadoghq.com", + "files": "{,!(node_modules)/**/}*.synthetics.json", + "global": { + "allowInsecureCertificates": true, + "basicAuth": { "username": "test", "password": "test" }, + "body": "{\"fakeContent\":true}", + "bodyType": "application/json", + "cookies": "name1=value1;name2=value2;", + "deviceIds": ["laptop_large"], + "followRedirects": true, + "headers": { "": "" }, + "locations": ["aws:us-west-1"], + "retry": { "count": 2, "interval": 300 }, + "executionRule": "blocking", + "startUrl": "{{URL}}?static_hash={{STATIC_HASH}}", + "variables": { "titleVariable": "new value" }, + "pollingTimeout": 180000 + }, + "proxy": { + "auth": { + "username": "login", + "password": "pwd" + }, + "host": "127.0.0.1", + "port": 3128, + "protocol": "http" + }, + "subdomain": "subdomainname", + "tunnel": true +} +{{< /code-block >}} + +### Configure tests + +By default, the client automatically discovers and runs all tests specified in `**/*.synthetics.json` files. This path can be configured in the [global configuration file](#setup-a-client). + +These files have a `tests` key which contains an array of objects with the IDs of the tests to run and any potential test configuration overrides. 
+ +For example: + +{{< code-block lang="json" filename="Basic Test Configuration File" disable_copy="false" collapsible="true" >}} +{ + "tests": [ + { + "id": "" + }, + { + "id": "" + } + ] +} +{{< /code-block >}} + +#### Additional configuration + +The default configurations used for the tests are the original tests' configurations, which are visible in the UI or by [getting your tests' configurations from the API][4]. + +However, in the context of your CI deployment, you may decide to override some or all of your test parameters with the overrides below. To define overrides for all of your tests, set the same parameters at the [global configuration file](#setup-a-client) level. + +`allowInsecureCertificates` +: **Type**: boolean
+Disable certificate checks in HTTP tests. + +`basicAuth` +: **Type**: object
+Credentials to provide in case a basic authentication is encountered in HTTP or browser tests. + - `username`: string. Username to use in basic authentication. + - `password`: string. Password to use in basic authentication. + +`body` +: **Type**: string
+Data to send in HTTP tests. + +`bodyType` +: **Type**: string
+Type of the data sent in HTTP tests. + +`cookies` +: **Type**: string
+Use the provided string as the cookie header in HTTP or browser tests.
+
+`deviceIds`
+: **Type**: array&#13;
+List of devices on which to run the browser test. + +`followRedirects` +: **Type**: boolean
+Indicates whether to follow redirects in HTTP tests.
+
+`headers`
+: **Type**: object&#13;
+Headers to replace in the HTTP or browser test. This object should contain the names of the headers to replace as keys and the new header values as values.
+
+`locations`
+: **Type**: array&#13;
+List of locations from which the test runs. + +`retry` +: **Type**: object
+Retry policy for the test. + - `count`: integer. Number of attempts to perform in case of test failure. + - `interval`: integer. Interval between the attempts (in milliseconds). + +`executionRule` +: **Type**: string
+Execution rule of the test that defines the CLI behavior in case of a failing test: + - `blocking`: The CLI returns an error if the test fails. + - `non_blocking`: The CLI only prints a warning if the test fails. + - `skipped`: The test is not executed at all. + +`startUrl` +: **Type**: string
+New start URL to provide to the HTTP or browser test. + +`variables` +: **Type**: object
+Variables to replace in the test. This object should contain the names of the variables to replace as keys and the new variable values as values.
+
+`pollingTimeout`
+: **Type**: integer&#13;
+The duration in milliseconds after which `datadog-ci` stops polling for test results. The default is 120,000 ms. At the CI level, test results completed after this duration are considered failed. + +**Note**: The test's overrides take precedence over global overrides. + +{{< code-block lang="json" filename="Advanced Test Configuration File" disable_copy="false" collapsible="true" >}} +{ + "tests": [ + { + "id": "", + "config": { + "allowInsecureCertificates": true, + "basicAuth": { "username": "test", "password": "test" }, + "body": "{\"fakeContent\":true}", + "bodyType": "application/json", + "cookies": "name1=value1;name2=value2;", + "deviceIds": ["laptop_large"], + "followRedirects": true, + "headers": { "": "" }, + "locations": ["aws:us-west-1"], + "retry": { "count": 2, "interval": 300 }, + "executionRule": "skipped", + "startUrl": "{{URL}}?static_hash={{STATIC_HASH}}", + "variables": { "titleVariable": "new value" }, + "pollingTimeout": 180000 + } + } + ] +} + +{{< /code-block >}} + +#### Execution rule + +Use the drop-down menu next to **CI Execution** to define the execution rule for each test at the test level. + +{{< img src="synthetics/cicd_integrations/execution_rule.mp4" alt="CI Execution Rule" video="true" width="100%">}} + +The execution rule associated with the test is the most restrictive one in the configuration file. The options range from most to least restrictive: `skipped`, `non_blocking`, and `blocking`. For example, if your test is configured as `skipped` in the UI but `blocking` in the configuration file, it is `skipped` when your test runs. + +#### Starting URL + +`URL` +: Test's original starting URL
+**Example**: `https://www.example.org:81/path/to/something?abc=123#target` + +`DOMAIN` +: Test's domain name
+**Example**: `example.org` + +`HASH` +: Test's hash
+**Example**: `#target` + +`HOST` +: Test's host
+**Example**: `www.example.org:81` + +`HOSTNAME` +: Test's hostname
+**Example**: `www.example.org` + +`ORIGIN` +: Test's origin
+**Example**: `https://www.example.org:81` + +`PARAMS` +: Test's query parameters
+**Example**: `?abc=123`
+
+`PATHNAME`
+: Test's URL path&#13;
+**Example**: `/path/to/something` + +`PORT` +: Test's host port
+**Example**: `81` + +`PROTOCOL` +: Test's protocol
+**Example**: `https:`
+
+`SUBDOMAIN`
+: Test's subdomain&#13;
+**Example**: `www` + +Whether you use Synthetic tests to control your CI/CD deployments in production or staging, you can run Synthetic tests against a generated staging URL instead of in production by setting local environment variables in your test's starting URL. + +To trigger an existing Synthetics test on a staging endpoint instead of in production, set the `$SUBDOMAIN` environment variable to `staging-example` and the `$PORT` environment variable to a port used for staging. Your Synthetic tests run against the generated staging URL instead of running in production. + +For example, you can write `https://app.datadoghq.com/synthetics/details/abc-123-zyx?live=1h#test-results` as: + +* `{{PROTOCOL}}//{{SUBDOMAIN}}.{{DOMAIN}}:{{PORT}}{{PATHNAME}}{{PARAMS}}{{HASH}}` +* `{{PROTOCOL}}//{{HOST}}{{PATHNAME}}{{PARAMS}}{{HASH}}` +* `{{URL}}` + +**Note:** If you have environment variables with names corresponding to one of the reserved variables above, your environment variables are ignored and replaced with the corresponding component parsed from your test's `startUrl`. + +### Run tests + +You can decide to have the CLI auto-discover all your `**/*.synthetics.json` Synthetic tests (or all the tests associated to the path specified in your [global configuration file](#setup-a-client)) or to specify the tests you want to run using the `-p,--public-id` flag. + +Run tests by executing the CLI: + +{{< tabs >}} +{{% tab "Yarn" %}} + +```bash +yarn datadog-ci synthetics run-tests +``` + +**Note**: If you are launching your tests with a custom global configuration file, append your command with `--config }} + +### Use the testing tunnel + +The [@datadog/datadog-ci][1] NPM package also comes with secure tunnelling, allowing you to trigger Synthetic tests on your internal applications. 
+ +The testing tunnel creates an end-to-end encrypted HTTP proxy between your infrastructure and Datadog that allows all test requests sent through the CLI to be automatically routed through the `datadog-ci` client. + +For more information, see [Testing Tunnel][3]. + +### Visualize test results + +#### In your CI + +You can see the outcome of test executions directly in your CI as your tests are being executed. + +{{< img src="synthetics/cicd_integrations/successful_test_result.png" alt="Successful Test Result" style="width:100%;">}} + +You can identify what caused a test to fail by looking at the execution logs and searching for causes of the failed assertion: + +{{< img src="synthetics/cicd_integrations/failed_test_result.png" alt="Failed Test Result" style="width:100%;">}} + +#### In the Datadog application + +You can also see your CI test results listed in the [CI Results Explorer][5] and on test details pages: + +{{< img src="synthetics/ci_results_explorer/ci_results_explorer.png" alt="CI Results Explorer" style="width:100%;">}} + +## Further Reading + +{{< partial name="whats-next/whats-next.html" >}} + +[1]: https://www.npmjs.com/package/@datadog/datadog-ci +[2]: https://github.com/TooTallNate/node-proxy-agent +[3]: /synthetics/testing_tunnel/ +[4]: /api/latest/synthetics/#get-a-test-configuration +[5]: /synthetics/ci_results_explorer diff --git a/content/en/synthetics/cicd_integrations/github_actions.md b/content/en/synthetics/cicd_integrations/github_actions.md deleted file mode 100644 index e69de29bb2d1d..0000000000000 diff --git a/content/en/synthetics/cicd_integrations/jenkins.md b/content/en/synthetics/cicd_integrations/jenkins.md index e69de29bb2d1d..95c5f8ba44a5d 100644 --- a/content/en/synthetics/cicd_integrations/jenkins.md +++ b/content/en/synthetics/cicd_integrations/jenkins.md @@ -0,0 +1,101 @@ +--- +title: Jenkins +kind: documentation +description: Configure your Jenkins instance to run Synthetic tests in your CI/CD pipelines. 
+further_reading: +- link: "/continuous_integration/setup_pipelines/jenkins/" + tag: "Documentation" + text: "Set up Tracing on a Jenkins Pipeline" +--- + +## Overview + +Add Synthetic tests to your Jenkins environment. + +Datadog recommends discussing with your SRE and Infrastructure teams to determine a solution that models your existing Jenkins architecture and customizes an installation that fits your business requirements. + +## Setup + +To use Docker in your Jenkins environment, see [Using Docker with Pipeline][1]. + +### Prerequisites + +* Node.js v10.24.1+ +* A global JSON configuration file uploaded to your Jenkins instance through the [Config File Provider][2]. You need this file to define the global properties for your Synthetics test setup. + +You can store environment variables directly within the global configuration file or [use credentials][3]. For more information about test configurations, see [Configure tests][4]. + +### Run the `@datadog/datadog-ci` package + +Install and run the Node.js and npm packages within your Jenkins environment with the Jenkins Node.js plugin. + +For more information about the existing Datadog-Jenkins integration, see [Set up Tracing on a Jenkins Pipeline][5]. + +### Add a NodeJS installation + +Navigate to the global Jenkins Configuration panel and add a Node.js installation. + +{{< img src="synthetics/cicd_integrations/jenkins/nodejs-installation.png" alt="NodeJS Installations in Jenkins" style="width:80%;">}} + +Install `@datadog/datadog-ci` globally for all relevant Node.js installations. 
+ +#### Tags + +To run Synthetic tests with tags in a Jenkins Declarative pipeline: + +{{< code-block lang="groovy" disable_copy="false" collapsible="true" >}} +pipeline { + agent any + stages { + stage('Run e2e tests') { + steps { + withCredentials([string(credentialsId: 'datadog-api-key', variable: 'DATADOG_API_KEY'), string(credentialsId: 'datadog-app-key', variable: 'DATADOG_APP_KEY')]) { + nodejs(nodeJSInstallationName: 'Node 10.24.x') { + configFileProvider( + [configFile(fileId: 'config-file-id', variable: 'DATADOG_CI_CONFIG')]) { + sh 'datadog-ci synthetics run-tests -s "tag:e2e" --config $DATADOG_CI_CONFIG' + } + } + } + } + } + } +{{< /code-block >}} + +#### Custom test file + +To run Synthetic tests with a custom test file in a Jenkins Declarative pipeline: + +{{< code-block lang="groovy" disable_copy="false" collapsible="true" >}} +pipeline { + agent any + stages { + stage('Run e2e tests') { + steps { + withCredentials([string(credentialsId: 'datadog-api-key', variable: 'DATADOG_API_KEY'), string(credentialsId: 'datadog-app-key', variable: 'DATADOG_APP_KEY')]) { + nodejs(nodeJSInstallationName: 'Node 10.24.x') { + configFileProvider( + [configFile(fileId: 'config-file-id', variable: 'DATADOG_CI_CONFIG'), configFile(fileId: 'test-file-id', variable: 'DATADOG_CI_TEST_FILE')]) { + sh 'datadog-ci synthetics run-tests -f $DATADOG_CI_TEST_FILE --config $DATADOG_CI_CONFIG' + } + } + } + } + } + } +} +{{< /code-block >}} + +You can expect the following output: + +{{< img src="synthetics/cicd_integrations/jenkins/example-test-run.png" alt="Example Test Run in Jenkins" style="width:80%;">}} + +## Further Reading + +{{< partial name="whats-next/whats-next.html" >}} + +[1]: https://www.jenkins.io/doc/book/pipeline/docker/#using-docker-with-pipeline +[2]: https://plugins.jenkins.io/config-file-provider/ +[3]: https://www.jenkins.io/doc/book/using/using-credentials/#adding-new-global-credentials +[4]: /synthetics/cicd_integrations/configuration#configure-tests +[5]: 
/continuous_integration/setup_pipelines/jenkins/ diff --git a/content/en/synthetics/testing_tunnel.md b/content/en/synthetics/testing_tunnel.md index 95e32db26aa24..4a42418f853a9 100644 --- a/content/en/synthetics/testing_tunnel.md +++ b/content/en/synthetics/testing_tunnel.md @@ -1,12 +1,12 @@ --- title: Synthetic Testing Tunnel kind: documentation -description: Local and CI/CD testing with Datadog Synthetic testing tunnel. +description: Local and remote CI/CD testing with Datadog's Synthetic testing tunnel. further_reading: - link: "https://www.datadoghq.com/blog/datadog-synthetic-ci-cd-testing/" tag: "Blog" text: "Incorporate Datadog Synthetic tests into your CI/CD pipeline" -- link: "https://learn.datadoghq.com/course/view.php?id=37" +- link: "https://learn.datadoghq.com/enrol/index.php?id=37" tag: "Learning Center" text: "Learn how to run Synthetic tests in CI/CD pipelines" - link: "https://www.datadoghq.com/blog/internal-application-testing-with-datadog/" diff --git a/content/en/synthetics/troubleshooting/_index.md b/content/en/synthetics/troubleshooting/_index.md index 8511622234521..fffb3cb0b63d9 100644 --- a/content/en/synthetics/troubleshooting/_index.md +++ b/content/en/synthetics/troubleshooting/_index.md @@ -26,29 +26,29 @@ If you see a sudden spike or overall increase in your API test [timing metrics][ ### Recording -#### My website is not loading in the iframe +#### The website is not loading in the iframe -After downloading the [Datadog extension][4], you are unable to see your website in the iframe on the right side of your Browser test's recorder and the iframe displays `Your website does not support being loaded through an iframe.`. This could mean that your application has some settings preventing it from being opened in an iframe. If that is the case, try opening your website in a pop up by clicking **Open in Popup** to record your journey. 
+After downloading the [Datadog extension][4], you are unable to see your website in the iframe on the right side of your Browser test's recorder and the iframe displays `Your website does not support being loaded through an iframe.`. This could mean that your application has some settings preventing it from being opened in an iframe. If that is the case, try opening your website in a popup by clicking **Open in Popup** to record your journey. -#### Some of my applications load in the iframe but some do not +#### Some applications load in the iframe but some do not This means your applications and environments have different restrictions, which causes some of them to be visualized in an iframe while the others are not viewable. #### I'm seeing a "We've detected HTTP requests that are not supported inside the iframe, you may need to record in a popup" banner at the top of the iframe -This most likely means you are trying to record steps on an `http` page. Only `https` is supported in the recorder iframe. You should open your page as a pop up or change your URL to an `https` one to start recording on the page. +This most likely means you are trying to record steps on an `http` page. Only `https` is supported in the recorder iframe. You should open your page as a popup or change your URL to an `https` one to start recording on the page. {{< img src="synthetics/http_iframe.png" alt="HTTP in iframe" style="width:100%;" >}} -#### My website is not loading in the iframe and I cannot record any steps, even when opening my website in a pop up +#### My website is not loading in the iframe and I cannot record any steps, even when opening my website in a popup -After downloading the [Datadog extension][4], you are unable to see your website in the iframe on the right side of your Browser test's recorder. 
Additionally, you cannot record any steps, regardless of whether you open your website in the iframe or in a pop up: +After downloading the [Datadog extension][4], you are unable to see your website in the iframe on the right side of your Browser test's recorder. Additionally, you cannot record any steps, regardless of whether you open your website in the iframe or in a popup: -{{< img src="synthetics/recording_iframe.mp4" alt="Issues recording Browser test steps" video="true" width="100%" >}} +{{< img src="synthetics/recording_iframe.mp4" alt="Issues recording Browser test steps" video="true" width="100%" >}} If that happens, ensure the [Datadog extension][5] has the permissions to read and change data on the intended websites by specifying your website in the `On specific sites` section or by toggling `On all sites`: -{{< img src="synthetics/extension.mp4" alt="Allowing extension to read data on all sites" video="true" width="100%" >}} +{{< img src="synthetics/extension.mp4" alt="Allowing extension to read data on all sites" video="true" width="100%" >}} #### I'm unable to record steps on my application @@ -56,15 +56,15 @@ Your Chrome browser might have some policies preventing the extension from perfo #### I don't see the login page in the recorder. What is happening? -By default, the iframe/pop up of the recorder uses your own browser. This means that if you’re already logged into your application, the iframe/pop up might directly display a post login page, therefore preventing you from recording your login steps without logging out first. +By default, the iframe/popup of the recorder uses your own browser. This means that if you’re already logged into your application, the iframe/popup might directly display a post login page, therefore preventing you from recording your login steps without logging out first. 
To be able to record your steps without logging out from your application, just leverage the recorder’s **incognito mode**: -{{< img src="synthetics/incognito_mode.mp4" alt="Using Incognito Mode Browser Tests" video="true" width="100%" >}} +{{< img src="synthetics/incognito_mode.mp4" alt="Using Incognito Mode Browser Tests" video="true" width="100%" >}} -**Opening a pop up in incognito mode** allows you to start your test’s recording from the start URL set in your test configuration with a session completely isolated from your own browser's main session and user data. +**Opening a popup window in incognito mode** allows you to start your test’s recording from the start URL set in your test configuration with a session completely isolated from your own browser's main session and user data. -The freshly opened incognito pop up ignores all your previous browser history: cookies, local data, etc. You are consequently automatically logged out from your account and can start recording your login steps as if you were visiting your website for the first time. +This incognito popup window ignores your previous browser history including cookies and local data. You are automatically logged out from your account and can start recording your login steps as if you were visiting your website for the first time. ### Test results @@ -91,7 +91,7 @@ One of your Browser test steps is showing a `None or multiple elements detected` This means that the user locator defined for that step is either targeting several elements, or none of them, consequently preventing the Browser test from knowing which element needs to be interacted with. To fix it, go edit your recording, open the advanced options of the step that is having the issue, go to the page the step is testing, and click on `Test`. This highlights the located element or prints an error message. 
You can then go ahead and fix your user locator to have it match a single element of the page: -{{< img src="synthetics/fix_user_locator.mp4" alt="Fixing User Locator error" video="true" width="100%" >}} +{{< img src="synthetics/fix_user_locator.mp4" alt="Fixing User Locator error" video="true" width="100%" >}} ## API and browser tests @@ -116,11 +116,11 @@ Additionally, you might also have to ensure [Datadog Synthetic Monitoring IP ran ### Missing notifications -Synthetic tests by default do not [renotify][12]. This means that if you add your notification handle (email address, Slack handle, etc.) after a transition got generated (for example, a test going into alert or recovering from a previous alert), no notification is sent for that transition. A notification is sent for the next transition. +Synthetic tests by default do not [renotify][12]. This means that if you add your notification handle such as your email address or Slack handle after a transition is generated (for example: a test going into alert or recovering from a previous alert), a notification is not sent for that transition. A notification is sent for the next transition. ## Private locations -### My private location containers sometimes get killed `OOM`. +### My private location containers sometimes get killed `OOM` Private location containers getting killed `Out Of Memory` generally uncover a resource exhaustion issue on your private location workers. Make sure your private location containers are provisioned with [sufficient memory resources][13]. @@ -136,15 +136,17 @@ This could uncover a resource exhaustion issue on your private locations workers This might mean your private location is unable to reach the endpoint your API test is set to run on. Confirm that the private location is installed in the same network as the endpoint you are willing to test. You can also try to run your test on different endpoints to see if you get the same `TIMEOUT` error or not. 
-{{< img src="synthetics/timeout.png" alt="API test on private location timing out" style="width:100%;" >}} +{{< img src="synthetics/timeout.png" alt="API test on private location timing out" style="width:70%;" >}} -### I’m seeing `invalid mount config for type "bind": source path must be a directory` when attempting to run a private location. +### I’m seeing `invalid mount config for type "bind": source path must be a directory` when attempting to run a private location This occurs when you attempt to mount a single file in a Windows-based container, which is not supported. For more information, see the [Docker mount volume documentation][14]. Ensure that the source of the bind mount is a local directory. -## CI/CD Testing +## Synthetics and CI/CD -### I'm not seeing any CI metadata in my CI Results Explorer +## CI Results Explorer + +### CI metadata does not appear in the CI Results Explorer Check whether you are using API endpoints to trigger your CI/CD test runs. To have your CI Results Explorer populate with CI metadata, you must use the [NPM package][15]. 
diff --git a/local/bin/py/build/configurations/pull_config.yaml b/local/bin/py/build/configurations/pull_config.yaml index ffb5126fa69c9..74b9951f5b866 100644 --- a/local/bin/py/build/configurations/pull_config.yaml +++ b/local/bin/py/build/configurations/pull_config.yaml @@ -272,6 +272,20 @@ aliases: - /serverless/datadog_lambda_library/ - /serverless/serverless_integrations/cli/ + + - repo_name: synthetics-ci-github-action + contents: + - action: pull-and-push-file + branch: main + globs: + - 'README.md' + options: + dest_path: '/synthetics/cicd_integrations/' + file_name: 'github_actions.md' + front_matters: + dependencies: ["https://github.com/DataDog/synthetics-ci-github-action/blob/main/README.md"] + title: Synthetics and CI GitHub Actions + kind: documentation - repo_name: dd-sdk-android contents: diff --git a/local/bin/py/build/configurations/pull_config_preview.yaml b/local/bin/py/build/configurations/pull_config_preview.yaml index 301237e472176..aa7d48d1d10c8 100644 --- a/local/bin/py/build/configurations/pull_config_preview.yaml +++ b/local/bin/py/build/configurations/pull_config_preview.yaml @@ -273,7 +273,21 @@ aliases: - /serverless/datadog_lambda_library/ - /serverless/serverless_integrations/cli/ - + + - repo_name: synthetics-ci-github-action + contents: + - action: pull-and-push-file + branch: main + globs: + - 'README.md' + options: + dest_path: '/synthetics/cicd_integrations/' + file_name: 'github_actions.md' + front_matters: + dependencies: ["https://github.com/DataDog/synthetics-ci-github-action/blob/main/README.md"] + title: Synthetics CI GitHub Actions + kind: documentation + - repo_name: dd-sdk-android contents: - action: pull-and-push-file